|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
'''---compulsory---''' |
|
import hoho; hoho.setup() |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import webdataset as wds |
|
from tqdm import tqdm |
|
from typing import Dict |
|
import pandas as pd |
|
from transformers import AutoTokenizer |
|
import os |
|
import time |
|
import io |
|
from PIL import Image as PImage |
|
import numpy as np |
|
|
|
from hoho.read_write_colmap import read_cameras_binary, read_images_binary, read_points3D_binary |
|
from hoho import proc, Sample |
|
|
|
def convert_entry_to_human_readable(entry):
    """Decode a raw webdataset entry into Python-native objects.

    Keys in the pass-through set are copied unchanged; COLMAP binary
    payloads (``points3d``, ``cameras``, ``images``) are parsed with the
    hoho readers, and image byte blobs are opened with PIL.  Any key that
    matches none of the known names is dropped from the result.

    Parameters:
        entry (dict): raw sample as yielded by the webdataset loader
            (values for the decodable keys are raw bytes).

    Returns:
        dict: the decoded, human-readable version of the entry.
    """
    passthrough = {
        '__key__', 'wf_vertices', 'wf_edges', 'edge_semantics',
        'mesh_vertices', 'mesh_faces', 'face_semantics', 'K', 'R', 't',
    }
    decoded = {}
    for key, value in entry.items():
        if key in passthrough:
            decoded[key] = value
        elif key == 'points3d':
            decoded[key] = read_points3D_binary(fid=io.BytesIO(value))
        elif key == 'cameras':
            decoded[key] = read_cameras_binary(fid=io.BytesIO(value))
        elif key == 'images':
            decoded[key] = read_images_binary(fid=io.BytesIO(value))
        elif key in ('ade20k', 'gestalt'):
            # Segmentation renders: force RGB so downstream code sees 3 channels.
            decoded[key] = [PImage.open(io.BytesIO(blob)).convert('RGB') for blob in value]
        elif key == 'depthcm':
            # Depth maps keep their native mode (no RGB conversion).
            decoded[key] = [PImage.open(io.BytesIO(blob)) for blob in value]
    return decoded
|
|
|
'''---end of compulsory---''' |
|
|
|
|
|
|
|
def save_submission(submission, path): |
|
""" |
|
Saves the submission to a specified path. |
|
|
|
Parameters: |
|
submission (List[Dict[]]): The submission to save. |
|
path (str): The path to save the submission to. |
|
""" |
|
sub = pd.DataFrame(submission, columns=["__key__", "wf_vertices", "wf_edges", "edge_semantics"]) |
|
sub.to_parquet(path) |
|
print(f"Submission saved to {path}") |
|
|
|
if __name__ == "__main__":
    # fix: `Path` was used below but never imported anywhere in the file,
    # which made the final save_submission() call raise NameError.
    from pathlib import Path
    from concurrent.futures import ProcessPoolExecutor

    from handcrafted_solution import predict

    print("------------ Loading dataset------------ ")
    params = hoho.get_params()
    dataset = hoho.get_dataset(decode=None, split='all', dataset_type='webdataset')

    print('------------ Now you can do your solution ---------------')
    solution = []
    with ProcessPoolExecutor(max_workers=8) as pool:
        # Keep each sample's key next to its future so every submission row
        # can be matched back to its scene after the parallel fan-out.
        # fix: the original chained assignment
        #   key = pred_vertices, pred_edges, semantics = result.result()
        # bound `key` to the whole prediction tuple, so the `__key__`
        # column contained tuples instead of scene identifiers.
        # NOTE(review): assumes every webdataset sample carries a
        # '__key__' entry — confirm against the dataset loader.
        futures = []
        for sample in tqdm(dataset):
            futures.append((sample['__key__'], pool.submit(predict, sample, visualize=False)))

        for i, (key, future) in enumerate(tqdm(futures)):
            pred_vertices, pred_edges, semantics = future.result()
            solution.append({
                '__key__': key,
                'wf_vertices': pred_vertices.tolist(),
                'wf_edges': pred_edges,
                'edge_semantics': semantics,
            })
            # Periodic progress heartbeat for long runs.
            if i % 100 == 0:
                print(f"Processed {i} samples")

    print('------------ Saving results ---------------')
    save_submission(solution, Path(params['output_path']) / "submission.parquet")
    print("------------ Done ------------ ")
|
|