import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
import json
import argparse
import numpy as np
from retrieval.obj_retrieval import find_obj_candidates, pick_and_rescale_parts
import trimesh
import shutil
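
# Assembles meshes for a generated articulated object: part meshes are retrieved
# from a ground-truth dataset (via retrieval.obj_retrieval), rescaled and placed
# according to each part's bounding box, exported as per-part OBJs, and merged
# into object.ply inside --src_dir.
#
# Example invocation (the script and directory names below are assumptions):
#   python retrieve_part_meshes.py --src_dir outputs/object_0 --gt_data_root data/gt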

def _retrieve_part_meshes(info_dict, save_dir, gt_data_root):
    """Retrieve, rescale, and export part meshes for the object in info_dict.

    Writes per-part OBJ files and a combined object.ply under save_dir and
    returns the updated info_dict, or None if the object was already processed.
    """
    mesh_save_dir = os.path.join(save_dir, "plys")
    obj_save_dir = os.path.join(save_dir, "objs")
    os.makedirs(mesh_save_dir, exist_ok=True)
    os.makedirs(obj_save_dir, exist_ok=True)
    print(f"Retrieving part meshes for {save_dir}")

    # Skip objects that have already been assembled.
    if os.path.exists(os.path.join(save_dir, "object.ply")):
        return None

    # Hash book used by the retrieval module.
    HASHBOOK_PATH = "retrieval/retrieval_hash_no_handles.json"

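    # Find candidate ground-truth objects whose parts can be retrieved for this
    # object; keep_top bounds the number of candidates kept.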
    obj_candidates = find_obj_candidates(
        info_dict,
        gt_data_root,
        HASHBOOK_PATH,
        gt_file_name="object.json",
        num_states=5,
        metric_num_samples=4096,
        keep_top=3,
    )
    
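    # For each part in the diffuse_tree, pick a candidate mesh and the scale
    # factor needed to fit the part's bounding box.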
    retrieved_mesh_specs = pick_and_rescale_parts(
        info_dict, obj_candidates, gt_data_root, gt_file_name="object.json"
    )

    scene = trimesh.Scene()
    for i, mesh_spec in enumerate(retrieved_mesh_specs):
        part_spec = info_dict["diffuse_tree"][i]
        current_part_meshes = []
        file_paths = []
        for file_name in mesh_spec["files"]:
            # Load the OBJ version of the retrieved mesh: the blanket str.replace
            # swaps every "ply" in the joined path for "obj" (directory name and
            # file extension alike).
            mesh_path = os.path.join(mesh_spec["dir"], file_name).replace("ply", "obj")
            file_paths.append(mesh_path)
            m = trimesh.load(mesh_path, force="mesh")
            current_part_meshes.append(m)

        # Skip parts for which no mesh was retrieved.
        if not current_part_meshes:
            continue

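        # Center of the combined axis-aligned bounding box of this part's meshes,
        # used to recenter the geometry at the origin before transforming.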
        bounds = np.array([m.bounds for m in current_part_meshes])
        min_extents = bounds[:, 0, :].min(axis=0)
        max_extents = bounds[:, 1, :].max(axis=0)
        group_centroid = (min_extents + max_extents) / 2.0

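        # Scale the retrieved mesh to the part's size, optionally rotate it
        # 90 degrees about z, and translate it to the part's AABB center.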
        transformation = trimesh.transformations.compose_matrix(
            scale=mesh_spec["scale_factor"],
            angles=[0, 0, np.radians(90) if mesh_spec["z_rotate_90"] else 0],
            translate=part_spec["aabb"]["center"],
        )

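        # Recenter each mesh, apply the part transform, and add it to both the
        # per-part scene and the combined object scene.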
        part_scene = trimesh.Scene()
        for mesh in current_part_meshes:
            mesh.vertices -= group_centroid
            mesh.apply_transform(transformation)
            part_scene.add_geometry(mesh)
            scene.add_geometry(mesh)
            
        obj_path = os.path.join(obj_save_dir, f"part_{i}/part_{i}.obj")
        os.makedirs(os.path.dirname(obj_path), exist_ok=True)
        part_scene.export(obj_path, include_texture=True)
        info_dict["diffuse_tree"][i]["objs"] = [f"objs/part_{i}/part_{i}.obj"]

    # Export the assembled object alongside the per-part OBJs, and return the
    # updated info_dict so the caller can record the new part paths.
    scene.export(os.path.join(save_dir, "object.ply"))
    return info_dict

def main(args):
    with open(os.path.join(args.src_dir, args.json_name), "r") as f:
        info_dict = json.load(f)

    # Fall back to the StorageFurniture category when the json carries no metadata.
    if 'meta' not in info_dict:
        info_dict['meta'] = {
            'obj_cat': 'StorageFurniture'
        }

    updated_json = _retrieve_part_meshes(info_dict, args.src_dir, args.gt_data_root)

    if updated_json is not None:
        with open(os.path.join(args.src_dir, args.json_name), "w") as f:
            json.dump(updated_json, f)

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--src_dir', type=str, required=True, help='path to the directory containing object.json')
    parser.add_argument('--json_name', type=str, default='object.json', help='name of the json file')
    parser.add_argument('--gt_data_root', type=str, default='./', help='path to the ground truth data')
    args = parser.parse_args()
    main(args)