| | import glob |
| | import json |
| | import multiprocessing |
| | import shutil |
| | import subprocess |
| | import time |
| | from dataclasses import dataclass |
| | from typing import Optional |
| | import os |
| |
|
| | import boto3 |
| |
|
| |
|
| | from glob import glob |
| |
|
| | import argparse |
| |
|
# ---------------------------------------------------------------------------
# Command-line interface for the distributed Blender rendering launcher.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description='distributed rendering')

parser.add_argument('--workers_per_gpu', type=int, default=10,
                    help='number of workers per gpu.')
parser.add_argument('--input_models_path', type=str, default='/data/lipeng/human_scan/',
                    help='Path to a json file containing a list of 3D object files.')
parser.add_argument('--num_gpus', type=int, default=-1,
                    help='number of gpus to use. -1 means all available gpus.')
parser.add_argument('--gpu_list', nargs='+', type=int,
                    help='the available gpu ids')  # fixed typo: "avalaible"
parser.add_argument('--resolution', type=int, default=512,
                    help='image resolution forwarded to the Blender script.')
parser.add_argument('--random_images', type=int, default=0,
                    help='forwarded to the Blender script as --random_images.')
parser.add_argument('--start_i', type=int, default=0,
                    help='the index of first object to be rendered.')
parser.add_argument('--end_i', type=int, default=-1,
                    help='the index of the last object to be rendered (-1 = to the end).')
# help below was a copy-pasted "json file" string; it is actually the mesh root dir
parser.add_argument('--data_dir', type=str, default='/data/lipeng/human_scan/',
                    help='root directory that contains the scan meshes.')
parser.add_argument('--json_path', type=str, default='2K2K.json',
                    help='json file listing the objects to render.')
# help below was a copy-pasted "json file" string; it is actually the output root
parser.add_argument('--save_dir', type=str, default='/data/lipeng/human_8view',
                    help='directory the rendered views are written to.')
parser.add_argument('--ortho_scale', type=float, default=1.,
                    help='ortho rendering usage; how large the object is')

args = parser.parse_args()
| |
|
def parse_obj_list(xs, save_dir=None):
    """Map raw mesh paths to per-case output directories.

    Each path is matched against the known dataset layouts (first match
    wins, same order as before) and collapsed to ``<dataset>/<case_id>``;
    the result is prefixed with the output root. Paths matching no rule
    pass through unchanged.

    Args:
        xs: iterable of mesh file paths.
        save_dir: output root. Defaults to ``args.save_dir``; kept as a
            keyword argument so the function is usable without the CLI.

    Returns:
        list[str]: one output directory per input path, in order.
    """
    if save_dir is None:
        save_dir = args.save_dir

    # marker in path -> (output dataset name, case-id extractor on '/'-split parts)
    # NOTE: paths containing '1M' belong to the 2K2K dataset in this pipeline.
    rules = [
        ('THuman3.0', 'THuman3.0', lambda parts: parts[-2]),
        ('THuman2.1', 'THuman2.1', lambda parts: parts[-2]),
        ('CustomHumans', 'CustomHumans', lambda parts: parts[-2]),
        ('1M', '2K2K', lambda parts: parts[-2]),
        ('realistic_8k_model', 'realistic_8k_model',
         lambda parts: parts[-1].split('.')[0]),
    ]

    cases = []
    for x in xs:
        for marker, dataset, case_id in rules:
            if marker in x:
                x = os.path.join(dataset, case_id(x.split('/')))
                break
        cases.append(f'{save_dir}/{x}')
    return cases
| | |
| |
|
# Load the manifest of objects to render.
with open(args.json_path) as manifest:
    glb_list = json.load(manifest)

# Total number of objects available (before any start_i/end_i slicing).
total_num_glbs = len(glb_list)
| |
|
| |
|
| |
|
def worker(
    queue: multiprocessing.JoinableQueue,
    count: multiprocessing.Value,
    gpu: int,
    s3: "Optional[boto3.client]",  # quoted so the def evaluates without boto3
) -> None:
    """Queue consumer: pops ``[case, save_dir]`` jobs and renders each with Blender.

    Runs until a ``None`` sentinel is received (the parent enqueues one
    sentinel per worker). Every item -- including the sentinel -- is
    acknowledged with ``task_done`` so ``queue.join()`` in the parent
    can complete.

    Args:
        queue: joinable queue of ``[case, save_dir]`` jobs or ``None``.
        count: shared counter, incremented once per finished render.
        gpu: gpu id assigned to this worker.
            NOTE(review): currently unused -- presumably meant to pin the
            Blender process to a device; confirm before relying on it.
        s3: optional boto3 client; unused here (uploads are disabled).
    """
    print("Worker started")
    while True:
        item = queue.get()
        # None is the shutdown sentinel enqueued by the parent.
        if item is None:
            queue.task_done()
            break
        case, save_p = item

        src_path = os.path.join(args.data_dir, case)
        # the smplx fit lives in a parallel tree: .../mesh/... -> .../smplx/...
        smpl_path = src_path.replace('mesh', 'smplx', 1)

        command = ('blender -b -P blender_render_human_ortho.py'
                   f' -- --object_path {src_path}'
                   f' --smpl_path {smpl_path}'
                   f' --output_dir {save_p} --engine CYCLES'
                   f' --resolution {args.resolution}'
                   f' --random_images {args.random_images}'
                   )

        print(command)
        # NOTE(review): shell=True with interpolated paths -- paths containing
        # spaces or shell metacharacters would break/injection; inputs here are
        # trusted local dataset paths.
        subprocess.run(command, shell=True)

        with count.get_lock():
            count.value += 1

        queue.task_done()
| | |
| |
|
if __name__ == "__main__":
    # s3 upload is disabled; the argument is kept for `worker`'s interface.
    s3 = None
    queue = multiprocessing.JoinableQueue()
    count = multiprocessing.Value("i", 0)

    # Resolve the gpu set up front. The original code ran range(args.num_gpus)
    # directly: with the default num_gpus=-1 that spawned ZERO workers and
    # queue.join() below hung forever; with --gpu_list absent it crashed on
    # args.gpu_list[gpu_i]. Fail loudly instead.
    if not args.gpu_list:
        raise ValueError('--gpu_list must list the gpu ids to use')
    if args.num_gpus < 0:
        # -1 means "all available": the explicit --gpu_list is our source of truth.
        args.num_gpus = len(args.gpu_list)
    elif args.num_gpus > len(args.gpu_list):
        raise ValueError('--num_gpus exceeds the number of ids in --gpu_list')

    # Spawn workers_per_gpu daemon processes per gpu.
    for gpu_i in range(args.num_gpus):
        for _ in range(args.workers_per_gpu):
            process = multiprocessing.Process(
                target=worker, args=(queue, count, args.gpu_list[gpu_i], s3)
            )
            process.daemon = True
            process.start()

    save_dirs = parse_obj_list(glb_list)
    # Clamp end_i to the number of objects; -1 means "render to the end".
    if args.end_i == -1 or args.end_i > len(save_dirs):
        args.end_i = len(save_dirs)

    for case_sub, save_dir in zip(glb_list[args.start_i:args.end_i],
                                  save_dirs[args.start_i:args.end_i]):
        queue.put([case_sub, save_dir])

    # Block until every queued render has been acknowledged via task_done().
    queue.join()

    # Signal shutdown: one sentinel per spawned worker.
    for _ in range(args.num_gpus * args.workers_per_gpu):
        queue.put(None)
| |
|