CG Pipelines: User interface refactor

This commit is contained in:
brothermechanic 2023-12-18 07:48:45 +00:00 committed by Igor Brylyov
parent e305d486f2
commit 021e5862ff
7 changed files with 431 additions and 280 deletions

View file

@ -51,6 +51,9 @@ def json_to_blend(js_data, **cg_config):
bobj = None
if js_data[fc_file][js_obj]['type'] == 'LCS':
if not js_obj.endswith(cg_config['lcs_inlet']) and not js_obj.endswith(cg_config['lcs_outlet']):
logger.info('LCS %s is not defined!', js_obj)
continue
bobj = bpy.data.objects.new(js_obj, None)
bobj.empty_display_type = 'ARROWS'
bobj.empty_display_size = round(random.uniform(0.05, 0.15), 3)

View file

@ -28,7 +28,7 @@ from blender.utils.mesh_tools import select_peaks, select_stratched_edges
logger = logging.getLogger(__name__)
def parts_to_shells(part_names, lcs_pipeline, **cg_config):
def parts_to_shells(part_names, **cg_config):
''' Create lowpoly shells from parts collections. '''
logger.info('Lowpoly shells creation launched...')
@ -41,15 +41,14 @@ def parts_to_shells(part_names, lcs_pipeline, **cg_config):
lowpoly_obj = bpy.data.objects.new(lowpoly_name, lowpoly_mesh)
bpy.context.view_layer.update()
lowpoly_col.objects.link(lowpoly_obj)
if lcs_pipeline:
lcs_inlet = bpy.data.objects[part_name].parent
lowpoly_obj.matrix_world = lcs_inlet.matrix_world.copy()
parenting(lcs_inlet, lowpoly_obj)
part_col = bpy.data.collections[
'{}_{}'.format(part_name, cg_config['hightpoly'])]
if bpy.data.objects[part_name].parent:
root_locator = bpy.data.objects[part_name].parent
else:
part_col = bpy.data.collections[cg_config['parts_col_name']]
root_locator = bpy.data.objects[part_name]
lowpoly_obj.matrix_world = root_locator.matrix_world.copy()
parenting(root_locator, lowpoly_obj)
part_col = bpy.data.collections[
'{}_{}'.format(part_name, cg_config['hightpoly'])]
shell_remesher(lowpoly_obj, 'remesh_nodes', 'robossembler')
lowpoly_obj.modifiers['remesh_nodes']['Input_0'] = part_col
@ -66,10 +65,7 @@ def parts_to_shells(part_names, lcs_pipeline, **cg_config):
decimate.ratio = 0.1
# apply all modifiers to mesh
if lcs_pipeline:
parenting(lcs_inlet, mesh_to_mesh(lowpoly_obj))
else:
mesh_to_mesh(lowpoly_obj)
parenting(root_locator, mesh_to_mesh(lowpoly_obj))
# fix non_manifold shape
for lowpoly_obj in lowpoly_col.objects:

View file

@ -27,7 +27,7 @@ from blender.utils.mesh_tools import collect_less_volume_objs
logger = logging.getLogger(__name__)
def hightpoly_collections_to_midpoly(collection_name, part_names, lcs_pipeline, **cg_config):
def hightpoly_collections_to_midpoly(collection_name, part_names, **cg_config):
''' Convert part's collecttions to single objects. '''
logger.info('Midpoly objects creation launched...')
midpoly_obj_names = []
@ -36,10 +36,12 @@ def hightpoly_collections_to_midpoly(collection_name, part_names, lcs_pipeline,
midpoly_mesh = bpy.data.meshes.new(midpoly_name)
midpoly_obj = bpy.data.objects.new(midpoly_name, midpoly_mesh)
bpy.context.view_layer.update()
if lcs_pipeline:
lcs_inlet = bpy.data.objects[part_name].parent
midpoly_obj.matrix_world = lcs_inlet.matrix_world.copy()
parenting(lcs_inlet, midpoly_obj)
if bpy.data.objects[part_name].parent:
root_locator = bpy.data.objects[part_name].parent
else:
root_locator = bpy.data.objects[part_name]
midpoly_obj.matrix_world = root_locator.matrix_world.copy()
parenting(root_locator, midpoly_obj)
midpoly_parts_col = bpy.data.collections[collection_name]
midpoly_parts_col.objects.link(midpoly_obj)
for col in midpoly_parts_col.children:

View file

@ -0,0 +1,251 @@
# coding: utf-8
# Copyright (C) 2023 Ilia Kurochkin <brothermechanic@yandex.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
'''
DESCRIPTION.
Reorganization and restructuring of assembly structure.
'''
__version__ = '0.3'
import logging
import bpy
import mathutils
from blender.utils.object_relations import (parenting,
unparenting)
from blender.utils.object_transforms import round_transforms
from blender.utils.collection_tools import unlink_from_collections
logger = logging.getLogger(__name__)
def hierarchy_assembly(lcs_names, parts_sequence, **cg_config):
    ''' Hierarchy by LCS and Parts Assembling Sequence.

    Rebuilds the imported CAD scene so that every part is rooted in its
    inlet LCS empty, a single root LCS (cloned from the first part's
    inlet) drives the whole assembly, and parts are chained together with
    COPY_TRANSFORMS constraints (created disabled) following the
    assembling sequence.

    Args:
        lcs_names: names of all LCS empties in the scene; inlet/outlet
            role is detected via the cg_config name suffixes.
        parts_sequence: part object names in assembling order; element 0
            defines where the root LCS is created.
        cg_config: pipeline naming conventions ('lcs_inlet', 'lcs_outlet',
            'lcs_root', 'lcs_col_name', 'parts_col_name', 'hightpoly').

    Returns:
        List of collected part locator names on success, otherwise the
        None returned by logger.error() after a precondition failure.
    '''
    # collect scene hierarchy start info
    main_locators = [obj for obj in bpy.data.objects if not obj.parent]
    # split LCS objects into inlet/outlet groups by name suffix
    lcs_inlet_objects = []
    lcs_outlet_objects = []
    for name in lcs_names:
        if name.endswith(cg_config['lcs_inlet']):
            lcs_inlet_objects.append(bpy.data.objects[name])
        else:
            lcs_outlet_objects.append(bpy.data.objects[name])
    # get main_locator: the root locator that owns every LCS object
    main_locator = None
    for locator in main_locators:
        if set(lcs_inlet_objects + lcs_outlet_objects).issubset(
                locator.children_recursive):
            main_locator = locator
    if not main_locator:
        # TODO need checking
        return logger.error('CAD root locator should be parent of all LCS!')
    # check parts_sequence objects in scene
    for part in parts_sequence:
        if not bpy.data.objects.get(part):
            return logger.error('%s part object not found!', part)
    first_part_obj = bpy.data.objects[parts_sequence[0]]
    # create root lcs by parts sequence
    root_lcs = None
    for lcs_inlet in first_part_obj.children:
        # drop non lcs objs
        if lcs_inlet.name not in lcs_names:
            continue
        # drop non inlet objs
        if lcs_inlet.name.endswith(cg_config['lcs_outlet']):
            continue
        # clone the inlet transform into a new root LCS empty
        root_lcs_name = cg_config['lcs_root']
        root_lcs = bpy.data.objects.new(root_lcs_name, None)
        root_lcs.empty_display_type = 'ARROWS'
        root_lcs.empty_display_size = 0.15
        root_lcs.show_in_front = True
        root_lcs.location = lcs_inlet.location
        root_lcs.rotation_euler = lcs_inlet.rotation_euler
        root_lcs.parent = lcs_inlet.parent
        bpy.data.collections[cg_config['lcs_col_name']].objects.link(root_lcs)
        logger.info('Root Inlet LCS object created!')
        # re-root: the root LCS becomes the parent of the CAD main locator
        unparenting(root_lcs)
        round_transforms(root_lcs)
        parenting(root_lcs, main_locator)
    # NOTE(review): root_lcs stays None when the first part exposes no
    # inlet LCS; the code below would then fail — TODO confirm upstream
    # guarantees at least one inlet on the first sequence part.
    # retree_by lcs
    for lcs_inlet_obj in lcs_inlet_objects:
        # lcs inlet: make each inlet LCS the local root of its part
        parent_locator = lcs_inlet_obj.parent
        if not parent_locator:
            return logger.error('LCS %s should have a parent', lcs_inlet_obj.name)
        unparenting(lcs_inlet_obj)
        round_transforms(lcs_inlet_obj)
        # NOTE(review): this guard is always true — the early return above
        # already filtered out inlets without a parent.
        if parent_locator:
            if parent_locator.parent:
                unparenting(parent_locator)
            # invert relation: former parent becomes a child of its LCS
            parenting(lcs_inlet_obj, parent_locator)
        parenting(root_lcs, lcs_inlet_obj)
        # lcs outlet: reparent the matching outlet under this inlet
        lcs_outlet = '{}_{}'.format(
            lcs_inlet_obj.name.rpartition('_')[0], cg_config['lcs_outlet'])
        if bpy.data.objects.get(lcs_outlet):
            lcs_outlet_obj = bpy.data.objects[lcs_outlet]
            unparenting(lcs_outlet_obj)
            round_transforms(lcs_outlet_obj)
            parenting(lcs_inlet_obj, lcs_outlet_obj)
    # reset transforms for root_lcs
    root_lcs.matrix_world = mathutils.Matrix()
    # lcs collections
    part_names = []
    for lcs_inlet_obj in root_lcs.children:
        # remove unmarked parts (children of root that are not inlet LCS)
        if lcs_inlet_obj not in lcs_inlet_objects:
            for obj in lcs_inlet_obj.children_recursive:
                bpy.data.objects.remove(obj, do_unlink=True)
            bpy.data.objects.remove(lcs_inlet_obj, do_unlink=True)
            continue
        # collect part names
        part_name = None
        for locator in lcs_inlet_obj.children:
            if locator not in lcs_outlet_objects:
                part_name = locator.name
                part_names.append(part_name)
        # pack parts to collections
        part_col = bpy.data.collections.new('{}_{}'.format(
            part_name, cg_config['hightpoly']))
        bpy.data.collections[cg_config['parts_col_name']].children.link(part_col)
        for obj in lcs_inlet_obj.children_recursive:
            # outlet lcs objects are already in place, don't move it
            if obj in lcs_outlet_objects:
                continue
            unlink_from_collections(obj)
            part_col.objects.link(obj)
    # parts assembling
    for idx, part_name in enumerate(parts_sequence):
        # TODO clones for clones
        if part_name not in part_names:
            continue
        lcs_inlet_obj = bpy.data.objects[part_name].parent
        constraint = lcs_inlet_obj.constraints.new(type='COPY_TRANSFORMS')
        # drop first_part_obj: the first part snaps straight to the root LCS
        if idx == 0:
            constraint.target = root_lcs
            continue
        # if asm pair exists, target the previous part's outlet LCS
        part_before = bpy.data.objects.get(parts_sequence[idx - 1])
        if part_before:
            lcs_outlet_objs = [
                lcs_out
                for lcs_out in part_before.parent.children
                if lcs_out in lcs_outlet_objects]
            # NOTE(review): if the previous part has no outlet LCS the
            # constraint is left without a target — TODO confirm intended.
            if lcs_outlet_objs:
                constraint.target = lcs_outlet_objs[0]
        else:
            constraint.target = root_lcs
        # constraints are created disabled
        constraint.enabled = False
    # for reset transforms when exporting
    for lcs in lcs_outlet_objects:
        constraint = lcs.constraints.new(type='COPY_TRANSFORMS')
        constraint.target = root_lcs
        constraint.enabled = False
    logger.info('Restructuring assembly pipeline finished!')
    return part_names
def hierarchy_separated_parts(lcs_names, **cg_config):
    ''' Restructuring pipeline as separated parts.

    Used when inlet LCS points exist but no assembling sequence is given:
    every inlet LCS becomes the independent local root of its part, its
    transforms are reset to identity, and each part subtree is packed
    into its own highpoly collection.

    Args:
        lcs_names: names of all LCS empties; inlet/outlet role is
            detected via the cg_config name suffixes.
        cg_config: pipeline naming conventions ('lcs_inlet', 'lcs_outlet',
            'parts_col_name', 'hightpoly').

    Returns:
        List of part locator names on success, otherwise the None
        returned by logger.error() when an inlet LCS has no parent.
    '''
    # collect scene hierarchy start info:
    # split LCS objects into inlet/outlet groups by name suffix
    lcs_inlet_objects = []
    lcs_outlet_objects = []
    for name in lcs_names:
        if name.endswith(cg_config['lcs_inlet']):
            lcs_inlet_objects.append(bpy.data.objects[name])
        else:
            lcs_outlet_objects.append(bpy.data.objects[name])
    # retree_by lcs
    part_names = []
    for lcs_inlet_obj in lcs_inlet_objects:
        # lcs inlet: make the inlet LCS the local root of its part
        parent_locator = lcs_inlet_obj.parent
        if not parent_locator:
            return logger.error('LCS %s should have a parent', lcs_inlet_obj.name)
        unparenting(lcs_inlet_obj)
        round_transforms(lcs_inlet_obj)
        # invert relation: the former parent becomes a child of its LCS
        # (fix: dropped the dead `if parent_locator:` guard — it is always
        # true past the early return above)
        if parent_locator.parent:
            unparenting(parent_locator)
        parenting(lcs_inlet_obj, parent_locator)
        # lcs outlet: reparent the matching outlet under this inlet
        lcs_outlet = '{}_{}'.format(
            lcs_inlet_obj.name.rpartition('_')[0], cg_config['lcs_outlet'])
        if bpy.data.objects.get(lcs_outlet):
            lcs_outlet_obj = bpy.data.objects[lcs_outlet]
            unparenting(lcs_outlet_obj)
            round_transforms(lcs_outlet_obj)
            parenting(lcs_inlet_obj, lcs_outlet_obj)
        # reset transforms for inlet_lcs
        lcs_inlet_obj.matrix_world = mathutils.Matrix()
        # pack parts to collections
        part_name = None
        for locator in lcs_inlet_obj.children:
            if locator not in lcs_outlet_objects:
                part_name = locator.name
                part_names.append(part_name)
        part_col = bpy.data.collections.new('{}_{}'.format(
            part_name, cg_config['hightpoly']))
        bpy.data.collections[cg_config['parts_col_name']].children.link(part_col)
        for obj in lcs_inlet_obj.children_recursive:
            unlink_from_collections(obj)
            part_col.objects.link(obj)
    # remove unmarked objects: anything in the parts collection that is
    # not reachable from an inlet LCS
    marked_objs = sum(
        [lcs_inlet_obj.children_recursive for lcs_inlet_obj in lcs_inlet_objects],
        [])
    parts_col_objs = bpy.data.collections[cg_config['parts_col_name']].objects
    unmarked_objs = list(set(parts_col_objs) - set(marked_objs))
    if unmarked_objs:
        removed_objs = list(map(bpy.data.objects.remove, unmarked_objs))
        logger.info('%s unmarked objects removed!', len(removed_objs))
    logger.info('Restructuring pipeline as separated parts finished!')
    return part_names
def hierarchy_single_part(**cg_config):
    ''' Restructuring pipeline as single part.

    Fallback when the scene has no LCS points and no assembling
    sequence: every parentless locator is treated as the root of one
    part and its whole subtree is packed into a highpoly collection.
    '''
    part_names = []
    # every object without a parent is the root locator of one part
    for root_locator in [obj for obj in bpy.data.objects if not obj.parent]:
        part_names.append(root_locator.name)
        # one highpoly collection per root locator
        collection = bpy.data.collections.new('{}_{}'.format(
            root_locator.name, cg_config['hightpoly']))
        bpy.data.collections[cg_config['parts_col_name']].children.link(collection)
        # relink the subtree: children first, then the root itself
        for child in root_locator.children_recursive:
            unlink_from_collections(child)
            collection.objects.link(child)
        unlink_from_collections(root_locator)
        collection.objects.link(root_locator)
    logger.info('Restructuring pipeline as single part finished!')
    return part_names

View file

@ -1,162 +0,0 @@
# coding: utf-8
# Copyright (C) 2023 Ilia Kurochkin <brothermechanic@yandex.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
'''
DESCRIPTION.
Reorganization and restructuring of assembly structure
based on LCS point objects.
'''
__version__ = '0.2'
import logging
import bpy
import mathutils
from blender.utils.object_relations import (parenting,
unparenting)
from blender.utils.object_transforms import round_transforms
from blender.utils.collection_tools import unlink_from_collections
logger = logging.getLogger(__name__)
def restruct_hierarchy(lcs_names, parts_sequence=None, **cg_config):
    ''' Execute restructurisation.

    Rebuild the imported CAD hierarchy around LCS (Local Coordinate
    System) empties: each inlet LCS becomes the local root of its part,
    a root LCS is cloned from the first part's inlet, parts are packed
    into per-part collections, and (for multi-part assemblies) chained
    with COPY_TRANSFORMS constraints created disabled.

    Args:
        lcs_names: names of all LCS empties; suffixes from cg_config
            determine inlet/outlet role.
        parts_sequence: optional part names in assembling order;
            required when more than one inlet LCS exists.
        cg_config: naming conventions ('lcs_inlet', 'lcs_outlet',
            'lcs_root', 'lcs_col_name', 'parts_col_name', 'hightpoly').

    Returns:
        List of collected part locator names.

    Raises:
        AssertionError: when scene preconditions are violated (multiple
            root locators, missing sequence, no inlet LCS).
    '''
    main_locators = [obj for obj in bpy.data.objects if not obj.parent]
    # NOTE(review): assert is stripped under `python -O`; explicit
    # exceptions would validate more robustly.
    assert len(main_locators) == 1, (
        'Scene should have only one root parent locator!')
    # split LCS objects into inlet/outlet groups by name suffix
    lcs_inlet_objects = []
    lcs_outlet_objects = []
    for name in lcs_names:
        if name.endswith(cg_config['lcs_inlet']):
            lcs_inlet_objects.append(bpy.data.objects[name])
        if name.endswith(cg_config['lcs_outlet']):
            lcs_outlet_objects.append(bpy.data.objects[name])
    # pick the first part: from the sequence (multi-part assembly) or
    # from the single inlet LCS
    if len(lcs_inlet_objects) > 1:
        assert parts_sequence, (
            'Parts sequence do not assign! Process stopped!')
        for locator in main_locators[0].children:
            assert locator.name in parts_sequence, (
                'Can not find {} in "parts_sequence" config!'
                .format(locator.name))
        first_part_obj = bpy.data.objects[parts_sequence[0]]
    elif len(lcs_inlet_objects) == 1:
        first_part_obj = lcs_inlet_objects[0].parent
    else:
        # TODO
        # unreachable assignment: the assert below always fires here
        first_part_obj = None
        assert lcs_inlet_objects, (
            'Scene do not contain any inlet lcs! Process stopped!')
    # create root lcs by parts sequence
    root_lcs = None
    for lcs in first_part_obj.children:
        # skip non-LCS children and outlet LCS objects
        if lcs.name not in lcs_names:
            continue
        if lcs.name.endswith(cg_config['lcs_outlet']):
            continue
        # clone the inlet transform into a new root LCS empty
        root_lcs_name = cg_config['lcs_root']
        root_lcs = bpy.data.objects.new(root_lcs_name, None)
        root_lcs.empty_display_type = 'ARROWS'
        root_lcs.empty_display_size = 0.15
        root_lcs.show_in_front = True
        root_lcs.location = lcs.location
        root_lcs.rotation_euler = lcs.rotation_euler
        root_lcs.parent = lcs.parent
        bpy.data.collections[cg_config['lcs_col_name']].objects.link(root_lcs)
        logger.info('Root Inlet LCS object created!')
        # re-root: the root LCS becomes the parent of the main locator
        unparenting(root_lcs)
        round_transforms(root_lcs)
        parenting(root_lcs, main_locators[0])
    # retree_by lcs: each inlet LCS becomes parent of its former locator
    for lcs in lcs_inlet_objects:
        locator = lcs.parent
        unparenting(lcs)
        round_transforms(lcs)
        if locator:
            if locator.parent:
                unparenting(locator)
            parenting(lcs, locator)
        parenting(root_lcs, lcs)
    # reparent every outlet LCS under its same-named inlet LCS
    for lcs in lcs_outlet_objects:
        unparenting(lcs)
        round_transforms(lcs)
        parenting(
            lcs_inlet_objects[lcs_inlet_objects.index(bpy.data.objects[
                '{}_{}'.format(lcs.name.rpartition('_')[0], cg_config['lcs_inlet'])
            ])],
            lcs)
    # reset transforms for root_lcs
    root_lcs.matrix_world = mathutils.Matrix()
    # lcs collections
    part_names = []
    for lcs in root_lcs.children:
        # remove unmarked parts (children of root that are not inlet LCS)
        if lcs not in lcs_inlet_objects:
            for obj in lcs.children_recursive:
                bpy.data.objects.remove(obj, do_unlink=True)
            bpy.data.objects.remove(lcs, do_unlink=True)
            continue
        # collect part names
        part_name = None
        for locator in lcs.children:
            if locator not in lcs_outlet_objects:
                part_name = locator.name
                part_names.append(part_name)
        # pack parts to collections
        part_col = bpy.data.collections.new('{}_{}'.format(
            part_name, cg_config['hightpoly']))
        bpy.data.collections[cg_config['parts_col_name']].children.link(part_col)
        for obj in lcs.children_recursive:
            # outlet lcs objects are already in place, don't move it
            if obj in lcs_outlet_objects:
                continue
            unlink_from_collections(obj)
            part_col.objects.link(obj)
    # parts assembling TODO clones
    if len(lcs_inlet_objects) > 1:
        for idx, part_name in enumerate(parts_sequence):
            # for clones
            if part_name not in part_names:
                continue
            lcs_in = bpy.data.objects[part_name].parent
            constraint = lcs_in.constraints.new(type='COPY_TRANSFORMS')
            # the first part snaps straight to the root LCS
            if idx == 0:
                constraint.target = root_lcs
                continue
            # if asm pair exists, target the previous part's outlet LCS
            if bpy.data.objects.get(parts_sequence[idx - 1]):
                # NOTE(review): [0] raises IndexError if the previous
                # part has no outlet LCS — TODO confirm guaranteed.
                lcs_target = [
                    lcs_out
                    for lcs_out in bpy.data.objects[parts_sequence[idx - 1]].parent.children
                    if lcs_out in lcs_outlet_objects][0]
                constraint.target = lcs_target
            else:
                constraint.target = root_lcs
            constraint.enabled = False
    # for reset transforms when exporting
    for lcs in lcs_outlet_objects:
        constraint = lcs.constraints.new(type='COPY_TRANSFORMS')
        constraint.target = root_lcs
        constraint.enabled = False
    logger.info('Restructuring pipeline by LCS finished!')
    return part_names

View file

@ -127,6 +127,7 @@ def bw_submit(lowpoly_obj_names,
tree.tree_version = BW_TREE_VERSION
tree.initialised = True
tree.use_fake_user = True
# clear default nodes
for node in tree.nodes:

View file

@ -2,11 +2,10 @@
#!/usr/bin/env python
'''
DESCRIPTION.
Convert and setup scene from FreeCAD data.
CG asset (models) creation pipelines from FreeCAD data.
Support Blender compiled as a Python Module only!
'''
__version__ = '0.7'
import collections
__version__ = '0.8'
import json
import logging
import shutil
@ -18,7 +17,9 @@ from blender.utils.collection_tools import (copy_collections_recursive,
remove_collections_with_objects)
from utils.cmd_proc import cmd_proc
from blender.import_cad.build_blender_scene import json_to_blend
from blender.processing.restruct_hierarchy_by_lcs import restruct_hierarchy
from blender.processing.restruct_hierarchy import (hierarchy_assembly,
hierarchy_separated_parts,
hierarchy_single_part)
from blender.processing.highpoly_setup import setup_meshes
from blender.processing.midpoly_setup import hightpoly_collections_to_midpoly
from blender.processing.lowpoly_setup import parts_to_shells
@ -29,6 +30,7 @@ from blender.texturing.shading import assign_pbr_material
from blender.export.dae import export_dae
from blender.export.stl import export_stl
from blender.export.fbx import export_fbx
from blender.export.ply import export_ply
import bpy
# TODO Path
@ -57,34 +59,36 @@ cg_config = {
'render': 'render',
}
defined_pipeline_list = ['cad', 'highpoly', 'midpoly', 'lowpoly', 'baking', 'export']
def cg_pipeline(**kwargs):
''' CG asset creation pipeline '''
assembly_name = kwargs['fcstd_path'].rpartition('/')[2].rpartition('.')[0]
# freecad don't like other paths
''' CG assets (models) creation pipeline '''
# set defaults
# ______________________________
assembly_name = (
kwargs['fcstd_path'].replace('\\', '/').rpartition('/')[2].rpartition('.')[0])
# clear other paths from kwargs (freecad issue)
parts_sequence_path = kwargs.pop('parts_sequence_path', None)
blend_path = kwargs.pop('blend_path', None)
export_path = kwargs.pop('export_path', None)
# for eatch sequence
parts_sequence = None
if parts_sequence_path and os.path.isfile(parts_sequence_path):
with open(parts_sequence_path, 'r', encoding='utf-8') as sequence_file:
parts_sequence = json.load(sequence_file)
if kwargs['pipeline_type'] not in defined_pipeline_list:
return logger.error('Pipeline type %s is not defined!', kwargs['pipeline_type'])
# output file management
if not blend_path:
blend_path = kwargs['fcstd_path'].replace('\\', '/').rpartition('/')[0]
if not export_path:
export_path = os.path.join(blend_path, 'assets').replace('\\', '/')
export_path = os.path.join(blend_path, 'models').replace('\\', '/')
os.makedirs(blend_path, exist_ok=True)
# prepare blend file
blend_file = os.path.join(blend_path, f'{assembly_name}.blend').replace('\\', '/')
#blend_file = os.path.join(blend_path, f'{assembly_name}.blend').replace('\\', '/')
remove_collections_with_objects()
cleanup_orphan_data()
# 0 сonvert FreeCAD scene to Blender scene
imported_objects = json_to_blend(
# 1) сonvert FreeCAD scene to Blender scene
# ______________________________
cad_objects = json_to_blend(
json.loads(
cmd_proc(freecad_bin,
freecad_to_json_script,
@ -94,51 +98,119 @@ def cg_pipeline(**kwargs):
), **cg_config
)
# Save original freecad setup as blender scene
bpy.ops.wm.save_as_mainfile(filepath=f'{blend_file.rpartition(".")[0]}_orig.blend')
# Save original cad setup as blender scene
if kwargs['pipeline_type'] == 'cad':
blend_file = os.path.join(
blend_path,
'{}_{}.blend'.format(assembly_name, kwargs['pipeline_type'])
).replace('\\', '/')
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
return blend_file
# 1 prepare highpoly
part_names = None
lcs_pipeline = True
if imported_objects['objs_lcs']:
part_names = restruct_hierarchy(
imported_objects['objs_lcs'], parts_sequence, **cg_config)
# non lcs pipeline
if not part_names:
lcs_pipeline = False
part_names = [[obj for obj in bpy.data.objects if not obj.parent][0].name]
if imported_objects['objs_foreground']:
setup_meshes(imported_objects['objs_foreground'],
sharpness=True, shading=True)
# 2) prepare highpoly (depend of cad_objects['objs_lcs'] and parts_sequence)
# ______________________________
if parts_sequence_path and os.path.isfile(parts_sequence_path):
with open(parts_sequence_path, 'r', encoding='utf-8') as sequence_file:
parts_sequence = json.load(sequence_file)
else:
setup_meshes(imported_objects['objs_background'],
parts_sequence = None
lcs_inlet_objects = [
inlet for inlet in cad_objects['objs_lcs']
if inlet.endswith(cg_config['lcs_inlet'])]
# input cases
# 1 case
if parts_sequence and len(lcs_inlet_objects) > 1:
logger.info('Parts assembling sequence and LCS points found! '
'Launch "hierarchy_assembly" restructuring pipeline.')
part_names = hierarchy_assembly(
cad_objects['objs_lcs'], parts_sequence, **cg_config)
# 2 case
elif parts_sequence and len(lcs_inlet_objects) < 2:
return logger.error('Assembly do not have enough LCS points!')
# 3 case
elif not parts_sequence and lcs_inlet_objects:
logger.info('Parts assembling sequence not found! '
'Launch "hierarchy_separated_parts" restructuring pipeline.')
part_names = hierarchy_separated_parts(
cad_objects['objs_lcs'], **cg_config)
# 4 case
elif not parts_sequence and not lcs_inlet_objects:
logger.info('Parts assembling sequence and LCS points not found! '
'Launch "hierarchy_single_part" restructuring pipeline.')
part_names = hierarchy_single_part(**cg_config)
if not part_names:
return logger.error('Can not generate parts!')
# setup highpolys with materials only
if cad_objects['objs_foreground']:
setup_meshes(cad_objects['objs_foreground'],
sharpness=True, shading=True)
# setup all highpolys
else:
setup_meshes(cad_objects['objs_background'],
sharpness=True, shading=True)
# 2 prepare midpoly
copy_col_name = copy_collections_recursive(
# Save highpoly setup as blender scene
if kwargs['pipeline_type'] == 'highpoly':
blend_file = os.path.join(
blend_path,
'{}_{}.blend'.format(assembly_name, kwargs['pipeline_type'])
).replace('\\', '/')
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
logger.info('%s original hightpoly collections ready!', len(part_names))
return blend_file
# 3) prepare midpoly
# ______________________________
tmp_col_name = copy_collections_recursive(
bpy.data.collections[cg_config['parts_col_name']],
suffix=cg_config['midpoly']
)
midpoly_obj_names = hightpoly_collections_to_midpoly(
copy_col_name, part_names, lcs_pipeline, **cg_config)
tmp_col_name, part_names, **cg_config)
# 3 prepare lowpoly
lowpoly_obj_names = parts_to_shells(part_names, lcs_pipeline, **cg_config)
# Save midpoly setup as blender scene
if kwargs['pipeline_type'] == 'midpoly':
blend_file = os.path.join(
blend_path,
'{}_{}.blend'.format(assembly_name, kwargs['pipeline_type'])
).replace('\\', '/')
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
logger.info('%s midpoly objects ready!', len(midpoly_obj_names))
return blend_file
# 4) prepare lowpoly
# ______________________________
lowpoly_obj_names = parts_to_shells(part_names, **cg_config)
uv_unwrap(lowpoly_obj_names)
# Save before baking
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
# Save lowpoly setup as blender scene
if kwargs['pipeline_type'] == 'lowpoly':
blend_file = os.path.join(
blend_path,
'{}_{}.blend'.format(assembly_name, kwargs['pipeline_type'])
).replace('\\', '/')
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
logger.info('%s lowpoly objects ready!', len(lowpoly_obj_names))
return blend_file
# 4 bake textures
if kwargs['textures_resolution'] != 0:
# 5) bake textures
# ______________________________
if kwargs['textures_resolution'] == 0:
logger.info('Baking pipeline has been canceled!')
else:
textures_path = os.path.join(blend_path, 'textures').replace('\\', '/')
bake_paths = bw_bake(lowpoly_obj_names,
textures_path,
kwargs['textures_resolution'],
**cg_config)
# Save baking result
blend_file = os.path.join(
blend_path,
'{}_{}.blend'.format(assembly_name, 'baking')
).replace('\\', '/')
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
# 5 prepare textures
bpy.ops.wm.quit_blender()
@ -148,64 +220,45 @@ def cg_pipeline(**kwargs):
bpy.ops.wm.open_mainfile(filepath=blend_file)
assign_pbr_material(lowpoly_obj_names, textures_path)
bpy.ops.file.make_paths_relative()
# Save assigned lowpoly assets
# Save baking setup as blender scene
if kwargs['pipeline_type'] == 'baking':
blend_file = os.path.join(
blend_path,
'{}_{}.blend'.format(assembly_name, kwargs['pipeline_type'])
).replace('\\', '/')
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
for part_name in part_names:
export_fbx(
obj_name=f'{part_name}_{cg_config["lowpoly"]}',
path=export_path,
subdir='fbx')
logger.info('%s lowpoly objects baked!', len(lowpoly_obj_names))
return blend_file
# 6 export object meshes
# ______________________________
# TODO asset manager
# Save blender scene
if kwargs['pipeline_type'] == 'export':
blend_file = os.path.join(
blend_path,
'{}_{}.blend'.format(assembly_name, kwargs['pipeline_type'])
).replace('\\', '/')
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
# 6 export object meshes and urdf
to_urdf = collections.defaultdict(list)
link = {}
for part_name in part_names:
link_prop = {}
link_prop['visual'] = export_dae(
obj_name=f'{part_name}_{cg_config["midpoly"]}',
path=export_path,
subdir='dae')
link_prop['collision'] = export_stl(
export_fbx(
obj_name=f'{part_name}_{cg_config["lowpoly"]}',
path=export_path,
subdir='collision')
link[part_name] = link_prop
to_urdf['links'].append(link)
path=os.path.join(export_path, part_name, 'meshes').replace('\\', '/'))
export_ply(
obj_name=f'{part_name}_{cg_config["midpoly"]}',
path=os.path.join(export_path, part_name, 'meshes').replace('\\', '/'))
export_dae(
obj_name=f'{part_name}_{cg_config["midpoly"]}',
path=os.path.join(export_path, part_name, 'meshes').replace('\\', '/'))
export_stl(
obj_name=f'{part_name}_{cg_config["lowpoly"]}',
path=os.path.join(export_path, part_name, 'meshes').replace('\\', '/'))
# TODO export urdf
config = kwargs.pop('config', None)
# config = {'sequences': [['cube1', 'cube2', 'cube3', 'cube4'], ['cube2', 'cube1', 'cube4', 'cube3']]}
if config:
for sequence in config['sequences']:
joint = {}
# TODO collect pairs 0_1, 1_2, 2_3, 3_4, ...
for pair in zip_longest(sequence[0::2], sequence[1::2]):
joint_prop = {}
if pair[1]:
joint_prop['type'] = 'fixed'
location = list(bpy.data.objects.get(pair[1]).location)
rotation = list(bpy.data.objects.get(pair[0]).rotation_euler)
# origin
# round location values
for idx, axis in enumerate(location):
location[idx] = round(axis, 5)
joint_prop['location'] = location
joint_prop['rotation'] = rotation
# parent
joint_prop['parent'] = pair[0]
# child
joint_prop['child'] = pair[1]
logger.info('%s parts exported!', len(part_names))
return blend_file
joint['_'.join(pair)] = joint_prop
to_urdf['sequence'].append(joint)
print(json.dumps(to_urdf, indent=4))
logger.info('%s original hightpoly collections ready!', len(part_names))
logger.info('%s midpoly objects ready!', len(midpoly_obj_names))
logger.info('%s lowpoly objects ready!', len(lowpoly_obj_names))
if __name__ == '__main__':
import argparse
@ -261,14 +314,23 @@ if __name__ == '__main__':
default='Robossembler_NonSolid',
required=False
)
parser.add_argument(
'--pipeline_type',
type=str,
help='Set pipeline type: "cad", "highpoly", "midpoly", "lowpoly", "baking", "export"',
default='export',
required=False
)
parser.add_argument(
'--parts_sequence_path',
type=str, help='Path to parts assembling sequence json file.',
type=str,
help='Path to parts assembling sequence json file.',
required=False
)
parser.add_argument(
'--export_path',
type=str, help='Path for export assets. If not, fcstd_path will be used instead.',
type=str,
help='Path for export assets. If not, fcstd_path will be used instead.',
required=False
)
parser.add_argument(
@ -290,5 +352,3 @@ if __name__ == '__main__':
cg_kwargs = {key: getattr(args, key) for key in dir(args) if not key.startswith('_')}
cg_pipeline(**cg_kwargs)
logger.info('CG Pipeline Completed!')