[CG Pipeline] Refactor
parent 6538f70d54
commit b3612d8655

23 changed files with 634 additions and 645 deletions
@@ -20,7 +20,6 @@ import bpy
 import mathutils
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
 def export_decorator(func):
@@ -38,6 +37,7 @@ def export_decorator(func):
     bpy.context.view_layer.objects.active = obj
     # clean hierarchy and transforms
     obj.parent = None
+    # reset transforms
     obj.matrix_world = mathutils.Matrix()
     # construct path
     filename = bpy.context.active_object.name
@@ -13,7 +13,6 @@ import bpy
 import os
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
 def export_col_stl(path, subdir=""):
cg/blender/export/fbx.py (new file, 71 lines)
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2023 Ilia Kurochkin <brothermechanic@gmail.com>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+'''
+DESCRIPTION.
+FBX mesh exporter.
+Exports all objects in scene.
+You can set export path and subdir.
+'''
+__version__ = "0.1"
+
+import bpy
+from blender.export import export_decorator
+
+
+@export_decorator
+def export_fbx(**kwargs):
+    outpath = ('{}.fbx'.format(kwargs['outpath']))
+
+    bpy.ops.export_scene.fbx(
+        filepath=outpath,
+        check_existing=False,
+        filter_glob="*.fbx",
+        use_selection=True,
+        use_visible=False,
+        use_active_collection=False,
+        global_scale=1,
+        apply_unit_scale=True,
+        apply_scale_options='FBX_SCALE_NONE',
+        use_space_transform=True,
+        bake_space_transform=False,
+        object_types={'MESH'},
+        use_mesh_modifiers=True,
+        use_mesh_modifiers_render=True,
+        mesh_smooth_type='FACE',
+        colors_type='SRGB',
+        use_subsurf=False,
+        use_mesh_edges=False,
+        use_tspace=False,
+        use_triangles=True,
+        use_custom_props=False,
+        add_leaf_bones=True,
+        primary_bone_axis='Y',
+        secondary_bone_axis='X',
+        use_armature_deform_only=False,
+        armature_nodetype='NULL',
+        bake_anim=False,
+        bake_anim_use_all_bones=True,
+        bake_anim_use_nla_strips=True,
+        bake_anim_use_all_actions=True,
+        bake_anim_force_startend_keying=True,
+        bake_anim_step=1,
+        bake_anim_simplify_factor=1,
+        path_mode='AUTO',
+        embed_textures=False,
+        batch_mode='OFF',
+        use_batch_own_dir=True,
+        use_metadata=True,
+        axis_forward='-Z',
+        axis_up='Y')
+
+    return outpath
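Note: export_fbx is wrapped by the shared export_decorator (see the hunks above), which activates the object, clears its parent, resets its world matrix, and derives 'outpath' from the active object's name before calling the wrapped exporter. A minimal invocation sketch; the 'path' and 'subdir' keyword names are assumptions carried over from the sibling exporters, not confirmed by this diff:

    # hypothetical call: the decorator supplies kwargs['outpath'],
    # export_fbx only appends the '.fbx' extension and runs the operator
    from blender.export.fbx import export_fbx

    exported_file = export_fbx(path='/tmp/export', subdir='meshes')
    print(exported_file)  # e.g. '/tmp/export/meshes/<object name>.fbx'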
@@ -12,7 +12,6 @@ import bpy
 import os
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
 def export_obj(path, subdir="", filename=None):
@@ -18,7 +18,7 @@ DESCRIPTION.
 - Setup LCS points.
 - Apply Bledner scene transforms.
 '''
-__version__ = '0.1'
+__version__ = '0.2'
 
 import collections
 import logging
@@ -26,43 +26,22 @@ import random
 import bpy
 from blender.utils.object_transforms import apply_transforms
 from blender.import_cad.import_hierarchy import (fc_placement,
-                                                 hierarchy)
+                                                 hierarchy_list)
 from blender.import_cad.import_materials import (assign_materials,
                                                  assign_black)
 
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
-# COLLECTIONS NAMIG CONVENTION
-part_col_name = 'Parts'
-lcs_col_name = 'LCS'
-hierarchy_col_name = 'Hierarchy'
-lowpoly_col_name = 'Lowpoly'
-# LCS POINT'S SUFFIXES CONVENTION
-inlet = '_in'
-outlet = '_out'
-root = '_root'
-# CG ASSETS SUFFIXES CONVENTION
-hightpoly = '_hp'
-lowpoly = '_lp'
-render = '_render'
 
 scene_scale = 0.001
 blackbody_mat_name = 'Robossembler_Black_Body'
 
 
-def json_to_blend(js_data):
+def json_to_blend(js_data, **cg_config):
     ''' Reads JSON data and creates Blender scene '''
-    part_collection = bpy.data.collections.new(part_col_name)
-    bpy.context.scene.collection.children.link(part_collection)
-
-    lcs_collection = bpy.data.collections.new(lcs_col_name)
+    lcs_collection = bpy.data.collections.new(cg_config['lcs_col_name'])
     bpy.context.scene.collection.children.link(lcs_collection)
-    hierarchy_collection = bpy.data.collections.new(hierarchy_col_name)
-    bpy.context.scene.collection.children.link(hierarchy_collection)
+    parts_collection = bpy.data.collections.new(cg_config['parts_col_name'])
+    bpy.context.scene.collection.children.link(parts_collection)
 
     fc_file = list(js_data.keys())[0]
 
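Note: after this refactor the module-level naming constants are gone; every stage receives the naming convention as **cg_config keyword arguments. A hypothetical cg_config dict, with values inferred from the constants deleted above and from the '{}_{}' format strings used in the new code (the exact values in the pipeline config are an assumption):

    # assumed contents, mirroring the removed module-level constants;
    # suffixes lose their leading underscore because the new code joins
    # names with '{}_{}'.format(...)
    cg_config = {
        'parts_col_name': 'Parts',      # collection names
        'lcs_col_name': 'LCS',
        'midpoly_col_name': 'Midpoly',
        'lowpoly_col_name': 'Lowpoly',
        'lcs_inlet': 'in',              # LCS point suffixes
        'lcs_outlet': 'out',
        'lcs_root': 'root',             # name of the root LCS empty (assumed)
        'hightpoly': 'hp',              # CG asset suffixes
        'midpoly': 'mp',
        'lowpoly': 'lp',
    }

    imported_objects = json_to_blend(js_data, **cg_config)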
@@ -90,7 +69,7 @@ def json_to_blend(js_data):
             bmesh.from_pydata(verts, edges, faces)
             bmesh.update()
             bobj = bpy.data.objects.new(js_obj, bmesh)
-            part_collection.objects.link(bobj)
+            parts_collection.objects.link(bobj)
 
         if bobj:
             fc_placement(bobj,
@@ -102,11 +81,10 @@ def json_to_blend(js_data):
             apply_transforms(bobj, scale=True)
 
             # construct assembly hierarchy
-            hierarchy_objs = hierarchy(bobj,
-                                       js_data[fc_file][js_obj]['hierarchy'],
-                                       scene_scale)
+            hierarchy_objs = hierarchy_list(
+                bobj, js_data[fc_file][js_obj]['hierarchy'], scene_scale)
             for hierarchy_obj in hierarchy_objs:
-                hierarchy_collection.objects.link(hierarchy_obj)
+                parts_collection.objects.link(hierarchy_obj)
                 imported_objects['objs_hierarchy'].append(hierarchy_obj.name)
 
             # one material for the whole object
@@ -119,35 +97,6 @@ def json_to_blend(js_data):
                 assign_black(bobj)
                 imported_objects['objs_background'].append(bobj.name)
 
-    # losted root lcs inlet workaround
-    if imported_objects['objs_lcs']:
-        root_lcs = None
-        for obj_name in imported_objects['objs_lcs']:
-            if obj_name.endswith(root):
-                root_lcs = bpy.data.objects[obj_name]
-                break
-        if root_lcs:
-            root_inlet_name = '{}{}'.format(root_lcs.name.split(root)[0], inlet)
-            if not bpy.data.objects.get(root_inlet_name):
-                root_inlet = bpy.data.objects.new(root_inlet_name, None)
-                root_inlet.empty_display_type = 'ARROWS'
-                root_inlet.empty_display_size = 0.1
-                root_inlet.show_in_front = True
-                root_inlet.location = root_lcs.location
-                root_inlet.rotation_euler = root_lcs.rotation_euler
-                root_inlet.parent = root_lcs.parent
-                lcs_collection.objects.link(root_inlet)
-                imported_objects['objs_lcs'].append(root_inlet.name)
-                logger.info('Root Inlet LCS object created!')
-            else:
-                logger.info('Root Inlet LCS object already exists!')
-        else:
-            logger.info('Lost Root LCS object!')
-    else:
-        logger.info('No LCS objects found!')
-
-    # TODO
-    # update do not dork
     logger.info('Generated %s objects without errors',
                 len(sum(list(imported_objects.values()), [])))
     return imported_objects
@@ -20,7 +20,6 @@ import bpy
 from mathutils import Vector
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
 def fc_placement(bobj, fc_location, fc_rotation, scene_scale):
|
@ -30,12 +29,12 @@ def fc_placement(bobj, fc_location, fc_rotation, scene_scale):
|
||||||
bobj.rotation_mode = 'QUATERNION'
|
bobj.rotation_mode = 'QUATERNION'
|
||||||
# FreeCAD Quaternion is XYZW while Blender is WXYZ
|
# FreeCAD Quaternion is XYZW while Blender is WXYZ
|
||||||
fc_rotation.insert(0, fc_rotation.pop(3))
|
fc_rotation.insert(0, fc_rotation.pop(3))
|
||||||
bobj.rotation_quaternion = (fc_rotation)
|
bobj.rotation_quaternion = fc_rotation
|
||||||
bobj.rotation_mode = m
|
bobj.rotation_mode = m
|
||||||
return bobj
|
return bobj
|
||||||
|
|
||||||
|
|
||||||
def hierarchy(bobj, hierarchy, scene_scale):
|
def hierarchy_list(bobj, hierarchy, scene_scale):
|
||||||
''' Blender object, dict, Blender World Scale factor. '''
|
''' Blender object, dict, Blender World Scale factor. '''
|
||||||
hierarchy_objs = []
|
hierarchy_objs = []
|
||||||
for parent_name in hierarchy.keys():
|
for parent_name in hierarchy.keys():
|
||||||
|
|
|
@@ -17,7 +17,6 @@ from bpy_extras.node_shader_utils import PrincipledBSDFWrapper
 from blender.utils.shininess_to_roughness import shiny_to_rough
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
 blackbody_mat_name = 'Robossembler_Black_Body'
@@ -3,6 +3,7 @@
 DESCRIPTION.
 OBJ mesh importer.
 Import files in blender scene.
+DEPRECATED
 """
 __version__ = "0.2"
 
@@ -21,16 +21,16 @@ import bpy
 import math
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
 def setup_meshes(obj_names, cleanup=False, sharpness=False, shading=False):
     ''' Setup raw meshes list after importing '''
     logger.info('Hightpoly meshes setup launched...')
+    fixed_obj_names = []
     for obj_name in obj_names:
-        obj = bpy.data.objects[obj_name]
-        if not obj.type == 'MESH':
+        if not bpy.data.objects.get(obj_name):
             continue
+        obj = bpy.data.objects[obj_name]
         bpy.ops.object.select_all(action='DESELECT')
         obj.select_set(state=True)
         bpy.context.view_layer.objects.active = obj
@@ -58,7 +58,7 @@ def setup_meshes(obj_names, cleanup=False, sharpness=False, shading=False):
             bpy.ops.object.mode_set(mode='OBJECT')
 
         if shading:
-            # fix shading
+            # fix shading TODO
            bpy.ops.object.shade_smooth()
            bpy.context.view_layer.objects.active.data.use_auto_smooth = 1
            bpy.context.view_layer.objects.active.modifiers.new(type='DECIMATE', name='decimate')
@@ -69,4 +69,6 @@ def setup_meshes(obj_names, cleanup=False, sharpness=False, shading=False):
         bpy.context.object.modifiers['triangulate'].keep_custom_normals = 1
         bpy.context.object.modifiers['triangulate'].show_expanded = 0
 
-    return logger.info('Setup of %s hightpoly meshes is finished!', len(obj_names))
+        fixed_obj_names.append(obj_name)
+
+    return logger.info('Setup of %s hightpoly meshes is finished!', len(fixed_obj_names))
@@ -18,6 +18,7 @@ __version__ = '0.1'
 import logging
 import bpy
+import math
 
 from blender.utils.generative_modifiers import shell_remesher
 from blender.utils.object_converter import mesh_to_mesh
@@ -25,44 +26,31 @@ from blender.utils.object_relations import parenting
 from blender.utils.mesh_tools import select_peaks, select_stratched_edges
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
-# COLLECTIONS NAMIG CONVENTION
-parts_col_name = 'Parts'
-lcs_col_name = 'LCS'
-hierarchy_col_name = 'Hierarchy'
-lowpoly_col_name = 'Lowpoly'
-# LCS POINT'S SUFFIXES CONVENTION
-inlet = '_in'
-outlet = '_out'
-root = '_root'
-# CG ASSETS SUFFIXES CONVENTION
-hightpoly = '_hp'
-lowpoly = '_lp'
-render = '_render'
-
-
-def parts_to_shells(hightpoly_part_names):
+def parts_to_shells(part_names, lcs_pipeline, **cg_config):
     ''' Create lowpoly shells from parts collections. '''
     logger.info('Lowpoly shells creation launched...')
 
-    lowpoly_col = bpy.data.collections.new(lowpoly_col_name)
+    lowpoly_col = bpy.data.collections.new(cg_config['lowpoly_col_name'])
     bpy.context.scene.collection.children.link(lowpoly_col)
-
-    for part_name in hightpoly_part_names:
+    for part_name in part_names:
         # generate lowpoly objects from part collections
-        lowpoly_name = ('{}{}'.format(part_name, lowpoly))
+        lowpoly_name = '{}_{}'.format(part_name, cg_config['lowpoly'])
         lowpoly_mesh = bpy.data.meshes.new(lowpoly_name)
         lowpoly_obj = bpy.data.objects.new(lowpoly_name, lowpoly_mesh)
         bpy.context.view_layer.update()
-        part_inlet = bpy.data.objects.get('{}{}'.format(part_name, inlet))
-        lowpoly_obj.matrix_world = part_inlet.matrix_world.copy()
-        parenting(part_inlet, lowpoly_obj)
         lowpoly_col.objects.link(lowpoly_obj)
+        if lcs_pipeline:
+            lcs_inlet = bpy.data.objects[part_name].parent
+            lowpoly_obj.matrix_world = lcs_inlet.matrix_world.copy()
+            parenting(lcs_inlet, lowpoly_obj)
+            part_col = bpy.data.collections[
+                '{}_{}'.format(part_name, cg_config['hightpoly'])]
+        else:
+            part_col = bpy.data.collections[cg_config['parts_col_name']]
 
         shell_remesher(lowpoly_obj, 'remesh_nodes', 'robossembler')
-        part_col = bpy.data.collections[('{}{}'.format(part_name, hightpoly))]
         lowpoly_obj.modifiers['remesh_nodes']['Input_0'] = part_col
 
         remesh_voxel = lowpoly_obj.modifiers.new('remesh_voxel', type='REMESH')
@@ -78,7 +66,10 @@ def parts_to_shells(hightpoly_part_names):
         decimate.ratio = 0.1
 
         # apply all modifiers to mesh
-        parenting(part_inlet, mesh_to_mesh(lowpoly_obj))
+        if lcs_pipeline:
+            parenting(lcs_inlet, mesh_to_mesh(lowpoly_obj))
+        else:
+            mesh_to_mesh(lowpoly_obj)
 
     # fix non_manifold shape
     for lowpoly_obj in lowpoly_col.objects:
@@ -134,6 +125,12 @@ def parts_to_shells(hightpoly_part_names):
         bpy.ops.mesh.select_all(action='DESELECT')
         bpy.ops.mesh.select_mode(type='FACE')
         bpy.ops.object.mode_set(mode='OBJECT')
+        # shading
+        bpy.ops.object.shade_smooth(use_auto_smooth=True)
+        lowpoly_obj.data.auto_smooth_angle = math.radians(10)
+        lowpoly_obj.modifiers.new(type='WEIGHTED_NORMAL', name='WeightedNormal')
+        lowpoly_obj.modifiers['WeightedNormal'].keep_sharp = True
+        bpy.ops.object.modifier_apply(modifier="WeightedNormal")
 
     logger.info('Generation of %s lowpoly shells is finished!', len(lowpoly_col.objects))
 
@@ -21,58 +21,46 @@ import bpy
 import math
 
 from blender.utils.object_relations import parenting
-from blender.utils.remove_collections import remove_collections
+from blender.utils.collection_tools import remove_collections_with_objects
 from blender.utils.mesh_tools import collect_less_volume_objs
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
-# COLLECTIONS NAMIG CONVENTION
-parts_col_name = 'Parts'
-lcs_col_name = 'LCS'
-hierarchy_col_name = 'Hierarchy'
-midpoly_col_name = 'Midpoly'
-lowpoly_col_name = 'Lowpoly'
-# LCS POINT'S SUFFIXES CONVENTION
-inlet = '_in'
-outlet = '_out'
-root = '_root'
-# CG ASSETS SUFFIXES CONVENTION
-hightpoly = '_hp'
-midpoly = 'mp'
-lowpoly = '_lp'
-render = '_render'
-
-
-def hightpoly_collections_to_midpoly(part_names):
+def hightpoly_collections_to_midpoly(collection_name, part_names, lcs_pipeline, **cg_config):
     ''' Convert part's collecttions to single objects. '''
+    logger.info('Midpoly objects creation launched...')
+    midpoly_obj_names = []
     for part_name in part_names:
-        midpoly_name = '_'.join((part_name, midpoly))
+        midpoly_name = '_'.join((part_name, cg_config['midpoly']))
         midpoly_mesh = bpy.data.meshes.new(midpoly_name)
         midpoly_obj = bpy.data.objects.new(midpoly_name, midpoly_mesh)
         bpy.context.view_layer.update()
-        part_inlet = bpy.data.objects.get('{}{}'.format(part_name, inlet))
-        midpoly_obj.matrix_world = part_inlet.matrix_world.copy()
-        parenting(part_inlet, midpoly_obj)
-        midpoly_parts_col = bpy.data.collections['_'.join((parts_col_name, midpoly))]
+        if lcs_pipeline:
+            lcs_inlet = bpy.data.objects[part_name].parent
+            midpoly_obj.matrix_world = lcs_inlet.matrix_world.copy()
+            parenting(lcs_inlet, midpoly_obj)
+        midpoly_parts_col = bpy.data.collections[collection_name]
         midpoly_parts_col.objects.link(midpoly_obj)
-        for col in midpoly_parts_col.children.keys():
-            if part_name in col:
-                bpy.ops.object.select_all(action='DESELECT')
-                exclude_objs = collect_less_volume_objs(
-                    bpy.data.collections[col].objects, min_volume=2.0e-06)
-                for obj in bpy.data.collections[col].objects:
-                    if obj not in exclude_objs:
-                        obj.select_set(state=True)
-                midpoly_obj.select_set(state=True)
-                bpy.context.view_layer.objects.active = midpoly_obj
-                bpy.ops.object.join()
-                bpy.ops.object.shade_smooth(use_auto_smooth=True)
-                break
+        for col in midpoly_parts_col.children:
+            # only for current part
+            if part_name not in col.name:
+                continue
+            bpy.ops.object.select_all(action='DESELECT')
+            exclude_objs = collect_less_volume_objs(col.objects, min_volume=2.0e-06)
+            for obj in col.objects:
+                if obj not in exclude_objs:
+                    obj.select_set(state=True)
+            midpoly_obj.select_set(state=True)
+            bpy.context.view_layer.objects.active = midpoly_obj
+            bpy.ops.object.join()
+            bpy.ops.object.shade_smooth(use_auto_smooth=True)
+            break
+        midpoly_obj_names.append(midpoly_name)
 
-    midpoly_parts_col.name = midpoly_col_name
-    for col in midpoly_parts_col.children.keys():
-        remove_collections(col)
+    midpoly_parts_col.name = cg_config['midpoly_col_name']
+    for col in midpoly_parts_col.children:
+        remove_collections_with_objects(col)
 
-    return logger.info('Setup of %s midpoly meshes is finished!', len(part_names))
+    logger.info('Setup of %s midpoly meshes is finished!', len(part_names))
+    return midpoly_obj_names
@@ -15,158 +15,148 @@ DESCRIPTION.
 Reorganization and restructuring of assembly structure
 based on LCS point objects.
 '''
-__version__ = '0.1'
+__version__ = '0.2'
 import logging
-import math
 
 import bpy
-from mathutils import Matrix
+import mathutils
 
 from blender.utils.object_relations import (parenting,
                                             unparenting)
 from blender.utils.object_transforms import round_transforms
+from blender.utils.collection_tools import unlink_from_collections
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
-# COLLECTIONS NAMIG CONVENTION
-parts_col_name = 'Parts'
-lcs_col_name = 'LCS'
-hierarchy_col_name = 'Hierarchy'
-lowpoly_col_name = 'Lowpoly'
-# LCS POINT'S SUFFIXES CONVENTION
-inlet = '_in'
-outlet = '_out'
-root = '_root'
-# CG ASSETS SUFFIXES CONVENTION
-hightpoly = '_hp'
-lowpoly = '_lp'
-render = '_render'
-
-
-def retree_by_lcs(lcs_objects, root_lcs):
-    ''' Organizing project structure based on LCS. '''
-    for lcs in lcs_objects:
+def restruct_hierarchy(lcs_names, parts_sequence=None, **cg_config):
+    ''' Execute restructurisation. '''
+    main_locators = [obj for obj in bpy.data.objects if not obj.parent]
+    assert len(main_locators) == 1, (
+        'Scene should have only one root parent locator!')
+
+    lcs_inlet_objects = []
+    lcs_outlet_objects = []
+    for name in lcs_names:
+        if name.endswith(cg_config['lcs_inlet']):
+            lcs_inlet_objects.append(bpy.data.objects[name])
+        if name.endswith(cg_config['lcs_outlet']):
+            lcs_outlet_objects.append(bpy.data.objects[name])
+
+    if len(lcs_inlet_objects) > 1:
+        assert parts_sequence, (
+            'Parts sequence do not assign! Process stopped!')
+        for locator in main_locators[0].children:
+            assert locator.name in parts_sequence, (
+                'Can not find {} in "parts_sequence" config!'
+                .format(locator.name))
+        first_part_obj = bpy.data.objects[parts_sequence[0]]
+    elif len(lcs_inlet_objects) == 1:
+        first_part_obj = lcs_inlet_objects[0].parent
+    else:
+        # TODO
+        first_part_obj = None
+        assert lcs_inlet_objects, (
+            'Scene do not contain any inlet lcs! Process stopped!')
+
+    # create root lcs by parts sequence
+    root_lcs = None
+    for lcs in first_part_obj.children:
+        if lcs.name not in lcs_names:
+            continue
+        if lcs.name.endswith(cg_config['lcs_outlet']):
+            continue
+        root_lcs_name = cg_config['lcs_root']
+        root_lcs = bpy.data.objects.new(root_lcs_name, None)
+        root_lcs.empty_display_type = 'ARROWS'
+        root_lcs.empty_display_size = 0.15
+        root_lcs.show_in_front = True
+        root_lcs.location = lcs.location
+        root_lcs.rotation_euler = lcs.rotation_euler
+        root_lcs.parent = lcs.parent
+        bpy.data.collections[cg_config['lcs_col_name']].objects.link(root_lcs)
+        logger.info('Root Inlet LCS object created!')
+
+    unparenting(root_lcs)
+    round_transforms(root_lcs)
+    parenting(root_lcs, main_locators[0])
+
+    # retree_by lcs
+    for lcs in lcs_inlet_objects:
         locator = lcs.parent
-        if lcs.name.endswith(inlet):
-            unparenting(lcs)
-            round_transforms(lcs)
+        unparenting(lcs)
+        round_transforms(lcs)
+        if locator:
             if locator.parent:
                 unparenting(locator)
             parenting(lcs, locator)
             parenting(root_lcs, lcs)
-    for lcs in lcs_objects:
-        if lcs.name.endswith(outlet):
-            unparenting(lcs)
-            round_transforms(lcs)
-            parenting(
-                lcs_objects[lcs_objects.index(
-                    bpy.data.objects[
-                        '{}{}'.format(lcs.name.split(outlet)[0], inlet)])],
-                lcs)
-
-    root_lcs.matrix_world = Matrix()
-    return lcs_objects
-
-
-def closest_lcs(lcs_objects):
-    ''' Finding closest outlet to inlet LCS. '''
-    target_dists = {}
-    for target in lcs_objects:
-        if target.name.endswith(inlet):
-            dists = {}
-            for lcs in lcs_objects:
-                if lcs.name.endswith(outlet):
-                    dist = math.dist(
-                        target.matrix_world.translation,
-                        lcs.matrix_world.translation)
-                    dists[lcs.name] = dist
-            min_dist = min(dists.values())
-            if min_dist < 0.01:
-                min_lcs = [k for k, v in dists.items() if v == min_dist][0]
-                target_dists[target.name] = min_lcs
-    return target_dists
-
-
-def lcs_constrainting(lcs_objects, root_lcs):
-    ''' Placing inlet right on outlet LCS. '''
-    closests = closest_lcs(lcs_objects)
-    for lcs in lcs_objects:
-        if lcs.name in closests:
-            constraint = lcs.constraints.new(type='COPY_TRANSFORMS')
-            constraint.target = bpy.data.objects[closests[lcs.name]]
-        if lcs.name.endswith(outlet):
-            constraint = lcs.constraints.new(type='COPY_TRANSFORMS')
-            constraint.target = root_lcs
-            constraint.enabled = False
-    for lcs in lcs_objects:
-        if len(lcs.constraints) == 0:
-            constraint = lcs.constraints.new(type='COPY_TRANSFORMS')
-            constraint.target = root_lcs
-            constraint.enabled = False
-    return lcs_objects
-
-
-def unlink_from_col(obj):
-    ''' Unlinking object from all collections. '''
-    for col in bpy.data.collections:
-        if obj.name in col.objects:
-            col.objects.unlink(obj)
-    return obj
-
-
-def lcs_collections(root_lcs, lcs_objects):
-    ''' Create LCS based hierarchy. '''
+    for lcs in lcs_outlet_objects:
+        unparenting(lcs)
+        round_transforms(lcs)
+        parenting(
+            lcs_inlet_objects[lcs_inlet_objects.index(bpy.data.objects[
+                '{}_{}'.format(lcs.name.rpartition('_')[0], cg_config['lcs_inlet'])
+            ])],
+            lcs)
+    # reset transforms for root_lcs
+    root_lcs.matrix_world = mathutils.Matrix()
+
+    # lcs collections
+    part_names = []
     for lcs in root_lcs.children:
-        lcs_col = bpy.data.collections.new(
-            '{}{}'.format(lcs.name.split(inlet)[0], hightpoly))
-        bpy.data.collections[parts_col_name].children.link(lcs_col)
+        # remove unmarked parts
+        if lcs not in lcs_inlet_objects:
+            for obj in lcs.children_recursive:
+                bpy.data.objects.remove(obj, do_unlink=True)
+            bpy.data.objects.remove(lcs, do_unlink=True)
+            continue
+        # collect part names
+        part_name = None
+        for locator in lcs.children:
+            if locator not in lcs_outlet_objects:
+                part_name = locator.name
+        part_names.append(part_name)
+        # pack parts to collections
+        part_col = bpy.data.collections.new('{}_{}'.format(
+            part_name, cg_config['hightpoly']))
+        bpy.data.collections[cg_config['parts_col_name']].children.link(part_col)
         for obj in lcs.children_recursive:
-            unlink_from_col(obj)
-            lcs_col.objects.link(obj)
-        if lcs not in lcs_objects:
-            unlink_from_col(lcs)
-            lcs_col.objects.link(lcs)
-    return root_lcs.children
-
-
-def restruct_hierarchy(lcs_names):
-    ''' Execute restructurisation. '''
-    #lcs_objects = bpy.data.collections[lcs_col_name].objects
-    lcs_objects = []
-    root_lcs = None
-    if lcs_names:
-        for obj_name in lcs_names:
-            if obj_name.endswith(root):
-                root_lcs = bpy.data.objects[obj_name]
-            lcs_objects.append(bpy.data.objects[obj_name])
-
-        main_locators = [obj for obj in bpy.data.objects if not obj.parent]
-        if len(main_locators) > 1:
-            logger.info('Scene has several main (root) locators! '
-                        'This may cause an error!')
-
-        if root_lcs:
-            lcs_objects = [lcs for lcs in lcs_objects if lcs != root_lcs]
-            root_locator = root_lcs.parent
-            unparenting(root_lcs)
-            round_transforms(root_lcs)
-            unparenting(root_locator)
-            parenting(root_lcs, root_locator)
-            parenting(root_lcs, main_locators[0])
-
-            retree_by_lcs(lcs_objects, root_lcs)
-            lcs_constrainting(lcs_objects, root_lcs)
-
-            lcs_collections(root_lcs, lcs_objects)
-
-            # remove unused for now collection
-            bpy.data.collections.remove(bpy.data.collections[hierarchy_col_name])
-
-            return logger.info('Restructuring pipeline finished!')
-        else:
-            return logger.info('Lost root LCS object!')
-    else:
-        return logger.info('Restructuring pipeline canceled!')
+            # outlet lcs objects are already in place, don't move it
+            if obj in lcs_outlet_objects:
+                continue
+            unlink_from_collections(obj)
+            part_col.objects.link(obj)
+
+    # parts assembling TODO clones
+    if len(lcs_inlet_objects) > 1:
+        for idx, part_name in enumerate(parts_sequence):
+            # for clones
+            if part_name not in part_names:
+                continue
+            lcs_in = bpy.data.objects[part_name].parent
+            constraint = lcs_in.constraints.new(type='COPY_TRANSFORMS')
+            if idx == 0:
+                constraint.target = root_lcs
+                continue
+            # if asm pair exists
+            if bpy.data.objects.get(parts_sequence[idx - 1]):
+                lcs_target = [
+                    lcs_out
+                    for lcs_out in bpy.data.objects[parts_sequence[idx - 1]].parent.children
+                    if lcs_out in lcs_outlet_objects][0]
+                constraint.target = lcs_target
+            else:
+                constraint.target = root_lcs
+            constraint.enabled = False
+    # for reseet transforms when exporting
+    for lcs in lcs_outlet_objects:
+        constraint = lcs.constraints.new(type='COPY_TRANSFORMS')
+        constraint.target = root_lcs
+        constraint.enabled = False
+
+    logger.info('Restructuring pipeline by LCS finished!')
+
+    return part_names
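Note: restruct_hierarchy now returns the collected part names, so the downstream stages can be chained directly. A hypothetical sequence for an LCS-driven scene; the collection_name argument 'Parts_mp' mirrors how the deleted constants used to build it ('_'.join((parts_col_name, midpoly))), and all values here are illustrative:

    # assumed stage order of the refactored pipeline
    part_names = restruct_hierarchy(lcs_names, parts_sequence, **cg_config)
    parts_to_shells(part_names, lcs_pipeline=True, **cg_config)
    midpoly_names = hightpoly_collections_to_midpoly(
        'Parts_mp', part_names, lcs_pipeline=True, **cg_config)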
@@ -21,7 +21,6 @@ import math
 import bpy
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
 def uv_unwrap(obj_names, angle_limit=30):

@@ -63,4 +62,3 @@ def uv_unwrap(obj_names, angle_limit=30):
         obj.select_set(False)
 
     return logger.info('UV setup of %s lowpoly meshes is finished!', len(obj_names))
-
@@ -18,43 +18,94 @@ __version__ = '0.2'
 import logging
 import os
+import shutil
+import time
 import bpy
-import addon_utils
 import BakeWrangler
 from BakeWrangler.nodes.node_tree import BW_TREE_VERSION
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
-# COLLECTIONS NAMIG CONVENTION
-parts_col_name = 'Parts'
-lcs_col_name = 'LCS'
-hierarchy_col_name = 'Hierarchy'
-lowpoly_col_name = 'Lowpoly'
-# LCS POINT'S SUFFIXES CONVENTION
-inlet = '_in'
-outlet = '_out'
-root = '_root'
-# CG ASSETS SUFFIXES CONVENTION
-hightpoly = '_hp'
-midpoly = 'mp'
-lowpoly = '_lp'
-render = '_render'
+bw_config_1 = {
+    'passes': {
+        'diffuse': {
+            'bake_cat': 'PBR',
+            'bake_subcat': 'ALBEDO',
+            'suffix': 'D',
+        },
+        'nornal': {
+            'bake_cat': 'CORE',
+            'bake_subcat': 'NORMAL',
+            'suffix': 'N',
+        },
+        'ao': {
+            'bake_cat': 'CORE',
+            'bake_subcat': 'AO',
+            'bake_samples': 16,
+            'suffix': 'AO',
+        },
+        'roughness': {
+            'bake_cat': 'CORE',
+            'bake_subcat': 'ROUGHNESS',
+            'suffix': 'R',
+        },
+        'metallic': {
+            'bake_cat': 'PBR',
+            'bake_subcat': 'METALLIC',
+            'suffix': 'M',
+        },
+        'uv': {
+            'bake_cat': 'WRANG',
+            'bake_subcat': 'ISLANDID',
+            'suffix': 'UV',
+        },
+    },
+    'margin': 8,
+    'ray_dist': 0.001,
+    'bake_samples': 4,
+}
+
+bw_config_10 = {
+    'passes': {
+        'nornal': {
+            'bake_cat': 'CORE',
+            'bake_subcat': 'NORMAL',
+            'suffix': 'N',
+        },
+        'ao': {
+            'bake_cat': 'CORE',
+            'bake_subcat': 'AO',
+            'bake_samples': 16,
+            'suffix': 'AO',
+        },
+    },
+    'margin': 8,
+    'ray_dist': 0.01,
+    'bake_samples': 4,
+}
+
+configs = {'bake_1': bw_config_1, 'bake_10': bw_config_10}
 
 
-def bw_submit(lowpoly_obj_names, resolution=4096, tree_name='robossembler', area=None):
+def bw_submit(lowpoly_obj_names,
+              bw_config,
+              resolution,
+              bake_path=None,
+              area=None,
+              **cg_config):
     ''' Submit session and bake textures with BakeWrangler addon. '''
-    BakeWrangler.register()
-
-    asm_name = os.path.basename(bpy.context.blend_data.filepath).split('.')[0]
-    asm_path = os.path.dirname(bpy.context.blend_data.filepath)
-    textures_path = os.path.join(asm_path,'textures').replace('\\', '/')
-    bake_path = os.path.join(textures_path,'bake')
-    bake_path_double = os.path.join(textures_path,'bake_double')
-    os.makedirs(bake_path, exist_ok=True)
+    if not bake_path:
+        asm_path = os.path.dirname(bpy.context.blend_data.filepath)
+        textures_path = os.path.join(asm_path, 'textures').replace('\\', '/')
+        bake_path = os.path.join(textures_path, 'bake').replace('\\', '/')
+        if os.path.exists(bake_path):
+            shutil.rmtree(bake_path)
+        os.makedirs(bake_path, exist_ok=True)
 
     # create node tree
-    tree = bpy.data.node_groups.new(name=tree_name, type='BakeWrangler_Tree')
+    baking_tree_name = ''.join([item[0] for item in bw_config['passes'].keys()])
+    tree = bpy.data.node_groups.new(name=baking_tree_name, type='BakeWrangler_Tree')
 
     # for default used Compositing Node Editor area
     if not area:
@@ -88,10 +139,10 @@ def bw_submit(lowpoly_obj_names, resolution=4096, tree_name='robossembler', area
     tree.nodes.new('BakeWrangler_OutputSettings').location = (600, 0)
 
     tree.nodes['Mesh Settings'].pinned = True
-    tree.nodes['Mesh Settings']['margin'] = 8
-    tree.nodes['Mesh Settings']['ray_dist'] = 0.001
+    tree.nodes['Mesh Settings']['margin'] = bw_config['margin']
+    tree.nodes['Mesh Settings']['ray_dist'] = bw_config['ray_dist']
     tree.nodes['Sample Settings'].pinned = True
-    tree.nodes['Sample Settings']['bake_samples'] = 4
+    tree.nodes['Sample Settings']['bake_samples'] = bw_config['bake_samples']
     tree.nodes['Pass Settings'].pinned = True
     tree.nodes['Pass Settings']['res_bake_x'] = resolution
     tree.nodes['Pass Settings']['res_bake_y'] = resolution
@@ -105,187 +156,100 @@ def bw_submit(lowpoly_obj_names, resolution=4096, tree_name='robossembler', area
     # batch bake node
     node_batch = tree.nodes.new('BakeWrangler_Output_Batch_Bake')
     node_batch.location = (1000, -500)
-    node_batch_double = tree.nodes.new('BakeWrangler_Output_Batch_Bake')
-    node_batch_double.location = (2000, -1500)
+    node_batch.name = 'Batch'
 
     node_y_pos = 0
+    node_pos = 500
     pass_socket = 0
-    pass_socket_double = 0
     for lp_name in lowpoly_obj_names:
         # run for eatch lowpoly object
         lp = bpy.data.objects[lp_name]
-        mp = bpy.data.objects['_'.join(lp.name.split('_')[:-1] + [midpoly])]
+        mp = bpy.data.objects[
+            '_'.join(lp.name.split('_')[:-1] + [cg_config['midpoly']])
+        ]
        img_name = '_'.join(lp.name.split('_')[:-1]) + '_'
 
         node_inputs = tree.nodes.new('BakeWrangler_Bake_Mesh')
-        node_inputs.location = (-700, node_y_pos - 500)
+        node_inputs.location = (-700, node_y_pos - node_pos)
 
         node_inputs.inputs['Target'].value = lp
         node_inputs.inputs['Source'].value = mp
 
+        def bw_pass(node_pos, bake_pass, pass_socket):
+            node_pass = tree.nodes.new('BakeWrangler_Bake_Pass')
+            node_pass.location = (-200, node_y_pos - node_pos)
+            node_img = tree.nodes.new('BakeWrangler_Output_Image_Path')
+            node_img.location = (200, node_y_pos - node_pos)
+
+            tree.links.new(node_pass.outputs['Color'], node_img.inputs['Color'])
+
+            node_pass.bake_cat = bake_pass['bake_cat']
+            if bake_pass.get('bake_cat') == 'CORE':
+                node_pass.bake_core = bake_pass['bake_subcat']
+            elif bake_pass.get('bake_cat') == 'PBR':
+                node_pass.bake_pbr = bake_pass['bake_subcat']
+            elif bake_pass.get('bake_cat') == 'WRANG':
+                node_pass.bake_wrang = bake_pass['bake_subcat']
+            if bake_pass.get('bake_samples'):
+                node_pass.bake_samples = bake_pass['bake_samples']
+            node_img.inputs['Color'].suffix = bake_pass['suffix']
+            node_img.inputs['Split Output'].disp_path = bake_path
+            node_img.inputs['Split Output'].img_name = img_name
+
+            tree.links.new(node_inputs.outputs['Mesh'], node_pass.inputs[1])
+            tree.links.new(node_img.outputs['Bake'], node_batch.inputs[pass_socket])
+
         # bake passes
-        # Diffuse
-        node_d_pass = tree.nodes.new('BakeWrangler_Bake_Pass')
-        node_d_pass.location = (-200, node_y_pos - 500)
-        node_d_img = tree.nodes.new('BakeWrangler_Output_Image_Path')
-        node_d_img.location = (200, node_y_pos - 500)
-
-        tree.links.new(node_d_pass.outputs['Color'], node_d_img.inputs['Color'])
-
-        node_d_pass.bake_cat = 'PBR'
-        node_d_pass.bake_pbr = 'ALBEDO'
-        node_d_img.inputs['Color'].suffix = 'D'
-        node_d_img.inputs['Split Output'].disp_path = bake_path
-        node_d_img.inputs['Split Output'].img_name = img_name
-        '''
-        ## Curvature
-        node_c_pass = tree.nodes.new('BakeWrangler_Bake_Pass')
-        node_c_pass.location = (-200, node_y_pos -1000)
-        node_c_img = tree.nodes.new('BakeWrangler_Output_Image_Path')
-        node_c_img.location = (200, node_y_pos -1000)
-
-        tree.links.new(node_c_pass.outputs['Color'], node_c_img.inputs['Color'])
-
-        node_c_pass.bake_cat = 'WRANG'
-        node_c_pass.bake_wrang = 'CURVATURE'
-        node_c_img.inputs['Color'].suffix = 'C'
-        node_c_img.inputs['Split Output'].disp_path = bake_path
-        node_c_img.inputs['Split Output'].img_name = img_name
-        '''
-        # Normal
-        node_n_pass = tree.nodes.new('BakeWrangler_Bake_Pass')
-        node_n_pass.location = (-200, node_y_pos - 1500)
-        node_n_img = tree.nodes.new('BakeWrangler_Output_Image_Path')
-        node_n_img.location = (200, node_y_pos - 1500)
-
-        node_n_img_double = tree.nodes.new('BakeWrangler_Output_Image_Path')
-        node_n_img_double.location = (200, node_y_pos - 1750)
-
-        tree.links.new(node_n_pass.outputs['Color'], node_n_img.inputs['Color'])
-        tree.links.new(node_n_pass.outputs['Color'], node_n_img_double.inputs['Color'])
-
-        node_n_pass.bake_cat = 'CORE'
-        node_n_pass.bake_core = 'NORMAL'
-        node_n_img.inputs['Color'].suffix = 'N'
-        node_n_img.inputs['Split Output'].disp_path = bake_path
-        node_n_img.inputs['Split Output'].img_name = img_name
-
-        node_n_img_double.inputs['Color'].suffix = 'N'
-        node_n_img_double.inputs['Split Output'].disp_path = bake_path_double
-        node_n_img_double.inputs['Split Output'].img_name = img_name
-
-        # AO
-        node_ao_pass = tree.nodes.new('BakeWrangler_Bake_Pass')
-        node_ao_pass.location = (-200, node_y_pos - 2000)
-        node_ao_img = tree.nodes.new('BakeWrangler_Output_Image_Path')
-        node_ao_img.location = (200, node_y_pos - 2000)
-
-        node_ao_img_double = tree.nodes.new('BakeWrangler_Output_Image_Path')
-        node_ao_img_double.location = (200, node_y_pos - 2250)
-
-        tree.links.new(node_ao_pass.outputs['Color'], node_ao_img.inputs['Color'])
-        tree.links.new(node_ao_pass.outputs['Color'], node_ao_img_double.inputs['Color'])
-
-        node_ao_pass.bake_cat = 'CORE'
-        node_ao_pass.bake_core = 'AO'
-        node_ao_pass.bake_samples = 32
-        node_ao_img.inputs['Color'].suffix = 'AO'
-        node_ao_img.inputs['Split Output'].disp_path = bake_path
-        node_ao_img.inputs['Split Output'].img_name = img_name
-
-        node_ao_img_double.inputs['Color'].suffix = 'AO'
-        node_ao_img_double.inputs['Split Output'].disp_path = bake_path_double
-        node_ao_img_double.inputs['Split Output'].img_name = img_name
-
-        # Roughness
-        node_r_pass = tree.nodes.new('BakeWrangler_Bake_Pass')
-        node_r_pass.location = (-200, node_y_pos - 2500)
-        node_r_img = tree.nodes.new('BakeWrangler_Output_Image_Path')
-        node_r_img.location = (200, node_y_pos - 2500)
-
-        tree.links.new(node_r_pass.outputs['Color'], node_r_img.inputs['Color'])
-
-        node_r_pass.bake_cat = 'CORE'
-        node_r_pass.bake_core = 'ROUGHNESS'
-        node_r_img.inputs['Color'].suffix = 'R'
-        node_r_img.inputs['Split Output'].disp_path = bake_path
-        node_r_img.inputs['Split Output'].img_name = img_name
-
-        # Metallic
-        node_m_pass = tree.nodes.new('BakeWrangler_Bake_Pass')
-        node_m_pass.location = (-200, node_y_pos - 3000)
-        node_m_img = tree.nodes.new('BakeWrangler_Output_Image_Path')
-        node_m_img.location = (200, node_y_pos - 3000)
-
-        tree.links.new(node_m_pass.outputs['Color'], node_m_img.inputs['Color'])
-
-        node_m_pass.bake_cat = 'PBR'
-        node_m_pass.bake_pbr = 'METALLIC'
-        node_m_img.inputs['Color'].suffix = 'M'
-        node_m_img.inputs['Split Output'].disp_path = bake_path
-        node_m_img.inputs['Split Output'].img_name = img_name
-
-        # UV
-        node_uv_pass = tree.nodes.new('BakeWrangler_Bake_Pass')
-        node_uv_pass.location = (-200, node_y_pos - 3500)
-        node_uv_img = tree.nodes.new('BakeWrangler_Output_Image_Path')
-        node_uv_img.location = (200, node_y_pos - 3500)
-
-        tree.links.new(node_uv_pass.outputs['Color'], node_uv_img.inputs['Color'])
-
-        node_uv_pass.bake_cat = 'WRANG'
-        node_uv_pass.bake_wrang = 'ISLANDID'
-        node_uv_img.inputs['Color'].suffix = 'UV'
-        node_uv_img.inputs['Split Output'].disp_path = bake_path
-        node_uv_img.inputs['Split Output'].img_name = img_name
-
-        # connect meshes to passes
-        tree.links.new(node_inputs.outputs['Mesh'], node_d_pass.inputs[1])
-        '''
-        tree.links.new(node_inputs.outputs['Mesh'], node_c_pass.inputs[1])
-        '''
-        tree.links.new(node_inputs.outputs['Mesh'], node_n_pass.inputs[1])
-        tree.links.new(node_inputs.outputs['Mesh'], node_ao_pass.inputs[1])
-        tree.links.new(node_inputs.outputs['Mesh'], node_r_pass.inputs[1])
-        tree.links.new(node_inputs.outputs['Mesh'], node_m_pass.inputs[1])
-        tree.links.new(node_inputs.outputs['Mesh'], node_uv_pass.inputs[1])
-
-        # batch bake node
-        tree.links.new(node_d_img.outputs['Bake'], node_batch.inputs[pass_socket])
-        '''
-        pass_socket += 1
-        tree.links.new(node_c_img.outputs['Bake'], node_batch.inputs[pass_socket])
-        '''
-        pass_socket += 1
-        tree.links.new(node_n_img.outputs['Bake'], node_batch.inputs[pass_socket])
-        pass_socket += 1
-        tree.links.new(node_ao_img.outputs['Bake'], node_batch.inputs[pass_socket])
-        pass_socket += 1
-        tree.links.new(node_r_img.outputs['Bake'], node_batch.inputs[pass_socket])
-        pass_socket += 1
-        tree.links.new(node_m_img.outputs['Bake'], node_batch.inputs[pass_socket])
-        pass_socket += 1
-        tree.links.new(node_uv_img.outputs['Bake'], node_batch.inputs[pass_socket])
-        pass_socket += 1
-
-        # batch bake node double
-        tree.links.new(node_n_img_double.outputs['Bake'],
-                       node_batch_double.inputs[pass_socket_double])
-        pass_socket_double += 1
-        tree.links.new(node_ao_img_double.outputs['Bake'],
-                       node_batch_double.inputs[pass_socket_double])
-        pass_socket_double += 1
-
+        for bake_pass in bw_config['passes'].keys():
+            bw_pass(node_pos, bw_config['passes'][bake_pass], pass_socket)
+            pass_socket += 1
+            node_pos += 500
+
+        reroute1 = tree.nodes.new('NodeReroute')
+        reroute1.location = (-1000, 0)
+        reroute2 = tree.nodes.new('NodeReroute')
+        reroute2.location = (2000, 0)
+        tree.links.new(reroute1.outputs[0], reroute2.inputs[0])
+        node_pos = 500
         node_y_pos -= 4000
 
-    bpy.ops.bake_wrangler.bake_pass(tree=tree_name, node=node_batch.name, sock=-1)
-    logger.info('Baking first pass is finished!')
-
-    # double pass
-    tree.nodes['Mesh Settings']['ray_dist'] = 0.01
-
-    bpy.ops.bake_wrangler.bake_pass(tree=tree_name, node=node_batch_double.name, sock=-1)
-    logger.info('Baking double pass is finished!')
-
-    return textures_path
+    return tree, pass_socket
+
+
+def bw_bake(lowpoly_obj_names, textures_path, resolution, **cg_config):
+    BakeWrangler.register()
+    output_paths = []
+    for config in configs:
+        # 1 pass
+        bake_path = os.path.join(textures_path, config).replace('\\', '/')
+        if os.path.exists(bake_path):
+            shutil.rmtree(bake_path)
+        os.makedirs(bake_path, exist_ok=True)
+        tree_obj, count = bw_submit(
+            lowpoly_obj_names, configs[config], resolution, bake_path, **cg_config)
+
+        node_batch = tree_obj.nodes['Batch']
+
+        bpy.ops.bake_wrangler.bake_pass(tree=tree_obj.name, node=node_batch.name, sock=-1)
+
+        logger.info('Baking pipeline is started!')
+        # delay until render will be complete
+        # ______________________________
+        exit_delay = 180
+        start_time = time.time()
+        while bpy.context.window_manager.bw_status == 1:
+            time.sleep(2)
+            if not os.listdir(bake_path):
+                if time.time() > start_time + exit_delay:
+                    break
+            if not len(os.listdir(bake_path)) < count:
+                break
+        # ______________________________
+        assert len(os.listdir(bake_path)) == count, (
+            'Warning! Baked only {} of {} textures!'.format(
+                len(os.listdir(bake_path)), count))
+        logger.info('Baking %s textures is finished!', len(os.listdir(bake_path)))
+
+        output_paths.append(bake_path)
+
+    return output_paths
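Note: bw_bake now runs bw_submit once per entry in configs ('bake_1' with ray_dist 0.001 and 'bake_10' with ray_dist 0.01), polls window_manager.bw_status until BakeWrangler reports completion or the texture count matches, and returns both bake directories. A hypothetical end-to-end call; the textures_path value is an assumption:

    # assumed wiring of the two refactored stages
    textures_path = '/tmp/asm/textures'
    bake_paths = bw_bake(lowpoly_names, textures_path, 4096, **cg_config)
    # bake_paths == [<textures_path>/bake_1, <textures_path>/bake_10]
    compose_baked_textures(textures_path, bake_paths, 4096)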
@@ -21,16 +21,19 @@ import os
 import bpy
 
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=logging.INFO)
 
 
-def compose_baked_textures(textures_path, resolution=4096):
+def compose_baked_textures(textures_path, bake_paths, resolution):
     ''' Mix, edit, and inpaint baked textures to production condition. '''
+    logger.info('Composing textures started!')
+    # cleanup
+    for img in os.listdir(textures_path):
+        if img.startswith('T_'):
+            os.remove(os.path.join(textures_path, img).replace('\\', '/'))
 
-    bake_path = os.path.join(textures_path, 'bake')
-    bake_path_double = os.path.join(textures_path, 'bake_double')
+    bake_path_1, bake_path_10, = bake_paths
 
-    baked_images = sorted(os.listdir(bake_path))
+    baked_images = sorted(os.listdir(bake_path_1))
 
     bpy.context.scene.render.resolution_x = resolution
     bpy.context.scene.render.resolution_y = resolution

@@ -40,7 +43,7 @@ def compose_baked_textures(textures_path, resolution=4096):
     bpy.context.scene.render.filepath = textures_path
     bpy.context.scene.render.image_settings.file_format = 'PNG'
     bpy.context.scene.render.image_settings.color_mode = 'RGB'
-    bpy.context.scene.render.image_settings.compression = 100
+    bpy.context.scene.render.image_settings.compression = 50
 
     bpy.context.scene.display_settings.display_device = 'sRGB'
     bpy.context.scene.view_settings.view_transform = 'Standard'

@@ -49,6 +52,8 @@ def compose_baked_textures(textures_path, resolution=4096):
     bpy.context.scene.view_settings.gamma = 1
     bpy.context.scene.view_settings.use_curve_mapping = False
 
+    bpy.context.scene.cycles.use_auto_tile = False
+
     bpy.context.scene.render.use_compositing = True
     bpy.context.scene.render.dither_intensity = 2
     bpy.context.scene.use_nodes = True

@@ -63,7 +68,7 @@ def compose_baked_textures(textures_path, resolution=4096):
     # render for eatch uv packs
     uv_images = [image for image in baked_images if 'UV' in image]
     for uv_image in uv_images:
-        uv_obj = bpy.data.images.load(os.path.join(bake_path, uv_image))
+        uv_obj = bpy.data.images.load(os.path.join(bake_path_1, uv_image))
         uv_node = tree.nodes.new(type='CompositorNodeImage')
         uv_node.image = bpy.data.images[uv_obj.name]
         uv_node.location = 0, (node_y_pos + 500)

@@ -77,16 +82,19 @@ def compose_baked_textures(textures_path, resolution=4096):
         tree.links.new(key_node.outputs['Matte'], de_node.inputs['Mask'])
 
         slot_name = uv_image.split('_UV')[0]
+        logger.info('Prepare %s image slot!', slot_name)
         for image in baked_images:
-            if '_UV' in image and image.startswith(slot_name):
+            if '_UV' in image:
+                continue
+            if not image.startswith(slot_name):
                 continue
             if '_AO' in image:
-                image_obj = bpy.data.images.load(os.path.join(bake_path, image))
+                image_obj = bpy.data.images.load(os.path.join(bake_path_1, image))
                 image_node = tree.nodes.new(type='CompositorNodeImage')
                 image_node.location = -500, (node_y_pos + 200)
                 image_node.image = bpy.data.images[image_obj.name]
 
-                image_obj_double = bpy.data.images.load(os.path.join(bake_path_double, image))
+                image_obj_double = bpy.data.images.load(os.path.join(bake_path_10, image))
                 image_node_double = tree.nodes.new(type='CompositorNodeImage')
                 image_node_double.location = -500, (node_y_pos - 200)
                 image_node_double.image = bpy.data.images[image_obj_double.name]

@@ -104,12 +112,12 @@ def compose_baked_textures(textures_path, resolution=4096):
                 tree.links.new(lighten_node.outputs['Image'], aa_node.inputs['Image'])
 
             elif '_N' in image:
-                image_obj = bpy.data.images.load(os.path.join(bake_path, image))
+                image_obj = bpy.data.images.load(os.path.join(bake_path_1, image))
                 image_node = tree.nodes.new(type='CompositorNodeImage')
                 image_node.location = -1000, (node_y_pos + 200)
|
||||||
image_node.image = bpy.data.images[image_obj.name]
|
image_node.image = bpy.data.images[image_obj.name]
|
||||||
|
|
||||||
image_obj_double = bpy.data.images.load(os.path.join(bake_path_double, image))
|
image_obj_double = bpy.data.images.load(os.path.join(bake_path_10, image))
|
||||||
image_node_double = tree.nodes.new(type='CompositorNodeImage')
|
image_node_double = tree.nodes.new(type='CompositorNodeImage')
|
||||||
image_node_double.location = -1000, (node_y_pos - 200)
|
image_node_double.location = -1000, (node_y_pos - 200)
|
||||||
image_node_double.image = bpy.data.images[image_obj_double.name]
|
image_node_double.image = bpy.data.images[image_obj_double.name]
|
||||||
|
@ -133,7 +141,7 @@ def compose_baked_textures(textures_path, resolution=4096):
|
||||||
tree.links.new(mix_node.outputs['Image'], aa_node.inputs['Image'])
|
tree.links.new(mix_node.outputs['Image'], aa_node.inputs['Image'])
|
||||||
|
|
||||||
elif '_R' in image:
|
elif '_R' in image:
|
||||||
image_obj = bpy.data.images.load(os.path.join(bake_path, image))
|
image_obj = bpy.data.images.load(os.path.join(bake_path_1, image))
|
||||||
image_node = tree.nodes.new(type='CompositorNodeImage')
|
image_node = tree.nodes.new(type='CompositorNodeImage')
|
||||||
image_node.location = -600, (node_y_pos + 100)
|
image_node.location = -600, (node_y_pos + 100)
|
||||||
image_node.image = bpy.data.images[image_obj.name]
|
image_node.image = bpy.data.images[image_obj.name]
|
||||||
|
@ -159,7 +167,7 @@ def compose_baked_textures(textures_path, resolution=4096):
|
||||||
tree.links.new(mix_node.outputs['Image'], aa_node.inputs['Image'])
|
tree.links.new(mix_node.outputs['Image'], aa_node.inputs['Image'])
|
||||||
|
|
||||||
else:
|
else:
|
||||||
image_obj = bpy.data.images.load(os.path.join(bake_path, image))
|
image_obj = bpy.data.images.load(os.path.join(bake_path_1, image))
|
||||||
image_node = tree.nodes.new(type='CompositorNodeImage')
|
image_node = tree.nodes.new(type='CompositorNodeImage')
|
||||||
image_node.location = 0, node_y_pos
|
image_node.location = 0, node_y_pos
|
||||||
image_node.image = bpy.data.images[image_obj.name]
|
image_node.image = bpy.data.images[image_obj.name]
|
||||||
|
@ -216,7 +224,7 @@ def compose_baked_textures(textures_path, resolution=4096):
|
||||||
node_y_pos -= 500
|
node_y_pos -= 500
|
||||||
|
|
||||||
# render
|
# render
|
||||||
bpy.ops.render.render(use_viewport=True)
|
bpy.ops.render.render(animation=False)
|
||||||
# remove all nodes after rendering
|
# remove all nodes after rendering
|
||||||
for node in tree.nodes:
|
for node in tree.nodes:
|
||||||
tree.nodes.remove(node)
|
tree.nodes.remove(node)
|
||||||
|
@ -229,8 +237,8 @@ def compose_baked_textures(textures_path, resolution=4096):
|
||||||
]
|
]
|
||||||
for texture in composed_textures:
|
for texture in composed_textures:
|
||||||
os.rename(
|
os.rename(
|
||||||
os.path.join(textures_path, texture),
|
os.path.join(textures_path, texture).replace('\\', '/'),
|
||||||
os.path.join(textures_path, texture.split('000')[0] + '.png')
|
os.path.join(textures_path, texture.rpartition('0001')[0] + '.png').replace('\\', '/')
|
||||||
)
|
)
|
||||||
|
|
||||||
logger.info('Composing %s textures is finished!', len(composed_textures))
|
logger.info('Composing %s textures is finished!', len(composed_textures))
|
||||||
|
|
|
@ -18,10 +18,10 @@ __version__ = '0.1'
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import random
|
||||||
import bpy
|
import bpy
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
logging.basicConfig(level=logging.INFO)
|
|
||||||
|
|
||||||
|
|
||||||
def assign_pbr_material(obj_names, textures_path):
|
def assign_pbr_material(obj_names, textures_path):
|
||||||
|
@ -44,41 +44,48 @@ def assign_pbr_material(obj_names, textures_path):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# create Principled BSDF based material
|
# create Principled BSDF based material
|
||||||
bmat = bpy.data.materials.new(name=part_name)
|
bmat = bpy.data.materials.new(name=f'M_{part_name}')
|
||||||
bmat.use_nodes = True
|
bmat.use_nodes = True
|
||||||
principled_node = bmat.node_tree.nodes["Principled BSDF"]
|
principled_node = bmat.node_tree.nodes["Principled BSDF"]
|
||||||
|
random_color = (
|
||||||
|
round(random.uniform(0.1, 0.9), 3),
|
||||||
|
round(random.uniform(0.1, 0.9), 3),
|
||||||
|
round(random.uniform(0.1, 0.9), 3),
|
||||||
|
1.0)
|
||||||
|
bmat.diffuse_color = random_color
|
||||||
|
principled_node.inputs['Base Color'].default_value = random_color
|
||||||
|
|
||||||
for texture in textures:
|
for texture in textures:
|
||||||
print(1, 'texture', texture)
|
|
||||||
if not texture.startswith('T_' + part_name):
|
if not texture.startswith('T_' + part_name):
|
||||||
continue
|
continue
|
||||||
print(2, 'texture', texture)
|
|
||||||
if '_ao.' in texture:
|
if '_ao.' in texture:
|
||||||
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
||||||
image_obj.colorspace_settings.name = 'Linear'
|
image_obj.colorspace_settings.name = 'Linear'
|
||||||
texture_ao = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
texture_ao = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
||||||
|
texture_ao.name = texture_ao.label = 'ambient_occlusion'
|
||||||
texture_ao.location = -1000, 500
|
texture_ao.location = -1000, 500
|
||||||
texture_ao.image = bpy.data.images[image_obj.name]
|
texture_ao.image = bpy.data.images[image_obj.name]
|
||||||
|
|
||||||
mix_node = bmat.node_tree.nodes.new(type="ShaderNodeMix")
|
mix_node = bmat.node_tree.nodes.new(type="ShaderNodeMix")
|
||||||
mix_node.location = -500, 500
|
mix_node.location = -500, 500
|
||||||
#mix_node.data_type = 'RGBA'
|
mix_node.data_type = 'RGBA'
|
||||||
mix_node.blend_type = 'MULTIPLY'
|
mix_node.blend_type = 'MULTIPLY'
|
||||||
mix_node.inputs['Factor'].default_value = 0.5
|
mix_node.inputs['Factor'].default_value = 0.5
|
||||||
|
|
||||||
bmat.node_tree.links.new(texture_ao.outputs['Color'], mix_node.inputs['B'])
|
bmat.node_tree.links.new(texture_ao.outputs['Color'], mix_node.inputs[7])
|
||||||
bmat.node_tree.links.new(mix_node.outputs[0], principled_node.inputs['Base Color'])
|
bmat.node_tree.links.new(mix_node.outputs[2], principled_node.inputs['Base Color'])
|
||||||
|
|
||||||
|
|
||||||
if '_d.' in texture:
|
if '_d.' in texture:
|
||||||
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
||||||
image_obj.colorspace_settings.name = 'sRGB'
|
image_obj.colorspace_settings.name = 'sRGB'
|
||||||
texture_d = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
texture_d = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
||||||
|
texture_d.name = texture_d.label = 'diffuse'
|
||||||
texture_d.location = -1000, 0
|
texture_d.location = -1000, 0
|
||||||
texture_d.image = bpy.data.images[image_obj.name]
|
texture_d.image = bpy.data.images[image_obj.name]
|
||||||
|
|
||||||
if texture_ao:
|
if texture_ao:
|
||||||
bmat.node_tree.links.new(texture_d.outputs['Color'], mix_node.inputs['A'])
|
bmat.node_tree.links.new(texture_d.outputs['Color'], mix_node.inputs[6])
|
||||||
else:
|
else:
|
||||||
bmat.node_tree.links.new(texture_d.outputs['Color'], principled_node.inputs['Base Color'])
|
bmat.node_tree.links.new(texture_d.outputs['Color'], principled_node.inputs['Base Color'])
|
||||||
|
|
||||||
|
@ -86,6 +93,7 @@ def assign_pbr_material(obj_names, textures_path):
|
||||||
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
||||||
image_obj.colorspace_settings.name = 'Linear'
|
image_obj.colorspace_settings.name = 'Linear'
|
||||||
texture_m = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
texture_m = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
||||||
|
texture_m.name = texture_m.label = 'metallic'
|
||||||
texture_m.location = -1000, -500
|
texture_m.location = -1000, -500
|
||||||
texture_m.image = bpy.data.images[image_obj.name]
|
texture_m.image = bpy.data.images[image_obj.name]
|
||||||
|
|
||||||
|
@ -95,6 +103,7 @@ def assign_pbr_material(obj_names, textures_path):
|
||||||
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
||||||
image_obj.colorspace_settings.name = 'Linear'
|
image_obj.colorspace_settings.name = 'Linear'
|
||||||
texture_r = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
texture_r = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
||||||
|
texture_r.name = texture_r.label = 'roughness'
|
||||||
texture_r.location = -1000, -1000
|
texture_r.location = -1000, -1000
|
||||||
texture_r.image = bpy.data.images[image_obj.name]
|
texture_r.image = bpy.data.images[image_obj.name]
|
||||||
|
|
||||||
|
@ -104,6 +113,7 @@ def assign_pbr_material(obj_names, textures_path):
|
||||||
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
image_obj = bpy.data.images.load(os.path.join(textures_path, texture))
|
||||||
image_obj.colorspace_settings.name = 'Non-Color'
|
image_obj.colorspace_settings.name = 'Non-Color'
|
||||||
texture_n = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
texture_n = bmat.node_tree.nodes.new(type="ShaderNodeTexImage")
|
||||||
|
texture_n.name = texture_n.label = 'normal'
|
||||||
texture_n.location = -1000, -1500
|
texture_n.location = -1000, -1500
|
||||||
texture_n.image = bpy.data.images[image_obj.name]
|
texture_n.image = bpy.data.images[image_obj.name]
|
||||||
|
|
||||||
|
@ -113,7 +123,6 @@ def assign_pbr_material(obj_names, textures_path):
|
||||||
bmat.node_tree.links.new(texture_n.outputs['Color'], normal_node.inputs['Color'])
|
bmat.node_tree.links.new(texture_n.outputs['Color'], normal_node.inputs['Color'])
|
||||||
bmat.node_tree.links.new(normal_node.outputs['Normal'], principled_node.inputs['Normal'])
|
bmat.node_tree.links.new(normal_node.outputs['Normal'], principled_node.inputs['Normal'])
|
||||||
|
|
||||||
|
|
||||||
obj.data.materials.append(bmat)
|
obj.data.materials.append(bmat)
|
||||||
|
|
||||||
logger.info('Shading of %s objects is finished!', len(obj_names))
|
logger.info('Shading of %s objects is finished!', len(obj_names))
|
||||||
|
|
|
@ -54,4 +54,26 @@ def copy_collections_recursive(collection, suffix='copy', linked=False):
|
||||||
parent = double_lut[obj.parent]
|
parent = double_lut[obj.parent]
|
||||||
if parent:
|
if parent:
|
||||||
double.parent = parent
|
double.parent = parent
|
||||||
|
return '_'.join((collection.name, suffix))
|
||||||
|
|
||||||
|
|
||||||
|
def unlink_from_collections(obj):
|
||||||
|
''' Unlinking object from all collections. '''
|
||||||
|
for col in bpy.data.collections:
|
||||||
|
if obj.name in col.objects:
|
||||||
|
col.objects.unlink(obj)
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
def remove_collections_with_objects(collection=None):
|
||||||
|
'''Removes all collection (or given) with objects from scene '''
|
||||||
|
if collection:
|
||||||
|
for obj in collection.objects:
|
||||||
|
bpy.data.objects.remove(obj, do_unlink=True)
|
||||||
|
bpy.data.collections.remove(collection)
|
||||||
|
else:
|
||||||
|
for col in bpy.data.collections:
|
||||||
|
for obj in col.objects:
|
||||||
|
bpy.data.objects.remove(obj, do_unlink=True)
|
||||||
|
bpy.data.collections.remove(col)
|
||||||
return True
|
return True
|
||||||
|
|
|
@ -16,12 +16,8 @@ Basic mesh processing for asset pipeline.
|
||||||
'''
|
'''
|
||||||
__version__ = '0.1'
|
__version__ = '0.1'
|
||||||
|
|
||||||
import logging
|
|
||||||
import bpy
|
import bpy
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
logging.basicConfig(level=logging.INFO)
|
|
||||||
|
|
||||||
|
|
||||||
def shell_remesher(lowpoly_obj, mod_name='shell_mod', tree_name='shell_tree'):
|
def shell_remesher(lowpoly_obj, mod_name='shell_mod', tree_name='shell_tree'):
|
||||||
''' Conctruct geometry nodes modifier. '''
|
''' Conctruct geometry nodes modifier. '''
|
||||||
|
|
|
@ -16,22 +16,22 @@ Various mesh tools for Edit Mode.
|
||||||
'''
|
'''
|
||||||
__version__ = '0.1'
|
__version__ = '0.1'
|
||||||
|
|
||||||
|
import math
|
||||||
import bpy
|
import bpy
|
||||||
import bmesh
|
import bmesh
|
||||||
from math import radians
|
|
||||||
|
|
||||||
|
|
||||||
def select_peaks(me, peak_limit_angle=60, peak_accuracy_angle=10):
|
def select_peaks(me, peak_limit_angle=60, peak_accuracy_angle=10):
|
||||||
''' Select sharp vertices that stand alone. '''
|
''' Select sharp vertices that stand alone. '''
|
||||||
bm = bmesh.from_edit_mesh(me)
|
bm = bmesh.from_edit_mesh(me)
|
||||||
|
|
||||||
def is_sharp(vert, eps=radians(peak_limit_angle)):
|
def is_sharp(vert, eps=math.radians(peak_limit_angle)):
|
||||||
sharps = []
|
sharps = []
|
||||||
face_before = None
|
face_before = None
|
||||||
for face in vert.link_faces:
|
for face in vert.link_faces:
|
||||||
if face_before:
|
if face_before:
|
||||||
face_angle = face.normal.angle(face_before.normal)
|
face_angle = face.normal.angle(face_before.normal)
|
||||||
if face_angle > radians(peak_accuracy_angle):
|
if face_angle > math.radians(peak_accuracy_angle):
|
||||||
angle = vert.normal.angle(face.normal)
|
angle = vert.normal.angle(face.normal)
|
||||||
if angle > eps:
|
if angle > eps:
|
||||||
sharps.append(angle)
|
sharps.append(angle)
|
||||||
|
@ -65,7 +65,9 @@ def select_peaks(me, peak_limit_angle=60, peak_accuracy_angle=10):
|
||||||
def select_zero_faces(me):
|
def select_zero_faces(me):
|
||||||
''' Select very small faces. '''
|
''' Select very small faces. '''
|
||||||
bm = bmesh.from_edit_mesh(me)
|
bm = bmesh.from_edit_mesh(me)
|
||||||
[f.select_set(True) for f in bm.faces if f.calc_area() < 1e-7]
|
for myface in bm.faces:
|
||||||
|
if myface.calc_area() < 1e-7:
|
||||||
|
myface.select_set(True)
|
||||||
bmesh.update_edit_mesh(me)
|
bmesh.update_edit_mesh(me)
|
||||||
return me
|
return me
|
||||||
|
|
||||||
|
|
|
@ -26,7 +26,7 @@ def mesh_to_mesh(obj):
|
||||||
eval_mesh = obj.evaluated_get(deg).data.copy()
|
eval_mesh = obj.evaluated_get(deg).data.copy()
|
||||||
|
|
||||||
orig_name = obj.name
|
orig_name = obj.name
|
||||||
obj.name = ('{}_temp'.format(orig_name))
|
obj.name = '{}_temp'.format(orig_name)
|
||||||
converted_obj = bpy.data.objects.new(orig_name, eval_mesh)
|
converted_obj = bpy.data.objects.new(orig_name, eval_mesh)
|
||||||
converted_obj.matrix_world = obj.matrix_world
|
converted_obj.matrix_world = obj.matrix_world
|
||||||
|
|
||||||
|
|
|
@ -1,36 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright (C) 2023 Ilia Kurochkin <brothermechanic@gmail.com>
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation; either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
__version__ = '0.1'
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import bpy
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
logging.basicConfig(level=logging.INFO)
|
|
||||||
|
|
||||||
|
|
||||||
def remove_collections(collection_name=None):
|
|
||||||
'''Removes all all collection or collection_name only'''
|
|
||||||
if collection_name:
|
|
||||||
collection = bpy.data.collections.get(collection_name)
|
|
||||||
try:
|
|
||||||
for obj in collection.objects:
|
|
||||||
bpy.data.objects.remove(obj, do_unlink=True)
|
|
||||||
bpy.data.collections.remove(collection)
|
|
||||||
except Exception:
|
|
||||||
logger.info(f'There is no collection {collection_name!r} in scene')
|
|
||||||
else:
|
|
||||||
for collection in bpy.data.collections:
|
|
||||||
for obj in collection.objects:
|
|
||||||
bpy.data.objects.remove(obj, do_unlink=True)
|
|
||||||
bpy.data.collections.remove(collection)
|
|
|
@ -28,7 +28,6 @@ from freecad.utils.is_object_solid import is_object_solid
|
||||||
from utils.custom_parser import CustomArgumentParser
|
from utils.custom_parser import CustomArgumentParser
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
logging.basicConfig(level=logging.INFO)
|
|
||||||
|
|
||||||
|
|
||||||
def freecad_to_json(**kwargs):
|
def freecad_to_json(**kwargs):
|
||||||
|
@ -180,8 +179,8 @@ parser.add_argument(
|
||||||
required=False
|
required=False
|
||||||
)
|
)
|
||||||
|
|
||||||
kwargs = vars(parser.parse_known_args()[0])
|
fc_kwargs = vars(parser.parse_known_args()[0])
|
||||||
|
|
||||||
freecad_to_json(**kwargs)
|
freecad_to_json(**fc_kwargs)
|
||||||
|
|
||||||
logger.info('FreeCAD scene passed!')
|
logger.info('FreeCAD scene passed!')
|
||||||
|
|
|
@ -2,19 +2,20 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
'''
|
'''
|
||||||
DESCRIPTION.
|
DESCRIPTION.
|
||||||
Convert and setup FreeCAD solid objects to 3d assets.
|
Convert and setup FreeCAD scene to cg assets.
|
||||||
Support Blender compiled as a Python Module only!
|
Support Blender compiled as a Python Module only!
|
||||||
'''
|
'''
|
||||||
__version__ = '0.6'
|
__version__ = '0.7'
|
||||||
import collections
|
import collections
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
import shutil
|
||||||
import os
|
import os
|
||||||
from itertools import zip_longest
|
from itertools import zip_longest
|
||||||
|
|
||||||
from blender.utils.remove_collections import remove_collections
|
|
||||||
from blender.utils.cleanup_orphan_data import cleanup_orphan_data
|
from blender.utils.cleanup_orphan_data import cleanup_orphan_data
|
||||||
from blender.utils.collection_tools import copy_collections_recursive
|
from blender.utils.collection_tools import (copy_collections_recursive,
|
||||||
|
remove_collections_with_objects)
|
||||||
from utils.cmd_proc import cmd_proc
|
from utils.cmd_proc import cmd_proc
|
||||||
from blender.import_cad.build_blender_scene import json_to_blend
|
from blender.import_cad.build_blender_scene import json_to_blend
|
||||||
from blender.processing.restruct_hierarchy_by_lcs import restruct_hierarchy
|
from blender.processing.restruct_hierarchy_by_lcs import restruct_hierarchy
|
||||||
|
@ -22,142 +23,159 @@ from blender.processing.highpoly_setup import setup_meshes
|
||||||
from blender.processing.midpoly_setup import hightpoly_collections_to_midpoly
|
from blender.processing.midpoly_setup import hightpoly_collections_to_midpoly
|
||||||
from blender.processing.lowpoly_setup import parts_to_shells
|
from blender.processing.lowpoly_setup import parts_to_shells
|
||||||
from blender.processing.uv_setup import uv_unwrap
|
from blender.processing.uv_setup import uv_unwrap
|
||||||
from blender.texturing.bake_submitter import bw_submit
|
from blender.texturing.bake_submitter import bw_bake
|
||||||
from blender.texturing.composing import compose_baked_textures
|
from blender.texturing.composing import compose_baked_textures
|
||||||
from blender.texturing.shading import assign_pbr_material
|
from blender.texturing.shading import assign_pbr_material
|
||||||
from blender.export.dae import export_dae
|
from blender.export.dae import export_dae
|
||||||
from blender.export.stl import export_stl
|
from blender.export.stl import export_stl
|
||||||
|
from blender.export.fbx import export_fbx
|
||||||
import bpy
|
import bpy
|
||||||
import mathutils
|
|
||||||
|
# TODO Path
|
||||||
|
freecad_to_json_script = 'freecad_to_json.py'
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
logging.basicConfig(level=logging.INFO)
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
|
||||||
'''
|
# TODO NIX
|
||||||
IMPORT COLLECTIONS NAMIG CONVENTION:
|
freecad_bin = 'freecadcmd'
|
||||||
Parts - collection for mesh objects
|
# TODO NIX BakwWrangler/nodes/node_tree.py:659
|
||||||
LCS - collection for location points
|
blender_bin = 'blender'
|
||||||
Hierarchy - collection for hierarchy locators
|
|
||||||
|
|
||||||
LCS POINT'S SUFFIXES CONVENTION:
|
# TODO WEBAPP
|
||||||
'_in' - inlet suffix
|
cg_config = {
|
||||||
'_out' - outlet suffix
|
'lcs_col_name': 'LCS',
|
||||||
'_root' - root suffix
|
'parts_col_name': 'Parts',
|
||||||
|
'midpoly_col_name': 'Midpoly',
|
||||||
CG ASSETS SUFFIXES CONVENTION:
|
'lowpoly_col_name': 'Lowpoly',
|
||||||
'_hp' - hightpoly asset (reference baking source)
|
'lcs_inlet': 'in',
|
||||||
'_lp' - lowpoly asset (prepared for game engines)
|
'lcs_outlet': 'out',
|
||||||
'_render' - root suffix (prepared for render engines)
|
'lcs_root': 'root',
|
||||||
'''
|
'hightpoly': 'hp',
|
||||||
|
'midpoly': 'mp',
|
||||||
# ENV
|
'lowpoly': 'lp',
|
||||||
freecadcmd = 'freecadcmd'
|
'render': 'render',
|
||||||
fcstd_data_script = 'freecad_to_json.py'
|
}
|
||||||
# COLLECTIONS NAMIG CONVENTION
|
|
||||||
parts_col_name = 'Parts'
|
|
||||||
lcs_col_name = 'LCS'
|
|
||||||
hierarchy_col_name = 'Hierarchy'
|
|
||||||
lowpoly_col_name = 'Lowpoly'
|
|
||||||
# LCS POINT'S SUFFIXES CONVENTION
|
|
||||||
inlet = '_in'
|
|
||||||
outlet = '_out'
|
|
||||||
root = '_root'
|
|
||||||
# CG ASSETS SUFFIXES CONVENTION
|
|
||||||
hightpoly = '_hp'
|
|
||||||
midpoly = 'mp'
|
|
||||||
lowpoly = '_lp'
|
|
||||||
render = '_render'
|
|
||||||
|
|
||||||
|
|
||||||
def cg_pipeline(**kwargs):
|
def cg_pipeline(**kwargs):
|
||||||
''' CG asset creation pipeline '''
|
''' CG asset creation pipeline '''
|
||||||
|
assembly_name = kwargs['fcstd_path'].rpartition('/')[2].rpartition('.')[0]
|
||||||
|
# freecad don't like other paths
|
||||||
|
parts_sequence_path = kwargs.pop('parts_sequence_path', None)
|
||||||
blend_path = kwargs.pop('blend_path', None)
|
blend_path = kwargs.pop('blend_path', None)
|
||||||
mesh_export_path = kwargs.pop('mesh_export_path', None)
|
export_path = kwargs.pop('export_path', None)
|
||||||
config = kwargs.pop('config', None)
|
|
||||||
|
# for eatch sequence
|
||||||
|
parts_sequence = None
|
||||||
|
if parts_sequence_path and os.path.isfile(parts_sequence_path):
|
||||||
|
with open(parts_sequence_path, 'r', encoding='utf-8') as sequence_file:
|
||||||
|
parts_sequence = json.load(sequence_file)
|
||||||
|
# output file management
|
||||||
|
if not blend_path:
|
||||||
|
blend_path = kwargs['fcstd_path'].replace('\\', '/').rpartition('/')[0]
|
||||||
|
if not export_path:
|
||||||
|
export_path = os.path.join(blend_path, 'assets').replace('\\', '/')
|
||||||
|
os.makedirs(blend_path, exist_ok=True)
|
||||||
|
|
||||||
# prepare blend file
|
# prepare blend file
|
||||||
|
blend_file = os.path.join(blend_path, f'{assembly_name}.blend').replace('\\', '/')
|
||||||
remove_collections()
|
remove_collections_with_objects()
|
||||||
cleanup_orphan_data()
|
cleanup_orphan_data()
|
||||||
|
|
||||||
# convert FreeCAD scene to Blender scene
|
# 1 convert FreeCAD scene to Blender scene
|
||||||
imported_objects = json_to_blend(
|
imported_objects = json_to_blend(
|
||||||
json.loads(
|
json.loads(
|
||||||
cmd_proc(freecadcmd,
|
cmd_proc(freecad_bin,
|
||||||
fcstd_data_script,
|
freecad_to_json_script,
|
||||||
'--',
|
'--',
|
||||||
**kwargs
|
**kwargs
|
||||||
).split('FreeCAD ')[0]
|
).split('FreeCAD ')[0]
|
||||||
)
|
), **cg_config
|
||||||
)
|
)
|
||||||
|
|
||||||
# save import in blender scene
|
# Save original freecad setup as blender scene
|
||||||
if blend_path is not None:
|
bpy.ops.wm.save_as_mainfile(filepath=f'{blend_file.rpartition(".")[0]}_orig.blend')
|
||||||
if not os.path.isdir(os.path.dirname(blend_path)):
|
|
||||||
os.makedirs(os.path.dirname(blend_path))
|
|
||||||
bpy.ops.wm.save_as_mainfile(filepath=blend_path)
|
|
||||||
|
|
||||||
# restructuring hierarchy by lcs points
|
# 1 prepare highpoly
|
||||||
|
part_names = None
|
||||||
|
lcs_pipeline = True
|
||||||
if imported_objects['objs_lcs']:
|
if imported_objects['objs_lcs']:
|
||||||
restruct_hierarchy(imported_objects['objs_lcs'])
|
part_names = restruct_hierarchy(
|
||||||
|
imported_objects['objs_lcs'], parts_sequence, **cg_config)
|
||||||
|
|
||||||
|
# non lcs pipeline
|
||||||
|
if not part_names:
|
||||||
|
lcs_pipeline = False
|
||||||
|
part_names = [[obj for obj in bpy.data.objects if not obj.parent][0].name]
|
||||||
|
|
||||||
# prepare highpoly
|
|
||||||
if imported_objects['objs_foreground']:
|
if imported_objects['objs_foreground']:
|
||||||
setup_meshes(imported_objects['objs_foreground'], sharpness=True, shading=True)
|
setup_meshes(imported_objects['objs_foreground'],
|
||||||
|
sharpness=True, shading=True)
|
||||||
else:
|
else:
|
||||||
setup_meshes(imported_objects['objs_background'], sharpness=True, shading=True)
|
setup_meshes(imported_objects['objs_background'],
|
||||||
|
sharpness=True, shading=True)
|
||||||
|
|
||||||
# TODO Part's names from LCS names?
|
# 2 prepare midpoly
|
||||||
part_names = [lcs_name.split(inlet)[0]
|
copy_col_name = copy_collections_recursive(
|
||||||
for lcs_name in imported_objects['objs_lcs']
|
bpy.data.collections[cg_config['parts_col_name']],
|
||||||
if lcs_name.endswith(inlet)]
|
suffix=cg_config['midpoly']
|
||||||
|
|
||||||
# prepare midpoly
|
|
||||||
copy_collections_recursive(
|
|
||||||
bpy.data.collections[parts_col_name], suffix=midpoly
|
|
||||||
)
|
)
|
||||||
hightpoly_collections_to_midpoly(part_names)
|
midpoly_obj_names = hightpoly_collections_to_midpoly(
|
||||||
|
copy_col_name, part_names, lcs_pipeline, **cg_config)
|
||||||
|
|
||||||
# prepare lowpoly
|
# 3 prepare lowpoly
|
||||||
lowpoly_obj_names = parts_to_shells(part_names)
|
lowpoly_obj_names = parts_to_shells(part_names, lcs_pipeline, **cg_config)
|
||||||
uv_unwrap(lowpoly_obj_names)
|
uv_unwrap(lowpoly_obj_names)
|
||||||
|
|
||||||
# save lowpoly in blender scene
|
# Save before baking
|
||||||
if blend_path is not None:
|
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
|
||||||
if not os.path.isdir(os.path.dirname(blend_path)):
|
|
||||||
os.makedirs(os.path.dirname(blend_path))
|
|
||||||
bpy.ops.wm.save_as_mainfile(filepath=blend_path)
|
|
||||||
|
|
||||||
# bake textures
|
# 4 bake textures
|
||||||
bpy.ops.wm.open_mainfile(filepath=blend_path)
|
if kwargs['textures_resolution'] != 0:
|
||||||
textures_path = bw_submit(lowpoly_obj_names)
|
textures_path = os.path.join(blend_path, 'textures').replace('\\', '/')
|
||||||
compose_baked_textures(textures_path)
|
bake_paths = bw_bake(lowpoly_obj_names,
|
||||||
assign_pbr_material(lowpoly_obj_names, textures_path)
|
textures_path,
|
||||||
|
kwargs['textures_resolution'],
|
||||||
|
**cg_config)
|
||||||
|
# Save baking result
|
||||||
|
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
|
||||||
|
# 5 prepare textures
|
||||||
|
bpy.ops.wm.quit_blender()
|
||||||
|
compose_baked_textures(textures_path, bake_paths, kwargs['textures_resolution'])
|
||||||
|
for bake_path in bake_paths:
|
||||||
|
shutil.rmtree(bake_path)
|
||||||
|
bpy.ops.wm.open_mainfile(filepath=blend_file)
|
||||||
|
assign_pbr_material(lowpoly_obj_names, textures_path)
|
||||||
|
bpy.ops.file.make_paths_relative()
|
||||||
|
# Save assigned lowpoly assets
|
||||||
|
bpy.ops.wm.save_as_mainfile(filepath=blend_file)
|
||||||
|
for part_name in part_names:
|
||||||
|
export_fbx(
|
||||||
|
obj_name=f'{part_name}_{cg_config["lowpoly"]}',
|
||||||
|
path=export_path,
|
||||||
|
subdir='fbx')
|
||||||
|
|
||||||
# export object meshes and urdf
|
|
||||||
|
# 6 export object meshes and urdf
|
||||||
to_urdf = collections.defaultdict(list)
|
to_urdf = collections.defaultdict(list)
|
||||||
|
|
||||||
if lowpoly_obj_names:
|
|
||||||
export_obj_names = lowpoly_obj_names
|
|
||||||
else:
|
|
||||||
export_obj_names = sum([imported_objects['objs_foreground'],
|
|
||||||
imported_objects['objs_background']], [])
|
|
||||||
|
|
||||||
link = {}
|
link = {}
|
||||||
for export_obj_name in export_obj_names:
|
for part_name in part_names:
|
||||||
link_prop = {}
|
link_prop = {}
|
||||||
if mesh_export_path is not None:
|
link_prop['visual'] = export_dae(
|
||||||
link_prop['visual'] = export_dae(
|
obj_name=f'{part_name}_{cg_config["midpoly"]}',
|
||||||
obj_name=export_obj_name, path=mesh_export_path, subdir='visual')
|
path=export_path,
|
||||||
link_prop['collision'] = export_stl(
|
subdir='dae')
|
||||||
obj_name=export_obj_name, path=mesh_export_path, subdir='collision')
|
link_prop['collision'] = export_stl(
|
||||||
|
obj_name=f'{part_name}_{cg_config["lowpoly"]}',
|
||||||
link[export_obj_name] = link_prop
|
path=export_path,
|
||||||
|
subdir='collision')
|
||||||
|
link[part_name] = link_prop
|
||||||
to_urdf['links'].append(link)
|
to_urdf['links'].append(link)
|
||||||
|
|
||||||
#config = {'sequences': [['cube1', 'cube2', 'cube3', 'cube4'], ['cube2', 'cube1', 'cube4', 'cube3']]}
|
# TODO export urdf
|
||||||
|
config = kwargs.pop('config', None)
|
||||||
|
# config = {'sequences': [['cube1', 'cube2', 'cube3', 'cube4'], ['cube2', 'cube1', 'cube4', 'cube3']]}
|
||||||
if config:
|
if config:
|
||||||
for sequence in config['sequences']:
|
for sequence in config['sequences']:
|
||||||
joint = {}
|
joint = {}
|
||||||
|
@ -185,6 +203,9 @@ def cg_pipeline(**kwargs):
|
||||||
|
|
||||||
print(json.dumps(to_urdf, indent=4))
|
print(json.dumps(to_urdf, indent=4))
|
||||||
|
|
||||||
|
logger.info('%s original hightpoly collections ready!', len(part_names))
|
||||||
|
logger.info('%s midpoly objects ready!', len(midpoly_obj_names))
|
||||||
|
logger.info('%s lowpoly objects ready!', len(lowpoly_obj_names))
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
import argparse
|
import argparse
|
||||||
|
@ -241,21 +262,33 @@ if __name__ == '__main__':
|
||||||
required=False
|
required=False
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--mesh_export_path',
|
'--parts_sequence_path',
|
||||||
type=str, help='Path for export meshes',
|
type=str, help='Path to parts assembling sequence json file.',
|
||||||
|
required=False
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--export_path',
|
||||||
|
type=str, help='Path for export assets. If not, fcstd_path will be used instead.',
|
||||||
required=False
|
required=False
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--blend_path',
|
'--blend_path',
|
||||||
type=str,
|
type=str,
|
||||||
help='Path for export blend assembly file',
|
help='Path for blender scene. If not, fcstd_path will be used instead.',
|
||||||
|
required=False
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--textures_resolution',
|
||||||
|
type=int,
|
||||||
|
help='Set baking texture resolution. Recomended - 4096 pix ',
|
||||||
|
default=512,
|
||||||
required=False
|
required=False
|
||||||
)
|
)
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
kwargs = {key: getattr(args, key) for key in dir(args) if not key.startswith('_')}
|
cg_kwargs = {key: getattr(args, key) for key in dir(args) if not key.startswith('_')}
|
||||||
|
|
||||||
cg_pipeline(**kwargs)
|
cg_pipeline(**cg_kwargs)
|
||||||
|
|
||||||
logger.info('CG Pipeline Completed!')
|
logger.info('CG Pipeline Completed!')
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue