@@ -0,0 +1,50 @@
#!/usr/bin/python3
# copyright (c) 2018- polygoniq xyz s.r.o.

# Minimal library that contains code for spawning Blender assets. We try to keep this library
# as minimal as possible, because every change in this module triggers huge rebuilds
# (mostly rendering previews). We want this library to be the only external dependency of
# render_previews, so don't ever import polib here!


if "bounding_box" not in locals():
    from . import bounding_box
    from . import displacement
    from . import load
    from . import spawn
    from . import textures
    from . import utils
else:
    import importlib

    bounding_box = importlib.reload(bounding_box)
    displacement = importlib.reload(displacement)
    load = importlib.reload(load)
    spawn = importlib.reload(spawn)
    textures = importlib.reload(textures)
    utils = importlib.reload(utils)


# fake bl_info so that this gets picked up by vscode blender integration
bl_info = {
    "name": "hatchery",
    "description": "",
}


def register():  # stub just to avoid an AttributeError when using blender_vscode extension
    pass


def unregister():  # stub just to avoid an AttributeError when using blender_vscode extension
    pass


__all__ = [
    "bounding_box",
    "displacement",
    "load",
    "spawn",
    "textures",
    "utils",
]
@@ -0,0 +1,101 @@
#!/usr/bin/python3
# copyright (c) 2018- polygoniq xyz s.r.o.

import bpy
import itertools
import mathutils
import math
import typing


class AlignedBox:
    """Axis-aligned bounding box"""

    def __init__(
        self,
        min: typing.Optional[mathutils.Vector] = None,
        max: typing.Optional[mathutils.Vector] = None,
    ):
        self.min = min if min is not None else mathutils.Vector((math.inf,) * 3)
        self.max = max if max is not None else mathutils.Vector((-math.inf,) * 3)

    def is_valid(self) -> bool:
        """Checks whether this aligned box is valid

        Aligned box is valid if and only if its volume is non-negative.
        Any aligned box becomes valid once it has been extended by at least one point or any other object.
        """
        for min_field, max_field in zip(self.min, self.max):
            if min_field > max_field:
                return False
        return True

    def extend_by_point(self, point: mathutils.Vector) -> None:
        """Extends this aligned box by given infinitesimal point

        This makes sure the resulting aligned box contains everything it contained before, plus
        the given point.
        """
        self.min.x = min(self.min.x, point.x)
        self.min.y = min(self.min.y, point.y)
        self.min.z = min(self.min.z, point.z)

        self.max.x = max(self.max.x, point.x)
        self.max.y = max(self.max.y, point.y)
        self.max.z = max(self.max.z, point.z)

    def extend_by_object(
        self,
        obj: bpy.types.Object,
        parent_collection_matrix: mathutils.Matrix = mathutils.Matrix.Identity(4),
    ) -> None:
        """Extend the bounding box to cover given object

        If the AlignedBox is extended by an object then min_x, max_x, ... values are in world space,
        not object local space. When the object moves after initialization of the AlignedBox,
        coordinate properties are not recomputed to match the object's new position - this class does
        not store any reference to the initialization objects.
        AlignedBox computes boundaries even for instanced collection objects; that's its main
        difference compared to the bound_box property of bpy.types.Object.

        Note: Other methods of this class are space-neutral but this method only makes sense if
        the bounding box is considered a world-space bounding box.
        """
        # matrix_world is matrix relative to object's blend.
        # Thus collection objects have offset inside collection defined by their matrix_world.
        # We need to multiply parent_collection_matrix by obj.matrix_world in recursion
        # to get matrix relevant to top-most collection world space.
        obj_matrix = parent_collection_matrix @ obj.matrix_world
        # if object is a collection, it has bounding box ((0,0,0), (0,0,0), ...)
        # we need to manually traverse objects from collections and extend main bounding box
        # to contain all objects
        if obj.instance_type == 'COLLECTION':
            collection = obj.instance_collection
            if collection is not None:  # if it is None we assume there are no objects
                for collection_obj in collection.objects:
                    self.extend_by_object(collection_obj, obj_matrix)
        else:
            for corner in obj.bound_box:
                self.extend_by_point(obj_matrix @ mathutils.Vector(corner))

    def get_eccentricity(self) -> mathutils.Vector:
        """Returns relative eccentricity in each axis."""
        return (self.max - self.min) / 2.0

    def get_center(self) -> mathutils.Vector:
        return (self.min + self.max) / 2.0

    def get_size(self) -> mathutils.Vector:
        return self.max - self.min

    def get_corners(self) -> typing.Iterable[mathutils.Vector]:
        for i, j, k in itertools.product([self.min, self.max], repeat=3):
            yield mathutils.Vector((i.x, j.y, k.z))

    def __str__(self):
        return (
            f"Aligned Box\n"
            f"X = ({self.min.x}, {self.max.x})\n"
            f"Y = ({self.min.y}, {self.max.y})\n"
            f"Z = ({self.min.z}, {self.max.z})"
        )
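
# Example usage (a minimal sketch, not part of the original module): computing a world-space
# bounding box of the current selection, assuming this package is importable as `hatchery`:
#
#     import bpy
#     from hatchery import bounding_box
#
#     box = bounding_box.AlignedBox()
#     for obj in bpy.context.selected_objects:
#         box.extend_by_object(obj)
#     if box.is_valid():
#         print(box.get_center(), box.get_size())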
@@ -0,0 +1,88 @@
# copyright (c) 2018- polygoniq xyz s.r.o.
# This module contains the materialiq displacement functionality.

# Some of the code is repeated and could be reused from 'polib/node_utils_bpy', but that would mean
# we have to split the semantically correct module into two modules in 'polib' and in 'hatchery'
# as we don't want to import polib here.


import bpy
import typing


def _get_top_level_material_outputs(
    node_tree: bpy.types.NodeTree,
) -> typing.Set[bpy.types.ShaderNodeOutputMaterial]:
    ret = set()
    for node in node_tree.nodes:
        if isinstance(node, bpy.types.ShaderNodeOutputMaterial):
            ret.add(node)

    return ret


def _get_displacement_nodegroups(
    node_tree: bpy.types.NodeTree,
) -> typing.Set[bpy.types.ShaderNodeGroup]:
    ret = set()
    for node in node_tree.nodes:
        if not hasattr(node, "node_tree") or node.node_tree is None:
            continue

        if node.node_tree.name.startswith("mq_Displacement"):
            ret.add(node)
        else:
            ret.update(_get_displacement_nodegroups(node.node_tree))

    return ret


def unlink_displacement(material: bpy.types.Material) -> None:
    if material.node_tree is None:
        # it's not using nodes or the node_tree is invalid
        return

    material_output_nodes = _get_top_level_material_outputs(material.node_tree)

    for material_output_node in material_output_nodes:
        # Find links connected to the material output node "Displacement" socket and unlink them
        for link in material.node_tree.links:
            if link.to_node != material_output_node:
                continue
            if link.to_socket.name != "Displacement":
                continue

            material.node_tree.links.remove(link)
            break


def can_link_displacement(material: bpy.types.Material) -> bool:
    if material.node_tree is None:
        return False

    displacement_nodegroups = _get_displacement_nodegroups(material.node_tree)

    return len(displacement_nodegroups) == 1


def link_displacement(material: bpy.types.Material) -> None:
    if material.node_tree is None:
        # it's not using nodes or the node_tree is invalid
        return

    displacement_nodegroups = _get_displacement_nodegroups(material.node_tree)
    if len(displacement_nodegroups) != 1:
        raise RuntimeError(
            f"Tried to link materialiq displacement in {material.name} which does not have the "
            f"mq_Displacement node or there are multiple such nodes."
        )

    displacement_nodegroup = displacement_nodegroups.pop()

    material_output_nodes = _get_top_level_material_outputs(material.node_tree)

    for material_output_node in material_output_nodes:
        material.node_tree.links.new(
            displacement_nodegroup.outputs["Displacement"],
            material_output_node.inputs["Displacement"],
        )
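
# Example usage (a minimal sketch, not part of the original module): toggling materialiq
# displacement on the active material, assuming this package is importable as `hatchery`:
#
#     import bpy
#     from hatchery import displacement
#
#     material = bpy.context.active_object.active_material
#     if displacement.can_link_displacement(material):
#         displacement.link_displacement(material)
#     # ... and later, to disconnect it again:
#     displacement.unlink_displacement(material)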
@@ -0,0 +1,130 @@
# copyright (c) 2018- polygoniq xyz s.r.o.

import bpy
import os
import typing


def try_get_linked_datablock(
    datablock_collection: bpy.types.bpy_prop_collection, datablock_name: str, blend_path: str
) -> typing.Optional[bpy.types.ID]:
    """Returns datablock 'datablock_name' linked from 'blend_path' or None if datablock wasn't linked yet.

    Tries to find library corresponding to 'blend_path' and then checks if there's the datablock
    'datablock_name' linked from this library.
    """
    # Filenames longer than 63 characters are cropped in Blender
    expected_lib_name = os.path.basename(blend_path)[:63]
    # This is not 100% reliable, there can be multiple libraries with the same name, so we also
    # check if library.filepath is the same as the blend_path later.
    library = bpy.data.libraries.get(expected_lib_name, None)
    if library is None:
        return None

    lib_path = bpy.path.abspath(library.filepath)
    blend_path = os.path.abspath(blend_path)
    try:
        if os.path.samefile(lib_path, blend_path):
            return datablock_collection.get((datablock_name, library.filepath), None)
    except OSError:
        pass
    return None


def load_master_collection(blend_path: str, link: bool = True) -> bpy.types.Collection:
    """Links master collection from 'blend_path' and returns it.

    Master collection is the collection with the same name as basename of the 'blend_path'
    """
    asset_name, _ = os.path.splitext(os.path.basename(blend_path))
    if link:
        # Check if collection is already linked. Linking already linked collection doesn't do
        # anything wrong, Blender recognizes that the same collection is already linked. However it
        # prints warning: "WARN (blo.readfile): ...\readfile.c:4543 link_named_part: Append: ID 'ASSET_NAME' is already linked"
        # which looks unprofessional.
        linked_collection = try_get_linked_datablock(bpy.data.collections, asset_name, blend_path)
        if linked_collection is not None:
            return linked_collection

    with bpy.data.libraries.load(blend_path, link=link) as (data_from, data_to):
        # The root collection of the asset should have the same name as the asset name
        assert asset_name in data_from.collections
        data_to.collections = [asset_name]

    return data_to.collections[0]


def load_material(blend_path: str) -> bpy.types.Material:
"""Appends material 'blend_path' to current file and returns it.
|
||||
|
||||
This allows loading materials from .blend file that are linked. The assumption here is
|
||||
that the .blend has to contain a mesh with the same name as the material - this loads the
|
||||
mesh and gets access to its material which is then returned.
    """
    asset_name, _ = os.path.splitext(os.path.basename(blend_path))
    # We use two approaches to load material:
    # 1. Material is present in the blend_path -> load first one
    # 2. Material is not available in data_from -> Material can be linked in the source file so it
    #    isn't available through the load API. We take the first mesh in the data and load the
    #    material from there.
    #
    # We use those two approaches because the materials can be linked from the library in the
    # material sources directly if artists want to use the materials in assets too (simplifies
    # linking and changes a lot).
    using_transfer_mesh = False
    with bpy.data.libraries.load(blend_path, link=False) as (data_from, data_to):
        if len(data_from.materials) > 0:
            assert len(data_from.materials) > 0
            data_to.materials = [data_from.materials[0]]
        else:
            if asset_name in data_from.meshes:
                data_to.meshes = [asset_name]
                using_transfer_mesh = True

    if using_transfer_mesh:
        transfer_mesh: bpy.types.Mesh = data_to.meshes[0]
        assert len(transfer_mesh.materials) > 0
        material = transfer_mesh.materials[0].make_local()
        bpy.data.meshes.remove(transfer_mesh)
    else:
        material = data_to.materials[0]

    return material


def load_particles(blend_path: str) -> typing.List[bpy.types.ParticleSettings]:
"""Loads are particle system from 'blend_path' and returns them."""
    with bpy.data.libraries.load(blend_path, link=False) as (data_from, data_to):
        data_to.particles = data_from.particles

    assert len(data_to.particles) > 0
    return data_to.particles


def load_world(blend_path: str) -> bpy.types.World:
    """Loads first world from 'blend_path' and returns it."""
    with bpy.data.libraries.load(blend_path, link=False) as (data_from, data_to):
        assert len(data_from.worlds) > 0
        data_to.worlds = [data_from.worlds[0]]

    world = data_to.worlds[0]
    return world


def load_scene(blend_path: str) -> bpy.types.Scene:
    """Loads first scene from 'blend_path' and returns it."""
    with bpy.data.libraries.load(blend_path, link=False) as (data_from, data_to):
        assert len(data_from.scenes) > 0
        data_to.scenes = [data_from.scenes[0]]

    return data_to.scenes[0]


def load_master_object(blend_path: str) -> bpy.types.Object:
    """Loads object with the same name as basename of the given .blend path"""
    asset_name, _ = os.path.splitext(os.path.basename(blend_path))
    with bpy.data.libraries.load(blend_path, link=False) as (data_from, data_to):
        assert len(data_from.objects) > 0
        data_to.objects = [asset_name]

    return data_to.objects[0]
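
# Example usage (a minimal sketch, not part of the original module): linking an asset's master
# collection into the current scene and appending a material onto the active object, assuming
# this package is importable as `hatchery`; the .blend paths are placeholders.
#
#     import bpy
#     from hatchery import load
#
#     collection = load.load_master_collection("/path/to/Asset_Name.blend", link=True)
#     bpy.context.scene.collection.children.link(collection)
#
#     material = load.load_material("/path/to/Material_Name.blend")
#     bpy.context.active_object.data.materials.append(material)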
@@ -0,0 +1,302 @@
# copyright (c) 2018- polygoniq xyz s.r.o.

# This module contains functions that are used to spawn the asset - get the asset into the scene.
#
# The functions here can adjust the context and are more high-level. The 'load' module is more low-level
# and serves only for loading the datablocks.
#
# Each asset type has its function and spawn options. Spawn options define properties that can
# change the behavior of the spawn function.

import bpy
import abc
import dataclasses
import mathutils
import typing
import logging

from . import utils
from . import load
from . import textures
from . import displacement

logger = logging.getLogger(f"polygoniq.{__name__}")


@dataclasses.dataclass
class DatablockSpawnOptions:
    """Base for all spawn options

    Currently this is empty, but can store options relevant to all asset types.
    """

    pass


class SpawnedData(abc.ABC):
    def __init__(self, datablocks: typing.Set[bpy.types.ID]):
        self.datablocks = datablocks


@dataclasses.dataclass
class ModelSpawnOptions(DatablockSpawnOptions):
    parent_collection: typing.Optional[bpy.types.Collection] = None
    # If True, the spawned model instancer is selected and all other objects are deselected
    select_spawned: bool = False
    location_override: typing.Optional[mathutils.Vector] = None
    rotation_euler_override: typing.Optional[mathutils.Euler] = None


class ModelSpawnedData(SpawnedData):
    def __init__(self, collection: bpy.types.Collection, instancer: bpy.types.Object):
        self.collection = collection
        self.instancer = instancer
        super().__init__({collection, instancer})


def spawn_model(
    path: str, context: bpy.types.Context, options: ModelSpawnOptions
) -> ModelSpawnedData:
"""Loads model from given 'path' and instances it on empty on cursor position.

    This assumes the path contains 'master' collection - check load.load_master_collection.
    Further spawn behavior like parent collection can be tweaked in ModelSpawnOptions.

    Returns the empty that instances the model 'master' collection.
    """

    if options.parent_collection is None and options.select_spawned:
        raise RuntimeError(
            "Wrong arguments: Cannot select spawned model objects without a parent collection. "
            "The object wouldn't be present in the View Layer!"
        )

    root_collection = load.load_master_collection(path)
    root_empty = bpy.data.objects.new(root_collection.name, None)
    root_empty.instance_type = 'COLLECTION'
    root_empty.instance_collection = root_collection
    root_empty.location = context.scene.cursor.location
    if options.location_override is not None:
        root_empty.location = options.location_override

    if options.rotation_euler_override is not None:
        root_empty.rotation_euler = options.rotation_euler_override

    # Copy all children properties from the instanced objects to the instancer object
    for obj in root_empty.instance_collection.all_objects:
        if obj.library is None:
            continue

        utils.copy_custom_props(obj, root_empty)

    for col in root_empty.users_collection:
        col.objects.unlink(root_empty)

    if options.parent_collection is not None:
        options.parent_collection.objects.link(root_empty)

    # Only change selection if we linked the object, so it is present in view layer and if
    # caller wants to.
    if options.select_spawned:
        for selected_obj in context.selected_objects:
            selected_obj.select_set(False)

        root_empty.select_set(True)

    return ModelSpawnedData(root_collection, root_empty)
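
# Example usage (a minimal sketch, not part of the original module): spawning a model into the
# current scene collection; the .blend path is a placeholder.
#
#     options = ModelSpawnOptions(
#         parent_collection=bpy.context.scene.collection,
#         select_spawned=True,
#     )
#     spawned = spawn_model("/path/to/Asset_Name.blend", bpy.context, options)
#     print(spawned.instancer.name, spawned.collection.name)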


@dataclasses.dataclass
class MaterialSpawnOptions(DatablockSpawnOptions):
    texture_size: int = 2048
    use_displacement: bool = False
    target_objects: typing.Set[bpy.types.Object] = dataclasses.field(default_factory=set)


class MaterialSpawnedData(SpawnedData):
    def __init__(self, material: bpy.types.Material):
        self.material = material
        super().__init__({material})


def spawn_material(
    path: str, context: bpy.types.Context, options: MaterialSpawnOptions
) -> MaterialSpawnedData:
    """Loads material from 'path' and adds it to all selected objects containing material slots.

    (materialiq materials only)
    Automatically changes texture sizes and links / unlinks displacement based on spawning options.

    Returns the spawned material.
    """
    material = load.load_material(path)
    for obj in options.target_objects:
        if not utils.can_have_materials_assigned(obj):
            continue
        if len(obj.material_slots) < 1:
            obj.data.materials.append(material)
        else:
            obj.material_slots[obj.active_material_index].material = material

    textures.change_texture_sizes(options.texture_size, textures.get_used_textures(material))

    if displacement.can_link_displacement(material):
        if options.use_displacement:
            displacement.link_displacement(material)
        else:
            displacement.unlink_displacement(material)

    return MaterialSpawnedData(material)
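
# Example usage (a minimal sketch, not part of the original module): spawning a materialiq
# material onto the currently selected objects with 1K textures; the .blend path is a placeholder.
#
#     options = MaterialSpawnOptions(
#         texture_size=1024,
#         use_displacement=False,
#         target_objects=set(bpy.context.selected_objects),
#     )
#     spawned = spawn_material("/path/to/Material_Name.blend", bpy.context, options)
#     print(spawned.material.name)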


@dataclasses.dataclass
class ParticleSystemSpawnOptions(DatablockSpawnOptions):
    display_type: str = 'TEXTURED'
    display_percentage: float = 100.0
    instance_collection_parent: typing.Optional[bpy.types.Collection] = None
    include_base_material: bool = True
    max_particle_count: int = 10000
    # count is used when preserve_density is False
    count: int = 1000
    preserve_density: bool = True
    target_objects: typing.Set[bpy.types.Object] = dataclasses.field(default_factory=set)


class ParticlesSpawnedData(SpawnedData):
    def __init__(
        self,
        particles: typing.Iterable[bpy.types.ParticleSettings],
        material: typing.Optional[bpy.types.Material] = None,
    ):
        self.particles = particles
        self.material = material
        datablocks = set(particles)
        if material is not None:
            datablocks.add(material)
        super().__init__(datablocks)


def spawn_particles(
    path: str, context: bpy.types.Context, options: ParticleSystemSpawnOptions
) -> ParticlesSpawnedData:
    """Loads all particle systems from a given path and puts them on objects based on options.

    Returns list of particle settings that were loaded.
    """
    all_particle_settings = load.load_particles(path)

    # Get lowest z location from target objects and calculate total mesh area of all target objects
    # so the instanced objects' locations and particle counts are adjusted properly.
    lowest_obj_z = 0.0
    total_mesh_area = 0.0
    for target_obj in options.target_objects:
        lowest_obj_z = min(target_obj.location.z, lowest_obj_z)
        total_mesh_area += utils.calculate_mesh_area(target_obj)

    for particle_settings in all_particle_settings:
        particle_settings.display_percentage = options.display_percentage
        for obj in particle_settings.instance_collection.all_objects:
            # We spawn all objects 10 units below the lowest location of the target objects
            obj.location.z = lowest_obj_z - 10.0
            obj.display_type = options.display_type

        if options.instance_collection_parent is not None:
            options.instance_collection_parent.children.link(particle_settings.instance_collection)

        if options.preserve_density:
            new_count = int(total_mesh_area * particle_settings.pps_density)
            if new_count > options.max_particle_count:
                logger.warning(
                    f"Particle count exceeded maximum by: {int(new_count - options.max_particle_count)}"
                )
                new_count = options.max_particle_count
        else:
            new_count = options.count
        particle_settings.count = new_count

        for target_obj in options.target_objects:
            # Create modifiers and adjust particle system settings based on spawn options
            mod: bpy.types.ParticleSystemModifier = target_obj.modifiers.new(
                particle_settings.name, type='PARTICLE_SYSTEM'
            )
            mod.particle_system.settings = particle_settings
            utils.ensure_particle_naming_consistency(mod, mod.particle_system)

    spawned_material_data = None
    if options.include_base_material:
        spawned_material_data = spawn_material(
            path, context, MaterialSpawnOptions(target_objects=options.target_objects)
        )

    return ParticlesSpawnedData(
        all_particle_settings,
        spawned_material_data.material if spawned_material_data is not None else None,
    )
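
# Example usage (a minimal sketch, not part of the original module): scattering a particle
# asset over the selected mesh objects while preserving the authored density; the path is a placeholder.
#
#     options = ParticleSystemSpawnOptions(
#         preserve_density=True,
#         max_particle_count=5000,
#         target_objects={obj for obj in bpy.context.selected_objects if obj.type == 'MESH'},
#     )
#     spawned = spawn_particles("/path/to/Particle_Asset.blend", bpy.context, options)
#     print([ps.name for ps in spawned.particles])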


class WorldSpawnedData(SpawnedData):
    def __init__(self, world: bpy.types.World):
        self.world = world
        super().__init__({world})


def spawn_world(
    path: str, context: bpy.types.Context, options: DatablockSpawnOptions
) -> WorldSpawnedData:
    """Loads world from 'path' and replaces current scene world with it, returns the loaded world."""
    world = load.load_world(path)
    context.scene.world = world
    return WorldSpawnedData(world)


class SceneSpawnedData(SpawnedData):
    def __init__(self, scene: bpy.types.Scene):
        self.scene = scene
        super().__init__({scene})


def spawn_scene(
    path: str, context: bpy.types.Context, options: DatablockSpawnOptions
) -> SceneSpawnedData:
    """Loads scene from 'path' and replaces current scene with it, returns the loaded scene."""
    scene = load.load_scene(path)
    context.window.scene = scene
    return SceneSpawnedData(scene)


@dataclasses.dataclass
class GeometryNodesSpawnOptions(DatablockSpawnOptions):
    parent_collection: typing.Optional[bpy.types.Collection] = None


class GeometryNodesSpawnedData(SpawnedData):
    def __init__(
        self, container_obj: bpy.types.Object, modifiers: typing.Iterable[bpy.types.Modifier]
    ):
        self.container_obj = container_obj
        self.modifiers = modifiers
        super().__init__({container_obj} | {m.node_group for m in modifiers})


def spawn_geometry_nodes(
    path: str, context: bpy.types.Context, options: GeometryNodesSpawnOptions
) -> GeometryNodesSpawnedData:
    """Loads object with the same name as basename of 'path' and adds it to the scene collection"""
    # Currently default behavior is to append the object containing the geometry nodes.
    # TODO: In future we want to load either node group into node tree, apply onto active
    # object and choose whether to start draw, or edit mode.
    obj = load.load_master_object(path)
    if options.parent_collection is not None:
        options.parent_collection.objects.link(obj)

    # Due to a bug in Blender while converting boolean inputs we reassign the modifier node
    # group when spawning. The bug happens when object with modifiers is appended from a blend
    # file, where the modifier node group is linked from a different file. First append is
    # correct, but any subsequently appended object with the same modifier triggers the:
    # 'Property type does not match input socket (NAME)' error and can make some setups not work
    # Issue link: https://projects.blender.org/blender/blender/issues/110825
    for mod in obj.modifiers:
        if mod.type == 'NODES':
            mod.node_group = mod.node_group

    return GeometryNodesSpawnedData(obj, {m for m in obj.modifiers if m.type == 'NODES'})
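
# Example usage (a minimal sketch, not part of the original module): appending a geometry nodes
# asset into the active collection; the .blend path is a placeholder.
#
#     options = GeometryNodesSpawnOptions(parent_collection=bpy.context.collection)
#     spawned = spawn_geometry_nodes("/path/to/GeoNodes_Asset.blend", bpy.context, options)
#     print(spawned.container_obj.name, [mod.name for mod in spawned.modifiers])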
@@ -0,0 +1,102 @@
# copyright (c) 2018- polygoniq xyz s.r.o.
# This module contains materialiq texture switching related functions.

import bpy
import os
import typing
import logging

logger = logging.getLogger(f"polygoniq.{__name__}")

TEXTURE_EXTENSIONS = {".png", ".jpg"}

def generate_filepath(texture_path: str, basename: str, max_size: str, ext: str) -> str:
    name_without_resolution = basename
    if basename.startswith("mq_") and basename.split("_")[-1].isdigit():
        name_without_resolution = basename.rsplit("_", 1)[0]

    return os.path.join(texture_path, f"{name_without_resolution}_{max_size}{ext}")


def is_materialiq_texture(image: bpy.types.Image) -> bool:
    basename, _ = os.path.splitext(os.path.basename(image.filepath))
    if basename.startswith("mq_") and basename.split("_")[-1].isdigit():
        return True

    return False


def change_texture_size(max_size: int, image: bpy.types.Image):
    if not is_materialiq_texture(image):
        return

    basename, ext = os.path.splitext(os.path.basename(image.filepath))
    if ext not in TEXTURE_EXTENSIONS:
        return

    logger.debug(f"Changing {image.name} to {max_size}...")

    new_path = None
    found = False
    parent_dir = os.path.dirname(image.filepath)
    for ext in TEXTURE_EXTENSIONS:
        new_path = generate_filepath(parent_dir, basename, str(max_size), ext)
        new_abs_path = bpy.path.abspath(new_path)
        # We use getsize() to check that the file is not empty. Because of compress_texture, there could
        # exist different file formats of the same texture, and all except one of them would be empty.
        if os.path.exists(new_abs_path) and os.path.getsize(new_abs_path) > 0:
            found = True
            break

    if not found:
        logger.warning(f"Can't find {image.name} in size {max_size}, skipping...")
        return

    image.filepath = new_path
    image.name = os.path.basename(new_path)


def change_texture_sizes(
    max_size: int, only_textures: typing.Optional[typing.Set[bpy.types.Image]] = None
):
    logger.debug(f"mq: changing textures to {max_size}...")

    if only_textures is not None:
        for image in only_textures:
            change_texture_size(max_size, image)
    else:
        for image in bpy.data.images:
            change_texture_size(max_size, image)


def get_used_textures_in_node(node: bpy.types.Node) -> typing.Set[bpy.types.Image]:
    ret = set()

    if hasattr(node, "node_tree") and node.node_tree is not None:
        for child_node in node.node_tree.nodes:
            ret.update(get_used_textures_in_node(child_node))

    if hasattr(node, "image"):
        if node.image:
            ret.add(node.image)

    return ret


def get_used_textures(material: bpy.types.Material) -> typing.Set[bpy.types.Image]:
    if material is None:
        return set()

    if not material.use_nodes:
        logger.warning(
            f"Can't get used textures from material '{material.name}' that is not using "
            f"the node system!"
        )
        return set()

    assert material.node_tree is not None, "use_nodes is True, yet node_tree is None"
    ret = set()
    for node in material.node_tree.nodes:
        ret.update(get_used_textures_in_node(node))

    return ret
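
# Example usage (a minimal sketch, not part of the original module): switching every materialiq
# texture used by the active material to its 1024px version.
#
#     import bpy
#
#     material = bpy.context.active_object.active_material
#     change_texture_sizes(1024, get_used_textures(material))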
@@ -0,0 +1,150 @@
# copyright (c) 2018- polygoniq xyz s.r.o.

import bpy
import typing
import bmesh
import rna_prop_ui


def copy_custom_prop(src: bpy.types.ID, dst: bpy.types.ID, prop_name: str) -> None:
    """Copies custom property 'prop_name' from 'src' to 'dst' while preserving its settings"""
    # In order to copy the property with its configuration (min, max, subtype, etc)
    # we need to use the following code. The code is taken from the "Copy Attributes" addon that's
    # shipped within Blender.

    # Create the property.
    dst[prop_name] = src[prop_name]
    # Copy the settings of the property.
    try:
        dst_prop_manager = dst.id_properties_ui(prop_name)
    except TypeError:
        # Python values like lists or dictionaries don't have any settings to copy.
        # They just consist of a value and nothing else.
        # Note: This also skips copying the properties that cannot be edited by
        # id_properties_ui
        return

    src_prop_manager = src.id_properties_ui(prop_name)
    assert src_prop_manager, f"Property '{prop_name}' not found in {src}"

    dst_prop_manager.update_from(src_prop_manager)

    # Copy the Library Overridable flag, which is stored elsewhere, sometimes it's not possible
    # to copy the library override
    try:
        prop_rna_path = f'["{prop_name}"]'
        is_lib_overridable = src.is_property_overridable_library(prop_rna_path)
        dst.property_overridable_library_set(prop_rna_path, is_lib_overridable)
    except:
        pass


def copy_custom_props(
    src: bpy.types.ID, dst: bpy.types.ID, only_existing: bool = False, recursive: bool = False
) -> None:
    """Copies all custom properties from 'src' to 'dst'

    If 'only_existing' is True, then properties that don't exist on
    the 'dst' object are not created, only values of existing properties are
    updated.

    If 'recursive' is True, the properties are also copied to all children of the 'dst' object
    """
    if recursive:
        for child in dst.children:
            copy_custom_props(src, child, only_existing, recursive)

    for prop_name in src.keys():
        if only_existing and prop_name not in dst:
            continue

        copy_custom_prop(src, dst, prop_name)
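
# Example usage (a minimal sketch, not part of the original module): copying custom properties
# from the active object to all other selected objects, updating only properties they already have.
#
#     import bpy
#
#     src = bpy.context.active_object
#     for dst in bpy.context.selected_objects:
#         if dst is not src:
#             copy_custom_props(src, dst, only_existing=True)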


def ensure_particle_naming_consistency(
    modifier: bpy.types.ParticleSystemModifier, particle_system: bpy.types.ParticleSystem
) -> None:
    """
    Particle data gets duplicated and has the object duplicate suffix on copy, but modifiers and particle system names do not.
    This function ensures the same naming on the whole particle system -> modifier, data, particle system, instance_collection

    Using the name from the instance collection is currently the best approach. Creating a modifier creates particle data automatically,
    but we don't want to use those, we use the ones loaded from our blends (this gives them .001). Instance collections have the most
    correct duplicate suffix because we have almost full control over them (at least when we are creating them).
    """
    if modifier is None or particle_system is None:
        raise RuntimeError(
            "Cannot ensure naming consistency if modifier or particle_system is None!"
        )

    ps_settings = particle_system.settings
    if ps_settings is None or ps_settings.instance_collection is None:
        raise RuntimeError(
            f"Cannot ensure naming consistency if particle_system ({particle_system.name}) has no settings or no instance_collection!"
        )

    modifier.name = particle_system.name = ps_settings.name = ps_settings.instance_collection.name


def get_area_based_particle_count(
    obj: bpy.types.Object, density: float, max_particle_count: int, include_weights: bool = False
) -> typing.Tuple[int, int]:
    mesh_area = calculate_mesh_area(obj, include_weights)
    particle_count = int(mesh_area * density)
    if particle_count > max_particle_count:
        return max_particle_count, particle_count - max_particle_count
    return particle_count, 0


def calculate_mesh_area(obj: bpy.types.Object, include_weight: bool = False) -> float:
    mesh = obj.data
    try:
        if obj.mode == 'EDIT':
            bm = bmesh.from_edit_mesh(mesh)
        else:
            bm = bmesh.new()
            bm.from_mesh(mesh)

        bm.transform(obj.matrix_world)
        if include_weight:
            vg = obj.vertex_groups.active
            mesh_area = 0
            for face in bm.faces:
                f_area = face.calc_area()
                weighted_verts = 0
                weight = 0
                for v in face.verts:
                    # heavy approach, but we don't know whether a vertex is in the group :(
                    try:
                        weight += vg.weight(v.index)
                        weighted_verts += 1
                    except:
                        pass
                if weighted_verts > 0:
                    mesh_area += (weight / weighted_verts) * f_area
        else:
            mesh_area = sum(f.calc_area() for f in bm.faces)

    finally:
        bm.free()

    return mesh_area


def can_have_materials_assigned(obj: bpy.types.Object) -> bool:
    """Checks whether given object can have materials assigned

    We check for multiple things: type of the object and the availability of material_slots.
    """

    # In theory checking the availability of material_slots is not necessary, all these
    # object types should have it. We check for it to avoid exceptions and errors in our code.
    return obj.type in {
        'MESH',
        'CURVE',
        'SURFACE',
        'META',
        'FONT',
        'GPENCIL',
        'VOLUME',
    } and hasattr(obj, "material_slots")