2025-07-01

This commit is contained in:
2026-03-17 14:30:01 -06:00
parent f9a22056dd
commit 62b5978595
4579 changed files with 1257472 additions and 0 deletions
@@ -0,0 +1,190 @@
"""
Copyright (C) 2019 Remington Creative
This file is part of Atomic Data Manager.
Atomic Data Manager is free software: you can redistribute
it and/or modify it under the terms of the GNU General Public License
as published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Atomic Data Manager is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along
with Atomic Data Manager. If not, see <https://www.gnu.org/licenses/>.
---
This file contains functions that count quantities of various sets of data.
"""
import bpy
from atomic_data_manager.stats import unused
from atomic_data_manager.stats import unnamed
from atomic_data_manager.stats import missing
def collections():
    """Return the total number of collections in the project."""
    return len(bpy.data.collections)
def collections_unused():
    """Return the number of unused collections (shallow scan)."""
    return len(unused.collections_shallow())
def collections_unnamed():
    """Return the number of unnamed collections in the project."""
    return len(unnamed.collections())
def images():
    """Return the total number of images in the project."""
    return len(bpy.data.images)
def images_unused():
    """Return the number of unused images (shallow scan)."""
    return len(unused.images_shallow())
def images_unnamed():
    """Return the number of unnamed images in the project."""
    return len(unnamed.images())
def images_missing():
    """Return the number of images whose source file is missing."""
    return len(missing.images())
def lights():
    """Return the total number of lights in the project."""
    return len(bpy.data.lights)
def lights_unused():
    """Return the number of unused lights (shallow scan)."""
    return len(unused.lights_shallow())
def lights_unnamed():
    """Return the number of unnamed lights in the project."""
    return len(unnamed.lights())
def materials():
    """Return the total number of materials in the project."""
    return len(bpy.data.materials)
def materials_unused():
    """Return the number of unused materials (shallow scan)."""
    return len(unused.materials_shallow())
def materials_unnamed():
    """Return the number of unnamed materials in the project."""
    return len(unnamed.materials())
def node_groups():
    """Return the total number of node groups in the project."""
    return len(bpy.data.node_groups)
def node_groups_unused():
    """Return the number of unused node groups (shallow scan)."""
    return len(unused.node_groups_shallow())
def node_groups_unnamed():
    """Return the number of unnamed node groups in the project."""
    return len(unnamed.node_groups())
def objects():
    """Return the total number of objects in the project."""
    return len(bpy.data.objects)
def objects_unnamed():
    """Return the number of unnamed objects in the project."""
    return len(unnamed.objects())
def particles():
    """Return the total number of particle systems in the project."""
    return len(bpy.data.particles)
def particles_unused():
    """Return the number of unused particle systems (shallow scan)."""
    return len(unused.particles_shallow())
def particles_unnamed():
    """Return the number of unnamed particle systems in the project."""
    return len(unnamed.particles())
def textures():
    """Return the total number of textures in the project."""
    return len(bpy.data.textures)
def textures_unused():
    """Return the number of unused textures (shallow scan)."""
    return len(unused.textures_shallow())
def textures_unnamed():
    """Return the number of unnamed textures in the project."""
    return len(unnamed.textures())
def worlds():
    """Return the total number of worlds in the project."""
    return len(bpy.data.worlds)
def worlds_unused():
    """Return the number of unused worlds in the project."""
    return len(unused.worlds())
def worlds_unnamed():
    """Return the number of unnamed worlds in the project."""
    return len(unnamed.worlds())
@@ -0,0 +1,50 @@
"""
Copyright (C) 2019 Remington Creative
This file is part of Atomic Data Manager.
Atomic Data Manager is free software: you can redistribute
it and/or modify it under the terms of the GNU General Public License
as published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Atomic Data Manager is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along
with Atomic Data Manager. If not, see <https://www.gnu.org/licenses/>.
---
This file contains miscellaneous statistics functions.
"""
import bpy
import os
def blend_size():
    """Return the size of the current .blend file as a formatted string.

    Returns "No Data!" when the file has not been saved yet (no path).
    """
    filepath = bpy.data.filepath
    # an unsaved project has no file on disk, so there is no size to report
    if not filepath:
        return "No Data!"
    size = os.stat(filepath).st_size
    kilobyte = 1024
    megabyte = 1024 ** 2
    gigabyte = 1024 ** 3
    # scale down to the largest unit that yields a value >= 1
    if size >= gigabyte:
        return "{:.1f} GB".format(size / gigabyte)
    if size >= megabyte:
        return "{:.1f} MB".format(size / megabyte)
    if size >= kilobyte:
        return "{:.1f} KB".format(size / kilobyte)
    return "{:.1f} B".format(size)
@@ -0,0 +1,70 @@
"""
Copyright (C) 2019 Remington Creative
This file is part of Atomic Data Manager.
Atomic Data Manager is free software: you can redistribute
it and/or modify it under the terms of the GNU General Public License
as published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Atomic Data Manager is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along
with Atomic Data Manager. If not, see <https://www.gnu.org/licenses/>.
---
This file contains functions that detect missing files in the Blender
project.
"""
import bpy
import os
def get_missing(data):
    """Return names of data-blocks whose backing file cannot be found.

    A data-block is reported when it is unpacked and its resolved
    filepath does not exist on disk, or when it is packed but carries
    an empty filepath.
    """
    # internally generated data-blocks that never have a real file
    do_not_flag = ["Render Result", "Viewer Node", "D-NOISE Export"]
    missing = []
    for datablock in data:
        if datablock.name in do_not_flag:
            continue
        # resolve Blender's "//" relative paths to an absolute path
        abspath = bpy.path.abspath(datablock.filepath)
        if datablock.packed_files:
            # packed data with no filepath has lost track of its source
            if not abspath:
                missing.append(datablock.name)
        elif not os.path.isfile(abspath):
            missing.append(datablock.name)
    return missing
def images():
    """Return names of images whose filepath does not exist."""
    return get_missing(bpy.data.images)
def libraries():
    """Return names of libraries whose filepath does not exist."""
    return get_missing(bpy.data.libraries)
@@ -0,0 +1,207 @@
"""
Copyright (C) 2019 Remington Creative
This file is part of Atomic Data Manager.
Atomic Data Manager is free software: you can redistribute
it and/or modify it under the terms of the GNU General Public License
as published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Atomic Data Manager is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along
with Atomic Data Manager. If not, see <https://www.gnu.org/licenses/>.
---
This file contains functions that detect unnamed data-blocks in the
Blender project.
"""
import bpy
import re
def collections():
    """Return names of collections that still carry a default name."""
    # default names end with a ".NNN" suffix or start with "Collection"
    suffixed = re.compile(r'.*\.\d{3}$')
    return [col.name for col in bpy.data.collections
            if suffixed.match(col.name)
            or col.name.startswith("Collection")]
def images():
    """Return names of images that still carry a default name."""
    # default names end with a ".NNN" suffix or start with "Untitled"
    suffixed = re.compile(r'.*\.\d{3}$')
    return [img.name for img in bpy.data.images
            if suffixed.match(img.name)
            or img.name.startswith("Untitled")]
def lights():
    """Return names of lights that still carry a default name."""
    # default names end with a ".NNN" suffix or start with "Light"
    suffixed = re.compile(r'.*\.\d{3}$')
    return [light.name for light in bpy.data.lights
            if suffixed.match(light.name)
            or light.name.startswith("Light")]
def materials():
    """Return the keys of all unnamed materials in the project.

    Bug fix: this function previously iterated ``bpy.data.lights``
    instead of ``bpy.data.materials``, so it inspected light names
    and never reported any unnamed material.
    """
    unnamed = []
    for material in bpy.data.materials:
        # default names end with a ".NNN" suffix or start with "Material"
        if re.match(r'.*\.\d\d\d$', material.name) or \
                material.name.startswith("Material"):
            unnamed.append(material.name)
    return unnamed
def objects():
    """Return the keys of all unnamed objects in the project.

    An object counts as unnamed when its name ends with a numeric
    ".NNN" suffix or begins with one of Blender's default new-object
    names.
    """
    # every default object name, grouped by object type
    # NOTE: must be a tuple so it can be passed to str.startswith
    default_obj_names = (
        # curve objects
        "BezierCircle", "BezierCurve", "NurbsCircle", "NurbsCurve",
        "NurbsPath",
        # grease pencil objects
        "GPencil", "Stroke",
        # light objects
        "Area", "Light", "Point", "Spot", "Sun",
        # light probe objects
        "IrradianceVolume", "ReflectionCubemap", "ReflectionPlane",
        # mesh objects
        "Circle", "Cone", "Cube", "Cylinder", "Grid", "Icosphere",
        "Plane", "Sphere", "Torus",
        # miscellaneous objects
        "Mball", "Text", "Armature", "Lattice", "Empty", "Camera",
        "Speaker", "Field",
        # nurbs surface objects
        "SurfCircle", "SurfCurve", "SurfPatch", "SurfTorus", "Surface",
    )
    suffixed = re.compile(r'.*\.\d{3}$')
    return [obj.name for obj in bpy.data.objects
            if suffixed.match(obj.name)
            or obj.name.startswith(default_obj_names)]
def node_groups():
    """Return names of node groups that still carry a default name."""
    # default names end with a ".NNN" suffix or start with "NodeGroup"
    suffixed = re.compile(r'.*\.\d{3}$')
    return [group.name for group in bpy.data.node_groups
            if suffixed.match(group.name)
            or group.name.startswith("NodeGroup")]
def particles():
    """Return names of particle systems that still carry a default name."""
    # default names end with ".NNN" or start with "ParticleSettings"
    suffixed = re.compile(r'.*\.\d{3}$')
    return [psys.name for psys in bpy.data.particles
            if suffixed.match(psys.name)
            or psys.name.startswith("ParticleSettings")]
def textures():
    """Return names of textures that still carry a default name."""
    # default names end with a ".NNN" suffix or start with "Texture"
    suffixed = re.compile(r'.*\.\d{3}$')
    return [tex.name for tex in bpy.data.textures
            if suffixed.match(tex.name)
            or tex.name.startswith("Texture")]
def worlds():
    """Return names of worlds that still carry a default name."""
    # default names end with a ".NNN" suffix or start with "World"
    suffixed = re.compile(r'.*\.\d{3}$')
    return [world.name for world in bpy.data.worlds
            if suffixed.match(world.name)
            or world.name.startswith("World")]
@@ -0,0 +1,244 @@
"""
Copyright (C) 2019 Remington Creative
This file is part of Atomic Data Manager.
Atomic Data Manager is free software: you can redistribute
it and/or modify it under the terms of the GNU General Public License
as published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Atomic Data Manager is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along
with Atomic Data Manager. If not, see <https://www.gnu.org/licenses/>.
---
This file contains functions that detect data-blocks that have no users,
as determined by stats.users.py
"""
import bpy
from atomic_data_manager import config
from atomic_data_manager.stats import users
def shallow(data):
    """Fast, possibly incomplete scan for unused data-blocks.

    A data-block counts as unused when it has zero users, or when its
    only user is a fake user and the include-fake-users preference is
    enabled.
    """
    names = []
    for block in data:
        only_fake_user = (block.users == 1
                          and block.use_fake_user
                          and config.include_fake_users)
        if block.users == 0 or only_fake_user:
            names.append(block.name)
    return names
def collections_deep():
    """Return a complete list of keys of unused collections."""
    return [col.name for col in bpy.data.collections
            if not users.collection_all(col.name)]
def collections_shallow():
    """Quickly list unused collections; may miss deep dependencies.

    A collection is considered unused when it holds no objects and
    has no child collections.
    """
    return [col.name for col in bpy.data.collections
            if not col.objects and not col.children]
def images_deep():
    """Return a complete list of keys of unused images."""
    # internally generated images must never be flagged as unused
    # this list also exists in images_shallow()
    do_not_flag = ["Render Result", "Viewer Node", "D-NOISE Export"]
    names = []
    for image in bpy.data.images:
        if users.image_all(image.name):
            continue
        # respect fake users unless the preference says to include them
        if image.use_fake_user and not config.include_fake_users:
            continue
        if image.name not in do_not_flag:
            names.append(image.name)
    return names
def images_shallow():
    """Quickly list unused images; may miss deep dependencies."""
    # internally generated images must never be flagged as unused
    # this list also exists in images_deep()
    do_not_flag = ["Render Result", "Viewer Node", "D-NOISE Export"]
    return [key for key in shallow(bpy.data.images)
            if key not in do_not_flag]
def lights_deep():
    """Return a complete list of keys of unused lights."""
    names = []
    for light in bpy.data.lights:
        if users.light_all(light.name):
            continue
        # respect fake users unless the preference says to include them
        if not light.use_fake_user or config.include_fake_users:
            names.append(light.name)
    return names
def lights_shallow():
    """Quickly list unused lights; may miss deep dependencies."""
    return shallow(bpy.data.lights)
def materials_deep():
    """Return a complete list of keys of unused materials."""
    names = []
    for material in bpy.data.materials:
        if users.material_all(material.name):
            continue
        # respect fake users unless the preference says to include them
        if not material.use_fake_user or config.include_fake_users:
            names.append(material.name)
    return names
def materials_shallow():
    """Quickly list unused materials; may miss deep dependencies."""
    return shallow(bpy.data.materials)
def node_groups_deep():
    """Return a complete list of keys of unused node groups."""
    names = []
    for node_group in bpy.data.node_groups:
        if users.node_group_all(node_group.name):
            continue
        # respect fake users unless the preference says to include them
        if not node_group.use_fake_user or config.include_fake_users:
            names.append(node_group.name)
    return names
def node_groups_shallow():
    """Quickly list unused node groups; may miss deep dependencies."""
    return shallow(bpy.data.node_groups)
def particles_deep():
    """Return a complete list of keys of unused particle systems."""
    names = []
    for particle in bpy.data.particles:
        if users.particle_all(particle.name):
            continue
        # respect fake users unless the preference says to include them
        if not particle.use_fake_user or config.include_fake_users:
            names.append(particle.name)
    return names
def particles_shallow():
    """Quickly list unused particle systems; may miss deep dependencies."""
    return shallow(bpy.data.particles)
def textures_deep():
    """Return a complete list of keys of unused textures."""
    names = []
    for texture in bpy.data.textures:
        if users.texture_all(texture.name):
            continue
        # respect fake users unless the preference says to include them
        if not texture.use_fake_user or config.include_fake_users:
            names.append(texture.name)
    return names
def textures_shallow():
    """Quickly list unused textures; may miss deep dependencies."""
    return shallow(bpy.data.textures)
def worlds():
    """Return a full list of keys of unused worlds.

    Delegates to shallow(), which implements exactly the same
    zero-users / lone-fake-user test that was previously duplicated
    inline here; keeping one copy avoids the two drifting apart.
    """
    return shallow(bpy.data.worlds)
@@ -0,0 +1,786 @@
"""
Copyright (C) 2019 Remington Creative
This file is part of Atomic Data Manager.
Atomic Data Manager is free software: you can redistribute
it and/or modify it under the terms of the GNU General Public License
as published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Atomic Data Manager is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along
with Atomic Data Manager. If not, see <https://www.gnu.org/licenses/>.
---
This file contains functions that return the keys of data-blocks that
use other data-blocks.
They are titled as such that the first part of the function name is the
type of the data being passed in and the second part of the function name
is the users of that type.
e.g. If you were searching for all of the places where an image is used in
a material would be searching for the image_materials() function.
"""
import bpy
def collection_all(collection_key):
    """Return keys of every data-block that uses this collection."""
    # concatenate the per-category user lists in a fixed order
    categories = (collection_cameras, collection_children,
                  collection_lights, collection_meshes,
                  collection_others)
    result = []
    for category in categories:
        result += category(collection_key)
    return result
def collection_cameras(collection_key):
    """Recursively list camera objects in this collection and its children."""
    collection = bpy.data.collections[collection_key]
    # cameras directly inside this collection
    names = [obj.name for obj in collection.objects
             if obj.type == 'CAMERA']
    # cameras inside every descendant collection
    for child_key in collection_children(collection_key):
        names += [obj.name
                  for obj in bpy.data.collections[child_key].objects
                  if obj.type == 'CAMERA']
    return distinct(names)
def collection_children(collection_key):
    """Return keys of all descendant collections of this collection."""
    descendants = collection_children_recursive(collection_key)
    # the recursive helper includes the root collection; drop it
    descendants.remove(bpy.data.collections[collection_key].name)
    return descendants
def collection_children_recursive(collection_key):
    """Recursively return keys of all descendant collections under the
    specified collection, including the collection itself.

    Bug fix: the recursive step previously called collection_children(),
    which strips each child's own name from its result — so every direct
    child (and, cascading, all descendants) was dropped from the list.
    Recursing through this function keeps each child's name.
    """
    collection = bpy.data.collections[collection_key]
    # base case: a leaf collection contributes only its own name
    if not collection.children:
        return [collection.name]
    # recursion case: gather all descendants, then add ourselves
    children = []
    for child in collection.children:
        children += collection_children_recursive(child.name)
    children.append(collection.name)
    return children
def collection_lights(collection_key):
    """List light objects in this collection and its child collections."""
    collection = bpy.data.collections[collection_key]
    # lights directly inside this collection
    names = [obj.name for obj in collection.objects
             if obj.type == 'LIGHT']
    # lights inside every descendant collection
    for child_key in collection_children(collection_key):
        names += [obj.name
                  for obj in bpy.data.collections[child_key].objects
                  if obj.type == 'LIGHT']
    return distinct(names)
def collection_meshes(collection_key):
    """List mesh objects in this collection, child collections included."""
    collection = bpy.data.collections[collection_key]
    # all_objects already covers objects of descendant collections
    return distinct([obj.name for obj in collection.all_objects
                     if obj.type == 'MESH'])
def collection_others(collection_key):
    """List objects in this collection that are not cameras, lights,
    or meshes (those have dedicated search functions)."""
    collection = bpy.data.collections[collection_key]
    # types already handled elsewhere
    excluded_types = ['CAMERA', 'LIGHT', 'MESH']
    # all_objects already covers objects of descendant collections
    return distinct([obj.name for obj in collection.all_objects
                     if obj.type not in excluded_types])
def image_all(image_key):
    """Return keys of every data-block that uses this image."""
    # concatenate the per-category user lists in a fixed order
    categories = (image_compositors, image_materials,
                  image_node_groups, image_textures, image_worlds)
    result = []
    for category in categories:
        result += category(image_key)
    return result
def image_compositors(image_key):
    """Return ["Compositor"] if the scene compositor uses this image,
    directly or through a node group; otherwise an empty list."""
    found = []
    image = bpy.data.images[image_key]
    # node groups known to reference our image (for indirect use)
    group_users = image_node_groups(image_key)
    scene = bpy.context.scene
    if scene.use_nodes and scene.node_tree:
        for node in scene.node_tree.nodes:
            if getattr(node, 'image', None):
                # image node referencing our image directly
                if node.image.name == image.name:
                    found.append("Compositor")
            elif getattr(node, 'node_tree', None):
                # group node whose tree references our image
                if node.node_tree.name in group_users:
                    found.append("Compositor")
    return distinct(found)
def image_materials(image_key):
    """Return keys of materials whose node trees use this image."""
    found = []
    image = bpy.data.images[image_key]
    # node groups known to reference our image (for indirect use)
    group_users = image_node_groups(image_key)
    for mat in bpy.data.materials:
        # only node-based materials with a valid tree can use images
        if not (mat.use_nodes and mat.node_tree):
            continue
        for node in mat.node_tree.nodes:
            if getattr(node, 'image', None):
                # image node referencing our image directly
                if node.image.name == image.name:
                    found.append(mat.name)
            elif node.type == 'GROUP':
                # group node whose tree references our image
                if node.node_tree and \
                        node.node_tree.name in group_users:
                    found.append(mat.name)
    return distinct(found)
def image_node_groups(image_key):
    """Return keys of node groups that reference this image."""
    image = bpy.data.images[image_key]
    return distinct([group.name for group in bpy.data.node_groups
                     if node_group_has_image(group.name, image.name)])
def image_textures(image_key):
    """Return keys of textures that use this image, either through
    their node tree or through their direct image attribute."""
    found = []
    image = bpy.data.images[image_key]
    # node groups known to reference our image (for indirect use)
    group_users = image_node_groups(image_key)
    for tex in bpy.data.textures:
        if tex.use_nodes and tex.node_tree:
            # node-based texture: inspect every node in its tree
            for node in tex.node_tree.nodes:
                if getattr(node, 'image', None):
                    # image node referencing our image directly
                    if node.image.name == image.name:
                        found.append(tex.name)
                elif getattr(node, 'node_tree', None):
                    # group node whose tree references our image
                    if node.node_tree.name in group_users:
                        found.append(tex.name)
        elif getattr(tex, 'image', None):
            # classic texture: check its direct image attribute
            if tex.image.name == image.name:
                found.append(tex.name)
    return distinct(found)
def image_worlds(image_key):
    """Return keys of worlds whose node trees use this image."""
    found = []
    image = bpy.data.images[image_key]
    # node groups known to reference our image (for indirect use)
    group_users = image_node_groups(image_key)
    for world in bpy.data.worlds:
        # only node-based worlds with a valid tree can use images
        if not (world.use_nodes and world.node_tree):
            continue
        for node in world.node_tree.nodes:
            if getattr(node, 'image', None):
                # image node referencing our image directly
                if node.image.name == image.name:
                    found.append(world.name)
            elif getattr(node, 'node_tree', None):
                # group node whose tree references our image
                if node.node_tree.name in group_users:
                    found.append(world.name)
    return distinct(found)
def light_all(light_key):
    """Return keys of every data-block that uses this light."""
    # only objects can reference light data-blocks
    return light_objects(light_key)
def light_objects(light_key):
    """Return keys of light objects that use this light data-block."""
    light = bpy.data.lights[light_key]
    return distinct([obj.name for obj in bpy.data.objects
                     if obj.type == 'LIGHT' and obj.data
                     and obj.data.name == light.name])
def material_all(material_key):
    """Return keys of every data-block that uses this material."""
    # only objects can reference materials through their slots
    return material_objects(material_key)
def material_objects(material_key):
    """Return keys of objects that hold this material in a slot."""
    material = bpy.data.materials[material_key]
    found = []
    for obj in bpy.data.objects:
        # some object types cannot carry materials at all
        if not hasattr(obj, 'material_slots'):
            continue
        for slot in obj.material_slots:
            # empty slots hold None; skip them
            if slot.material and slot.material.name == material.name:
                found.append(obj.name)
    return distinct(found)
def node_group_all(node_group_key):
    """Return keys of every data-block that uses this node group."""
    # concatenate the per-category user lists in a fixed order
    categories = (node_group_compositors, node_group_materials,
                  node_group_node_groups, node_group_textures,
                  node_group_worlds)
    result = []
    for category in categories:
        result += category(node_group_key)
    return result
def node_group_compositors(node_group_key):
    """Return ["Compositor"] if the scene compositor uses this node
    group, directly or through another group; otherwise an empty list."""
    found = []
    node_group = bpy.data.node_groups[node_group_key]
    # groups known to contain our group (for indirect use)
    group_users = node_group_node_groups(node_group_key)
    scene = bpy.context.scene
    if scene.use_nodes and scene.node_tree:
        for node in scene.node_tree.nodes:
            tree = getattr(node, 'node_tree', None)
            if tree:
                # direct use of our node group
                if tree.name == node_group.name:
                    found.append("Compositor")
                # indirect use through a containing group
                if tree.name in group_users:
                    found.append("Compositor")
    return distinct(found)
def node_group_materials(node_group_key):
    """Return keys of materials whose node trees use this node group."""
    found = []
    node_group = bpy.data.node_groups[node_group_key]
    # groups known to contain our group (for indirect use)
    group_users = node_group_node_groups(node_group_key)
    for material in bpy.data.materials:
        # only node-based materials with a valid tree can use groups
        if not (material.use_nodes and material.node_tree):
            continue
        for node in material.node_tree.nodes:
            tree = getattr(node, 'node_tree', None)
            if tree:
                # direct use of our node group
                if tree.name == node_group.name:
                    found.append(material.name)
                # indirect use through a containing group
                if tree.name in group_users:
                    found.append(material.name)
    return distinct(found)
def node_group_node_groups(node_group_key):
    """Return keys of node groups whose trees contain this node group."""
    node_group = bpy.data.node_groups[node_group_key]
    return distinct([candidate.name
                     for candidate in bpy.data.node_groups
                     if node_group_has_node_group(candidate.name,
                                                  node_group.name)])
def node_group_textures(node_group_key):
    """Return keys of textures whose node trees use this node group."""
    found = []
    node_group = bpy.data.node_groups[node_group_key]
    # groups known to contain our group (for indirect use)
    group_users = node_group_node_groups(node_group_key)
    for texture in bpy.data.textures:
        # only node-based textures with a valid tree can use groups
        if not (texture.use_nodes and texture.node_tree):
            continue
        for node in texture.node_tree.nodes:
            tree = getattr(node, 'node_tree', None)
            if tree:
                # direct use of our node group
                if tree.name == node_group.name:
                    found.append(texture.name)
                # indirect use through a containing group
                if tree.name in group_users:
                    found.append(texture.name)
    return distinct(found)
def node_group_worlds(node_group_key):
    """Return keys of worlds whose node trees use this node group."""
    found = []
    node_group = bpy.data.node_groups[node_group_key]
    # groups known to contain our group (for indirect use)
    group_users = node_group_node_groups(node_group_key)
    for world in bpy.data.worlds:
        # only node-based worlds with a valid tree can use groups
        if not (world.use_nodes and world.node_tree):
            continue
        for node in world.node_tree.nodes:
            tree = getattr(node, 'node_tree', None)
            if tree:
                # direct use of our node group
                if tree.name == node_group.name:
                    found.append(world.name)
                # indirect use through a containing group
                elif tree.name in group_users:
                    found.append(world.name)
    return distinct(found)
def node_group_has_image(node_group_key, image_key):
    """Recursively check whether a node group references this image,
    either in one of its own nodes or inside a nested node group."""
    group = bpy.data.node_groups[node_group_key]
    image = bpy.data.images[image_key]
    for node in group.nodes:
        if hasattr(node, 'image') and node.image:
            # base case: an image node referencing our image directly
            if node.image.name == image.name:
                return True
        elif hasattr(node, 'node_tree') and node.node_tree:
            # recurse case: descend into the nested node group
            if node_group_has_image(node.node_tree.name, image.name):
                return True
    return False
def node_group_has_node_group(search_group_key, node_group_key):
    # recursively returns true if the search group contains the node
    # group, either directly or inside a nested node group
    # fix: removed leftover debug print statements that were hard-coded
    # to the "RG_MetallicMap" group and spammed stdout on every call
    has_node_group = False
    search_group = bpy.data.node_groups[search_group_key]
    node_group = bpy.data.node_groups[node_group_key]
    # for each node in our search group
    for node in search_group.nodes:
        # if node is a node group and has a valid node tree
        if hasattr(node, 'node_tree') and node.node_tree:
            # base case
            # if node group is our node group
            if node.node_tree.name == node_group.name:
                has_node_group = True
            # recurse case
            # if node group is any other node group
            else:
                has_node_group = node_group_has_node_group(
                    node.node_tree.name, node_group.name)
        # break the loop if the node group is found
        if has_node_group:
            break
    return has_node_group
def node_group_has_texture(node_group_key, texture_key):
    # recursively returns true if the node group uses this texture,
    # either directly or through a nested node group
    group = bpy.data.node_groups[node_group_key]
    texture = bpy.data.textures[texture_key]
    for node in group.nodes:
        # base case: node carries a non-None texture reference
        if hasattr(node, 'texture') and node.texture:
            if node.texture.name == texture.name:
                return True
        # recursive case: node is a node group with a valid node tree
        elif hasattr(node, 'node_tree') and node.node_tree:
            if node_group_has_texture(node.node_tree.name, texture.name):
                return True
    return False
def particle_all(particle_key):
    # returns a list of keys of every data-block that uses this
    # particle system; only objects can use particle systems
    users = particle_objects(particle_key)
    return users
def particle_objects(particle_key):
    # returns a list of object keys that use the particle system
    settings = bpy.data.particles[particle_key]
    users = []
    for obj in bpy.data.objects:
        # skip objects that cannot carry particle systems
        if not hasattr(obj, 'particle_systems'):
            continue
        # collect the object for each system backed by our settings
        users.extend(
            obj.name
            for system in obj.particle_systems
            if system.settings.name == settings.name
        )
    return distinct(users)
def texture_all(texture_key):
    # returns a list of keys of every data-block that uses this texture
    users = []
    users += texture_brushes(texture_key)
    users += texture_compositor(texture_key)
    users += texture_objects(texture_key)
    users += texture_node_groups(texture_key)
    users += texture_particles(texture_key)
    return users
def texture_brushes(texture_key):
    # returns a list of brush keys that use the texture
    texture = bpy.data.textures[texture_key]
    # a brush uses our texture when its texture slot is set and the
    # names match
    users = [
        brush.name
        for brush in bpy.data.brushes
        if brush.texture and brush.texture.name == texture.name
    ]
    return distinct(users)
def texture_compositor(texture_key):
    # returns a list containing "Compositor" if the texture is used in
    # the scene's compositor
    users = []
    texture = bpy.data.textures[texture_key]
    # node groups that use our texture somewhere inside them
    group_users = texture_node_groups(texture_key)
    scene = bpy.context.scene
    # only inspect the compositor when it has a usable node tree
    if scene.use_nodes and scene.node_tree:
        for node in scene.node_tree.nodes:
            # texture node referencing our texture directly
            if hasattr(node, 'texture') and node.texture:
                if node.texture.name == texture.name:
                    users.append("Compositor")
            # group node whose tree contains our texture
            elif hasattr(node, 'node_tree') and node.node_tree:
                if node.node_tree.name in group_users:
                    users.append("Compositor")
    return distinct(users)
def texture_objects(texture_key):
    # returns a list of object keys that use the texture in one of
    # their modifiers or in a particle system
    users = []
    texture = bpy.data.textures[texture_key]
    # list of particle systems that use our texture
    particle_users = texture_particles(texture_key)
    # append objects that use the texture in a modifier
    for obj in bpy.data.objects:
        # if object can have modifiers applied to it
        if hasattr(obj, 'modifiers'):
            for modifier in obj.modifiers:
                # check the modifier's texture slot
                # fix: these two checks were chained with elif, which
                # skipped the mask_texture check on any modifier that
                # exposes both attributes; check them independently
                # (distinct() below removes any double-append)
                if hasattr(modifier, 'texture') \
                        and modifier.texture \
                        and modifier.texture.name == texture.name:
                    users.append(obj.name)
                # check the modifier's mask texture slot
                if hasattr(modifier, 'mask_texture') \
                        and modifier.mask_texture \
                        and modifier.mask_texture.name == texture.name:
                    users.append(obj.name)
    # append objects that use the texture in a particle system
    for particle in particle_users:
        # append all objects that use the particle system
        users += particle_objects(particle)
    return distinct(users)
def texture_node_groups(texture_key):
    # returns a list of keys of all node groups that use this texture
    texture = bpy.data.textures[texture_key]
    # keep every node group whose tree contains our texture, directly
    # or through nested groups
    users = [
        group.name
        for group in bpy.data.node_groups
        if node_group_has_texture(group.name, texture.name)
    ]
    return distinct(users)
def texture_particles(texture_key):
    # returns a list of particle system keys that use the texture in
    # their texture slots
    texture = bpy.data.textures[texture_key]
    users = []
    for settings in bpy.data.particles:
        for slot in settings.texture_slots:
            # empty slots are None and carry no texture
            if not (hasattr(slot, 'texture') and slot.texture):
                continue
            # texture in this slot is our texture
            if slot.texture.name == texture.name:
                users.append(settings.name)
    return distinct(users)
def distinct(seq):
    # returns a list of distinct elements, preserving first-occurrence
    # order
    # fix: list(set(seq)) produced a hash-dependent, non-deterministic
    # ordering; dict.fromkeys dedupes while keeping insertion order
    return list(dict.fromkeys(seq))