2025-12-01
This commit is contained in:
@@ -0,0 +1,23 @@
|
||||
from . import (
|
||||
better_delete,
|
||||
cleanup_shotfile,
|
||||
drivers,
|
||||
id_management_pie,
|
||||
libraries,
|
||||
names,
|
||||
users,
|
||||
overrides,
|
||||
relink_overridden_asset,
|
||||
)
|
||||
|
||||
modules = [
|
||||
better_delete,
|
||||
cleanup_shotfile,
|
||||
drivers,
|
||||
id_management_pie,
|
||||
libraries,
|
||||
names,
|
||||
users,
|
||||
overrides,
|
||||
relink_overridden_asset,
|
||||
]
|
||||
@@ -0,0 +1,170 @@
|
||||
# SPDX-FileCopyrightText: 2025 Blender Studio Tools Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
import bpy
|
||||
from typing import List
|
||||
|
||||
from ..hotkeys import addon_hotkey_register
|
||||
from ..util import get_addon_prefs
|
||||
|
||||
class OBJECT_OT_blenlog_unlink_from_scene(bpy.types.Operator):
    # Unlinks the selected objects (and, in the Outliner, collections)
    # from every collection of the active scene.
    bl_idname = "object.blenlog_unlink_from_scene"
    bl_label = "Unlink Selected From Scene"
    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(cls, context):
        # Fix: guard against a missing area (poll can run outside an editor)
        # and return an explicit False for unsupported editor types instead
        # of falling through to an implicit None.
        if not context.area:
            return False
        if context.area.type == 'OUTLINER':
            return bool(get_objects_to_unlink(context) or get_collections_to_unlink(context))
        elif context.area.type == 'VIEW_3D':
            return bool(get_objects_to_unlink(context))
        return False

    def execute(self, context):
        # Collections first, then objects; both helpers are no-ops on empty input.
        unlink_collections_from_scene(get_collections_to_unlink(context), context.scene)
        unlink_objects_from_scene(get_objects_to_unlink(context), context.scene)

        return {'FINISHED'}
|
||||
|
||||
|
||||
class OUTLINER_OT_blenlog_better_delete(bpy.types.Operator):
    # Batch-deletes every selected Outliner datablock from the file,
    # refusing datablocks whose removal would break the session.
    bl_idname = "outliner.blenlog_better_delete"
    bl_label = "Delete Datablocks From File"
    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(cls, context):
        return context.area.type == 'OUTLINER' and context.selected_ids

    def execute(self, context):
        selected = context.selected_ids

        # Never allow deleting the pillars of the current session.
        if context.scene in selected:
            self.report({'ERROR'}, "Cannot delete active Scene.")
            return {'CANCELLED'}
        if context.workspace in selected:
            self.report({'ERROR'}, "Cannot delete active Workspace.")
            return {'CANCELLED'}
        for datablock in selected:
            if datablock.id_type in {'SCREEN', 'WINDOWMANAGER'}:
                self.report({'ERROR'}, f"Cannot delete type: {datablock.id_type}")
                return {'CANCELLED'}

        count = len(selected)
        plural = "s" if count > 1 else ""
        bpy.data.batch_remove(selected)

        self.report({'INFO'}, f"Deleted {count} datablock{plural}.")
        return {'FINISHED'}
|
||||
|
||||
def get_objects_to_unlink(context) -> List[bpy.types.Object]:
    """Return the selected objects that are actually linked to the active scene.

    Selection source depends on the editor: the Outliner uses selected_ids,
    the 3D Viewport uses selected_objects. Any other editor yields no objects.
    """
    if context.area.type == 'OUTLINER':
        selected_objs = [id for id in context.selected_ids if type(id) == bpy.types.Object]
    elif context.area.type == 'VIEW_3D':
        selected_objs = context.selected_objects
    else:
        # Fix: previously fell through with `selected_objs` unbound, raising
        # UnboundLocalError when called from any other editor type.
        selected_objs = []

    scene_objs = set(context.scene.objects)
    return [ob for ob in selected_objs if ob in scene_objs]
|
||||
|
||||
def unlink_objects_from_scene(objects, scene):
    """Unlink each given object from every collection of the given scene."""
    # Unlinking objects never alters the collection hierarchy, so the
    # collection list can safely be gathered once up front.
    scene_collections = [scene.collection] + scene.collection.children_recursive
    for ob in objects:
        for collection in scene_collections:
            if ob.name in collection.objects:
                collection.objects.unlink(ob)
|
||||
|
||||
def get_collections_to_unlink(context) -> List[bpy.types.Collection]:
    """Return the collections selected in the Outliner.

    The 3D Viewport has no collection selection, and other editors are
    unsupported; both cases return an empty list.
    """
    if context.area.type == 'OUTLINER':
        return [id for id in context.selected_ids if type(id) == bpy.types.Collection]
    # Fix: previously any non-Outliner, non-viewport editor fell through and
    # returned None, which breaks callers that iterate the result.
    return []
|
||||
|
||||
def unlink_collections_from_scene(collections_to_unlink, scene):
    """Unlink each given collection from all of its parents within the scene."""
    for target in collections_to_unlink:
        # Re-query the hierarchy on every iteration: unlinking a collection
        # mutates the scene's collection tree.
        for parent in scene.collection.children_recursive:
            if target.name in parent.children:
                parent.children.unlink(target)
|
||||
|
||||
|
||||
class OBJECT_MT_delete_pie(bpy.types.Menu):
    # bl_label is displayed at the center of the pie menu
    bl_label = 'Unlink / Delete'
    bl_idname = 'OBJECT_MT_delete_pie'

    @classmethod
    def poll(cls, context):
        # The pie is offered both in the 3D Viewport and the Outliner;
        # the Outliner gets extra slots (see draw()).
        return context.area.type in {'VIEW_3D', 'OUTLINER'}

    def draw(self, context):
        # NOTE: menu_pie() fills slots in a fixed order, so the order of the
        # operator calls below determines where each entry appears.
        layout = self.layout
        pie = layout.menu_pie()
        # <
        pie.operator(OBJECT_OT_blenlog_unlink_from_scene.bl_idname, icon='TRASH', text="Unlink From Scene")

        if context.area.type == 'VIEW_3D':
            # > 3D View
            # use_global deletes from all scenes; skip the confirm pop-up.
            op = pie.operator('object.delete', icon='X', text="Delete From File")
            op.use_global = True
            op.confirm = False
            # The remaining slots are Outliner-only.
            return

        # >
        pie.operator('outliner.blenlog_better_delete', icon='X', text="Delete From File")

        # V
        pie.operator('outliner.id_operation', text="Unlink From Collection", icon="OUTLINER_COLLECTION").type='UNLINK'

        # ^
        pie.operator('outliner.delete', text="Delete Hierarchy", icon="OUTLINER").hierarchy=True
|
||||
|
||||
registry = [
|
||||
OBJECT_OT_blenlog_unlink_from_scene,
|
||||
OUTLINER_OT_blenlog_better_delete,
|
||||
OBJECT_MT_delete_pie
|
||||
]
|
||||
|
||||
addon_hotkeys = []
|
||||
|
||||
def register_hotkeys():
    """Register the X-key deletion pie in the Outliner and Object Mode keymaps."""
    for keymap_name in ('Outliner', 'Object Mode'):
        addon_hotkeys.append(
            addon_hotkey_register(
                op_idname='wm.call_menu_pie',
                keymap_name=keymap_name,
                key_id='X',
                op_kwargs={'name': OBJECT_MT_delete_pie.bl_idname},
                # X is Blender's default delete key; take it over silently.
                add_on_conflict=True,
                warn_on_conflict=False,
                error_on_conflict=False,
            )
        )
|
||||
|
||||
def delayed_hotkey_register():
    """We want to register hotkeys when Blender opens, but
    only if the user preference is enabled.
    And we can't access the user preference during registration, so
    we gotta use a timer to do it with a delay.
    """
    # Only register once: addon_hotkeys is empty until register_hotkeys() runs.
    if get_addon_prefs().use_deletion_pie and addon_hotkeys == []:
        register_hotkeys()
|
||||
|
||||
def unregister_hotkeys():
    """Remove every keymap item this module registered and empty the list."""
    # Pop entries as they are removed so the list ends up empty, which is
    # the state delayed_hotkey_register() checks before re-registering.
    while addon_hotkeys:
        keymap, kmi = addon_hotkeys.pop()
        keymap.keymap_items.remove(kmi)
|
||||
|
||||
def register():
    # Hotkey registration must be deferred: user preferences aren't readable
    # during add-on registration, so let an app timer do it shortly after.
    bpy.app.timers.register(delayed_hotkey_register)


def unregister():
    unregister_hotkeys()
|
||||
@@ -0,0 +1,281 @@
|
||||
# SPDX-FileCopyrightText: 2025 Blender Studio Tools Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
"""
|
||||
This script is meant to be executed by Blender Crawl on every single shot file,
|
||||
hopefully removing any useless data, fixing some names, reporting missing links,
|
||||
resyncing overrides, all without breaking anything.
|
||||
"""
|
||||
|
||||
import bpy
|
||||
from typing import Set
|
||||
from bpy.types import Library
|
||||
from bpy.props import BoolProperty
|
||||
import os
|
||||
from .relink_overridden_asset import relink_all_override_hierarchies
|
||||
|
||||
|
||||
def clean_file(
    context, allow_remove_suffix=False, allow_replace_suffix=False, only_warn_local_issues=False
) -> int:
    """Run all cleanup and warning passes on the current file.

    :param allow_remove_suffix: Let warn_number_names() strip ".00x" suffixes
        from local IDs when the resulting name is free.
    :param allow_replace_suffix: Let warn_number_names() turn ".00x" into "_00x".
    :param only_warn_local_issues: Don't warn about linked/overridden IDs.
    :return: Number of issues that still need manual attention.
    """
    # Reset frame to start.
    context.scene.frame_current = context.scene.frame_start

    # Enable Simplify (only in .anim files)
    if '.anim' in bpy.data.filepath:
        context.scene.render.use_simplify = True
        context.scene.render.simplify_subdivision = 0

    nuke_addon_properties()

    issue_counter = 0
    issue_counter += warn_primitive_names(only_local=only_warn_local_issues)
    issue_counter += warn_number_names(
        only_local=only_warn_local_issues,
        allow_remove_suffix=allow_remove_suffix,
        allow_replace_suffix=allow_replace_suffix,
    )
    issue_counter += warn_bad_libs()
    issue_counter += warn_broken_links()

    fix_local_obdata_names()

    return issue_counter
|
||||
|
||||
|
||||
def nuke_override_hidden():
    """Delete every collection whose name contains 'OVERRIDE_HIDDEN'."""
    # Snapshot the matches first; removal mutates bpy.data.collections.
    doomed = [coll for coll in bpy.data.collections if 'OVERRIDE_HIDDEN' in coll.name]
    for coll in doomed:
        bpy.data.collections.remove(coll)
|
||||
|
||||
|
||||
def resync_overrides(context):
    """Resync every library override hierarchy in the file.

    The troubleshoot operator needs an Outliner context, so the current
    area is temporarily switched to an Outliner and restored afterwards.
    """
    ui_type = context.area.ui_type
    context.area.ui_type = 'OUTLINER'

    for coll in get_override_roots():
        # Aim the operator at each hierarchy root via a context override.
        with context.temp_override(collection=coll):
            bpy.ops.outliner.liboverride_troubleshoot_operation(
                type='OVERRIDE_LIBRARY_RESYNC_HIERARCHY', selection_set='SELECTED'
            )

    # Restore whatever editor this area showed before.
    context.area.ui_type = ui_type
|
||||
|
||||
|
||||
def get_override_roots():
    """Return the set of hierarchy root collections of all library overrides."""
    return {
        coll.override_library.hierarchy_root
        for coll in bpy.data.collections
        if coll.override_library
    }
|
||||
|
||||
|
||||
def warn_primitive_names(only_local=False) -> int:
    """Print a warning for datablocks still carrying default primitive names.

    :param only_local: Skip linked and overridden IDs.
    :return: Number of offending datablocks.
    """
    issue_counter = 0
    primitive_names = [
        "Plane",
        "Cube",
        "Circle",
        "Sphere",
        "Icosphere",
        "Cylinder",
        "Cone",
        "Torus",
        "Suzanne",
        "BezierCurve",
        "BezierCircle",
        "Empty",
        "Key",
        "Material",
    ]
    for id in bpy.data.user_map().keys():
        if (id.library or id.override_library) and only_local:
            continue
        if "WGT" in id.name:
            # Widgets are allowed to be named after primitives.
            continue
        for prim_name in primitive_names:
            if prim_name in id.name:
                msg = f"WARNING: Primitive name: {id.name}, {type(id)}"
                if id.override_library:
                    msg += " " + id.override_library.reference.library.filepath
                print(msg)
                issue_counter += 1
                # Fix: warn and count each ID at most once, even when its
                # name happens to contain several primitive names.
                break
    return issue_counter
|
||||
|
||||
|
||||
def warn_number_names(
    only_local=False, allow_remove_suffix=False, allow_replace_suffix=False
) -> int:
    """Warn about (and optionally fix) IDs with a ".00x" number suffix.

    Also warns about suspiciously short (< 4 character) ID names.

    :param only_local: Skip linked and overridden IDs.
    :param allow_remove_suffix: Strip the suffix from local IDs when the
        resulting name is not already taken by another local ID.
    :param allow_replace_suffix: Otherwise, replace ".00x" with "_00x".
    :return: Number of issues that were warned about but not fixed.
    """
    issue_counter = 0
    # NOTE(review): keying by bare name means same-named IDs from different
    # libraries shadow each other in this map — confirm that's acceptable.
    all_ids = {id.name: id for id in bpy.data.user_map().keys()}
    all_local_ids = {id.name: id for id in all_ids.values() if not id.library}
    for name, id in all_ids.items():
        if (id.library or id.override_library) and only_local:
            continue

        if len(id.name) < 4:
            if type(id) == bpy.types.Brush:
                # Brushes legitimately have very short names.
                continue
            print("WARNING: Very short ID name: ", id.name, type(id))
            issue_counter += 1
            continue

        if id.name[-4] == ".":
            try:
                int(id.name[-3:])
            # Fix: catch only ValueError instead of a bare except, which
            # would also swallow KeyboardInterrupt/SystemExit.
            except ValueError:
                # Suffix is not a number, so it's fine.
                continue
            msg = "WARNING: Number suffix in name: " + id.name
            if id.override_library:
                msg += " " + id.override_library.reference.library.filepath
            if id.library:
                msg += " " + id.library.filepath
            if not id.override_library and not id.library:
                if allow_remove_suffix:
                    name_without_suffix = id.name[:-4]
                    existing = all_local_ids.get(name_without_suffix)
                    if not existing:
                        id.name = name_without_suffix
                elif allow_replace_suffix:
                    id.name = id.name[:-4] + "_" + id.name[-3:]
                else:
                    print(msg)
                    issue_counter += 1
            else:
                # Linked/overridden IDs can't be renamed from here; just warn.
                print(msg)
                issue_counter += 1

    return issue_counter
|
||||
|
||||
|
||||
def fix_local_obdata_names():
    """Rename local object data (and its shape keys) to match the owning object."""
    for ob in bpy.data.objects:
        if ob.library or ob.override_library:
            continue

        data = ob.data
        if not data:
            continue
        # Only assign when the names differ: re-assigning an identical name
        # can still result in a .001 suffix being appended.
        if data.name != ob.name:
            data.name = ob.name
        shape_keys = getattr(data, 'shape_keys', None)
        if shape_keys and shape_keys.name != ob.name:
            shape_keys.name = ob.name
|
||||
|
||||
|
||||
def warn_broken_links() -> int:
    """Print a warning for every datablock whose library link is broken.

    :return: Number of missing datablocks found.
    """
    issue_counter = 0
    for id in bpy.data.user_map().keys():
        if id.is_missing:
            msg = "MISSING ID: " + id.name
            if id.library:
                msg += " " + id.library.filepath
            print(msg)
            issue_counter += 1
    return issue_counter
|
||||
|
||||
|
||||
def warn_bad_libs() -> int:
    """Print warnings for invalid and absolute library paths.

    :return: Total number of problematic libraries.
    """
    issue_counter = 0

    # Libraries pointing at files that don't exist on disk.
    for bad_lib in get_invalid_libraries():
        print("INVALID LIBRARY: ", bad_lib.filepath)
        issue_counter += 1

    # Libraries referenced by absolute rather than relative path.
    for bad_lib in get_absolute_libraries():
        print("ABSOLUTE LIBRARY: ", bad_lib.filepath)
        issue_counter += 1

    return issue_counter
|
||||
|
||||
|
||||
def get_invalid_libraries() -> Set[Library]:
    """Return a set of library datablocks whose filepath does not exist."""
    return {
        lib
        for lib in bpy.data.libraries
        if not os.path.exists(bpy.path.abspath(lib.filepath))
    }
|
||||
|
||||
|
||||
def get_absolute_libraries() -> Set[Library]:
    """Return a set of library datablocks whose filepaths are not relative."""
    # Relative paths in Blender always start with the "//" prefix.
    return {lib for lib in bpy.data.libraries if not lib.filepath.startswith("//")}
|
||||
|
||||
|
||||
def nuke_addon_properties():
    """Strip blacklisted add-on custom properties from all local datablocks."""
    property_blacklist = {"hops"}
    for id in bpy.data.user_map().keys():
        if id.library or id.override_library:
            continue
        # Snapshot the matching keys first, since we delete while iterating.
        doomed_keys = [key for key in id.keys() if key in property_blacklist]
        for key in doomed_keys:
            del id[key]
|
||||
|
||||
|
||||
class OUTLINER_OT_cleanup_shotfile(bpy.types.Operator):
    # Purges orphan data, optionally relinks override hierarchies,
    # then runs clean_file() and reports how many issues remain.
    bl_idname = "outliner.cleanup_shotfile"
    bl_label = "Cleanup Shotfile"

    bl_options = {'REGISTER', 'UNDO'}

    # Forwarded to clean_file() → warn_number_names().
    allow_remove_suffix: BoolProperty(
        name="Allow Removing Number Suffix",
        description="If a local ID has a .00x suffix, and it can be removed without conflicting with another existing ID, just do it",
        default=True,
    )
    allow_replace_suffix: BoolProperty(
        name="Allow Replacing Suffix Separator",
        description="If a local ID has a .00x suffix, change it to _00x instead. It's best to manually look over these cases first",
        default=False,
    )
    only_warn_local_issues: BoolProperty(
        name="Only Warn Local Issues",
        description="If an issue relates to a linked ID, don't warn about it",
        default=False,
    )
    relink_overrides: BoolProperty(
        name="Relink Overrides",
        description="Relink all overridden hierarchy roots. Keyframe any overridden RNA Paths that were overridden but not keyed. Preserve the assigned action, constraints, pointers. Only armatures will be editable overrides, unless there was an actual override on a non-armature object",
        default=False,
    )

    def execute(self, context):
        # Purge first so the passes below don't waste time on orphan data.
        bpy.ops.outliner.orphans_purge(do_local_ids=True, do_recursive=True, do_linked_ids=True)

        if self.relink_overrides:
            # Relink ALL overridden assets from scratch, while (hopefully)
            # preserving all intentionally created data on them.
            relink_all_override_hierarchies(context)

        issue_count = clean_file(
            context,
            allow_remove_suffix=self.allow_remove_suffix,
            allow_replace_suffix=self.allow_replace_suffix,
            only_warn_local_issues=self.only_warn_local_issues,
        )

        # Per-issue details were printed to the terminal by the warn_* passes.
        if issue_count > 0:
            self.report(
                {'WARNING'},
                f"Cleanup complete. {issue_count} issues still need attention. See terminal for details.",
            )
        else:
            self.report({'INFO'}, "Cleanup complete. No remaining issues detected.")

        return {'FINISHED'}
|
||||
|
||||
|
||||
registry = [OUTLINER_OT_cleanup_shotfile]
|
||||
@@ -0,0 +1,123 @@
|
||||
# SPDX-FileCopyrightText: 2025 Blender Studio Tools Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
import bpy
|
||||
from bpy.props import StringProperty, IntProperty
|
||||
from bpy.types import Object
|
||||
|
||||
from typing import List
|
||||
|
||||
def report_invalid_drivers(context, objects: List[Object]) -> int:
    """Log drivers on local objects that are leftover or invalid.

    Two BlenLog categories are (re-)populated:
    - 'Leftover Driver': the driver's RNA path no longer resolves.
    - 'Invalid Driver': the path resolves but the driver itself is invalid.

    :param objects: Objects to scan; linked/overridden ones are skipped.
    :return: Number of problematic drivers found.
    """
    blenlog = context.scene.blender_log

    cat_leftover = 'Leftover Driver'
    blenlog.clear_category(cat_leftover)

    cat_invalid = 'Invalid Driver'
    blenlog.clear_category(cat_invalid)

    counter = 0
    for obj in objects:
        if obj.library or obj.override_library:
            continue
        if not obj.animation_data:
            continue
        for fcurve in obj.animation_data.drivers:
            # Shared log kwargs: activating the log entry triggers the
            # delete-driver operator for exactly this fcurve.
            kwargs = {
                'icon': 'DRIVER_TRANSFORM',
                'operator': BLENLOG_OT_delete_driver.bl_idname,
                'op_kwargs': {
                    'object_name': obj.name,
                    'driver_path': fcurve.data_path,
                    'array_index': fcurve.array_index,
                },
            }
            try:
                obj.path_resolve(fcurve.data_path)
            except ValueError:
                # If the RNA path of the driver fails to resolve to some value,
                # that means the driver is pointing at nothing; A deleted modifier,
                # constraint, bone, whatever.
                blenlog.add(
                    description=f"Driver RNA path leads to nothing: '{fcurve.data_path}'.\nThis can happen when removing modifiers, constraints, bones, etc. that previously had drivers on them. Such driver can be safely deleted, else they will spam the console.",
                    name=obj.name,
                    category=cat_leftover,
                    **kwargs
                )
                counter += 1
                continue
            if not fcurve.driver.is_valid:
                blenlog.add(
                    description=f"Invalid Driver: '{fcurve.data_path}'.\nThis can happen when a driver var target datablock is removed, an expression is invalid, etc.",
                    name=obj.name,
                    category=cat_invalid,
                    **kwargs
                )
                counter += 1

    return counter
|
||||
|
||||
report_leftover_drivers = report_invalid_drivers
|
||||
|
||||
class BLENLOG_OT_report_invalid_drivers(bpy.types.Operator):
    """Report drivers that point to nothing"""

    bl_idname = "blenlog.report_invalid_drivers"
    bl_label = "Report Leftover Drivers"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    def execute(self, context):
        # Scan every object in the file; linked/overridden objects are
        # skipped inside report_invalid_drivers().
        counter = report_invalid_drivers(context, bpy.data.objects)

        if counter > 0:
            self.report({'WARNING'}, f"Found {counter} invalid drivers.")
        else:
            # Fix: was an f-string with no placeholders.
            self.report({'INFO'}, "No invalid drivers found.")

        return {'FINISHED'}
|
||||
|
||||
|
||||
class BLENLOG_OT_delete_driver(bpy.types.Operator):
    """Delete a driver on a local object"""

    bl_idname = "blenlog.delete_driver"
    bl_label = "Delete Driver"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    # Together these identify a single driver: owning object, RNA path,
    # and array index within the property.
    object_name: StringProperty()
    driver_path: StringProperty()
    array_index: IntProperty()

    def execute(self, context):
        # (name, library) tuple lookup; None library restricts to local objects.
        obj = bpy.data.objects.get((self.object_name, None))
        if not obj:
            self.report({'INFO'}, f'Object "{self.object_name}" had already been removed.')
            return {'CANCELLED'}
        if not obj.animation_data or not obj.animation_data.drivers:
            self.report(
                {'INFO'}, f'All drivers of object "{self.object_name}" had already been removed.'
            )
            return {'CANCELLED'}

        logs = context.scene.blender_log

        driver = obj.animation_data.drivers.find(self.driver_path, index=self.array_index)
        if driver:
            obj.animation_data.drivers.remove(driver)
            self.report({'INFO'}, f'Removed driver "{self.driver_path}" from object "{obj.name}"')
        else:
            self.report(
                {'INFO'},
                # Fix: the message was missing the closing quote after the
                # driver path.
                f'Driver "{self.driver_path}" on object "{obj.name}" had already been removed.',
            )

        # Whether removed now or earlier, this log entry is stale; drop it.
        logs.remove(logs.active_log)

        return {'FINISHED'}
|
||||
|
||||
|
||||
registry = [
|
||||
BLENLOG_OT_report_invalid_drivers,
|
||||
BLENLOG_OT_delete_driver,
|
||||
]
|
||||
@@ -0,0 +1,254 @@
|
||||
# SPDX-FileCopyrightText: 2025 Blender Studio Tools Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# TODO: This pie menu was moved to the 3D Viewport Pie Menus add-on, could be considered for removal here.
|
||||
|
||||
import bpy
|
||||
from typing import List, Optional
|
||||
from bpy.props import StringProperty
|
||||
from bpy_extras import id_map_utils
|
||||
|
||||
from .relink_overridden_asset import OUTLINER_OT_relink_overridden_asset, outliner_get_active_id
|
||||
from .. import hotkeys
|
||||
from ..id_types import get_datablock_icon, get_library_icon, get_id_storage_by_type_str
|
||||
|
||||
|
||||
### Pie Menu UI
|
||||
class IDMAN_MT_relationship_pie(bpy.types.Menu):
    # bl_label is displayed at the center of the pie menu
    bl_label = 'Datablock Relationships'
    bl_idname = 'IDMAN_MT_relationship_pie'

    @staticmethod
    def get_id(context) -> Optional[bpy.types.ID]:
        # The active ID in the Outliner (None outside the Outliner).
        return outliner_get_active_id(context)

    def draw(self, context):
        # NOTE: pie slots are filled in a fixed order, so operator call
        # order below determines each entry's position.
        layout = self.layout
        pie = layout.menu_pie()
        # <
        pie.operator(OUTLINER_OT_blenlog_list_datablock_users.bl_idname, icon='LOOP_BACK')
        # >
        pie.operator(OUTLINER_OT_blenlog_list_datablock_deps.bl_idname, icon='LOOP_FORWARDS')
        # V
        pie.operator('outliner.orphans_purge', icon='ORPHAN_DATA')

        id = self.get_id(context)
        if id:
            # ^
            remap = pie.operator(
                'outliner.remap_users', icon='FILE_REFRESH', text="Remap Users"
            )
            remap.id_type = id.id_type
            remap.id_name_source = id.name
            if id.library:
                remap.library_path_source = id.library.filepath

            # ^>
            # NOTE(review): `id` is re-bound here, so the "<^" slot below
            # checks the relink target rather than the active Outliner ID —
            # confirm this is intentional.
            id = OUTLINER_OT_relink_overridden_asset.get_id(context)
            if id:
                pie.operator('object.relink_overridden_asset', icon='LIBRARY_DATA_OVERRIDE')
            else:
                pie.separator()

            # <^
            if id and id.override_library:
                pie.operator(
                    'outliner.liboverride_troubleshoot_operation',
                    icon='UV_SYNC_SELECT',
                    text="Resync Override Hierarchy",
                ).type = 'OVERRIDE_LIBRARY_RESYNC_HIERARCHY_ENFORCE'
            else:
                pie.separator()
        else:
            # Keep the slot layout stable even with no active ID.
            pie.separator()
            pie.separator()
            pie.separator()

        # v>
        if OUTLINER_OT_blenlog_instancer_to_collection.should_draw(context):
            pie.operator(OUTLINER_OT_blenlog_instancer_to_collection.bl_idname, icon='LINKED')
        else:
            pie.separator()
|
||||
|
||||
|
||||
### Relationship visualization operators
|
||||
class RelationshipOperatorMixin:
    """Shared dialog machinery for the datablock relationship pop-ups.

    Subclasses implement get_datablocks_to_display() and may override
    get_label().
    """

    # Identify the datablock to inspect; when left empty, the active
    # Outliner ID is used instead (see get_datablock()).
    datablock_name: StringProperty()
    datablock_storage: StringProperty()
    library_filepath: StringProperty()

    def get_datablock(self, context) -> Optional[bpy.types.ID]:
        """Resolve the target from the operator properties or the Outliner."""
        if self.datablock_name and self.datablock_storage:
            storage = getattr(bpy.data, self.datablock_storage)
            lib_path = self.library_filepath or None
            # (name, library_path) tuple lookup; None means a local datablock.
            return storage.get((self.datablock_name, lib_path))
        elif context.area.type == 'OUTLINER':
            return outliner_get_active_id(context)

    @classmethod
    def poll(cls, context):
        return context.area.type == 'OUTLINER' and len(context.selected_ids) > 0

    def invoke(self, context, _event):
        # Everything is rendered in draw(); execute() is a no-op.
        return context.window_manager.invoke_props_dialog(self, width=600)

    def get_datablocks_to_display(self, id: bpy.types.ID) -> List[bpy.types.ID]:
        """Return the related datablocks to list; implemented by subclasses."""
        raise NotImplementedError

    def get_label(self):
        # Heading shown above the list; subclasses may override.
        return "Listing datablocks that reference this:"

    def draw(self, context):
        layout = self.layout
        layout.use_property_decorate = False
        layout.use_property_split = True

        datablock = self.get_datablock(context)
        if not datablock:
            layout.alert = True
            layout.label(
                text=f"Failed to find datablock: {self.datablock_storage}, {self.datablock_name}, {self.library_filepath}"
            )
            return

        # Header: label on the left, the inspected datablock on the right.
        row = layout.row()
        split = row.split()
        row = split.row()
        row.alignment = 'RIGHT'
        row.label(text=self.get_label())
        id_row = split.row(align=True)
        name_row = id_row.row()
        name_row.enabled = False
        name_row.prop(datablock, 'name', icon=get_datablock_icon(datablock), text="")
        fake_user_row = id_row.row()
        fake_user_row.prop(datablock, 'use_fake_user', text="")

        layout.separator()

        datablocks = self.get_datablocks_to_display(datablock)
        if not datablocks:
            layout.label(text="There are none.")
            return

        for user in self.get_datablocks_to_display(datablock):
            if user == datablock:
                # Scenes are users of themself for technical reasons,
                # I think it's confusing to display that.
                continue
            # One row per related datablock: its name, plus a jump button
            # that re-invokes this operator targeting that datablock.
            row = layout.row()
            name_row = row.row()
            name_row.enabled = False
            name_row.prop(user, 'name', icon=get_datablock_icon(user), text="")
            op_row = row.row()
            op = op_row.operator(type(self).bl_idname, text="", icon='LOOP_FORWARDS')
            op.datablock_name = user.name
            storage = get_id_storage_by_type_str(user.id_type)[1]
            if not storage:
                print("Error: Can't find storage: ", user.name, user.id_type)
            op.datablock_storage = storage
            if user.library:
                op.library_filepath = user.library.filepath
                name_row.prop(
                    user.library,
                    'filepath',
                    icon=get_library_icon(user.library.filepath),
                    text="",
                )

    def execute(self, context):
        # Purely informational dialog; nothing to apply.
        return {'FINISHED'}
|
||||
|
||||
|
||||
class OUTLINER_OT_blenlog_list_datablock_users(RelationshipOperatorMixin, bpy.types.Operator):
    """Show list of users of this datablock"""

    bl_idname = "object.blenlog_list_datablock_users"
    bl_label = "List Datablock Users"

    datablock_name: StringProperty()
    datablock_storage: StringProperty()
    library_filepath: StringProperty()

    def get_datablocks_to_display(self, datablock: bpy.types.ID) -> List[bpy.types.ID]:
        # Sort by type name, then datablock name, for a stable listing.
        users = bpy.data.user_map()[datablock]
        return sorted(users, key=lambda user: (str(type(user)), user.name))
|
||||
|
||||
|
||||
class OUTLINER_OT_blenlog_list_datablock_deps(RelationshipOperatorMixin, bpy.types.Operator):
    """Show list of dependencies of this datablock"""

    bl_idname = "object.blenlog_list_datablock_deps"
    bl_label = "List Datablock Dependencies"

    def get_label(self):
        return "Listing datablocks that are referenced by this:"

    def get_datablocks_to_display(self, datablock: bpy.types.ID) -> List[bpy.types.ID]:
        # An ID with no dependencies yields an empty list; otherwise sort
        # by type name, then datablock name, for a stable listing.
        deps = id_map_utils.get_id_reference_map().get(datablock)
        if not deps:
            return []
        return sorted(deps, key=lambda dep: (str(type(dep)), dep.name))
|
||||
|
||||
|
||||
### Instance Collection To Scene
|
||||
class OUTLINER_OT_blenlog_instancer_to_collection(bpy.types.Operator):
    """Replace an Empty that instances a collection, with the collection itself"""

    bl_idname = "outliner.blenlog_instancer_to_collection"
    bl_label = "Instancer Empty To Collection"
    bl_options = {'UNDO'}

    @staticmethod
    def should_draw(context):
        # Only offered when the active Outliner ID is an Empty instancing a
        # collection that isn't already linked directly under the scene.
        return (
            context.area.ui_type == 'OUTLINER'
            and context.id
            and type(context.id) == bpy.types.Object
            and context.id.type == 'EMPTY'
            and context.id.instance_type == 'COLLECTION'
            and context.id.instance_collection
            and context.id.instance_collection not in set(context.scene.collection.children)
        )

    @classmethod
    def poll(cls, context):
        return cls.should_draw(context)

    def execute(self, context):
        # Grab the collection before deleting the instancer Empty.
        coll = context.id.instance_collection
        bpy.data.objects.remove(context.id)
        context.scene.collection.children.link(coll)

        return {'FINISHED'}
|
||||
|
||||
|
||||
registry = [
|
||||
IDMAN_MT_relationship_pie,
|
||||
OUTLINER_OT_blenlog_list_datablock_users,
|
||||
OUTLINER_OT_blenlog_list_datablock_deps,
|
||||
OUTLINER_OT_blenlog_instancer_to_collection,
|
||||
]
|
||||
|
||||
|
||||
addon_hotkeys = []
|
||||
|
||||
|
||||
def register():
    # Y opens the datablock-relationship pie in the Outliner.
    addon_hotkeys.append(
        hotkeys.addon_hotkey_register(
            op_idname='wm.call_menu_pie',
            keymap_name='Outliner',
            key_id='Y',
            op_kwargs={'name': IDMAN_MT_relationship_pie.bl_idname},
            # Register even if Y is already bound, but warn about it.
            add_on_conflict=True,
            warn_on_conflict=True,
        )
    )
|
||||
|
||||
|
||||
def unregister():
    """Remove the hotkeys registered by this module."""
    # Fix: pop entries while removing them. The previous version iterated a
    # copy and never cleared the list, so a register/unregister cycle would
    # try to remove already-removed keymap items.
    while addon_hotkeys:
        keymap, kmi = addon_hotkeys.pop()
        keymap.keymap_items.remove(kmi)
|
||||
@@ -0,0 +1,149 @@
|
||||
# SPDX-FileCopyrightText: 2025 Blender Studio Tools Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
import bpy
|
||||
from bpy.props import StringProperty
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def report_missing_libraries(context, libraries=None):
    """Log every library whose .blend file is missing on disk.

    :param libraries: Libraries to check; defaults to all of bpy.data.libraries.
    :return: The "Missing Library" log category, or None if nothing was logged.
    """
    # Fix: avoid a mutable default argument; None (or any falsy value)
    # falls back to all libraries, matching the previous behavior.
    if not libraries:
        libraries = bpy.data.libraries

    blenlog = context.scene.blender_log
    cat_missing = "Missing Library"
    blenlog.clear_category(cat_missing)

    for lib in libraries:
        if lib.is_missing:
            blenlog.add(
                name=lib.filepath,
                description="Library file is not found on file system.",
                category=cat_missing,
                category_icon='LIBRARY_DATA_BROKEN',
                # operator='outliner.lib_operation', # This operator demands to be run in the outliner for no particular reason.
                # op_kwargs={'type': 'RELOCATE'},
            )

    return blenlog.categories.get(cat_missing)
|
||||
|
||||
|
||||
def report_absolute_libraries(context, libraries=None):
    """Log every library referenced by an absolute (non-"//") filepath.

    :param libraries: Libraries to check; defaults to all of bpy.data.libraries.
    :return: The "Absolute Library" log category, or None if nothing was logged.
    """
    # Fix: avoid a mutable default argument; None (or any falsy value)
    # falls back to all libraries, matching the previous behavior.
    if not libraries:
        libraries = bpy.data.libraries

    blenlog = context.scene.blender_log
    cat_absolute = "Absolute Library"
    blenlog.clear_category(cat_absolute)

    for lib in libraries:
        # Blender-relative paths always start with the "//" prefix.
        if not lib.filepath.startswith("//"):
            blenlog.add(
                name=lib.filepath,
                description=f"{lib.filepath}\nLibrary path is not relative to this .blend, but absolute.",
                category=cat_absolute,
                category_icon='FILEBROWSER',
                # operator='outliner.lib_operation',
                # op_kwargs={'type': 'RELOCATE'},
            )

    return blenlog.categories.get(cat_absolute)
|
||||
|
||||
|
||||
def report_libraries_out_of_folder(context, project_path, libraries=None):
    """Log every library that lives outside the given project directory.

    :param project_path: Project root directory, as a Path or str.
        (Fix: the old annotation ``Path or str`` evaluated to just ``Path``.)
    :param libraries: Libraries to check; defaults to all of bpy.data.libraries.
    :return: The "Library Outside Project" log category, or None.
    """
    if isinstance(project_path, str):
        project_path = Path(project_path)

    # Fix: avoid a mutable default argument; falsy falls back to all libraries.
    if not libraries:
        libraries = bpy.data.libraries

    blenlog = context.scene.blender_log
    cat_not_prod = "Library Outside Project"
    blenlog.clear_category(cat_not_prod)

    for lib in libraries:
        # Resolve to an absolute real path before comparing to the root.
        abs_path = Path(bpy.path.abspath(lib.filepath)).resolve()
        if project_path not in abs_path.parents:
            blenlog.add(
                name=lib.filepath,
                description=f"{lib.filepath}\nLibrary is not a part of this project.",
                category=cat_not_prod,
                category_icon='ERROR',
                # operator='outliner.lib_operation',
                # op_kwargs={'type': 'RELOCATE'},
            )

    return blenlog.categories.get(cat_not_prod)
|
||||
|
||||
|
||||
class BLENLOG_OT_report_missing_libraries(bpy.types.Operator):
    """Report libraries whose files don't exist on the system"""

    bl_idname = "blenlog.report_missing_libraries"
    bl_label = "Report Missing Libraries"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self, context):
        category = report_missing_libraries(context, bpy.data.libraries)

        # Fix: the category can be None when nothing was logged; guard
        # before accessing .logs.
        if category and len(category.logs) > 0:
            self.report({'WARNING'}, f"Found {len(category.logs)} missing libraries.")
        else:
            self.report({'INFO'}, "No missing libraries found.")

        return {'FINISHED'}
|
||||
|
||||
|
||||
class BLENLOG_OT_report_absolute_libraries(bpy.types.Operator):
    """Report libraries whose filepath is referenced absolute rather than relative"""

    bl_idname = "blenlog.report_absolute_libraries"
    bl_label = "Report Absolute Libraries"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self, context):
        # Scan every library in the file; the helper fills the log category.
        category = report_absolute_libraries(context, bpy.data.libraries)

        if len(category.logs) > 0:
            self.report({'WARNING'}, f"Found {len(category.logs)} absolute libraries.")
        else:
            # Fixed: dropped extraneous f-prefix on a literal with no placeholders.
            self.report({'INFO'}, "No absolute libraries found.")

        return {'FINISHED'}
|
||||
|
||||
|
||||
class BLENLOG_OT_report_libraries_out_of_folder(bpy.types.Operator):
    """Report libraries whose filepath is outside of a specified folder"""

    bl_idname = "blenlog.report_libraries_out_of_folder"
    bl_label = "Report Libraries Out of Folder"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    project_root_path: StringProperty(
        name="Project Root Path",
        subtype='DIR_PATH',
        description="Your project's root directory. Any libraries outside of this will be reported",
    )

    def execute(self, context):
        # Fixed: the arguments were passed in the wrong order — bpy.data.libraries
        # ended up as `project_path` and the root path string as `libraries`.
        # Signature is report_libraries_out_of_folder(context, project_path, libraries).
        category = report_libraries_out_of_folder(
            context, self.project_root_path, bpy.data.libraries
        )

        if len(category.logs) > 0:
            self.report(
                {'WARNING'}, f"Found {len(category.logs)} libraries outside of the project root."
            )
        else:
            # Fixed: dropped extraneous f-prefix on a literal with no placeholders.
            self.report({'INFO'}, "No libraries outside of project found.")

        return {'FINISHED'}
|
||||
|
||||
|
||||
# Operator classes picked up by the add-on's central register()/unregister() machinery.
registry = [
    BLENLOG_OT_report_missing_libraries,
    BLENLOG_OT_report_absolute_libraries,
    BLENLOG_OT_report_libraries_out_of_folder,
]
|
||||
@@ -0,0 +1,155 @@
|
||||
# SPDX-FileCopyrightText: 2025 Blender Studio Tools Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
import bpy
|
||||
from bpy.props import StringProperty
|
||||
from ..id_types import get_id
|
||||
|
||||
|
||||
def get_blender_number_suffix(name: str) -> str:
    """Return the Blender-style numeric suffix of a name (e.g. ".001"), or "".

    Fix: the original indexed ``name[-4]`` unconditionally, which raised
    IndexError for names shorter than 4 characters; a length guard was added.
    """
    if len(name) >= 4 and name[-4] == "." and name[-3:].isdecimal():
        return name[-4:]
    return ""
|
||||
|
||||
|
||||
def name_without_number(name: str) -> str:
    """Return ``name`` with any trailing Blender number suffix (".001") removed.

    Fixes: the original implicitly returned None for names without a suffix;
    it now returns the name unchanged. The suffix check is done inline with a
    length guard, so short names (< 4 chars) no longer raise IndexError.
    """
    if len(name) >= 4 and name[-4] == "." and name[-3:].isdecimal():
        return name[:-4]
    return name
|
||||
|
||||
|
||||
class BLENLOG_OT_rename_obdata(bpy.types.Operator):
    """Rename this object's data (and shape key) datablock to match the object's name"""

    # NOTE: The docstring above doubles as the operator tooltip. It previously
    # read "Disable fake user flag on the collection" — a copy-paste error from
    # an unrelated operator; fixed to describe what execute() actually does.

    bl_idname = "blenlog.rename_object_data"
    bl_label = "Rename Object Data"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    # Name of the (local) object whose data should be renamed.
    obj_name: StringProperty()

    def execute(self, context):
        # (name, None) key: only match a local datablock, never a linked one.
        obj = bpy.data.objects.get((self.obj_name, None))
        if not obj:
            self.report({'WARNING'}, "Object no longer exists.")
        else:
            if obj.data.name != obj.name:
                obj.data.name = obj.name
            # Shape key datablocks have their own name; keep it in sync too.
            if hasattr(obj.data, 'shape_keys') and obj.data.shape_keys:
                if obj.data.shape_keys.name != obj.name:
                    obj.data.shape_keys.name = obj.name
            self.report({'INFO'}, "Object data renamed.")

        context.scene.blender_log.remove_active()

        return {'FINISHED'}
|
||||
|
||||
|
||||
class BLENLOG_OT_report_obdata_names(bpy.types.Operator):
    """Report objects with data or shape keys not named the same as the object"""

    bl_idname = "blenlog.report_obdata_name_mismatch"
    bl_label = "Report Mis-Named Object Datas"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    @staticmethod
    def _log_mismatch(blenlog, category, obj, name_suffix, description):
        """Add one mismatch log entry for `obj`, wired to the rename operator.

        Extracted to remove the duplication between the object-data and
        shape-key cases in execute().
        """
        blenlog.add(
            name=obj.name + name_suffix,
            category=category,
            description=description,
            icon='FILE_TEXT',
            operator=BLENLOG_OT_rename_obdata.bl_idname,
            op_kwargs={'obj_name': obj.name},
            op_icon='GREASEPENCIL',
        )

    def execute(self, context):
        blenlog = context.scene.blender_log

        category = "Object Data Name Mismatch"
        # Start from a clean slate so stale entries don't linger.
        blenlog.clear_category(category)

        counter = 0
        for obj in bpy.data.objects:
            if not obj.data:
                continue
            # Skip if obj or data is linked or overridden: can't rename those.
            if obj.library or obj.override_library or obj.data.library or obj.data.override_library:
                continue

            if obj.data.name != obj.name:
                self._log_mismatch(
                    blenlog,
                    category,
                    obj,
                    " (Data)",
                    "Object data is not named the same as the containing object. This is unavoidable for multi-user object datas though.",
                )
                counter += 1

            # Only mesh-like datablocks have shape keys; skip the rest.
            if not hasattr(obj.data, 'shape_keys'):
                continue
            if not obj.data.shape_keys:
                continue

            if obj.data.shape_keys.name != obj.name:
                self._log_mismatch(
                    blenlog,
                    category,
                    obj,
                    " (Shape Key Data)",
                    "Shape Key datablock is not named the same as the containing object. This is unavoidable for multi-user object datas though.",
                )
                counter += 1

        if counter == 0:
            self.report({'INFO'}, "No objects with mismatched data names.")
        else:
            self.report({'WARNING'}, f"Found {counter} mismatched datablock names.")

        return {'FINISHED'}
|
||||
|
||||
|
||||
class BLENLOG_OT_rename_id(bpy.types.Operator):
    """Rename a local ID"""

    bl_idname = "blenlog.rename_id"
    bl_label = "Rename ID"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    # Current name of the ID to rename.
    id_name: StringProperty()
    # ID type string understood by get_id().
    id_type: StringProperty()
    new_name: StringProperty(name="Name")

    def invoke(self, context, _event):
        # Pre-fill the dialog with the current name, minus any ".001" suffix.
        if not self.new_name:
            self.new_name = self.id_name
            if get_blender_number_suffix(self.new_name):
                self.new_name = name_without_number(self.new_name)

        return context.window_manager.invoke_props_dialog(self)

    def draw(self, context):
        self.layout.prop(self, 'new_name')
        # Fixed: keeping the current name is not a collision — only warn when
        # a *different* ID already owns the chosen name. Previously the lookup
        # found the ID being renamed and always showed the warning.
        if self.new_name != self.id_name and get_id(self.new_name, self.id_type):
            self.layout.alert = True
            self.layout.label(text="This name is already taken.", icon='ERROR')

    def execute(self, context):
        id = get_id(self.id_name, self.id_type)
        if not id:
            self.report({'ERROR'}, f"ID no longer exists: {self.id_name}.")
            return {'CANCELLED'}
        # Fixed: a no-op rename (new name == old name) used to be rejected as
        # "name already taken" because the lookup found the ID itself.
        if self.new_name != self.id_name and get_id(self.new_name, self.id_type):
            self.report({'ERROR'}, f"ID name already taken: {self.new_name}.")
            return {'CANCELLED'}

        if id.name != self.new_name:
            id.name = self.new_name
        self.report(
            {'INFO'},
            f"{self.id_type.title()} successfully renamed from {self.id_name} to {self.new_name}.",
        )
        context.scene.blender_log.remove_active()
        return {'FINISHED'}
|
||||
|
||||
|
||||
# Operator classes picked up by the add-on's central register()/unregister() machinery.
registry = [
    BLENLOG_OT_report_obdata_names,
    BLENLOG_OT_rename_obdata,
    BLENLOG_OT_rename_id,
]
|
||||
@@ -0,0 +1,191 @@
|
||||
# SPDX-FileCopyrightText: 2025 Blender Studio Tools Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
import bpy
|
||||
from bpy.props import StringProperty
|
||||
from .names import get_blender_number_suffix
|
||||
from ..id_types import get_id
|
||||
|
||||
|
||||
def get_desired_override_name(id):
    """Compute the name an overridden ID *should* carry.

    That is the name of the linked ID it references, plus the numeric suffix
    (e.g. ".001") of the override hierarchy's root, if the root has one.
    Non-overridden IDs simply keep their current name.
    """
    lib_override = id.override_library
    if not lib_override:
        return id.name
    root_suffix = get_blender_number_suffix(lib_override.hierarchy_root.name)
    return lib_override.reference.name + root_suffix
|
||||
|
||||
|
||||
class BLENLOG_OT_report_library_overrides(bpy.types.Operator):
    """Report various issues relating to library overrides"""

    bl_idname = "blenlog.report_library_overrides"
    bl_label = "Report Library Overrides"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    def execute(self, context):
        blenlog = context.scene.blender_log

        # Leftover collections from override resyncs get their own category.
        leftovers = report_leftover_overrides(context)
        if leftovers:
            self.report(
                {'WARNING'}, f"There are override leftover collections in the file: {leftovers}"
            )

        cat_name_taken = "Override Name Occupied"
        blenlog.clear_category(cat_name_taken)

        cat_name_wrong = "Override Name Mismatch"
        blenlog.clear_category(cat_name_wrong)

        cat_name_conflict = "Override Name Conflict"
        blenlog.clear_category(cat_name_conflict)

        objects = [obj for obj in bpy.data.objects if obj.override_library]
        collections = [coll for coll in bpy.data.collections if coll.override_library]
        # (id_type string, datablock collection to look names up in, overrides to check)
        id_batches = [
            ('OBJECT', bpy.data.objects, objects),
            ('COLLECTION', bpy.data.collections, collections),
        ]
        counter = 0
        for id_type, propcoll, ids in id_batches:
            for id in ids:
                desired_name = get_desired_override_name(id)
                if id.name == desired_name:
                    continue
                counter += 1
                # (name, None) key: only a local ID can occupy the name.
                occupied = propcoll.get((desired_name, None))
                if occupied:
                    if get_desired_override_name(occupied) == occupied.name:
                        # The occupier is itself named correctly: unfixable locally.
                        blenlog.add(
                            description=f"Inherent override name conflict! {id.name} should be named {desired_name}, which is already taken by an object that is named correctly. This issue cannot be fixed locally. The number suffix in the name must be removed in the original library ({id.override_library.reference.library.filepath}), or one of the overridden objects must be deleted.",
                            icon='LIBRARY_DATA_OVERRIDE',
                            name=id.name,
                            category=cat_name_conflict,
                        )
                    else:
                        # Names are tangled; offer the recursive untangler.
                        blenlog.add(
                            description=f"Desired overridden {id_type} name '{desired_name}' is already taken from {id.name}. All names should be fixed recursively such that each ID is named after its reference library ID, plus the number suffix of the override hierarchy root.",
                            icon='LIBRARY_DATA_OVERRIDE',
                            name=id.name,
                            category=cat_name_taken,
                            operator=BLENLOG_OT_recursive_override_name_fix.bl_idname,
                            op_kwargs={'id_name': id.name, 'id_type': id_type},
                        )
                else:
                    # Desired name is free; offer a simple rename.
                    blenlog.add(
                        description=f"Overridden object name doesn't match referenced library object name.",
                        icon='LIBRARY_DATA_OVERRIDE',
                        name=id.name,
                        category=cat_name_wrong,
                        operator='blenlog.rename_id',
                        op_kwargs={
                            'id_name': id.name,
                            'id_type': id_type,
                            'new_name': desired_name,
                        },
                    )

        if counter > 0:
            self.report({'WARNING'}, f"Found {counter} wrong override names.")
        else:
            # Fixed: dropped extraneous f-prefix on a literal with no placeholders.
            self.report({'INFO'}, "All overrides are named correctly.")

        return {'FINISHED'}
|
||||
|
||||
|
||||
def report_leftover_overrides(context):
    """Log any override-resync leftover collections and return them as a list."""
    blenlog = context.scene.blender_log
    category = 'Leftover Overrides'
    blenlog.clear_category(category)

    leftover_names = {'OVERRIDE_RESYNC_LEFTOVERS', 'OVERRIDE_HIDDEN'}
    leftovers = [coll for coll in bpy.data.collections if coll.name in leftover_names]

    for leftover in leftovers:
        # Each entry carries a one-click "delete hierarchy" action.
        blenlog.add(
            description="Override Resync Leftovers are left behind when an override data hierarchy became ambiguous. This should be extremely rare. Check your overrides for any issues, then you can delete these leftovers.",
            icon='LIBRARY_DATA_OVERRIDE',
            name=leftover.name,
            category=category,
            operator=BLENLOG_OT_delete_collection_hierarchy.bl_idname,
            op_kwargs={
                'coll_name': leftover.name,
            },
        )
    return leftovers
|
||||
|
||||
|
||||
class BLENLOG_OT_delete_collection_hierarchy(bpy.types.Operator):
    """Delete a collection hierarchy"""

    bl_idname = "blenlog.delete_collection_hierarchy"
    bl_label = "Delete Collection Hierarchy"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    # Name of the local collection to delete.
    coll_name: StringProperty()

    def execute(self, context):
        # Fixed: the collection was looked up in bpy.data.objects, so it was
        # never found (and bpy.data.collections.remove() would have been handed
        # an Object). Look it up in bpy.data.collections instead.
        # (name, None) key: only match a local datablock, never a linked one.
        coll = bpy.data.collections.get((self.coll_name, None))
        if not coll:
            self.report({'INFO'}, f'Collection "{self.coll_name}" had already been removed.')
            return {'CANCELLED'}

        bpy.data.collections.remove(coll)
        # Recursively purge local orphans so the hierarchy's contents go too.
        bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=False, do_recursive=True)

        self.report(
            {'INFO'},
            f"Removed collection {self.coll_name} and purged the blend file of any unused datablocks.",
        )

        logs = context.scene.blender_log
        logs.remove(logs.active_log)

        return {'FINISHED'}
|
||||
|
||||
|
||||
class BLENLOG_OT_recursive_override_name_fix(bpy.types.Operator):
    """Recursively rename override object names that occupy each other's names, to the correct suffixes. Useful when the object names of duplicated overrides get tangled up"""

    bl_idname = "blenlog.recursive_override_name_fix"
    bl_label = "Recursive Override Name Fix"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    id_name: StringProperty()
    id_type: StringProperty()

    def execute(self, context):
        blenlog = context.scene.blender_log

        target = get_id(self.id_name, self.id_type)
        if not target:
            # The ID vanished since the log entry was created; drop the entry.
            self.report({'ERROR'}, f"{self.id_type} '{self.id_name}' no longer exists.")
            blenlog.remove_active()
            return {'CANCELLED'}

        # Untangle the name chain starting at this ID, then drop the entry.
        override_recursive_rename(target)
        blenlog.remove_active()

        return {'FINISHED'}
|
||||
|
||||
|
||||
def override_recursive_rename(override_id):
    """Rename an override ID to its desired name, recursively freeing that name.

    The desired name is the referenced linked ID's name plus the hierarchy
    root's numeric suffix (if any). When another ID currently occupies the
    desired name, that occupier is renamed first, recursively.
    """
    desired_name = get_desired_override_name(override_id)
    if override_id.name == desired_name:
        # Already correct; nothing to do.
        return
    blocker = get_id(desired_name, override_id.id_type)
    if blocker:
        # Free up the desired name before claiming it.
        override_recursive_rename(blocker)
    print("Renaming ", override_id.name, "to", desired_name)
    override_id.name = desired_name
|
||||
|
||||
|
||||
# Operator classes picked up by the add-on's central register()/unregister() machinery.
registry = [
    BLENLOG_OT_report_library_overrides,
    BLENLOG_OT_delete_collection_hierarchy,
    BLENLOG_OT_recursive_override_name_fix,
]
|
||||
@@ -0,0 +1,502 @@
|
||||
# SPDX-FileCopyrightText: 2025 Blender Studio Tools Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
import bpy
|
||||
from typing import List, Dict, Set, Optional, Tuple
|
||||
from bpy.types import Collection, Object, Operator
|
||||
|
||||
|
||||
def outliner_get_active_id(context):
    """Return the Outliner's active ID, handling Blender 3.6/4.0 differences.

    Returns None when the context's area isn't an Outliner, or no ID can be
    determined.
    """
    if context.area.type != 'OUTLINER':
        return None

    if hasattr(context, 'id'):
        # Blender 4.0: Active ID is explicitly exposed to PyAPI, yay.
        return context.id

    selected = context.selected_ids
    if selected:
        # Blender 3.6 and below: We can only hope first selected ID happens to be the active one.
        return selected[0]
    return None
|
||||
|
||||
|
||||
class OUTLINER_OT_relink_overridden_asset(Operator):
    """Relink an overridden asset. Can be useful to recover assets from all sorts of broken states, but may lose un-keyed overridden values. Should preserve bone constraints, active actions of armatures, and any outside references to objects within the asset. Will also purge the .blend file and unlink the OVERRIDE_HIDDEN collection if present, out of necessity"""

    bl_idname = "object.relink_overridden_asset"
    bl_label = "Relink Overridden Asset"

    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(cls, context):
        # Available whenever an overridden ID can be determined from context.
        return cls.get_id(context)

    def invoke(self, context, _event):
        # Confirm with the user before doing anything destructive.
        return context.window_manager.invoke_props_dialog(self)

    @staticmethod
    def get_id(context) -> Optional[bpy.types.ID]:
        """Find the overridden ID to operate on, depending on the editor."""
        if context.area.type == 'OUTLINER':
            active_id = outliner_get_active_id(context)
            if active_id and active_id.override_library:
                return active_id
            return None

        # 3D Viewport (or anywhere else): prefer the object, then the collection.
        obj = context.object
        if obj and obj.override_library:
            return obj
        coll = context.collection
        if coll and coll.override_library:
            return coll
        return None

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = True
        layout.label(text="Relink this asset? Un-keyed values may get reset!")
        target = self.get_id(context)
        layout.prop(target.override_library, 'hierarchy_root')

    def execute(self, context):
        target = self.get_id(context)
        relink_single_override_hierarchy(context, target.override_library.hierarchy_root)
        return {'FINISHED'}
|
||||
|
||||
|
||||
def relink_all_override_hierarchies(context):
    """Re-create every override hierarchy in the file from its linked source,
    carrying over constraints/actions/keys, then purge the old datablocks.

    NOTE(review): relies on `better_purge`, which is not defined in this
    module's visible portion — presumably imported elsewhere; confirm.
    """
    hierarchy_roots = get_override_hierarchy_roots()
    new_hierarchy_roots = []

    for hierarchy_root in hierarchy_roots:
        print("Relinking override hierarchy: ", hierarchy_root.name)
        # Fresh override copy, linked into the same parent collections.
        new_hierarchy_root = recreate_override_hierarchy(context, hierarchy_root)
        # Carry over constraints and actions from the old hierarchy.
        relink_data(new_hierarchy_root, hierarchy_root)
        # Keyframe overridden-but-unkeyed values so they survive the relink.
        insert_missing_keys(new_hierarchy_root, hierarchy_root)
        # Redirect outside references via placeholder empties (a direct
        # old->new user_remap crashes; see remap_users_to_temp_empties).
        empty_map = remap_users_to_temp_empties(new_hierarchy_root, hierarchy_root)
        clear_collection_hierarchy_fake_user(hierarchy_root)
        remap_users_from_temp_empties(empty_map, new_hierarchy_root)
        new_hierarchy_roots.append(new_hierarchy_root)

    nuke_override_hidden()
    better_purge(context)

    # Rename only after the purge, presumably so the old (now removed)
    # datablocks can no longer occupy the desired names — confirm.
    for new_hierarchy_root in new_hierarchy_roots:
        restore_names(new_hierarchy_root)
|
||||
|
||||
|
||||
def get_override_hierarchy_roots():
    """Collect the hierarchy-root collections of all top-level override
    collections in the file.

    A collection nested inside a linked or overridden parent is not considered
    a root in this scene and is skipped.

    :return: set of hierarchy-root collections.
    """
    hierarchy_roots = set()
    for coll in bpy.data.collections:
        if not coll.override_library:
            continue
        parents = get_parent_collections(coll)
        # Idiom fix: generator expression instead of materializing a list for any().
        if any(parent.library or parent.override_library for parent in parents):
            # If this is a collection nested inside another override library, it is NOT considered a hierarchy root in this scene.
            continue
        hierarchy_roots.add(coll.override_library.hierarchy_root)
    return hierarchy_roots
|
||||
|
||||
|
||||
def relink_single_override_hierarchy(context, hierarchy_root: Collection):
    """Re-create one override hierarchy from its linked source and replace the
    old one, preserving constraints, actions, keyed values and outside users.

    The statement order here is load-bearing: placeholders must exist before
    the first purge, and names are only restored after the final purge.
    """
    new_hierarchy_root = recreate_override_hierarchy(context, hierarchy_root)

    # Carry over constraints/actions, then key overridden-but-unkeyed values.
    relink_data(new_hierarchy_root, hierarchy_root)
    insert_missing_keys(new_hierarchy_root, hierarchy_root)
    # Park outside references on placeholder empties before deleting old data.
    empty_map = remap_users_to_temp_empties(new_hierarchy_root, hierarchy_root)

    clear_collection_hierarchy_fake_user(hierarchy_root)
    nuke_override_hidden()
    bpy.ops.outliner.orphans_purge(do_local_ids=True, do_recursive=True, do_linked_ids=True)

    # Point the parked references at the new hierarchy's objects.
    remap_users_from_temp_empties(empty_map, new_hierarchy_root)

    bpy.ops.outliner.orphans_purge(do_local_ids=True, do_recursive=True, do_linked_ids=True)
    restore_names(new_hierarchy_root)
|
||||
|
||||
|
||||
def relink_data(new_hierarchy_root, old_hierarchy_root):
    """Copy object/pose-bone constraints and the active armature action from
    the old override hierarchy onto the corresponding new objects."""
    new_to_old_obj_map = map_new_to_old_objects(new_hierarchy_root, old_hierarchy_root)

    for new_obj, old_obj in new_to_old_obj_map.items():
        # Re-link Object Constraints.
        if old_obj:
            for old_con in old_obj.constraints:
                if old_con.name not in new_obj.constraints:
                    new_obj.constraints.copy(old_con)
                    # Mark Object with added constraints as editable.
                    new_obj.override_library.is_system_override = False

        if new_obj.type == 'ARMATURE':
            # Re-link Action.
            if old_obj.animation_data and old_obj.animation_data.action:
                new_obj.animation_data_create()
                new_obj.animation_data.action = old_obj.animation_data.action

            # Re-link Pose Constraints.
            for pb_old in old_obj.pose.bones:
                pb_new = new_obj.pose.bones[pb_old.name]
                for old_con in pb_old.constraints:
                    if old_con.name not in pb_new.constraints:
                        pb_new.constraints.copy(old_con)
|
||||
|
||||
|
||||
def map_new_to_old_objects(
    new_hierarchy_root: Collection,
    old_hierarchy_root: Collection,
) -> Dict[Object, Object]:
    """Build a name-agnostic object mapping between two override hierarchies.

    Both hierarchies override the same linked collection, so each object's
    linked reference serves as the common key.
    """
    assert (
        new_hierarchy_root.override_library.reference.library
        == old_hierarchy_root.override_library.reference.library
    ), "The two collections must be an override of the same linked collection."

    old_link_map = map_objects_of_linked_to_override_hierarchy(old_hierarchy_root)

    mapping = {}
    for new_obj in new_hierarchy_root.all_objects:
        if not new_obj.override_library:
            # Some objects could still be directly linked, ie. rig widgets.
            continue
        old_counterpart = old_link_map.get(new_obj.override_library.reference)
        if old_counterpart:
            mapping[new_obj] = old_counterpart

    return mapping
|
||||
|
||||
|
||||
def recreate_override_hierarchy(context, old_hierarchy_root: Collection) -> Collection:
    """Create a fresh overridden copy of an existing overridden collection,
    replacing it in existing collections with the fresh copy.
    - All collection assignments are preserved
    - Only Armature objects are marked as editable
    """
    linked_hierarchy_root = old_hierarchy_root.override_library.reference

    # Remember (and vacate) the parent collections so the copy can take the old spot.
    parent_colls = collection_unlink_from_parents(old_hierarchy_root)
    assert (
        parent_colls
    ), "Expected the override hierarchy root to be assigned to at least one parent collection."

    override_hierarchy_root = linked_hierarchy_root.override_hierarchy_create(
        context.scene, context.view_layer
    )

    # By default, this gets linked to the scene's root. We don't want that.
    context.scene.collection.children.unlink(override_hierarchy_root)

    # Link the collection to the parent collections.
    for parent_coll in parent_colls:
        parent_coll.children.link(override_hierarchy_root)

    # Armatures become user-editable (non-system) overrides.
    for obj in override_hierarchy_root.all_objects:
        if obj.type == 'ARMATURE':
            obj.override_library.is_system_override = False

    return override_hierarchy_root
|
||||
|
||||
|
||||
def get_overridden_but_not_animated_properties(
    hierarchy_root: Collection,
) -> Dict[Object, List[str]]:
    """For each overridden object in the hierarchy, collect the RNA paths that
    are overridden but neither driven nor keyed in the active action.

    :return: map from each object's linked reference to the list of such
        RNA paths (objects with none are omitted).
    """
    obj_to_prop_list_map = {}
    for obj in hierarchy_root.all_objects:
        if not obj.override_library or not obj.override_library.reference:
            continue
        all_overridden_props = [prop.rna_path for prop in obj.override_library.properties]

        # Perf fix: gather driven/animated paths into one set for O(1)
        # membership tests instead of scanning two lists per property.
        animated_or_driven = set()
        anim_data = obj.animation_data
        if anim_data and anim_data.drivers:
            animated_or_driven.update(fcurve.data_path for fcurve in anim_data.drivers)
        if anim_data and anim_data.action:
            animated_or_driven.update(fcurve.data_path for fcurve in anim_data.action.fcurves)

        props_that_need_keying = [
            rna_path for rna_path in all_overridden_props if rna_path not in animated_or_driven
        ]

        if props_that_need_keying:
            obj_to_prop_list_map[obj.override_library.reference] = props_that_need_keying

    return obj_to_prop_list_map
|
||||
|
||||
|
||||
def insert_missing_keys(new_hierarchy_root, old_hierarchy_root):
    """Keyframe values on the new hierarchy that were overridden (but not
    animated or driven) on the old one, so those values aren't lost when the
    old hierarchy is purged."""
    old_overridden_not_animated = get_overridden_but_not_animated_properties(old_hierarchy_root)
    new_overridden_not_animated = get_overridden_but_not_animated_properties(new_hierarchy_root)

    # Both maps below are keyed by the common linked reference objects.
    old_link_map = map_objects_of_linked_to_override_hierarchy(old_hierarchy_root)
    new_link_map = map_objects_of_linked_to_override_hierarchy(new_hierarchy_root)

    for linked_obj, old_prop_list in old_overridden_not_animated.items():
        if not old_prop_list or linked_obj not in new_overridden_not_animated:
            continue

        new_prop_list = new_overridden_not_animated[linked_obj]
        missing = []
        for rna_path in old_prop_list:
            if rna_path in ['animation_data.action']:
                # This should be overridden by now, not sure why it shows up here...
                continue
            if "is_active" in rna_path:
                continue
            owner_path, prop_name = rna_path_split_owner(rna_path)
            # Skip bookkeeping properties that shouldn't (or can't) be keyed.
            if prop_name in ["", "name", "mute", "lock", "nla_tracks"]:
                continue
            if rna_path not in new_prop_list:
                missing.append(rna_path)

        if missing:
            old_override_obj = old_link_map[linked_obj]
            new_override_obj = new_link_map[linked_obj]
            print(f"{old_override_obj.name} had manually overridden but not animated RNA paths: ")
            for rna_path in missing:
                # Copy the old value onto the new object, then key it in place.
                old_value = old_override_obj.path_resolve(rna_path)
                owner_path, prop_name = rna_path_split_owner(rna_path)
                new_owner = new_override_obj.path_resolve(owner_path)
                setattr(new_owner, prop_name, old_value)
                success = new_owner.keyframe_insert(prop_name)
                if success:
                    print("  Inserted keyframe:", rna_path, old_value)
                else:
                    print("  Failed to insert keyframe ", rna_path, old_value)
|
||||
|
||||
|
||||
def rna_path_split_owner(rna_path: str) -> Tuple[str, str]:
    """Split an RNA path into (owner path, final property).

    Two shapes of final component are recognized:
    - a regular property following a dot, e.g. ``.location``;
    - a custom property, which looks ``["like this"]`` (returned with its
      surrounding brackets).
    A path without either shape is returned as ``(rna_path, "")``.
    """
    if rna_path.endswith('"]'):
        # Custom property: split at the last opening bracket, keep it on the prop.
        owner, prop = rna_path.rsplit('["', maxsplit=1)
        return owner, '["' + prop

    if "." in rna_path:
        owner, _sep, prop = rna_path.rpartition(".")
        return owner, prop

    return rna_path, ""
|
||||
|
||||
|
||||
def remap_users_to_temp_empties(new_hierarchy_root, old_hierarchy_root) -> Dict[Object, Object]:
    """Redirect all users of the old hierarchy's objects onto placeholder
    empties, and return a map from each old object's linked reference to its
    placeholder (consumed later by `remap_users_from_temp_empties`)."""
    # We need to remap users from the old to the new objects, but doing
    # that in a straight forward way causes a crash.
    # So, let's create placeholder objects for each old object, that will
    # get user remapped to, then delete all the old stuff, then user remap from the placeholders to the new objects.

    empty_map = {}

    # NOTE(review): the NEW hierarchy root is passed here, although the
    # callee's parameter is named `linked_collection` — confirm whether the
    # old root's linked reference was intended instead.
    old_objs = list(old_hierarchy_root.all_objects) + get_objects_in_override_hidden(
        new_hierarchy_root
    )

    for old_obj in old_objs:
        if not old_obj.override_library or not old_obj.override_library.reference:
            continue
        # Create an empty object corresponding to each old object...
        name = "REMAP_" + str(old_obj.name)
        empty = bpy.data.objects.new(name=name, object_data=None)
        # Fake user keeps the placeholder alive through the purge step.
        empty.use_fake_user = True
        old_obj.user_remap(empty)
        empty_map[old_obj.override_library.reference] = empty

    return empty_map
|
||||
|
||||
|
||||
def remap_users_from_temp_empties(empty_map, new_hierarchy_root):
    """Redirect users of the placeholder empties onto the matching new objects,
    deleting each placeholder as it is consumed."""
    for new_obj in list(new_hierarchy_root.all_objects):
        if not new_obj.override_library:
            continue
        ref = new_obj.override_library.reference
        placeholder = empty_map.get(ref)
        if not placeholder:
            continue
        # Map users from empty to new obj, then delete the empty.
        placeholder.user_remap(new_obj)
        empty_map.pop(ref)
        bpy.data.objects.remove(placeholder)

    # Leftover placeholders had no new counterpart; remove them explicitly,
    # since their fake-user flag would otherwise keep them alive.
    for placeholder in empty_map.values():
        bpy.data.objects.remove(placeholder)
|
||||
|
||||
|
||||
def rename_override_objects():
    """Try renaming overridden objects back to the name of the linked ID they are overriding."""
    for o in bpy.data.objects:
        if not o.override_library:
            continue
        desired_name = o.override_library.reference.name
        if o.name == desired_name:
            continue

        # If another object holds the desired name, move it aside temporarily.
        occupier = bpy.data.objects.get((desired_name, None))
        if occupier:
            new_name = occupier.name + ".temp"
            occupier.name = new_name

        print("Renaming ob: ", o.name, " -> ", desired_name)
        o.name = desired_name

        # NOTE(review): at this point o.name == desired_name, so this assigns
        # the occupier the very name just taken — Blender will auto-suffix it
        # (".001"). If the intent was to give the occupier o's *previous*
        # name, that name was never saved. Confirm intended behavior.
        if occupier:
            occupier.name = o.name
|
||||
|
||||
|
||||
def get_mapping_from_linked_to_overriding_ids() -> Dict[bpy.types.ID, List[bpy.types.ID]]:
    """Build a mapping from linked datablocks to the local objects overriding them.

    A linked ID can be overridden by several local objects, hence the list.
    """
    override_map = {}
    for o in bpy.data.objects:
        if o.override_library:
            # Idiom: setdefault replaces the manual key-existence check.
            override_map.setdefault(o.override_library.reference, []).append(o)
    return override_map
|
||||
|
||||
|
||||
def reassign_objects_to_collections():
    """This function would re-assign objects to the right collections;
    This is blocked by the PyAPI though, so this code doesn't actually work."""
    override_map = get_mapping_from_linked_to_overriding_ids()

    for coll in bpy.context.scene.collection.children_recursive:
        if not coll.override_library:
            continue
        linked_coll = coll.override_library.reference
        for linked_id in linked_coll.objects:
            # Local overrides of this linked object, if any.
            override_ids = override_map.get(linked_id)
            if not override_ids:
                continue
            if len(override_ids) == 1:
                # Unambiguous: link the single override into the collection.
                override = override_ids[0]
                if not override.name in coll.objects:
                    coll.objects.link(override)
            else:
                print("Multiple overriding IDs, not sure which one to assign:", linked_id)
|
||||
|
||||
|
||||
def get_parent_collections(target_coll: bpy.types.Collection) -> Set[bpy.types.Collection]:
    """Return the set of collections that directly contain `target_coll`.

    Scene root collections are included among the candidates, since a
    collection can be linked directly under a scene.
    """
    candidates = [scene.collection for scene in bpy.data.scenes] + list(bpy.data.collections)
    return {
        candidate
        for candidate in candidates
        if any(child == target_coll for child in candidate.children)
    }
|
||||
|
||||
|
||||
def collection_unlink_from_parents(coll: bpy.types.Collection) -> Set[bpy.types.Collection]:
    """Remove `coll` from every collection that contains it, returning the set
    of those former parents (so the caller can re-link something later)."""
    parents = get_parent_collections(coll)
    for parent in parents:
        parent.children.unlink(coll)
    return parents
|
||||
|
||||
|
||||
def clear_collection_hierarchy_fake_user(coll: bpy.types.Collection):
    """Turn off the Fake User flag on `coll` and all of its child collections,
    so the entire hierarchy becomes purgeable."""
    # Distinct loop name: the original shadowed the parameter here.
    for member in [coll] + coll.children_recursive:
        member.use_fake_user = False
|
||||
|
||||
|
||||
def map_objects_of_linked_to_override_hierarchy(
    root_override: bpy.types.Collection,
) -> Dict[bpy.types.Object, bpy.types.Object]:
    """Map each linked object to its override within the given hierarchy.

    Directly linked (non-override) objects in the hierarchy are skipped.
    """
    return {
        obj.override_library.reference: obj
        for obj in root_override.all_objects
        if obj.override_library
    }
|
||||
|
||||
|
||||
def restore_names(override_root: bpy.types.Collection):
    """Rename every overridden object and collection in the hierarchy back to
    the name of the linked ID it references."""
    for ob in override_root.all_objects:
        reference = ob.override_library.reference if ob.override_library else None
        if reference and ob.name != reference.name:
            ob.name = reference.name
    for child_coll in [override_root] + override_root.children_recursive:
        reference = child_coll.override_library.reference if child_coll.override_library else None
        if reference and child_coll.name != reference.name:
            child_coll.name = reference.name
|
||||
|
||||
|
||||
def __cleanup_override_hidden(override_root: bpy.types.Collection):
    """This sadly doesn't work.
    It was meant to unlink only those objects from the OVERRIDE_HIDDEN collection,
    which have copies in the passed override_root collection (override referencing same linked ID)

    But I don't have time to troubleshoot it; It's easier to just nuke the OVERRIDE_HIDDEN collection.
    """
    # NOTE(review): dead code kept for reference — nuke_override_hidden() is the
    # approach actually used. The debug prints below are leftovers from troubleshooting.
    override_hidden = bpy.data.collections.get('OVERRIDE_HIDDEN')
    if not override_hidden:
        return

    # linked reference -> its override inside OVERRIDE_HIDDEN.
    link_map = map_objects_of_linked_to_override_hierarchy(override_hidden)

    print("LINK MAP:")
    for key, value in link_map.items():
        print(key, " : ", value)

    objs_to_remove = []

    # Collect OVERRIDE_HIDDEN objects whose linked reference also has an
    # override inside the passed hierarchy (i.e. duplicated overrides).
    for obj in override_root.all_objects:
        if not obj.override_library:
            continue
        hidden_obj = link_map.get(obj.override_library.reference)
        if not hidden_obj:
            continue
        print("MATCH: ", obj, " : ", hidden_obj)
        objs_to_remove.append(hidden_obj)

    # Unlink after iteration is complete, to avoid mutating while iterating.
    for obj in objs_to_remove:
        override_hidden.objects.unlink(obj)
|
||||
|
||||
|
||||
def nuke_override_hidden():
    """Delete every collection whose name contains 'OVERRIDE_HIDDEN'."""
    # Snapshot the matches first, so we never remove from the bpy collection
    # we are iterating over.
    doomed = [coll for coll in bpy.data.collections if 'OVERRIDE_HIDDEN' in coll.name]
    for coll in doomed:
        bpy.data.collections.remove(coll)
|
||||
|
||||
|
||||
def get_objects_in_override_hidden(
    linked_collection: bpy.types.Collection,
) -> List[bpy.types.Object]:
    """Return overridden objects living in any 'OVERRIDE_HIDDEN' collection
    whose linked reference is contained in the given linked collection.

    Returns an empty list when no OVERRIDE_HIDDEN collections exist.
    """
    override_hidden_list = [c for c in bpy.data.collections if 'OVERRIDE_HIDDEN' in c.name]
    if not override_hidden_list:
        return []

    # Set instead of list: membership is tested once per hidden object, and
    # the original list scan made this loop O(hidden * linked).
    all_linked_objs = set(linked_collection.all_objects)
    ret = []

    for coll in override_hidden_list:
        for obj in coll.all_objects:
            if not obj.override_library:
                continue
            if obj.override_library.reference in all_linked_objs:
                ret.append(obj)

    return ret
|
||||
|
||||
|
||||
def draw_relink_ui(self, context):
    """Menu draw function: add the Purge & Re-link entry to a liboverride menu."""
    layout = self.layout
    layout.separator()
    layout.operator(OUTLINER_OT_relink_overridden_asset.bl_idname, text="Purge & Re-link")
|
||||
|
||||
|
||||
# Classes picked up by the add-on's central registration helper.
registry = [
    OUTLINER_OT_relink_overridden_asset,
]
|
||||
|
||||
|
||||
def register():
    # Expose the operator in both the 3D Viewport and the Outliner
    # Library Override menus.
    bpy.types.VIEW3D_MT_object_liboverride.append(draw_relink_ui)
    bpy.types.OUTLINER_MT_liboverride.append(draw_relink_ui)
|
||||
|
||||
|
||||
def unregister():
    # Remove the menu entries added in register(). The previous code appended
    # to OUTLINER_MT_liboverride a second time instead of removing the
    # VIEW3D_MT_object_liboverride entry, leaving stale draw callbacks in
    # both menus after unregistration.
    bpy.types.VIEW3D_MT_object_liboverride.remove(draw_relink_ui)
    bpy.types.OUTLINER_MT_liboverride.remove(draw_relink_ui)
|
||||
@@ -0,0 +1,263 @@
|
||||
# SPDX-FileCopyrightText: 2025 Blender Studio Tools Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
import bpy
|
||||
from bpy.props import StringProperty, CollectionProperty
|
||||
from ..id_types import get_id, get_id_storage_by_type_str, get_datablock_icon, get_library_icon
|
||||
|
||||
|
||||
class BLENLOG_OT_report_fake_users(bpy.types.Operator):
    """Report Fake User IDs. Ignores Text and Brush IDs"""

    bl_idname = "blenlog.report_fake_users"
    bl_label = "Report Fake Users"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    def execute(self, context):
        user_map = bpy.data.user_map()
        blenlog = context.scene.blender_log

        category = "Fake User ID"
        blenlog.clear_category(category)

        for datablock, users in user_map.items():
            # Linked and overridden IDs are not local data; skip them.
            if datablock.library or datablock.override_library:
                continue
            # Brushes and Texts conventionally carry fake users; ignore them.
            if datablock.id_type in {'BRUSH', 'TEXT'}:
                continue
            if not datablock.use_fake_user:
                continue
            blenlog.add(
                name=f"{datablock.id_type}: {datablock.name} (Users: {len(users)})",
                category=category,
                description="Datablocks with fake users can cause further referenced datablocks to linger in the file. It is recommended not to use fake users, in order to keep files clear of trash data.",
                icon='FAKE_USER_ON',
                operator=BLENLOG_OT_clear_fake_user.bl_idname,
                op_kwargs={'id_name': datablock.name, 'id_type': datablock.id_type},
                op_icon='FAKE_USER_OFF',
            )

        return {'FINISHED'}
|
||||
|
||||
|
||||
class BLENLOG_OT_remap_users(bpy.types.Operator):
    """Remap users of an ID to another of the same type"""

    bl_idname = "blenlog.remap_users"
    bl_label = "Remap Users"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    # Name of the ID whose users should be redirected.
    redundant_id: StringProperty()
    # ID type string shared by both IDs, e.g. 'OBJECT'.
    id_type: StringProperty()
    # Name of the ID that will take over the users.
    preserved_id: StringProperty()

    def execute(self, context):
        # Resolve both names up front; either ID may have been deleted since
        # the log entry that launched this operator was created.
        resolved = []
        for id_name in (self.redundant_id, self.preserved_id):
            datablock = get_id(id_name, self.id_type)
            if not datablock:
                self.report({'ERROR'}, f"ID no longer exists: {id_name}.")
                return {'CANCELLED'}
            resolved.append(datablock)

        redundant, preserved = resolved
        redundant.user_remap(preserved)
        # Drop the fake user so the now-unused ID can be purged.
        redundant.use_fake_user = False
        self.report({'INFO'}, f"{self.redundant_id} has been replaced with {self.preserved_id}")

        context.scene.blender_log.remove_active()

        return {'FINISHED'}
|
||||
|
||||
|
||||
### Remap Users
class RemapTarget(bpy.types.PropertyGroup):
    # Intentionally empty: only the built-in `name` string member is used,
    # as entries of the Scene-level remap target/library CollectionProperties.
    pass
|
||||
|
||||
|
||||
# TODO: This code also exists in the 3D Viewport Pie Menus add-on. Keep code in sync at least?
|
||||
class BLENLOG_OT_remap_users_ui(bpy.types.Operator):
    """Remap users of a selected ID to any other ID of the same type"""

    bl_idname = "outliner.remap_users"
    bl_label = "Remap Users"
    bl_options = {'INTERNAL', 'UNDO'}

    def update_library_path(self, context):
        """Update callback: rebuild the candidate remap targets whenever the
        selected library changes."""
        # Prepare the ID selector.
        blenlog_remap_targets = context.scene.blenlog_remap_targets
        blenlog_remap_targets.clear()
        source_id = get_id(self.id_name_source, self.id_type, self.library_path_source)
        for id in get_id_storage_by_type_str(self.id_type)[0]:
            if id == source_id:
                # An ID cannot be remapped to itself.
                continue
            # Offer only IDs from the chosen library (or local data).
            if (self.library_path == 'Local Data' and not id.library) or (
                id.library and (self.library_path == id.library.filepath)
            ):
                id_entry = blenlog_remap_targets.add()
                id_entry.name = id.name

    library_path: StringProperty(
        name="Library",
        description="Library path, if we want to remap to a linked ID",
        update=update_library_path,
    )
    id_type: StringProperty(description="ID type, eg. 'OBJECT' or 'MESH'")
    library_path_source: StringProperty()
    id_name_source: StringProperty(
        name="Source ID Name", description="Name of the ID we're remapping the users of"
    )
    id_name_target: StringProperty(
        name="Target ID Name", description="Name of the ID we're remapping users to"
    )

    def invoke(self, context, _event):
        # Populate the blenlog_remap_targets string list with possible options based on
        # what was passed to the operator.

        assert (
            self.id_type and self.id_name_source
        ), "Error: UI must provide ID and ID type to this operator."

        # Prepare the library selector.
        blenlog_remap_target_libs = context.scene.blenlog_remap_target_libs
        blenlog_remap_target_libs.clear()
        local = blenlog_remap_target_libs.add()
        local.name = "Local Data"
        source_id = get_id(self.id_name_source, self.id_type, self.library_path_source)
        for lib in bpy.data.libraries:
            # Offer a library only if it contains at least one ID of the
            # same type as the source.
            for id in lib.users_id:
                if type(id) == type(source_id):
                    lib_entry = blenlog_remap_target_libs.add()
                    lib_entry.name = lib.filepath
                    break

        self.library_path = "Local Data"
        # If the source name looks like a ".001"-style duplicate, suggest the
        # un-suffixed datablock as the remap target. The length guard fixes an
        # IndexError that `name[-4]` raised for names shorter than 4 characters
        # (and guards against get_id() having returned None).
        if source_id and len(source_id.name) >= 4 and source_id.name[-4] == ".":
            storage = get_id_storage_by_type_str(self.id_type)[0]
            suggestion = storage.get(source_id.name[:-4])
            if suggestion:
                self.id_name_target = suggestion.name
                if suggestion.library:
                    self.library_path = suggestion.library.filepath

        return context.window_manager.invoke_props_dialog(self, width=800)

    def draw(self, context):
        """Draw the source ID (read-only) and the library/target selectors."""
        layout = self.layout
        layout.use_property_split = True
        layout.use_property_decorate = False

        scene = context.scene
        row = layout.row()
        id = get_id(self.id_name_source, self.id_type, self.library_path_source)
        id_icon = get_datablock_icon(id)
        split = row.split()
        split.row().label(text="Anything that was referencing this:")
        row = split.row()
        row.prop(self, 'id_name_source', text="", icon=id_icon)
        # The source is informational only; it must not be edited here.
        row.enabled = False

        layout.separator()
        col = layout.column()
        col.label(text="Will now reference this instead: ")
        # Only show the library selector when there is more than just "Local Data".
        if len(scene.blenlog_remap_target_libs) > 1:
            col.prop_search(
                self,
                'library_path',
                scene,
                'blenlog_remap_target_libs',
                icon=get_library_icon(self.library_path),
            )
        col.prop_search(
            self,
            'id_name_target',
            scene,
            'blenlog_remap_targets',
            text="Datablock",
            icon=id_icon,
        )

    def execute(self, context):
        source_id = get_id(self.id_name_source, self.id_type, self.library_path_source)
        target_id = get_id(self.id_name_target, self.id_type, self.library_path)
        assert source_id and target_id, "Error: Failed to find source or target."

        source_id.user_remap(target_id)
        return {'FINISHED'}
|
||||
|
||||
|
||||
class BLENLOG_OT_clear_fake_user(bpy.types.Operator):
    """Clear the fake user flag of an ID."""

    bl_idname = "blenlog.clear_fake_user"
    bl_label = "Clear Fake User"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    # Name and type string identifying the ID to modify.
    id_name: StringProperty()
    id_type: StringProperty()

    def execute(self, context):
        datablock = get_id(self.id_name, self.id_type)
        if datablock:
            datablock.use_fake_user = False
            self.report(
                {'INFO'}, f"{self.id_type} {self.id_name} no longer marked with a fake user."
            )
        else:
            # The ID may have been purged since the log entry was created.
            self.report({'INFO'}, f"{self.id_type} {self.id_name} had already been removed.")

        # Either way, the corresponding log entry is resolved.
        context.scene.blender_log.remove_active()

        return {'FINISHED'}
|
||||
|
||||
|
||||
class BLENLOG_OT_report_missing_IDs(bpy.types.Operator):
    """Report linked IDs whose datablock is missing from its library"""

    bl_idname = "blenlog.report_missing_ids"
    bl_label = "Report Missing IDs"
    bl_options = {'INTERNAL', 'REGISTER', 'UNDO'}

    def execute(self, context):
        user_map = bpy.data.user_map()

        blenlog = context.scene.blender_log

        category = "Missing ID"
        blenlog.clear_category(category)

        for id, users in user_map.items():
            # A missing ID is the placeholder Blender creates when a linked
            # datablock can no longer be found in its library. The previous
            # code was copy-pasted from the fake-user report: it skipped all
            # linked IDs and tested `use_fake_user`, so it could never report
            # a missing (always linked) ID.
            if not id.is_missing:
                continue
            blenlog.add(
                name=f"{id.id_type}: {id.name} (Users: {len(users)})",
                category=category,
                description="A linked ID was being referenced locally, and then removed from its library. The missing ID can be remapped to another.",
                icon='LIBRARY_DATA_BROKEN',
                # TODO(review): offering a remap operator would match the
                # description better; the clear-fake-user button is kept from
                # the original code for now.
                operator=BLENLOG_OT_clear_fake_user.bl_idname,
                op_kwargs={'id_name': id.name, 'id_type': id.id_type},
                op_icon='FAKE_USER_OFF',
            )

        return {'FINISHED'}
|
||||
|
||||
|
||||
# Classes picked up by the add-on's central registration helper.
# NOTE(review): BLENLOG_OT_report_missing_IDs is defined above but absent from
# this list — confirm whether that is intentional (work in progress) or an oversight.
registry = [
    BLENLOG_OT_report_fake_users,
    BLENLOG_OT_remap_users,
    RemapTarget,
    BLENLOG_OT_remap_users_ui,
    BLENLOG_OT_clear_fake_user,
]
|
||||
|
||||
|
||||
def register():
    # Scene-level storage backing the remap dialog's prop_search fields:
    # candidate target IDs and candidate libraries. Both reuse RemapTarget,
    # which only needs its built-in `name` member.
    bpy.types.Scene.blenlog_remap_targets = CollectionProperty(type=RemapTarget)
    bpy.types.Scene.blenlog_remap_target_libs = CollectionProperty(type=RemapTarget)
|
||||
|
||||
|
||||
def unregister():
    # Remove the Scene properties added in register().
    del bpy.types.Scene.blenlog_remap_targets
    del bpy.types.Scene.blenlog_remap_target_libs
|
||||
Reference in New Issue
Block a user