diff --git a/openpype/hosts/blender/api/lib.py b/openpype/hosts/blender/api/lib.py
index e80ed61bc81..032c8e21c82 100644
--- a/openpype/hosts/blender/api/lib.py
+++ b/openpype/hosts/blender/api/lib.py
@@ -365,3 +365,62 @@ def maintained_time():
         yield
     finally:
         bpy.context.scene.frame_current = current_time
+
+
+def get_all_parents(obj):
+    """Get all recursive parents of object.
+
+    Arguments:
+        obj (bpy.types.Object): Object to get all parents for.
+
+    Returns:
+        List[bpy.types.Object]: All parents of object
+
+    """
+    result = []
+    while True:
+        obj = obj.parent
+        if not obj:
+            break
+        result.append(obj)
+    return result
+
+
+def get_highest_root(objects):
+    """Get the highest object (the fewest parents) among the objects.
+
+    If multiple objects have the same number of parents (or no parents), the
+    first object found in the input iterable will be returned.
+
+    Note that this will *not* return objects outside of the input list, so
+    it will not return the root node for a child node. It is purely
+    intended to find the highest object among the given objects. To get
+    the root of a single object instead, use e.g. `get_all_parents(obj)[-1]`.
+
+    Arguments:
+        objects (List[bpy.types.Object]): Objects to find the highest root in.
+
+    Returns:
+        Optional[bpy.types.Object]: First highest root found or None if no
+            `bpy.types.Object` found in input list.
+
+    """
+    included_objects = {obj.name_full for obj in objects}
+    num_parents_to_obj = {}
+    for obj in objects:
+        if isinstance(obj, bpy.types.Object):
+            parents = get_all_parents(obj)
+            # Keep only the parents that are among the input objects
+            parents = [parent for parent in parents if
+                       parent.name_full in included_objects]
+            if not parents:
+                # A node without parents must be a highest root
+                return obj
+
+            num_parents_to_obj.setdefault(len(parents), obj)
+
+    if not num_parents_to_obj:
+        return
+
+    minimum_parent = min(num_parents_to_obj)
+    return num_parents_to_obj[minimum_parent]
diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py
index 568d8f66955..b04348bb5d5 100644
--- a/openpype/hosts/blender/api/plugin.py
+++ b/openpype/hosts/blender/api/plugin.py
@@ -25,7 +25,8 @@
 )
 from .lib import imprint
 
-VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"]
+VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx",
+                    ".usd", ".usdc", ".usda"]
 
 
 def prepare_scene_name(
diff --git a/openpype/hosts/blender/plugins/create/create_usd.py b/openpype/hosts/blender/plugins/create/create_usd.py
new file mode 100644
index 00000000000..b7e6db077a9
--- /dev/null
+++ b/openpype/hosts/blender/plugins/create/create_usd.py
@@ -0,0 +1,30 @@
+"""Create a USD Export."""
+
+from openpype.hosts.blender.api import plugin, lib
+
+
+class CreateUSD(plugin.BaseCreator):
+    """Create USD Export"""
+
+    identifier = "io.openpype.creators.blender.usd"
+    name = "usdMain"
+    label = "USD"
+    family = "usd"
+    icon = "gears"
+
+    def create(
+        self, subset_name: str, instance_data: dict, pre_create_data: dict
+    ):
+        # Run parent create method
+        collection = super().create(
+            subset_name, instance_data, pre_create_data
+        )
+
+        if pre_create_data.get("use_selection"):
+            objects = lib.get_selection()
+            for obj in objects:
+                collection.objects.link(obj)
+                if obj.type == 'EMPTY':
+                    objects.extend(obj.children)
+
+        return collection
diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py
index d7e82d1900c..61c5a038815 100644
--- a/openpype/hosts/blender/plugins/load/load_abc.py
+++ b/openpype/hosts/blender/plugins/load/load_abc.py
@@ -26,10 
+26,11 @@ class CacheModelLoader(plugin.AssetLoader): Note: At least for now it only supports Alembic files. """ - families = ["model", "pointcache", "animation"] - representations = ["abc"] + families = ["model", "pointcache", "animation", "usd"] + representations = ["abc", "usd"] - label = "Load Alembic" + # TODO: Should USD loader be a separate loader instead? + label = "Load Alembic/USD" icon = "code-fork" color = "orange" @@ -53,10 +54,21 @@ def _process(self, libpath, asset_group, group_name): plugin.deselect_all() relative = bpy.context.preferences.filepaths.use_relative_paths - bpy.ops.wm.alembic_import( - filepath=libpath, - relative_path=relative - ) + + if any(libpath.lower().endswith(ext) + for ext in [".usd", ".usda", ".usdc"]): + # USD + bpy.ops.wm.usd_import( + filepath=libpath, + relative_path=relative + ) + + else: + # Alembic + bpy.ops.wm.alembic_import( + filepath=libpath, + relative_path=relative + ) imported = lib.get_selection() diff --git a/openpype/hosts/blender/plugins/publish/collect_instance.py b/openpype/hosts/blender/plugins/publish/collect_instance.py index 4685472213e..a278b95ac8c 100644 --- a/openpype/hosts/blender/plugins/publish/collect_instance.py +++ b/openpype/hosts/blender/plugins/publish/collect_instance.py @@ -12,7 +12,7 @@ class CollectBlenderInstanceData(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder hosts = ["blender"] families = ["model", "pointcache", "animation", "rig", "camera", "layout", - "blendScene"] + "blendScene", "usd"] label = "Collect Instance" def process(self, instance): diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py index 4fc8230a1b0..1ec1621c67f 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -7,40 +7,10 @@ from openpype.pipeline import publish from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.lib import get_highest_root from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -def get_all_parents(obj): - """Get all recursive parents of object""" - result = [] - while True: - obj = obj.parent - if not obj: - break - result.append(obj) - return result - - -def get_highest_root(objects): - # Get the highest object that is also in the collection - included_objects = {obj.name_full for obj in objects} - num_parents_to_obj = {} - for obj in objects: - if isinstance(obj, bpy.types.Object): - parents = get_all_parents(obj) - # included parents - parents = [parent for parent in parents if - parent.name_full in included_objects] - if not parents: - # A node without parents must be a highest root - return obj - - num_parents_to_obj.setdefault(len(parents), obj) - - minimum_parent = min(num_parents_to_obj) - return num_parents_to_obj[minimum_parent] - - class ExtractAnimationFBX( publish.Extractor, publish.OptionalPyblishPluginMixin, @@ -68,19 +38,24 @@ def process(self, instance): # and for those objects include the children hierarchy # TODO: Would it make more sense for the Collect Instance collector # to also always retrieve all the children? 
-        objects = set(asset_group.objects)
 
         # From the direct children of the collection find the 'root' node
         # that we want to export - it is the 'highest' node in a hierarchy
-        root = get_highest_root(objects)
+        root = get_highest_root(asset_group.objects)
+        if not root:
+            raise publish.KnownPublishError(
+                f"No root object found among the objects in "
+                f"asset group: {asset_group.name}"
+            )
 
+        objects = set(asset_group.objects)
         for obj in list(objects):
             objects.update(obj.children_recursive)
 
         # Find all armatures among the objects, assume to find only one
         armatures = [obj for obj in objects if obj.type == "ARMATURE"]
         if not armatures:
-            raise RuntimeError(
+            raise publish.KnownPublishError(
                 f"Unable to find ARMATURE in collection: "
                 f"{asset_group.name}"
             )
diff --git a/openpype/hosts/blender/plugins/publish/extract_usd.py b/openpype/hosts/blender/plugins/publish/extract_usd.py
new file mode 100644
index 00000000000..de0e880e446
--- /dev/null
+++ b/openpype/hosts/blender/plugins/publish/extract_usd.py
@@ -0,0 +1,79 @@
+import os
+
+import bpy
+
+from openpype.pipeline import publish
+from openpype.hosts.blender.api import plugin, lib
+
+
+class ExtractUSD(publish.Extractor):
+    """Extract as USD."""
+
+    label = "Extract USD"
+    hosts = ["blender"]
+    families = ["usd"]
+
+    def process(self, instance):
+
+        # Ignore runtime instances (e.g. USD layers)
+        # TODO: This is better done via more specific `families`
+        if not instance.data.get("transientData", {}).get("instance_node"):
+            return
+
+        # Define extract output file path
+        stagingdir = self.staging_dir(instance)
+        filename = f"{instance.name}.usd"
+        filepath = os.path.join(stagingdir, filename)
+
+        # Perform extraction
+        self.log.debug("Performing extraction...")
+
+        # Select all members to "export selected"
+        plugin.deselect_all()
+
+        selected = []
+        for obj in instance:
+            if isinstance(obj, bpy.types.Object):
+                obj.select_set(True)
+                selected.append(obj)
+
+        root = lib.get_highest_root(objects=instance[:])
+        if not root:
+            instance_node = instance.data["transientData"]["instance_node"]
+            raise publish.KnownPublishError(
+                f"No root object found in instance: {instance_node.name}"
+            )
+        self.log.debug(f"Exporting using active root: {root.name}")
+
+        context = plugin.create_blender_context(
+            active=root, selected=selected)
+
+        # Export USD
+        bpy.ops.wm.usd_export(
+            context,
+            filepath=filepath,
+            selected_objects_only=True,
+            export_textures=False,
+            relative_paths=False,
+            export_animation=False,
+            export_hair=False,
+            export_uvmaps=True,
+            # TODO: add for new version of Blender (4+?)
+            # export_mesh_colors=True,
+            export_normals=True,
+            export_materials=True,
+            use_instancing=True
+        )
+
+        plugin.deselect_all()
+
+        # Add representation
+        representation = {
+            'name': 'usd',
+            'ext': 'usd',
+            'files': filename,
+            "stagingDir": stagingdir,
+        }
+        instance.data.setdefault("representations", []).append(representation)
+        self.log.debug("Extracted instance '%s' to: %s",
+                       instance.name, representation)
diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py
index edd50f10c19..4ef342922d0 100644
--- a/openpype/hosts/houdini/api/lib.py
+++ b/openpype/hosts/houdini/api/lib.py
@@ -525,6 +525,30 @@ def maintained_selection():
         node.setSelected(on=True)
 
 
+@contextmanager
+def parm_values(overrides):
+    """Override Parameter values during the context.
+
+    Arguments:
+        overrides (List[Tuple[hou.Parm, Any]]): The overrides per parm
+            that should be applied during context.
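+
+    Example (illustrative only; assumes `rop` is an existing ROP node):
+        >>> with parm_values([(rop.parm("trange"), 1)]):
+        ...     rop.render()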
+ + """ + + originals = [] + try: + for parm, value in overrides: + originals.append((parm, parm.eval())) + parm.set(value) + yield + finally: + for parm, value in originals: + # Parameter might not exist anymore so first + # check whether it's still valid + if hou.parm(parm.path()): + parm.set(value) + + def reset_framerange(): """Set frame range and FPS to current asset""" diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index e162d0e4614..d543fd5c439 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -197,6 +197,7 @@ def create(self, subset_name, instance_data, pre_create_data): instance_data["instance_node"] = instance_node.path() instance_data["instance_id"] = instance_node.path() + instance_data["families"] = self.get_publish_families() instance = CreatedInstance( self.family, subset_name, @@ -245,6 +246,7 @@ def collect_instances(self): node_path = instance.path() node_data["instance_id"] = node_path node_data["instance_node"] = node_path + node_data["families"] = self.get_publish_families() created_instance = CreatedInstance.from_existing( node_data, self @@ -270,6 +272,7 @@ def imprint(self, node, values, update=False): # from the node's path values.pop("instance_node", None) values.pop("instance_id", None) + values.pop("families", None) imprint(node, values, update=update) def remove_instances(self, instances): @@ -311,6 +314,22 @@ def customize_node_look( node.setUserData('nodeshape', shape) node.setColor(color) + def get_publish_families(self): + """Return families for the instances of this creator. + + Allow a Creator to define multiple families so that a creator can + e.g. specify `usd` and `usdrop`. + + There is no need to override this method if you only have the + primary family defined by the `family` property as that will always + be set. + + Returns: + list: families for instances of this creator + + """ + return [] + def get_network_categories(self): """Return in which network view type this creator should show. diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index b935dfdf309..0a672474644 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -2,93 +2,15 @@ import contextlib import logging +import json -from qtpy import QtWidgets, QtCore, QtGui - -from openpype import style -from openpype.client import get_asset_by_name -from openpype.pipeline import legacy_io -from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget - -from pxr import Sdf +import hou +from pxr import Sdf, Vt log = logging.getLogger(__name__) -class SelectAssetDialog(QtWidgets.QWidget): - """Frameless assets dialog to select asset with double click. - - Args: - parm: Parameter where selected asset name is set. 
-    """
-
-    def __init__(self, parm):
-        self.setWindowTitle("Pick Asset")
-        self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup)
-
-        assets_widget = SingleSelectAssetsWidget(legacy_io, parent=self)
-
-        layout = QtWidgets.QHBoxLayout(self)
-        layout.addWidget(assets_widget)
-
-        assets_widget.double_clicked.connect(self._set_parameter)
-        self._assets_widget = assets_widget
-        self._parm = parm
-
-    def _set_parameter(self):
-        name = self._assets_widget.get_selected_asset_name()
-        self._parm.set(name)
-        self.close()
-
-    def _on_show(self):
-        pos = QtGui.QCursor.pos()
-        # Select the current asset if there is any
-        select_id = None
-        name = self._parm.eval()
-        if name:
-            project_name = legacy_io.active_project()
-            db_asset = get_asset_by_name(project_name, name, fields=["_id"])
-            if db_asset:
-                select_id = db_asset["_id"]
-
-        # Set stylesheet
-        self.setStyleSheet(style.load_stylesheet())
-        # Refresh assets (is threaded)
-        self._assets_widget.refresh()
-        # Select asset - must be done after refresh
-        if select_id is not None:
-            self._assets_widget.select_asset(select_id)
-
-        # Show cursor (top right of window) near cursor
-        self.resize(250, 400)
-        self.move(self.mapFromGlobal(pos) - QtCore.QPoint(self.width(), 0))
-
-    def showEvent(self, event):
-        super(SelectAssetDialog, self).showEvent(event)
-        self._on_show()
-
-
-def pick_asset(node):
-    """Show a user interface to select an Asset in the project
-
-    When double clicking an asset it will set the Asset value in the
-    'asset' parameter.
-
-    """
-
-    parm = node.parm("asset_name")
-    if not parm:
-        log.error("Node has no 'asset' parameter: %s", node)
-        return
-
-    # Construct a frameless popup so it automatically
-    # closes when clicked outside of it.
-    global tool
-    tool = SelectAssetDialog(parm)
-    tool.show()
-
-
 def add_usd_output_processor(ropnode, processor):
     """Add USD Output Processor to USD Rop node.
 
@@ -199,11 +121,13 @@ def get_usd_rop_loppath(node):
     return node.parm("loppath").evalAsNode()
 
 
-def get_layer_save_path(layer):
+def get_layer_save_path(layer, expand_string=True):
     """Get custom HoudiniLayerInfo->HoudiniSavePath from SdfLayer.
 
     Args:
         layer (pxr.Sdf.Layer): The Layer to retrieve the save pah data from.
+        expand_string (bool): Whether to expand any houdini vars in the save
+            path before computing the absolute path.
 
     Returns:
         str or None: Path to save to when data exists.
@@ -216,6 +140,8 @@ def get_layer_save_path(layer):
     save_path = hou_layer_info.customData.get("HoudiniSavePath", None)
     if save_path:
         # Unfortunately this doesn't actually resolve the full absolute path
+        if expand_string:
+            save_path = hou.text.expandString(save_path)
         return layer.ComputeAbsolutePath(save_path)
 
 
@@ -261,7 +187,18 @@ def iter_layer_recursive(layer):
             yield layer
 
 
-def get_configured_save_layers(usd_rop):
+def get_configured_save_layers(usd_rop, strip_above_layer_break=True):
+    """Retrieve the layer save paths from a USD ROP.
+
+    Arguments:
+        usd_rop (hou.RopNode): USD ROP node.
+        strip_above_layer_break (Optional[bool]): Whether to exclude any
+            layers that are above layer breaks. This defaults to True.
+
+    Returns:
+        List[Sdf.Layer]: The layers with configured save paths.
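+
+    Example (illustrative only; assumes `rop` is a USD ROP node):
+        >>> for layer in get_configured_save_layers(rop):
+        ...     print(get_layer_save_path(layer))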
+
+    """
     lop_node = get_usd_rop_loppath(usd_rop)
     stage = lop_node.stage(apply_viewport_overrides=False)
@@ -272,10 +209,101 @@
     root_layer = stage.GetRootLayer()
 
+    if strip_above_layer_break:
+        layers_above_layer_break = set(lop_node.layersAboveLayerBreak())
+    else:
+        layers_above_layer_break = set()
+
     save_layers = []
     for layer in iter_layer_recursive(root_layer):
+        if (
+            strip_above_layer_break and
+            layer.identifier in layers_above_layer_break
+        ):
+            continue
+
         save_path = get_layer_save_path(layer)
         if save_path is not None:
             save_layers.append(layer)
 
     return save_layers
+
+
+def setup_lop_python_layer(layer, node, savepath=None,
+                           apply_file_format_args=True):
+    """Set up Sdf.Layer with HoudiniLayerInfo prim for metadata.
+
+    This is the same as `loputils.createPythonLayer` but can be run on top
+    of `pxr.Sdf.Layer` instances that are already created in a Python LOP
+    node. That's useful if your layer creation itself is built to be DCC
+    agnostic; then you only need to run this afterwards per layer to make
+    the layer explicitly stored for Houdini.
+
+    By default, Houdini doesn't apply the FileFormatArguments supplied to
+    the created layer; however it does support USD's file save suffix
+    of `:SDF_FORMAT_ARGS:` to supply them. With `apply_file_format_args` any
+    file format args set on the layer's creation will be added to the
+    save path through that.
+
+    Note: The `node.addHeldLayer` call only works from a LOP Python node
+    after `node.editableStage()` or `node.editableLayer()` was called.
+
+    Arguments:
+        layer (Sdf.Layer): An existing layer (most likely just created
+            in the current runtime)
+        node (hou.LopNode): The Python LOP node to attach the layer to so
+            it does not get garbage collected/mangled downstream.
+        savepath (Optional[str]): When provided the HoudiniSaveControl
+            will be set to Explicit with HoudiniSavePath to this path.
+        apply_file_format_args (Optional[bool]): When enabled any
+            FileFormatArgs defined for the layer on creation will be set
+            in the HoudiniSavePath so the Houdini USD ROP will use them too.
+
+    Returns:
+        Sdf.PrimSpec: The created HoudiniLayerInfo prim spec.
+
+    """
+    # Add a Houdini Layer Info prim where we can put the save path.
+    p = Sdf.CreatePrimInLayer(layer, '/HoudiniLayerInfo')
+    p.specifier = Sdf.SpecifierDef
+    p.typeName = 'HoudiniLayerInfo'
+    if savepath:
+        if apply_file_format_args:
+            args = layer.GetFileFormatArguments()
+            if args:
+                args = ":".join("{}={}".format(key, value)
+                                for key, value in args.items())
+                savepath = "{}:SDF_FORMAT_ARGS:{}".format(savepath, args)
+
+        p.customData['HoudiniSavePath'] = savepath
+        p.customData['HoudiniSaveControl'] = 'Explicit'
+    # Let everyone know what node created this layer.
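+    # (`node.sessionId()` is an integer id unique to the current Houdini
+    # session; it is not a node path.)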
+ p.customData['HoudiniCreatorNode'] = node.sessionId() + p.customData['HoudiniEditorNodes'] = Vt.IntArray([node.sessionId()]) + node.addHeldLayer(layer.identifier) + + return p + + +@contextlib.contextmanager +def remap_paths(rop_node, mapping): + """Enable the AyonRemapPaths output processor with provided `mapping`""" + from openpype.hosts.houdini.api.lib import parm_values + + if not mapping: + # Do nothing + yield + return + + # Houdini string parms need to escape backslashes due to the support + # of expressions - as such we do so on the json data + value = json.dumps(mapping).replace("\\", "\\\\") + with outputprocessors( + rop_node, + processors=["ayon_remap_paths"], + disable_all_others=True, + ): + with parm_values([ + (rop_node.parm("ayon_remap_paths_remap_json"), value) + ]): + yield diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index e05d254863b..4c7b1f6cdc4 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -9,7 +9,7 @@ class CreateUSD(plugin.HoudiniCreator): """Universal Scene Description""" identifier = "io.openpype.creators.houdini.usd" - label = "USD (experimental)" + label = "USD" family = "usd" icon = "gears" enabled = False @@ -50,3 +50,6 @@ def get_network_categories(self): hou.ropNodeTypeCategory(), hou.lopNodeTypeCategory() ] + + def get_publish_families(self): + return ["usd", "usdrop"] diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index f78f0bed503..047b131683e 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -7,7 +7,7 @@ class CreateUSDRender(plugin.HoudiniCreator): """USD Render ROP in /stage""" identifier = "io.openpype.creators.houdini.usdrender" - label = "USD Render (experimental)" + label = "USD Render" family = "usdrender" icon = "magic" diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py deleted file mode 100644 index d154cdc7c0b..00000000000 --- a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py +++ /dev/null @@ -1,148 +0,0 @@ -import hou -import pyblish.api -from openpype.hosts.houdini.api import lib -import openpype.hosts.houdini.api.usd as hou_usdlib -import openpype.lib.usdlib as usdlib - - -class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): - """Collect Instances from a ROP Network and its configured layer paths. - - The output nodes of the ROP node will only be published when *any* of the - layers remain set to 'publish' by the user. - - This works differently from most of our Avalon instances in the pipeline. - As opposed to storing `pyblish.avalon.instance` as id on the node we store - `pyblish.avalon.usdlayered`. - - Additionally this instance has no need for storing family, asset, subset - or name on the nodes. Instead all information is retrieved solely from - the output filepath, which is an Avalon URI: - avalon://{asset}/{subset}.{representation} - - Each final ROP node is considered a dependency for any of the Configured - Save Path layers it sets along the way. As such, the instances shown in - the Pyblish UI are solely the configured layers. The encapsulating usd - files are generated whenever *any* of the dependencies is published. 
- - These dependency instances are stored in: - instance.data["publishDependencies"] - - """ - - order = pyblish.api.CollectorOrder - 0.01 - label = "Collect Instances (USD Configured Layers)" - hosts = ["houdini"] - - def process(self, context): - - stage = hou.node("/stage") - if not stage: - # Likely Houdini version <18 - return - - nodes = stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) - for node in nodes: - - if not node.parm("id"): - continue - - if node.evalParm("id") != "pyblish.avalon.usdlayered": - continue - - has_family = node.evalParm("family") - assert has_family, "'%s' is missing 'family'" % node.name() - - self.process_node(node, context) - - def sort_by_family(instance): - """Sort by family""" - return instance.data.get("families", instance.data.get("family")) - - # Sort/grouped by family (preserving local index) - context[:] = sorted(context, key=sort_by_family) - - return context - - def process_node(self, node, context): - - # Allow a single ROP node or a full ROP network of USD ROP nodes - # to be processed as a single entry that should "live together" on - # a publish. - if node.type().name() == "ropnet": - # All rop nodes inside ROP Network - ropnodes = node.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) - else: - # A single node - ropnodes = [node] - - data = lib.read(node) - - # Don't use the explicit "colorbleed.usd.layered" family for publishing - # instead use the "colorbleed.usd" family to integrate. - data["publishFamilies"] = ["colorbleed.usd"] - - # For now group ALL of them into USD Layer subset group - # Allow this subset to be grouped into a USD Layer on creation - data["subsetGroup"] = "USD Layer" - - instances = list() - dependencies = [] - for ropnode in ropnodes: - - # Create a dependency instance per ROP Node. 
- lopoutput = ropnode.evalParm("lopoutput") - dependency_save_data = self.get_save_data(lopoutput) - dependency = context.create_instance(dependency_save_data["name"]) - dependency.append(ropnode) - dependency.data.update(data) - dependency.data.update(dependency_save_data) - dependency.data["family"] = "colorbleed.usd.dependency" - dependency.data["optional"] = False - dependencies.append(dependency) - - # Hide the dependency instance from the context - context.pop() - - # Get all configured layers for this USD ROP node - # and create a Pyblish instance for each one - layers = hou_usdlib.get_configured_save_layers(ropnode) - for layer in layers: - save_path = hou_usdlib.get_layer_save_path(layer) - save_data = self.get_save_data(save_path) - if not save_data: - continue - self.log.info(save_path) - - instance = context.create_instance(save_data["name"]) - instance[:] = [node] - - # Set the instance data - instance.data.update(data) - instance.data.update(save_data) - instance.data["usdLayer"] = layer - - instances.append(instance) - - # Store the collected ROP node dependencies - self.log.debug("Collected dependencies: %s" % (dependencies,)) - for instance in instances: - instance.data["publishDependencies"] = dependencies - - def get_save_data(self, save_path): - - # Resolve Avalon URI - uri_data = usdlib.parse_avalon_uri(save_path) - if not uri_data: - self.log.warning("Non Avalon URI Layer Path: %s" % save_path) - return {} - - # Collect asset + subset from URI - name = "{subset} ({asset})".format(**uri_data) - fname = "{asset}_{subset}.{ext}".format(**uri_data) - - data = dict(uri_data) - data["usdSavePath"] = save_path - data["usdFilename"] = fname - data["name"] = name - return data diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py deleted file mode 100644 index 462cf99b9c5..00000000000 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ /dev/null @@ -1,120 +0,0 @@ -import pyblish.api - -from openpype.client import ( - get_subset_by_name, - get_asset_by_name, - get_asset_name_identifier, -) -import openpype.lib.usdlib as usdlib - - -class CollectUsdBootstrap(pyblish.api.InstancePlugin): - """Collect special Asset/Shot bootstrap instances if those are needed. - - Some specific subsets are intended to be part of the default structure - of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset - we layer a Model and Shade USD file over each other and expose that in - a Asset USD file, ready to use. - - On the first publish of any of the components of a Asset or Shot the - missing pieces are bootstrapped and generated in the pipeline too. This - means that on the very first publish of your model the Asset USD file - will exist too. - - """ - - order = pyblish.api.CollectorOrder + 0.35 - label = "Collect USD Bootstrap" - hosts = ["houdini"] - families = ["usd", "usd.layered"] - - def process(self, instance): - - # Detect whether the current subset is a subset in a pipeline - def get_bootstrap(instance): - instance_subset = instance.data["subset"] - for name, layers in usdlib.PIPELINE.items(): - if instance_subset in set(layers): - return name # e.g. 
"asset" - break - else: - return - - bootstrap = get_bootstrap(instance) - if bootstrap: - self.add_bootstrap(instance, bootstrap) - - # Check if any of the dependencies requires a bootstrap - for dependency in instance.data.get("publishDependencies", list()): - bootstrap = get_bootstrap(dependency) - if bootstrap: - self.add_bootstrap(dependency, bootstrap) - - def add_bootstrap(self, instance, bootstrap): - - self.log.debug("Add bootstrap for: %s" % bootstrap) - - project_name = instance.context.data["projectName"] - asset_name = instance.data["asset"] - asset_doc = get_asset_by_name(project_name, asset_name) - assert asset_doc, "Asset must exist: %s" % asset_name - - # Check which are not about to be created and don't exist yet - required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap) - - require_all_layers = instance.data.get("requireAllLayers", False) - if require_all_layers: - # USD files load fine in usdview and Houdini even when layered or - # referenced files do not exist. So by default we don't require - # the layers to exist. - layers = usdlib.PIPELINE.get(bootstrap) - if layers: - required += list(layers) - - self.log.debug("Checking required bootstrap: %s" % required) - for subset_name in required: - if self._subset_exists( - project_name, instance, subset_name, asset_doc - ): - continue - - self.log.debug( - "Creating {0} USD bootstrap: {1} {2}".format( - bootstrap, asset_name, subset_name - ) - ) - - new = instance.context.create_instance(subset_name) - new.data["subset"] = subset_name - new.data["label"] = "{0} ({1})".format(subset_name, asset_name) - new.data["family"] = "usd.bootstrap" - new.data["comment"] = "Automated bootstrap USD file." - new.data["publishFamilies"] = ["usd"] - - # Do not allow the user to toggle this instance - new.data["optional"] = False - - # Copy some data from the instance for which we bootstrap - for key in ["asset"]: - new.data[key] = instance.data[key] - - def _subset_exists(self, project_name, instance, subset_name, asset_doc): - """Return whether subset exists in current context or in database.""" - # Allow it to be created during this publish session - context = instance.context - - asset_doc_name = get_asset_name_identifier(asset_doc) - for inst in context: - if ( - inst.data["subset"] == subset_name - and inst.data["asset"] == asset_doc_name - ): - return True - - # Or, if they already exist in the database we can - # skip them too. - if get_subset_by_name( - project_name, subset_name, asset_doc["_id"], fields=["_id"] - ): - return True - return False diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index 696560a5906..32fc3809619 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -1,20 +1,67 @@ +import copy import os +import re import pyblish.api + +from openpype.pipeline.create import get_subset_name +from openpype.client import get_asset_by_name import openpype.hosts.houdini.api.usd as usdlib import hou +def copy_instance_data(instance_src, instance_dest, attr): + """Copy instance data from `src` instance to `dest` instance. 
+
+    Examples:
+        >>> copy_instance_data(instance_src, instance_dest,
+        ...                    attr="publish_attributes.CollectRopFrameRange")
+
+    Arguments:
+        instance_src (pyblish.api.Instance): Source instance to copy from
+        instance_dest (pyblish.api.Instance): Target instance to copy to
+        attr (str): Attribute on the source instance to copy. This can be
+            a nested key joined by `.` to only copy sub entries of
+            dictionaries in the source instance's data.
+
+    Raises:
+        KeyError: If the key does not exist on the source instance.
+        AssertionError: If a parent key already exists on the destination
+            instance but is not of the correct type (= is not a dict)
+
+    """
+
+    src_data = instance_src.data
+    dest_data = instance_dest.data
+    keys = attr.split(".")
+    for i, key in enumerate(keys):
+        if key not in src_data:
+            break
+
+        src_value = src_data[key]
+        if i != len(keys) - 1:
+            dest_data = dest_data.setdefault(key, {})
+            assert isinstance(dest_data, dict), "Destination must be a dict"
+            src_data = src_value
+        else:
+            # Last iteration - assign the value
+            dest_data[key] = copy.deepcopy(src_value)
+
+
 class CollectUsdLayers(pyblish.api.InstancePlugin):
     """Collect the USD Layers that have configured save paths."""
 
-    order = pyblish.api.CollectorOrder + 0.35
+    order = pyblish.api.CollectorOrder + 0.25
     label = "Collect USD Layers"
     hosts = ["houdini"]
-    families = ["usd"]
+    families = ["usdrop"]
 
     def process(self, instance):
+        # TODO: Replace this with a Hidden Creator so we collect these BEFORE
+        #     starting the publish so the user sees them before publishing
+        #     - however user should not be able to individually enable/disable
+        #     this from the main ROP it's created from?
 
         output = instance.data.get("output_node")
         if not output:
@@ -31,13 +78,16 @@ def process(self, instance):
             creator = info.customData.get("HoudiniCreatorNode")
 
             self.log.debug("Found configured save path: "
-                           "%s -> %s" % (layer, save_path))
+                           "%s -> %s", layer, save_path)
 
             # Log node that configured this save path
-            if creator:
-                self.log.debug("Created by: %s" % creator)
+            creator_node = hou.nodeBySessionId(creator) if creator else None
+            if creator_node:
+                self.log.debug(
+                    "Created by: %s", creator_node.path()
+                )
 
-            save_layers.append((layer, save_path))
+            save_layers.append((layer, save_path, creator_node))
 
         # Store on the instance
         instance.data["usdConfiguredSavePaths"] = save_layers
@@ -45,22 +95,65 @@ def process(self, instance):
 
         # Create configured layer instances so User can disable updating
         # specific configured layers for publishing.
context = instance.context - for layer, save_path in save_layers: + for layer, save_path, creator_node in save_layers: name = os.path.basename(save_path) - label = "{0} -> {1}".format(instance.data["name"], name) layer_inst = context.create_instance(name) - family = "usdlayer" + # include same USD ROP + layer_inst.append(rop_node) + + staging_dir, fname = os.path.split(save_path) + fname_no_ext, ext = os.path.splitext(fname) + + variant = fname_no_ext + + # Strip off any trailing version number in the form of _v[0-9]+ + variant = re.sub("_v[0-9]+$", "", variant) + + layer_inst.data["usd_layer"] = layer + layer_inst.data["usd_layer_save_path"] = save_path + + project_name = context.data["projectName"] + asset_doc = get_asset_by_name(project_name, + asset_name=instance.data["asset"]) + variant_base = instance.data["variant"] + subset = get_subset_name( + family="usd", + variant=variant_base + "_" + variant, + task_name=context.data["anatomyData"]["task"]["name"], + asset_doc=asset_doc, + project_name=project_name, + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] + ) + + label = "{0} -> {1}".format(instance.data["name"], subset) + family = "usd" layer_inst.data["family"] = family layer_inst.data["families"] = [family] - layer_inst.data["subset"] = "__stub__" + layer_inst.data["subset"] = subset layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] layer_inst.data["instance_node"] = instance.data["instance_node"] - # include same USD ROP - layer_inst.append(rop_node) - # include layer data - layer_inst.append((layer, save_path)) + layer_inst.data["render"] = False + layer_inst.data["output_node"] = creator_node + + # Inherit "use handles" from the source instance + # TODO: Do we want to maybe copy full `publish_attributes` instead? 
+            copy_instance_data(
+                instance, layer_inst,
+                attr="publish_attributes.CollectRopFrameRange.use_handles"
+            )
 
             # Allow this subset to be grouped into a USD Layer on creation
             layer_inst.data["subsetGroup"] = "USD Layer"
+
+            # For now just assume the representation will get published
+            representation = {
+                "name": "usd",
+                "ext": ext.lstrip("."),
+                "stagingDir": staging_dir,
+                "files": fname
+            }
+            layer_inst.data.setdefault("representations", []).append(
+                representation)
diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py
index 61c1b477b2d..1722f309cd9 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_usd.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py
@@ -4,17 +4,17 @@
 
 from openpype.pipeline import publish
 from openpype.hosts.houdini.api.lib import render_rop
+from openpype.hosts.houdini.api.usd import remap_paths
 
 import hou
 
+
 class ExtractUSD(publish.Extractor):
 
     order = pyblish.api.ExtractorOrder
     label = "Extract USD"
     hosts = ["houdini"]
-    families = ["usd",
-                "usdModel",
-                "usdSetDress"]
+    families = ["usdrop"]
 
     def process(self, instance):
 
@@ -28,7 +28,9 @@ def process(self, instance):
 
         self.log.info("Writing USD '%s' to '%s'" % (file_name, staging_dir))
 
-        render_rop(ropnode)
+        mapping = self.get_source_to_publish_paths(instance.context)
+        with remap_paths(ropnode, mapping):
+            render_rop(ropnode)
 
         assert os.path.exists(output), "Output does not exist: %s" % output
 
@@ -42,3 +44,47 @@ def process(self, instance):
             "stagingDir": staging_dir,
         }
         instance.data["representations"].append(representation)
+
+    def get_source_to_publish_paths(self, context):
+        """Map each instance's source files to their publish file paths,
+        so the USD save can remap asset layer paths to the publish paths
+        via the AyonRemapPaths output processor."""
+
+        from openpype.lib.usdlib import get_instance_expected_output_path
+
+        mapping = {}
+        for instance in context:
+            if not instance.data.get("active", True):
+                continue
+
+            if not instance.data.get("publish", True):
+                continue
+
+            for repre in instance.data.get("representations", []):
+                name = repre.get("name")
+                ext = repre.get("ext")
+
+                # TODO: The remapping might need to get more involved if the
+                #   asset paths that are set use e.g.
$F + # TODO: If the representation has multiple files we might need + # to define the path remapping per file of the sequence + path = get_instance_expected_output_path( + instance, representation_name=name, ext=ext + ) + for source_path in get_source_paths(instance, repre): + source_path = os.path.normpath(source_path) + mapping[source_path] = path + + return mapping + + +def get_source_paths(instance, repre): + """Return the full source filepath for an instance's representations""" + + staging = repre.get("stagingDir", instance.data.get("stagingDir")) + files = repre.get("files", []) + if isinstance(files, list): + return [os.path.join(staging, fname) for fname in files] + else: + # Single file + return [os.path.join(staging, files)] diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py deleted file mode 100644 index d6193f13c17..00000000000 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ /dev/null @@ -1,319 +0,0 @@ -import os -import contextlib -import hou -import sys -from collections import deque - -import pyblish.api - -from openpype.client import ( - get_asset_by_name, - get_subset_by_name, - get_last_version_by_subset_id, - get_representation_by_name, -) -from openpype.pipeline import ( - get_representation_path, - publish, -) -import openpype.hosts.houdini.api.usd as hou_usdlib -from openpype.hosts.houdini.api.lib import render_rop - - -class ExitStack(object): - """Context manager for dynamic management of a stack of exit callbacks. - - For example: - - with ExitStack() as stack: - files = [stack.enter_context(open(fname)) for fname in filenames] - # All opened files will automatically be closed at the end of - # the with statement, even if attempts to open files later - # in the list raise an exception - - """ - - def __init__(self): - self._exit_callbacks = deque() - - def pop_all(self): - """Preserve the context stack by transferring it to a new instance""" - new_stack = type(self)() - new_stack._exit_callbacks = self._exit_callbacks - self._exit_callbacks = deque() - return new_stack - - def _push_cm_exit(self, cm, cm_exit): - """Helper to correctly register callbacks to __exit__ methods""" - - def _exit_wrapper(*exc_details): - return cm_exit(cm, *exc_details) - - _exit_wrapper.__self__ = cm - self.push(_exit_wrapper) - - def push(self, exit): - """Registers a callback with the standard __exit__ method signature. - - Can suppress exceptions the same way __exit__ methods can. - - Also accepts any object with an __exit__ method (registering a call - to the method instead of the object itself) - - """ - # We use an unbound method rather than a bound method to follow - # the standard lookup behaviour for special methods - _cb_type = type(exit) - try: - exit_method = _cb_type.__exit__ - except AttributeError: - # Not a context manager, so assume its a callable - self._exit_callbacks.append(exit) - else: - self._push_cm_exit(exit, exit_method) - return exit # Allow use as a decorator - - def callback(self, callback, *args, **kwds): - """Registers an arbitrary callback and arguments. - - Cannot suppress exceptions. 
- """ - - def _exit_wrapper(exc_type, exc, tb): - callback(*args, **kwds) - - # We changed the signature, so using @wraps is not appropriate, but - # setting __wrapped__ may still help with introspection - _exit_wrapper.__wrapped__ = callback - self.push(_exit_wrapper) - return callback # Allow use as a decorator - - def enter_context(self, cm): - """Enters the supplied context manager - - If successful, also pushes its __exit__ method as a callback and - returns the result of the __enter__ method. - """ - # We look up the special methods on the type to match the with - # statement - _cm_type = type(cm) - _exit = _cm_type.__exit__ - result = _cm_type.__enter__(cm) - self._push_cm_exit(cm, _exit) - return result - - def close(self): - """Immediately unwind the context stack""" - self.__exit__(None, None, None) - - def __enter__(self): - return self - - def __exit__(self, *exc_details): - # We manipulate the exception state so it behaves as though - # we were actually nesting multiple with statements - frame_exc = sys.exc_info()[1] - - def _fix_exception_context(new_exc, old_exc): - while 1: - exc_context = new_exc.__context__ - if exc_context in (None, frame_exc): - break - new_exc = exc_context - new_exc.__context__ = old_exc - - # Callbacks are invoked in LIFO order to match the behaviour of - # nested context managers - suppressed_exc = False - while self._exit_callbacks: - cb = self._exit_callbacks.pop() - try: - if cb(*exc_details): - suppressed_exc = True - exc_details = (None, None, None) - except Exception: - new_exc_details = sys.exc_info() - # simulate the stack of exceptions by setting the context - _fix_exception_context(new_exc_details[1], exc_details[1]) - if not self._exit_callbacks: - raise - exc_details = new_exc_details - return suppressed_exc - - -@contextlib.contextmanager -def parm_values(overrides): - """Override Parameter values during the context.""" - - originals = [] - try: - for parm, value in overrides: - originals.append((parm, parm.eval())) - parm.set(value) - yield - finally: - for parm, value in originals: - # Parameter might not exist anymore so first - # check whether it's still valid - if hou.parm(parm.path()): - parm.set(value) - - -class ExtractUSDLayered(publish.Extractor): - - order = pyblish.api.ExtractorOrder - label = "Extract Layered USD" - hosts = ["houdini"] - families = ["usdLayered", "usdShade"] - - # Force Output Processors so it will always save any file - # into our unique staging directory with processed Avalon paths - output_processors = ["avalon_uri_processor", "stagingdir_processor"] - - def process(self, instance): - - self.log.info("Extracting: %s" % instance) - - staging_dir = self.staging_dir(instance) - fname = instance.data.get("usdFilename") - - # The individual rop nodes are collected as "publishDependencies" - dependencies = instance.data["publishDependencies"] - ropnodes = [dependency[0] for dependency in dependencies] - assert all( - node.type().name() in {"usd", "usd_rop"} for node in ropnodes - ) - - # Main ROP node, either a USD Rop or ROP network with - # multiple USD ROPs - node = hou.node(instance.data["instance_node"]) - - # Collect any output dependencies that have not been processed yet - # during extraction of other instances - outputs = [fname] - active_dependencies = [ - dep - for dep in dependencies - if dep.data.get("publish", True) - and not dep.data.get("_isExtracted", False) - ] - for dependency in active_dependencies: - outputs.append(dependency.data["usdFilename"]) - - pattern = r"*[/\]{0} {0}" - 
save_pattern = " ".join(pattern.format(fname) for fname in outputs) - - # Run a stack of context managers before we start the render to - # temporarily adjust USD ROP settings for our publish output. - rop_overrides = { - # This sets staging directory on the processor to force our - # output files to end up in the Staging Directory. - "stagingdiroutputprocessor_stagingDir": staging_dir, - # Force the Avalon URI Output Processor to refactor paths for - # references, payloads and layers to published paths. - "avalonurioutputprocessor_use_publish_paths": True, - # Only write out specific USD files based on our outputs - "savepattern": save_pattern, - } - overrides = list() - with ExitStack() as stack: - - for ropnode in ropnodes: - manager = hou_usdlib.outputprocessors( - ropnode, - processors=self.output_processors, - disable_all_others=True, - ) - stack.enter_context(manager) - - # Some of these must be added after we enter the output - # processor context manager because those parameters only - # exist when the Output Processor is added to the ROP node. - for name, value in rop_overrides.items(): - parm = ropnode.parm(name) - assert parm, "Parm not found: %s.%s" % ( - ropnode.path(), - name, - ) - overrides.append((parm, value)) - - stack.enter_context(parm_values(overrides)) - - # Render the single ROP node or the full ROP network - render_rop(node) - - # Assert all output files in the Staging Directory - for output_fname in outputs: - path = os.path.join(staging_dir, output_fname) - assert os.path.exists(path), "Output file must exist: %s" % path - - # Set up the dependency for publish if they have new content - # compared to previous publishes - project_name = instance.context.data["projectName"] - for dependency in active_dependencies: - dependency_fname = dependency.data["usdFilename"] - - filepath = os.path.join(staging_dir, dependency_fname) - similar = self._compare_with_latest_publish( - project_name, dependency, filepath - ) - if similar: - # Deactivate this dependency - self.log.debug( - "Dependency matches previous publish version," - " deactivating %s for publish" % dependency - ) - dependency.data["publish"] = False - else: - self.log.debug("Extracted dependency: %s" % dependency) - # This dependency should be published - dependency.data["files"] = [dependency_fname] - dependency.data["stagingDir"] = staging_dir - dependency.data["_isExtracted"] = True - - # Store the created files on the instance - if "files" not in instance.data: - instance.data["files"] = [] - instance.data["files"].append(fname) - - def _compare_with_latest_publish(self, project_name, dependency, new_file): - import filecmp - - _, ext = os.path.splitext(new_file) - - # Compare this dependency with the latest published version - # to detect whether we should make this into a new publish - # version. If not, skip it. - asset = get_asset_by_name( - project_name, dependency.data["asset"], fields=["_id"] - ) - subset = get_subset_by_name( - project_name, - dependency.data["subset"], - asset["_id"], - fields=["_id"] - ) - if not subset: - # Subset doesn't exist yet. 
Definitely new file - self.log.debug("No existing subset..") - return False - - version = get_last_version_by_subset_id( - project_name, subset["_id"], fields=["_id"] - ) - if not version: - self.log.debug("No existing version..") - return False - - representation = get_representation_by_name( - project_name, ext.lstrip("."), version["_id"] - ) - if not representation: - self.log.debug("No existing representation..") - return False - - old_file = get_representation_path(representation) - if not os.path.exists(old_file): - return False - - return filecmp.cmp(old_file, new_file) diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index c10c5a2c05d..4820889f98b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -4,6 +4,7 @@ import hou + class ValidateBypassed(pyblish.api.InstancePlugin): """Validate all primitives build hierarchy from attribute when enabled. @@ -20,9 +21,10 @@ class ValidateBypassed(pyblish.api.InstancePlugin): def process(self, instance): - if len(instance) == 0: - # Ignore instances without any nodes - # e.g. in memory bootstrap instances + if not instance.data.get("instance_node"): + self.log.debug( + "Skipping instance without instance node: {}".format(instance) + ) return invalid = self.get_invalid(instance) diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py index 108a700bbea..c3f807e61ca 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py +++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -20,7 +20,7 @@ class ValidateHoudiniNotApprenticeLicense(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["usd", "abc", "fbx", "camera"] + families = ["usdrop", "abc", "fbx", "camera"] hosts = ["houdini"] label = "Houdini Apprentice License" diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index 6c48eae70a2..90c2f080a13 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -35,6 +35,13 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): def process(self, instance): + if not instance.data.get("instance_node"): + self.log.debug( + "Skipping 'Validate no errors' because instance " + "has no instance node: {}".format(instance) + ) + return + validate_nodes = [] if len(instance) > 0: diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index f2c7878c4ed..0c6b0911be2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -2,12 +2,16 @@ import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib -from openpype.pipeline import PublishValidationError +from openpype.pipeline import ( + PublishValidationError, + publish +) import hou -class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): +class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin, + publish.OptionalPyblishPluginMixin): """Validate USD loaded paths have no backslashes. 
This is a crucial validation for HUSK USD rendering as Houdini's @@ -21,12 +25,14 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["usdSetDress", "usdShade", "usd", "usdrender"] + families = ["usdrop", "usdrender"] hosts = ["houdini"] label = "USD Layer path backslashes" optional = True def process(self, instance): + if not self.is_active(instance.data): + return rop = hou.node(instance.data.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 5cb5bd35fb4..75a55b1d34e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -14,7 +14,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["usd"] + families = ["usdrop"] hosts = ["houdini"] label = "Validate Output Node (USD)" diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py deleted file mode 100644 index 0db782d5453..00000000000 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -import re - -import pyblish.api - -from openpype.client import get_subset_by_name -from openpype.pipeline.publish import ValidateContentsOrder -from openpype.pipeline import PublishValidationError - - -class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): - """Validate the Instance has no current cooking errors.""" - - order = ValidateContentsOrder - hosts = ["houdini"] - families = ["usdShade"] - label = "USD Shade model exists" - - def process(self, instance): - project_name = instance.context.data["projectName"] - asset_name = instance.data["asset"] - subset = instance.data["subset"] - - # Assume shading variation starts after a dot separator - shade_subset = subset.split(".", 1)[0] - model_subset = re.sub("^usdShade", "usdModel", shade_subset) - - asset_doc = instance.data.get("assetEntity") - if not asset_doc: - raise RuntimeError("Asset document is not filled on instance.") - - subset_doc = get_subset_by_name( - project_name, model_subset, asset_doc["_id"], fields=["_id"] - ) - if not subset_doc: - raise PublishValidationError( - ("USD Model subset not found: " - "{} ({})").format(model_subset, asset_name), - title=self.label - ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py deleted file mode 100644 index cb2099437da..00000000000 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -import pyblish.api -from openpype.pipeline import PublishValidationError - -import hou - - -class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): - """Validate USD Shading Workspace is correct version. - - There have been some issues with outdated/erroneous Shading Workspaces - so this is to confirm everything is set as it should. 
- - """ - - order = pyblish.api.ValidatorOrder - hosts = ["houdini"] - families = ["usdShade"] - label = "USD Shade Workspace" - - def process(self, instance): - - rop = hou.node(instance.data.get("instance_node")) - workspace = rop.parent() - - definition = workspace.type().definition() - name = definition.nodeType().name() - library = definition.libraryFilePath() - - all_definitions = hou.hda.definitionsInFile(library) - node_type, version = name.rsplit(":", 1) - version = float(version) - - highest = version - for other_definition in all_definitions: - other_name = other_definition.nodeType().name() - other_node_type, other_version = other_name.rsplit(":", 1) - other_version = float(other_version) - - if node_type != other_node_type: - continue - - # Get the highest version - highest = max(highest, other_version) - - if version != highest: - raise PublishValidationError( - ("Shading Workspace is not the latest version." - " Found {}. Latest is {}.").format(version, highest), - title=self.label - ) - - # There were some issues with the editable node not having the right - # configured path. So for now let's assure that is correct to.from - value = ( - 'avalon://`chs("../asset_name")`/' - 'usdShade`chs("../model_variantname1")`.usd' - ) - rop_value = rop.parm("lopoutput").rawValue() - if rop_value != value: - raise PublishValidationError( - ("Shading Workspace has invalid 'lopoutput'" - " parameter value. The Shading Workspace" - " needs to be reset to its default values."), - title=self.label - ) diff --git a/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py new file mode 100644 index 00000000000..cd163c34918 --- /dev/null +++ b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py @@ -0,0 +1,134 @@ +import logging + +from husd.outputprocessor import OutputProcessor + +from openpype.lib import usdlib + + +class AyonURIOutputProcessor(OutputProcessor): + """Process Ayon URIs into their full path equivalents.""" + + def __init__(self): + """ There is only one object of each output processor class that is + ever created in a Houdini session. Therefore be very careful + about what data gets put in this object. + """ + self._save_cache = dict() + self._ref_cache = dict() + self._publish_context = None + self.log = logging.getLogger(__name__) + + @staticmethod + def name(): + return "ayon_uri_processor" + + @staticmethod + def displayName(): + return "Ayon URI Output Processor" + + def processReferencePath(self, + asset_path, + referencing_layer_path, + asset_is_layer): + """ + Args: + asset_path (str): The path to the asset, as specified in Houdini. + If this asset is being written to disk, this will be the final + output of the `processSavePath()` calls on all output + processors. + referencing_layer_path (str): The absolute file path of the file + containing the reference to the asset. You can use this to make + the path pointer relative. + asset_is_layer (bool): A boolean value indicating whether this + asset is a USD layer file. If this is `False`, the asset is + something else (for example, a texture or volume file). + + Returns: + The refactored reference path. 
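+
+        Note:
+            Results are cached for the Houdini session, so repeated
+            queries for the same `asset_path` do not query the database
+            again.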
+ + """ + + cache = self._ref_cache + + # Retrieve from cache if this query occurred before (optimization) + if asset_path in cache: + return cache[asset_path] + + uri_data = usdlib.parse_ayon_uri(asset_path) + if not uri_data: + cache[asset_path] = asset_path + return asset_path + + # Try and find it as an existing publish + query = { + "project_name": uri_data["project"], + "asset_name": uri_data["asset"], + "subset_name": uri_data["product"], + "version_name": uri_data["version"], + "representation_name": uri_data["representation"], + } + path = usdlib.get_representation_path_by_names( + **query + ) + if path: + self.log.debug( + "Ayon URI Resolver - ref: %s -> %s", asset_path, path + ) + cache[asset_path] = path + return path + + elif self._publish_context: + # Query doesn't resolve to an existing version - likely + # points to a version defined in the current publish session + # as such we should resolve it using the current publish + # context if that was set prior to this publish + raise NotImplementedError("TODO") + + self.log.warning(f"Unable to resolve AYON URI: {asset_path}") + cache[asset_path] = asset_path + return asset_path + + def processSavePath(self, + asset_path, + referencing_layer_path, + asset_is_layer): + """ + Args: + asset_path (str): The path to the asset, as specified in Houdini. + If this asset is being written to disk, this will be the final + output of the `processSavePath()` calls on all output + processors. + referencing_layer_path (str): The absolute file path of the file + containing the reference to the asset. You can use this to make + the path pointer relative. + asset_is_layer (bool): A boolean value indicating whether this + asset is a USD layer file. If this is `False`, the asset is + something else (for example, a texture or volume file). + + Returns: + The refactored save path. + + """ + cache = self._save_cache + + # Retrieve from cache if this query occurred before (optimization) + if asset_path in cache: + return cache[asset_path] + + uri_data = usdlib.parse_ayon_uri(asset_path) + if not uri_data: + cache[asset_path] = asset_path + return asset_path + + relative_template = "{asset}_{product}_{version}_{representation}.usd" + # Set save output path to a relative path so other + # processors can potentially manage it easily? 
+ path = relative_template.format(**uri_data) + + self.log.debug("Ayon URI Resolver - save: %s -> %s", asset_path, path) + cache[asset_path] = path + return path + + +def usdOutputProcessor(): + return AyonURIOutputProcessor diff --git a/openpype/hosts/houdini/startup/husdplugins/outputprocessors/remap_to_publish.py b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/remap_to_publish.py new file mode 100644 index 00000000000..17d2db0a17e --- /dev/null +++ b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/remap_to_publish.py @@ -0,0 +1,66 @@ +import os +import json + +import hou +from husd.outputprocessor import OutputProcessor + + +class AyonRemapPaths(OutputProcessor): + """Remap paths based on a mapping dict on rop node.""" + + def __init__(self): + self._mapping = dict() + + @staticmethod + def name(): + return "ayon_remap_paths" + + @staticmethod + def displayName(): + return "Ayon Remap Paths" + + @staticmethod + def hidden(): + return True + + @staticmethod + def parameters(): + group = hou.ParmTemplateGroup() + + parm_template = hou.StringParmTemplate( + "ayon_remap_paths_remap_json", + "Remapping dict (json)", + default_value="{}", + num_components=1, + string_type=hou.stringParmType.Regular, + ) + group.append(parm_template) + + return group.asDialogScript() + + def beginSave(self, config_node, config_overrides, lop_node, t): + super(AyonRemapPaths, self).beginSave(config_node, + config_overrides, + lop_node, + t) + + value = config_node.evalParm("ayon_remap_paths_remap_json") + mapping = json.loads(value) + assert isinstance(self._mapping, dict) + + # Ensure all keys are normalized paths so the lookup can be done + # correctly + mapping = { + os.path.normpath(key): value for key, value in mapping.items() + } + self._mapping = mapping + + def processReferencePath(self, + asset_path, + referencing_layer_path, + asset_is_layer): + return self._mapping.get(os.path.normpath(asset_path), asset_path) + + +def usdOutputProcessor(): + return AyonRemapPaths diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index af726409d4b..1cc62736f8f 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1622,6 +1622,10 @@ def get_container_members(container): # Assume it's a container dictionary container = container["objectName"] + if "," in container: + # Assume it's a UFE path - return it as the only member + return [container] + members = cmds.sets(container, query=True) or [] members = cmds.ls(members, long=True, objectsOnly=True) or [] all_members = set(members) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 1ecfdfaa404..05489738651 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -360,6 +360,25 @@ def parse_container(container): return data +def parse_usd_prim_container(prim, proxy): + """Parse instance container from UsdPrim if it is marked as one""" + data = prim.GetCustomDataByKey("openpype") + if not data or not data.get("id") == AVALON_CONTAINER_ID: + return + + # Store transient data + data["prim"] = prim + data["proxy"] = proxy + + # Store the maya UFE path as objectName + prim_path = str(prim.GetPath()) + data["objectName"] = "{},{}".format(proxy, prim_path) + data["namespace"] = prim_path + data["name"] = proxy + + return data + + def _ls(): """Yields Avalon container node names. 
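For reference, the `objectName` composed by `parse_usd_prim_container` follows Maya's UFE convention of joining the proxy shape path and the prim path with a comma. A minimal sketch of splitting such a value back into its parts (the helper name is hypothetical, not part of this patch):

def split_usd_container_object_name(object_name):
    """Split a UFE-style 'proxyShapePath,primPath' value into its parts."""
    proxy_shape, _, prim_path = object_name.partition(",")
    return proxy_shape, prim_path

# e.g. returns ("|asset|assetShape", "/root/geo")
split_usd_container_object_name("|asset|assetShape,/root/geo")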
@@ -415,6 +434,24 @@ def ls(): for container in sorted(container_names): yield parse_container(container) + for container in ls_maya_usd_proxy_prims(): + yield container + + +def ls_maya_usd_proxy_prims(): + # TODO: This might be nicer once the Loader API gets a refactor where + # the loaders themselves can return the containers from the scene + if cmds.pluginInfo("mayaUsdPlugin", query=True, loaded=True): + usd_proxies = cmds.ls(type="mayaUsdProxyShape", long=True) + if usd_proxies: + import mayaUsd.ufe + for proxy in usd_proxies: + stage = mayaUsd.ufe.getStage('|world' + proxy) + for prim in stage.TraverseAll(): + container = parse_usd_prim_container(prim, proxy=proxy) + if container: + yield container + def containerise(name, namespace, diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index e684a91fe23..22d70fc2c84 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -147,18 +147,39 @@ def get_publish_families(self): """ return [] + def add_transient_instance_data(self, instance_data): + """Add data into the `instance.data` after the read of the node data + + This can be overridden by subclasses to sneak in extra instance data + specific to the creator. + """ + # Allow a Creator to define multiple families + publish_families = self.get_publish_families() + if publish_families: + families = instance_data.setdefault("families", []) + for family in self.get_publish_families(): + if family not in families: + families.append(family) + + def remove_transient_instance_data(self, instance_data): + """Remove data `instance.data` before storing/imprinting to the node + + This can be overridden by subclasses to remove extra instance data + added in `add_transient_instance_data` specific to the creator. + """ + # Don't store `families` since it's up to the creator itself + # to define the initial publish families - not a stored attribute of + # `families` + instance_data.pop("families", None) + def imprint_instance_node(self, node, data): + self.remove_transient_instance_data(data) # We never store the instance_node as value on the node since # it's the node name itself data.pop("instance_node", None) data.pop("instance_id", None) - # Don't store `families` since it's up to the creator itself - # to define the initial publish families - not a stored attribute of - # `families` - data.pop("families", None) - # We store creator attributes at the root level and assume they # will not clash in names with `subset`, `task`, etc. and other # default names. 
This is just so these attributes in many cases @@ -231,11 +252,6 @@ def read_instance_node(self, node): node_data["instance_node"] = node node_data["instance_id"] = node - # If the creator plug-in specifies - families = self.get_publish_families() - if families: - node_data["families"] = families - return node_data def _default_collect_instances(self): @@ -243,6 +259,7 @@ def _default_collect_instances(self): cached_subsets = self.collection_shared_data["maya_cached_subsets"] for node in cached_subsets.get(self.identifier, []): node_data = self.read_instance_node(node) + self.add_transient_instance_data(node_data) created_instance = CreatedInstance.from_existing(node_data, self) self._add_instance_to_context(created_instance) @@ -280,17 +297,10 @@ def create(self, subset_name, instance_data, pre_create_data): if pre_create_data.get("use_selection"): members = cmds.ls(selection=True) - # Allow a Creator to define multiple families - publish_families = self.get_publish_families() - if publish_families: - families = instance_data.setdefault("families", []) - for family in self.get_publish_families(): - if family not in families: - families.append(family) - with lib.undo_chunk(): instance_node = cmds.sets(members, name=subset_name) instance_data["instance_node"] = instance_node + self.add_transient_instance_data(instance_data) instance = CreatedInstance( self.family, subset_name, diff --git a/openpype/hosts/maya/api/usdlib.py b/openpype/hosts/maya/api/usdlib.py new file mode 100644 index 00000000000..9693c7e4fe2 --- /dev/null +++ b/openpype/hosts/maya/api/usdlib.py @@ -0,0 +1,61 @@ +from openpype.pipeline.constants import AVALON_CONTAINER_ID +from pxr import Sdf + +from maya import cmds + + +def remove_spec(spec): + """Delete Sdf.PrimSpec or Sdf.PropertySpec + + Also see: + https://forum.aousd.org/t/api-basics-for-designing-a-manage-edits-editor-for-usd/676/1 # noqa + https://gist.github.com/BigRoy/4d2bf2eef6c6a83f4fda3c58db1489a5 + + """ + if spec.expired: + return + + if isinstance(spec, Sdf.PrimSpec): + # PrimSpec + parent = spec.nameParent + if parent: + view = parent.nameChildren + else: + # Assume PrimSpec is root prim + view = spec.layer.rootPrims + del view[spec.name] + + elif isinstance(spec, Sdf.PropertySpec): + # Relationship and Attribute specs + del spec.owner.properties[spec.name] + else: + raise TypeError(f"Unsupported spec type: {spec}") + + +def iter_ufe_usd_selection(): + for path in cmds.ls(selection=True, ufeObjects=True, long=True, + absoluteName=True): + if "," not in path: + continue + + node, ufe_path = path.split(",", 1) + if cmds.nodeType(node) != "mayaUsdProxyShape": + continue + + yield path + + +def containerise_prim(prim, + name, + namespace, + context, + loader): + for key, value in [ + ("openpype:schema", "openpype:container-2.0"), + ("openpype:id", AVALON_CONTAINER_ID), + ("openpype:name", name), + ("openpype:namespace", namespace), + ("openpype:loader", loader), + ("openpype:representation", context["representation"]["_id"]), + ]: + prim.SetCustomDataByKey(key, str(value)) diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd.py b/openpype/hosts/maya/plugins/create/create_maya_usd.py index cc9a14bd3a5..dce17fc52bf 100644 --- a/openpype/hosts/maya/plugins/create/create_maya_usd.py +++ b/openpype/hosts/maya/plugins/create/create_maya_usd.py @@ -2,21 +2,22 @@ from openpype.lib import ( BoolDef, EnumDef, - TextDef + TextDef, + UILabelDef, + UISeparatorDef, ) from maya import cmds class CreateMayaUsd(plugin.MayaCreator): - """Create Maya USD Export""" + 
"""Create Maya USD Export from maya scene objects""" identifier = "io.openpype.creators.maya.mayausd" label = "Maya USD" family = "usd" icon = "cubes" description = "Create Maya USD Export" - cache = {} def get_publish_families(self): @@ -100,3 +101,147 @@ def get_instance_attr_defs(self): ]) return defs + + +class CreateMayaUsdContribution(CreateMayaUsd): + """ + + When writing a USD as 'contribution' it will be added into what it's + contributing to. It will usually contribute to either the main *asset* + or *shot* but can be customized. + + Usually the contribution is done into a Department Layer, like e.g. + model, rig, look for models and layout, animation, fx, lighting for shots. + + Each department contribution will be 'sublayered' into the departments + contribution. + + """ + + identifier = "io.openpype.creators.maya.mayausd.assetcontribution" + label = "Maya USD Asset Contribution" + family = "usd" + icon = "cubes" + description = "Create Maya USD Contribution" + + # default_variants = ["main"] + # TODO: Do not include material for model publish + # TODO: Do only include material + assignments for material publish + # + attribute overrides onto existing geo? (`over`?) + # Define all in `geo` as `over`? + + bootstrap = "asset" + + contribution_asset_layer = None + + def create_template_hierarchy(self, asset_name, variant): + """Create the asset root template to hold the geo for the usd asset. + + Args: + asset_name: Asset name to use for the group + variant: Variant name to use as namespace. + This is needed so separate asset contributions can be + correctly created from a single scene. + + Returns: + list: The root node and geometry group. + + """ + + def set_usd_type(node, value): + attr = "USD_typeName" + if not cmds.attributeQuery(attr, node=node, exists=True): + cmds.addAttr(node, ln=attr, dt="string") + cmds.setAttr(f"{node}.{attr}", value, type="string") + + # Ensure simple unique namespace (add trailing number) + namespace = variant + name = f"{namespace}:{asset_name}" + i = 1 + while cmds.objExists(name): + name = f"{namespace}{i}:{asset_name}" + i += 1 + + # Define template hierarchy {asset_name}/geo + root = cmds.createNode("transform", + name=name, + skipSelect=True) + geo = cmds.createNode("transform", + name="geo", + parent=root, + skipSelect=True) + set_usd_type(geo, "Scope") + # Lock + hide transformations since we're exporting as Scope + for attr in ["tx", "ty", "tz", "rx", "ry", "rz", "sx", "sy", "sz"]: + cmds.setAttr(f"{geo}.{attr}", lock=True, keyable=False) + + return [root, geo] + + def create(self, subset_name, instance_data, pre_create_data): + + # Create template hierarchy + if pre_create_data.get("createTemplateHierarchy", True): + members = [] + if pre_create_data.get("use_selection"): + members = cmds.ls(selection=True, + long=True, + type="dagNode") + + root, geo = self.create_template_hierarchy( + asset_name=instance_data["asset"], + variant=instance_data["variant"] + ) + + if members: + cmds.parent(members, geo) + + # Select root and enable selection just so parent class' + # create adds it to the created instance + cmds.select(root, replace=True, noExpand=True) + pre_create_data["use_selection"] = True + + # Create as if we're the other plug-in so that the instance after + # creation thinks it was created by `CreateMayaUsd` and this Creator + # here is solely used to apply different default values + # TODO: Improve this hack + CreateMayaUsd( + project_settings=self.project_settings, + system_settings=None, + create_context=self.create_context + 
).create( + subset_name, + instance_data, + pre_create_data + ) + + def get_pre_create_attr_defs(self): + defs = super(CreateMayaUsdContribution, + self).get_pre_create_attr_defs() + defs.extend([ + BoolDef("createTemplateHierarchy", + label="Create template hierarchy", + default=True) + ]) + return defs + + +# class CreateUsdLookContribution(CreateMayaUsdContribution): +# """Look layer contribution to the USD Asset""" +# identifier = CreateMayaUsdContribution.identifier + ".look" +# label = "USD Look" +# icon = "paint-brush" +# description = "Create USD Look contribution" +# family = "usd.look" +# +# contribution_asset_layer = "look" +# +# +# class CreateUsdModelContribution(CreateMayaUsdContribution): +# """Model layer contribution to the USD Asset""" +# identifier = CreateMayaUsdContribution.identifier + ".model" +# label = "USD Model" +# icon = "cube" +# description = "Create USD Model contribution" +# family = "usd.model" +# +# contribution_asset_layer = "model" diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py new file mode 100644 index 00000000000..cfeae9c8453 --- /dev/null +++ b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py @@ -0,0 +1,59 @@ +from openpype.hosts.maya.api import plugin +from openpype.lib import EnumDef + + +class CreateMayaUsdLayer(plugin.MayaCreator): + """Create Maya USD Export from `mayaUsdProxyShape` layer""" + + identifier = "io.openpype.creators.maya.mayausdlayer" + label = "Maya USD Export Layer" + family = "usd" + icon = "cubes" + description = "Create mayaUsdProxyShape layer export" + + def get_publish_families(self): + return ["usd", "mayaUsdLayer"] + + def get_instance_attr_defs(self): + + from maya import cmds + import mayaUsd + + # Construct the stage + layer EnumDef from the maya proxies in the + # scene and the Sdf.Layer stack of the Usd.Stage per proxy. 
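The enum `value` built below joins the proxy shape path and the Sdf layer identifier with a ">" separator, and the matching extractor later splits it on the first ">" again. A small round-trip sketch under those assumptions (paths are illustrative):

proxy = "|asset|assetShape"
layer_identifier = "d:/project/scenes/scene.usda"

# Encode as done in this creator, decode as done in the extractor
value = ">".join([proxy, layer_identifier])
decoded_proxy, decoded_layer = value.split(">", 1)
assert (decoded_proxy, decoded_layer) == (proxy, layer_identifier)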
+ items = [] + for proxy in cmds.ls(type="mayaUsdProxyShape", long=True): + stage = mayaUsd.ufe.getStage("|world{}".format(proxy)) + if not stage: + continue + + for layer in stage.GetLayerStack(includeSessionLayers=False): + + proxy_nice_name = proxy.rsplit("|", 2)[-2] + layer_nice_name = layer.GetDisplayName() + label = "{} -> {}".format(proxy_nice_name, layer_nice_name) + value = ">".join([proxy, layer.identifier]) + + items.append({ + "label": label, + "value": value + }) + + if not items: + # EnumDef is not allowed to be empty + items.append("") + + defs = [ + EnumDef("defaultUSDFormat", + label="File format", + items={ + "usdc": "Binary", + "usda": "ASCII" + }, + default="usdc"), + EnumDef("stageLayerIdentifier", + label="Stage and Layer Identifier", + items=items) + ] + + return defs diff --git a/openpype/hosts/maya/plugins/inventory/select_containers.py b/openpype/hosts/maya/plugins/inventory/select_containers.py index f85bf17ab09..a443b99cb8f 100644 --- a/openpype/hosts/maya/plugins/inventory/select_containers.py +++ b/openpype/hosts/maya/plugins/inventory/select_containers.py @@ -17,6 +17,7 @@ def process(self, containers): all_members = [] for container in containers: members = get_container_members(container) + print(members) all_members.extend(members) cmds.select(all_members, replace=True, noExpand=True) @@ -31,7 +32,8 @@ class HighlightBySceneSelection(InventoryAction): def process(self, containers): - selection = set(cmds.ls(selection=True, long=True, objectsOnly=True)) + selection = set(cmds.ls(selection=True, long=True, objectsOnly=True, + ufeObjects=True)) host = registered_host() to_select = [] diff --git a/openpype/hosts/maya/plugins/load/load_maya_usd_add_maya_reference.py b/openpype/hosts/maya/plugins/load/load_maya_usd_add_maya_reference.py new file mode 100644 index 00000000000..83666c09937 --- /dev/null +++ b/openpype/hosts/maya/plugins/load/load_maya_usd_add_maya_reference.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +import contextlib + +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.pipeline.load import get_representation_path_from_context +from openpype.hosts.maya.api.usdlib import ( + containerise_prim, + iter_ufe_usd_selection +) + +from maya import cmds +import mayaUsd + + +@contextlib.contextmanager +def no_edit_mode(prim, restore_after=True): + """Ensure MayaReference prim is not in edit mode during context""" + pulled_node = mayaUsd.lib.PrimUpdaterManager.readPullInformation(prim) + ufe_path = None + try: + # remove edit state if pulled + if pulled_node: + import mayaUsdUtils + assert mayaUsdUtils.isPulledMayaReference(pulled_node) + cmds.mayaUsdDiscardEdits(pulled_node) + + # Discarding the edits directly selects the prim + # so we can get the UFE path from selection + ufe_path = cmds.ls(selection=True, ufeObjects=True, long=True)[0] + + yield prim, ufe_path, pulled_node + finally: + if restore_after and pulled_node and ufe_path: + cmds.mayaUsdEditAsMaya(ufe_path) + + +class MayaUsdProxyAddMayaReferenceLoader(load.LoaderPlugin): + """Read USD data in a Maya USD Proxy + + TODO: It'd be much easier if this loader would be capable of returning the + available containers in the scene based on the AYON URLs inside a USD + stage. 
That way we could potentially avoid the need for custom metadata + keys, stay closer to USD native data and rely solely on the + AYON:asset=blue,subset=modelMain,version=1 url + + """ + + families = ["*"] + representations = ["*"] + extensions = ["ma", "mb"] + + label = "USD Add Maya Reference" + order = -998 + icon = "code-fork" + color = "orange" + + identifier_key = "openpype_identifier" + + def load(self, context, name=None, namespace=None, options=None): + + selection = list(iter_ufe_usd_selection()) + assert len(selection) == 1, "Select only one PRIM please" + ufe_path = selection[0] + path = get_representation_path_from_context(context) + + import mayaUsdAddMayaReference + + namespace = "test" + prim = mayaUsdAddMayaReference.createMayaReferencePrim( + ufe_path, + path, + namespace, + # todo: add more of the arguments + # mayaReferencePrimName Nameprim_name, + # groupPrim (3-tuple, group name, type and kind) + # variantSet (2-tuple, variant set name and variant name) + ) + if not prim: + # Failed to add a reference + raise RuntimeError(f"Failed to add a reference at {ufe_path}") + + containerise_prim( + prim, + name=name, + namespace=namespace or "", + context=context, + loader=self.__class__.__name__ + ) + + return prim + + def _update_reference_path(self, prim, filepath): + """Update MayaReference prim 'mayaReference' in nearest prim spec""" + + from pxr import Sdf + + # We want to update the authored opinion in the right place, e.g. + # within a VariantSet if it's authored there. We go through the + # PrimStack to find the first prim spec that authors an opinion + # on the 'mayaReference' attribute where we have permission to + # change it. This could technically mean we're altering it in + # layers that we might not want to (e.g. a published USD file?) + stack = prim.GetPrimStack() + for prim_spec in stack: + if "mayaReference" not in prim_spec.attributes: + # prim spec defines no opinion on mayaRefernce attribute? + continue + + attr = prim_spec.attributes["mayaReference"] + if attr.permission != Sdf.PermissionPublic: + print(f"Not allowed to edit: {attr}") + continue + + if filepath != attr.default: + print( + f"Updating {attr.path} - {attr.default} -> {filepath}") + attr.default = filepath + + # Attribute is either updated or already set to + # the value in that layer + return + + # Just define in the current edit layer? + attr = prim.GetAttribute("mayaReference") + attr.Set(filepath) + + def update(self, container, representation): + # type: (dict, dict) -> None + """Update container with specified representation.""" + + prim = container["prim"] + filepath = get_representation_path(representation) + + with no_edit_mode(prim): + self._update_reference_path(prim, filepath) + + # Update representation id + # TODO: Do this in prim spec where we update reference path? 
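The prim stack walk in `_update_reference_path` above generalizes to any attribute: scanning `prim.GetPrimStack()` reveals which prim spec (and thus which layer) authors an opinion. A hedged sketch, with a hypothetical helper name:

def find_authoring_prim_spec(prim, attr_name):
    """Return the first prim spec in the stack that authors `attr_name`."""
    for prim_spec in prim.GetPrimStack():
        if attr_name in prim_spec.attributes:
            return prim_spec
    return None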
+ prim.SetCustomDataByKey( + "openpype:representation", str(representation["_id"]) + ) + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + # type: (dict) -> None + """Remove loaded container.""" + + from openpype.hosts.maya.api.usdlib import remove_spec + + prim = container["prim"] + with no_edit_mode(prim, restore_after=False): + for spec in prim.GetPrimStack(): + remove_spec(spec) diff --git a/openpype/hosts/maya/plugins/load/load_maya_usd_add_reference.py b/openpype/hosts/maya/plugins/load/load_maya_usd_add_reference.py new file mode 100644 index 00000000000..f9a725d6c1b --- /dev/null +++ b/openpype/hosts/maya/plugins/load/load_maya_usd_add_reference.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +import uuid + +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.pipeline.load import get_representation_path_from_context +from openpype.hosts.maya.api.usdlib import ( + containerise_prim, + iter_ufe_usd_selection +) + +from maya import cmds +import mayaUsd + + +class MayaUsdProxyReferenceUsd(load.LoaderPlugin): + """Add a USD Reference into mayaUsdProxyShape + + TODO: It'd be much easier if this loader would be capable of returning the + available containers in the scene based on the AYON URLs inside a USD + stage. That way we could potentially avoid the need the custom + identifier, stay closer to USD native data and rely solely on the + AYON:asset=blue,subset=modelMain,version=1 url + + """ + + families = ["model", "usd", "pointcache", "animation"] + representations = ["usd", "usda", "usdc", "usdz", "abc"] + + label = "USD Add Reference" + order = -999 + icon = "code-fork" + color = "orange" + + identifier_key = "openpype_identifier" + + def load(self, context, name=None, namespace=None, options=None): + + from pxr import Sdf + + selection = list(iter_ufe_usd_selection()) + if not selection: + # Create a maya USD proxy with /root prim and add the reference + import mayaUsd_createStageWithNewLayer + from pxr import UsdGeom + + # Make sure we can load the plugin + cmds.loadPlugin("mayaUsdPlugin", quiet=True) + + shape = mayaUsd_createStageWithNewLayer.createStageWithNewLayer() + stage = mayaUsd.ufe.getStage('|world' + shape) + prim_path = "/root" + UsdGeom.Xform.Define(stage, prim_path) + root_layer = stage.GetRootLayer() + root_layer.defaultPrim = prim_path + prim = stage.GetPrimAtPath(prim_path) + else: + assert len(selection) == 1, "Select only one PRIM please" + ufe_path = selection[0] + prim = mayaUsd.ufe.ufePathToPrim(ufe_path) + + if not prim: + raise RuntimeError("Invalid primitive") + + # Define reference using Sdf.Reference so we can directly set custom + # data for it + path = get_representation_path_from_context(context) + + references = prim.GetReferences() + + # Add unique containerised data to the reference + identifier = str(prim.GetPath()) + ":" + str(uuid.uuid4()) + identifier_data = {self.identifier_key: identifier} + reference = Sdf.Reference(assetPath=path, + customData=identifier_data) + + success = references.AddReference(reference) + if not success: + raise RuntimeError("Failed to add reference") + + # TODO: We should actually just use the data on the `Sdf.Reference` + # instead of on the USDPrim + container = containerise_prim( + prim, + name=name, + namespace=namespace or "", + context=context, + loader=self.__class__.__name__ + ) + + return container + + def update(self, container, representation): + # type: (dict, dict) -> None + """Update container with 
specified representation.""" + + from pxr import Sdf + + prim = container["prim"] + path = get_representation_path(representation) + for references, index in self._get_prim_references(prim): + reference = references[index] + new_reference = Sdf.Reference( + assetPath=path, + customData=reference.customData, + layerOffset=reference.layerOffset, + primPath=reference.primPath + ) + references[index] = new_reference + + # Update representation id + # TODO: Do this in prim spec where we update reference path? + # TODO: Store this in the Sdf.Reference CustomData instead? + prim.SetCustomDataByKey( + "openpype:representation", str(representation["_id"]) + ) + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + # type: (dict) -> None + """Remove loaded container.""" + prim = container["prim"] + + # Pop the references from the prepended items list + related_references = reversed(list(self._get_prim_references(prim))) + for references, index in related_references: + references.remove(references[index]) + + prim.ClearCustomDataByKey("openpype") + + def _get_prim_references(self, prim): + + # Get a list of all prepended references + for prim_spec in prim.GetPrimStack(): + if not prim_spec: + continue + + if not prim_spec.hasReferences: + continue + + prepended_items = prim_spec.referenceList.prependedItems + for index, _reference in enumerate(prepended_items): + # Override the matching reference identifier + # TODO: Make sure we only return the correct reference + yield prepended_items, index diff --git a/openpype/hosts/maya/plugins/load/show_usdview.py b/openpype/hosts/maya/plugins/load/show_usdview.py new file mode 100644 index 00000000000..ff5f0df5dc4 --- /dev/null +++ b/openpype/hosts/maya/plugins/load/show_usdview.py @@ -0,0 +1,60 @@ +import os +import subprocess + +from openpype.pipeline import load + +MAYA_LOCATION = os.environ['MAYA_LOCATION'] +MAYAPY = os.path.join(MAYA_LOCATION, 'bin', 'mayapy') +USD_LOCATION = os.getenv("USD_LOCATION") +USDVIEW = os.path.join(USD_LOCATION, 'bin', 'usdview') + + +class ShowInUsdview(load.LoaderPlugin): + """Open USD file in usdview + + This requires `OpenGL` python package to be available to Maya. For more + details also see the "Using Usdiew in Maya" section in maya documentation. + + See Also: + https://help.autodesk.com/view/MAYAUL/2024/ENU/?guid=GUID-C452B331-7C4D-4B58-A1BA-D03775F266A8 # noqa + + """ + + label = "Show in usdview" + representations = ["*"] + families = ["*"] + extensions = {"usd", "usda", "usdlc", "usdnc", "abc"} + order = 15 + + icon = "code-fork" + color = "white" + + # Enable if usd location is defined (which maya usd plugin does) + enabled = USD_LOCATION and os.path.isdir(USD_LOCATION) + + def load(self, context, name=None, namespace=None, data=None): + + try: + import OpenGL # noqa + except ImportError: + self.log.error( + "usdview for maya requires to have `OpenGL` python library " + "available. 
Please make sure to install it.\n" + "The `OpenGL` library can be installed for maya using:\n" + "mayapy -m pip install PyOpenGL==3.1.0" + ) + filepath = self.filepath_from_context(context) + filepath = os.path.normpath(filepath) + filepath = filepath.replace("\\", "/") + + if not os.path.exists(filepath): + self.log.error("File does not exist: %s" % filepath) + return + + self.log.info("Start maya variant of usdview...") + CREATE_NO_WINDOW = 0x08000000 + subprocess.Popen([MAYAPY, USDVIEW, filepath], + creationflags=CREATE_NO_WINDOW, + # Set current working directory so that browsing + # from usdview itself starts from that folder too + cwd=os.path.dirname(filepath)) diff --git a/openpype/hosts/maya/plugins/publish/collect_instances.py b/openpype/hosts/maya/plugins/publish/collect_instances.py index 5058da3d01c..08ccba903fd 100644 --- a/openpype/hosts/maya/plugins/publish/collect_instances.py +++ b/openpype/hosts/maya/plugins/publish/collect_instances.py @@ -28,8 +28,6 @@ class CollectNewInstances(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder hosts = ["maya"] - valid_empty_families = {"workfile", "renderlayer"} - def process(self, instance): objset = instance.data.get("instance_node") @@ -60,8 +58,6 @@ def process(self, instance): instance[:] = members_hierarchy - elif instance.data["family"] not in self.valid_empty_families: - self.log.warning("Empty instance: \"%s\" " % objset) # Store the exact members of the object set instance.data["setMembers"] = members diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_usd.py b/openpype/hosts/maya/plugins/publish/extract_maya_usd.py index 8c32ac1e395..ece96551be6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_maya_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_maya_usd.py @@ -4,12 +4,28 @@ import contextlib from maya import cmds +import maya.api.OpenMaya as om import pyblish.api from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection +def get_node_hash(node): + """Return integer MObjectHandle hash code. + + Arguments: + node (str): Maya node path. + + Returns: + int: MObjectHandle.hashCode() + + """ + sel = om.MSelectionList() + sel.add(node) + return om.MObjectHandle(sel.getDependNode(0)).hashCode() + + @contextlib.contextmanager def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None): """Define attributes for the given nodes that should be exported. @@ -44,8 +60,6 @@ def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None): # todo: this might be better done with a custom export chaser # see `chaser` argument for `mayaUSDExport` - import maya.api.OpenMaya as om - if not attrs and not attr_prefixes: # context manager does nothing yield @@ -61,16 +75,23 @@ def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None): usd_json_attr = "USD_UserExportedAttributesJson" strings = attrs + ["{}*".format(prefix) for prefix in attr_prefixes] context_state = {} + + # Keep track of the processed nodes as a node might appear more than once + # e.g. when there are instances. 
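The MObjectHandle hash code is what makes this de-duplication work: multiple instance paths to the same shape resolve to the same hash. A standalone sketch reusing the `get_node_hash` helper defined above:

def iter_unique_nodes(nodes):
    """Yield one path per underlying Maya node, skipping instanced duplicates."""
    seen = set()
    for node in nodes:
        code = get_node_hash(node)
        if code not in seen:
            seen.add(code)
            yield node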
+ processed = set() for node in set(nodes): node_attrs = cmds.listAttr(node, st=strings) if not node_attrs: # Nothing to do for this node continue + hash_code = get_node_hash(node) + if hash_code in processed: + continue + node_attr_data = {} for node_attr in set(node_attrs): node_attr_data[node_attr] = mapping.get(node_attr, {}) - if cmds.attributeQuery(usd_json_attr, node=node, exists=True): existing_node_attr_value = cmds.getAttr( "{}.{}".format(node, usd_json_attr) @@ -82,6 +103,7 @@ def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None): existing_node_attr_data = json.loads(existing_node_attr_value) node_attr_data.update(existing_node_attr_data) + processed.add(hash_code) context_state[node] = json.dumps(node_attr_data) sel = om.MSelectionList() @@ -148,6 +170,8 @@ def options(self): "exportRefsAsInstanceable": bool, "eulerFilter": bool, "renderableOnly": bool, + "convertMaterialsTo": str, + "shadingMode": (str, None), # optional str "jobContext": (list, None) # optional list # "worldspace": bool, } @@ -170,6 +194,8 @@ def default_options(self): "exportRefsAsInstanceable": False, "eulerFilter": True, "renderableOnly": False, + "shadingMode": "none", + "convertMaterialsTo": "none", "jobContext": None # "worldspace": False } diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py b/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py new file mode 100644 index 00000000000..914874ede95 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py @@ -0,0 +1,63 @@ +import os + +from maya import cmds +from openpype.pipeline import publish + + +class ExtractMayaUsdLayer(publish.Extractor): + """Extractor for Maya USD Layer from `mayaUsdProxyShape` + + Exports a single Sdf.Layer from a mayaUsdPlugin `mayaUsdProxyShape`. + These layers are the same managed via Maya's Windows > USD Layer Editor. 
+ + """ + + label = "Extract Maya USD Layer" + hosts = ["maya"] + families = ["mayaUsdLayer"] + + def process(self, instance): + + import mayaUsd + + # Load plugin first + cmds.loadPlugin("mayaUsdPlugin", quiet=True) + + data = instance.data["stageLayerIdentifier"] + proxy, layer_identifier = data.split(">", 1) + + # TODO: The stage and layer should actually be retrieved during + # Collecting so that they can be validated upon and potentially that + # any 'child layers' can potentially be recursively exported along + stage = mayaUsd.ufe.getStage('|world' + proxy) + layers = stage.GetLayerStack(includeSessionLayers=False) + layer = next( + layer for layer in layers if layer.identifier == layer_identifier + ) + + # Define output file path + staging_dir = self.staging_dir(instance) + file_name = "{0}.usd".format(instance.name) + file_path = os.path.join(staging_dir, file_name) + file_path = file_path.replace('\\', '/') + + self.log.debug("Exporting USD layer to: {}".format(file_path)) + layer.Export(file_path, args={ + "format": instance.data.get("defaultUSDFormat", "usdc") + }) + + # TODO: We might want to remap certain paths - to do so we could take + # the SdfLayer and transfer its contents into a anonymous SdfLayer + # then we can use the copy to alter it in memory to our like before + # writing out + + representation = { + 'name': "usd", + 'ext': "usd", + 'files': file_name, + 'stagingDir': staging_dir + } + instance.data.setdefault("representations", []).append(representation) + self.log.debug( + "Extracted instance {} to {}".format(instance.name, file_path) + ) diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py index 7234f5a0258..a3d65828715 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py @@ -25,8 +25,15 @@ def get_invalid(cls, instance): def process(self, instance): # Allow renderlayer, rendersetup and workfile to be empty - skip_families = {"workfile", "renderlayer", "rendersetup"} - if instance.data.get("family") in skip_families: + skip_families = {"workfile", + "renderlayer", + "rendersetup", + "mayaUsdLayer", + "usdLayer", + "usdAsset"} + families = {instance.data.get("family")} + families.update(instance.data.get("families", [])) + if families.intersection(skip_families): return invalid = self.get_invalid(instance) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index c166feb3a6b..442fcf97f3d 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -1,6 +1,8 @@ +import dataclasses import os -import re +import copy import logging +from urllib.parse import urlparse, parse_qs try: from pxr import Usd, UsdGeom, Sdf, Kind @@ -8,84 +10,236 @@ # Allow to fall back on Multiverse 6.3.0+ pxr usd library from mvpxr import Usd, UsdGeom, Sdf, Kind -from openpype.client import get_project, get_asset_by_name -from openpype.pipeline import Anatomy, get_current_project_name +from openpype.client import ( + get_asset_by_name, + get_subset_by_name, + get_representation_by_name, + get_hero_version_by_subset_id, + get_version_by_name, + get_last_version_by_subset_id +) +from openpype.pipeline import ( + get_representation_path +) log = logging.getLogger(__name__) -# The predefined steps order used for bootstrapping USD Shots and Assets. -# These are ordered in order from strongest to weakest opinions, like in USD. 
-PIPELINE = {
-    "shot": [
-        "usdLighting",
-        "usdFx",
-        "usdSimulation",
-        "usdAnimation",
-        "usdLayout",
-    ],
-    "asset": ["usdShade", "usdModel"],
-}
+@dataclasses.dataclass
+class Layer:
+    layer: Sdf.Layer
+    path: str
+    # Allow a layer to be anchored to another one so that when the layer is
+    # exported it writes itself out relative to its anchor
+    anchor: 'Layer' = None
+
+    @property
+    def identifier(self):
+        return self.layer.identifier
+
+    def get_full_path(self):
+        """Return the full path relative to the anchor layer"""
+        if not os.path.isabs(self.path) and self.anchor:
+            anchor_path = self.anchor.get_full_path()
+            root = os.path.dirname(anchor_path)
+            return os.path.normpath(os.path.join(root, self.path))
+        else:
+            return self.path
+
+    def export(self, path=None, args=None):
+        """Save the layer"""
+        if path is None:
+            path = self.get_full_path()
 
-def create_asset(
-    filepath, asset_name, reference_layers, kind=Kind.Tokens.component
+        if args is None:
+            args = self.layer.GetFileFormatArguments()
+
+        self.layer.Export(path, args=args)
+
+    @classmethod
+    def create_anonymous(cls, path, tag="LOP", anchor=None):
+        sdf_layer = Sdf.Layer.CreateAnonymous(tag)
+        # `tag` only names the anonymous Sdf.Layer; it is not a field of
+        # this dataclass so it must not be passed to the constructor
+        return cls(layer=sdf_layer, path=path, anchor=anchor)
+
+
+def setup_asset_layer(
+    layer,
+    asset_name,
+    reference_layers=None,
+    kind=Kind.Tokens.component,
+    define_class=True,
+    force_add_payload=False,
+    set_payload_path=False
 ):
     """
-    Creates an asset file that consists of a top level layer and sublayers for
-    shading and geometry.
+    Adds an asset prim to the layer with the `reference_layers` added as
+    references for e.g. geometry and shading.
+
+    The referenced layers will be moved into a separate `./payload.usd` file
+    that the asset file uses to allow deferred loading of the heavier
+    geometrical data. An example would be:
+
+    asset.usd   <-- out filepath
+      payload.usd  <-- always automatically added in-between
+        look.usd   <-- reference layer 0 from `reference_layers` argument
+        model.usd  <-- reference layer 1 from `reference_layers` argument
+
+    If `define_class` is enabled then a `/__class__/{asset_name}` class
+    definition will be created that the root asset inherits from.
+
+    Examples:
+        >>> create_asset("/path/to/asset.usd",
+        >>>              asset_name="test",
+        >>>              reference_layers=["./model.usd", "./look.usd"])
+
+    Returns:
+        List[Layer]: The created `Layer` objects with their preferred
+            output save paths.
 
     Args:
-        filepath (str): Filepath where the asset.usd file will be saved.
+        layer (Sdf.Layer): Layer to set up the asset structure for.
+        asset_name (str): The name for the Asset identifier and default prim.
         reference_layers (list): USD Files to reference in the asset.
             Note that the bottom layer (first file, like a model) would
             be last in the list. The strongest layer will be the first
             index.
         kind (pxr.Kind): A USD Kind for the root asset.
+        define_class (bool): Define a `/__class__/{asset_name}` class which
+            the root asset prim will inherit from.
+        force_add_payload (bool): Generate the payload layer even if no
+            reference paths are set, thus generating an empty layer.
+        set_payload_path (bool): Whether to directly set the payload asset
+            path to `./payload.usd` or not. Defaults to False.
""" - # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example - - log.info("Creating asset at %s", filepath) - - # Make the layer ascii - good for readability, plus the file is small - root_layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"}) - stage = Usd.Stage.Open(root_layer) + # Define root prim for the asset and make it the default for the stage. + prim_name = asset_name + + if define_class: + class_prim = Sdf.PrimSpec( + layer.pseudoRoot, + "__class__", + Sdf.SpecifierClass, + ) + Sdf.PrimSpec( + class_prim, + prim_name, + Sdf.SpecifierClass, + ) - # Define a prim for the asset and make it the default for the stage. - asset_prim = UsdGeom.Xform.Define(stage, "/%s" % asset_name).GetPrim() - stage.SetDefaultPrim(asset_prim) + asset_prim = Sdf.PrimSpec( + layer.pseudoRoot, + prim_name, + Sdf.SpecifierDef, + "Xform" + ) - # Let viewing applications know how to orient a free camera properly - UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + if define_class: + asset_prim.inheritPathList.prependedItems[:] = [ + "/__class__/{}".format(prim_name) + ] + # Define Kind # Usually we will "loft up" the kind authored into the exported geometry # layer rather than re-stamping here; we'll leave that for a later # tutorial, and just be explicit here. - model = Usd.ModelAPI(asset_prim) - if kind: - model.SetKind(kind) + asset_prim.kind = kind + + # Set asset info + asset_prim.assetInfo["name"] = asset_name + asset_prim.assetInfo["identifier"] = "%s/%s.usd" % (asset_name, asset_name) + + # asset.assetInfo["version"] = asset_version + set_layer_defaults(layer, default_prim=asset_name) - model.SetAssetName(asset_name) - model.SetAssetIdentifier("%s/%s.usd" % (asset_name, asset_name)) + created_layers = [] # Add references to the asset prim - references = asset_prim.GetReferences() - for reference_filepath in reference_layers: - references.AddReference(reference_filepath) + if force_add_payload or reference_layers: + # Create a relative payload file to filepath through which we sublayer + # the heavier payloads + # Prefix with `LOP` just so so that if Houdini ROP were to save + # the nodes it's capable of exporting with explicit save path + payload_layer = Sdf.Layer.CreateAnonymous("LOP", + args={"format": "usda"}) + set_layer_defaults(payload_layer, default_prim=asset_name) + created_layers.append(Layer(layer=payload_layer, + path="./payload.usd")) + + # Add payload + if set_payload_path: + payload_identifier = "./payload.usd" + else: + payload_identifier = payload_layer.identifier + + asset_prim.payloadList.prependedItems[:] = [ + Sdf.Payload(assetPath=payload_identifier) + ] + + # Add sublayers to the payload layer + # Note: Sublayering is tricky because it requires that the sublayers + # actually define the path at defaultPrim otherwise the payload + # reference will not find the defaultPrim and turn up empty. + if reference_layers: + for ref_layer in reference_layers: + payload_layer.subLayerPaths.append(ref_layer) + + return created_layers + + +def create_asset( + filepath, + asset_name, + reference_layers=None, + kind=Kind.Tokens.component, + define_class=True +): + """Creates and saves a prepared asset stage layer. + + Creates an asset file that consists of a top level asset prim, asset info + and references in the provided `reference_layers`. 
+ + Returns: + list: Created layers + + """ + # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example + + sdf_layer = Sdf.Layer.CreateAnonymous() + layer = Layer(layer=sdf_layer, path=filepath) + + created_layers = setup_asset_layer( + layer=sdf_layer, + asset_name=asset_name, + reference_layers=reference_layers, + kind=kind, + define_class=define_class, + set_payload_path=True + ) + for created_layer in created_layers: + created_layer.anchor = layer + created_layer.export() + + # Make the layer ascii - good for readability, plus the file is small + log.debug("Creating asset at %s", filepath) + layer.export(args={"format": "usda"}) - stage.GetRootLayer().Save() + return [layer] + created_layers def create_shot(filepath, layers, create_layers=False): """Create a shot with separate layers for departments. + Examples: + >>> create_shot("/path/to/shot.usd", + >>> layers=["lighting.usd", "fx.usd", "animation.usd"]) + "/path/to/shot.usd" + Args: filepath (str): Filepath where the asset.usd file will be saved. - layers (str): When provided this will be added verbatim in the + layers (list): When provided this will be added verbatim in the subLayerPaths layers. When the provided layer paths do not exist - they are generated using Sdf.Layer.CreateNew + they are generated using Sdf.Layer.CreateNew create_layers (bool): Whether to create the stub layers on disk if they do not exist yet. @@ -94,10 +248,9 @@ def create_shot(filepath, layers, create_layers=False): """ # Also see create_shot.py in PixarAnimationStudios/USD endToEnd example + root_layer = Sdf.Layer.CreateAnonymous() - stage = Usd.Stage.CreateNew(filepath) - log.info("Creating shot at %s" % filepath) - + created_layers = [root_layer] for layer_path in layers: if create_layers and not os.path.exists(layer_path): # We use the Sdf API here to quickly create layers. Also, we're @@ -107,255 +260,651 @@ def create_shot(filepath, layers, create_layers=False): if not os.path.exists(layer_folder): os.makedirs(layer_folder) - Sdf.Layer.CreateNew(layer_path) + new_layer = Sdf.Layer.CreateNew(layer_path) + created_layers.append(new_layer) + + root_layer.subLayerPaths.append(layer_path) + + set_layer_defaults(root_layer) + log.debug("Creating shot at %s" % filepath) + root_layer.Export(filepath, args={"format": "usda"}) + + return created_layers + - stage.GetRootLayer().subLayerPaths.append(layer_path) +def add_ordered_sublayer(layer, contribution_path, layer_id, order=None, + add_sdf_arguments_metadata=True): + """Add sublayer paths in the Sdf.Layer at given "orders" - # Lets viewing applications know how to orient a free camera properly - UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) - stage.GetRootLayer().Save() + USD does not provide a way to set metadata per sublayer entry, but we can + 'sneak it in' by adding it as part of the file url after :SDF_FORMAT_ARGS: + There they will then just be unused args that we can parse later again + to access our data. - return filepath + A higher order will appear earlier in the subLayerPaths as a stronger + opinion. An unordered layer (`order=None`) will be stronger than any + ordered opinion and thus will be inserted at the start of the list. + Args: + layer (Sdf.Layer): Layer to add sublayers in. + contribution_path (str): Path/URI to add. + layer_id (str): Token that if found for an existing layer it will + replace that layer. + order (Any[int, None]): Order to place the contribution in + the sublayers. 
+            When `None` no ordering is considered, nor will ordering
+            metadata be written if `add_sdf_arguments_metadata` is False.
+        add_sdf_arguments_metadata (bool): Add metadata into the filepath
+            to store the `layer_id` and `order` so ordering can be
+            maintained in the future as intended.
+
+    Returns:
+        str: The resulting contribution path (which may include the
+            sdf format args metadata if enabled)
+
+    """
+
+    # Add the order with the contribution path so that for future
+    # contributions we can again use it to magically fit into the
+    # ordering. We put this in the path because sublayer paths do
+    # not allow customData to be stored.
+    def _format_path(path, layer_id, order):
+        # TODO: Avoid this hack to store 'order' and 'layer' metadata
+        #   for sublayers; in USD sublayers can't hold customdata
+        if not add_sdf_arguments_metadata:
+            return path
+        parts = [
+            path,
+            # Special separator for SDF Format Args used in USD
+            "SDF_FORMAT_ARGS",
+            "layer_id={}".format(layer_id)
+        ]
+        if order is not None:
+            parts.append("order={}".format(order))
+        return ":".join(parts)
+
+    # If the layer was already in the layers, then replace it
+    for index, existing_path in enumerate(layer.subLayerPaths):
+        args = get_sdf_format_args(existing_path)
+        existing_layer = args.get("layer_id")
+        if existing_layer == layer_id:
+            # Put it in the same position where it was before when swapping
+            # it with the original, also take over its order metadata
+            order = args.get("order")
+            if order is not None:
+                order = int(order)
+            else:
+                order = None
+            contribution_path = _format_path(contribution_path,
+                                             order=order,
+                                             layer_id=layer_id)
+            log.debug(
+                f"Replacing existing layer: {layer.subLayerPaths[index]} "
+                f"-> {contribution_path}"
+            )
+            layer.subLayerPaths[index] = contribution_path
+            return contribution_path
+
+    contribution_path = _format_path(contribution_path,
+                                     order=order,
+                                     layer_id=layer_id)
+
+    # If an order is defined and other layers are ordered, then place it
+    # before the first existing layer with a lower order
+    if order is not None:
+        for index, existing_path in enumerate(layer.subLayerPaths):
+            args = get_sdf_format_args(existing_path)
+            existing_order = args.get("order")
+            if existing_order is not None and int(existing_order) < order:
+                log.debug(
+                    f"Inserting new layer at {index}: {contribution_path}"
+                )
+                layer.subLayerPaths.insert(index, contribution_path)
+                return contribution_path
+        # Weakest ordered opinion
+        layer.subLayerPaths.append(contribution_path)
+        return contribution_path
+
+    # An unordered contribution is the strongest opinion, so prepend the
+    # sublayer at the start of the list
+    log.debug(f"Prepending new layer: {contribution_path}")
+    layer.subLayerPaths.insert(0, contribution_path)
+    return contribution_path
+
+
+def add_variant_references_to_layer(
+    variants,
+    variantset,
+    default_variant=None,
+    variant_prim="/root",
+    reference_prim=None,
+    set_default_variant=True,
+    as_payload=False,
+    skip_variant_on_single_file=False,
+    layer=None
+):
+    """Add or set a prim's variants to reference specified paths in the layer.
+
+    Note:
+        This does not clear any of the other opinions than replacing
+        `prim.referenceList.prependedItems` with the new reference.
+        If `as_payload=True` then this only does it for payloads and
+        leaves references intact.
+ + Note: + If `skip_variant_on_single_file=True` it does *not* check if any + other variants do exist; it only checks whether you are currently + adding more than one since it'd be hard to find out whether previously + this was also skipped and should now if you're adding a new one + suddenly also be its original 'variant'. As such it's recommended to + keep this disabled unless you know you're not updating the file later + into the same variant set. + + Examples: + >>> layer = add_variant_references_to_layer("model.usd", + >>> variants=[ + >>> ("main", "main.usd"), + >>> ("damaged", "damaged.usd"), + >>> ("twisted", "twisted.usd") + >>> ], + >>> variantset="model") + >>> layer.Export("model.usd", args={"format": "usda"}) + + Arguments: + variants (List[List[str, str]): List of two-tuples of variant name to + the filepath that should be referenced in for that variant. + variantset (str): Name of the variant set + default_variant (str): Default variant to set. If not provided + the first variant will be used. + variant_prim (str): Variant prim? + reference_prim (str): Path to the reference prim where to add the + references and variant sets. + set_default_variant (bool): Whether to set the default variant. + When False no default variant will be set, even if a value + was provided to `default_variant` + as_payload (bool): When enabled, instead of referencing use payloads + skip_variant_on_single_file (bool): If this is enabled and only + a single variant is provided then do not create the variant set + but just reference that single file. + layer (Sdf.Layer): When provided operate on this layer, otherwise + create an anonymous layer in memory. - For each of the variation paths it will payload the path and set its - relevant variation name. + Returns: + Usd.Stage: The saved usd stage """ + if layer is None: + layer = Sdf.Layer.CreateAnonymous() + set_layer_defaults(layer, default_prim=variant_prim.strip("/")) + + prim_path_to_get_variants = Sdf.Path(variant_prim) + root_prim = get_or_define_prim_spec(layer, variant_prim, "Xform") - project_name = get_current_project_name() - asset_doc = get_asset_by_name(project_name, asset) - assert asset_doc, "Asset not found: %s" % asset + # TODO: Define why there's a need for separate variant_prim and + # reference_prim attribute. When should they differ? Does it even work? 
+ if not reference_prim: + reference_prim = root_prim + else: + reference_prim = get_or_define_prim_spec(layer, reference_prim, + "Xform") + + assert variants, "Must have variants, got: %s" % variants - variants = [] - for subset in variant_subsets: - prefix = "usdModel" - if subset.startswith(prefix): - # Strip off `usdModel_` - variant = subset[len(prefix):] + if skip_variant_on_single_file and len(variants) == 1: + # Reference directly, no variants + variant_path = variants[0][1] + if as_payload: + # Payload + reference_prim.payloadList.prependedItems.append( + Sdf.Payload(variant_path) + ) else: - raise ValueError( - "Model subsets must start " "with usdModel: %s" % subset + # Reference + reference_prim.referenceList.prependedItems.append( + Sdf.Reference(variant_path) ) - path = get_usd_master_path( - asset=asset_doc, subset=subset, representation="usd" - ) - variants.append((variant, path)) - - stage = _create_variants_file( - filename, - variants=variants, - variantset="model", - variant_prim="/root", - reference_prim="/root/geo", - as_payload=True, - ) + log.debug("Creating without variants due to single file only.") + log.debug("Path: %s", variant_path) - UsdGeom.SetStageMetersPerUnit(stage, 1) - UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + else: + # Variants + for variant, variant_filepath in variants: + if default_variant is None: + default_variant = variant - # modelAPI = Usd.ModelAPI(root_prim) - # modelAPI.SetKind(Kind.Tokens.component) + set_variant_reference(layer, + prim_path=prim_path_to_get_variants, + variant_selections=[[variantset, variant]], + path=variant_filepath, + as_payload=as_payload) - # See http://openusd.org/docs/api/class_usd_model_a_p_i.html#details - # for more on assetInfo - # modelAPI.SetAssetName(asset) - # modelAPI.SetAssetIdentifier(asset) + if set_default_variant and default_variant is not None: + # Set default variant selection + root_prim.variantSelections[variantset] = default_variant - stage.GetRootLayer().Save() + return layer -def create_shade(filename, asset, variant_subsets): - """Create a master USD shade file for an asset. +def set_layer_defaults(layer, + up_axis=UsdGeom.Tokens.y, + meters_per_unit=1.0, + default_prim=None): + """Set some default metadata for the SdfLayer. - For each available model variation this should generate a reference - to a `usdShade_{modelVariant}` subset. + Arguments: + layer (Sdf.Layer): The layer to set default for via Sdf API. 
+        up_axis (UsdGeom.Token): Which axis is the up-axis
+        meters_per_unit (float): Meters per unit
+        default_prim (Optional[str]): Default prim name
+
+    """
+    # Set default prim
+    if default_prim is not None:
+        layer.defaultPrim = default_prim
+
+    # Let viewing applications know how to orient a free camera properly
+    # Similar to: UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)
+    layer.pseudoRoot.SetInfo(UsdGeom.Tokens.upAxis, up_axis)
+
+    # Set meters per unit
+    layer.pseudoRoot.SetInfo(UsdGeom.Tokens.metersPerUnit,
+                             float(meters_per_unit))
+
+
+def get_or_define_prim_spec(layer, prim_path, type_name):
+    """Get or create a PrimSpec in the layer.
+
+    Note:
+        This creates an Sdf.PrimSpec with Sdf.SpecifierDef, but if the
+        PrimSpec already exists this will not force it to be an
+        Sdf.SpecifierDef and it may remain what it was,
+        e.g. Sdf.SpecifierOver.
+
+    Args:
+        layer (Sdf.Layer): The layer to create it in.
+        prim_path (Any[str, Sdf.Path]): Prim path to create.
+        type_name (str): Type name for the PrimSpec.
+            This will only be set if the prim does not exist in the layer
+            yet. It does not update the type of an existing prim.
+
+    Returns:
+        Sdf.PrimSpec: The PrimSpec in the layer for the given prim path.
+
+    """
+    prim_spec = layer.GetPrimAtPath(prim_path)
+    if prim_spec:
+        return prim_spec
+
+    prim_spec = Sdf.CreatePrimInLayer(layer, prim_path)
+    prim_spec.specifier = Sdf.SpecifierDef
+    prim_spec.typeName = type_name
+    return prim_spec
+
+
+def variant_nested_prim_path(prim_path, variant_selections):
+    """Return the Sdf.Path for a nested variant selection at prim path.
+
+    Examples:
+        >>> prim_path = Sdf.Path("/asset")
+        >>> variant_prim_path = variant_nested_prim_path(
+        >>>     prim_path,
+        >>>     variant_selections=[["model", "main"], ["look", "main"]]
+        >>> )
+
+    Args:
+        prim_path (Sdf.Path): The prim path to create the spec in
+        variant_selections (List[List[str, str]]): A list of variant set
+            names and variant names to get the prim spec in.
-    stage.GetRootLayer().Save()
+    Returns:
+        Sdf.Path: The variant prim path.

+    """
+    variant_prim_path = Sdf.Path(prim_path)
+    for variant_set_name, variant_name in variant_selections:
+        variant_prim_path = variant_prim_path.AppendVariantSelection(
+            variant_set_name, variant_name)
+    return variant_prim_path


-def _create_variants_file(
-    filename,
-    variants,
-    variantset,
-    default_variant=None,
-    variant_prim="/root",
-    reference_prim=None,
-    set_default_variant=True,
-    as_payload=False,
-    skip_variant_on_single_file=True,
-):
-    root_layer = Sdf.Layer.CreateNew(filename, args={"format": "usda"})
-    stage = Usd.Stage.Open(root_layer)
+def set_variant_reference(sdf_layer, prim_path, variant_selections, path,
+                          as_payload=False,
+                          append=False):
+    """Get or define variant selection at prim path and add a reference.

-    root_prim = stage.DefinePrim(variant_prim)
-    stage.SetDefaultPrim(root_prim)
+    If the variant prim already exists, its prepended reference (or payload)
+    is replaced with a single entry for `path` unless `append` is enabled;
+    the previous opinion is overridden.

-    def _reference(path):
-        """Reference/Payload path depending on function arguments"""
+    Args:
+        sdf_layer (Sdf.Layer): Layer to operate in.
+        prim_path (Union[str, Sdf.Path]): Prim path to add the variant to.
+        variant_selections (List[List[str]]): A list of variant set name
+            and variant name pairs to get the prim spec in.
+        path (str): Path to reference or payload.
+        as_payload (bool): When enabled it will generate a payload instead of
+            a reference. Defaults to False.
+        append (bool): When enabled the reference or payload is appended to
+            the prepended items instead of replacing them. Defaults to False.

-        if reference_prim:
-            prim = stage.DefinePrim(reference_prim)
-        else:
-            prim = root_prim
+    Returns:
+        Sdf.PrimSpec: The prim spec for the prim path at the given
+            variant selection.
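+
+    Example:
+        >>> # Minimal sketch with an in-memory layer and an illustrative
+        >>> # filepath; returns the variant's Sdf.PrimSpec.
+        >>> layer = Sdf.Layer.CreateAnonymous()
+        >>> set_variant_reference(
+        ...     layer, "/asset",
+        ...     variant_selections=[["model", "main"]],
+        ...     path="./modelMain.usd"
+        ... )  # doctest: +SKIP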
-        if as_payload:
-            # Payload
-            prim.GetPayloads().AddPayload(Sdf.Payload(path))
-        else:
-            # Reference
-            prim.GetReferences().AddReference(Sdf.Reference(path))
+    """
+    prim_path = Sdf.Path(prim_path)
+    # TODO: inherit type from outside of variants if it has it
+    get_or_define_prim_spec(sdf_layer, prim_path, "Xform")
+    variant_prim_path = variant_nested_prim_path(prim_path, variant_selections)
+    variant_prim = get_or_define_prim_spec(sdf_layer,
+                                           variant_prim_path,
+                                           "Xform")
+    # Add or replace the prepended reference or payload
+    if as_payload:
+        # Payload
+        if append:
+            variant_prim.payloadList.prependedItems.append(
+                Sdf.Payload(assetPath=path)
+            )
+        else:
+            variant_prim.payloadList.prependedItems[:] = [
+                Sdf.Payload(assetPath=path)
+            ]
+    else:
+        # Reference
+        if append:
+            variant_prim.referenceList.prependedItems.append(
+                Sdf.Reference(assetPath=path)
+            )
+        else:
+            variant_prim.referenceList.prependedItems[:] = [
+                Sdf.Reference(assetPath=path)
+            ]

-    assert variants, "Must have variants, got: %s" % variants
+    return variant_prim

-    log.info(filename)

-    if skip_variant_on_single_file and len(variants) == 1:
-        # Reference directly, no variants
-        variant_path = variants[0][1]
-        _reference(variant_path)
+def get_sdf_format_args(path):
+    """Return SDF_FORMAT_ARGS parsed to a `dict`."""
+    if ":SDF_FORMAT_ARGS:" not in path:
+        return {}

-        log.info("Non-variants..")
-        log.info("Path: %s" % variant_path)
+    format_args_str = path.split(":SDF_FORMAT_ARGS:", 1)[-1]
+    args = {}
+    for arg_str in format_args_str.split(":"):
+        if "=" not in arg_str:
+            # Ill-formed argument, expected `key=value`
+            continue

-    else:
-        # Variants
-        append = Usd.ListPositionBackOfAppendList
-        variant_set = root_prim.GetVariantSets().AddVariantSet(
-            variantset, append
-        )
+        key, value = arg_str.split("=", 1)
+        args[key] = value
+    return args

-        for variant, variant_path in variants:

-            if default_variant is None:
-                default_variant = variant

-            variant_set.AddVariant(variant, append)
-            variant_set.SetVariantSelection(variant)
-            with variant_set.GetVariantEditContext():
-                _reference(variant_path)
+# TODO: Functions below are not necessarily USD functions and hence should not
+# be in this file. Refactor by moving them elsewhere.
+# region representations and AYON URIs

-            log.info("Variants..")
-            log.info("Variant: %s" % variant)
-            log.info("Path: %s" % variant_path)

+def get_representation_by_names(
+    project_name,
+    asset_name,
+    subset_name,
+    version_name,
+    representation_name,
+):
+    """Get representation entity for asset and subset.

+    If version_name is "hero" then the hero version is returned.
+    If version_name is "latest" then the latest version is returned.
+    Otherwise version_name is used as the exact integer version name.
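+
+    Example:
+        >>> # Illustrative names only; assumes these entities exist in
+        >>> # the project database.
+        >>> get_representation_by_names(
+        ...     "myProject", "hero", "modelMain", "latest", "usd"
+        ... )  # doctest: +SKIP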
-        if set_default_variant:
-            variant_set.SetVariantSelection(default_variant)
+    """

-    return stage
+    if isinstance(asset_name, dict) and "name" in asset_name:
+        # Allow explicitly passing asset document
+        asset_doc = asset_name
+    else:
+        asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"])
+    if not asset_doc:
+        return

+    if isinstance(subset_name, dict) and "name" in subset_name:
+        # Allow explicitly passing subset document
+        subset_doc = subset_name
+    else:
+        subset_doc = get_subset_by_name(project_name,
+                                        subset_name,
+                                        asset_id=asset_doc["_id"],
+                                        fields=["_id"])
+    if not subset_doc:
+        return
+
+    if version_name == "hero":
+        version = get_hero_version_by_subset_id(project_name,
+                                                subset_id=subset_doc["_id"])
+    elif version_name == "latest":
+        version = get_last_version_by_subset_id(project_name,
+                                                subset_id=subset_doc["_id"])
+    else:
+        version = get_version_by_name(project_name,
+                                      version_name,
+                                      subset_id=subset_doc["_id"])
+    if not version:
+        return

-def get_usd_master_path(asset, subset, representation):
-    """Get the filepath for a .usd file of a subset.
+    return get_representation_by_name(project_name,
+                                      representation_name,
+                                      version_id=version["_id"])

-    This will return the path to an unversioned master file generated by
-    `usd_master_file.py`.

+def get_representation_path_by_names(
+    project_name,
+    asset_name,
+    subset_name,
+    version_name,
+    representation_name):
+    """Get (latest) filepath for representation for asset and subset.
+
+    See `get_representation_by_names` for more details.
+
+    Returns:
+        str: The representation path if the representation exists.
+
+    """
+    representation = get_representation_by_names(
+        project_name,
+        asset_name,
+        subset_name,
+        version_name,
+        representation_name
+    )
+    if representation:
+        path = get_representation_path(representation)
+        return path.replace("\\", "/")
+
+
+def parse_ayon_uri(uri):
+    """Parse AYON entity URI into individual components.
+
+    URI specification:
+        ayon+entity://{project}/{asset}?product={product}
+            &version={version}
+            &representation={representation}
+    URI example:
+        ayon+entity://test/hero?product=modelMain&version=2&representation=usd
+
+    However, if the scheme is `ayon://` it will by default also resolve as
+    `ayon+entity://` on the AYON server, thus we need to support both. The
+    shorter `ayon://` is preferred for user readability.
+
+    Example:
+        >>> parse_ayon_uri(
+        ...     "ayon://test/villain?product=modelMain&version=2&representation=usd"  # noqa: E501
+        ... )
+        {'project': 'test', 'asset': 'villain',
+         'product': 'modelMain', 'version': 2,
+         'representation': 'usd'}
+        >>> parse_ayon_uri(
+        ...     "ayon+entity://project/asset?product=renderMain&version=3&representation=exr"  # noqa: E501
+        ... )
+        {'project': 'project', 'asset': 'asset',
+         'product': 'renderMain', 'version': 3,
+         'representation': 'exr'}
+
+    Returns:
+        Union[dict, None]: The individual keys of the AYON entity query, or
+            None if `uri` is not an AYON entity URI.
""" - project_name = get_current_project_name() - anatomy = Anatomy(project_name) - project_doc = get_project( + if not (uri.startswith("ayon+entity://") or uri.startswith("ayon://")): + return + + parsed = urlparse(uri) + if parsed.scheme not in {"ayon+entity", "ayon"}: + return + + result = { + "project": parsed.netloc, + "asset": parsed.path.strip("/") + } + query = parse_qs(parsed.query) + for key in ["product", "version", "representation"]: + if key in query: + result[key] = query[key][0] + + # Convert version to integer if it is a digit + version = result.get("version") + if version is not None and version.isdigit(): + result["version"] = int(version) + + return result + + +def construct_ayon_uri( project_name, - fields=["name", "data.code"] + asset_name, + product, + version, + representation_name +): + """Construct Ayon entity URI from its components + + Returns: + str: Ayon Entity URI to query entity path. + Also works with `get_representation_path_by_ayon_uri` + """ + if not (isinstance(version, int) or version in {"latest", "hero"}): + raise ValueError( + "Version must either be integer, 'latest' or 'hero'. " + "Got: {}".format(version) + ) + return ( + "ayon://{project}/{asset}?product={product}&version={version}" + "&representation={representation}".format( + project=project_name, + asset=asset_name, + product=product, + version=version, + representation=representation_name + ) ) - if isinstance(asset, dict) and "name" in asset: - # Allow explicitly passing asset document - asset_doc = asset - else: - asset_doc = get_asset_by_name(project_name, asset, fields=["name"]) - - template_obj = anatomy.templates_obj["publish"]["path"] - path = template_obj.format_strict( - { - "project": { - "name": project_name, - "code": project_doc.get("data", {}).get("code") - }, - "folder": { - "name": asset_doc["name"], - }, - "asset": asset_doc["name"], - "subset": subset, - "representation": representation, - "version": 0, # stub version zero - } + +def get_representation_path_by_ayon_uri( + uri, + context=None +): + """Return resolved path for Ayon entity URI. + + Allow resolving 'latest' paths from a publishing context's instances + as if they will exist after publishing without them being integrated yet. + + Args: + uri (str): Ayon entity URI. See `parse_ayon_uri` + context (pyblish.api.Context): Publishing context. + + Returns: + Union[str, None]: Returns the path if it could be resolved + + """ + query = parse_ayon_uri(uri) + + if context is not None and context.data["projectName"] == query["project"]: + # Search first in publish context to allow resolving latest versions + # from e.g. 
the current publish session if the context is provided
+        if query["version"] == "hero":
+            raise NotImplementedError(
+                "Hero version resolving not implemented from context"
+            )
+
+        specific_version = isinstance(query["version"], int)
+        for instance in context:
+            if instance.data.get("asset") != query["asset"]:
+                continue
+
+            if instance.data.get("subset") != query["product"]:
+                continue
+
+            # Only consider if the instance has a representation by
+            # that name
+            representations = instance.data.get("representations", [])
+            if not any(representation.get("name") == query["representation"]
+                       for representation in representations):
+                continue
+
+            return get_instance_expected_output_path(
+                instance,
+                representation_name=query["representation"],
+                version=query["version"] if specific_version else None
+            )
+
+    return get_representation_path_by_names(
+        project_name=query["project"],
+        asset_name=query["asset"],
+        subset_name=query["product"],
+        version_name=query["version"],
+        representation_name=query["representation"],
     )

-    # Remove the version folder
-    subset_folder = os.path.dirname(os.path.dirname(path))
-    master_folder = os.path.join(subset_folder, "master")
-    fname = "{0}.{1}".format(subset, representation)
-    return os.path.join(master_folder, fname).replace("\\", "/")

+def get_instance_expected_output_path(instance, representation_name,
+                                      ext=None, version=None):
+    """Return expected publish filepath for representation in instance.
+
+    This does not validate whether the instance has any representation by the
+    given name, extension and/or version.
+
+    Arguments:
+        instance (pyblish.api.Instance): Publish instance.
+        representation_name (str): Representation name.
+        ext (Optional[str]): Extension for the file, useful if the file
+            extension differs from the representation name.
+        version (Optional[int]): If provided, force it to format to this
+            particular version.
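+
+    Example:
+        >>> # Illustrative only; the resulting path depends on the
+        >>> # project's publish template and the instance's data.
+        >>> get_instance_expected_output_path(
+        ...     instance, representation_name="usd", version=3
+        ... )  # doctest: +SKIP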
+
+    Returns:
+        str: Resolved path.
+
+    """

-def parse_avalon_uri(uri):
-    # URI Pattern: avalon://{asset}/{subset}.{ext}
-    pattern = r"avalon://(?P<asset>[^/.]*)/(?P<subset>[^/]*)\.(?P<ext>.*)"
-    if uri.startswith("avalon://"):
-        match = re.match(pattern, uri)
-        if match:
-            return match.groupdict()
+    if ext is None:
+        ext = representation_name
+    if version is None:
+        version = instance.data["version"]
+
+    context = instance.context
+    anatomy = context.data["anatomy"]
+    path_template_obj = anatomy.templates_obj["publish"]["path"]
+    template_data = copy.deepcopy(instance.data["anatomyData"])
+    template_data.update({
+        "ext": ext,
+        "representation": representation_name,
+        "subset": instance.data["subset"],
+        "asset": instance.data["asset"],
+        "variant": instance.data.get("variant"),
+        "version": version
+    })
+
+    template_filled = path_template_obj.format_strict(template_data)
+    return os.path.normpath(template_filled)
+
+# endregion
diff --git a/openpype/pipeline/colorspace.py b/openpype/pipeline/colorspace.py
index 9f720f6ae95..605e7612c3b 100644
--- a/openpype/pipeline/colorspace.py
+++ b/openpype/pipeline/colorspace.py
@@ -1096,7 +1096,8 @@ def set_colorspace_data_to_representation(
     # check if `file_ext` in lower case is in CachedData.allowed_exts
     if file_ext.lstrip(".").lower() not in CachedData.allowed_exts:
         log.debug(
-            "Extension '{}' is not in allowed extensions.".format(file_ext)
+            "Extension '{}' is not in allowed extensions to retrieve "
+            "colorspace data for, ignoring...".format(file_ext)
         )
         return
diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py
index 683699a0d1b..a978d4ff70f 100644
--- a/openpype/pipeline/create/context.py
+++ b/openpype/pipeline/create/context.py
@@ -1803,10 +1803,10 @@ def _reset_creator_plugins(self):
             creator_identifier = creator_class.identifier
             if creator_identifier in creators:
-                self.log.warning((
-                    "Duplicated Creator identifier. "
-                    "Using first and skipping following"
-                ))
+                self.log.warning(
+                    "Duplicated Creator identifier. Using first and "
+                    "skipping following: {}".format(str(creator_class))
+                )
                 continue

             # Filter by host name
diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py
index c8b67a3d055..93a15fcbb6b 100644
--- a/openpype/plugins/publish/collect_resources_path.py
+++ b/openpype/plugins/publish/collect_resources_path.py
@@ -64,7 +64,8 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
         "skeletalMesh",
         "xgen",
         "yeticacheUE",
-        "tycache"
+        "tycache",
+        "usd"
     ]

     def process(self, instance):
diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py
new file mode 100644
index 00000000000..3513a2381d3
--- /dev/null
+++ b/openpype/plugins/publish/extract_usd_layer_contributions.py
@@ -0,0 +1,609 @@
+from operator import attrgetter
+import dataclasses
+import os
+
+import pyblish.api
+from pxr import Sdf
+
+from openpype.lib import (
+    TextDef,
+    BoolDef,
+    UISeparatorDef,
+    UILabelDef,
+    EnumDef
+)
+from openpype.lib.usdlib import (
+    set_variant_reference,
+    setup_asset_layer,
+    add_ordered_sublayer,
+    construct_ayon_uri,
+    get_representation_path_by_ayon_uri,
+    get_representation_path_by_names,
+    set_layer_defaults
+)
+from openpype.pipeline import publish
+
+
+# A contribution defines a contribution into a (department) layer which will
+# get layered into the target product, usually the asset or shot.
+# We need to at least know what it targets (e.g. where it goes) and
+# in what order (which contribution is stronger?).
+# Preferably the bootstrapped data (e.g. the Shot) preserves metadata about
+# the contributions so that we can design a system where custom contributions
+# outside of the predefined orders can be managed. So that if a
+# particular asset requires an extra contribution level, you can add it
+# directly from the publisher at that particular order. Future publishes will
+# then see the existing contribution and will persist it in future
+# bootstraps at that order.
+# TODO: Avoid hardcoded ordering - might need to be set through settings?
+LAYER_ORDERS = {
+    # asset layers
+    "model": 100,
+    "assembly": 150,
+    "look": 200,
+    "rig": 300,
+    # shot layers
+    "layout": 200,
+    "animation": 300,
+    "simulation": 400,
+    "fx": 500,
+    "lighting": 600,
+}
+
+# This global toggle is here mostly for debugging purposes and should usually
+# be True so that new publishes merge with and extend previous contributions.
+# With this enabled a new variant model layer publish would e.g. merge
+# nicely with the model layer's other variants, so you can build up an asset
+# through individual publishes instead of having to republish every
+# contribution at the same time.
+BUILD_INTO_LAST_VERSIONS = True
+
+
+@dataclasses.dataclass
+class Contribution:
+    # What are we contributing?
+    instance: pyblish.api.Instance  # instance that contributes it
+
+    # Where are we contributing to?
+    layer_id: str  # usually the department or task name
+    target_product: str = "usdAsset"  # target subset the layer merges into
+
+    # Variant
+    apply_as_variant: bool = False
+    variant_set_name: str = ""
+    variant_name: str = ""
+    variant_is_default: bool = False
+
+    order: int = 0
+
+
+def get_instance_uri_path(
+    instance,
+    resolve=True
+):
+    """Return path for instance's usd representation"""
+    context = instance.context
+    asset = instance.data["asset"]
+    subset = instance.data["subset"]
+    project_name = context.data["projectName"]
+
+    # Get the layer's published path
+    path = construct_ayon_uri(
+        project_name=project_name,
+        asset_name=asset,
+        product=subset,
+        version="latest",
+        representation_name="usd"
+    )
+
+    # Resolve contribution path
+    # TODO: Remove this when Asset Resolver is used
+    if resolve:
+        path = get_representation_path_by_ayon_uri(
+            path,
+            # Allow also resolving live to entries from current context
+            context=instance.context
+        )
+        # For now ensure `None` is also cast to a string
+        path = str(path)
+
+    return path
+
+
+def get_last_publish(instance, representation="usd"):
+    """Return filepath of the instance's last published representation"""
+    return get_representation_path_by_names(
+        project_name=instance.context.data["projectName"],
+        asset_name=instance.data["asset"],
+        subset_name=instance.data["subset"],
+        version_name="latest",
+        representation_name=representation
+    )
+
+
+def add_representation(instance, name,
+                       files, staging_dir, ext=None,
+                       output_name=None):
+    """Add a representation to publish and integrate.
+
+    A representation must exist of either a single file or a
+    single file sequence. It can *not* contain multiple files.
+
+    For the integration to succeed the instance must provide the context
+    for asset, frame range, etc. even though the representation can
+    override some parts of it.
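+
+    Example:
+        >>> # Minimal sketch with illustrative values; `instance` is a
+        >>> # pyblish instance from the publish context.
+        >>> add_representation(
+        ...     instance, name="usd",
+        ...     files="modelMain.usd", staging_dir="/tmp/staging"
+        ... )  # doctest: +SKIP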
+ + Arguments: + instance (pyblish.api.Instance): Publish instance + name (str): The representation name + ext (Optional[str]): Explicit extension for the output + output_name (Optional[str]): Output name suffix for the + destination file to ensure the file is unique if + multiple representations share the same extension. + + Returns: + dict: Representation data for integration. + + """ + if ext is None: + # TODO: Use filename + ext = name + + representation = { + "name": name, + "ext": ext, + "stagingDir": staging_dir, + "files": files + } + if output_name: + representation["outputName"] = output_name + + instance.data.setdefault("representations", []).append(representation) + return representation + + +class CollectUSDLayerContributions(pyblish.api.InstancePlugin, + publish.OpenPypePyblishPluginMixin): + """Collect the USD Layer Contributions and create dependent instances. + + Our contributions go to the layer + + Instance representation -> Department Layer -> Asset + + So that for example: + modelMain --> variant 'main' in model.usd -> asset.usd + modelDamaged --> variant 'damaged' in model.usd -> asset.usd + + """ + + order = pyblish.api.CollectorOrder + 0.35 + label = "Collect USD Layer Contributions (Asset/Shot)" + families = ["usd"] + + def process(self, instance): + + attr_values = self.get_attr_values_from_data(instance.data) + if not attr_values.get("contribution_enabled"): + return + + instance.data["subsetGroup"] = ( + instance.data.get("subsetGroup") or "USD Layer" + ) + + # Allow formatting in variant set name and variant name + data = instance.data.copy() + data["layer"] = attr_values["contribution_layer"] + for key in [ + "contribution_variant_set_name", + "contribution_variant" + ]: + attr_values[key] = attr_values[key].format(**data) + + # Define contribution + order = LAYER_ORDERS.get(attr_values["contribution_layer"], 0) + contribution = Contribution( + instance=instance, + layer_id=attr_values["contribution_layer"], + target_product=attr_values["contribution_target_product"], + apply_as_variant=attr_values["contribution_apply_as_variant"], + variant_set_name=attr_values["contribution_variant_set_name"], + variant_name=attr_values["contribution_variant"], + variant_is_default=attr_values["contribution_variant_is_default"], + order=order + ) + asset_subset = contribution.target_product + layer_subset = "{}_{}".format(asset_subset, contribution.layer_id) + + # Layer contribution instance + layer_instance = self.get_or_create_instance( + subset=layer_subset, + variant=contribution.layer_id, + source_instance=instance, + families=["usd", "usdLayer"], + ) + layer_instance.data.setdefault("usd_contributions", []).append( + contribution + ) + layer_instance.data["usd_layer_id"] = contribution.layer_id + layer_instance.data["usd_layer_order"] = contribution.order + + layer_instance.data["subsetGroup"] = ( + instance.data.get("subsetGroup") or "USD Layer" + ) + + # Asset/Shot contribution instance + target_instance = self.get_or_create_instance( + subset=asset_subset, + variant=asset_subset, + source_instance=layer_instance, + families=["usd", "usdAsset"], + ) + target_instance.data["contribution_target_product_init"] = attr_values[ + "contribution_target_product_init" + ] + + self.log.info( + f"Contributing {instance.data['subset']} to " + f"{layer_subset} -> {asset_subset}" + ) + + def find_instance(self, context, data, ignore_instance): + for instance in context: + if instance is ignore_instance: + continue + + if all(instance.data.get(key) == value + for key, value in 
data.items()):
+                return instance
+
+    def get_or_create_instance(self,
+                               subset,
+                               variant,
+                               source_instance,
+                               families):
+        """Get or create the instance matching the subset/variant.
+
+        The source instance will be used to do additional matching, like
+        ensuring it's a subset for the same asset and task. If the instance
+        already exists in the `context` then the existing one is returned.
+
+        Each time this is called with a source instance, that source instance
+        is appended to the `instance.data["source_instances"]` list on the
+        returned instance.
+
+        Arguments:
+            subset (str): Subset name.
+            variant (str): Variant name.
+            source_instance (pyblish.api.Instance): Source instance to
+                be related to for asset, task.
+            families (list): The families required to be set on the instance.
+
+        Returns:
+            pyblish.api.Instance: The resulting instance.
+
+        """
+
+        # Potentially the instance already exists due to multiple instances
+        # contributing to the same layer or asset - so we first check for
+        # existence
+        context = source_instance.context
+
+        # Required matching vars
+        data = {
+            "asset": source_instance.data["asset"],
+            "task": source_instance.data.get("task"),
+            "subset": subset,
+            "variant": variant,
+            "families": families
+        }
+        existing_instance = self.find_instance(context, data,
+                                               ignore_instance=source_instance)
+        if existing_instance:
+            existing_instance.append(source_instance.id)
+            existing_instance.data["source_instances"].append(source_instance)
+            return existing_instance
+
+        # Otherwise create the instance
+        new_instance = context.create_instance(name=subset)
+        new_instance.data.update(data)
+
+        new_instance.data["label"] = (
+            "{0} ({1})".format(subset, new_instance.data["asset"])
+        )
+        new_instance.data["family"] = "usd"
+        new_instance.data["icon"] = "link"
+        new_instance.data["comment"] = "Automated bootstrap USD file."
+        new_instance.append(source_instance.id)
+        new_instance.data["source_instances"] = [source_instance]
+
+        return new_instance
+
+    @classmethod
+    def get_attribute_defs(cls):
+
+        return [
+            UISeparatorDef("usd_container_settings1"),
+            UILabelDef(label="USD Contribution"),
+            BoolDef("contribution_enabled",
+                    label="Enable",
+                    tooltip=(
+                        "When enabled this publish instance will be added "
+                        "into a department layer of a target product, "
+                        "usually an asset or shot.\n"
+                        "When disabled this publish instance will not be "
+                        "added into another USD file and remains as is.\n"
+                        "In both cases the USD data itself is free to have "
+                        "references and sublayers of its own."
+                    ),
+                    default=True),
+            TextDef("contribution_target_product",
+                    label="Target product",
+                    tooltip=(
+                        "The target product the contribution should be added "
+                        "to. Usually this is the asset or shot product.\nThe "
+                        "department layer will be added to this product, and "
+                        "the contribution itself will be added to the "
+                        "department layer."
+                    ),
+                    default="usdAsset"),
+            EnumDef("contribution_target_product_init",
+                    label="Initialize as",
+                    tooltip=(
+                        "The target product's USD file will be initialized "
+                        "based on this type if there's no existing USD of "
+                        "that product yet.\nIf there's already an existing "
+                        "product with the name of the 'target product' this "
+                        "setting will do nothing."
+                    ),
+                    items=["asset", "shot"],
+                    default="asset"),
+
+            # Asset layer, e.g.
model.usd, look.usd, rig.usd + EnumDef("contribution_layer", + label="Add to department layer", + tooltip=( + "The layer the contribution should be made to in the " + "target product.\nThe layers have their own " + "predefined ordering.\nA higher order (further down " + "the list) will contribute as a stronger opinion." + ), + items=list(LAYER_ORDERS.keys()), + default="model"), + BoolDef("contribution_apply_as_variant", + label="Add as variant", + tooltip=( + "When enabled the contribution to the department " + "layer will be added as a variant where the variant " + "on the default root prim will be added as a " + "reference.\nWhen disabled the contribution will be " + "appended to as a sublayer to the department layer " + "instead." + ), + default=True), + TextDef("contribution_variant_set_name", + label="Variant Set Name", + default="{layer}"), + TextDef("contribution_variant", + label="Variant Name", + default="{variant}"), + BoolDef("contribution_variant_is_default", + label="Set as default variant selection", + tooltip=( + "Whether to set this instance's variant name as the " + "default selected variant name for the variant set.\n" + "It is always expected to be enabled for only one " + "variant name in the variant set.\n" + "The behavior is unpredictable if multiple instances " + "for the same variant set have this enabled." + ), + default=False), + UISeparatorDef("usd_container_settings3"), + ] + + +class ExtractUSDLayerContribution(publish.Extractor): + + families = ["usdLayer"] + label = "Extract USD Layer Contributions (Asset/Shot)" + order = pyblish.api.ExtractorOrder + 0.45 + + def process(self, instance): + from pxr import Sdf + + asset = instance.data["asset"] + product = instance.data["subset"] + self.log.debug(f"Building layer: {asset} > {product}") + + path = get_last_publish(instance) + if path and BUILD_INTO_LAST_VERSIONS: + sdf_layer = Sdf.Layer.OpenAsAnonymous(path) + default_prim = sdf_layer.defaultPrim + else: + default_prim = asset + sdf_layer = Sdf.Layer.CreateAnonymous() + set_layer_defaults(sdf_layer, default_prim=default_prim) + + contributions = instance.data.get("usd_contributions", []) + for contribution in sorted(contributions, key=attrgetter("order")): + path = get_instance_uri_path(contribution.instance) + if contribution.apply_as_variant: + # Add contribution as variants to their layer subsets + self.log.debug(f"Adding variant: {contribution}") + prim_path = f"/{default_prim}" + variant_set_name = contribution.variant_set_name + variant_name = contribution.variant_name + set_variant_reference( + sdf_layer, + prim_path=prim_path, + variant_selections=[(variant_set_name, variant_name)], + path=path + ) + prim = sdf_layer.GetPrimAtPath(prim_path) + + # Set default variant selection + if contribution.variant_is_default or \ + variant_set_name not in prim.variantSelections: + prim.variantSelections[variant_set_name] = variant_name + + else: + # Sublayer source file + self.log.debug(f"Adding sublayer: {contribution}") + + # This replaces existing versions of itself so that + # republishing does not continuously add more versions of the + # same subset + subset = contribution.instance.data["subset"] + add_ordered_sublayer( + layer=sdf_layer, + contribution_path=path, + layer_id=subset, + order=None, # unordered + add_sdf_arguments_metadata=True + ) + + # Save the file + staging_dir = self.staging_dir(instance) + filename = f"{instance.name}.usd" + filepath = os.path.join(staging_dir, filename) + sdf_layer.Export(filepath, args={"format": "usda"}) + + 
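+        # Register the exported file so the integrator picks it up as the
+        # product's `usd` representation.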
+        add_representation(
+            instance,
+            name="usd",
+            files=filename,
+            staging_dir=staging_dir
+        )
+
+
+class ExtractUSDAssetContribution(publish.Extractor):
+
+    families = ["usdAsset"]
+    label = "Extract USD Asset/Shot Contributions"
+    order = ExtractUSDLayerContribution.order + 0.01
+
+    def process(self, instance):
+        from pxr import Sdf
+
+        asset = instance.data["asset"]
+        subset = instance.data["subset"]
+        self.log.debug(f"Building asset: {asset} > {subset}")
+
+        # Contribute layers to asset
+        # Use existing asset and add to it, or initialize a new asset layer
+        path = get_last_publish(instance)
+        payload_layer = None
+        if path and BUILD_INTO_LAST_VERSIONS:
+            # If there's a payload file, put it in the payload instead
+            folder = os.path.dirname(path)
+            payload_path = os.path.join(folder, "payload.usd")
+            if os.path.exists(payload_path):
+                payload_layer = Sdf.Layer.OpenAsAnonymous(payload_path)
+
+            asset_layer = Sdf.Layer.OpenAsAnonymous(path)
+        else:
+            # If there is no existing publish of this product yet, initialize
+            # the layer as either a default asset or shot structure.
+            init_type = instance.data["contribution_target_product_init"]
+            asset_layer, payload_layer = self.init_layer(asset_name=asset,
+                                                         init_type=init_type)
+
+        target_layer = payload_layer if payload_layer else asset_layer
+
+        # Get unique layer instances (remove duplicate entries)
+        processed_ids = set()
+        layer_instances = []
+        for layer_inst in instance.data["source_instances"]:
+            if layer_inst.id in processed_ids:
+                continue
+            layer_instances.append(layer_inst)
+            processed_ids.add(layer_inst.id)
+
+        # Insert the layers in contribution order
+        def sort_by_order(layer_instance):
+            return layer_instance.data["usd_layer_order"]
+
+        for layer_instance in sorted(layer_instances,
+                                     key=sort_by_order,
+                                     reverse=True):
+
+            layer_id = layer_instance.data["usd_layer_id"]
+            order = layer_instance.data["usd_layer_order"]
+
+            path = get_instance_uri_path(instance=layer_instance)
+            add_ordered_sublayer(target_layer,
+                                 contribution_path=path,
+                                 layer_id=layer_id,
+                                 order=order,
+                                 # Add the sdf argument metadata which allows
+                                 # us to later detect whether another path
+                                 # has the same layer id, so we can replace it
+                                 add_sdf_arguments_metadata=True)
+
+        # Save the file
+        staging_dir = self.staging_dir(instance)
+        filename = f"{instance.name}.usd"
+        filepath = os.path.join(staging_dir, filename)
+        asset_layer.Export(filepath, args={"format": "usda"})
+
+        add_representation(
+            instance,
+            name="usd",
+            files=filename,
+            staging_dir=staging_dir
+        )
+
+        if payload_layer:
+            payload_path = os.path.join(staging_dir, "payload.usd")
+            payload_layer.Export(payload_path, args={"format": "usda"})
+            self.add_relative_file(instance, payload_path)
+
+    def init_layer(self, asset_name, init_type):
+        """Initialize layer if no previous version exists"""
+
+        if init_type == "asset":
+            asset_layer = Sdf.Layer.CreateAnonymous()
+            created_layers = setup_asset_layer(asset_layer, asset_name,
+                                               force_add_payload=True,
+                                               set_payload_path=True)
+            payload_layer = created_layers[0].layer
+            return asset_layer, payload_layer
+
+        elif init_type == "shot":
+            shot_layer = Sdf.Layer.CreateAnonymous()
+            set_layer_defaults(shot_layer, default_prim=None)
+            return shot_layer, None
+
+        else:
+            raise ValueError(
+                "USD Target Product contribution can only initialize "
+                "as 'asset' or 'shot', got: '{}'".format(init_type)
+            )
+
+    def add_relative_file(self, instance, source, staging_dir=None):
+        """Add transfer for a relative path from staging to publish dir.
+
+        Unlike files in representations, the file will not be renamed and
+        will be ingested one-to-one into the publish directory.
+
+        Note: This file does not get registered as a representation, because
+            representation files always get renamed by the publish template
+            system. These files get included in the `representation["files"]`
+            info with all the representations of the version - and thus will
+            appear multiple times per version.
+
+        """
+        # TODO: It could be nice to force a particular representation to
+        # keep a specific filename on integration, because e.g. some file
+        # formats rely on a file being named exactly like that, while still
+        # allowing regular registration in the database as a file of the
+        # version. As such we might want to tweak integrator logic?
+        if staging_dir is None:
+            staging_dir = self.staging_dir(instance)
+        publish_dir = instance.data["publishDir"]
+
+        relative_path = os.path.relpath(source, staging_dir)
+        destination = os.path.join(publish_dir, relative_path)
+        destination = os.path.normpath(destination)
+
+        transfers = instance.data.setdefault("transfers", [])
+        self.log.debug(f"Adding relative file {source} -> {relative_path}")
+        transfers.append((source, destination))
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json
index 213ec9d04e1..66dcabe5cd7 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json
@@ -136,11 +136,11 @@
     "template_data": [
         {
             "key": "CreateUSD",
-            "label": "Create USD (experimental)"
+            "label": "Create USD"
         },
         {
             "key": "CreateUSDRender",
-            "label": "Create USD render (experimental)"
+            "label": "Create USD render"
         },
         {
             "key": "CreateVDBCache",
diff --git a/server_addon/houdini/server/settings/create.py b/server_addon/houdini/server/settings/create.py
index a5ca4d477b4..b79354ecc32 100644
--- a/server_addon/houdini/server/settings/create.py
+++ b/server_addon/houdini/server/settings/create.py
@@ -79,10 +79,10 @@ class CreatePluginsModel(BaseSettingsModel):
         title="Create Static Mesh")
     CreateUSD: CreatorModel = Field(
         default_factory=CreatorModel,
-        title="Create USD (experimental)")
+        title="Create USD")
     CreateUSDRender: CreatorModel = Field(
         default_factory=CreatorModel,
-        title="Create USD render (experimental)")
+        title="Create USD render")
     CreateVDBCache: CreatorModel = Field(
         default_factory=CreatorModel,
         title="Create VDB Cache")
diff --git a/tests/unit/openpype/lib/test_usdlib.py b/tests/unit/openpype/lib/test_usdlib.py
new file mode 100644
index 00000000000..a21f498f6e4
--- /dev/null
+++ b/tests/unit/openpype/lib/test_usdlib.py
@@ -0,0 +1,306 @@
+from openpype.lib import usdlib
+from pxr import Sdf
+
+
+def test_create_asset(tmp_path):
+    """Test creating the basics of an asset structure."""
+    layers = usdlib.create_asset(str(tmp_path / "asset.usd"),
+                                 asset_name="test",
+                                 reference_layers=["./model.usd",
+                                                   "./look.usd"])
+    assert len(layers) == 2, \
+        "Expecting two files: the asset.usd and payload.usd"
+    assert (tmp_path / "asset.usd").exists()
+    assert (tmp_path / "payload.usd").exists()
+    assert not (tmp_path / "model.usd").exists()
+    assert not (tmp_path / "look.usd").exists()
+
+
+def test_add_contributions_to_asset(tmp_path):
+    """Test adding contributions on top of each other works as expected"""
+    asset_usd = str(tmp_path / "asset.usd")
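+    # Bootstrap an asset structure to add the variant contributions into.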
+    usdlib.create_asset(asset_usd,
+                        asset_name="test",
+                        reference_layers=["./model.usd",
+                                          "./look.usd"])
+
+    layer = Sdf.Layer.OpenAsAnonymous(asset_usd)
+    prim_path = Sdf.Path("/test")  # prim is named by `asset_name`
+
+    path_in_variant = prim_path.AppendVariantSelection("model", "modelMain")
+    assert not layer.GetPrimAtPath(path_in_variant), (
+        "Variant should not exist yet and thus the prim should not exist"
+    )
+
+    # Adding a variant with a single prepended reference should work
+    usdlib.set_variant_reference(
+        layer,
+        prim_path=prim_path,
+        variant_selections=[["model", "modelMain"]],
+        path="./modelMain.usd"
+    )
+
+    prim_in_variant = layer.GetPrimAtPath(path_in_variant)
+    assert prim_in_variant, "Path in variant should be defined"
+    references = prim_in_variant.referenceList.prependedItems[:]
+    assert len(references) == 1, \
+        "Must have only one reference"
+    assert references[0].assetPath == "./modelMain.usd", \
+        "Must reference ./modelMain.usd"
+
+    # Replacing an existing variant reference should work
+    usdlib.set_variant_reference(
+        layer,
+        prim_path=prim_path,
+        variant_selections=[["model", "modelMain"]],
+        path="./modelMain_v2.usd"
+    )
+    prim_in_variant = layer.GetPrimAtPath(path_in_variant)
+    references = prim_in_variant.referenceList.prependedItems[:]
+    assert len(references) == 1, \
+        "Must have only one reference"
+    assert references[0].assetPath == "./modelMain_v2.usd", \
+        "Must reference ./modelMain_v2.usd"
+
+    # Adding multiple variants should work and should not adjust the original
+    usdlib.set_variant_reference(
+        layer,
+        prim_path=prim_path,
+        variant_selections=[["model", "modelDamaged"]],
+        path="./modelDamaged.usd"
+    )
+    usdlib.set_variant_reference(
+        layer,
+        prim_path=prim_path,
+        variant_selections=[["look", "lookMain"]],
+        path="./lookMain.usd",
+    )
+
+    # Validate all exist and paths are set to the expected path
+    for variant_set_name, variant_name, expected_path in [
+        ("model", "modelMain", "./modelMain_v2.usd"),
+        ("model", "modelDamaged", "./modelDamaged.usd"),
+        ("look", "lookMain", "./lookMain.usd"),
+    ]:
+        path_in_variant = prim_path.AppendVariantSelection(variant_set_name,
+                                                           variant_name)
+        prim_in_variant = layer.GetPrimAtPath(path_in_variant)
+        references = prim_in_variant.referenceList.prependedItems[:]
+        assert len(references) == 1, \
+            "Must have only one reference"
+        assert references[0].assetPath == expected_path, \
+            f"Must reference {expected_path}"
+
+    print(layer.ExportToString())
+
+
+def test_create_shot(tmp_path):
+    """Test creating a shot structure, which is just a bunch of layers"""
+    usdlib.create_shot(str(tmp_path / "shot.usd"),
+                       layers=["./lighting.usd",
+                               "./fx.usd",
+                               "./animation.usd",
+                               "./layout.usd"])
+    assert (tmp_path / "shot.usd").exists()
+    assert not (tmp_path / "lighting.usd").exists()
+    assert not (tmp_path / "fx.usd").exists()
+    assert not (tmp_path / "animation.usd").exists()
+    assert not (tmp_path / "layout.usd").exists()
+
+
+def test_add_variant_references_to_layer(tmp_path):
+    """Test adding variants to a layer, replacing older ones"""
+
+    def get_references(layer, prim_path, variant_set, variant):
+        """Return prepended reference asset paths for prim in variant set"""
+        path = Sdf.Path(prim_path).AppendVariantSelection(variant_set, variant)
+        prim_spec = layer.GetPrimAtPath(path)
+        references = list(prim_spec.referenceList.prependedItems)
+        return [ref.assetPath for ref in references]
+
+    prim_path = "/root"
+    layer = usdlib.add_variant_references_to_layer(
+        variants=[
+            ("main", "./main.usd"),
+            ("twist",
"./twist.usd"), + ("tall", "./tall.usd"), + ], + variantset="model", + variant_prim=prim_path + ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] + + # Allow recalling with a layer provided to operate on that layer + # instead; adding more variant definitions, keeping existing definitions + # as well + layer = usdlib.add_variant_references_to_layer( + variants=[ + ("main", "./look_main.usd"), + ("twist", "./look_twist.usd"), + ("tall", "./look_tall.usd"), + ], + variantset="look", + layer=layer, + variant_prim=prim_path + ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] + assert get_references(layer, prim_path, "look", "main") == ["./look_main.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "twist") == ["./look_twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "tall") == ["./look_tall.usd"] # noqa: E501 + + # Allow with a layer provided to operate on that layer + # instead; adding more variant names to an existing variant set + layer = usdlib.add_variant_references_to_layer( + variants=[ + ("short", "./look_short.usd"), + ], + variantset="look", + layer=layer, + set_default_variant=False, + variant_prim=prim_path + ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] + assert get_references(layer, prim_path, "look", "main") == ["./look_main.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "twist") == ["./look_twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "tall") == ["./look_tall.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "short") == ["./look_short.usd"] # noqa: E501 + + # Allow updating an existing variant with a new file + layer = usdlib.add_variant_references_to_layer( + variants=[ + ("short", "./look_short_v02.usd"), + ], + variantset="look", + layer=layer, + set_default_variant=False, + variant_prim=prim_path + ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] + assert get_references(layer, prim_path, "look", "main") == ["./look_main.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "twist") == ["./look_twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "tall") == ["./look_tall.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "short") == ["./look_short_v02.usd"] # noqa: E501 + + # Applying variants to another prim should not affect first prim + layer = usdlib.add_variant_references_to_layer( + variants=[ + ("short", "./look_short.usd"), + ], + variantset="look", + layer=layer, + set_default_variant=False, + variant_prim="/other_root" + ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] # noqa: E501 + assert 
get_references(layer, prim_path, "model", "tall") == ["./tall.usd"]
+    assert get_references(layer, prim_path, "look", "main") == ["./look_main.usd"]  # noqa: E501
+    assert get_references(layer, prim_path, "look", "twist") == ["./look_twist.usd"]  # noqa: E501
+    assert get_references(layer, prim_path, "look", "tall") == ["./look_tall.usd"]  # noqa: E501
+    assert get_references(layer, prim_path, "look", "short") == ["./look_short_v02.usd"]  # noqa: E501
+    assert get_references(layer, "/other_root", "look", "short") == ["./look_short.usd"]  # noqa: E501
+
+    # Exporting the layer should work
+    layer.Export(
+        str(tmp_path / "model.usd"),
+        args={"format": "usda"},
+    )
+    assert (tmp_path / "model.usd").exists()
+
+    # Debug print generated file (pytest excludes it by default but will
+    # show it if the -s flag is passed)
+    print(layer.ExportToString())
+
+
+def test_add_ordered_sublayer():
+    """Test adding sublayers by order and uniqueness"""
+    # TODO: The code doesn't error but the data should still be validated
+
+    layer = Sdf.Layer.CreateAnonymous()
+
+    def get_paths(layer, remove_format_args=True):
+        paths = layer.subLayerPaths
+        # Remove metadata stored in the path string
+        if remove_format_args:
+            paths = [path.split(":SDF_FORMAT_ARGS:", 1)[0] for path in paths]
+        return paths
+
+    # The layer stack should have the higher orders earlier in the list
+    # because those state "stronger opinions"; as such the order needs to be
+    # reversed
+    orders = [300, 500, 350, 600, 50, 150, 450]
+    for order in orders:
+        usdlib.add_ordered_sublayer(layer,
+                                    contribution_path=str(order),
+                                    layer_id=str(order),
+                                    order=order)
+
+    paths = get_paths(layer)
+    assert paths == ["600", "500", "450", "350", "300", "150", "50"]
+
+    # This should not add a sublayer but should replace by `layer_id`
+    usdlib.add_ordered_sublayer(layer,
+                                contribution_path="300_v2",
+                                layer_id="300",
+                                order=300)
+
+    paths = get_paths(layer)
+    assert paths == ["600", "500", "450", "350", "300_v2", "150", "50"]
+
+    # When replacing a layer with an existing 'id' the ordering is preserved
+    # from before; the new order is not applied.
+    usdlib.add_ordered_sublayer(layer,
+                                contribution_path="500_v2",
+                                layer_id="500",
+                                order=9999)
+
+    paths = get_paths(layer)
+    assert paths == ["600", "500_v2", "450", "350", "300_v2", "150", "50"]
+
+    # When replacing a layer with an existing 'id' the ordering is preserved
+    # from before; the new order is not applied even when it is None
+    usdlib.add_ordered_sublayer(layer,
+                                contribution_path="500_v3",
+                                layer_id="500",
+                                order=None)
+
+    paths = get_paths(layer)
+    assert paths == ["600", "500_v3", "450", "350", "300_v2", "150", "50"]
+
+    # Adding a new layer id should also insert the new layer
+    usdlib.add_ordered_sublayer(layer,
+                                contribution_path="75",
+                                layer_id="75",
+                                order=75)
+
+    paths = get_paths(layer)
+    assert paths == ["600", "500_v3", "450", "350", "300_v2", "150", "75", "50"]  # noqa: E501
+
+    # Adding a layer with `order=None` should prepend it at the start as the
+    # strongest opinion
+    usdlib.add_ordered_sublayer(layer,
+                                contribution_path="None",
+                                layer_id="None",
+                                order=None)
+    paths = get_paths(layer)
+    assert paths == ["None", "600", "500_v3", "450", "350", "300_v2", "150", "75", "50"]  # noqa: E501
+
+    # A layer added with `order=None` should also be replaceable
+    usdlib.add_ordered_sublayer(layer,
+                                contribution_path="None_v2",
+                                layer_id="None",
+                                order=None)
+    paths = get_paths(layer)
+    assert paths == ["None_v2", "600", "500_v3", "450", "350", "300_v2", "150", "75", "50"]  # noqa: E501
+
+    # Debug print generated file (pytest excludes it by default but will
+    # show it if the -s flag is passed)
+    print(layer.ExportToString())
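+
+
+def test_get_sdf_format_args():
+    """Minimal sketch exercising `get_sdf_format_args` (illustrative).
+
+    The `layer_id` and `order` keys here are hypothetical examples of
+    metadata stored in a path's SDF_FORMAT_ARGS suffix.
+    """
+    path = "./model.usd:SDF_FORMAT_ARGS:layer_id=model:order=100"
+    assert usdlib.get_sdf_format_args(path) == {
+        "layer_id": "model",
+        "order": "100",
+    }
+
+    # Paths without the suffix parse to an empty dict
+    assert usdlib.get_sdf_format_args("./model.usd") == {}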