From 997f1b1c46ac29eff2fbdd6c0741bda640ec6439 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 12 Oct 2023 12:12:50 +0200 Subject: [PATCH 01/56] Draft prototype for Maya USD pipeline --- openpype/hosts/maya/api/lib.py | 4 + openpype/hosts/maya/api/pipeline.py | 37 ++++ openpype/hosts/maya/api/usdlib.py | 61 +++++++ .../maya/plugins/create/create_maya_usd.py | 26 +++ .../plugins/inventory/select_containers.py | 4 +- .../maya/plugins/load/load_into_maya_usd.py | 148 ++++++++++++++++ .../load/load_reference_into_maya_usd.py | 158 ++++++++++++++++++ .../plugins/publish/collect_usd_bootstrap.py | 120 +++++++++++++ .../plugins/publish/extract_usd_bootstrap.py | 100 +++++++++++ .../publish/validate_instance_has_members.py | 3 +- openpype/lib/usdlib.py | 123 +++++++------- 11 files changed, 724 insertions(+), 60 deletions(-) create mode 100644 openpype/hosts/maya/api/usdlib.py create mode 100644 openpype/hosts/maya/plugins/load/load_into_maya_usd.py create mode 100644 openpype/hosts/maya/plugins/load/load_reference_into_maya_usd.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py create mode 100644 openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 510d4ecc850..0dd7f12bfc2 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1633,6 +1633,10 @@ def get_container_members(container): # Assume it's a container dictionary container = container["objectName"] + if "," in container: + # Assume it's a UFE path - return it as the only member + return [container] + members = cmds.sets(container, query=True) or [] members = cmds.ls(members, long=True, objectsOnly=True) or [] all_members = set(members) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index 6b791c96658..3e518a62663 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -360,6 +360,25 @@ def parse_container(container): return data +def parse_usd_prim_container(prim, proxy): + """Parse instance container from UsdPrim if it is marked as one""" + data = prim.GetCustomDataByKey("openpype") + if not data or not data.get("id") == AVALON_CONTAINER_ID: + return + + # Store transient data + data["prim"] = prim + data["proxy"] = proxy + + # Store the maya UFE path as objectName + prim_path = str(prim.GetPath()) + data["objectName"] = "{},{}".format(proxy, prim_path) + data["namespace"] = prim_path + data["name"] = proxy + + return data + + def _ls(): """Yields Avalon container node names. 
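The customData round-trip used by `parse_usd_prim_container()` above works because USD treats a ':'-separated key path as nested dictionaries: the per-key writes that `containerise_prim()` performs later in this patch (e.g. "openpype:name") all land under a single "openpype" dictionary, which is exactly what one `GetCustomDataByKey("openpype")` call reads back. A minimal standalone sketch of that behaviour, assuming only that `pxr` is importable (no Maya needed); the values are illustrative:

    from pxr import Usd

    stage = Usd.Stage.CreateInMemory()
    prim = stage.DefinePrim("/root", "Xform")

    # ':'-separated key paths author nested customData dictionaries
    prim.SetCustomDataByKey("openpype:id", "pyblish.avalon.container")  # illustrative; the loaders store AVALON_CONTAINER_ID here
    prim.SetCustomDataByKey("openpype:name", "modelMain")

    print(prim.GetCustomDataByKey("openpype"))
    # -> {'id': 'pyblish.avalon.container', 'name': 'modelMain'}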
@@ -415,6 +434,24 @@ def ls(): for container in sorted(container_names): yield parse_container(container) + for container in ls_maya_usd_proxy_prims(): + yield container + + +def ls_maya_usd_proxy_prims(): + # TODO: This might be nicer once the Loader API gets a refactor where + # the loaders themselves can return the containers from the scene + if cmds.pluginInfo("mayaUsdPlugin", query=True, loaded=True): + usd_proxies = cmds.ls(type="mayaUsdProxyShape", long=True) + if usd_proxies: + import mayaUsd.ufe + for proxy in usd_proxies: + stage = mayaUsd.ufe.getStage('|world' + proxy) + for prim in stage.TraverseAll(): + container = parse_usd_prim_container(prim, proxy=proxy) + if container: + yield container + def containerise(name, namespace, diff --git a/openpype/hosts/maya/api/usdlib.py b/openpype/hosts/maya/api/usdlib.py new file mode 100644 index 00000000000..9693c7e4fe2 --- /dev/null +++ b/openpype/hosts/maya/api/usdlib.py @@ -0,0 +1,61 @@ +from openpype.pipeline.constants import AVALON_CONTAINER_ID +from pxr import Sdf + +from maya import cmds + + +def remove_spec(spec): + """Delete Sdf.PrimSpec or Sdf.PropertySpec + + Also see: + https://forum.aousd.org/t/api-basics-for-designing-a-manage-edits-editor-for-usd/676/1 # noqa + https://gist.github.com/BigRoy/4d2bf2eef6c6a83f4fda3c58db1489a5 + + """ + if spec.expired: + return + + if isinstance(spec, Sdf.PrimSpec): + # PrimSpec + parent = spec.nameParent + if parent: + view = parent.nameChildren + else: + # Assume PrimSpec is root prim + view = spec.layer.rootPrims + del view[spec.name] + + elif isinstance(spec, Sdf.PropertySpec): + # Relationship and Attribute specs + del spec.owner.properties[spec.name] + else: + raise TypeError(f"Unsupported spec type: {spec}") + + +def iter_ufe_usd_selection(): + for path in cmds.ls(selection=True, ufeObjects=True, long=True, + absoluteName=True): + if "," not in path: + continue + + node, ufe_path = path.split(",", 1) + if cmds.nodeType(node) != "mayaUsdProxyShape": + continue + + yield path + + +def containerise_prim(prim, + name, + namespace, + context, + loader): + for key, value in [ + ("openpype:schema", "openpype:container-2.0"), + ("openpype:id", AVALON_CONTAINER_ID), + ("openpype:name", name), + ("openpype:namespace", namespace), + ("openpype:loader", loader), + ("openpype:representation", context["representation"]["_id"]), + ]: + prim.SetCustomDataByKey(key, str(value)) diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd.py b/openpype/hosts/maya/plugins/create/create_maya_usd.py index cc9a14bd3a5..02f707813c3 100644 --- a/openpype/hosts/maya/plugins/create/create_maya_usd.py +++ b/openpype/hosts/maya/plugins/create/create_maya_usd.py @@ -19,6 +19,11 @@ class CreateMayaUsd(plugin.MayaCreator): cache = {} + # TODO: Remove these default variants - this is just to trivialize + # the usage of the bootstrapping that was once built for Houdini + # that bootstrapped usdModel and usdShade as usdAsset + default_variants = ["Model", "Shade"] + def get_publish_families(self): return ["usd", "mayaUsd"] @@ -100,3 +105,24 @@ def get_instance_attr_defs(self): ]) return defs + + +class CreateMayaUsdContribution(CreateMayaUsd): + + + identifier = "io.openpype.creators.maya.mayausd.contribution" + label = "Maya USD Contribution" + family = "usd.layered" + icon = "cubes" + description = "Create Maya USD Contribution" + + def get_instance_attr_defs(self): + + import os + defs = super(CreateMayaUsdContribution, self).get_instance_attr_defs() + defs.insert(0, TextDef( + "sublayer", + label="Sublayer", + 
default=os.environ["AVALON_TASK"] + )) + return defs diff --git a/openpype/hosts/maya/plugins/inventory/select_containers.py b/openpype/hosts/maya/plugins/inventory/select_containers.py index f85bf17ab09..a443b99cb8f 100644 --- a/openpype/hosts/maya/plugins/inventory/select_containers.py +++ b/openpype/hosts/maya/plugins/inventory/select_containers.py @@ -17,6 +17,7 @@ def process(self, containers): all_members = [] for container in containers: members = get_container_members(container) + print(members) all_members.extend(members) cmds.select(all_members, replace=True, noExpand=True) @@ -31,7 +32,8 @@ class HighlightBySceneSelection(InventoryAction): def process(self, containers): - selection = set(cmds.ls(selection=True, long=True, objectsOnly=True)) + selection = set(cmds.ls(selection=True, long=True, objectsOnly=True, + ufeObjects=True)) host = registered_host() to_select = [] diff --git a/openpype/hosts/maya/plugins/load/load_into_maya_usd.py b/openpype/hosts/maya/plugins/load/load_into_maya_usd.py new file mode 100644 index 00000000000..9597f5d81f8 --- /dev/null +++ b/openpype/hosts/maya/plugins/load/load_into_maya_usd.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +import uuid + +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.pipeline.load import get_representation_path_from_context +from openpype.hosts.maya.api.usdlib import ( + containerise_prim, + iter_ufe_usd_selection +) + +from maya import cmds +import mayaUsd + + +class MayaUsdProxyReferenceUsd(load.LoaderPlugin): + """Add a USD Reference into mayaUsdProxyShape + + TODO: It'd be much easier if this loader would be capable of returning the + available containers in the scene based on the AYON URLs inside a USD + stage. That way we could potentially avoid the need the custom + identifier, stay closer to USD native data and rely solely on the + AYON:asset=blue,subset=modelMain,version=1 url + + """ + + families = ["model", "usd", "pointcache", "animation"] + representations = ["usd", "usda", "usdc", "usdz", "abc"] + + label = "USD Add Reference" + order = -999 + icon = "code-fork" + color = "orange" + + identifier_key = "openpype_identifier" + + def load(self, context, name=None, namespace=None, options=None): + + from pxr import Sdf + + selection = list(iter_ufe_usd_selection()) + if not selection: + # Create a maya USD proxy with /root prim and add the reference + import mayaUsd_createStageWithNewLayer + from pxr import UsdGeom + + # Make sure we can load the plugin + cmds.loadPlugin("mayaUsdPlugin", quiet=True) + + shape = mayaUsd_createStageWithNewLayer.createStageWithNewLayer() + stage = mayaUsd.ufe.getStage('|world' + shape) + prim_path = "/root" + UsdGeom.Xform.Define(stage, prim_path) + root_layer = stage.GetRootLayer() + root_layer.defaultPrim = prim_path + prim = stage.GetPrimAtPath(prim_path) + else: + assert len(selection) == 1, "Select only one PRIM please" + ufe_path = selection[0] + prim = mayaUsd.ufe.ufePathToPrim(ufe_path) + + if not prim: + raise RuntimeError("Invalid primitive") + + # Define reference using Sdf.Reference so we can directly set custom + # data for it + path = get_representation_path_from_context(context) + + references = prim.GetReferences() + + # Add unique containerised data to the reference + identifier = str(prim.GetPath()) + ":" + str(uuid.uuid4()) + identifier_data = {self.identifier_key: identifier} + reference = Sdf.Reference(assetPath=path, + customData=identifier_data) + + success = references.AddReference(reference) + if not success: + raise 
RuntimeError("Failed to add reference") + + # TODO: We should actually just use the data on the `Sdf.Reference` + # instead of on the USDPrim + container = containerise_prim( + prim, + name=name, + namespace=namespace or "", + context=context, + loader=self.__class__.__name__ + ) + + return container + + def update(self, container, representation): + # type: (dict, dict) -> None + """Update container with specified representation.""" + + from pxr import Sdf + + prim = container["prim"] + path = get_representation_path(representation) + for references, index in self._get_prim_references(prim): + reference = references[index] + new_reference = Sdf.Reference( + assetPath=path, + customData=reference.customData, + layerOffset=reference.layerOffset, + primPath=reference.primPath + ) + references[index] = new_reference + + # Update representation id + # TODO: Do this in prim spec where we update reference path? + prim.SetCustomDataByKey( + "openpype:representation", str(representation["_id"]) + ) + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + # type: (dict) -> None + """Remove loaded container.""" + prim = container["prim"] + + # Pop the references from the prepended items list + related_references = reversed(list(self._get_prim_references(prim))) + for references, index in related_references: + references.remove(references[index]) + + prim.ClearCustomDataByKey("openpype") + + def _get_prim_references(self, prim): + + # Get a list of all prepended references + for prim_spec in prim.GetPrimStack(): + if not prim_spec: + continue + + if not prim_spec.hasReferences: + continue + + prepended_items = prim_spec.referenceList.prependedItems + for index, reference in enumerate(prepended_items): + # Override the matching reference identifier + # TODO: Make sure we only return the correct reference + yield prepended_items, index diff --git a/openpype/hosts/maya/plugins/load/load_reference_into_maya_usd.py b/openpype/hosts/maya/plugins/load/load_reference_into_maya_usd.py new file mode 100644 index 00000000000..83666c09937 --- /dev/null +++ b/openpype/hosts/maya/plugins/load/load_reference_into_maya_usd.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +import contextlib + +from openpype.pipeline import ( + load, + get_representation_path, +) +from openpype.pipeline.load import get_representation_path_from_context +from openpype.hosts.maya.api.usdlib import ( + containerise_prim, + iter_ufe_usd_selection +) + +from maya import cmds +import mayaUsd + + +@contextlib.contextmanager +def no_edit_mode(prim, restore_after=True): + """Ensure MayaReference prim is not in edit mode during context""" + pulled_node = mayaUsd.lib.PrimUpdaterManager.readPullInformation(prim) + ufe_path = None + try: + # remove edit state if pulled + if pulled_node: + import mayaUsdUtils + assert mayaUsdUtils.isPulledMayaReference(pulled_node) + cmds.mayaUsdDiscardEdits(pulled_node) + + # Discarding the edits directly selects the prim + # so we can get the UFE path from selection + ufe_path = cmds.ls(selection=True, ufeObjects=True, long=True)[0] + + yield prim, ufe_path, pulled_node + finally: + if restore_after and pulled_node and ufe_path: + cmds.mayaUsdEditAsMaya(ufe_path) + + +class MayaUsdProxyAddMayaReferenceLoader(load.LoaderPlugin): + """Read USD data in a Maya USD Proxy + + TODO: It'd be much easier if this loader would be capable of returning the + available containers in the scene based on the AYON URLs inside a USD + stage. 
That way we could potentially avoid the need for custom metadata + keys, stay closer to USD native data and rely solely on the + AYON:asset=blue,subset=modelMain,version=1 url + + """ + + families = ["*"] + representations = ["*"] + extensions = ["ma", "mb"] + + label = "USD Add Maya Reference" + order = -998 + icon = "code-fork" + color = "orange" + + identifier_key = "openpype_identifier" + + def load(self, context, name=None, namespace=None, options=None): + + selection = list(iter_ufe_usd_selection()) + assert len(selection) == 1, "Select only one PRIM please" + ufe_path = selection[0] + path = get_representation_path_from_context(context) + + import mayaUsdAddMayaReference + + namespace = "test" + prim = mayaUsdAddMayaReference.createMayaReferencePrim( + ufe_path, + path, + namespace, + # todo: add more of the arguments + # mayaReferencePrimName Nameprim_name, + # groupPrim (3-tuple, group name, type and kind) + # variantSet (2-tuple, variant set name and variant name) + ) + if not prim: + # Failed to add a reference + raise RuntimeError(f"Failed to add a reference at {ufe_path}") + + containerise_prim( + prim, + name=name, + namespace=namespace or "", + context=context, + loader=self.__class__.__name__ + ) + + return prim + + def _update_reference_path(self, prim, filepath): + """Update MayaReference prim 'mayaReference' in nearest prim spec""" + + from pxr import Sdf + + # We want to update the authored opinion in the right place, e.g. + # within a VariantSet if it's authored there. We go through the + # PrimStack to find the first prim spec that authors an opinion + # on the 'mayaReference' attribute where we have permission to + # change it. This could technically mean we're altering it in + # layers that we might not want to (e.g. a published USD file?) + stack = prim.GetPrimStack() + for prim_spec in stack: + if "mayaReference" not in prim_spec.attributes: + # prim spec defines no opinion on mayaRefernce attribute? + continue + + attr = prim_spec.attributes["mayaReference"] + if attr.permission != Sdf.PermissionPublic: + print(f"Not allowed to edit: {attr}") + continue + + if filepath != attr.default: + print( + f"Updating {attr.path} - {attr.default} -> {filepath}") + attr.default = filepath + + # Attribute is either updated or already set to + # the value in that layer + return + + # Just define in the current edit layer? + attr = prim.GetAttribute("mayaReference") + attr.Set(filepath) + + def update(self, container, representation): + # type: (dict, dict) -> None + """Update container with specified representation.""" + + prim = container["prim"] + filepath = get_representation_path(representation) + + with no_edit_mode(prim): + self._update_reference_path(prim, filepath) + + # Update representation id + # TODO: Do this in prim spec where we update reference path? 
+ prim.SetCustomDataByKey( + "openpype:representation", str(representation["_id"]) + ) + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + # type: (dict) -> None + """Remove loaded container.""" + + from openpype.hosts.maya.api.usdlib import remove_spec + + prim = container["prim"] + with no_edit_mode(prim, restore_after=False): + for spec in prim.GetPrimStack(): + remove_spec(spec) diff --git a/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py new file mode 100644 index 00000000000..0b08cfeb8ac --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py @@ -0,0 +1,120 @@ +import pyblish.api + +from openpype.client import get_subset_by_name, get_asset_by_name +import openpype.lib.usdlib as usdlib + + +class CollectUsdBootstrap(pyblish.api.InstancePlugin): + """Collect special Asset/Shot bootstrap instances if those are needed. + + Some specific subsets are intended to be part of the default structure + of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset + we layer a Model and Shade USD file over each other and expose that in + a Asset USD file, ready to use. + + On the first publish of any of the components of a Asset or Shot the + missing pieces are bootstrapped and generated in the pipeline too. This + means that on the very first publish of your model the Asset USD file + will exist too. + + """ + + order = pyblish.api.CollectorOrder + 0.35 + label = "Collect USD Bootstrap" + hosts = ["maya"] + families = ["usd", "usd.layered"] + # TODO: Implement feature, then enable + enabled = False + + def process(self, instance): + + # Detect whether the current subset is a subset in a pipeline + def get_bootstrap(instance): + instance_subset = instance.data["subset"] + for name, layers in usdlib.PIPELINE.items(): + if instance_subset in set(layers): + return name # e.g. "asset" + break + else: + return + + bootstrap = get_bootstrap(instance) + if bootstrap: + self.add_bootstrap(instance, bootstrap) + + # Check if any of the dependencies requires a bootstrap + for dependency in instance.data.get("publishDependencies", list()): + bootstrap = get_bootstrap(dependency) + if bootstrap: + self.add_bootstrap(dependency, bootstrap) + + def add_bootstrap(self, instance, bootstrap): + + self.log.debug("Add bootstrap for: %s" % bootstrap) + + project_name = instance.context.data["projectName"] + asset = get_asset_by_name(project_name, instance.data["asset"]) + assert asset, "Asset must exist: %s" % asset + + # Check which are not about to be created and don't exist yet + required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap) + + require_all_layers = instance.data.get("requireAllLayers", False) + if require_all_layers: + # USD files load fine in usdview and Houdini even when layered or + # referenced files do not exist. So by default we don't require + # the layers to exist. 
+ layers = usdlib.PIPELINE.get(bootstrap) + if layers: + required += list(layers) + + if not required: + return + + for subset in required: + self.log.info("USD bootstrapping to: %s" % subset) + if self._subset_exists(project_name, instance, subset, asset): + continue + + self.log.debug( + "Creating {bootstrap} USD bootstrap: " + "{asset} > {subset}".format( + bootstrap=bootstrap, + asset=asset["name"], + subset=subset + ) + ) + + new = instance.context.create_instance(subset) + new.data["subset"] = subset + #new.data["label"] = "{0} ({1})".format(subset, asset["name"]) + new.data["family"] = "usd.bootstrap" + new.data["icon"] = "link" + new.data["comment"] = "Automated bootstrap USD file." + new.data["publishFamilies"] = ["usd"] + + # Do not allow the user to toggle this instance + new.data["optional"] = False + + # Copy some data from the instance for which we bootstrap + for key in ["asset"]: + new.data[key] = instance.data[key] + + def _subset_exists(self, project_name, instance, subset, asset): + """Return whether subset exists in current context or in database.""" + # Allow it to be created during this publish session + context = instance.context + for inst in context: + if ( + inst.data["subset"] == subset + and inst.data["asset"] == asset["name"] + ): + return True + + # Or, if they already exist in the database we can + # skip them too. + if get_subset_by_name( + project_name, subset, asset["_id"], fields=["_id"] + ): + return True + return False diff --git a/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py b/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py new file mode 100644 index 00000000000..1a053887cba --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py @@ -0,0 +1,100 @@ +import os + +import pyblish.api +from openpype.pipeline import publish + + +class ExtractBootstrapUSD(publish.Extractor): + """Extract in-memory bootstrap USD files for Assets and Shots. + + See `collect_usd_bootstrap_asset.py` for more details. 
+ + """ + + order = pyblish.api.ExtractorOrder + 0.1 + label = "Bootstrap USD" + hosts = ["houdini", "maya"] + targets = ["local"] + families = ["usd.bootstrap"] + + def process(self, instance): + from openpype.lib import usdlib + + staging_dir = self.staging_dir(instance) + filename = "{subset}.usd".format(**instance.data) + filepath = os.path.join(staging_dir, filename) + self.log.info("Bootstrap USD '%s' to '%s'" % (filename, staging_dir)) + + subset = instance.data["subset"] + if subset == "usdAsset": + # Asset + steps = usdlib.PIPELINE["asset"] + layers = self.get_usd_master_paths(steps, instance) + usdlib.create_asset(filepath, + asset_name=instance.data["asset"], + reference_layers=layers) + + elif subset == "usdShot": + # Shot + steps = usdlib.PIPELINE["shot"] + layers = self.get_usd_master_paths(steps, instance) + usdlib.create_shot(filepath, + layers=layers) + + elif subset == "usdModel": + variant_subsets = instance.data["variantSubsets"] + usdlib.create_model(filepath, + asset=instance.data["asset"], + variant_subsets=variant_subsets) + + elif subset == "usdShade": + variant_subsets = instance.data["variantSubsets"] + usdlib.create_shade(filepath, + asset=instance.data["asset"], + variant_subsets=variant_subsets) + + elif subset in usdlib.PIPELINE["asset"]: + # Asset layer + # Generate the stub files with root primitive + # TODO: implement + #usdlib.create_stub_usd(filepath) + raise NotImplementedError("TODO") + + elif subset in usdlib.PIPELINE["shot"]: + # Shot Layer + # Generate the stub file for an Sdf Layer + # TODO: implement + #usdlib.create_stub_usd_sdf_layer(filepath) + raise NotImplementedError("TODO") + + else: + raise RuntimeError("No bootstrap method " + "available for: %s" % subset) + + representations = instance.data.setdefault("representations", []) + representations.append({ + "name": "usd", + "ext": "usd", + "files": filename, + "stagingDir": staging_dir + }) + + def get_usd_master_paths(self, subsets, instance): + + raise NotImplementedError("TODO") + # TODO: Implement the retrieval of the right paths + # TODO: preferably with AYON asset resolver these would be AYON URIs + # asset = instance.data["asset"] + # + # template = _get_project_publish_template() + # layer_paths = [] + # for layer in subsets: + # layer_path = self._get_usd_master_path( + # subset=layer, + # asset=asset, + # template=template + # ) + # layer_paths.append(layer_path) + # self.log.info("Asset references: %s" % layer_path) + # + # return layer_paths diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py index 7234f5a0258..95905115bc0 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py @@ -25,7 +25,8 @@ def get_invalid(cls, instance): def process(self, instance): # Allow renderlayer, rendersetup and workfile to be empty - skip_families = {"workfile", "renderlayer", "rendersetup"} + skip_families = {"workfile", "renderlayer", "rendersetup", + "usd.bootstrap"} if instance.data.get("family") in skip_families: return diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index c166feb3a6b..416bca35094 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -8,8 +8,16 @@ # Allow to fall back on Multiverse 6.3.0+ pxr usd library from mvpxr import Usd, UsdGeom, Sdf, Kind -from openpype.client import get_project, get_asset_by_name -from openpype.pipeline import Anatomy, 
get_current_project_name +from openpype.client import ( + get_asset_by_name, + get_subset_by_name, + get_representation_by_name, + get_hero_version_by_subset_id +) +from openpype.pipeline import ( + get_current_project_name, + get_representation_path +) log = logging.getLogger(__name__) @@ -29,7 +37,7 @@ def create_asset( - filepath, asset_name, reference_layers, kind=Kind.Tokens.component + filepath, asset_name, reference_layers=None, kind=Kind.Tokens.component ): """ Creates an asset file that consists of a top level layer and sublayers for @@ -50,32 +58,42 @@ def create_asset( log.info("Creating asset at %s", filepath) # Make the layer ascii - good for readability, plus the file is small - root_layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"}) - stage = Usd.Stage.Open(root_layer) - - # Define a prim for the asset and make it the default for the stage. - asset_prim = UsdGeom.Xform.Define(stage, "/%s" % asset_name).GetPrim() - stage.SetDefaultPrim(asset_prim) - - # Let viewing applications know how to orient a free camera properly - UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"}) + + # Define root prim for the asset and make it the default for the stage. + prim_name = asset_name + asset_prim = Sdf.PrimSpec( + layer.pseudoRoot, + prim_name, + Sdf.SpecifierDef, + "Xform" + ) + # Define Kind # Usually we will "loft up" the kind authored into the exported geometry # layer rather than re-stamping here; we'll leave that for a later # tutorial, and just be explicit here. - model = Usd.ModelAPI(asset_prim) - if kind: - model.SetKind(kind) + asset_prim.kind = kind + + # Set asset info + asset_prim.assetInfo["name"] = asset_name + asset_prim.assetInfo["identifier"] = "%s/%s.usd" % (asset_name, asset_name) + # asset.assetInfo["version"] = asset_version - model.SetAssetName(asset_name) - model.SetAssetIdentifier("%s/%s.usd" % (asset_name, asset_name)) + # Set default prim + layer.defaultPrim = prim_name + + # Let viewing applications know how to orient a free camera properly + # Similar to: UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + layer.pseudoRoot.SetInfo(UsdGeom.Tokens.upAxis, UsdGeom.Tokens.y) # Add references to the asset prim - references = asset_prim.GetReferences() - for reference_filepath in reference_layers: - references.AddReference(reference_filepath) + if reference_layers: + asset_prim.referenceList.prependedItems[:] = [ + Sdf.Reference(assetPath=path) for path in reference_layers + ] - stage.GetRootLayer().Save() + layer.Save() def create_shot(filepath, layers, create_layers=False): @@ -83,9 +101,9 @@ def create_shot(filepath, layers, create_layers=False): Args: filepath (str): Filepath where the asset.usd file will be saved. - layers (str): When provided this will be added verbatim in the + layers (list): When provided this will be added verbatim in the subLayerPaths layers. When the provided layer paths do not exist - they are generated using Sdf.Layer.CreateNew + they are generated using Sdf.Layer.CreateNew create_layers (bool): Whether to create the stub layers on disk if they do not exist yet. 
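The reworked `create_asset()` above authors through `Sdf.Layer`/`Sdf.PrimSpec` the same opinions the previous version authored through `Usd.Stage` APIs: default prim, `Xform` type, kind, assetInfo and up axis. A quick way to sanity-check the Sdf-authored file is to open it on a stage and read those opinions back through the composed API; a sketch, with "asset.usda" standing in for whatever filepath was passed:

    from pxr import Usd, UsdGeom, Kind

    stage = Usd.Stage.Open("asset.usda")
    prim = stage.GetDefaultPrim()

    assert prim.GetTypeName() == "Xform"
    assert Usd.ModelAPI(prim).GetKind() == Kind.Tokens.component
    assert Usd.ModelAPI(prim).GetAssetName() == prim.GetName()
    assert UsdGeom.GetStageUpAxis(stage) == UsdGeom.Tokens.y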
@@ -94,8 +112,7 @@ def create_shot(filepath, layers, create_layers=False): """ # Also see create_shot.py in PixarAnimationStudios/USD endToEnd example - - stage = Usd.Stage.CreateNew(filepath) + root_layer = Sdf.Layer.CreateNew(filepath) log.info("Creating shot at %s" % filepath) for layer_path in layers: @@ -109,11 +126,12 @@ def create_shot(filepath, layers, create_layers=False): Sdf.Layer.CreateNew(layer_path) - stage.GetRootLayer().subLayerPaths.append(layer_path) + root_layer.subLayerPaths.append(layer_path) - # Lets viewing applications know how to orient a free camera properly - UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) - stage.GetRootLayer().Save() + # Let viewing applications know how to orient a free camera properly + # Similar to: UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + root_layer.pseudoRoot.SetInfo(UsdGeom.Tokens.upAxis, UsdGeom.Tokens.y) + root_layer.Save() return filepath @@ -315,41 +333,30 @@ def get_usd_master_path(asset, subset, representation): """ project_name = get_current_project_name() - anatomy = Anatomy(project_name) - project_doc = get_project( - project_name, - fields=["name", "data.code"] - ) if isinstance(asset, dict) and "name" in asset: # Allow explicitly passing asset document asset_doc = asset else: - asset_doc = get_asset_by_name(project_name, asset, fields=["name"]) - - template_obj = anatomy.templates_obj["publish"]["path"] - path = template_obj.format_strict( - { - "project": { - "name": project_name, - "code": project_doc.get("data", {}).get("code") - }, - "folder": { - "name": asset_doc["name"], - }, - "asset": asset_doc["name"], - "subset": subset, - "representation": representation, - "version": 0, # stub version zero - } - ) + asset_doc = get_asset_by_name(project_name, asset, fields=["_id"]) - # Remove the version folder - subset_folder = os.path.dirname(os.path.dirname(path)) - master_folder = os.path.join(subset_folder, "master") - fname = "{0}.{1}".format(subset, representation) - - return os.path.join(master_folder, fname).replace("\\", "/") + if isinstance(subset, dict) and "name" in subset: + # Allow explicitly passing subset document + subset_doc = subset + else: + subset_doc = get_subset_by_name(project_name, + subset, + asset_id=asset_doc["_id"], + fields=["_id"]) + + version = get_hero_version_by_subset_id(project_name, + subset_id=subset_doc["_id"]) + representation = get_representation_by_name(project_name, + representation, + version_id=version["_id"]) + + path = get_representation_path(representation) + return path.replace("\\", "/") def parse_avalon_uri(uri): From 8bac8f3bcd4bd576cd61a68ba8092eaaaebe132e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 3 Nov 2023 00:33:54 +0100 Subject: [PATCH 02/56] Houdini USD functionality draft cleanup/refactor --- openpype/hosts/houdini/api/pipeline.py | 56 --- openpype/hosts/houdini/api/usd.py | 146 ++++---- .../publish/collect_instances_usd_layered.py | 10 +- .../plugins/publish/extract_usd_layered.py | 230 ++++++------ .../outputprocessors/ayon_uri_processor.py | 134 +++++++ .../vendor/husdoutputprocessors/__init__.py | 1 - .../avalon_uri_processor.py | 152 -------- .../stagingdir_processor.py | 90 ----- openpype/lib/usdlib.py | 345 ++++++++++++++---- 9 files changed, 606 insertions(+), 558 deletions(-) create mode 100644 openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py delete mode 100644 openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py delete mode 100644 
openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py delete mode 100644 openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index f8db45c56bd..11135e20b2c 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -3,7 +3,6 @@ import os import sys import logging -import contextlib import hou # noqa @@ -66,10 +65,6 @@ def install(self): register_event_callback("open", on_open) register_event_callback("new", on_new) - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled - ) - self._has_been_setup = True # add houdini vendor packages hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") @@ -406,54 +401,3 @@ def _set_context_settings(): lib.reset_framerange() lib.update_houdini_vars_context() - - -def on_pyblish_instance_toggled(instance, new_value, old_value): - """Toggle saver tool passthrough states on instance toggles.""" - @contextlib.contextmanager - def main_take(no_update=True): - """Enter root take during context""" - original_take = hou.takes.currentTake() - original_update_mode = hou.updateModeSetting() - root = hou.takes.rootTake() - has_changed = False - try: - if original_take != root: - has_changed = True - if no_update: - hou.setUpdateMode(hou.updateMode.Manual) - hou.takes.setCurrentTake(root) - yield - finally: - if has_changed: - if no_update: - hou.setUpdateMode(original_update_mode) - hou.takes.setCurrentTake(original_take) - - if not instance.data.get("_allowToggleBypass", True): - return - - nodes = instance[:] - if not nodes: - return - - # Assume instance node is first node - instance_node = nodes[0] - - if not hasattr(instance_node, "isBypassed"): - # Likely not a node that can actually be bypassed - log.debug("Can't bypass node: %s", instance_node.path()) - return - - if instance_node.isBypassed() != (not old_value): - print("%s old bypass state didn't match old instance state, " - "updating anyway.." % instance_node.path()) - - try: - # Go into the main take, because when in another take changing - # the bypass state of a note cannot be done due to it being locked - # by default. - with main_take(no_update=True): - instance_node.bypass(not new_value) - except hou.PermissionError as exc: - log.warning("%s - %s", instance_node.path(), exc) diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index b935dfdf309..1d4415ae94a 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -3,92 +3,13 @@ import contextlib import logging -from qtpy import QtWidgets, QtCore, QtGui - -from openpype import style -from openpype.client import get_asset_by_name -from openpype.pipeline import legacy_io -from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget - -from pxr import Sdf +import hou +from pxr import Sdf, Vt log = logging.getLogger(__name__) -class SelectAssetDialog(QtWidgets.QWidget): - """Frameless assets dialog to select asset with double click. - - Args: - parm: Parameter where selected asset name is set. 
- """ - - def __init__(self, parm): - self.setWindowTitle("Pick Asset") - self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup) - - assets_widget = SingleSelectAssetsWidget(legacy_io, parent=self) - - layout = QtWidgets.QHBoxLayout(self) - layout.addWidget(assets_widget) - - assets_widget.double_clicked.connect(self._set_parameter) - self._assets_widget = assets_widget - self._parm = parm - - def _set_parameter(self): - name = self._assets_widget.get_selected_asset_name() - self._parm.set(name) - self.close() - - def _on_show(self): - pos = QtGui.QCursor.pos() - # Select the current asset if there is any - select_id = None - name = self._parm.eval() - if name: - project_name = legacy_io.active_project() - db_asset = get_asset_by_name(project_name, name, fields=["_id"]) - if db_asset: - select_id = db_asset["_id"] - - # Set stylesheet - self.setStyleSheet(style.load_stylesheet()) - # Refresh assets (is threaded) - self._assets_widget.refresh() - # Select asset - must be done after refresh - if select_id is not None: - self._assets_widget.select_asset(select_id) - - # Show cursor (top right of window) near cursor - self.resize(250, 400) - self.move(self.mapFromGlobal(pos) - QtCore.QPoint(self.width(), 0)) - - def showEvent(self, event): - super(SelectAssetDialog, self).showEvent(event) - self._on_show() - - -def pick_asset(node): - """Show a user interface to select an Asset in the project - - When double clicking an asset it will set the Asset value in the - 'asset' parameter. - - """ - - parm = node.parm("asset_name") - if not parm: - log.error("Node has no 'asset' parameter: %s", node) - return - - # Construct a frameless popup so it automatically - # closes when clicked outside of it. - global tool - tool = SelectAssetDialog(parm) - tool.show() - - def add_usd_output_processor(ropnode, processor): """Add USD Output Processor to USD Rop node. @@ -199,11 +120,13 @@ def get_usd_rop_loppath(node): return node.parm("loppath").evalAsNode() -def get_layer_save_path(layer): +def get_layer_save_path(layer, expand_string=True): """Get custom HoudiniLayerInfo->HoudiniSavePath from SdfLayer. Args: layer (pxr.Sdf.Layer): The Layer to retrieve the save pah data from. + expand_string (bool): Whether to expand any houdini vars in the save + path before computing the absolute path. Returns: str or None: Path to save to when data exists. @@ -216,6 +139,8 @@ def get_layer_save_path(layer): save_path = hou_layer_info.customData.get("HoudiniSavePath", None) if save_path: # Unfortunately this doesn't actually resolve the full absolute path + if expand_string: + save_path = hou.text.expandString(save_path) return layer.ComputeAbsolutePath(save_path) @@ -262,6 +187,7 @@ def iter_layer_recursive(layer): def get_configured_save_layers(usd_rop): + """Retrieve the layer save paths from a USD ROP.""" lop_node = get_usd_rop_loppath(usd_rop) stage = lop_node.stage(apply_viewport_overrides=False) @@ -279,3 +205,59 @@ def get_configured_save_layers(usd_rop): save_layers.append(layer) return save_layers + + +def setup_lop_python_layer(layer, node, savepath=None, + apply_file_format_args=True): + """Set up Sdf.Layer with HoudiniLayerInfo prim for metadata. + + This is the same as `loputils.createPythonLayer` but can be run on top + of `pxr.Sdf.Layer` instances that are already created in a Python LOP node. + That's useful if your layer creation itself is built to be DCC agnostic, + then we just need to run this after per layer to make it explicitly + stored for houdini. 
+ + By default, Houdini doesn't apply the FileFormatArguments supplied to + the created layer; however it does support USD's file save suffix + of `:SDF_FORMAT_ARGS:` to supply them. With `apply_file_format_args` any + file format args set on the layer's creation will be added to the + save path through that. + + Note: The `node.addHeldLayer` call will only work from a LOP python node + whenever `node.editableStage()` or `node.editableLayer()` was called. + + Arguments: + layer (Sdf.Layer): An existing layer (most likely just created + in the current runtime) + node (hou.LopNode): The Python LOP node to attach the layer to so + it does not get garbage collected/mangled after the downstream. + savepath (Optional[str]): When provided the HoudiniSaveControl + will be set to Explicit with HoudiniSavePath to this path. + apply_file_format_args (Optional[bool]): When enabled any + FileFormatArgs defined for the layer on creation will be set + in the HoudiniSavePath so Houdini USD ROP will use them top. + + Returns: + Sdf.PrimSpec: The Created HoudiniLayerInfo prim spec. + + """ + # Add a Houdini Layer Info prim where we can put the save path. + p = Sdf.CreatePrimInLayer(layer, '/HoudiniLayerInfo') + p.specifier = Sdf.SpecifierDef + p.typeName = 'HoudiniLayerInfo' + if savepath: + if apply_file_format_args: + args = layer.GetFileFormatArguments() + if args: + args = ":".join("{}={}".format(key, value) + for key, value in args.items()) + savepath = "{}:SDF_FORMAT_ARGS:{}".format(savepath, args) + + p.customData['HoudiniSavePath'] = savepath + p.customData['HoudiniSaveControl'] = 'Explicit' + # Let everyone know what node created this layer. + p.customData['HoudiniCreatorNode'] = node.sessionId() + p.customData['HoudiniEditorNodes'] = Vt.IntArray([node.sessionId()]) + node.addHeldLayer(layer.identifier) + + return p diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py index 0600730d005..80af830a8be 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py @@ -122,10 +122,6 @@ def process_node(self, node, context): instance.data.update(save_data) instance.data["usdLayer"] = layer - # Don't allow the Pyblish `instanceToggled` we have installed - # to set this node to bypass. 
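The `setup_lop_python_layer()` helper added to `openpype/hosts/houdini/api/usd.py` above is meant to be called from a Python LOP node's own code. A usage sketch; per its docstring it assumes `node.editableStage()` or `node.editableLayer()` has been called first so that `node.addHeldLayer()` is allowed, and the layer and save path below are placeholders:

    # Inside a Python LOP node's code; hou.pwd() is that node.
    import hou
    from pxr import Sdf

    node = hou.pwd()
    node.editableStage()  # per the docstring, required before node.addHeldLayer()

    layer = Sdf.Layer.CreateAnonymous()
    Sdf.CreatePrimInLayer(layer, "/root")
    setup_lop_python_layer(layer, node, savepath="$HIP/usd/contribution.usd")

How the layer is then wired into the node's stage (for example as a sublayer of the active layer) is left out here and depends on the surrounding setup.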
- instance.data["_allowToggleBypass"] = False - instances.append(instance) # Store the collected ROP node dependencies @@ -136,14 +132,14 @@ def process_node(self, node, context): def get_save_data(self, save_path): # Resolve Avalon URI - uri_data = usdlib.parse_avalon_uri(save_path) + uri_data = usdlib.parse_ayon_uri(save_path) if not uri_data: self.log.warning("Non Avalon URI Layer Path: %s" % save_path) return {} # Collect asset + subset from URI - name = "{subset} ({asset})".format(**uri_data) - fname = "{asset}_{subset}.{ext}".format(**uri_data) + name = "{product} ({asset})".format(**uri_data) + fname = "{asset}_{subset}.usd".format(**uri_data) data = dict(uri_data) data["usdSavePath"] = save_path diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index d6193f13c17..c9785120064 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -19,11 +19,15 @@ import openpype.hosts.houdini.api.usd as hou_usdlib from openpype.hosts.houdini.api.lib import render_rop +try: + # Py 3.3+ + from contextlib import ExitStack +except ImportError: + # Implement for backwards compatibility + class ExitStack(object): + """Context manager for dynamic management of a stack of exit callbacks. -class ExitStack(object): - """Context manager for dynamic management of a stack of exit callbacks. - - For example: + For example: with ExitStack() as stack: files = [stack.enter_context(open(fname)) for fname in filenames] @@ -31,114 +35,114 @@ class ExitStack(object): # the with statement, even if attempts to open files later # in the list raise an exception - """ - - def __init__(self): - self._exit_callbacks = deque() - - def pop_all(self): - """Preserve the context stack by transferring it to a new instance""" - new_stack = type(self)() - new_stack._exit_callbacks = self._exit_callbacks - self._exit_callbacks = deque() - return new_stack - - def _push_cm_exit(self, cm, cm_exit): - """Helper to correctly register callbacks to __exit__ methods""" - - def _exit_wrapper(*exc_details): - return cm_exit(cm, *exc_details) + """ - _exit_wrapper.__self__ = cm - self.push(_exit_wrapper) + def __init__(self): + self._exit_callbacks = deque() - def push(self, exit): - """Registers a callback with the standard __exit__ method signature. + def pop_all(self): + """Preserve context stack by transferring it to a new instance""" + new_stack = type(self)() + new_stack._exit_callbacks = self._exit_callbacks + self._exit_callbacks = deque() + return new_stack - Can suppress exceptions the same way __exit__ methods can. + def _push_cm_exit(self, cm, cm_exit): + """Helper to correctly register callbacks to __exit__ methods""" - Also accepts any object with an __exit__ method (registering a call - to the method instead of the object itself) + def _exit_wrapper(*exc_details): + return cm_exit(cm, *exc_details) - """ - # We use an unbound method rather than a bound method to follow - # the standard lookup behaviour for special methods - _cb_type = type(exit) - try: - exit_method = _cb_type.__exit__ - except AttributeError: - # Not a context manager, so assume its a callable - self._exit_callbacks.append(exit) - else: - self._push_cm_exit(exit, exit_method) - return exit # Allow use as a decorator - - def callback(self, callback, *args, **kwds): - """Registers an arbitrary callback and arguments. - - Cannot suppress exceptions. 
- """ + _exit_wrapper.__self__ = cm + self.push(_exit_wrapper) - def _exit_wrapper(exc_type, exc, tb): - callback(*args, **kwds) + def push(self, exit): + """Registers a callback with the standard __exit__ signature. - # We changed the signature, so using @wraps is not appropriate, but - # setting __wrapped__ may still help with introspection - _exit_wrapper.__wrapped__ = callback - self.push(_exit_wrapper) - return callback # Allow use as a decorator + Can suppress exceptions the same way __exit__ methods can. - def enter_context(self, cm): - """Enters the supplied context manager + Also accepts any object with an __exit__ method (registering a call + to the method instead of the object itself) - If successful, also pushes its __exit__ method as a callback and - returns the result of the __enter__ method. - """ - # We look up the special methods on the type to match the with - # statement - _cm_type = type(cm) - _exit = _cm_type.__exit__ - result = _cm_type.__enter__(cm) - self._push_cm_exit(cm, _exit) - return result - - def close(self): - """Immediately unwind the context stack""" - self.__exit__(None, None, None) - - def __enter__(self): - return self - - def __exit__(self, *exc_details): - # We manipulate the exception state so it behaves as though - # we were actually nesting multiple with statements - frame_exc = sys.exc_info()[1] - - def _fix_exception_context(new_exc, old_exc): - while 1: - exc_context = new_exc.__context__ - if exc_context in (None, frame_exc): - break - new_exc = exc_context - new_exc.__context__ = old_exc - - # Callbacks are invoked in LIFO order to match the behaviour of - # nested context managers - suppressed_exc = False - while self._exit_callbacks: - cb = self._exit_callbacks.pop() + """ + # We use an unbound method rather than a bound method to follow + # the standard lookup behaviour for special methods + _cb_type = type(exit) try: - if cb(*exc_details): - suppressed_exc = True - exc_details = (None, None, None) - except Exception: - new_exc_details = sys.exc_info() - # simulate the stack of exceptions by setting the context - _fix_exception_context(new_exc_details[1], exc_details[1]) - if not self._exit_callbacks: - raise - exc_details = new_exc_details - return suppressed_exc + exit_method = _cb_type.__exit__ + except AttributeError: + # Not a context manager, so assume its a callable + self._exit_callbacks.append(exit) + else: + self._push_cm_exit(exit, exit_method) + return exit # Allow use as a decorator + + def callback(self, callback, *args, **kwds): + """Registers an arbitrary callback and arguments. + + Cannot suppress exceptions. + """ + + def _exit_wrapper(exc_type, exc, tb): + callback(*args, **kwds) + + # We changed the signature, so using @wraps is not appropriate, but + # setting __wrapped__ may still help with introspection + _exit_wrapper.__wrapped__ = callback + self.push(_exit_wrapper) + return callback # Allow use as a decorator + + def enter_context(self, cm): + """Enters the supplied context manager + + If successful, also pushes its __exit__ method as a callback and + returns the result of the __enter__ method. 
+ """ + # We look up the special methods on the type to match the with + # statement + _cm_type = type(cm) + _exit = _cm_type.__exit__ + result = _cm_type.__enter__(cm) + self._push_cm_exit(cm, _exit) + return result + + def close(self): + """Immediately unwind the context stack""" + self.__exit__(None, None, None) + + def __enter__(self): + return self + + def __exit__(self, *exc_details): + # We manipulate the exception state so it behaves as though + # we were actually nesting multiple with statements + frame_exc = sys.exc_info()[1] + + def _fix_exception_context(new_exc, old_exc): + while 1: + exc_context = new_exc.__context__ + if exc_context in (None, frame_exc): + break + new_exc = exc_context + new_exc.__context__ = old_exc + + # Callbacks are invoked in LIFO order to match the behaviour of + # nested context managers + suppressed_exc = False + while self._exit_callbacks: + cb = self._exit_callbacks.pop() + try: + if cb(*exc_details): + suppressed_exc = True + exc_details = (None, None, None) + except Exception: + new_exc_details = sys.exc_info() + # simulate the stack of exceptions by setting the context + _fix_exception_context(new_exc_details[1], exc_details[1]) + if not self._exit_callbacks: + raise + exc_details = new_exc_details + return suppressed_exc @contextlib.contextmanager @@ -168,7 +172,7 @@ class ExtractUSDLayered(publish.Extractor): # Force Output Processors so it will always save any file # into our unique staging directory with processed Avalon paths - output_processors = ["avalon_uri_processor", "stagingdir_processor"] + output_processors = ["ayon_uri_processor", "savepathsrelativetooutput"] def process(self, instance): @@ -208,10 +212,10 @@ def process(self, instance): rop_overrides = { # This sets staging directory on the processor to force our # output files to end up in the Staging Directory. - "stagingdiroutputprocessor_stagingDir": staging_dir, + "savepathsrelativetooutput_rootdir": staging_dir, # Force the Avalon URI Output Processor to refactor paths for # references, payloads and layers to published paths. - "avalonurioutputprocessor_use_publish_paths": True, + "ayonurioutputprocessor_use_publish_paths": True, # Only write out specific USD files based on our outputs "savepattern": save_pattern, } @@ -226,7 +230,7 @@ def process(self, instance): ) stack.enter_context(manager) - # Some of these must be added after we enter the output + # These must be added after we enter the output # processor context manager because those parameters only # exist when the Output Processor is added to the ROP node. for name, value in rop_overrides.items(): @@ -261,11 +265,11 @@ def process(self, instance): # Deactivate this dependency self.log.debug( "Dependency matches previous publish version," - " deactivating %s for publish" % dependency + " deactivating %s for publish", dependency ) dependency.data["publish"] = False else: - self.log.debug("Extracted dependency: %s" % dependency) + self.log.debug("Extracted dependency: %s", dependency) # This dependency should be published dependency.data["files"] = [dependency_fname] dependency.data["stagingDir"] = staging_dir @@ -277,6 +281,12 @@ def process(self, instance): instance.data["files"].append(fname) def _compare_with_latest_publish(self, project_name, dependency, new_file): + """Compare whether last published version matches the current new file. + + Returns: + bool: Whether it's a match or not. 
+ + """ import filecmp _, ext = os.path.splitext(new_file) @@ -306,7 +316,9 @@ def _compare_with_latest_publish(self, project_name, dependency, new_file): return False representation = get_representation_by_name( - project_name, ext.lstrip("."), version["_id"] + project_name, + representation_name=ext.lstrip("."), + version_id=version["_id"] ) if not representation: self.log.debug("No existing representation..") diff --git a/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py new file mode 100644 index 00000000000..a0822d73d54 --- /dev/null +++ b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py @@ -0,0 +1,134 @@ +import logging +import os + +from husd.outputprocessor import OutputProcessor + +from openpype.lib import usdlib + + +class AyonURIOutputProcessor(OutputProcessor): + """Process Ayon URIs into their full path equivalents.""" + + def __init__(self): + """ There is only one object of each output processor class that is + ever created in a Houdini session. Therefore be very careful + about what data gets put in this object. + """ + self._save_cache = dict() + self._ref_cache = dict() + self._publish_context = None + self.log = logging.getLogger(__name__) + @staticmethod + def name(): + return "ayon_uri_processor" + + @staticmethod + def displayName(): + return "Avalon URI Output Processor" + + def processReferencePath(self, + asset_path, + referencing_layer_path, + asset_is_layer): + """ + Args: + asset_path (str): The path to the asset, as specified in Houdini. + If this asset is being written to disk, this will be the final + output of the `processSavePath()` calls on all output + processors. + referencing_layer_path (str): The absolute file path of the file + containing the reference to the asset. You can use this to make + the path pointer relative. + asset_is_layer (bool): A boolean value indicating whether this + asset is a USD layer file. If this is `False`, the asset is + something else (for example, a texture or volume file). + + Returns: + The refactored reference path. + + """ + + cache = self._ref_cache + + # Retrieve from cache if this query occurred before (optimization) + if asset_path in cache: + return cache[asset_path] + + uri_data = usdlib.parse_ayon_uri(asset_path) + if not uri_data: + cache[asset_path] = asset_path + return asset_path + + # Try and find it as an existing publish + query = { + "project_name": uri_data["project"], + "asset_name": uri_data["asset"], + "subset_name": uri_data["product"], + "version_name": uri_data["version"], + "representation_name": uri_data["representation"], + } + path = usdlib.get_representation_path_by_names( + **query + ) + if path: + self.log.debug( + "Ayon URI Resolver - ref: %s -> %s", asset_path, path + ) + cache[asset_path] = path + return path + + elif self._publish_context: + # Query doesn't resolve to an existing version - likely + # points to a version defined in the current publish session + # as such we should resolve it using the current publish + # context if that was set prior to this publish + raise NotImplementedError("TODO") + + self.log.warning(f"Unable to resolve AYON URI: {asset_path}") + cache[asset_path] = asset_path + return asset_path + + def processSavePath(self, + asset_path, + referencing_layer_path, + asset_is_layer): + """ + Args: + asset_path (str): The path to the asset, as specified in Houdini. 
+ If this asset is being written to disk, this will be the final + output of the `processSavePath()` calls on all output + processors. + referencing_layer_path (str): The absolute file path of the file + containing the reference to the asset. You can use this to make + the path pointer relative. + asset_is_layer (bool): A boolean value indicating whether this + asset is a USD layer file. If this is `False`, the asset is + something else (for example, a texture or volume file). + + Returns: + The refactored save path. + + """ + cache = self._save_cache + + # Retrieve from cache if this query occurred before (optimization) + if asset_path in cache: + return cache[asset_path] + + uri_data = usdlib.parse_ayon_uri(asset_path) + if not uri_data: + cache[asset_path] = asset_path + return asset_path + + relative_template = "{asset}_{product}_{version}_{representation}.usd" + # Set save output path to a relative path so other + # processors can potentially manage it easily? + path = relative_template.format(**uri_data) + + self.log.debug("Ayon URI Resolver - save: %s -> %s", asset_path, path) + cache[asset_path] = path + return path + + +def usdOutputProcessor(): + return AyonURIOutputProcessor diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py deleted file mode 100644 index 69e3be50dac..00000000000 --- a/openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py deleted file mode 100644 index 310d057a113..00000000000 --- a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py +++ /dev/null @@ -1,152 +0,0 @@ -import os -import hou -import husdoutputprocessors.base as base - -import colorbleed.usdlib as usdlib - -from openpype.client import get_asset_by_name -from openpype.pipeline import Anatomy, get_current_project_name - - -class AvalonURIOutputProcessor(base.OutputProcessorBase): - """Process Avalon URIs into their full path equivalents. - - """ - - _parameters = None - _param_prefix = 'avalonurioutputprocessor_' - _parms = { - "use_publish_paths": _param_prefix + "use_publish_paths" - } - - def __init__(self): - """ There is only one object of each output processor class that is - ever created in a Houdini session. Therefore be very careful - about what data gets put in this object. 
- """ - self._use_publish_paths = False - self._cache = dict() - - def displayName(self): - return 'Avalon URI Output Processor' - - def parameters(self): - - if not self._parameters: - parameters = hou.ParmTemplateGroup() - use_publish_path = hou.ToggleParmTemplate( - name=self._parms["use_publish_paths"], - label='Resolve Reference paths to publish paths', - default_value=False, - help=("When enabled any paths for Layers, References or " - "Payloads are resolved to published master versions.\n" - "This is usually only used by the publishing pipeline, " - "but can be used for testing too.")) - parameters.append(use_publish_path) - self._parameters = parameters.asDialogScript() - - return self._parameters - - def beginSave(self, config_node, t): - parm = self._parms["use_publish_paths"] - self._use_publish_paths = config_node.parm(parm).evalAtTime(t) - self._cache.clear() - - def endSave(self): - self._use_publish_paths = None - self._cache.clear() - - def processAsset(self, - asset_path, - asset_path_for_save, - referencing_layer_path, - asset_is_layer, - for_save): - """ - Args: - asset_path (str): The incoming file path you want to alter or not. - asset_path_for_save (bool): Whether the current path is a - referenced path in the USD file. When True, return the path - you want inside USD file. - referencing_layer_path (str): ??? - asset_is_layer (bool): Whether this asset is a USD layer file. - If this is False, the asset is something else (for example, - a texture or volume file). - for_save (bool): Whether the asset path is for a file to be saved - out. If so, then return actual written filepath. - - Returns: - The refactored asset path. - - """ - - # Retrieve from cache if this query occurred before (optimization) - cache_key = (asset_path, asset_path_for_save, asset_is_layer, for_save) - if cache_key in self._cache: - return self._cache[cache_key] - - relative_template = "{asset}_{subset}.{ext}" - uri_data = usdlib.parse_avalon_uri(asset_path) - if uri_data: - - if for_save: - # Set save output path to a relative path so other - # processors can potentially manage it easily? - path = relative_template.format(**uri_data) - - print("Avalon URI Resolver: %s -> %s" % (asset_path, path)) - self._cache[cache_key] = path - return path - - if self._use_publish_paths: - # Resolve to an Avalon published asset for embedded paths - path = self._get_usd_master_path(**uri_data) - else: - path = relative_template.format(**uri_data) - - print("Avalon URI Resolver: %s -> %s" % (asset_path, path)) - self._cache[cache_key] = path - return path - - self._cache[cache_key] = asset_path - return asset_path - - def _get_usd_master_path(self, - asset, - subset, - ext): - """Get the filepath for a .usd file of a subset. - - This will return the path to an unversioned master file generated by - `usd_master_file.py`. 
- - """ - - PROJECT = get_current_project_name() - anatomy = Anatomy(PROJECT) - asset_doc = get_asset_by_name(PROJECT, asset) - if not asset_doc: - raise RuntimeError("Invalid asset name: '%s'" % asset) - - template_obj = anatomy.templates_obj["publish"]["path"] - path = template_obj.format_strict({ - "project": PROJECT, - "asset": asset_doc["name"], - "subset": subset, - "representation": ext, - "version": 0 # stub version zero - }) - - # Remove the version folder - subset_folder = os.path.dirname(os.path.dirname(path)) - master_folder = os.path.join(subset_folder, "master") - fname = "{0}.{1}".format(subset, ext) - - return os.path.join(master_folder, fname).replace("\\", "/") - - -output_processor = AvalonURIOutputProcessor() - - -def usdOutputProcessor(): - return output_processor diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py deleted file mode 100644 index d8e36d5aa81..00000000000 --- a/openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py +++ /dev/null @@ -1,90 +0,0 @@ -import hou -import husdoutputprocessors.base as base -import os - - -class StagingDirOutputProcessor(base.OutputProcessorBase): - """Output all USD Rop file nodes into the Staging Directory - - Ignore any folders and paths set in the Configured Layers - and USD Rop node, just take the filename and save into a - single directory. - - """ - theParameters = None - parameter_prefix = "stagingdiroutputprocessor_" - stagingdir_parm_name = parameter_prefix + "stagingDir" - - def __init__(self): - self.staging_dir = None - - def displayName(self): - return 'StagingDir Output Processor' - - def parameters(self): - if not self.theParameters: - parameters = hou.ParmTemplateGroup() - rootdirparm = hou.StringParmTemplate( - self.stagingdir_parm_name, - 'Staging Directory', 1, - string_type=hou.stringParmType.FileReference, - file_type=hou.fileType.Directory - ) - parameters.append(rootdirparm) - self.theParameters = parameters.asDialogScript() - return self.theParameters - - def beginSave(self, config_node, t): - - # Use the Root Directory parameter if it is set. - root_dir_parm = config_node.parm(self.stagingdir_parm_name) - if root_dir_parm: - self.staging_dir = root_dir_parm.evalAtTime(t) - - if not self.staging_dir: - out_file_parm = config_node.parm('lopoutput') - if out_file_parm: - self.staging_dir = out_file_parm.evalAtTime(t) - if self.staging_dir: - (self.staging_dir, filename) = os.path.split(self.staging_dir) - - def endSave(self): - self.staging_dir = None - - def processAsset(self, asset_path, - asset_path_for_save, - referencing_layer_path, - asset_is_layer, - for_save): - """ - Args: - asset_path (str): The incoming file path you want to alter or not. - asset_path_for_save (bool): Whether the current path is a - referenced path in the USD file. When True, return the path - you want inside USD file. - referencing_layer_path (str): ??? - asset_is_layer (bool): Whether this asset is a USD layer file. - If this is False, the asset is something else (for example, - a texture or volume file). - for_save (bool): Whether the asset path is for a file to be saved - out. If so, then return actual written filepath. - - Returns: - The refactored asset path. - - """ - - # Treat save paths as being relative to the output path. 
- if for_save and self.staging_dir: - # Whenever we're processing a Save Path make sure to - # resolve it to the Staging Directory - filename = os.path.basename(asset_path) - return os.path.join(self.staging_dir, filename) - - return asset_path - - -output_processor = StagingDirOutputProcessor() -def usdOutputProcessor(): - return output_processor - diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 416bca35094..eb511b2fb99 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -1,6 +1,8 @@ import os import re import logging +from urllib.parse import urlparse, parse_qs +from collections import namedtuple try: from pxr import Usd, UsdGeom, Sdf, Kind @@ -12,7 +14,9 @@ get_asset_by_name, get_subset_by_name, get_representation_by_name, - get_hero_version_by_subset_id + get_hero_version_by_subset_id, + get_version_by_name, + get_last_version_by_subset_id ) from openpype.pipeline import ( get_current_project_name, @@ -22,26 +26,62 @@ log = logging.getLogger(__name__) +# A contribution defines a layer or references into a particular bootstrap. +# The idea is that contributions can be bootstrapped so, that for example +# the bootstrap of a look variant would update the look bootstrap which updates +# the asset bootstrap. The exact data structure to access and configure these +# easily is still to be defined, but we need to at least know what it targets +# (e.g. where does it go into) and in what order (which contribution is stronger?) +# Preferably the bootstrapped data (e.g. the Shot) preserves metadata about +# the contributions so that we can design a system where custom contributions +# outside of the predefined orders are possible to be managed. So that if a +# particular asset requires an extra contribution level, you can add it +# directly from the publisher at that particular order. Future publishes will +# then see the existing contribution and will persist adding it to future +# bootstraps at that order +Contribution = namedtuple("Contribution", + ("family", "variant", "order", "step")) + # The predefined steps order used for bootstrapping USD Shots and Assets. # These are ordered in order from strongest to weakest opinions, like in USD. PIPELINE = { "shot": [ - "usdLighting", - "usdFx", - "usdSimulation", - "usdAnimation", - "usdLayout", + Contribution(family="usd", variant="lighting", order=500, step="lighting"), + Contribution(family="usd", variant="fx", order=400, step="fx"), + Contribution(family="usd", variant="simulation", order=300, step="simulation"), + Contribution(family="usd", variant="animation", order=200, step="animation"), + Contribution(family="usd", variant="layout", order=100, step="layout"), + ], + "asset": [ + Contribution(family="usd.rig", variant="main", order=300, step="rig"), + Contribution(family="usd.look", variant="main", order=200, step="look"), + Contribution(family="usd.model", variant="main", order=100, step="model") ], - "asset": ["usdShade", "usdModel"], } -def create_asset( - filepath, asset_name, reference_layers=None, kind=Kind.Tokens.component +def setup_asset_layer( + layer, + asset_name, + reference_layers=None, + kind=Kind.Tokens.component, + define_class=True ): """ - Creates an asset file that consists of a top level layer and sublayers for - shading and geometry. + Adds an asset prim to the layer with the `reference_layers` added as + references for e.g. geometry and shading. 
+ + The referenced layers will be moved into a separate `./payload.usd` file + that the asset file uses to allow deferred loading of the heavier + geometrical data. An example would be: + + asset.usd <-- out filepath + payload.usd <-- always automatically added in-between + look.usd <-- reference layer 0 from `reference_layers` argument + model.usd <-- reference layer 1 from `reference_layers` argument + + If `define_class` is enabled then a `/__class__/{asset_name}` class + definition will be created that the root asset inherits from Args: filepath (str): Filepath where the asset.usd file will be saved. @@ -51,17 +91,25 @@ def create_asset( index. asset_name (str): The name for the Asset identifier and default prim. kind (pxr.Kind): A USD Kind for the root asset. + define_class: Define a `/__class__/{asset_name}` class which the + root asset prim will inherit from. """ - # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example - - log.info("Creating asset at %s", filepath) - - # Make the layer ascii - good for readability, plus the file is small - layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"}) - # Define root prim for the asset and make it the default for the stage. prim_name = asset_name + + if define_class: + class_prim = Sdf.PrimSpec( + layer.pseudoRoot, + "__class__", + Sdf.SpecifierClass, + ) + _class_asset_prim = Sdf.PrimSpec( + class_prim, + prim_name, + Sdf.SpecifierClass, + ) + asset_prim = Sdf.PrimSpec( layer.pseudoRoot, prim_name, @@ -69,6 +117,11 @@ def create_asset( "Xform" ) + if define_class: + asset_prim.inheritPathList.prependedItems[:] = [ + "/__class__/{}".format(prim_name) + ] + # Define Kind # Usually we will "loft up" the kind authored into the exported geometry # layer rather than re-stamping here; we'll leave that for a later @@ -78,23 +131,86 @@ def create_asset( # Set asset info asset_prim.assetInfo["name"] = asset_name asset_prim.assetInfo["identifier"] = "%s/%s.usd" % (asset_name, asset_name) - # asset.assetInfo["version"] = asset_version - # Set default prim - layer.defaultPrim = prim_name + # asset.assetInfo["version"] = asset_version + set_layer_defaults(layer, default_prim=asset_name) - # Let viewing applications know how to orient a free camera properly - # Similar to: UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) - layer.pseudoRoot.SetInfo(UsdGeom.Tokens.upAxis, UsdGeom.Tokens.y) + created_layers = [] # Add references to the asset prim if reference_layers: - asset_prim.referenceList.prependedItems[:] = [ - Sdf.Reference(assetPath=path) for path in reference_layers + # Create a relative payload file to filepath through which we sublayer + # the heavier payloads + # Prefix with `LOP` just so so that if Houdini ROP were to save + # the nodes it's capable of exporting with explicit save path + payload_layer = Sdf.Layer.CreateAnonymous("LOP", + args={"format": "usda"}) + set_layer_defaults(payload_layer, default_prim=asset_name) + created_layers.append(payload_layer) + + # Add sublayers to the payload layer + # Note: Sublayering is tricky because it requires that the sublayers + # actually define the path at defaultPrim otherwise the payload + # reference will not find the defaultPrim and turn up empty. 
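+        # For example (illustrative): with reference_layers of
+        # ["./look.usd", "./model.usd"] the payload layer ends up with
+        # subLayerPaths == ["./look.usd", "./model.usd"], keeping the
+        # strongest-first order of the input list.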
+ for ref_layer in reference_layers: + payload_layer.subLayerPaths.append(ref_layer) + + # TODO: Remove referencing logic (for now just there for testing) + # payload_asset_prim = Sdf.PrimSpec( + # payload_layer, + # prim_name, + # Sdf.SpecifierDef, + # "Xform" + # ) + # payload_asset_prim.referenceList.prependedItems[:] = [ + # Sdf.Reference(assetPath=path) for path in reference_layers + # ] + + # Add payload + asset_prim.payloadList.prependedItems[:] = [ + Sdf.Payload(assetPath=payload_layer.identifier) ] + return created_layers + + +def create_asset( + filepath, + asset_name, + reference_layers=None, + kind=Kind.Tokens.component, + define_class=True +): + """Creates and saves a prepared asset stage layer. + + Creates an asset file that consists of a top level asset prim, asset info + and references in the provided `reference_layers`. + + Returns: + list: Created layers + + """ + # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example + log.debug("Creating asset at %s", filepath) + + # Make the layer ascii - good for readability, plus the file is small + layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"}) + + created_layers = setup_asset_layer( + layer=layer, + asset_name=asset_name, + reference_layers=reference_layers, + kind=kind, + define_class=define_class + ) + for created_layer in created_layers: + created_layer.save() + layer.Save() + layers = [layer] + created_layers + return layers + def create_shot(filepath, layers, create_layers=False): """Create a shot with separate layers for departments. @@ -113,7 +229,7 @@ def create_shot(filepath, layers, create_layers=False): """ # Also see create_shot.py in PixarAnimationStudios/USD endToEnd example root_layer = Sdf.Layer.CreateNew(filepath) - log.info("Creating shot at %s" % filepath) + log.debug("Creating shot at %s" % filepath) for layer_path in layers: if create_layers and not os.path.exists(layer_path): @@ -159,7 +275,7 @@ def create_model(filename, asset, variant_subsets): "Model subsets must start " "with usdModel: %s" % subset ) - path = get_usd_master_path( + path = get_latest_representation( asset=asset_doc, subset=subset, representation="usd" ) variants.append((variant, path)) @@ -212,7 +328,7 @@ def create_shade(filename, asset, variant_subsets): ) shade_subset = re.sub("^usdModel", "usdShade", subset) - path = get_usd_master_path( + path = get_latest_representation( asset=asset_doc, subset=shade_subset, representation="usd" ) variants.append((variant, path)) @@ -240,7 +356,7 @@ def create_shade_variation(filename, asset, model_variant, shade_variants): subset = "usdShade_{model}_{shade}".format( model=model_variant, shade=variant ) - path = get_usd_master_path( + path = get_latest_representation( asset=asset_doc, subset=subset, representation="usd" ) variants.append((variant, path)) @@ -263,6 +379,29 @@ def _create_variants_file( as_payload=False, skip_variant_on_single_file=True, ): + """Create a USD file with references to given variants and their paths. + + Arguments: + filename (str): USD file containing the variant sets. + variants (List[List[str, str]): List of two-tuples of variant name to + the filepath that should be referenced in for that variant. + variantset (str): Name of the variant set + default_variant (str): Default variant to set. If not provided + the first variant will be used. + reference_prim (str): Path to the reference prim where to add the + references and variant sets. + set_default_variant (bool): Whether to set the default variant. 
+ When False no default variant will be set, even if a value + was provided to `default_variant` + as_payload (bool): When enabled, instead of referencing use payloads + skip_variant_on_single_file (bool): If this is enabled and only + a single variant is provided then do not create the variant set + but just reference that single file. + + Returns: + Usd.Stage: The saved usd stage + + """ root_layer = Sdf.Layer.CreateNew(filename, args={"format": "usda"}) stage = Usd.Stage.Open(root_layer) @@ -287,15 +426,13 @@ def _reference(path): assert variants, "Must have variants, got: %s" % variants - log.info(filename) - if skip_variant_on_single_file and len(variants) == 1: # Reference directly, no variants variant_path = variants[0][1] _reference(variant_path) - log.info("Non-variants..") - log.info("Path: %s" % variant_path) + log.debug("Creating without variants due to single file only.") + log.debug("Path: %s", variant_path) else: # Variants @@ -303,6 +440,7 @@ def _reference(path): variant_set = root_prim.GetVariantSets().AddVariantSet( variantset, append ) + debug_label = "Payloading" if as_payload else "Referencing" for variant, variant_path in variants: @@ -314,55 +452,140 @@ def _reference(path): with variant_set.GetVariantEditContext(): _reference(variant_path) - log.info("Variants..") - log.info("Variant: %s" % variant) - log.info("Path: %s" % variant_path) + log.debug("%s variants.", debug_label) + log.debug("Variant: %s", variant) + log.debug("Path: %s", variant_path) - if set_default_variant: + if set_default_variant and default_variant is not None: variant_set.SetVariantSelection(default_variant) return stage -def get_usd_master_path(asset, subset, representation): - """Get the filepath for a .usd file of a subset. +def get_representation_path_by_names( + project_name, + asset_name, + subset_name, + version_name, + representation_name, +): + """Get (latest) filepath for representation for asset and subset. - This will return the path to an unversioned master file generated by - `usd_master_file.py`. + If version_name is "hero" then return the hero version + If version_name is "latest" then return the latest version + Otherwise use version_name as the exact integer version name. 
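+
+    Example (illustrative; the project, asset and subset names below are
+    hypothetical):
+        >>> get_representation_path_by_names(
+        >>>     "test", "villain", "usdModelMain", "latest", "usd"
+        >>> )
+
+    Returns None when the asset, subset or version cannot be found.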
""" - project_name = get_current_project_name() - - if isinstance(asset, dict) and "name" in asset: + if isinstance(asset_name, dict) and "name" in asset_name: # Allow explicitly passing asset document - asset_doc = asset + asset_doc = asset_name else: - asset_doc = get_asset_by_name(project_name, asset, fields=["_id"]) - - if isinstance(subset, dict) and "name" in subset: + asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) + if not asset_doc: + return + print(asset_doc) + if isinstance(subset_name, dict) and "name" in subset_name: # Allow explicitly passing subset document - subset_doc = subset + subset_doc = subset_name else: subset_doc = get_subset_by_name(project_name, - subset, + subset_name, asset_id=asset_doc["_id"], fields=["_id"]) + if not subset_doc: + return + print(subset_doc) + + if version_name == "hero": + version = get_hero_version_by_subset_id(project_name, + subset_id=subset_doc["_id"]) + elif version_name == "latest": + version = get_last_version_by_subset_id(project_name, + subset_id=subset_doc["_id"]) + else: + version = get_version_by_name(project_name, + version_name, + subset_id=subset_doc["_id"]) + if not version: + return - version = get_hero_version_by_subset_id(project_name, - subset_id=subset_doc["_id"]) representation = get_representation_by_name(project_name, - representation, + representation_name, version_id=version["_id"]) path = get_representation_path(representation) return path.replace("\\", "/") -def parse_avalon_uri(uri): - # URI Pattern: avalon://{asset}/{subset}.{ext} - pattern = r"avalon://(?P[^/.]*)/(?P[^/]*)\.(?P.*)" - if uri.startswith("avalon://"): - match = re.match(pattern, uri) - if match: - return match.groupdict() +def parse_ayon_uri(uri): + """Parse ayon+entity URI into individual components. + + URI specification: + ayon+entity://{project}/{asset}?product={product} + &version={version} + &representation={representation} + URI example: + ayon+entity://test/hero?modelMain&version=2&representation=usd + + Example: + >>> parse_ayon_uri( + >>> "ayon+entity://test/villain?product=modelMain&version=2&representation=usd" # noqa: E501 + >>> ) + {'project': 'test', 'asset': 'villain', + 'product': 'modelMain', 'version': 1, + 'representation': 'usd'} + + Returns: + dict: The individual keys of the ayon entity query. + + """ + + if not uri.startswith("ayon+entity://"): + return + + parsed = urlparse(uri) + if parsed.scheme != "ayon+entity": + return + + result = { + "project": parsed.netloc, + "asset": parsed.path.strip("/") + } + query = parse_qs(parsed.query) + for key in ["product", "version", "representation"]: + if key in query: + result[key] = query[key][0] + + # Convert version to integer if it is a digit + version = result.get("version") + if version is not None and version.isdigit(): + result["version"] = int(version) + + return result + + +def set_layer_defaults(layer, + up_axis=UsdGeom.Tokens.y, + meters_per_unit=1.0, + default_prim=None): + """Set some default metadata for the SdfLayer. + + Arguments: + layer (Sdf.Layer): The layer to set default for via Sdf API. 
+ up_axis (UsdGeom.Token); Which axis is the up-axis + meters_per_unit (float): Meters per unit + default_prim (Optional[str]: Default prim name + + """ + # Set default prim + if default_prim is not None: + layer.defaultPrim = default_prim + + # Let viewing applications know how to orient a free camera properly + # Similar to: UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + layer.pseudoRoot.SetInfo(UsdGeom.Tokens.upAxis, up_axis) + + # Set meters per unit + layer.pseudoRoot.SetInfo(UsdGeom.Tokens.metersPerUnit, + float(meters_per_unit)) From f0f473e5ee23a6c9d5ca2f3c787712c8b8e2b067 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 3 Nov 2023 00:36:32 +0100 Subject: [PATCH 03/56] Report which Creator was duplicated --- openpype/pipeline/create/context.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 25f03ddd3b4..ee3e626900a 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -1786,10 +1786,10 @@ def _reset_creator_plugins(self): creator_identifier = creator_class.identifier if creator_identifier in creators: - self.log.warning(( - "Duplicated Creator identifier. " - "Using first and skipping following" - )) + self.log.warning( + "Duplicated Creator identifier. Using first and " + "skipping following: {}".format(str(creator_class)) + ) continue # Filter by host name From d1f6b92df37bb9c7390034d442bc9ecacea8802c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 3 Nov 2023 00:37:41 +0100 Subject: [PATCH 04/56] Define `add_transient_instance_data` and `remove_transient_instance_data` so behavior can be overridden by subclasses --- openpype/hosts/maya/api/plugin.py | 46 +++++++++++++++++++------------ 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 3b54954c8a0..d643d9075df 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -146,18 +146,39 @@ def get_publish_families(self): """ return [] + def add_transient_instance_data(self, instance_data): + """Add data into the `instance.data` after the read of the node data + + This can be overridden by subclasses to sneak in extra instance data + specific to the creator. + """ + # Allow a Creator to define multiple families + publish_families = self.get_publish_families() + if publish_families: + families = instance_data.setdefault("families", []) + for family in self.get_publish_families(): + if family not in families: + families.append(family) + + def remove_transient_instance_data(self, instance_data): + """Remove data `instance.data` before storing/imprinting to the node + + This can be overridden by subclasses to remove extra instance data + added in `add_transient_instance_data` specific to the creator. 
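+
+        Example (illustrative, using the default implementation of this
+        method; `creator` is an instance of this class):
+            >>> data = {"subset": "usdAssetMain", "families": ["usd"]}
+            >>> creator.remove_transient_instance_data(data)
+            >>> data
+            {'subset': 'usdAssetMain'}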
+ """ + # Don't store `families` since it's up to the creator itself + # to define the initial publish families - not a stored attribute of + # `families` + instance_data.pop("families", None) + def imprint_instance_node(self, node, data): + self.remove_transient_instance_data(data) # We never store the instance_node as value on the node since # it's the node name itself data.pop("instance_node", None) data.pop("instance_id", None) - # Don't store `families` since it's up to the creator itself - # to define the initial publish families - not a stored attribute of - # `families` - data.pop("families", None) - # We store creator attributes at the root level and assume they # will not clash in names with `subset`, `task`, etc. and other # default names. This is just so these attributes in many cases @@ -230,11 +251,6 @@ def read_instance_node(self, node): node_data["instance_node"] = node node_data["instance_id"] = node - # If the creator plug-in specifies - families = self.get_publish_families() - if families: - node_data["families"] = families - return node_data def _default_collect_instances(self): @@ -242,6 +258,7 @@ def _default_collect_instances(self): cached_subsets = self.collection_shared_data["maya_cached_subsets"] for node in cached_subsets.get(self.identifier, []): node_data = self.read_instance_node(node) + self.add_transient_instance_data(node_data) created_instance = CreatedInstance.from_existing(node_data, self) self._add_instance_to_context(created_instance) @@ -279,17 +296,10 @@ def create(self, subset_name, instance_data, pre_create_data): if pre_create_data.get("use_selection"): members = cmds.ls(selection=True) - # Allow a Creator to define multiple families - publish_families = self.get_publish_families() - if publish_families: - families = instance_data.setdefault("families", []) - for family in self.get_publish_families(): - if family not in families: - families.append(family) - with lib.undo_chunk(): instance_node = cmds.sets(members, name=subset_name) instance_data["instance_node"] = instance_node + self.add_transient_instance_data(instance_data) instance = CreatedInstance( self.family, subset_name, From b281bcb9d2e7cdb271d57f9e6371169a1b288653 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 3 Nov 2023 00:46:45 +0100 Subject: [PATCH 05/56] Add Maya show in usdview loader --- .../hosts/maya/plugins/load/show_usdview.py | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 openpype/hosts/maya/plugins/load/show_usdview.py diff --git a/openpype/hosts/maya/plugins/load/show_usdview.py b/openpype/hosts/maya/plugins/load/show_usdview.py new file mode 100644 index 00000000000..16ab28ab21b --- /dev/null +++ b/openpype/hosts/maya/plugins/load/show_usdview.py @@ -0,0 +1,50 @@ +import os +import subprocess + +from openpype.pipeline import load + +MAYA_LOCATION = os.environ['MAYA_LOCATION'] +MAYAPY = os.path.join(MAYA_LOCATION, 'bin', 'mayapy') +USD_LOCATION = os.getenv("USD_LOCATION") +USDVIEW = os.path.join(USD_LOCATION, 'bin', 'usdview') + + +class ShowInUsdview(load.LoaderPlugin): + """Open USD file in usdview""" + + label = "Show in usdview" + representations = ["*"] + families = ["*"] + extensions = {"usd", "usda", "usdlc", "usdnc", "abc"} + order = 15 + + icon = "code-fork" + color = "white" + + # Enable if usd location is defined (which maya usd plugin does) + enabled = USD_LOCATION and os.path.isdir(USD_LOCATION) + + def load(self, context, name=None, namespace=None, data=None): + + try: + import OpenGL + except ImportError: + 
self.log.error( + "usdview for mayapy requires to have `OpenGL` python library " + "available. Please make sure to install it." + ) + filepath = self.filepath_from_context(context) + filepath = os.path.normpath(filepath) + filepath = filepath.replace("\\", "/") + + if not os.path.exists(filepath): + self.log.error("File does not exist: %s" % filepath) + return + + self.log.info("Start maya variant of usdview...") + CREATE_NO_WINDOW = 0x08000000 + subprocess.Popen([MAYAPY, USDVIEW, filepath], + creationflags=CREATE_NO_WINDOW, + # Set current working directory so that browsing + # from usdview itself starts from that folder too + cwd=os.path.dirname(filepath)) From 9cf29b281ae6d507e204b1008c06bb99dbe02da1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 3 Nov 2023 00:47:35 +0100 Subject: [PATCH 06/56] Collect resources dir for `usd` family --- openpype/plugins/publish/collect_resources_path.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index cfb4d63c1b3..15256b9e1f9 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -64,7 +64,8 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "skeletalMesh", "xgen", "yeticacheUE", - "tycache" + "tycache", + "usd" ] def process(self, instance): From 038e4f22f53385f5ae6c98dd2999fb3294f38941 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 3 Nov 2023 00:48:38 +0100 Subject: [PATCH 07/56] WIP draft prototyping for Maya USD --- .../maya/plugins/create/create_maya_usd.py | 123 ++++++++- .../plugins/publish/collect_usd_bootstrap.py | 95 +++---- .../plugins/publish/extract_usd_bootstrap.py | 244 ++++++++++++++++-- .../publish/validate_instance_has_members.py | 6 +- 4 files changed, 380 insertions(+), 88 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd.py b/openpype/hosts/maya/plugins/create/create_maya_usd.py index 02f707813c3..b268503b434 100644 --- a/openpype/hosts/maya/plugins/create/create_maya_usd.py +++ b/openpype/hosts/maya/plugins/create/create_maya_usd.py @@ -2,8 +2,12 @@ from openpype.lib import ( BoolDef, EnumDef, - TextDef + TextDef, + UILabelDef, + UISeparatorDef, + usdlib ) +from openpype.pipeline.context_tools import get_current_context from maya import cmds @@ -19,11 +23,6 @@ class CreateMayaUsd(plugin.MayaCreator): cache = {} - # TODO: Remove these default variants - this is just to trivialize - # the usage of the bootstrapping that was once built for Houdini - # that bootstrapped usdModel and usdShade as usdAsset - default_variants = ["Model", "Shade"] - def get_publish_families(self): return ["usd", "mayaUsd"] @@ -108,21 +107,117 @@ def get_instance_attr_defs(self): class CreateMayaUsdContribution(CreateMayaUsd): + """ + + When writing a USD as 'contribution' it will be added into what it's + contributing to. It will usually contribute to either the main *asset* + or *shot* but can be customized. + + Usually the contribution is done into a Department Layer, like e.g. + model, rig, look for models and layout, animation, fx, lighting for shots. + Each department contribution will be 'sublayered' into the departments + contribution. 
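+
+    For example (illustrative defaults when this instance is created from a
+    "look" task):
+        contribution_enabled:          True
+        contribution_department_layer: "look"
+        contribution_sublayer:         "Main"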
+ + """ identifier = "io.openpype.creators.maya.mayausd.contribution" label = "Maya USD Contribution" - family = "usd.layered" + family = "usd" icon = "cubes" description = "Create Maya USD Contribution" + def get_publish_families(self): + families = ["usd", "mayaUsd", "usd.layered"] + if self.family not in families: + families.append(self.family) + return families + def get_instance_attr_defs(self): - import os - defs = super(CreateMayaUsdContribution, self).get_instance_attr_defs() - defs.insert(0, TextDef( - "sublayer", - label="Sublayer", - default=os.environ["AVALON_TASK"] - )) + context = get_current_context() + + # The departments must be 'ordered' so that e.g. a look can apply + # overrides to any opinion from the model department. + department = context["task_name"] # usually equals the department? + variant = "Main" # used as default for sublayer + + defs = [ + UISeparatorDef("contribution_settings1"), + UILabelDef(label="Contribution"), + UISeparatorDef("contribution_settings2"), + BoolDef("contribution_enabled", + label="Add to USD container", + default=True), + TextDef("contribution_department_layer", + label="Department layer", + default=department), + TextDef("contribution_sublayer", + label="Sublayer", + # Usually e.g. usdModel, usdLook, usdLookRed + default=variant), + TextDef("contribution_variant_set_name", + label="Variant Set Name", + default=""), + TextDef("contribution_variant", + label="Variant Name", + default=""), + UISeparatorDef("export_settings1"), + UILabelDef(label="Export Settings"), + UISeparatorDef("export_settings2"), + ] + defs += super(CreateMayaUsdContribution, self).get_instance_attr_defs() return defs + + +for contribution in usdlib.PIPELINE["asset"]: + + step = contribution.step + + class CreateMayaUsdDynamicStepContribution(CreateMayaUsdContribution): + identifier = f"{CreateMayaUsdContribution.identifier}.{step}" + default_variants = plugin.MayaCreator.default_variants + label = f"USD {step.title()}" + family = contribution.family + + # Define some nice icons + icon = { + "look": "paint-brush", + "model": "cube", + "rig": "wheelchair" + }.get(step, "cubes") + + description = f"Create USD {step.title()} Contribution" + + bootstrap = "asset" + + contribution = contribution + + # TODO: Should these still be customizable + # contribution_sublayer_order = contribution.order + # contribution_department = contribution.step + # contribution_variant_set_name = contribution.step + # contribution_variant_name = "{variant}" + + def add_transient_instance_data(self, instance_data): + super().add_transient_instance_data(instance_data) + instance_data["usd_bootstrap"] = self.bootstrap + instance_data["usd_contribution"] = self.contribution + + def remove_transient_instance_data(self, instance_data): + super().remove_transient_instance_data(instance_data) + instance_data.pop("usd_bootstrap", None) + instance_data.pop("usd_contribution", None) + + # Dynamically create USD creators for easy access to a certain step + # in production + global_variables = globals() + klass_name = f"CreateMayaUsd{step.title()}Contribution" + klass = type(klass_name, (CreateMayaUsdDynamicStepContribution,), {}) + global_variables[klass_name] = klass + + # We only want to store the global variables, and don't want the last + # iteration of the loop to persist after because Create Context will + # pick those up too + del klass + del CreateMayaUsdDynamicStepContribution diff --git a/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py 
b/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py index 0b08cfeb8ac..e69199cba65 100644 --- a/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py @@ -2,6 +2,7 @@ from openpype.client import get_subset_by_name, get_asset_by_name import openpype.lib.usdlib as usdlib +from openpype.pipeline.create import get_subset_name class CollectUsdBootstrap(pyblish.api.InstancePlugin): @@ -9,75 +10,71 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): Some specific subsets are intended to be part of the default structure of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset - we layer a Model and Shade USD file over each other and expose that in + we layer a Model and Look USD file over each other and expose that in a Asset USD file, ready to use. - On the first publish of any of the components of a Asset or Shot the + On the first publish of any components of an Asset or Shot the missing pieces are bootstrapped and generated in the pipeline too. This means that on the very first publish of your model the Asset USD file will exist too. """ - order = pyblish.api.CollectorOrder + 0.35 + order = pyblish.api.CollectorOrder - 0.4 label = "Collect USD Bootstrap" hosts = ["maya"] - families = ["usd", "usd.layered"] - # TODO: Implement feature, then enable - enabled = False + families = ["usd"] def process(self, instance): - # Detect whether the current subset is a subset in a pipeline - def get_bootstrap(instance): - instance_subset = instance.data["subset"] - for name, layers in usdlib.PIPELINE.items(): - if instance_subset in set(layers): - return name # e.g. "asset" - break - else: - return - - bootstrap = get_bootstrap(instance) + bootstrap = instance.data.get("usd_bootstrap") if bootstrap: + self.log.debug("Add bootstrap for: %s" % bootstrap) self.add_bootstrap(instance, bootstrap) - # Check if any of the dependencies requires a bootstrap - for dependency in instance.data.get("publishDependencies", list()): - bootstrap = get_bootstrap(dependency) - if bootstrap: - self.add_bootstrap(dependency, bootstrap) - def add_bootstrap(self, instance, bootstrap): - self.log.debug("Add bootstrap for: %s" % bootstrap) - project_name = instance.context.data["projectName"] asset = get_asset_by_name(project_name, instance.data["asset"]) assert asset, "Asset must exist: %s" % asset # Check which are not about to be created and don't exist yet - required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap) + variants_to_create = [bootstrap] require_all_layers = instance.data.get("requireAllLayers", False) if require_all_layers: # USD files load fine in usdview and Houdini even when layered or # referenced files do not exist. So by default we don't require # the layers to exist. 
- layers = usdlib.PIPELINE.get(bootstrap) - if layers: - required += list(layers) + contributions = usdlib.PIPELINE.get(bootstrap) + if contributions: + variants_to_create.extend( + contribution.variant for contribution in contributions + ) - if not required: + if not variants_to_create: return - for subset in required: - self.log.info("USD bootstrapping to: %s" % subset) - if self._subset_exists(project_name, instance, subset, asset): + for variant in variants_to_create: + self.log.info("USD bootstrapping usd-variant: %s", variant) + + subset = get_subset_name( + family="usd", + variant=variant.title(), + task_name=instance.data["task"], + asset_doc=asset, + project_name=project_name + ) + self.log.info(subset) + + defined = self.get_subset_in_context(instance, subset, asset) + if defined: + defined.append(instance.id) + self.log.info("defined..") continue self.log.debug( - "Creating {bootstrap} USD bootstrap: " + "Creating USD bootstrap: " "{asset} > {subset}".format( bootstrap=bootstrap, asset=asset["name"], @@ -86,22 +83,27 @@ def add_bootstrap(self, instance, bootstrap): ) new = instance.context.create_instance(subset) + + # Define subset with new.data["subset"] = subset - #new.data["label"] = "{0} ({1})".format(subset, asset["name"]) - new.data["family"] = "usd.bootstrap" + new.data["variant"] = variant + new.data["label"] = "{0} ({1})".format(subset, asset["name"]) + new.data["family"] = "usd" + new.data["families"] = ["usd", "usd.bootstrap"] new.data["icon"] = "link" new.data["comment"] = "Automated bootstrap USD file." new.data["publishFamilies"] = ["usd"] + new[:] = [instance.id] # Do not allow the user to toggle this instance new.data["optional"] = False # Copy some data from the instance for which we bootstrap - for key in ["asset"]: + for key in ["asset", "task"]: new.data[key] = instance.data[key] - def _subset_exists(self, project_name, instance, subset, asset): - """Return whether subset exists in current context or in database.""" + def get_subset_in_context(self, instance, subset, asset): + """Return whether subset exists in current context.""" # Allow it to be created during this publish session context = instance.context for inst in context: @@ -109,12 +111,15 @@ def _subset_exists(self, project_name, instance, subset, asset): inst.data["subset"] == subset and inst.data["asset"] == asset["name"] ): - return True + return inst + # TODO: Since we don't have an asset resolver that will resolve + # 'to latest' we currently always want to push an update to the + # bootstrap explicitly # Or, if they already exist in the database we can # skip them too. 
- if get_subset_by_name( - project_name, subset, asset["_id"], fields=["_id"] - ): - return True - return False + # if get_subset_by_name( + # project_name, subset, asset["_id"], fields=["_id"] + # ): + # return True + # return False diff --git a/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py b/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py index 1a053887cba..6adbbf27319 100644 --- a/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py +++ b/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py @@ -1,7 +1,10 @@ import os +import copy +import operator import pyblish.api from openpype.pipeline import publish +from openpype.pipeline.create import get_subset_name class ExtractBootstrapUSD(publish.Extractor): @@ -11,7 +14,7 @@ class ExtractBootstrapUSD(publish.Extractor): """ - order = pyblish.api.ExtractorOrder + 0.1 + order = pyblish.api.ExtractorOrder + 0.2 label = "Bootstrap USD" hosts = ["houdini", "maya"] targets = ["local"] @@ -23,21 +26,25 @@ def process(self, instance): staging_dir = self.staging_dir(instance) filename = "{subset}.usd".format(**instance.data) filepath = os.path.join(staging_dir, filename) - self.log.info("Bootstrap USD '%s' to '%s'" % (filename, staging_dir)) + self.log.debug("Bootstrap USD '%s' to '%s'" % (filename, staging_dir)) subset = instance.data["subset"] if subset == "usdAsset": # Asset - steps = usdlib.PIPELINE["asset"] - layers = self.get_usd_master_paths(steps, instance) - usdlib.create_asset(filepath, - asset_name=instance.data["asset"], - reference_layers=layers) + contributions = usdlib.PIPELINE["asset"] + layers = self.get_contribution_paths(contributions, instance) + relative_files = usdlib.create_asset( + filepath, + asset_name=instance.data["asset"], + reference_layers=layers + ) + for relative_file in relative_files: + self.add_relative_file(instance, relative_file) elif subset == "usdShot": # Shot steps = usdlib.PIPELINE["shot"] - layers = self.get_usd_master_paths(steps, instance) + layers = self.get_contribution_paths(steps, instance) usdlib.create_shot(filepath, layers=layers) @@ -79,22 +86,205 @@ def process(self, instance): "stagingDir": staging_dir }) - def get_usd_master_paths(self, subsets, instance): - - raise NotImplementedError("TODO") - # TODO: Implement the retrieval of the right paths - # TODO: preferably with AYON asset resolver these would be AYON URIs - # asset = instance.data["asset"] - # - # template = _get_project_publish_template() - # layer_paths = [] - # for layer in subsets: - # layer_path = self._get_usd_master_path( - # subset=layer, - # asset=asset, - # template=template - # ) - # layer_paths.append(layer_path) - # self.log.info("Asset references: %s" % layer_path) - # - # return layer_paths + def add_relative_file(self, instance, relative_path, staging_dir=None): + """Add transfer for a relative path form staging to publish dir. + + Unlike files in representations, the file will not be renamed and + will be ingested one-to-one into the publish directory. 
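+
+        Arguments:
+            instance (pyblish.api.Instance): Publish instance that provides
+                the staging and publish directories.
+            relative_path (str): File path relative to the staging directory;
+                the same relative path is used inside the publish directory.
+            staging_dir (Optional[str]): Override for the source staging
+                directory. Defaults to `self.staging_dir(instance)`.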
+ + """ + if staging_dir is None: + staging_dir = self.staging_dir(instance) + publish_dir = instance.data["publishDir"] + source = os.path.join(staging_dir, relative_path) + source = os.path.normpath(source) + destination = os.path.join(publish_dir, relative_path) + destination = os.path.normpath(destination) + + transfers = instance.data.setdefault("transfers", []) + transfers.append((source, destination)) + + def get_contribution_paths(self, contributions, instance): + """Return the asset paths (filepath) for the contributions. + + If the contribution is not found in the current publish context nor + as an existing entity in the database it will be silently excluded + from the result. + + """ + # TODO: create paths for AYON asset resolver as AYON URIs + # TODO: Get any contributions from the last version of the instance + # so that we ensure we're always adding into the last existing + # version instead of replacing + # last_contributions = self.get_last_contributions(instance) + # for contribution in last_contributions: + # if contribution not in contributions: + # contributions.append(last_contributions) + contributions.sort(key=operator.attrgetter("order")) + + # Define subsets from family + variant + subsets = [] + for contribution in contributions: + subset = get_subset_name( + family=contribution.family, + variant=contribution.variant, + task_name=instance.data["task"], + asset_doc=instance.data["assetEntity"], + project_name=instance.context.data["projectName"] + ) + subsets.append(subset) + + # Find all subsets in the current publish session + result = self.get_representation_path_per_subset_in_publish(subsets, + instance) + + # Find last existing version for those not in current publish session + missing = [subset for subset in subsets if subset not in result] + if missing: + existing = self.get_existing_representation_path_per_subset( + missing, instance + ) + result.update(existing) + + order = {subset: index for index, subset in enumerate(subsets)} + result = { + subset: path for subset, path in sorted(result.items(), + key=lambda x: order[x[0]]) + } + + self.log.debug( + "Found subsets to contribute: {}".format(", ".join(result)) + ) + assert result, "Must have one subset to contribute at least" + return list(result.values()) + + def get_representation_path_per_subset_in_publish(self, subsets, instance): + """Get path for representations in the current publish session + + Given the input subset names compute all destination paths for + active instances in the current publish session that will be + ingested as the new versions for those publishes. 
+ + This assumes those subset will generate a USD representation and + must already have it added in `instance.data["representations"]` + + """ + asset = instance.data["asset"] + result = {} + context = instance.context + self.log.debug(f"Looking for subsets: {subsets}") + for other_instance in context: + if other_instance is instance: + continue + + if not other_instance.data.get("active", True): + continue + + if not other_instance.data.get("publish", True): + continue + + if other_instance.data["asset"] != asset: + continue + + if other_instance.data["subset"] not in subsets: + continue + + subset = other_instance.data["subset"] + + # Make sure the instance has a `usd` representation; note that + # usually the extractors add these so we want this plug-in to + # run quite late as an extractor to ensure others have run before + if not any( + repre["name"] == "usd" for repre in + other_instance.data.get("representations", []) + ): + raise RuntimeError( + "Missing `usd` representation on instance with " + "subset {}".format(subset) + ) + + anatomy = context.data["anatomy"] + path_template_obj = anatomy.templates_obj["publish"]["path"] + template_data = copy.deepcopy(instance.data["anatomyData"]) + template_data.update({ + "ext": "usd", + "representation": "usd", + "subset": subset, + "asset": other_instance.data["asset"], + "variant": other_instance.data["variant"], + "version": other_instance.data["version"] + }) + if "version" in other_instance.data: + template_data["version"] = other_instance.data["version"] + + self.log.debug( + "Found publish session subset '{}' version 'v{:03d}'".format( + subset, other_instance.data["version"] + )) + + template_filled = path_template_obj.format_strict(template_data) + result[subset] = os.path.normpath(template_filled) + + return result + + def get_existing_representation_path_per_subset(self, subsets, instance): + """Get last version for subsets in the database + + Given the input subset names find all latest existing version in the + database and retrieve their `usd` representation paths. 
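+
+        Returns:
+            dict: Mapping of subset name to the filepath of its latest
+                published `usd` representation. Subsets without an existing
+                version or `usd` representation are left out of the result.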
+ + """ + context = instance.context + project_name = context.data["projectName"] + asset_entity = instance.data["assetEntity"] + + from openpype.pipeline import get_representation_path + from openpype.client import ( + get_subsets, + get_last_versions, + get_representations + ) + + def to_id(entity): + return entity["_id"] + + subsets_docs = list( + get_subsets(project_name, + subset_names=subsets, + asset_ids=[asset_entity["_id"]]) + ) + if not subsets_docs: + return {} + + version_docs = list(get_last_versions( + project_name, + subset_ids=map(to_id, subsets_docs) + ).values()) + if not version_docs: + return {} + + representation_docs = list(get_representations( + project_name, + version_ids=map(to_id, version_docs), + representation_names=["usd"] + )) + if not representation_docs: + return {} + + result = {} + versions_by_id = {v["_id"]: v for v in version_docs} + subsets_by_id = {s["_id"]: s for s in subsets_docs} + for representation in representation_docs: + version_doc = versions_by_id[representation["parent"]] + subset_doc = subsets_by_id[version_doc["parent"]] + subset = subset_doc["name"] + + self.log.debug( + "Found existing subset '{}' version 'v{:03d}'".format( + subset, version_doc["name"] + )) + + path = get_representation_path(representation) + result[subset] = path + + return result diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py index 95905115bc0..fbc94627091 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py @@ -25,11 +25,13 @@ def get_invalid(cls, instance): def process(self, instance): # Allow renderlayer, rendersetup and workfile to be empty - skip_families = {"workfile", "renderlayer", "rendersetup", - "usd.bootstrap"} + skip_families = {"workfile", "renderlayer", "rendersetup"} if instance.data.get("family") in skip_families: return + if "usd.bootstrap" in instance.data.get("families", []): + return + invalid = self.get_invalid(instance) if invalid: # Invalid will always be a single entry, we log the single name From faa75ad3f65999b954f022c8e73e9bf1b1acc985 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 6 Nov 2023 14:12:32 +0100 Subject: [PATCH 08/56] Fix issues with instances --- .../maya/plugins/publish/extract_maya_usd.py | 28 +++++++++++++++++-- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_usd.py b/openpype/hosts/maya/plugins/publish/extract_maya_usd.py index 8c32ac1e395..4ee4b676bc3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_maya_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_maya_usd.py @@ -4,12 +4,28 @@ import contextlib from maya import cmds +import maya.api.OpenMaya as om import pyblish.api from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection +def get_node_hash(node): + """Return integer MObjectHandle hash code. + + Arguments: + node (str): Maya node path. + + Returns: + int: MObjectHandle.hashCode() + + """ + sel = om.MSelectionList() + sel.add(node) + return om.MObjectHandle(sel.getDependNode(0)).hashCode() + + @contextlib.contextmanager def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None): """Define attributes for the given nodes that should be exported. 
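A minimal sketch (not part of the patch) of what the new `get_node_hash()`
helper enables: instanced shapes share a single MObject, so every DAG path to
that shape hashes to the same value and the export-attribute setup below can
skip duplicates. The node names are only examples.

    from maya import cmds

    cube = cmds.polyCube(constructionHistory=False)[0]       # e.g. "pCube1"
    instance = cmds.instance(cube)[0]                         # e.g. "pCube2"
    shape_a = cmds.listRelatives(cube, shapes=True, fullPath=True)[0]
    shape_b = cmds.listRelatives(instance, shapes=True, fullPath=True)[0]

    assert shape_a != shape_b                                 # two DAG paths
    assert get_node_hash(shape_a) == get_node_hash(shape_b)   # one MObject
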
@@ -44,8 +60,6 @@ def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None): # todo: this might be better done with a custom export chaser # see `chaser` argument for `mayaUSDExport` - import maya.api.OpenMaya as om - if not attrs and not attr_prefixes: # context manager does nothing yield @@ -61,16 +75,23 @@ def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None): usd_json_attr = "USD_UserExportedAttributesJson" strings = attrs + ["{}*".format(prefix) for prefix in attr_prefixes] context_state = {} + + # Keep track of the processed nodes as a node might appear more than once + # e.g. when there are instances. + processed = set() for node in set(nodes): node_attrs = cmds.listAttr(node, st=strings) if not node_attrs: # Nothing to do for this node continue + hash_code = get_node_hash(node) + if hash_code in processed: + continue + node_attr_data = {} for node_attr in set(node_attrs): node_attr_data[node_attr] = mapping.get(node_attr, {}) - if cmds.attributeQuery(usd_json_attr, node=node, exists=True): existing_node_attr_value = cmds.getAttr( "{}.{}".format(node, usd_json_attr) @@ -82,6 +103,7 @@ def usd_export_attributes(nodes, attrs=None, attr_prefixes=None, mapping=None): existing_node_attr_data = json.loads(existing_node_attr_value) node_attr_data.update(existing_node_attr_data) + processed.add(hash_code) context_state[node] = json.dumps(node_attr_data) sel = om.MSelectionList() From 9a18e40bc8da82c7c3a996f0d367bd3f526ce891 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 7 Nov 2023 01:26:02 +0100 Subject: [PATCH 09/56] Tweak usdlib to support creating asset using both anonymous layers for Houdini LOP Python node and creating asset with saved layers for e.g. bootstrapping assets during publishing. --- openpype/lib/usdlib.py | 108 +++++++++++++++++++++++++++-------------- 1 file changed, 72 insertions(+), 36 deletions(-) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index eb511b2fb99..b920fff33e2 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -83,6 +83,15 @@ def setup_asset_layer( If `define_class` is enabled then a `/__class__/{asset_name}` class definition will be created that the root asset inherits from + Examples: + >>> create_asset("/path/to/asset.usd", + >>> asset_name="test", + >>> reference_layers=["./model.usd", "./look.usd"]) + + Returns: + List[Tuple[Sdf.Layer, str]]: List of created layers with their + preferred output save paths. + Args: filepath (str): Filepath where the asset.usd file will be saved. reference_layers (list): USD Files to reference in the asset. 
@@ -146,7 +155,7 @@ def setup_asset_layer( payload_layer = Sdf.Layer.CreateAnonymous("LOP", args={"format": "usda"}) set_layer_defaults(payload_layer, default_prim=asset_name) - created_layers.append(payload_layer) + created_layers.append((payload_layer, "./payload.usd")) # Add sublayers to the payload layer # Note: Sublayering is tricky because it requires that the sublayers @@ -155,17 +164,6 @@ def setup_asset_layer( for ref_layer in reference_layers: payload_layer.subLayerPaths.append(ref_layer) - # TODO: Remove referencing logic (for now just there for testing) - # payload_asset_prim = Sdf.PrimSpec( - # payload_layer, - # prim_name, - # Sdf.SpecifierDef, - # "Xform" - # ) - # payload_asset_prim.referenceList.prependedItems[:] = [ - # Sdf.Reference(assetPath=path) for path in reference_layers - # ] - # Add payload asset_prim.payloadList.prependedItems[:] = [ Sdf.Payload(assetPath=payload_layer.identifier) @@ -194,27 +192,53 @@ def create_asset( log.debug("Creating asset at %s", filepath) # Make the layer ascii - good for readability, plus the file is small - layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"}) + layer = Sdf.Layer.CreateAnonymous() - created_layers = setup_asset_layer( + created_layers_with_paths = setup_asset_layer( layer=layer, asset_name=asset_name, reference_layers=reference_layers, kind=kind, define_class=define_class ) - for created_layer in created_layers: - created_layer.save() + _save_layer_paths_anchored_to_layer(layer, filepath, + created_layers_with_paths) + layer.Export(filepath, args={"format": "usda"}) + return [layer] + [layer for layer, _ in created_layers_with_paths] + + +def _save_layer_paths_anchored_to_layer(base_layer, + base_layer_path, + sublayers_with_paths): + """Export and update layer asset identifiers for sublayers with paths. + + The layers will be anchorde relative to the base layer and base layer's + filepath to allow for relative anchoring and saving for anonymous layers. + + """ + for layer, path in sublayers_with_paths: + if not os.path.isabs(path): + # Use relative path anchoring to the base layer + folder = os.path.dirname(base_layer_path) + full_path = os.path.join(folder, path) + else: + full_path = path - layer.Save() + # Export the layer + layer.Export(full_path, args=layer.GetFileFormatArguments()) - layers = [layer] + created_layers - return layers + # Update dependencies on the base layer + base_layer.UpdateCompositionAssetDependency(layer.identifier, path) def create_shot(filepath, layers, create_layers=False): """Create a shot with separate layers for departments. + Examples: + >>> create_shot("/path/to/shot.usd", + >>> layers=["lighting.usd", "fx.usd", "animation.usd"]) + "/path/to/shot.usd" + Args: filepath (str): Filepath where the asset.usd file will be saved. layers (list): When provided this will be added verbatim in the @@ -228,9 +252,9 @@ def create_shot(filepath, layers, create_layers=False): """ # Also see create_shot.py in PixarAnimationStudios/USD endToEnd example - root_layer = Sdf.Layer.CreateNew(filepath) - log.debug("Creating shot at %s" % filepath) + root_layer = Sdf.Layer.CreateAnonymous() + created_layers = [root_layer] for layer_path in layers: if create_layers and not os.path.exists(layer_path): # We use the Sdf API here to quickly create layers. 
Also, we're @@ -240,16 +264,16 @@ def create_shot(filepath, layers, create_layers=False): if not os.path.exists(layer_folder): os.makedirs(layer_folder) - Sdf.Layer.CreateNew(layer_path) + new_layer = Sdf.Layer.CreateNew(layer_path) + created_layers.append(new_layer) root_layer.subLayerPaths.append(layer_path) - # Let viewing applications know how to orient a free camera properly - # Similar to: UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) - root_layer.pseudoRoot.SetInfo(UsdGeom.Tokens.upAxis, UsdGeom.Tokens.y) - root_layer.Save() + set_layer_defaults(root_layer) + log.debug("Creating shot at %s" % filepath) + root_layer.Export(filepath, args={"format": "usda"}) - return filepath + return created_layers def create_model(filename, asset, variant_subsets): @@ -275,10 +299,15 @@ def create_model(filename, asset, variant_subsets): "Model subsets must start " "with usdModel: %s" % subset ) - path = get_latest_representation( - asset=asset_doc, subset=subset, representation="usd" + path = get_representation_path_by_names( + project_name=project_name, + asset_name=asset_doc, + subset_name=subset, + version_name="latest", + representation_name="usd" ) - variants.append((variant, path)) + if path: + variants.append((variant, path)) stage = _create_variants_file( filename, @@ -328,8 +357,12 @@ def create_shade(filename, asset, variant_subsets): ) shade_subset = re.sub("^usdModel", "usdShade", subset) - path = get_latest_representation( - asset=asset_doc, subset=shade_subset, representation="usd" + path = get_representation_path_by_names( + project_name=project_name, + asset_name=asset_doc, + subset_name=shade_subset, + version_name="latest", + representation_name="usd" ) variants.append((variant, path)) @@ -356,8 +389,12 @@ def create_shade_variation(filename, asset, model_variant, shade_variants): subset = "usdShade_{model}_{shade}".format( model=model_variant, shade=variant ) - path = get_latest_representation( - asset=asset_doc, subset=subset, representation="usd" + path = get_representation_path_by_names( + project_name=project_name, + asset_name=asset_doc, + subset_name=subset, + version_name="latest", + representation_name="usd" ) variants.append((variant, path)) @@ -484,7 +521,7 @@ def get_representation_path_by_names( asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) if not asset_doc: return - print(asset_doc) + if isinstance(subset_name, dict) and "name" in subset_name: # Allow explicitly passing subset document subset_doc = subset_name @@ -495,7 +532,6 @@ def get_representation_path_by_names( fields=["_id"]) if not subset_doc: return - print(subset_doc) if version_name == "hero": version = get_hero_version_by_subset_id(project_name, From 3ace260247fef36d81058b31ddd897120b2c2173 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 8 Nov 2023 17:20:20 +0100 Subject: [PATCH 10/56] Do not warn on empty instances - that's up to validator to do something with --- openpype/hosts/maya/plugins/publish/collect_instances.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_instances.py b/openpype/hosts/maya/plugins/publish/collect_instances.py index 5058da3d01c..08ccba903fd 100644 --- a/openpype/hosts/maya/plugins/publish/collect_instances.py +++ b/openpype/hosts/maya/plugins/publish/collect_instances.py @@ -28,8 +28,6 @@ class CollectNewInstances(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder hosts = ["maya"] - valid_empty_families = {"workfile", "renderlayer"} - def process(self, instance): objset = 
instance.data.get("instance_node") @@ -60,8 +58,6 @@ def process(self, instance): instance[:] = members_hierarchy - elif instance.data["family"] not in self.valid_empty_families: - self.log.warning("Empty instance: \"%s\" " % objset) # Store the exact members of the object set instance.data["setMembers"] = members From 8b1cb12f5aff1590f8028220c848829d1ebefde5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 8 Nov 2023 17:21:12 +0100 Subject: [PATCH 11/56] Allow simple `mayaUsdProxyShape` layer export to usd --- .../plugins/create/create_maya_usd_layer.py | 60 +++++++++++++++++++ .../plugins/publish/extract_maya_usd_layer.py | 58 ++++++++++++++++++ .../publish/validate_instance_has_members.py | 13 ++-- 3 files changed, 126 insertions(+), 5 deletions(-) create mode 100644 openpype/hosts/maya/plugins/create/create_maya_usd_layer.py create mode 100644 openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py new file mode 100644 index 00000000000..6fb54883689 --- /dev/null +++ b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py @@ -0,0 +1,60 @@ +from openpype.hosts.maya.api import plugin +from openpype.lib import EnumDef + + +class CreateMayaUsdLayer(plugin.MayaCreator): + """Create Maya USD Export from `mayaUsdProxyShape` layer""" + + identifier = "io.openpype.creators.maya.mayausdlayer" + label = "Maya USD Layer Export" + family = "usd" + icon = "cubes" + description = "Create mayaUsdProxyShape layer export" + + def get_publish_families(self): + return ["usd", "mayaUsdLayer"] + + def get_instance_attr_defs(self): + + from maya import cmds + import mayaUsd + + items = [] + for proxy in cmds.ls(type="mayaUsdProxyShape", long=True): + # Ignore unsharable proxies + if not cmds.getAttr(proxy + ".shareStage"): + continue + + stage = mayaUsd.ufe.getStage("|world{}".format(proxy)) + if not stage: + continue + + for layer in stage.GetLayerStack(includeSessionLayers=False): + + proxy_nice_name = proxy.rsplit("|", 2)[-2] + layer_nice_name = layer.GetDisplayName() + label = "{} -> {}".format(proxy_nice_name, layer_nice_name) + value = ">".join([proxy, layer.identifier]) + + items.append({ + "label": label, + "value": value + }) + + if not items: + items.append("") + + defs = [ + EnumDef("defaultUSDFormat", + label="File format", + items={ + "usdc": "Binary", + "usda": "ASCII" + }, + default="usdc"), + EnumDef("stageLayerIdentifier", + label="Stage and Layer Identifier", + items=items) + ] + + return defs diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py b/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py new file mode 100644 index 00000000000..5e90450ee8a --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py @@ -0,0 +1,58 @@ +import os + +from maya import cmds +from openpype.pipeline import publish + + +class ExtractMayaUsdLayer(publish.Extractor): + """Extractor for Maya USD Layer from `mayaUsdProxyShape`""" + + label = "Extract Maya USD Layer" + hosts = ["maya"] + families = ["mayaUsdLayer"] + + def process(self, instance): + + import mayaUsd + + # Load plugin first + cmds.loadPlugin("mayaUsdPlugin", quiet=True) + + data = instance.data["stageLayerIdentifier"] + proxy, layer_identifier = data.split(">", 1) + + # TODO: The stage and layer should actually be retrieved during + # Collecting so that they can be validated upon and potentially that + # any 'child layers' can potentially be 
recursively exported along + stage = mayaUsd.ufe.getStage('|world' + proxy) + layers = stage.GetLayerStack(includeSessionLayers=False) + layer = next( + layer for layer in layers if layer.identifier == layer_identifier + ) + + # Define output file path + staging_dir = self.staging_dir(instance) + file_name = "{0}.usd".format(instance.name) + file_path = os.path.join(staging_dir, file_name) + file_path = file_path.replace('\\', '/') + + self.log.debug("Exporting USD layer to: {}".format(file_path)) + layer.Export(file_path, args={ + "format": instance.data.get("defaultUSDFormat", "usdc") + }) + + # TODO: We might want to remap certain paths - to do so we could take + # the SdfLayer and transfer its contents into a anonymous SdfLayer + # then we can use the copy to alter it in memory to our like before + # writing out + + representation = { + 'name': "usd", + 'ext': "usd", + 'files': file_name, + 'stagingDir': staging_dir + } + instance.data.setdefault("representations", []).append(representation) + self.log.debug( + "Extracted instance {} to {}".format(instance.name, file_path) + ) diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py index fbc94627091..3811b732500 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py @@ -25,11 +25,14 @@ def get_invalid(cls, instance): def process(self, instance): # Allow renderlayer, rendersetup and workfile to be empty - skip_families = {"workfile", "renderlayer", "rendersetup"} - if instance.data.get("family") in skip_families: - return - - if "usd.bootstrap" in instance.data.get("families", []): + skip_families = {"workfile", + "renderlayer", + "rendersetup", + "mayaUsdLayer", + "usd.bootstrap"} + families = {instance.data.get("family")} + families.update(instance.data.get("families", [])) + if families.intersection(skip_families): return invalid = self.get_invalid(instance) From 8e57b54147fa63cac65d3e954e212ceaad560d52 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 8 Nov 2023 17:22:33 +0100 Subject: [PATCH 12/56] Remove (experimental) suffix from Houdini USD extractors --- openpype/hosts/houdini/plugins/create/create_usd.py | 2 +- openpype/hosts/houdini/plugins/create/create_usdrender.py | 2 +- .../projects_schema/schemas/schema_houdini_create.json | 4 ++-- server_addon/houdini/server/settings/create.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index e05d254863b..99818d33cf1 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -9,7 +9,7 @@ class CreateUSD(plugin.HoudiniCreator): """Universal Scene Description""" identifier = "io.openpype.creators.houdini.usd" - label = "USD (experimental)" + label = "USD" family = "usd" icon = "gears" enabled = False diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index f78f0bed503..047b131683e 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -7,7 +7,7 @@ class CreateUSDRender(plugin.HoudiniCreator): """USD Render ROP in /stage""" identifier = "io.openpype.creators.houdini.usdrender" - label = "USD Render (experimental)" + label = "USD 
Render" family = "usdrender" icon = "magic" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json index f37738c4ec5..4e4c602ac2d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json @@ -132,11 +132,11 @@ "template_data": [ { "key": "CreateUSD", - "label": "Create USD (experimental)" + "label": "Create USD" }, { "key": "CreateUSDRender", - "label": "Create USD render (experimental)" + "label": "Create USD render" }, { "key": "CreateVDBCache", diff --git a/server_addon/houdini/server/settings/create.py b/server_addon/houdini/server/settings/create.py index 81b871e83fd..1a822050eaf 100644 --- a/server_addon/houdini/server/settings/create.py +++ b/server_addon/houdini/server/settings/create.py @@ -76,10 +76,10 @@ class CreatePluginsModel(BaseSettingsModel): title="Create Static Mesh") CreateUSD: CreatorModel = Field( default_factory=CreatorModel, - title="Create USD (experimental)") + title="Create USD") CreateUSDRender: CreatorModel = Field( default_factory=CreatorModel, - title="Create USD render (experimental)") + title="Create USD render") CreateVDBCache: CreatorModel = Field( default_factory=CreatorModel, title="Create VDB Cache") From 803f4576373c2631de279a674bea5e24f004d9cc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 8 Nov 2023 17:23:36 +0100 Subject: [PATCH 13/56] Remove legacy/unused Houdin USD publish plug-ins --- .../publish/collect_instances_usd_layered.py | 148 -------- .../plugins/publish/extract_usd_layered.py | 331 ------------------ .../validate_usd_shade_model_exists.py | 40 --- .../publish/validate_usd_shade_workspace.py | 66 ---- 4 files changed, 585 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py delete mode 100644 openpype/hosts/houdini/plugins/publish/extract_usd_layered.py delete mode 100644 openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py delete mode 100644 openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py deleted file mode 100644 index 80af830a8be..00000000000 --- a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py +++ /dev/null @@ -1,148 +0,0 @@ -import hou -import pyblish.api -from openpype.hosts.houdini.api import lib -import openpype.hosts.houdini.api.usd as hou_usdlib -import openpype.lib.usdlib as usdlib - - -class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): - """Collect Instances from a ROP Network and its configured layer paths. - - The output nodes of the ROP node will only be published when *any* of the - layers remain set to 'publish' by the user. - - This works differently from most of our Avalon instances in the pipeline. - As opposed to storing `pyblish.avalon.instance` as id on the node we store - `pyblish.avalon.usdlayered`. - - Additionally this instance has no need for storing family, asset, subset - or name on the nodes. Instead all information is retrieved solely from - the output filepath, which is an Avalon URI: - avalon://{asset}/{subset}.{representation} - - Each final ROP node is considered a dependency for any of the Configured - Save Path layers it sets along the way. 
As such, the instances shown in - the Pyblish UI are solely the configured layers. The encapsulating usd - files are generated whenever *any* of the dependencies is published. - - These dependency instances are stored in: - instance.data["publishDependencies"] - - """ - - order = pyblish.api.CollectorOrder - 0.01 - label = "Collect Instances (USD Configured Layers)" - hosts = ["houdini"] - - def process(self, context): - - stage = hou.node("/stage") - if not stage: - # Likely Houdini version <18 - return - - nodes = stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) - for node in nodes: - - if not node.parm("id"): - continue - - if node.evalParm("id") != "pyblish.avalon.usdlayered": - continue - - has_family = node.evalParm("family") - assert has_family, "'%s' is missing 'family'" % node.name() - - self.process_node(node, context) - - def sort_by_family(instance): - """Sort by family""" - return instance.data.get("families", instance.data.get("family")) - - # Sort/grouped by family (preserving local index) - context[:] = sorted(context, key=sort_by_family) - - return context - - def process_node(self, node, context): - - # Allow a single ROP node or a full ROP network of USD ROP nodes - # to be processed as a single entry that should "live together" on - # a publish. - if node.type().name() == "ropnet": - # All rop nodes inside ROP Network - ropnodes = node.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) - else: - # A single node - ropnodes = [node] - - data = lib.read(node) - - # Don't use the explicit "colorbleed.usd.layered" family for publishing - # instead use the "colorbleed.usd" family to integrate. - data["publishFamilies"] = ["colorbleed.usd"] - - # For now group ALL of them into USD Layer subset group - # Allow this subset to be grouped into a USD Layer on creation - data["subsetGroup"] = "USD Layer" - - instances = list() - dependencies = [] - for ropnode in ropnodes: - - # Create a dependency instance per ROP Node. 
- lopoutput = ropnode.evalParm("lopoutput") - dependency_save_data = self.get_save_data(lopoutput) - dependency = context.create_instance(dependency_save_data["name"]) - dependency.append(ropnode) - dependency.data.update(data) - dependency.data.update(dependency_save_data) - dependency.data["family"] = "colorbleed.usd.dependency" - dependency.data["optional"] = False - dependencies.append(dependency) - - # Hide the dependency instance from the context - context.pop() - - # Get all configured layers for this USD ROP node - # and create a Pyblish instance for each one - layers = hou_usdlib.get_configured_save_layers(ropnode) - for layer in layers: - save_path = hou_usdlib.get_layer_save_path(layer) - save_data = self.get_save_data(save_path) - if not save_data: - continue - self.log.info(save_path) - - instance = context.create_instance(save_data["name"]) - instance[:] = [node] - - # Set the instance data - instance.data.update(data) - instance.data.update(save_data) - instance.data["usdLayer"] = layer - - instances.append(instance) - - # Store the collected ROP node dependencies - self.log.debug("Collected dependencies: %s" % (dependencies,)) - for instance in instances: - instance.data["publishDependencies"] = dependencies - - def get_save_data(self, save_path): - - # Resolve Avalon URI - uri_data = usdlib.parse_ayon_uri(save_path) - if not uri_data: - self.log.warning("Non Avalon URI Layer Path: %s" % save_path) - return {} - - # Collect asset + subset from URI - name = "{product} ({asset})".format(**uri_data) - fname = "{asset}_{subset}.usd".format(**uri_data) - - data = dict(uri_data) - data["usdSavePath"] = save_path - data["usdFilename"] = fname - data["name"] = name - return data diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py deleted file mode 100644 index c9785120064..00000000000 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ /dev/null @@ -1,331 +0,0 @@ -import os -import contextlib -import hou -import sys -from collections import deque - -import pyblish.api - -from openpype.client import ( - get_asset_by_name, - get_subset_by_name, - get_last_version_by_subset_id, - get_representation_by_name, -) -from openpype.pipeline import ( - get_representation_path, - publish, -) -import openpype.hosts.houdini.api.usd as hou_usdlib -from openpype.hosts.houdini.api.lib import render_rop - -try: - # Py 3.3+ - from contextlib import ExitStack -except ImportError: - # Implement for backwards compatibility - class ExitStack(object): - """Context manager for dynamic management of a stack of exit callbacks. - - For example: - - with ExitStack() as stack: - files = [stack.enter_context(open(fname)) for fname in filenames] - # All opened files will automatically be closed at the end of - # the with statement, even if attempts to open files later - # in the list raise an exception - - """ - - def __init__(self): - self._exit_callbacks = deque() - - def pop_all(self): - """Preserve context stack by transferring it to a new instance""" - new_stack = type(self)() - new_stack._exit_callbacks = self._exit_callbacks - self._exit_callbacks = deque() - return new_stack - - def _push_cm_exit(self, cm, cm_exit): - """Helper to correctly register callbacks to __exit__ methods""" - - def _exit_wrapper(*exc_details): - return cm_exit(cm, *exc_details) - - _exit_wrapper.__self__ = cm - self.push(_exit_wrapper) - - def push(self, exit): - """Registers a callback with the standard __exit__ signature. 
- - Can suppress exceptions the same way __exit__ methods can. - - Also accepts any object with an __exit__ method (registering a call - to the method instead of the object itself) - - """ - # We use an unbound method rather than a bound method to follow - # the standard lookup behaviour for special methods - _cb_type = type(exit) - try: - exit_method = _cb_type.__exit__ - except AttributeError: - # Not a context manager, so assume its a callable - self._exit_callbacks.append(exit) - else: - self._push_cm_exit(exit, exit_method) - return exit # Allow use as a decorator - - def callback(self, callback, *args, **kwds): - """Registers an arbitrary callback and arguments. - - Cannot suppress exceptions. - """ - - def _exit_wrapper(exc_type, exc, tb): - callback(*args, **kwds) - - # We changed the signature, so using @wraps is not appropriate, but - # setting __wrapped__ may still help with introspection - _exit_wrapper.__wrapped__ = callback - self.push(_exit_wrapper) - return callback # Allow use as a decorator - - def enter_context(self, cm): - """Enters the supplied context manager - - If successful, also pushes its __exit__ method as a callback and - returns the result of the __enter__ method. - """ - # We look up the special methods on the type to match the with - # statement - _cm_type = type(cm) - _exit = _cm_type.__exit__ - result = _cm_type.__enter__(cm) - self._push_cm_exit(cm, _exit) - return result - - def close(self): - """Immediately unwind the context stack""" - self.__exit__(None, None, None) - - def __enter__(self): - return self - - def __exit__(self, *exc_details): - # We manipulate the exception state so it behaves as though - # we were actually nesting multiple with statements - frame_exc = sys.exc_info()[1] - - def _fix_exception_context(new_exc, old_exc): - while 1: - exc_context = new_exc.__context__ - if exc_context in (None, frame_exc): - break - new_exc = exc_context - new_exc.__context__ = old_exc - - # Callbacks are invoked in LIFO order to match the behaviour of - # nested context managers - suppressed_exc = False - while self._exit_callbacks: - cb = self._exit_callbacks.pop() - try: - if cb(*exc_details): - suppressed_exc = True - exc_details = (None, None, None) - except Exception: - new_exc_details = sys.exc_info() - # simulate the stack of exceptions by setting the context - _fix_exception_context(new_exc_details[1], exc_details[1]) - if not self._exit_callbacks: - raise - exc_details = new_exc_details - return suppressed_exc - - -@contextlib.contextmanager -def parm_values(overrides): - """Override Parameter values during the context.""" - - originals = [] - try: - for parm, value in overrides: - originals.append((parm, parm.eval())) - parm.set(value) - yield - finally: - for parm, value in originals: - # Parameter might not exist anymore so first - # check whether it's still valid - if hou.parm(parm.path()): - parm.set(value) - - -class ExtractUSDLayered(publish.Extractor): - - order = pyblish.api.ExtractorOrder - label = "Extract Layered USD" - hosts = ["houdini"] - families = ["usdLayered", "usdShade"] - - # Force Output Processors so it will always save any file - # into our unique staging directory with processed Avalon paths - output_processors = ["ayon_uri_processor", "savepathsrelativetooutput"] - - def process(self, instance): - - self.log.info("Extracting: %s" % instance) - - staging_dir = self.staging_dir(instance) - fname = instance.data.get("usdFilename") - - # The individual rop nodes are collected as "publishDependencies" - dependencies = 
instance.data["publishDependencies"] - ropnodes = [dependency[0] for dependency in dependencies] - assert all( - node.type().name() in {"usd", "usd_rop"} for node in ropnodes - ) - - # Main ROP node, either a USD Rop or ROP network with - # multiple USD ROPs - node = hou.node(instance.data["instance_node"]) - - # Collect any output dependencies that have not been processed yet - # during extraction of other instances - outputs = [fname] - active_dependencies = [ - dep - for dep in dependencies - if dep.data.get("publish", True) - and not dep.data.get("_isExtracted", False) - ] - for dependency in active_dependencies: - outputs.append(dependency.data["usdFilename"]) - - pattern = r"*[/\]{0} {0}" - save_pattern = " ".join(pattern.format(fname) for fname in outputs) - - # Run a stack of context managers before we start the render to - # temporarily adjust USD ROP settings for our publish output. - rop_overrides = { - # This sets staging directory on the processor to force our - # output files to end up in the Staging Directory. - "savepathsrelativetooutput_rootdir": staging_dir, - # Force the Avalon URI Output Processor to refactor paths for - # references, payloads and layers to published paths. - "ayonurioutputprocessor_use_publish_paths": True, - # Only write out specific USD files based on our outputs - "savepattern": save_pattern, - } - overrides = list() - with ExitStack() as stack: - - for ropnode in ropnodes: - manager = hou_usdlib.outputprocessors( - ropnode, - processors=self.output_processors, - disable_all_others=True, - ) - stack.enter_context(manager) - - # These must be added after we enter the output - # processor context manager because those parameters only - # exist when the Output Processor is added to the ROP node. - for name, value in rop_overrides.items(): - parm = ropnode.parm(name) - assert parm, "Parm not found: %s.%s" % ( - ropnode.path(), - name, - ) - overrides.append((parm, value)) - - stack.enter_context(parm_values(overrides)) - - # Render the single ROP node or the full ROP network - render_rop(node) - - # Assert all output files in the Staging Directory - for output_fname in outputs: - path = os.path.join(staging_dir, output_fname) - assert os.path.exists(path), "Output file must exist: %s" % path - - # Set up the dependency for publish if they have new content - # compared to previous publishes - project_name = instance.context.data["projectName"] - for dependency in active_dependencies: - dependency_fname = dependency.data["usdFilename"] - - filepath = os.path.join(staging_dir, dependency_fname) - similar = self._compare_with_latest_publish( - project_name, dependency, filepath - ) - if similar: - # Deactivate this dependency - self.log.debug( - "Dependency matches previous publish version," - " deactivating %s for publish", dependency - ) - dependency.data["publish"] = False - else: - self.log.debug("Extracted dependency: %s", dependency) - # This dependency should be published - dependency.data["files"] = [dependency_fname] - dependency.data["stagingDir"] = staging_dir - dependency.data["_isExtracted"] = True - - # Store the created files on the instance - if "files" not in instance.data: - instance.data["files"] = [] - instance.data["files"].append(fname) - - def _compare_with_latest_publish(self, project_name, dependency, new_file): - """Compare whether last published version matches the current new file. - - Returns: - bool: Whether it's a match or not. 
- - """ - import filecmp - - _, ext = os.path.splitext(new_file) - - # Compare this dependency with the latest published version - # to detect whether we should make this into a new publish - # version. If not, skip it. - asset = get_asset_by_name( - project_name, dependency.data["asset"], fields=["_id"] - ) - subset = get_subset_by_name( - project_name, - dependency.data["subset"], - asset["_id"], - fields=["_id"] - ) - if not subset: - # Subset doesn't exist yet. Definitely new file - self.log.debug("No existing subset..") - return False - - version = get_last_version_by_subset_id( - project_name, subset["_id"], fields=["_id"] - ) - if not version: - self.log.debug("No existing version..") - return False - - representation = get_representation_by_name( - project_name, - representation_name=ext.lstrip("."), - version_id=version["_id"] - ) - if not representation: - self.log.debug("No existing representation..") - return False - - old_file = get_representation_path(representation) - if not os.path.exists(old_file): - return False - - return filecmp.cmp(old_file, new_file) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py deleted file mode 100644 index 0db782d5453..00000000000 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -import re - -import pyblish.api - -from openpype.client import get_subset_by_name -from openpype.pipeline.publish import ValidateContentsOrder -from openpype.pipeline import PublishValidationError - - -class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): - """Validate the Instance has no current cooking errors.""" - - order = ValidateContentsOrder - hosts = ["houdini"] - families = ["usdShade"] - label = "USD Shade model exists" - - def process(self, instance): - project_name = instance.context.data["projectName"] - asset_name = instance.data["asset"] - subset = instance.data["subset"] - - # Assume shading variation starts after a dot separator - shade_subset = subset.split(".", 1)[0] - model_subset = re.sub("^usdShade", "usdModel", shade_subset) - - asset_doc = instance.data.get("assetEntity") - if not asset_doc: - raise RuntimeError("Asset document is not filled on instance.") - - subset_doc = get_subset_by_name( - project_name, model_subset, asset_doc["_id"], fields=["_id"] - ) - if not subset_doc: - raise PublishValidationError( - ("USD Model subset not found: " - "{} ({})").format(model_subset, asset_name), - title=self.label - ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py deleted file mode 100644 index cb2099437da..00000000000 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -import pyblish.api -from openpype.pipeline import PublishValidationError - -import hou - - -class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): - """Validate USD Shading Workspace is correct version. - - There have been some issues with outdated/erroneous Shading Workspaces - so this is to confirm everything is set as it should. 
- - """ - - order = pyblish.api.ValidatorOrder - hosts = ["houdini"] - families = ["usdShade"] - label = "USD Shade Workspace" - - def process(self, instance): - - rop = hou.node(instance.data.get("instance_node")) - workspace = rop.parent() - - definition = workspace.type().definition() - name = definition.nodeType().name() - library = definition.libraryFilePath() - - all_definitions = hou.hda.definitionsInFile(library) - node_type, version = name.rsplit(":", 1) - version = float(version) - - highest = version - for other_definition in all_definitions: - other_name = other_definition.nodeType().name() - other_node_type, other_version = other_name.rsplit(":", 1) - other_version = float(other_version) - - if node_type != other_node_type: - continue - - # Get the highest version - highest = max(highest, other_version) - - if version != highest: - raise PublishValidationError( - ("Shading Workspace is not the latest version." - " Found {}. Latest is {}.").format(version, highest), - title=self.label - ) - - # There were some issues with the editable node not having the right - # configured path. So for now let's assure that is correct to.from - value = ( - 'avalon://`chs("../asset_name")`/' - 'usdShade`chs("../model_variantname1")`.usd' - ) - rop_value = rop.parm("lopoutput").rawValue() - if rop_value != value: - raise PublishValidationError( - ("Shading Workspace has invalid 'lopoutput'" - " parameter value. The Shading Workspace" - " needs to be reset to its default values."), - title=self.label - ) From 6cd836e0c75ba0709dafe0ac89b4502ccd9f4fa5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 8 Nov 2023 18:14:06 +0100 Subject: [PATCH 14/56] Allow publishing USD ROP node including upstream configured save layer paths as separate publish instances - This also remaps the path on publish to the generated publish path so the resulting USD file with point to the published paths; this would be way more trivial with a working AYON USD asset resolver --- openpype/hosts/houdini/api/lib.py | 24 ++++ openpype/hosts/houdini/api/usd.py | 50 +++++++- .../plugins/publish/collect_usd_layers.py | 118 ++++++++++++++++-- .../houdini/plugins/publish/extract_usd.py | 78 +++++++++++- .../publish/validate_usd_output_node.py | 6 + .../outputprocessors/remap_to_publish.py | 66 ++++++++++ 6 files changed, 327 insertions(+), 15 deletions(-) create mode 100644 openpype/hosts/houdini/startup/husdplugins/outputprocessors/remap_to_publish.py diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index ac375c56d61..d6bec66e910 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -495,6 +495,30 @@ def maintained_selection(): node.setSelected(on=True) +@contextmanager +def parm_values(overrides): + """Override Parameter values during the context. + + Arguments: + overrides (List[Tuple[hou.Parm, Any]]): The overrides per parm + that should be applied during context. 
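+
+    Example:
+        A minimal usage sketch; the ROP node path used here is purely
+        illustrative:
+
+        >>> rop = hou.node("/out/usd_rop1")
+        >>> with parm_values([(rop.parm("f1"), 1001), (rop.parm("f2"), 1010)]):
+        ...     render_rop(rop)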
+ + """ + + originals = [] + try: + for parm, value in overrides: + originals.append((parm, parm.eval())) + parm.set(value) + yield + finally: + for parm, value in originals: + # Parameter might not exist anymore so first + # check whether it's still valid + if hou.parm(parm.path()): + parm.set(value) + + def reset_framerange(): """Set frame range and FPS to current asset""" diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index 1d4415ae94a..0a672474644 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -2,6 +2,7 @@ import contextlib import logging +import json import hou from pxr import Sdf, Vt @@ -186,8 +187,18 @@ def iter_layer_recursive(layer): yield layer -def get_configured_save_layers(usd_rop): - """Retrieve the layer save paths from a USD ROP.""" +def get_configured_save_layers(usd_rop, strip_above_layer_break=True): + """Retrieve the layer save paths from a USD ROP. + + Arguments: + usdrop (hou.RopNode): USD Rop Node + strip_above_layer_break (Optional[bool]): Whether to exclude any + layers that are above layer breaks. This defaults to True. + + Returns: + List[Sdf.Layer]: The layers with configured save paths. + + """ lop_node = get_usd_rop_loppath(usd_rop) stage = lop_node.stage(apply_viewport_overrides=False) @@ -198,8 +209,19 @@ def get_configured_save_layers(usd_rop): root_layer = stage.GetRootLayer() + if strip_above_layer_break: + layers_above_layer_break = set(lop_node.layersAboveLayerBreak()) + else: + layers_above_layer_break = set() + save_layers = [] for layer in iter_layer_recursive(root_layer): + if ( + strip_above_layer_break and + layer.identifier in layers_above_layer_break + ): + continue + save_path = get_layer_save_path(layer) if save_path is not None: save_layers.append(layer) @@ -261,3 +283,27 @@ def setup_lop_python_layer(layer, node, savepath=None, node.addHeldLayer(layer.identifier) return p + + +@contextlib.contextmanager +def remap_paths(rop_node, mapping): + """Enable the AyonRemapPaths output processor with provided `mapping`""" + from openpype.hosts.houdini.api.lib import parm_values + + if not mapping: + # Do nothing + yield + return + + # Houdini string parms need to escape backslashes due to the support + # of expressions - as such we do so on the json data + value = json.dumps(mapping).replace("\\", "\\\\") + with outputprocessors( + rop_node, + processors=["ayon_remap_paths"], + disable_all_others=True, + ): + with parm_values([ + (rop_node.parm("ayon_remap_paths_remap_json"), value) + ]): + yield diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index 696560a5906..8d8a2501467 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -1,11 +1,54 @@ +import copy import os +import re import pyblish.api + +from openpype.pipeline.create import get_subset_name +from openpype.client import get_asset_by_name import openpype.hosts.houdini.api.usd as usdlib import hou +def copy_instance_data(instance_src, instance_dest, attr): + """Copy instance data from `src` instance to `dest` instance. + + Examples: + >>> copy_instance_data(instance_src, instance_dest, + >>> attr="publish_attributes.CollectRopFrameRange") + + Arguments: + instance_src (pyblish.api.Instance): Source instance to copy from + instance_dest (pyblish.api.Instance): Target instance to copy to + attr (str): Attribute on the source instance to copy. 
This can be
+            a nested key joined by `.` to only copy sub entries of dictionaries
+            in the source instance's data.
+
+    Raises:
+        KeyError: If the key does not exist on the source instance.
+        AssertionError: If a parent key already exists on the destination
+            instance but is not of the correct type (= is not a dict)
+
+    """
+
+    src_data = instance_src.data
+    dest_data = instance_dest.data
+    keys = attr.split(".")
+    for i, key in enumerate(keys):
+        if key not in src_data:
+            break
+
+        src_value = src_data[key]
+        if i != len(keys) - 1:
+            dest_data = dest_data.setdefault(key, {})
+            assert isinstance(dest_data, dict), "Destination must be a dict"
+            src_data = src_value
+        else:
+            # Last iteration - assign the value
+            dest_data[key] = copy.deepcopy(src_value)
+
+
 class CollectUsdLayers(pyblish.api.InstancePlugin):
     """Collect the USD Layers that have configured save paths."""
 
@@ -15,6 +58,10 @@ class CollectUsdLayers(pyblish.api.InstancePlugin):
     families = ["usd"]
 
     def process(self, instance):
+        # TODO: Replace this with a Hidden Creator so we collect these BEFORE
+        #  starting the publish so the user sees them before publishing
+        #  - however user should not be able to individually enable/disable
+        #  this from the main ROP it's created from?
 
         output = instance.data.get("output_node")
         if not output:
@@ -29,15 +76,19 @@ def process(self, instance):
             info = layer.rootPrims.get("HoudiniLayerInfo")
             save_path = info.customData.get("HoudiniSavePath")
             creator = info.customData.get("HoudiniCreatorNode")
+            self.log.info(info.customData)
 
             self.log.debug("Found configured save path: "
-                           "%s -> %s" % (layer, save_path))
+                           "%s -> %s", layer, save_path)
 
             # Log node that configured this save path
-            if creator:
-                self.log.debug("Created by: %s" % creator)
+            creator_node = hou.nodeBySessionId(creator) if creator else None
+            if creator_node:
+                self.log.debug(
+                    "Created by: %s", creator_node.path()
+                )
 
-            save_layers.append((layer, save_path))
+            save_layers.append((layer, save_path, creator_node))
 
         # Store on the instance
         instance.data["usdConfiguredSavePaths"] = save_layers
 
         # Create configured layer instances so User can disable updating
         # specific configured layers for publishing. 
context = instance.context - for layer, save_path in save_layers: + for layer, save_path, creator_node in save_layers: name = os.path.basename(save_path) - label = "{0} -> {1}".format(instance.data["name"], name) layer_inst = context.create_instance(name) - family = "usdlayer" + # include same USD ROP + layer_inst.append(rop_node) + + staging_dir, fname = os.path.split(save_path) + fname_no_ext, ext = os.path.splitext(fname) + + variant = fname_no_ext + + # Strip off any trailing version number in the form of _v[0-9]+ + variant = re.sub("_v[0-9]+$", "", variant) + + layer_inst.data["usd_layer"] = layer + layer_inst.data["usd_layer_save_path"] = save_path + + project_name = context.data["projectName"] + asset_doc = get_asset_by_name(project_name, + asset_name=instance.data["asset"]) + variant_base = instance.data["variant"] + subset = get_subset_name( + family="usd", + variant=variant_base + "_" + variant, + task_name=context.data["anatomyData"]["task"]["name"], + asset_doc=asset_doc, + project_name=project_name, + host_name=context.data["hostName"], + project_settings=context.data["project_settings"] + ) + + label = "{0} -> {1}".format(instance.data["name"], subset) + family = "usd" layer_inst.data["family"] = family layer_inst.data["families"] = [family] - layer_inst.data["subset"] = "__stub__" + layer_inst.data["subset"] = subset layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] layer_inst.data["instance_node"] = instance.data["instance_node"] - # include same USD ROP - layer_inst.append(rop_node) - # include layer data - layer_inst.append((layer, save_path)) + layer_inst.data["render"] = False + layer_inst.data["output_node"] = creator_node + + # Inherit "use handles" from the source instance + # TODO: Do we want to maybe copy full `publish_attributes` instead? 
+ copy_instance_data( + instance, layer_inst, + attr="publish_attributes.CollectRopFrameRange.use_handles" + ) # Allow this subset to be grouped into a USD Layer on creation layer_inst.data["subsetGroup"] = "USD Layer" + + # For now just assume the representation will get published + representation = { + "name": "usd", + "ext": ext.lstrip("."), + "stagingDir": staging_dir, + "files": fname + } + layer_inst.data.setdefault("representations", []).append( + representation) diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 61c1b477b2d..e126457696b 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -1,12 +1,15 @@ +import copy import os import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +from openpype.hosts.houdini.api.usd import remap_paths import hou + class ExtractUSD(publish.Extractor): order = pyblish.api.ExtractorOrder @@ -18,6 +21,11 @@ class ExtractUSD(publish.Extractor): def process(self, instance): + # TODO: Clean this up - for now this is used for runtime upstream + # instances (explicit save path layers) from `collect_usd_layers.py` + if not instance.data.get("render", True): + return + ropnode = hou.node(instance.data.get("instance_node")) # Get the filename from the filename parameter @@ -28,7 +36,9 @@ def process(self, instance): self.log.info("Writing USD '%s' to '%s'" % (file_name, staging_dir)) - render_rop(ropnode) + mapping = self.get_source_to_publish_paths(instance.context) + with remap_paths(ropnode, mapping): + render_rop(ropnode) assert os.path.exists(output), "Output does not exist: %s" % output @@ -42,3 +52,69 @@ def process(self, instance): "stagingDir": staging_dir, } instance.data["representations"].append(representation) + + def get_source_to_publish_paths(self, context): + """Define a mapping of all current instances in context from source + file to publish file so this can be used on the USD save to remap + asset layer paths on publish via AyonRemapPaths output processor""" + + mapping = {} + for instance in context: + + if not instance.data.get("active", True): + continue + + if not instance.data.get("publish", True): + continue + + for repre in instance.data.get("representations", []): + name = repre.get("name") + ext = repre.get("ext") + + # TODO: The remapping might need to get more involved if the + # asset paths that are set use e.g. 
$F + # TODO: If the representation has multiple files we might need + # to define the path remapping per file of the sequence + path = get_instance_expected_output_path( + instance, representation_name=name, ext=ext + ) + for source_path in get_source_paths(instance, repre): + source_path = os.path.normpath(source_path) + mapping[source_path] = path + + return mapping + + +def get_source_paths(instance, repre): + """Return the full source filepath for an instance's representations""" + + staging = repre.get("stagingDir", instance.data.get("stagingDir")) + files = repre.get("files", []) + if isinstance(files, list): + return [os.path.join(staging, fname) for fname in files] + else: + # Single file + return [os.path.join(staging, files)] + + +def get_instance_expected_output_path(instance, representation_name, ext=None): + """Return expected publish filepath for representation in instance""" + + if ext is None: + ext = representation_name + + context = instance.context + anatomy = context.data["anatomy"] + path_template_obj = anatomy.templates_obj["publish"]["path"] + template_data = copy.deepcopy(instance.data["anatomyData"]) + template_data.update({ + "ext": ext, + "representation": representation_name, + "subset": instance.data["subset"], + "asset": instance.data["asset"], + "variant": instance.data.get("variant"), + "version": instance.data["version"] + }) + + template_filled = path_template_obj.format_strict(template_data) + return os.path.normpath(template_filled) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 5cb5bd35fb4..d4875471448 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -44,6 +44,12 @@ def get_invalid(cls, instance): return [node.path()] + # TODO: Remove early return + # This is just done so that for now the "CreatorNode" defined for + # an explicit save path layer (upstream from the RopNode) can pass + # through here, see "collect_usd_layers.py" + return + # Output node must be a Sop node. 
if not isinstance(output_node, hou.LopNode): cls.log.error( diff --git a/openpype/hosts/houdini/startup/husdplugins/outputprocessors/remap_to_publish.py b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/remap_to_publish.py new file mode 100644 index 00000000000..17d2db0a17e --- /dev/null +++ b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/remap_to_publish.py @@ -0,0 +1,66 @@ +import os +import json + +import hou +from husd.outputprocessor import OutputProcessor + + +class AyonRemapPaths(OutputProcessor): + """Remap paths based on a mapping dict on rop node.""" + + def __init__(self): + self._mapping = dict() + + @staticmethod + def name(): + return "ayon_remap_paths" + + @staticmethod + def displayName(): + return "Ayon Remap Paths" + + @staticmethod + def hidden(): + return True + + @staticmethod + def parameters(): + group = hou.ParmTemplateGroup() + + parm_template = hou.StringParmTemplate( + "ayon_remap_paths_remap_json", + "Remapping dict (json)", + default_value="{}", + num_components=1, + string_type=hou.stringParmType.Regular, + ) + group.append(parm_template) + + return group.asDialogScript() + + def beginSave(self, config_node, config_overrides, lop_node, t): + super(AyonRemapPaths, self).beginSave(config_node, + config_overrides, + lop_node, + t) + + value = config_node.evalParm("ayon_remap_paths_remap_json") + mapping = json.loads(value) + assert isinstance(self._mapping, dict) + + # Ensure all keys are normalized paths so the lookup can be done + # correctly + mapping = { + os.path.normpath(key): value for key, value in mapping.items() + } + self._mapping = mapping + + def processReferencePath(self, + asset_path, + referencing_layer_path, + asset_is_layer): + return self._mapping.get(os.path.normpath(asset_path), asset_path) + + +def usdOutputProcessor(): + return AyonRemapPaths From b9b5d3f0ffb25f6833e8e2cef95853ff2a5248a5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 8 Nov 2023 18:14:40 +0100 Subject: [PATCH 15/56] Cleanup + fix label --- .../startup/husdplugins/outputprocessors/ayon_uri_processor.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py index a0822d73d54..a2f3494626d 100644 --- a/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py +++ b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py @@ -1,5 +1,4 @@ import logging -import os from husd.outputprocessor import OutputProcessor @@ -24,7 +23,7 @@ def name(): @staticmethod def displayName(): - return "Avalon URI Output Processor" + return "Ayon URI Output Processor" def processReferencePath(self, asset_path, From 0c0ab1a22bbdb60d5df8a702db72fc70d0225ef8 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 8 Nov 2023 18:16:19 +0100 Subject: [PATCH 16/56] WIP draft for Maya USD exports to "USD Assets" with a Python API that can also run 'live' for Houdini Python LOP nodes (editing Sdf.Layer, etc. without saving to disk directly using e.g. 
`setup_asset_layer` --- .../maya/plugins/create/create_maya_usd.py | 210 ++++++++++++------ .../plugins/publish/extract_usd_bootstrap.py | 64 +++--- openpype/lib/usdlib.py | 115 ++++++---- 3 files changed, 251 insertions(+), 138 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd.py b/openpype/hosts/maya/plugins/create/create_maya_usd.py index b268503b434..7e33af84bc7 100644 --- a/openpype/hosts/maya/plugins/create/create_maya_usd.py +++ b/openpype/hosts/maya/plugins/create/create_maya_usd.py @@ -5,22 +5,19 @@ TextDef, UILabelDef, UISeparatorDef, - usdlib ) -from openpype.pipeline.context_tools import get_current_context from maya import cmds class CreateMayaUsd(plugin.MayaCreator): - """Create Maya USD Export""" + """Create Maya USD Export from maya scene objects""" identifier = "io.openpype.creators.maya.mayausd" label = "Maya USD" family = "usd" icon = "cubes" description = "Create Maya USD Export" - cache = {} def get_publish_families(self): @@ -127,97 +124,172 @@ class CreateMayaUsdContribution(CreateMayaUsd): icon = "cubes" description = "Create Maya USD Contribution" + default_variants = ["main"] + # TODO: Do not include material for model publish + # TODO: Do only include material + assignments for material publish + # + attribute overrides onto existing geo? (`over`?) + # Define all in `geo` as `over`? + + bootstrap = "asset" + + contribution_asset_layer = None + + def create_template_hierarchy(self, asset_name, variant): + """Create the asset root template to hold the geo for the usd asset. + + Args: + asset_name: Asset name to use for the group + variant: Variant name to use as namespace. + This is needed so separate asset contributions can be + correctly created from a single scene. + + Returns: + list: The root node and geometry group. 
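+
+        Example:
+            A minimal sketch; the asset and variant names used here are
+            only illustrative:
+
+            >>> root, geo = self.create_template_hierarchy(
+            ...     asset_name="characterA", variant="main")
+
+            This creates a "main:characterA" transform with a "geo" child
+            that is tagged to be exported as a USD `Scope`.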
+ + """ + + def set_usd_type(node, value): + attr = "USD_typeName" + if not cmds.attributeQuery(attr, node=node, exists=True): + cmds.addAttr(node, ln=attr, dt="string") + cmds.setAttr(f"{node}.{attr}", value, type="string") + + # Ensure simple unique namespace (add trailing number) + namespace = variant + name = f"{namespace}:{asset_name}" + i = 1 + while cmds.objExists(name): + name = f"{namespace}{i}:{asset_name}" + i += 1 + + # Define template hierarchy {asset_name}/geo + root = cmds.createNode("transform", + name=name, + skipSelect=True) + geo = cmds.createNode("transform", + name="geo", + parent=root, + skipSelect=True) + set_usd_type(geo, "Scope") + # Lock + hide transformations since we're exporting as Scope + for attr in ["tx", "ty", "tz", "rx", "ry", "rz", "sx", "sy", "sz"]: + cmds.setAttr(f"{geo}.{attr}", lock=True, keyable=False) + + return [root, geo] + + def create(self, subset_name, instance_data, pre_create_data): + + # Create template hierarchy + if pre_create_data.get("createTemplateHierarchy", True): + members = [] + if pre_create_data.get("use_selection"): + members = cmds.ls(selection=True, + long=True, + type="dagNode") + + root, geo = self.create_template_hierarchy( + asset_name=instance_data["asset"], + variant=instance_data["variant"] + ) + + if members: + cmds.parent(members, geo) + + # Select root and enable selection just so parent class' + # create adds it to the created instance + cmds.select(root, replace=True, noExpand=True) + pre_create_data["use_selection"] = True + + super(CreateMayaUsdContribution, self).create( + subset_name, + instance_data, + pre_create_data + ) + + def add_transient_instance_data(self, instance_data): + super().add_transient_instance_data(instance_data) + instance_data["usd_bootstrap"] = self.bootstrap + instance_data["usd_contribution"] = "model" + + def remove_transient_instance_data(self, instance_data): + super().remove_transient_instance_data(instance_data) + instance_data.pop("usd_bootstrap", None) + instance_data.pop("usd_contribution", None) + def get_publish_families(self): families = ["usd", "mayaUsd", "usd.layered"] if self.family not in families: families.append(self.family) return families - def get_instance_attr_defs(self): - - context = get_current_context() + def get_pre_create_attr_defs(self): + defs = super(CreateMayaUsdContribution, self).get_pre_create_attr_defs() + defs.extend([ + BoolDef("createTemplateHierarchy", + label="Create template hierarchy", + default=True) + ]) + return defs - # The departments must be 'ordered' so that e.g. a look can apply - # overrides to any opinion from the model department. - department = context["task_name"] # usually equals the department? - variant = "Main" # used as default for sublayer + def get_instance_attr_defs(self): defs = [ UISeparatorDef("contribution_settings1"), UILabelDef(label="Contribution"), UISeparatorDef("contribution_settings2"), - BoolDef("contribution_enabled", - label="Add to USD container", - default=True), - TextDef("contribution_department_layer", + TextDef("contribution_asset", + label="USD Asset subset", + default="usdAsset"), + + # Asset layer, e.g. model.usd, look.usd, rig.usd + EnumDef("contribution_asset_layer", label="Department layer", - default=department), - TextDef("contribution_sublayer", - label="Sublayer", - # Usually e.g. 
usdModel, usdLook, usdLookRed - default=variant), + tooltip="The layer the contribution should be made to in " + "the usd asset.", + items=["model", "look", "rig"], + hidden=bool(self.contribution_asset_layer), + default=self.contribution_asset_layer), + BoolDef("contribute_as_variant", + label="Use as variant", + default=True), TextDef("contribution_variant_set_name", label="Variant Set Name", - default=""), + default="model"), TextDef("contribution_variant", label="Variant Name", - default=""), + default="{variant}"), + + # Separate the rest of the settings visually UISeparatorDef("export_settings1"), UILabelDef(label="Export Settings"), UISeparatorDef("export_settings2"), ] defs += super(CreateMayaUsdContribution, self).get_instance_attr_defs() - return defs - - -for contribution in usdlib.PIPELINE["asset"]: - step = contribution.step - - class CreateMayaUsdDynamicStepContribution(CreateMayaUsdContribution): - identifier = f"{CreateMayaUsdContribution.identifier}.{step}" - default_variants = plugin.MayaCreator.default_variants - label = f"USD {step.title()}" - family = contribution.family - - # Define some nice icons - icon = { - "look": "paint-brush", - "model": "cube", - "rig": "wheelchair" - }.get(step, "cubes") - - description = f"Create USD {step.title()} Contribution" - - bootstrap = "asset" + # Remove certain settings that we don't want to expose on asset + # creation + remove = {"stripNamespaces", "mergeTransformAndShape"} + defs = [attr_def for attr_def in defs if attr_def.key not in remove] + return defs - contribution = contribution - # TODO: Should these still be customizable - # contribution_sublayer_order = contribution.order - # contribution_department = contribution.step - # contribution_variant_set_name = contribution.step - # contribution_variant_name = "{variant}" +class CreateUsdLookContribution(CreateMayaUsdContribution): + """Look layer contribution to the USD Asset""" + identifier = CreateMayaUsdContribution.identifier + ".look" + label = "USD Look" + icon = "paint-brush" + description = "Create USD Look contribution" + family = "usd.look" - def add_transient_instance_data(self, instance_data): - super().add_transient_instance_data(instance_data) - instance_data["usd_bootstrap"] = self.bootstrap - instance_data["usd_contribution"] = self.contribution + contribution_asset_layer = "look" - def remove_transient_instance_data(self, instance_data): - super().remove_transient_instance_data(instance_data) - instance_data.pop("usd_bootstrap", None) - instance_data.pop("usd_contribution", None) - # Dynamically create USD creators for easy access to a certain step - # in production - global_variables = globals() - klass_name = f"CreateMayaUsd{step.title()}Contribution" - klass = type(klass_name, (CreateMayaUsdDynamicStepContribution,), {}) - global_variables[klass_name] = klass +class CreateUsdModelContribution(CreateMayaUsdContribution): + """Model layer contribution to the USD Asset""" + identifier = CreateMayaUsdContribution.identifier + ".model" + label = "USD Model" + icon = "cube" + description = "Create USD Model contribution" + family = "usd.model" - # We only want to store the global variables, and don't want the last - # iteration of the loop to persist after because Create Context will - # pick those up too - del klass - del CreateMayaUsdDynamicStepContribution + contribution_asset_layer = "model" diff --git a/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py b/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py index 6adbbf27319..6095893bdac 
100644 --- a/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py +++ b/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py @@ -7,6 +7,28 @@ from openpype.pipeline.create import get_subset_name +def get_instance_expected_output_path(instance, representation_name, ext=None): + + if ext is None: + ext = representation_name + + context = instance.context + anatomy = context.data["anatomy"] + path_template_obj = anatomy.templates_obj["publish"]["path"] + template_data = copy.deepcopy(instance.data["anatomyData"]) + template_data.update({ + "ext": ext, + "representation": representation_name, + "subset": instance.data["subset"], + "asset": instance.data["asset"], + "variant": instance.data["variant"], + "version": instance.data["version"] + }) + + template_filled = path_template_obj.format_strict(template_data) + return os.path.normpath(template_filled) + + class ExtractBootstrapUSD(publish.Extractor): """Extract in-memory bootstrap USD files for Assets and Shots. @@ -33,13 +55,17 @@ def process(self, instance): # Asset contributions = usdlib.PIPELINE["asset"] layers = self.get_contribution_paths(contributions, instance) - relative_files = usdlib.create_asset( + created_layers = usdlib.create_asset( filepath, asset_name=instance.data["asset"], reference_layers=layers ) - for relative_file in relative_files: - self.add_relative_file(instance, relative_file) + + # Ignore the first layer which is the asset layer that is not + # relative to itself + created_layers = created_layers[1:] + for layer in created_layers: + self.add_relative_file(instance, layer.get_full_path()) elif subset == "usdShot": # Shot @@ -86,7 +112,7 @@ def process(self, instance): "stagingDir": staging_dir }) - def add_relative_file(self, instance, relative_path, staging_dir=None): + def add_relative_file(self, instance, source, staging_dir=None): """Add transfer for a relative path form staging to publish dir. 
Unlike files in representations, the file will not be renamed and @@ -96,12 +122,13 @@ def add_relative_file(self, instance, relative_path, staging_dir=None): if staging_dir is None: staging_dir = self.staging_dir(instance) publish_dir = instance.data["publishDir"] - source = os.path.join(staging_dir, relative_path) - source = os.path.normpath(source) + + relative_path = os.path.relpath(source, staging_dir) destination = os.path.join(publish_dir, relative_path) destination = os.path.normpath(destination) transfers = instance.data.setdefault("transfers", []) + self.log.debug(f"Adding relative file {source} -> {relative_path}") transfers.append((source, destination)) def get_contribution_paths(self, contributions, instance): @@ -203,27 +230,10 @@ def get_representation_path_per_subset_in_publish(self, subsets, instance): "subset {}".format(subset) ) - anatomy = context.data["anatomy"] - path_template_obj = anatomy.templates_obj["publish"]["path"] - template_data = copy.deepcopy(instance.data["anatomyData"]) - template_data.update({ - "ext": "usd", - "representation": "usd", - "subset": subset, - "asset": other_instance.data["asset"], - "variant": other_instance.data["variant"], - "version": other_instance.data["version"] - }) - if "version" in other_instance.data: - template_data["version"] = other_instance.data["version"] - - self.log.debug( - "Found publish session subset '{}' version 'v{:03d}'".format( - subset, other_instance.data["version"] - )) - - template_filled = path_template_obj.format_strict(template_data) - result[subset] = os.path.normpath(template_filled) + path = get_instance_expected_output_path( + other_instance, representation_name="usd" + ) + result[subset] = path return result diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index b920fff33e2..bfb8135ecc6 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -1,3 +1,4 @@ +import dataclasses import os import re import logging @@ -42,6 +43,44 @@ Contribution = namedtuple("Contribution", ("family", "variant", "order", "step")) + +@dataclasses.dataclass +class Layer: + layer: Sdf.Layer + path: str + # Allow to anchor a layer to another so that when the layer would be + # exported it'd write itself out relative to its anchor + anchor: 'Layer' = None + + @property + def identifier(self): + return self.layer.identifier + + def get_full_path(self): + """Return full path relative to the anchor layer""" + if not os.path.isabs(self.path) and self.anchor: + anchor_path = self.anchor.get_full_path() + root = os.path.dirname(anchor_path) + return os.path.normpath(os.path.join(root, self.path)) + else: + return self.path + + def export(self, path=None, args=None): + """Save the layer""" + if path is None: + path = self.get_full_path() + + if args is None: + args = self.layer.GetFileFormatArguments() + + self.layer.Export(path, args=args) + + @classmethod + def create_anonymous(cls, path, tag="LOP", anchor=None): + sdf_layer = Sdf.Layer.CreateAnonymous(tag) + return cls(layer=sdf_layer, path=path, anchor=anchor) + + # The predefined steps order used for bootstrapping USD Shots and Assets. # These are ordered in order from strongest to weakest opinions, like in USD. PIPELINE = { @@ -93,12 +132,12 @@ def setup_asset_layer( preferred output save paths. Args: - filepath (str): Filepath where the asset.usd file will be saved. + layer (Sdf.Layer): Layer to set up the asset structure for. + asset_name (str): The name for the Asset identifier and default prim. 
reference_layers (list): USD Files to reference in the asset. Note that the bottom layer (first file, like a model) would be last in the list. The strongest layer will be the first index. - asset_name (str): The name for the Asset identifier and default prim. kind (pxr.Kind): A USD Kind for the root asset. define_class: Define a `/__class__/{asset_name}` class which the root asset prim will inherit from. @@ -155,7 +194,8 @@ def setup_asset_layer( payload_layer = Sdf.Layer.CreateAnonymous("LOP", args={"format": "usda"}) set_layer_defaults(payload_layer, default_prim=asset_name) - created_layers.append((payload_layer, "./payload.usd")) + created_layers.append(Layer(layer=payload_layer, + path="./payload.usd")) # Add sublayers to the payload layer # Note: Sublayering is tricky because it requires that the sublayers @@ -189,46 +229,31 @@ def create_asset( """ # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example - log.debug("Creating asset at %s", filepath) - # Make the layer ascii - good for readability, plus the file is small - layer = Sdf.Layer.CreateAnonymous() + sdf_layer = Sdf.Layer.CreateAnonymous() + layer = Layer(layer=sdf_layer, path=filepath) - created_layers_with_paths = setup_asset_layer( - layer=layer, + created_layers = setup_asset_layer( + layer=sdf_layer, asset_name=asset_name, reference_layers=reference_layers, kind=kind, define_class=define_class ) - _save_layer_paths_anchored_to_layer(layer, filepath, - created_layers_with_paths) - layer.Export(filepath, args={"format": "usda"}) - return [layer] + [layer for layer, _ in created_layers_with_paths] - + for created_layer in created_layers: + created_layer.anchor = layer + created_layer.export() -def _save_layer_paths_anchored_to_layer(base_layer, - base_layer_path, - sublayers_with_paths): - """Export and update layer asset identifiers for sublayers with paths. - - The layers will be anchorde relative to the base layer and base layer's - filepath to allow for relative anchoring and saving for anonymous layers. 
- - """ - for layer, path in sublayers_with_paths: - if not os.path.isabs(path): - # Use relative path anchoring to the base layer - folder = os.path.dirname(base_layer_path) - full_path = os.path.join(folder, path) - else: - full_path = path + # Update the dependency on the base layer + sdf_layer.UpdateCompositionAssetDependency( + created_layer.identifier, created_layer.get_full_path() + ) - # Export the layer - layer.Export(full_path, args=layer.GetFileFormatArguments()) + # Make the layer ascii - good for readability, plus the file is small + log.debug("Creating asset at %s", filepath) + layer.export(args={"format": "usda"}) - # Update dependencies on the base layer - base_layer.UpdateCompositionAssetDependency(layer.identifier, path) + return [layer] + created_layers def create_shot(filepath, layers, create_layers=False): @@ -367,10 +392,9 @@ def create_shade(filename, asset, variant_subsets): variants.append((variant, path)) stage = _create_variants_file( - filename, variants=variants, variantset="model", variant_prim="/root" + variants=variants, variantset="model", variant_prim="/root" ) - - stage.GetRootLayer().Save() + stage.GetRootLayer().Export(filename, args={"format": "usda"}) def create_shade_variation(filename, asset, model_variant, shade_variants): @@ -399,14 +423,12 @@ def create_shade_variation(filename, asset, model_variant, shade_variants): variants.append((variant, path)) stage = _create_variants_file( - filename, variants=variants, variantset="shade", variant_prim="/root" + variants=variants, variantset="shade", variant_prim="/root" ) - - stage.GetRootLayer().Save() + stage.GetRootLayer().Export(filename, args={"format": "usda"}) def _create_variants_file( - filename, variants, variantset, default_variant=None, @@ -418,8 +440,17 @@ def _create_variants_file( ): """Create a USD file with references to given variants and their paths. + Examples: + >>> stage = _create_variants_file("model.usd", + >>> variants=[ + >>> ("main", "main.usd"), + >>> ("damaged", "damaged.usd"), + >>> ("twisted", "twisted.usd") + >>> ], + >>> variantset="model") + >>> stage.rootLayer.Export("model.usd", args={"format": "usda"}) + Arguments: - filename (str): USD file containing the variant sets. variants (List[List[str, str]): List of two-tuples of variant name to the filepath that should be referenced in for that variant. variantset (str): Name of the variant set @@ -440,7 +471,7 @@ def _create_variants_file( """ - root_layer = Sdf.Layer.CreateNew(filename, args={"format": "usda"}) + root_layer = Sdf.Layer.CreateAnonymous() stage = Usd.Stage.Open(root_layer) root_prim = stage.DefinePrim(variant_prim) From 05754bb3fa63227edb6c00d8a7508e2c757ac860 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 8 Nov 2023 19:51:14 +0100 Subject: [PATCH 17/56] Improve docstring --- .../hosts/maya/plugins/publish/extract_maya_usd_layer.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py b/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py index 5e90450ee8a..914874ede95 100644 --- a/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py +++ b/openpype/hosts/maya/plugins/publish/extract_maya_usd_layer.py @@ -5,7 +5,12 @@ class ExtractMayaUsdLayer(publish.Extractor): - """Extractor for Maya USD Layer from `mayaUsdProxyShape`""" + """Extractor for Maya USD Layer from `mayaUsdProxyShape` + + Exports a single Sdf.Layer from a mayaUsdPlugin `mayaUsdProxyShape`. 
+ These layers are the same managed via Maya's Windows > USD Layer Editor. + + """ label = "Extract Maya USD Layer" hosts = ["maya"] From 3503514b19f1b88fbf02cad046f1bc5f5fb7b721 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 8 Nov 2023 20:09:43 +0100 Subject: [PATCH 18/56] Add comments --- openpype/hosts/maya/plugins/create/create_maya_usd_layer.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py index 6fb54883689..4f1903de54c 100644 --- a/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py +++ b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py @@ -19,6 +19,8 @@ def get_instance_attr_defs(self): from maya import cmds import mayaUsd + # Construct the stage + layer EnumDef from the maya proxies in the + # scene and the Sdf.Layer stack of the Usd.Stage per proxy. items = [] for proxy in cmds.ls(type="mayaUsdProxyShape", long=True): # Ignore unsharable proxies @@ -42,6 +44,7 @@ def get_instance_attr_defs(self): }) if not items: + # EnumDef is not allowed to be empty items.append("") defs = [ From c38b3e97d18ced4b190b0ea69b0444c9e0f88f8c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 9 Nov 2023 01:30:14 +0100 Subject: [PATCH 19/56] Cleanup + add very simple AYON uri resolving functionality --- .../houdini/plugins/publish/extract_usd.py | 27 +-- openpype/lib/usdlib.py | 161 +++++++++++++++++- 2 files changed, 156 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index e126457696b..fdafe5251fa 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -1,4 +1,3 @@ -import copy import os import pyblish.api @@ -58,9 +57,10 @@ def get_source_to_publish_paths(self, context): file to publish file so this can be used on the USD save to remap asset layer paths on publish via AyonRemapPaths output processor""" + from openpype.lib.usdlib import get_instance_expected_output_path + mapping = {} for instance in context: - if not instance.data.get("active", True): continue @@ -95,26 +95,3 @@ def get_source_paths(instance, repre): else: # Single file return [os.path.join(staging, files)] - - -def get_instance_expected_output_path(instance, representation_name, ext=None): - """Return expected publish filepath for representation in instance""" - - if ext is None: - ext = representation_name - - context = instance.context - anatomy = context.data["anatomy"] - path_template_obj = anatomy.templates_obj["publish"]["path"] - template_data = copy.deepcopy(instance.data["anatomyData"]) - template_data.update({ - "ext": ext, - "representation": representation_name, - "subset": instance.data["subset"], - "asset": instance.data["asset"], - "variant": instance.data.get("variant"), - "version": instance.data["version"] - }) - - template_filled = path_template_obj.format_strict(template_data) - return os.path.normpath(template_filled) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index bfb8135ecc6..fa66af9c8dd 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -1,5 +1,6 @@ import dataclasses import os +import copy import re import logging from urllib.parse import urlparse, parse_qs @@ -530,14 +531,14 @@ def _reference(path): return stage -def get_representation_path_by_names( +def get_representation_by_names( project_name, asset_name, subset_name, version_name, 
representation_name, ): - """Get (latest) filepath for representation for asset and subset. + """Get representation entity for asset and subset. If version_name is "hero" then return the hero version If version_name is "latest" then return the latest version @@ -577,12 +578,35 @@ def get_representation_path_by_names( if not version: return - representation = get_representation_by_name(project_name, - representation_name, - version_id=version["_id"]) + return get_representation_by_name(project_name, + representation_name, + version_id=version["_id"]) + + +def get_representation_path_by_names( + project_name, + asset_name, + subset_name, + version_name, + representation_name): + """Get (latest) filepath for representation for asset and subset. + + See `get_representation_by_names` for more details. + + Returns: + str: The representation path if the representation exists. - path = get_representation_path(representation) - return path.replace("\\", "/") + """ + representation = get_representation_by_names( + project_name, + asset_name, + subset_name, + version_name, + representation_name + ) + if representation: + path = get_representation_path(representation) + return path.replace("\\", "/") def parse_ayon_uri(uri): @@ -632,6 +656,129 @@ def parse_ayon_uri(uri): return result +def construct_ayon_uri( + project_name, + asset_name, + product, + version, + representation_name +): + if not (isinstance(version, int) or version in {"latest", "hero"}): + raise ValueError( + "Version must either be integer, 'latest' or 'hero'. " + "Got: {}".format(version) + ) + return ( + "ayon+entity://{project}/{asset}?product={product}&version={version}" + "&representation={representation}".format( + project=project_name, + asset=asset_name, + product=product, + version=version, + representation=representation_name + ) + ) + + +def get_representation_path_by_ayon_uri( + uri, + context=None +): + """Return resolved path for Ayon entity URI. + + Allow resolving 'latest' paths from a publishing context's instances + as if they will exist after publishing without them being integrated yet. + + Args: + uri (str): Ayon entity URI. See `parse_ayon_uri` + context (pyblish.api.Context): Publishing context. + + Returns: + Union[str, None]: Returns the path if it could be resolved + + """ + query = parse_ayon_uri(uri) + + if context is not None and context.data["projectName"] == query["project"]: + # Search first in publish context to allow resolving latest versions + # from e.g. 
the current publish session if the context is provided + if query["version"] == "hero": + raise NotImplementedError( + "Hero version resolving not implemented from context" + ) + + specific_version = isinstance(query["version"], int) + for instance in context: + if instance.data.get("asset") != query["asset"]: + continue + + if instance.data.get("subset") != query["product"]: + continue + + # Only consider if the instance has a representation by + # that name + representations = instance.data.get("representations", []) + if not any(representation.get("name") == query["representation"] + for representation in representations): + continue + + return get_instance_expected_output_path( + instance, + representation_name=query["representation"], + version=query["version"] if specific_version else None + ) + + return get_representation_path_by_names( + project_name=query["project"], + asset_name=query["asset"], + subset_name=query["product"], + version_name=query["version"], + representation_name=query["representation"], + ) + + +def get_instance_expected_output_path(instance, representation_name, + ext=None, version=None): + """Return expected publish filepath for representation in instance + + This does not validate whether the instance has any representation by the + given name, extension and/or version. + + Arguments: + instance (pyblish.api.Instance): publish instance + representation_name (str): representation name + ext (Optional[str]): extension for the file, useful if `name` += `ext` + version (Optional[int]): if provided, force it to format to this + particular version. + representation_name (str): representation name + + Returns: + str: Resolved path + + """ + + if ext is None: + ext = representation_name + if version is None: + version = instance.data["version"] + + context = instance.context + anatomy = context.data["anatomy"] + path_template_obj = anatomy.templates_obj["publish"]["path"] + template_data = copy.deepcopy(instance.data["anatomyData"]) + template_data.update({ + "ext": ext, + "representation": representation_name, + "subset": instance.data["subset"], + "asset": instance.data["asset"], + "variant": instance.data.get("variant"), + "version": version + }) + + template_filled = path_template_obj.format_strict(template_data) + return os.path.normpath(template_filled) + + def set_layer_defaults(layer, up_axis=UsdGeom.Tokens.y, meters_per_unit=1.0, From 293cf1b29e6844240cfbfbb21252551063d7808d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 9 Nov 2023 01:36:34 +0100 Subject: [PATCH 20/56] Remove debug print --- openpype/hosts/houdini/plugins/publish/collect_usd_layers.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index 8d8a2501467..fa3c9f018e3 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -76,7 +76,6 @@ def process(self, instance): info = layer.rootPrims.get("HoudiniLayerInfo") save_path = info.customData.get("HoudiniSavePath") creator = info.customData.get("HoudiniCreatorNode") - self.log.info(info.customData) self.log.debug("Found configured save path: " "%s -> %s", layer, save_path) From cac9c0e51342738d15d38b808505c91791735111 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 9 Nov 2023 02:16:23 +0100 Subject: [PATCH 21/56] Add simple Blender USD creator + extractor --- .../blender/plugins/create/create_usd.py | 44 ++++++++++++ 
.../blender/plugins/publish/extract_usd.py | 72 +++++++++++++++++++ 2 files changed, 116 insertions(+) create mode 100644 openpype/hosts/blender/plugins/create/create_usd.py create mode 100644 openpype/hosts/blender/plugins/publish/extract_usd.py diff --git a/openpype/hosts/blender/plugins/create/create_usd.py b/openpype/hosts/blender/plugins/create/create_usd.py new file mode 100644 index 00000000000..b0a5069f376 --- /dev/null +++ b/openpype/hosts/blender/plugins/create/create_usd.py @@ -0,0 +1,44 @@ +"""Create a USD Export.""" + +import bpy + +from openpype.pipeline import get_current_task_name +from openpype.hosts.blender.api import plugin, lib, ops +from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES + + +class CreateUSD(plugin.Creator): + """Create USD Export""" + + name = "usdMain" + label = "USD" + family = "usd" + icon = "gears" + + def process(self): + """ Run the creator on Blender main thread""" + mti = ops.MainThreadItem(self._process) + ops.execute_in_main_thread(mti) + + def _process(self): + # Get Instance Container or create it if it does not exist + instances = bpy.data.collections.get(AVALON_INSTANCES) + if not instances: + instances = bpy.data.collections.new(name=AVALON_INSTANCES) + bpy.context.scene.collection.children.link(instances) + + # Create instance object + asset = self.data["asset"] + subset = self.data["subset"] + name = plugin.asset_name(asset, subset) + collection = bpy.data.collections.new(name=name) + instances.children.link(collection) + self.data['task'] = get_current_task_name() + lib.imprint(collection, self.data) + + # Add selected objects to instance + if (self.options or {}).get("useSelection"): + for obj in lib.get_selection(): + collection.objects.link(obj) + + return collection diff --git a/openpype/hosts/blender/plugins/publish/extract_usd.py b/openpype/hosts/blender/plugins/publish/extract_usd.py new file mode 100644 index 00000000000..74f6f832dd2 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/extract_usd.py @@ -0,0 +1,72 @@ +import os + +import bpy + +from openpype.pipeline import publish +from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY + + +class ExtractUSD(publish.Extractor): + """Extract as USD.""" + + label = "Extract USD" + hosts = ["blender"] + families = ["usd"] + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = f"{instance.name}.usd" + filepath = os.path.join(stagingdir, filename) + + # Perform extraction + self.log.debug("Performing extraction..") + + # Select all members to "export selected" + plugin.deselect_all() + selected = [] + asset_group = None + for obj in instance: + if isinstance(obj, bpy.types.Collection): + # TODO: instead include all children - but that's actually + # up to the Collector instead + continue + + obj.select_set(True) + selected.append(obj) + if obj.get(AVALON_PROPERTY): + asset_group = obj + + context = plugin.create_blender_context( + active=asset_group, selected=selected) + + # Export USD + bpy.ops.wm.usd_export( + context, + filepath=filepath, + selected_objects_only=True, + export_textures=False, + relative_paths=False, + export_animation=False, + export_hair=False, + export_uvmaps=True, + # TODO: add for new version of Blender (4+?) 
+ #export_mesh_colors=True, + export_normals=True, + export_materials=True, + use_instancing=True + ) + + plugin.deselect_all() + + # Add representation + representation = { + 'name': 'usd', + 'ext': 'usd', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data.setdefault("representations", []).append(representation) + self.log.debug("Extracted instance '%s' to: %s", + instance.name, representation) From 3970193698c92eb30b280af070987aca0ceae635 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 9 Nov 2023 09:09:33 +0100 Subject: [PATCH 22/56] Support short-hand `ayon://` entity URI --- openpype/lib/usdlib.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index fa66af9c8dd..34c86effbba 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -610,7 +610,7 @@ def get_representation_path_by_names( def parse_ayon_uri(uri): - """Parse ayon+entity URI into individual components. + """Parse ayon entity URI into individual components. URI specification: ayon+entity://{project}/{asset}?product={product} @@ -619,24 +619,34 @@ def parse_ayon_uri(uri): URI example: ayon+entity://test/hero?modelMain&version=2&representation=usd + However - if the netloc is `ayon://` it will by default also resolve as + `ayon+entity://` on AYON server, thus we need to support both. The shorter + `ayon://` is preferred for user readability. + Example: >>> parse_ayon_uri( - >>> "ayon+entity://test/villain?product=modelMain&version=2&representation=usd" # noqa: E501 + >>> "ayon://test/villain?product=modelMain&version=2&representation=usd" # noqa: E501 >>> ) {'project': 'test', 'asset': 'villain', 'product': 'modelMain', 'version': 1, 'representation': 'usd'} + >>> parse_ayon_uri( + >>> "ayon+entity://project/asset?product=renderMain&version=3&representation=exr" # noqa: E501 + >>> ) + {'project': 'project', 'asset': 'asset', + 'product': 'renderMain', 'version': 3, + 'representation': 'exr'} Returns: dict: The individual keys of the ayon entity query. """ - if not uri.startswith("ayon+entity://"): + if not (uri.startswith("ayon+entity://") or uri.startswith("ayon://")): return parsed = urlparse(uri) - if parsed.scheme != "ayon+entity": + if parsed.scheme not in {"ayon+entity", "ayon"}: return result = { @@ -663,13 +673,19 @@ def construct_ayon_uri( version, representation_name ): + """Construct Ayon entity URI from its components + + Returns: + str: Ayon Entity URI to query entity path. + Also works with `get_representation_path_by_ayon_uri` + """ if not (isinstance(version, int) or version in {"latest", "hero"}): raise ValueError( "Version must either be integer, 'latest' or 'hero'. 
" "Got: {}".format(version) ) return ( - "ayon+entity://{project}/{asset}?product={product}&version={version}" + "ayon://{project}/{asset}?product={product}&version={version}" "&representation={representation}".format( project=project_name, asset=asset_name, From be65cf1b2f84e52c86f13f0377a727b54aa363af Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 10 Nov 2023 18:13:01 +0100 Subject: [PATCH 23/56] Cleanup usdlib + start adding tests + improve docstrings --- .../plugins/publish/extract_usd_bootstrap.py | 59 +-- openpype/lib/usdlib.py | 411 +++++++++--------- tests/unit/openpype/lib/test_usdlib.py | 150 +++++++ 3 files changed, 382 insertions(+), 238 deletions(-) create mode 100644 tests/unit/openpype/lib/test_usdlib.py diff --git a/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py b/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py index 6095893bdac..d6b071127f1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py +++ b/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py @@ -1,32 +1,17 @@ import os -import copy import operator import pyblish.api from openpype.pipeline import publish from openpype.pipeline.create import get_subset_name +from openpype.lib.usdlib import get_instance_expected_output_path - -def get_instance_expected_output_path(instance, representation_name, ext=None): - - if ext is None: - ext = representation_name - - context = instance.context - anatomy = context.data["anatomy"] - path_template_obj = anatomy.templates_obj["publish"]["path"] - template_data = copy.deepcopy(instance.data["anatomyData"]) - template_data.update({ - "ext": ext, - "representation": representation_name, - "subset": instance.data["subset"], - "asset": instance.data["asset"], - "variant": instance.data["variant"], - "version": instance.data["version"] - }) - - template_filled = path_template_obj.format_strict(template_data) - return os.path.normpath(template_filled) +from openpype.pipeline import get_representation_path +from openpype.client import ( + get_subsets, + get_last_versions, + get_representations +) class ExtractBootstrapUSD(publish.Extractor): @@ -74,18 +59,6 @@ def process(self, instance): usdlib.create_shot(filepath, layers=layers) - elif subset == "usdModel": - variant_subsets = instance.data["variantSubsets"] - usdlib.create_model(filepath, - asset=instance.data["asset"], - variant_subsets=variant_subsets) - - elif subset == "usdShade": - variant_subsets = instance.data["variantSubsets"] - usdlib.create_shade(filepath, - asset=instance.data["asset"], - variant_subsets=variant_subsets) - elif subset in usdlib.PIPELINE["asset"]: # Asset layer # Generate the stub files with root primitive @@ -118,7 +91,18 @@ def add_relative_file(self, instance, source, staging_dir=None): Unlike files in representations, the file will not be renamed and will be ingested one-to-one into the publish directory. + Note: This file does not get registered as a representation, because + representation files always get renamed by the publish template + system. These files get included in the `representation["files"]` + info with all the representations of the version - and thus will + appear multiple times per version. + """ + # TODO: It can be nice to force a particular representation no matter + # what to adhere to a certain filename on integration because e.g. a + # particular file format relies on that file named like that or alike + # and still allow regular registering with the database as a file of + # the version. 
As such we might want to tweak integrator logic? if staging_dir is None: staging_dir = self.staging_dir(instance) publish_dir = instance.data["publishDir"] @@ -248,13 +232,6 @@ def get_existing_representation_path_per_subset(self, subsets, instance): project_name = context.data["projectName"] asset_entity = instance.data["assetEntity"] - from openpype.pipeline import get_representation_path - from openpype.client import ( - get_subsets, - get_last_versions, - get_representations - ) - def to_id(entity): return entity["_id"] diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 34c86effbba..cdaa4b62155 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -1,7 +1,6 @@ import dataclasses import os import copy -import re import logging from urllib.parse import urlparse, parse_qs from collections import namedtuple @@ -21,7 +20,6 @@ get_last_version_by_subset_id ) from openpype.pipeline import ( - get_current_project_name, get_representation_path ) @@ -302,134 +300,7 @@ def create_shot(filepath, layers, create_layers=False): return created_layers -def create_model(filename, asset, variant_subsets): - """Create a USD Model file. - - For each of the variation paths it will payload the path and set its - relevant variation name. - - """ - - project_name = get_current_project_name() - asset_doc = get_asset_by_name(project_name, asset) - assert asset_doc, "Asset not found: %s" % asset - - variants = [] - for subset in variant_subsets: - prefix = "usdModel" - if subset.startswith(prefix): - # Strip off `usdModel_` - variant = subset[len(prefix):] - else: - raise ValueError( - "Model subsets must start " "with usdModel: %s" % subset - ) - - path = get_representation_path_by_names( - project_name=project_name, - asset_name=asset_doc, - subset_name=subset, - version_name="latest", - representation_name="usd" - ) - if path: - variants.append((variant, path)) - - stage = _create_variants_file( - filename, - variants=variants, - variantset="model", - variant_prim="/root", - reference_prim="/root/geo", - as_payload=True, - ) - - UsdGeom.SetStageMetersPerUnit(stage, 1) - UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) - - # modelAPI = Usd.ModelAPI(root_prim) - # modelAPI.SetKind(Kind.Tokens.component) - - # See http://openusd.org/docs/api/class_usd_model_a_p_i.html#details - # for more on assetInfo - # modelAPI.SetAssetName(asset) - # modelAPI.SetAssetIdentifier(asset) - - stage.GetRootLayer().Save() - - -def create_shade(filename, asset, variant_subsets): - """Create a master USD shade file for an asset. - - For each available model variation this should generate a reference - to a `usdShade_{modelVariant}` subset. 
- - """ - - project_name = get_current_project_name() - asset_doc = get_asset_by_name(project_name, asset) - assert asset_doc, "Asset not found: %s" % asset - - variants = [] - - for subset in variant_subsets: - prefix = "usdModel" - if subset.startswith(prefix): - # Strip off `usdModel_` - variant = subset[len(prefix):] - else: - raise ValueError( - "Model subsets must start " "with usdModel: %s" % subset - ) - - shade_subset = re.sub("^usdModel", "usdShade", subset) - path = get_representation_path_by_names( - project_name=project_name, - asset_name=asset_doc, - subset_name=shade_subset, - version_name="latest", - representation_name="usd" - ) - variants.append((variant, path)) - - stage = _create_variants_file( - variants=variants, variantset="model", variant_prim="/root" - ) - stage.GetRootLayer().Export(filename, args={"format": "usda"}) - - -def create_shade_variation(filename, asset, model_variant, shade_variants): - """Create the master Shade file for a specific model variant. - - This should reference all shade variants for the specific model variant. - - """ - - project_name = get_current_project_name() - asset_doc = get_asset_by_name(project_name, asset) - assert asset_doc, "Asset not found: %s" % asset - - variants = [] - for variant in shade_variants: - subset = "usdShade_{model}_{shade}".format( - model=model_variant, shade=variant - ) - path = get_representation_path_by_names( - project_name=project_name, - asset_name=asset_doc, - subset_name=subset, - version_name="latest", - representation_name="usd" - ) - variants.append((variant, path)) - - stage = _create_variants_file( - variants=variants, variantset="shade", variant_prim="/root" - ) - stage.GetRootLayer().Export(filename, args={"format": "usda"}) - - -def _create_variants_file( +def add_variant_references_to_layer( variants, variantset, default_variant=None, @@ -437,19 +308,35 @@ def _create_variants_file( reference_prim=None, set_default_variant=True, as_payload=False, - skip_variant_on_single_file=True, + skip_variant_on_single_file=False, + layer=None ): - """Create a USD file with references to given variants and their paths. + """Add or set a prim's variants to reference specified paths in the layer. + + Note: + This does not clear any of the other opinions than replacing + `prim.referenceList.prependedItems` with the new reference. + If `as_payload=True` then this only does it for payloads and leaves + references as they were in-tact. + + Note: + If `skip_variant_on_single_file=True` it does *not* check if any + other variants do exist; it only checks whether you are currently + adding more than one since it'd be hard to find out whether previously + this was also skipped and should now if you're adding a new one + suddenly also be its original 'variant'. As such it's recommended to + keep this disabled unless you know you're not updating the file later + into the same variant set. Examples: - >>> stage = _create_variants_file("model.usd", + >>> layer = add_variant_references_to_layer("model.usd", >>> variants=[ >>> ("main", "main.usd"), >>> ("damaged", "damaged.usd"), >>> ("twisted", "twisted.usd") >>> ], >>> variantset="model") - >>> stage.rootLayer.Export("model.usd", args={"format": "usda"}) + >>> layer.Export("model.usd", args={"format": "usda"}) Arguments: variants (List[List[str, str]): List of two-tuples of variant name to @@ -457,6 +344,7 @@ def _create_variants_file( variantset (str): Name of the variant set default_variant (str): Default variant to set. If not provided the first variant will be used. 
+ variant_prim (str): Variant prim? reference_prim (str): Path to the reference prim where to add the references and variant sets. set_default_variant (bool): Whether to set the default variant. @@ -466,69 +354,222 @@ def _create_variants_file( skip_variant_on_single_file (bool): If this is enabled and only a single variant is provided then do not create the variant set but just reference that single file. + layer (Sdf.Layer): When provided operate on this layer, otherwise + create an anonymous layer in memory. Returns: Usd.Stage: The saved usd stage """ + if layer is None: + layer = Sdf.Layer.CreateAnonymous() + set_layer_defaults(layer, default_prim=variant_prim.strip("/")) - root_layer = Sdf.Layer.CreateAnonymous() - stage = Usd.Stage.Open(root_layer) - - root_prim = stage.DefinePrim(variant_prim) - stage.SetDefaultPrim(root_prim) - - def _reference(path): - """Reference/Payload path depending on function arguments""" + prim_path_to_get_variants = Sdf.Path(variant_prim) + root_prim = get_or_define_prim_spec(layer, variant_prim, "Xform") - if reference_prim: - prim = stage.DefinePrim(reference_prim) - else: - prim = root_prim - - if as_payload: - # Payload - prim.GetPayloads().AddPayload(Sdf.Payload(path)) - else: - # Reference - prim.GetReferences().AddReference(Sdf.Reference(path)) + # TODO: Define why there's a need for separate variant_prim and + # reference_prim attribute. When should they differ? Does it even work? + if not reference_prim: + reference_prim = root_prim + else: + reference_prim = get_or_define_prim_spec(layer, reference_prim, + "Xform") assert variants, "Must have variants, got: %s" % variants if skip_variant_on_single_file and len(variants) == 1: # Reference directly, no variants variant_path = variants[0][1] - _reference(variant_path) + if as_payload: + # Payload + reference_prim.payloadList.prependedItems.append( + Sdf.Payload(variant_path) + ) + else: + # Reference + reference_prim.referenceList.prependedItems.append( + Sdf.Reference(variant_path) + ) log.debug("Creating without variants due to single file only.") log.debug("Path: %s", variant_path) else: # Variants - append = Usd.ListPositionBackOfAppendList - variant_set = root_prim.GetVariantSets().AddVariantSet( - variantset, append - ) - debug_label = "Payloading" if as_payload else "Referencing" - - for variant, variant_path in variants: - + for variant, variant_filepath in variants: if default_variant is None: default_variant = variant - variant_set.AddVariant(variant, append) - variant_set.SetVariantSelection(variant) - with variant_set.GetVariantEditContext(): - _reference(variant_path) - - log.debug("%s variants.", debug_label) - log.debug("Variant: %s", variant) - log.debug("Path: %s", variant_path) + set_variant_reference(layer, + prim_path=prim_path_to_get_variants, + variant_selections=[[variantset, variant]], + path=variant_filepath, + as_payload=as_payload) if set_default_variant and default_variant is not None: - variant_set.SetVariantSelection(default_variant) + # Set default variant selection + root_prim.variantSelections[variantset] = default_variant - return stage + return layer + + +def set_layer_defaults(layer, + up_axis=UsdGeom.Tokens.y, + meters_per_unit=1.0, + default_prim=None): + """Set some default metadata for the SdfLayer. + + Arguments: + layer (Sdf.Layer): The layer to set default for via Sdf API. 
+ up_axis (UsdGeom.Token); Which axis is the up-axis + meters_per_unit (float): Meters per unit + default_prim (Optional[str]: Default prim name + + """ + # Set default prim + if default_prim is not None: + layer.defaultPrim = default_prim + + # Let viewing applications know how to orient a free camera properly + # Similar to: UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + layer.pseudoRoot.SetInfo(UsdGeom.Tokens.upAxis, up_axis) + + # Set meters per unit + layer.pseudoRoot.SetInfo(UsdGeom.Tokens.metersPerUnit, + float(meters_per_unit)) + + +def get_or_define_prim_spec(layer, prim_path, type_name): + """Get or create a PrimSpec in the layer. + + Note: + This creates a Sdf.PrimSpec with Sdf.SpecifierDef but if the PrimSpec + already exists this will not force it to be a Sdf.SpecifierDef and + it may remain what it was, e.g. Sdf.SpecifierOver + + Args: + layer (Sdf.Layer): The layer to create it in. + prim_path (Any[str, Sdf.Path]): Prim path to create. + type_name (str): Type name for the PrimSpec. + This will only be set if the prim does not exist in the layer + yet. It does not update type for an existing prim. + + Returns: + Sdf.PrimSpec: The PrimSpec in the layer for the given prim path. + + """ + prim_spec = layer.GetPrimAtPath(prim_path) + if prim_spec: + return prim_spec + + prim_spec = Sdf.CreatePrimInLayer(layer, prim_path) + prim_spec.specifier = Sdf.SpecifierDef + prim_spec.typeName = type_name + return prim_spec + + +def variant_nested_prim_path(prim_path, variant_selections): + """Return the Sdf.Path path for a nested variant selection at prim path. + + Examples: + >>> prim_path = Sdf.Path("/asset") + >>> variant_spec = variant_nested_prim_path( + >>> prim_path, + >>> variant_selections=[["model", "main"], ["look", "main"]] + >>> ) + >>> variant_spec.path + + Args: + prim_path (Sdf.PrimPath): The prim path to create the spec in + variant_selections (List[List[str, str]]): A list of variant set names + and variant names to get the prim spec in. + + Returns: + Sdf.Path: The variant prim path + + """ + variant_prim_path = Sdf.Path(prim_path) + for variant_set_name, variant_name in variant_selections: + variant_prim_path = variant_prim_path.AppendVariantSelection( + variant_set_name, variant_name) + return variant_prim_path + + +def set_variant_reference(sdf_layer, prim_path, variant_selections, path, + as_payload=False, + append=True): + """Get or define variant selection at prim path and add a reference + + If the Variant Prim already exists the prepended references are replaced + with a reference to `path`, it is overridden. + + Args: + sdf_layer (Sdf.Layer): Layer to operate in. + prim_path (Any[str, Sdf.Path]): Prim path to add variant to. + variant_selections (List[List[str, str]]): A list of variant set names + and variant names to get the prim spec in. + path (str): Path to reference or payload + as_payload (bool): When enabled it will generate a payload instead of + a reference. Defaults to False. + append (bool): When enabled it will append the reference of payload + to prepended items, otherwise it will replace it. 
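+
+    Example (illustrative sketch mirroring the unit tests; prim path and
+    file name are placeholders):
+        >>> layer = Sdf.Layer.CreateAnonymous()
+        >>> set_variant_reference(layer,
+        >>>                       prim_path="/asset",
+        >>>                       variant_selections=[["model", "main"]],
+        >>>                       path="./modelMain.usd")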
+ + Returns: + S + + """ + prim_path = Sdf.Path(prim_path) + # TODO: inherit type from outside of variants if it has it + get_or_define_prim_spec(sdf_layer, prim_path, "Xform") + variant_prim_path = variant_nested_prim_path(prim_path, variant_selections) + variant_prim = get_or_define_prim_spec(sdf_layer, + variant_prim_path, + "Xform") + # Replace the prepended references or payloads + if as_payload: + # Payload + if append: + variant_prim.payloadList.prependedItems.append( + Sdf.Payload(assetPath=path) + ) + else: + variant_prim.payloadList.prependedItems.append( + Sdf.Payload(assetPath=path) + ) + else: + # Reference + if append: + variant_prim.referenceList.prependedItems[:] = [ + Sdf.Reference(assetPath=path) + ] + else: + variant_prim.payloadList.prependedItems[:] = [ + Sdf.Payload(assetPath=path) + ] + + return variant_prim + + +def get_sdf_format_args(path): + """Return SDF_FORMAT_ARGS parsed to `dict`""" + if ":SDF_FORMAT_ARGS:" not in path: + return {} + + format_args_str = path.split(":SDF_FORMAT_ARGS:", 1)[-1] + args = {} + for arg_str in format_args_str.split(":"): + if "=" not in arg_str: + # ill-formed argument key=value + continue + + key, value = arg_str.split("=", 1) + args[key] = value + return args + +# TODO: Functions below are not necessarily USD functions and hence should not +# be in this file. Refactor by moving them elsewhere +# region representations and Ayon uris def get_representation_by_names( @@ -794,28 +835,4 @@ def get_instance_expected_output_path(instance, representation_name, template_filled = path_template_obj.format_strict(template_data) return os.path.normpath(template_filled) - -def set_layer_defaults(layer, - up_axis=UsdGeom.Tokens.y, - meters_per_unit=1.0, - default_prim=None): - """Set some default metadata for the SdfLayer. - - Arguments: - layer (Sdf.Layer): The layer to set default for via Sdf API. 
- up_axis (UsdGeom.Token); Which axis is the up-axis - meters_per_unit (float): Meters per unit - default_prim (Optional[str]: Default prim name - - """ - # Set default prim - if default_prim is not None: - layer.defaultPrim = default_prim - - # Let viewing applications know how to orient a free camera properly - # Similar to: UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) - layer.pseudoRoot.SetInfo(UsdGeom.Tokens.upAxis, up_axis) - - # Set meters per unit - layer.pseudoRoot.SetInfo(UsdGeom.Tokens.metersPerUnit, - float(meters_per_unit)) +# endregion diff --git a/tests/unit/openpype/lib/test_usdlib.py b/tests/unit/openpype/lib/test_usdlib.py new file mode 100644 index 00000000000..f7ec9c17540 --- /dev/null +++ b/tests/unit/openpype/lib/test_usdlib.py @@ -0,0 +1,150 @@ +from openpype.lib import usdlib +from pxr import Sdf + + +def test_create_asset(tmp_path): + layers = usdlib.create_asset(str(tmp_path / "asset.usd"), + asset_name="test", + reference_layers=["./model.usd", + "./look.usd"]) + assert len( + layers) == 2, "Expecting two files, the asset.usd and payload.usd" + assert (tmp_path / "asset.usd").exists() + assert (tmp_path / "payload.usd").exists() + assert not (tmp_path / "model.usd").exists() + assert not (tmp_path / "look.usd").exists() + + +def test_add_contributions_to_asset(tmp_path): + """Test adding contributions on top of each other works as expected""" + asset_usd = str(tmp_path / "asset.usd") + usdlib.create_asset(asset_usd, + asset_name="test", + reference_layers=["./model.usd", + "./look.usd"]) + + layer = Sdf.Layer.OpenAsAnonymous(asset_usd) + prim_path = Sdf.Path("/test") # prim is named by `asset_name` + + path_in_variant = prim_path.AppendVariantSelection("model", "modelMain") + assert not layer.GetPrimAtPath(path_in_variant), ( + "Variant should not exist yet and thus the prim should not exist" + ) + + # Adding a variant with a single prepended reference should work + usdlib.set_variant_reference( + layer, + prim_path=prim_path, + variant_selections=[["model", "modelMain"]], + path="./modelMain.usd" + ) + + prim_in_variant = layer.GetPrimAtPath(path_in_variant) + assert prim_in_variant, "Path in variant should be defined" + references = prim_in_variant.referenceList.prependedItems[:] + assert len(references) == 1, \ + "Must have only one reference" + assert references[0].assetPath == "./modelMain.usd", \ + "Must reference ./modelMain.usd" + + # Replacing an existing variant reference should work + usdlib.set_variant_reference( + layer, + prim_path=prim_path, + variant_selections=[["model", "modelMain"]], + path="./modelMain_v2.usd" + ) + prim_in_variant = layer.GetPrimAtPath(path_in_variant) + references = prim_in_variant.referenceList.prependedItems[:] + assert len(references) == 1, \ + "Must have only one reference" + assert references[0].assetPath == "./modelMain_v2.usd", \ + "Must reference ./modelMain_v2.usd" + + # Adding multiple variants should work and should not adjust original + usdlib.set_variant_reference( + layer, + prim_path=prim_path, + variant_selections=[["model", "modelDamaged"]], + path="./modelDamaged.usd" + ) + usdlib.set_variant_reference( + layer, + prim_path=prim_path, + variant_selections=[["look", "lookMain"]], + path="./lookMain.usd", + ) + + # Validate all exist and paths are set as expected path + for variant_set_name, variant_name, expected_path in [ + ("model", "modelMain", "./modelMain_v2.usd"), + ("model", "modelDamaged", "./modelDamaged.usd"), + ("look", "lookMain", "./lookMain.usd"), + ]: + path_in_variant = 
prim_path.AppendVariantSelection(variant_set_name, + variant_name) + prim_in_variant = layer.GetPrimAtPath(path_in_variant) + references = prim_in_variant.referenceList.prependedItems[:] + assert len(references) == 1, \ + "Must have only one reference" + assert references[0].assetPath == expected_path, \ + f"Must reference {expected_path}" + + print(layer.ExportToString()) + + +def test_create_shot(tmp_path): + usdlib.create_shot(str(tmp_path / "shot.usd"), + layers=["./lighting.usd", + "./fx.usd", + "./animation.usd" + "./layout.usd"]) + assert (tmp_path / "shot.usd").exists() + assert not (tmp_path / "lighting.usd").exists() + assert not (tmp_path / "fx.usd").exists() + assert not (tmp_path / "animation.usd").exists() + assert not (tmp_path / "layout.usd").exists() + + +def test_add_variant_references_to_layer(tmp_path): + layer = usdlib.add_variant_references_to_layer(variants=[ + ("main", "./main.usd"), + ("twist", "./twist.usd"), + ("damaged", "./damaged.usd"), + ("tall", "./tall.usd"), + ], + variantset="model" + ) + + # Allow recalling with a layer provided to operate on that layer + # instead; adding more variant definitions + layer = usdlib.add_variant_references_to_layer(variants=[ + ("main", "./look_main.usd"), + ("twist", "./look_twist.usd"), + ("damaged", "./look_damaged.usd"), + ("tall", "./look_tall.usd"), + ], + variantset="look", + layer=layer + ) + + # Allow with a layer provided to operate on that layer + # instead; adding more variant names to an existing variant set + layer = usdlib.add_variant_references_to_layer(variants=[ + ("short", "./look_short.usd"), + ], + variantset="look", + layer=layer, + skip_variant_on_single_file=True + ) + + # Save + layer.Export( + str(tmp_path / "model.usd"), + args={"format": "usda"} + ) + + # Debug print generated file (pytest excludes it by default but will + # show it if the -s flag is passed) + print(layer.ExportToString()) + assert (tmp_path / "model.usd").exists() From 4bc2541dd3c909c10cc06612b54d147c83a28818 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 10 Nov 2023 20:49:43 +0100 Subject: [PATCH 24/56] Use dataclass since it's more explicit --- openpype/lib/usdlib.py | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index cdaa4b62155..d871f5cf83e 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -3,7 +3,6 @@ import copy import logging from urllib.parse import urlparse, parse_qs -from collections import namedtuple try: from pxr import Usd, UsdGeom, Sdf, Kind @@ -39,8 +38,12 @@ # directly from the publisher at that particular order. Future publishes will # then see the existing contribution and will persist adding it to future # bootstraps at that order -Contribution = namedtuple("Contribution", - ("family", "variant", "order", "step")) +@dataclasses.dataclass +class Contribution: + family: str + variant: str + order: int + step: str @dataclasses.dataclass @@ -77,7 +80,7 @@ def export(self, path=None, args=None): @classmethod def create_anonymous(cls, path, tag="LOP", anchor=None): sdf_layer = Sdf.Layer.CreateAnonymous(tag) - return cls(layer=sdf_layer, path=path, anchor=anchor) + return cls(layer=sdf_layer, path=path, anchor=anchor, tag=tag) # The predefined steps order used for bootstrapping USD Shots and Assets. 
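# Illustrative sketch of the Layer anchoring described above; the layer paths
# are placeholders (POSIX paths shown):
#
#   asset = Layer(layer=Sdf.Layer.CreateAnonymous(), path="/publish/asset.usd")
#   payload = Layer(layer=Sdf.Layer.CreateAnonymous(), path="./payload.usd",
#                   anchor=asset)
#   payload.get_full_path()  # -> "/publish/payload.usd"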
@@ -300,6 +303,24 @@ def create_shot(filepath, layers, create_layers=False): return created_layers +def update_ordered_sublayers(layer, layer_path_with_order): + """Add sublayer paths in the Sdf.Layer at given "orders" + + USD does not provide a way to set metadata per sublayer entry, but we can + 'sneak it in' by adding it as part of the file url after :SDF_FORMAT_ARGS: + There they will then just be unused args that we can parse later again + to access our data. + + Args: + layer (Sdf.Layer): Layer to add sublayers in. + layer_path_with_order (List[List[Str, int]]): + + Returns: + + """ + raise NotImplementedError("TODO") + + def add_variant_references_to_layer( variants, variantset, From b6403e4e7ab72c51423ff707e5e2eaaf0bac13ec Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 10 Nov 2023 21:01:12 +0100 Subject: [PATCH 25/56] Expand tests --- tests/unit/openpype/lib/test_usdlib.py | 31 +++++++++++++++++++++----- 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/tests/unit/openpype/lib/test_usdlib.py b/tests/unit/openpype/lib/test_usdlib.py index f7ec9c17540..62c7a59ffd4 100644 --- a/tests/unit/openpype/lib/test_usdlib.py +++ b/tests/unit/openpype/lib/test_usdlib.py @@ -3,6 +3,7 @@ def test_create_asset(tmp_path): + """Test creating the basics of an asset structure.""" layers = usdlib.create_asset(str(tmp_path / "asset.usd"), asset_name="test", reference_layers=["./model.usd", @@ -94,6 +95,7 @@ def test_add_contributions_to_asset(tmp_path): def test_create_shot(tmp_path): + """Test creating shot structure; which is just a bunch of layers""" usdlib.create_shot(str(tmp_path / "shot.usd"), layers=["./lighting.usd", "./fx.usd", @@ -107,13 +109,18 @@ def test_create_shot(tmp_path): def test_add_variant_references_to_layer(tmp_path): + """Test adding variants to a layer, replacing older ones""" + # TODO: The code doesn't error but the data should still be validated + + prim_path = "/root" layer = usdlib.add_variant_references_to_layer(variants=[ ("main", "./main.usd"), ("twist", "./twist.usd"), ("damaged", "./damaged.usd"), ("tall", "./tall.usd"), ], - variantset="model" + variantset="model", + variant_prim=prim_path ) # Allow recalling with a layer provided to operate on that layer @@ -125,7 +132,8 @@ def test_add_variant_references_to_layer(tmp_path): ("tall", "./look_tall.usd"), ], variantset="look", - layer=layer + layer=layer, + variant_prim=prim_path ) # Allow with a layer provided to operate on that layer @@ -135,16 +143,27 @@ def test_add_variant_references_to_layer(tmp_path): ], variantset="look", layer=layer, - skip_variant_on_single_file=True + set_default_variant=False, + variant_prim=prim_path ) - # Save + # Applying variants to another prim should not affect first prim + layer = usdlib.add_variant_references_to_layer(variants=[ + ("short", "./look_short.usd"), + ], + variantset="look", + layer=layer, + set_default_variant=False, + variant_prim="/other_root" + ) + + # Export layer should work layer.Export( str(tmp_path / "model.usd"), - args={"format": "usda"} + args={"format": "usda"}, ) + assert (tmp_path / "model.usd").exists() # Debug print generated file (pytest excludes it by default but will # show it if the -s flag is passed) print(layer.ExportToString()) - assert (tmp_path / "model.usd").exists() From 35e139466fbb4e51156bcf1d6dfe3eec94ba54e5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 11 Nov 2023 02:31:15 +0100 Subject: [PATCH 26/56] Tweak logging message for clarity --- openpype/pipeline/colorspace.py | 3 ++- 1 file changed, 2 insertions(+), 1 
deletion(-) diff --git a/openpype/pipeline/colorspace.py b/openpype/pipeline/colorspace.py index 9f720f6ae95..605e7612c3b 100644 --- a/openpype/pipeline/colorspace.py +++ b/openpype/pipeline/colorspace.py @@ -1096,7 +1096,8 @@ def set_colorspace_data_to_representation( # check if `file_ext` in lower case is in CachedData.allowed_exts if file_ext.lstrip(".").lower() not in CachedData.allowed_exts: log.debug( - "Extension '{}' is not in allowed extensions.".format(file_ext) + "Extension '{}' is not in allowed extensions to retrieve " + "colorspace data for, ignoring...".format(file_ext) ) return From 65ab8a1bef96bc5cb5bc6373af5fb5b4260841e4 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 11 Nov 2023 02:50:12 +0100 Subject: [PATCH 27/56] Replace old usd bootstrap with new (global) layer contribution system --- .../plugins/publish/collect_usd_bootstrap.py | 111 ---- .../houdini/plugins/publish/extract_usd.py | 4 +- .../validate_usd_layer_path_backslashes.py | 2 +- .../plugins/publish/collect_usd_bootstrap.py | 125 ----- .../plugins/publish/extract_usd_bootstrap.py | 277 ---------- .../publish/validate_instance_has_members.py | 3 +- openpype/lib/usdlib.py | 94 +++- .../extract_usd_layer_contributions.py | 489 ++++++++++++++++++ 8 files changed, 569 insertions(+), 536 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py delete mode 100644 openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py delete mode 100644 openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py create mode 100644 openpype/plugins/publish/extract_usd_layer_contributions.py diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py deleted file mode 100644 index 14a8e3c0562..00000000000 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ /dev/null @@ -1,111 +0,0 @@ -import pyblish.api - -from openpype.client import get_subset_by_name, get_asset_by_name -import openpype.lib.usdlib as usdlib - - -class CollectUsdBootstrap(pyblish.api.InstancePlugin): - """Collect special Asset/Shot bootstrap instances if those are needed. - - Some specific subsets are intended to be part of the default structure - of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset - we layer a Model and Shade USD file over each other and expose that in - a Asset USD file, ready to use. - - On the first publish of any of the components of a Asset or Shot the - missing pieces are bootstrapped and generated in the pipeline too. This - means that on the very first publish of your model the Asset USD file - will exist too. - - """ - - order = pyblish.api.CollectorOrder + 0.35 - label = "Collect USD Bootstrap" - hosts = ["houdini"] - families = ["usd", "usd.layered"] - - def process(self, instance): - - # Detect whether the current subset is a subset in a pipeline - def get_bootstrap(instance): - instance_subset = instance.data["subset"] - for name, layers in usdlib.PIPELINE.items(): - if instance_subset in set(layers): - return name # e.g. 
"asset" - break - else: - return - - bootstrap = get_bootstrap(instance) - if bootstrap: - self.add_bootstrap(instance, bootstrap) - - # Check if any of the dependencies requires a bootstrap - for dependency in instance.data.get("publishDependencies", list()): - bootstrap = get_bootstrap(dependency) - if bootstrap: - self.add_bootstrap(dependency, bootstrap) - - def add_bootstrap(self, instance, bootstrap): - - self.log.debug("Add bootstrap for: %s" % bootstrap) - - project_name = instance.context.data["projectName"] - asset = get_asset_by_name(project_name, instance.data["asset"]) - assert asset, "Asset must exist: %s" % asset - - # Check which are not about to be created and don't exist yet - required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap) - - require_all_layers = instance.data.get("requireAllLayers", False) - if require_all_layers: - # USD files load fine in usdview and Houdini even when layered or - # referenced files do not exist. So by default we don't require - # the layers to exist. - layers = usdlib.PIPELINE.get(bootstrap) - if layers: - required += list(layers) - - self.log.debug("Checking required bootstrap: %s" % required) - for subset in required: - if self._subset_exists(project_name, instance, subset, asset): - continue - - self.log.debug( - "Creating {0} USD bootstrap: {1} {2}".format( - bootstrap, asset["name"], subset - ) - ) - - new = instance.context.create_instance(subset) - new.data["subset"] = subset - new.data["label"] = "{0} ({1})".format(subset, asset["name"]) - new.data["family"] = "usd.bootstrap" - new.data["comment"] = "Automated bootstrap USD file." - new.data["publishFamilies"] = ["usd"] - - # Do not allow the user to toggle this instance - new.data["optional"] = False - - # Copy some data from the instance for which we bootstrap - for key in ["asset"]: - new.data[key] = instance.data[key] - - def _subset_exists(self, project_name, instance, subset, asset): - """Return whether subset exists in current context or in database.""" - # Allow it to be created during this publish session - context = instance.context - for inst in context: - if ( - inst.data["subset"] == subset - and inst.data["asset"] == asset["name"] - ): - return True - - # Or, if they already exist in the database we can - # skip them too. 
- if get_subset_by_name( - project_name, subset, asset["_id"], fields=["_id"] - ): - return True - return False diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index fdafe5251fa..b1353ba4953 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -14,9 +14,7 @@ class ExtractUSD(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract USD" hosts = ["houdini"] - families = ["usd", - "usdModel", - "usdSetDress"] + families = ["usd"] def process(self, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index f2c7878c4ed..f7ad09bad05 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -21,7 +21,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["usdSetDress", "usdShade", "usd", "usdrender"] + families = ["usd", "usdrender"] hosts = ["houdini"] label = "USD Layer path backslashes" optional = True diff --git a/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py deleted file mode 100644 index e69199cba65..00000000000 --- a/openpype/hosts/maya/plugins/publish/collect_usd_bootstrap.py +++ /dev/null @@ -1,125 +0,0 @@ -import pyblish.api - -from openpype.client import get_subset_by_name, get_asset_by_name -import openpype.lib.usdlib as usdlib -from openpype.pipeline.create import get_subset_name - - -class CollectUsdBootstrap(pyblish.api.InstancePlugin): - """Collect special Asset/Shot bootstrap instances if those are needed. - - Some specific subsets are intended to be part of the default structure - of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset - we layer a Model and Look USD file over each other and expose that in - a Asset USD file, ready to use. - - On the first publish of any components of an Asset or Shot the - missing pieces are bootstrapped and generated in the pipeline too. This - means that on the very first publish of your model the Asset USD file - will exist too. - - """ - - order = pyblish.api.CollectorOrder - 0.4 - label = "Collect USD Bootstrap" - hosts = ["maya"] - families = ["usd"] - - def process(self, instance): - - bootstrap = instance.data.get("usd_bootstrap") - if bootstrap: - self.log.debug("Add bootstrap for: %s" % bootstrap) - self.add_bootstrap(instance, bootstrap) - - def add_bootstrap(self, instance, bootstrap): - - project_name = instance.context.data["projectName"] - asset = get_asset_by_name(project_name, instance.data["asset"]) - assert asset, "Asset must exist: %s" % asset - - # Check which are not about to be created and don't exist yet - variants_to_create = [bootstrap] - - require_all_layers = instance.data.get("requireAllLayers", False) - if require_all_layers: - # USD files load fine in usdview and Houdini even when layered or - # referenced files do not exist. So by default we don't require - # the layers to exist. 
- contributions = usdlib.PIPELINE.get(bootstrap) - if contributions: - variants_to_create.extend( - contribution.variant for contribution in contributions - ) - - if not variants_to_create: - return - - for variant in variants_to_create: - self.log.info("USD bootstrapping usd-variant: %s", variant) - - subset = get_subset_name( - family="usd", - variant=variant.title(), - task_name=instance.data["task"], - asset_doc=asset, - project_name=project_name - ) - self.log.info(subset) - - defined = self.get_subset_in_context(instance, subset, asset) - if defined: - defined.append(instance.id) - self.log.info("defined..") - continue - - self.log.debug( - "Creating USD bootstrap: " - "{asset} > {subset}".format( - bootstrap=bootstrap, - asset=asset["name"], - subset=subset - ) - ) - - new = instance.context.create_instance(subset) - - # Define subset with - new.data["subset"] = subset - new.data["variant"] = variant - new.data["label"] = "{0} ({1})".format(subset, asset["name"]) - new.data["family"] = "usd" - new.data["families"] = ["usd", "usd.bootstrap"] - new.data["icon"] = "link" - new.data["comment"] = "Automated bootstrap USD file." - new.data["publishFamilies"] = ["usd"] - new[:] = [instance.id] - - # Do not allow the user to toggle this instance - new.data["optional"] = False - - # Copy some data from the instance for which we bootstrap - for key in ["asset", "task"]: - new.data[key] = instance.data[key] - - def get_subset_in_context(self, instance, subset, asset): - """Return whether subset exists in current context.""" - # Allow it to be created during this publish session - context = instance.context - for inst in context: - if ( - inst.data["subset"] == subset - and inst.data["asset"] == asset["name"] - ): - return inst - - # TODO: Since we don't have an asset resolver that will resolve - # 'to latest' we currently always want to push an update to the - # bootstrap explicitly - # Or, if they already exist in the database we can - # skip them too. - # if get_subset_by_name( - # project_name, subset, asset["_id"], fields=["_id"] - # ): - # return True - # return False diff --git a/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py b/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py deleted file mode 100644 index d6b071127f1..00000000000 --- a/openpype/hosts/maya/plugins/publish/extract_usd_bootstrap.py +++ /dev/null @@ -1,277 +0,0 @@ -import os -import operator - -import pyblish.api -from openpype.pipeline import publish -from openpype.pipeline.create import get_subset_name -from openpype.lib.usdlib import get_instance_expected_output_path - -from openpype.pipeline import get_representation_path -from openpype.client import ( - get_subsets, - get_last_versions, - get_representations -) - - -class ExtractBootstrapUSD(publish.Extractor): - """Extract in-memory bootstrap USD files for Assets and Shots. - - See `collect_usd_bootstrap_asset.py` for more details. 
- - """ - - order = pyblish.api.ExtractorOrder + 0.2 - label = "Bootstrap USD" - hosts = ["houdini", "maya"] - targets = ["local"] - families = ["usd.bootstrap"] - - def process(self, instance): - from openpype.lib import usdlib - - staging_dir = self.staging_dir(instance) - filename = "{subset}.usd".format(**instance.data) - filepath = os.path.join(staging_dir, filename) - self.log.debug("Bootstrap USD '%s' to '%s'" % (filename, staging_dir)) - - subset = instance.data["subset"] - if subset == "usdAsset": - # Asset - contributions = usdlib.PIPELINE["asset"] - layers = self.get_contribution_paths(contributions, instance) - created_layers = usdlib.create_asset( - filepath, - asset_name=instance.data["asset"], - reference_layers=layers - ) - - # Ignore the first layer which is the asset layer that is not - # relative to itself - created_layers = created_layers[1:] - for layer in created_layers: - self.add_relative_file(instance, layer.get_full_path()) - - elif subset == "usdShot": - # Shot - steps = usdlib.PIPELINE["shot"] - layers = self.get_contribution_paths(steps, instance) - usdlib.create_shot(filepath, - layers=layers) - - elif subset in usdlib.PIPELINE["asset"]: - # Asset layer - # Generate the stub files with root primitive - # TODO: implement - #usdlib.create_stub_usd(filepath) - raise NotImplementedError("TODO") - - elif subset in usdlib.PIPELINE["shot"]: - # Shot Layer - # Generate the stub file for an Sdf Layer - # TODO: implement - #usdlib.create_stub_usd_sdf_layer(filepath) - raise NotImplementedError("TODO") - - else: - raise RuntimeError("No bootstrap method " - "available for: %s" % subset) - - representations = instance.data.setdefault("representations", []) - representations.append({ - "name": "usd", - "ext": "usd", - "files": filename, - "stagingDir": staging_dir - }) - - def add_relative_file(self, instance, source, staging_dir=None): - """Add transfer for a relative path form staging to publish dir. - - Unlike files in representations, the file will not be renamed and - will be ingested one-to-one into the publish directory. - - Note: This file does not get registered as a representation, because - representation files always get renamed by the publish template - system. These files get included in the `representation["files"]` - info with all the representations of the version - and thus will - appear multiple times per version. - - """ - # TODO: It can be nice to force a particular representation no matter - # what to adhere to a certain filename on integration because e.g. a - # particular file format relies on that file named like that or alike - # and still allow regular registering with the database as a file of - # the version. As such we might want to tweak integrator logic? - if staging_dir is None: - staging_dir = self.staging_dir(instance) - publish_dir = instance.data["publishDir"] - - relative_path = os.path.relpath(source, staging_dir) - destination = os.path.join(publish_dir, relative_path) - destination = os.path.normpath(destination) - - transfers = instance.data.setdefault("transfers", []) - self.log.debug(f"Adding relative file {source} -> {relative_path}") - transfers.append((source, destination)) - - def get_contribution_paths(self, contributions, instance): - """Return the asset paths (filepath) for the contributions. - - If the contribution is not found in the current publish context nor - as an existing entity in the database it will be silently excluded - from the result. 
- - """ - # TODO: create paths for AYON asset resolver as AYON URIs - # TODO: Get any contributions from the last version of the instance - # so that we ensure we're always adding into the last existing - # version instead of replacing - # last_contributions = self.get_last_contributions(instance) - # for contribution in last_contributions: - # if contribution not in contributions: - # contributions.append(last_contributions) - contributions.sort(key=operator.attrgetter("order")) - - # Define subsets from family + variant - subsets = [] - for contribution in contributions: - subset = get_subset_name( - family=contribution.family, - variant=contribution.variant, - task_name=instance.data["task"], - asset_doc=instance.data["assetEntity"], - project_name=instance.context.data["projectName"] - ) - subsets.append(subset) - - # Find all subsets in the current publish session - result = self.get_representation_path_per_subset_in_publish(subsets, - instance) - - # Find last existing version for those not in current publish session - missing = [subset for subset in subsets if subset not in result] - if missing: - existing = self.get_existing_representation_path_per_subset( - missing, instance - ) - result.update(existing) - - order = {subset: index for index, subset in enumerate(subsets)} - result = { - subset: path for subset, path in sorted(result.items(), - key=lambda x: order[x[0]]) - } - - self.log.debug( - "Found subsets to contribute: {}".format(", ".join(result)) - ) - assert result, "Must have one subset to contribute at least" - return list(result.values()) - - def get_representation_path_per_subset_in_publish(self, subsets, instance): - """Get path for representations in the current publish session - - Given the input subset names compute all destination paths for - active instances in the current publish session that will be - ingested as the new versions for those publishes. - - This assumes those subset will generate a USD representation and - must already have it added in `instance.data["representations"]` - - """ - asset = instance.data["asset"] - result = {} - context = instance.context - self.log.debug(f"Looking for subsets: {subsets}") - for other_instance in context: - if other_instance is instance: - continue - - if not other_instance.data.get("active", True): - continue - - if not other_instance.data.get("publish", True): - continue - - if other_instance.data["asset"] != asset: - continue - - if other_instance.data["subset"] not in subsets: - continue - - subset = other_instance.data["subset"] - - # Make sure the instance has a `usd` representation; note that - # usually the extractors add these so we want this plug-in to - # run quite late as an extractor to ensure others have run before - if not any( - repre["name"] == "usd" for repre in - other_instance.data.get("representations", []) - ): - raise RuntimeError( - "Missing `usd` representation on instance with " - "subset {}".format(subset) - ) - - path = get_instance_expected_output_path( - other_instance, representation_name="usd" - ) - result[subset] = path - - return result - - def get_existing_representation_path_per_subset(self, subsets, instance): - """Get last version for subsets in the database - - Given the input subset names find all latest existing version in the - database and retrieve their `usd` representation paths. 
- - """ - context = instance.context - project_name = context.data["projectName"] - asset_entity = instance.data["assetEntity"] - - def to_id(entity): - return entity["_id"] - - subsets_docs = list( - get_subsets(project_name, - subset_names=subsets, - asset_ids=[asset_entity["_id"]]) - ) - if not subsets_docs: - return {} - - version_docs = list(get_last_versions( - project_name, - subset_ids=map(to_id, subsets_docs) - ).values()) - if not version_docs: - return {} - - representation_docs = list(get_representations( - project_name, - version_ids=map(to_id, version_docs), - representation_names=["usd"] - )) - if not representation_docs: - return {} - - result = {} - versions_by_id = {v["_id"]: v for v in version_docs} - subsets_by_id = {s["_id"]: s for s in subsets_docs} - for representation in representation_docs: - version_doc = versions_by_id[representation["parent"]] - subset_doc = subsets_by_id[version_doc["parent"]] - subset = subset_doc["name"] - - self.log.debug( - "Found existing subset '{}' version 'v{:03d}'".format( - subset, version_doc["name"] - )) - - path = get_representation_path(representation) - result[subset] = path - - return result diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py index 3811b732500..a3d65828715 100644 --- a/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py +++ b/openpype/hosts/maya/plugins/publish/validate_instance_has_members.py @@ -29,7 +29,8 @@ def process(self, instance): "renderlayer", "rendersetup", "mayaUsdLayer", - "usd.bootstrap"} + "usdLayer", + "usdAsset"} families = {instance.data.get("family")} families.update(instance.data.get("families", [])) if families.intersection(skip_families): diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index d871f5cf83e..430c997cfe7 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -106,7 +106,9 @@ def setup_asset_layer( asset_name, reference_layers=None, kind=Kind.Tokens.component, - define_class=True + define_class=True, + force_add_payload=False, + set_payload_path=False ): """ Adds an asset prim to the layer with the `reference_layers` added as @@ -143,6 +145,10 @@ def setup_asset_layer( kind (pxr.Kind): A USD Kind for the root asset. define_class: Define a `/__class__/{asset_name}` class which the root asset prim will inherit from. + force_add_payload (bool): Generate payload layer even if no + reference paths are set - thus generating an enmpty layer. + set_payload_path (bool): Whether to directly set the payload asset + path to `./payload.usd` or not Defaults to True. """ # Define root prim for the asset and make it the default for the stage. 
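For reference, a minimal sketch of the two new flags in use, mirroring how ExtractUSDAssetContribution.init_asset_layer calls this further down in the series (the asset name is illustrative):

    from pxr import Sdf
    from openpype.lib import usdlib

    asset_layer = Sdf.Layer.CreateAnonymous()
    created = usdlib.setup_asset_layer(
        asset_layer,
        asset_name="hero",       # illustrative asset name
        force_add_payload=True,  # generate ./payload.usd even without reference layers
        set_payload_path=True,   # point the payload arc at the relative "./payload.usd" path
    )
    payload_layer = created[0].layer  # the generated payload Sdf.Layer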
@@ -188,7 +194,7 @@ def setup_asset_layer( created_layers = [] # Add references to the asset prim - if reference_layers: + if force_add_payload or reference_layers: # Create a relative payload file to filepath through which we sublayer # the heavier payloads # Prefix with `LOP` just so so that if Houdini ROP were to save @@ -199,17 +205,23 @@ def setup_asset_layer( created_layers.append(Layer(layer=payload_layer, path="./payload.usd")) + # Add payload + if set_payload_path: + payload_identifier = "./payload.usd" + else: + payload_identifier = payload_layer.identifier + + asset_prim.payloadList.prependedItems[:] = [ + Sdf.Payload(assetPath=payload_identifier) + ] + # Add sublayers to the payload layer # Note: Sublayering is tricky because it requires that the sublayers # actually define the path at defaultPrim otherwise the payload # reference will not find the defaultPrim and turn up empty. - for ref_layer in reference_layers: - payload_layer.subLayerPaths.append(ref_layer) - - # Add payload - asset_prim.payloadList.prependedItems[:] = [ - Sdf.Payload(assetPath=payload_layer.identifier) - ] + if reference_layers: + for ref_layer in reference_layers: + payload_layer.subLayerPaths.append(ref_layer) return created_layers @@ -240,17 +252,13 @@ def create_asset( asset_name=asset_name, reference_layers=reference_layers, kind=kind, - define_class=define_class + define_class=define_class, + set_payload_path=True ) for created_layer in created_layers: created_layer.anchor = layer created_layer.export() - # Update the dependency on the base layer - sdf_layer.UpdateCompositionAssetDependency( - created_layer.identifier, created_layer.get_full_path() - ) - # Make the layer ascii - good for readability, plus the file is small log.debug("Creating asset at %s", filepath) layer.export(args={"format": "usda"}) @@ -303,7 +311,8 @@ def create_shot(filepath, layers, create_layers=False): return created_layers -def update_ordered_sublayers(layer, layer_path_with_order): +def add_ordered_sublayer(layer, contribution_path, order, layer_id, + add_sdf_arguments_metadata=True): """Add sublayer paths in the Sdf.Layer at given "orders" USD does not provide a way to set metadata per sublayer entry, but we can @@ -313,12 +322,61 @@ def update_ordered_sublayers(layer, layer_path_with_order): Args: layer (Sdf.Layer): Layer to add sublayers in. - layer_path_with_order (List[List[Str, int]]): + contribution_path (str): Path/URI to add. + order (int): Order to place the contribution in the sublayers + layer_id (str): Token that if found for an existing layer it will + replace that layer + add_sdf_arguments_metadata (bool): Add metadata into the filepath + to store the `layer_id` and `order` so ordering can be maintained + in the future as intended. Returns: """ - raise NotImplementedError("TODO") + + # Add the order with the contribution path so that for future + # contributions we can again use it to magically fit into the + # ordering. We put this in the path because sublayer paths do + # not allow customData to be stored. 
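+ # For example, with a (hypothetical) resolved publish path the stored
+ # sublayer path becomes:
+ #   /publish/usdAsset_model.usd:SDF_FORMAT_ARGS:order=100:layer_id=model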
+ # TODO: Avoid this hack to store 'order' and 'layer' metadata + # for sublayers; in USD sublayers can't hold customdata + if add_sdf_arguments_metadata: + contribution_path = ( + "{}:SDF_FORMAT_ARGS:order={}:layer_id={}".format( + contribution_path, + order, + layer_id + ) + ) + + # If the layer was already in the layers, then replace it + for index, existing_path in enumerate(layer.subLayerPaths): + args = get_sdf_format_args(existing_path) + existing_layer = args.get("layer_id") + if existing_layer == layer_id: + # Put it in the same position where it was before + # swapping it with the original + log.debug( + f"Replacing existing layer: {layer.subLayerPaths[index]} " + f"-> {contribution_path}" + ) + layer.subLayerPaths[index] = contribution_path + return + + # If other layers are ordered than place it after the last order where we + # are higher + for index, existing_path in enumerate(layer.subLayerPaths): + args = get_sdf_format_args(existing_path) + existing_order = args.get("order") + if existing_order is not None and order > int(existing_order): + log.debug(f"Inserting new layer at {index}: {contribution_path}") + layer.subLayerPaths.insert(index, contribution_path) + return + + # If not paths found with an order to put it next to + # then put the sublayer at the end + log.debug(f"Appending new layer: {contribution_path}") + layer.subLayerPaths.append(contribution_path) def add_variant_references_to_layer( diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py new file mode 100644 index 00000000000..c07bf71e60b --- /dev/null +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -0,0 +1,489 @@ +from operator import attrgetter +import dataclasses +import os + +import pyblish.api + +from openpype.lib import ( + TextDef, + BoolDef, + UISeparatorDef, + UILabelDef, + EnumDef +) +from openpype.pipeline import publish + + +from pxr import Sdf +from openpype.lib.usdlib import ( + set_variant_reference, + setup_asset_layer, + add_ordered_sublayer, + construct_ayon_uri, + get_representation_path_by_ayon_uri, + get_representation_path_by_names, + set_layer_defaults +) + + +# TODO: Avoid hardcoded ordering - might need to be set through settings? +LAYER_ORDERS = { + # asset layers + "model": 100, + "assembly": 150, + "look": 200, + "rig": 300, + # shot layers + "layout": 200, + "animation": 300, + "simulation": 400, + "fx": 500, + "lighting": 600, +} + +BUILD_INTO_LAST_VERSIONS = True + + +@dataclasses.dataclass +class Contribution: + # What are we contributing? + instance: pyblish.api.Instance # instance that contributes it + + # Where are we contributing to? 
+ layer_id: str # usually the department or task layer + target_product: str = "usdAsset" # target subset the layer should merge to + + # Variant + apply_as_variant: bool = False + variant_set_name: str = "" + variant_name: str = "" + variant_is_default: bool = False + + order: int = 0 + + +def get_instance_uri_path( + instance, + resolve=True +): + """Return path for instance's usd representation""" + context = instance.context + asset = instance.data["asset"] + subset = instance.data["subset"] + project_name = context.data["projectName"] + + # Get the layer's published path + path = construct_ayon_uri( + project_name=project_name, + asset_name=asset, + product=subset, + version="latest", + representation_name="usd" + ) + + # Resolve contribution path + # TODO: Remove this when Asset Resolver is used + if resolve: + path = get_representation_path_by_ayon_uri( + path, + # Allow also resolving live to entries from current context + context=instance.context + ) + # Ensure `None` for now is also a string + path = str(path) + + return path + + +def get_last_publish(instance, representation="usd"): + return get_representation_path_by_names( + project_name=instance.context.data["projectName"], + asset_name=instance.data["asset"], + subset_name=instance.data["subset"], + version_name="latest", + representation_name=representation + ) + + +def add_representation(instance, name, + files, staging_dir, ext=None, + output_name=None): + """Add a representation to publish and integrate. + + A representation must exist of either a single file or a + single file sequence. It can *not* contain multiple files. + + For the integration to succeed the instance must provide the context + for asset, frame range, etc. even though the representation can + override some parts of it. + + Arguments: + instance (pyblish.api.Instance): Publish instance + name (str): The representation name + ext (Optional[str]): Explicit extension for the output + output_name (Optional[str]): Output name suffix for the + destination file to ensure the file is unique if + multiple representations share the same extension. + + Returns: + dict: Representation data for integration. + + """ + if ext is None: + # TODO: Use filename + ext = name + + representation = { + "name": name, + "ext": ext, + "stagingDir": staging_dir, + "files": files + } + if output_name: + representation["outputName"] = output_name + + instance.data.setdefault("representations", []).append(representation) + return representation + + +class CollectUSDLayerContributions(pyblish.api.InstancePlugin, + publish.OpenPypePyblishPluginMixin): + """Collect the USD Layer Contributions and create dependent instances. 
+ + Our contributions go to the layer + + Instance representation -> Department Layer -> Asset + + So that for example: + modelMain --> variant 'main' in model.usd -> asset.usd + modelDamaged --> variant 'damaged' in model.usd -> asset.usd + + """ + + order = pyblish.api.CollectorOrder + 0.25 + label = "Collect USD Layer Contributions (Asset/Shot)" + families = ["usd"] + + def process(self, instance): + + attr_values = self.get_attr_values_from_data(instance.data) + if not attr_values.get("contribution_enabled"): + return + + instance.data["subsetGroup"] = ( + instance.data.get("subsetGroup") or "USD Layer" + ) + + # Allow formatting in variant set name and variant name + data = instance.data.copy() + data["layer"] = attr_values["contribution_layer"] + for key in [ + "contribution_variant_set_name", + "contribution_variant" + ]: + attr_values[key] = attr_values[key].format(**data) + + # Define contribution + order = LAYER_ORDERS.get(attr_values["contribution_layer"], 0) + contribution = Contribution( + instance=instance, + layer_id=attr_values["contribution_layer"], + target_product=attr_values["contribution_asset_subset"], + apply_as_variant=attr_values["contribution_apply_as_variant"], + variant_set_name=attr_values["contribution_variant_set_name"], + variant_name=attr_values["contribution_variant"], + variant_is_default=attr_values["contribution_variant_is_default"], + order=order + ) + asset_subset = contribution.target_product + layer_subset = "{}_{}".format(asset_subset, contribution.layer_id) + + # Layer contribution instance + layer_instance = self.get_or_create_instance( + subset=layer_subset, + variant=contribution.layer_id, + source_instance=instance, + families=["usd", "usdLayer"], + ) + layer_instance.data.setdefault("usd_contributions", []).append( + contribution + ) + layer_instance.data["usd_layer_id"] = contribution.layer_id + layer_instance.data["usd_layer_order"] = contribution.order + + layer_instance.data["subsetGroup"] = ( + instance.data.get("subsetGroup") or "USD Layer" + ) + + # Asset/Shot contribution instance + self.get_or_create_instance( + subset=asset_subset, + variant=asset_subset, + source_instance=layer_instance, + families=["usd", "usdAsset"], + ) + + self.log.info( + f"Contributing {instance.data['subset']} to " + f"{layer_subset} -> {asset_subset}" + ) + + def find_instance(self, context, data, ignore_instance): + for instance in context: + if instance is ignore_instance: + continue + + if all(instance.data.get(key) == value + for key, value in data.items()): + return instance + + def get_or_create_instance(self, subset, variant, source_instance, families): + + # Potentially the instance already exists due to multiple instances + # contributing to the same layer or asset - so we first check for + # existence + context = source_instance.context + + # Required matching vars + data = { + "asset": source_instance.data["asset"], + "task": source_instance.data.get("task"), + "subset": subset, + "variant": variant, + "families": families + } + existing_instance = self.find_instance(context, data, + ignore_instance=source_instance) + if existing_instance: + existing_instance.append(source_instance.id) + existing_instance.data["source_instances"].append(source_instance) + return existing_instance + + # Otherwise create the instance + new_instance = context.create_instance(name=subset) + new_instance.data.update(data) + + new_instance.data["label"] = ( + "{0} ({1})".format(subset, new_instance.data["asset"]) + ) + new_instance.data["family"] = "usd" + 
new_instance.data["icon"] = "link" + new_instance.data["comment"] = "Automated bootstrap USD file." + new_instance.append(source_instance.id) + new_instance.data["source_instances"] = [source_instance] + + return new_instance + + @classmethod + def get_attribute_defs(cls): + + return [ + UISeparatorDef("usd_container_settings1"), + UILabelDef(label="USD Container"), + BoolDef("contribution_enabled", + label="Enable", + default=True), + TextDef("contribution_asset_subset", + label="USD Asset subset", + default="usdAsset"), + + # Asset layer, e.g. model.usd, look.usd, rig.usd + EnumDef("contribution_layer", + label="Department layer", + tooltip="The layer the contribution should be made to in " + "the usd asset.", + items=list(LAYER_ORDERS.keys()), + default="model"), + BoolDef("contribution_apply_as_variant", + label="Add as variant", + default=True), + TextDef("contribution_variant_set_name", + label="Variant Set Name", + default="{layer}"), + TextDef("contribution_variant", + label="Variant Name", + default="{variant}"), + BoolDef("contribution_variant_is_default", + label="Set as default variant selection", + default=False), + UISeparatorDef("usd_container_settings3"), + ] + + +class ExtractUSDLayerContribution(publish.Extractor): + + families = ["usdLayer"] + order = pyblish.api.ExtractorOrder + 0.45 + + def process(self, instance): + from pxr import Sdf + + asset = instance.data["asset"] + product = instance.data["subset"] + self.log.debug(f"Building layer: {asset} > {product}") + + path = get_last_publish(instance) + if path and BUILD_INTO_LAST_VERSIONS: + sdf_layer = Sdf.Layer.OpenAsAnonymous(path) + default_prim = sdf_layer.defaultPrim + else: + default_prim = asset + sdf_layer = Sdf.Layer.CreateAnonymous() + set_layer_defaults(sdf_layer, default_prim=default_prim) + + contributions = instance.data.get("usd_contributions", []) + for contribution in sorted(contributions, key=attrgetter("order")): + path = get_instance_uri_path(contribution.instance) + if contribution.apply_as_variant: + # Add contribution as variants to their layer subsets + self.log.debug("Adding variant") + prim_path = f"/{default_prim}" + variant_set_name = contribution.variant_set_name + variant_name = contribution.variant_name + set_variant_reference( + sdf_layer, + prim_path=prim_path, + variant_selections=[(variant_set_name, variant_name)], + path=path + ) + prim = sdf_layer.GetPrimAtPath(prim_path) + + # Set default variant selection + if contribution.variant_is_default or \ + variant_set_name not in prim.variantSelections: + prim.variantSelections[variant_set_name] = variant_name + + else: + # Sublayer source file + self.log.debug("Adding sublayer") + sdf_layer.subLayerPaths.append(path) + + # Save the file + staging_dir = self.staging_dir(instance) + filename = f"{instance.name}.usd" + filepath = os.path.join(staging_dir, filename) + sdf_layer.Export(filepath, args={"format": "usda"}) + + add_representation( + instance, + name="usd", + files=filename, + staging_dir=staging_dir + ) + + +class ExtractUSDAssetContribution(publish.Extractor): + + families = ["usdAsset"] + order = ExtractUSDLayerContribution.order + 0.01 + + def process(self, instance): + from pxr import Sdf + + asset = instance.data["asset"] + subset = instance.data["subset"] + self.log.debug(f"Building asset: {asset} > {subset}") + + # Contribute layers to asset + # Use existing asset and add to it, or initialize a new asset layer + path = get_last_publish(instance) + payload_layer = None + if path and BUILD_INTO_LAST_VERSIONS: + # If there's 
a payload file, put it in the payload instead + folder = os.path.dirname(path) + payload_path = os.path.join(folder, "payload.usd") + if os.path.exists(payload_path): + payload_layer = Sdf.Layer.OpenAsAnonymous(payload_path) + + asset_layer = Sdf.Layer.OpenAsAnonymous(path) + else: + asset_layer, payload_layer = self.init_asset_layer(asset_name=asset) + + target_layer = payload_layer if payload_layer else asset_layer + + # Get unique layer instances (remove duplicate entries) + processed_ids = set() + layer_instances = [] + for layer_inst in instance.data["source_instances"]: + if layer_inst.id in processed_ids: + continue + layer_instances.append(layer_inst) + processed_ids.add(layer_inst.id) + + # Insert the layer in contributions order + def sort_by_order(instance): + return instance.data["usd_layer_order"] + + for layer_instance in sorted(layer_instances, + key=sort_by_order, + reverse=True): + + layer_id = layer_instance.data["usd_layer_id"] + order = layer_instance.data["usd_layer_order"] + + path = get_instance_uri_path(instance=layer_instance) + add_ordered_sublayer(target_layer, + contribution_path=path, + order=order, + layer_id=layer_id, + # Add the sdf argument metadata which allows + # us to later detect whether another path + # has the same layer id, so we can replace it + # it. + add_sdf_arguments_metadata=True) + + # Save the file + staging_dir = self.staging_dir(instance) + filename = f"{instance.name}.usd" + filepath = os.path.join(staging_dir, filename) + asset_layer.Export(filepath, args={"format": "usda"}) + + add_representation( + instance, + name="usd", + files=filename, + staging_dir=staging_dir + ) + + if payload_layer: + payload_path = os.path.join(staging_dir, "payload.usd") + payload_layer.Export(payload_path, args={"format": "usda"}) + self.add_relative_file(instance, payload_path) + + def init_asset_layer(self, asset_name): + asset_layer = Sdf.Layer.CreateAnonymous() + created_layers = setup_asset_layer(asset_layer, asset_name, + force_add_payload=True, + set_payload_path=True) + payload_layer = created_layers[0].layer + return asset_layer, payload_layer + + def add_relative_file(self, instance, source, staging_dir=None): + """Add transfer for a relative path form staging to publish dir. + + Unlike files in representations, the file will not be renamed and + will be ingested one-to-one into the publish directory. + + Note: This file does not get registered as a representation, because + representation files always get renamed by the publish template + system. These files get included in the `representation["files"]` + info with all the representations of the version - and thus will + appear multiple times per version. + + """ + # TODO: It can be nice to force a particular representation no matter + # what to adhere to a certain filename on integration because e.g. a + # particular file format relies on that file named like that or alike + # and still allow regular registering with the database as a file of + # the version. As such we might want to tweak integrator logic? 
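+ # For example, the payload.usd exported next to the asset layer is
+ # copied into the publish dir under that same relative filename.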
+ if staging_dir is None: + staging_dir = self.staging_dir(instance) + publish_dir = instance.data["publishDir"] + + relative_path = os.path.relpath(source, staging_dir) + destination = os.path.join(publish_dir, relative_path) + destination = os.path.normpath(destination) + + transfers = instance.data.setdefault("transfers", []) + self.log.debug(f"Adding relative file {source} -> {relative_path}") + transfers.append((source, destination)) From c95fb7d0261d465587548fbfaef9bd844f0ea5ad Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 11 Nov 2023 02:50:35 +0100 Subject: [PATCH 28/56] Add shading mode option, default to exporting no shaders --- openpype/hosts/maya/plugins/publish/extract_maya_usd.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_maya_usd.py b/openpype/hosts/maya/plugins/publish/extract_maya_usd.py index 4ee4b676bc3..ece96551be6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_maya_usd.py +++ b/openpype/hosts/maya/plugins/publish/extract_maya_usd.py @@ -170,6 +170,8 @@ def options(self): "exportRefsAsInstanceable": bool, "eulerFilter": bool, "renderableOnly": bool, + "convertMaterialsTo": str, + "shadingMode": (str, None), # optional str "jobContext": (list, None) # optional list # "worldspace": bool, } @@ -192,6 +194,8 @@ def default_options(self): "exportRefsAsInstanceable": False, "eulerFilter": True, "renderableOnly": False, + "shadingMode": "none", + "convertMaterialsTo": "none", "jobContext": None # "worldspace": False } From 5094728330827cf861cf86839eb71d88429566fe Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 11 Nov 2023 02:55:14 +0100 Subject: [PATCH 29/56] Cleanup --- openpype/lib/usdlib.py | 39 ------------------- .../extract_usd_layer_contributions.py | 11 ++++++ 2 files changed, 11 insertions(+), 39 deletions(-) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 430c997cfe7..f1442b506cf 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -25,27 +25,6 @@ log = logging.getLogger(__name__) -# A contribution defines a layer or references into a particular bootstrap. -# The idea is that contributions can be bootstrapped so, that for example -# the bootstrap of a look variant would update the look bootstrap which updates -# the asset bootstrap. The exact data structure to access and configure these -# easily is still to be defined, but we need to at least know what it targets -# (e.g. where does it go into) and in what order (which contribution is stronger?) -# Preferably the bootstrapped data (e.g. the Shot) preserves metadata about -# the contributions so that we can design a system where custom contributions -# outside of the predefined orders are possible to be managed. So that if a -# particular asset requires an extra contribution level, you can add it -# directly from the publisher at that particular order. Future publishes will -# then see the existing contribution and will persist adding it to future -# bootstraps at that order -@dataclasses.dataclass -class Contribution: - family: str - variant: str - order: int - step: str - - @dataclasses.dataclass class Layer: layer: Sdf.Layer @@ -83,24 +62,6 @@ def create_anonymous(cls, path, tag="LOP", anchor=None): return cls(layer=sdf_layer, path=path, anchor=anchor, tag=tag) -# The predefined steps order used for bootstrapping USD Shots and Assets. -# These are ordered in order from strongest to weakest opinions, like in USD. 
-PIPELINE = { - "shot": [ - Contribution(family="usd", variant="lighting", order=500, step="lighting"), - Contribution(family="usd", variant="fx", order=400, step="fx"), - Contribution(family="usd", variant="simulation", order=300, step="simulation"), - Contribution(family="usd", variant="animation", order=200, step="animation"), - Contribution(family="usd", variant="layout", order=100, step="layout"), - ], - "asset": [ - Contribution(family="usd.rig", variant="main", order=300, step="rig"), - Contribution(family="usd.look", variant="main", order=200, step="look"), - Contribution(family="usd.model", variant="main", order=100, step="model") - ], -} - - def setup_asset_layer( layer, asset_name, diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py index c07bf71e60b..cdd1d599e6e 100644 --- a/openpype/plugins/publish/extract_usd_layer_contributions.py +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -26,6 +26,17 @@ ) +# A contribution defines a contribution into a (department) layer which will +# get layered into the target product, usually the asset or shot. +# We need to at least know what it targets (e.g. where does it go into) and +# in what order (which contribution is stronger?) +# Preferably the bootstrapped data (e.g. the Shot) preserves metadata about +# the contributions so that we can design a system where custom contributions +# outside of the predefined orders are possible to be managed. So that if a +# particular asset requires an extra contribution level, you can add it +# directly from the publisher at that particular order. Future publishes will +# then see the existing contribution and will persist adding it to future +# bootstraps at that order # TODO: Avoid hardcoded ordering - might need to be set through settings? LAYER_ORDERS = { # asset layers From 26841a00fc156d341c507eb140ba1d136a8a52e9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sat, 11 Nov 2023 19:14:11 +0100 Subject: [PATCH 30/56] Add todo --- openpype/hosts/maya/plugins/load/load_into_maya_usd.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/load/load_into_maya_usd.py b/openpype/hosts/maya/plugins/load/load_into_maya_usd.py index 9597f5d81f8..08ca32c0711 100644 --- a/openpype/hosts/maya/plugins/load/load_into_maya_usd.py +++ b/openpype/hosts/maya/plugins/load/load_into_maya_usd.py @@ -112,6 +112,7 @@ def update(self, container, representation): # Update representation id # TODO: Do this in prim spec where we update reference path? + # TODO: Store this in the Sdf.Reference CustomData instead? 
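+ # (Sdf.Reference does support a customData dict, so the representation
+ # id could travel with the reference arc itself rather than the prim)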
prim.SetCustomDataByKey( "openpype:representation", str(representation["_id"]) ) From 258e72189c3c37e998911ec98decd92539e7d574 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 12 Nov 2023 21:49:19 +0100 Subject: [PATCH 31/56] Houdini: Allow defining multiple families from Creator --- openpype/hosts/houdini/api/plugin.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 72565f72116..d9ef2388ce6 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -189,6 +189,7 @@ def create(self, subset_name, instance_data, pre_create_data): instance_data["instance_node"] = instance_node.path() instance_data["instance_id"] = instance_node.path() + instance_data["families"] = self.get_publish_families() instance = CreatedInstance( self.family, subset_name, @@ -237,6 +238,7 @@ def collect_instances(self): node_path = instance.path() node_data["instance_id"] = node_path node_data["instance_node"] = node_path + node_data["families"] = self.get_publish_families() created_instance = CreatedInstance.from_existing( node_data, self @@ -262,6 +264,7 @@ def imprint(self, node, values, update=False): # from the node's path values.pop("instance_node", None) values.pop("instance_id", None) + values.pop("families", None) imprint(node, values, update=update) def remove_instances(self, instances): @@ -303,6 +306,22 @@ def customize_node_look( node.setUserData('nodeshape', shape) node.setColor(color) + def get_publish_families(self): + """Return families for the instances of this creator. + + Allow a Creator to define multiple families so that a creator can + e.g. specify `usd` and `usdrop`. + + There is no need to override this method if you only have the + primary family defined by the `family` property as that will always + be set. + + Returns: + list: families for instances of this creator + + """ + return [] + def get_network_categories(self): """Return in which network view type this creator should show. 
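With this hook the creator's publish families are injected into the instance data at create and collect time (instance_data["families"] = self.get_publish_families()) instead of being imprinted on the node, so publish plugins can filter on e.g. families = ["usdrop"]. A minimal override, mirroring what the next commit adds for the Houdini USD creator:

    def get_publish_families(self):
        # The generic 'usd' family plus the ROP-specific family that
        # publish plugins filter on
        return ["usd", "usdrop"]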
From e82f9b35f70772c6c4230bfb1129dc7c3ba0d193 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 12 Nov 2023 21:51:25 +0100 Subject: [PATCH 32/56] Houdini: Fix USD Layer Contribution --- openpype/hosts/houdini/plugins/create/create_usd.py | 3 +++ .../houdini/plugins/publish/collect_usd_layers.py | 4 ++-- .../hosts/houdini/plugins/publish/extract_usd.py | 7 +------ .../hosts/houdini/plugins/publish/validate_bypass.py | 8 +++++--- .../publish/validate_houdini_license_category.py | 2 +- .../houdini/plugins/publish/validate_no_errors.py | 7 +++++++ .../publish/validate_usd_layer_path_backslashes.py | 12 +++++++++--- .../plugins/publish/validate_usd_output_node.py | 8 +------- 8 files changed, 29 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 99818d33cf1..4c7b1f6cdc4 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -50,3 +50,6 @@ def get_network_categories(self): hou.ropNodeTypeCategory(), hou.lopNodeTypeCategory() ] + + def get_publish_families(self): + return ["usd", "usdrop"] diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index fa3c9f018e3..32fc3809619 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -52,10 +52,10 @@ def copy_instance_data(instance_src, instance_dest, attr): class CollectUsdLayers(pyblish.api.InstancePlugin): """Collect the USD Layers that have configured save paths.""" - order = pyblish.api.CollectorOrder + 0.35 + order = pyblish.api.CollectorOrder + 0.25 label = "Collect USD Layers" hosts = ["houdini"] - families = ["usd"] + families = ["usdrop"] def process(self, instance): # TODO: Replace this with a Hidden Creator so we collect these BEFORE diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index b1353ba4953..1722f309cd9 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -14,15 +14,10 @@ class ExtractUSD(publish.Extractor): order = pyblish.api.ExtractorOrder label = "Extract USD" hosts = ["houdini"] - families = ["usd"] + families = ["usdrop"] def process(self, instance): - # TODO: Clean this up - for now this is used for runtime upstream - # instances (explicit save path layers) from `collect_usd_layers.py` - if not instance.data.get("render", True): - return - ropnode = hou.node(instance.data.get("instance_node")) # Get the filename from the filename parameter diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index c10c5a2c05d..4820889f98b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -4,6 +4,7 @@ import hou + class ValidateBypassed(pyblish.api.InstancePlugin): """Validate all primitives build hierarchy from attribute when enabled. @@ -20,9 +21,10 @@ class ValidateBypassed(pyblish.api.InstancePlugin): def process(self, instance): - if len(instance) == 0: - # Ignore instances without any nodes - # e.g. 
in memory bootstrap instances + if not instance.data.get("instance_node"): + self.log.debug( + "Skipping instance without instance node: {}".format(instance) + ) return invalid = self.get_invalid(instance) diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py index f1c52f22c19..94adba8a499 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py +++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -16,7 +16,7 @@ class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["usd"] + families = ["usdrop"] hosts = ["houdini"] label = "Houdini Commercial License" diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index 6c48eae70a2..90c2f080a13 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -35,6 +35,13 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): def process(self, instance): + if not instance.data.get("instance_node"): + self.log.debug( + "Skipping 'Validate no errors' because instance " + "has no instance node: {}".format(instance) + ) + return + validate_nodes = [] if len(instance) > 0: diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index f7ad09bad05..0c6b0911be2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -2,12 +2,16 @@ import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib -from openpype.pipeline import PublishValidationError +from openpype.pipeline import ( + PublishValidationError, + publish +) import hou -class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): +class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin, + publish.OptionalPyblishPluginMixin): """Validate USD loaded paths have no backslashes. 
This is a crucial validation for HUSK USD rendering as Houdini's @@ -21,12 +25,14 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["usd", "usdrender"] + families = ["usdrop", "usdrender"] hosts = ["houdini"] label = "USD Layer path backslashes" optional = True def process(self, instance): + if not self.is_active(instance.data): + return rop = hou.node(instance.data.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index d4875471448..75a55b1d34e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -14,7 +14,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["usd"] + families = ["usdrop"] hosts = ["houdini"] label = "Validate Output Node (USD)" @@ -44,12 +44,6 @@ def get_invalid(cls, instance): return [node.path()] - # TODO: Remove early return - # This is just done so that for now the "CreatorNode" defined for - # an explicit save path layer (upstream from the RopNode) can pass - # through here, see "collect_usd_layers.py" - return - # Output node must be a Sop node. if not isinstance(output_node, hou.LopNode): cls.log.error( From b87874338086b67dd34c0a8a7ee4614685616268 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 12 Nov 2023 21:52:03 +0100 Subject: [PATCH 33/56] Cosmetics + add todo --- .../publish/extract_usd_layer_contributions.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py index cdd1d599e6e..12e003f30db 100644 --- a/openpype/plugins/publish/extract_usd_layer_contributions.py +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -61,7 +61,7 @@ class Contribution: instance: pyblish.api.Instance # instance that contributes it # Where are we contributing to? - layer_id: str # usually the department or task layer + layer_id: str # usually the department or task name target_product: str = "usdAsset" # target subset the layer should merge to # Variant @@ -171,10 +171,16 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin, """ - order = pyblish.api.CollectorOrder + 0.25 + order = pyblish.api.CollectorOrder + 0.35 label = "Collect USD Layer Contributions (Asset/Shot)" families = ["usd"] + # TODO: Currently asset and shot contributions both behave as creating + # an "asset" with payload to put the layers in; however, shot-based + # contributions don't need the payload nor the asset structure. 
We should + # separate it so both can behave independently but still share most of + # the code for easy maintenance + def process(self, instance): attr_values = self.get_attr_values_from_data(instance.data) @@ -324,6 +330,7 @@ def get_attribute_defs(cls): class ExtractUSDLayerContribution(publish.Extractor): families = ["usdLayer"] + label = "Extract USD Layer Contributions (Asset/Shot)" order = pyblish.api.ExtractorOrder + 0.45 def process(self, instance): @@ -386,6 +393,7 @@ def process(self, instance): class ExtractUSDAssetContribution(publish.Extractor): families = ["usdAsset"] + label = "Extract USD Asset/Shot Contributions" order = ExtractUSDLayerContribution.order + 0.01 def process(self, instance): From 8bc989b5a2ba8935d509bbf15a9e5652e1abbebf Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 12 Nov 2023 22:30:25 +0100 Subject: [PATCH 34/56] Fix sublayer versions not continuously appending versions --- openpype/lib/usdlib.py | 48 +++++++++++-------- .../extract_usd_layer_contributions.py | 33 +++++++++++-- 2 files changed, 55 insertions(+), 26 deletions(-) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index f1442b506cf..97bffcede4c 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -272,7 +272,7 @@ def create_shot(filepath, layers, create_layers=False): return created_layers -def add_ordered_sublayer(layer, contribution_path, order, layer_id, +def add_ordered_sublayer(layer, contribution_path, layer_id, order=None, add_sdf_arguments_metadata=True): """Add sublayer paths in the Sdf.Layer at given "orders" @@ -284,9 +284,12 @@ def add_ordered_sublayer(layer, contribution_path, order, layer_id, Args: layer (Sdf.Layer): Layer to add sublayers in. contribution_path (str): Path/URI to add. - order (int): Order to place the contribution in the sublayers layer_id (str): Token that if found for an existing layer it will - replace that layer + replace that layer. + order (Any[int, None]): Order to place the contribution in + the sublayers. When `None` no ordering is considered nor will + ordering metadata be written if `add_sdf_arguments_metadata` is + False. add_sdf_arguments_metadata (bool): Add metadata into the filepath to store the `layer_id` and `order` so ordering can be maintained in the future as intended. 
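For reference, a minimal sketch of how the reworked signature is intended to be called (paths and layer ids below are hypothetical):

    add_ordered_sublayer(layer, "/publish/usdAsset_lighting.usd", layer_id="lighting", order=600)
    add_ordered_sublayer(layer, "/publish/usdAsset_fx.usd", layer_id="fx", order=500)
    # Unordered entry: replaces an existing sublayer tagged with the same layer_id, if any
    add_ordered_sublayer(layer, "/publish/usdShot_extra.usd", layer_id="extra", order=None)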
@@ -302,13 +305,13 @@ def add_ordered_sublayer(layer, contribution_path, order, layer_id, # TODO: Avoid this hack to store 'order' and 'layer' metadata # for sublayers; in USD sublayers can't hold customdata if add_sdf_arguments_metadata: - contribution_path = ( - "{}:SDF_FORMAT_ARGS:order={}:layer_id={}".format( - contribution_path, - order, - layer_id - ) - ) + parts = [contribution_path, + # Special separator for SDF Format Args used in USD + "SDF_FORMAT_ARGS"] + parts.append("layer_id={}".format(layer_id)) + if order is not None: + parts.append("order={}".format(order)) + contribution_path = ":".join(parts) # If the layer was already in the layers, then replace it for index, existing_path in enumerate(layer.subLayerPaths): @@ -324,17 +327,20 @@ def add_ordered_sublayer(layer, contribution_path, order, layer_id, layer.subLayerPaths[index] = contribution_path return - # If other layers are ordered than place it after the last order where we - # are higher - for index, existing_path in enumerate(layer.subLayerPaths): - args = get_sdf_format_args(existing_path) - existing_order = args.get("order") - if existing_order is not None and order > int(existing_order): - log.debug(f"Inserting new layer at {index}: {contribution_path}") - layer.subLayerPaths.insert(index, contribution_path) - return - - # If not paths found with an order to put it next to + # If an order is defined and other layers are ordered than place it after + # the last order where we are higher + if order is not None: + for index, existing_path in enumerate(layer.subLayerPaths): + args = get_sdf_format_args(existing_path) + existing_order = args.get("order") + if existing_order is not None and order > int(existing_order): + log.debug( + f"Inserting new layer at {index}: {contribution_path}" + ) + layer.subLayerPaths.insert(index, contribution_path) + return + + # If no paths found with an order to put it next to # then put the sublayer at the end log.debug(f"Appending new layer: {contribution_path}") layer.subLayerPaths.append(contribution_path) diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py index 12e003f30db..1b90a95aaf2 100644 --- a/openpype/plugins/publish/extract_usd_layer_contributions.py +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -52,7 +52,7 @@ "lighting": 600, } -BUILD_INTO_LAST_VERSIONS = True +BUILD_INTO_LAST_VERSIONS = False @dataclasses.dataclass @@ -374,7 +374,18 @@ def process(self, instance): else: # Sublayer source file self.log.debug("Adding sublayer") - sdf_layer.subLayerPaths.append(path) + + # This replaces existing versions of itself so that + # republishing does not continuously add more versions of the + # same subset + subset = contribution.instance.data["subset"] + add_ordered_sublayer( + layer=sdf_layer, + contribution_path=path, + layer_id=subset, + order=None, # unordered + add_sdf_arguments_metadata=True + ) # Save the file staging_dir = self.staging_dir(instance) @@ -416,7 +427,7 @@ def process(self, instance): asset_layer = Sdf.Layer.OpenAsAnonymous(path) else: - asset_layer, payload_layer = self.init_asset_layer(asset_name=asset) + asset_layer, payload_layer = self.init_layer(asset_name=asset) target_layer = payload_layer if payload_layer else asset_layer @@ -443,8 +454,8 @@ def sort_by_order(instance): path = get_instance_uri_path(instance=layer_instance) add_ordered_sublayer(target_layer, contribution_path=path, - order=order, layer_id=layer_id, + order=order, # Add the sdf 
argument metadata which allows # us to later detect whether another path # has the same layer id, so we can replace it @@ -469,7 +480,8 @@ def sort_by_order(instance): payload_layer.Export(payload_path, args={"format": "usda"}) self.add_relative_file(instance, payload_path) - def init_asset_layer(self, asset_name): + def init_layer(self, asset_name): + """Initialize layer if no previous version exists""" asset_layer = Sdf.Layer.CreateAnonymous() created_layers = setup_asset_layer(asset_layer, asset_name, force_add_payload=True, @@ -506,3 +518,14 @@ def add_relative_file(self, instance, source, staging_dir=None): transfers = instance.data.setdefault("transfers", []) self.log.debug(f"Adding relative file {source} -> {relative_path}") transfers.append((source, destination)) + + +class ExtractShotContribution(ExtractUSDAssetContribution): + """Shot contributions go into an empty layer as sublayers""" + families = ["usdShot"] + + def init_layer(self, asset_name): + """Initialize layer if no previous version exists""" + layer = Sdf.Layer.CreateAnonymous() + set_layer_defaults(layer, default_prim=None) + return layer From af3a7f04778c5cb38f808ea485575e0f81d00fca Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 12 Nov 2023 23:14:59 +0100 Subject: [PATCH 35/56] Add tests and fix implementation for `add_ordered_sublayer` --- openpype/lib/usdlib.py | 43 +++++++++---- tests/unit/openpype/lib/test_usdlib.py | 89 ++++++++++++++++++++++++++ 2 files changed, 118 insertions(+), 14 deletions(-) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 97bffcede4c..1892791ce54 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -302,24 +302,35 @@ def add_ordered_sublayer(layer, contribution_path, layer_id, order=None, # contributions we can again use it to magically fit into the # ordering. We put this in the path because sublayer paths do # not allow customData to be stored. 
- # TODO: Avoid this hack to store 'order' and 'layer' metadata - # for sublayers; in USD sublayers can't hold customdata - if add_sdf_arguments_metadata: - parts = [contribution_path, + def _format_path(path, layer_id, order): + # TODO: Avoid this hack to store 'order' and 'layer' metadata + # for sublayers; in USD sublayers can't hold customdata + if add_sdf_arguments_metadata: + parts = [ + contribution_path, # Special separator for SDF Format Args used in USD - "SDF_FORMAT_ARGS"] - parts.append("layer_id={}".format(layer_id)) - if order is not None: - parts.append("order={}".format(order)) - contribution_path = ":".join(parts) + "SDF_FORMAT_ARGS", + "layer_id={}".format(layer_id) + ] + if order is not None: + parts.append("order={}".format(order)) + return ":".join(parts) # If the layer was already in the layers, then replace it for index, existing_path in enumerate(layer.subLayerPaths): args = get_sdf_format_args(existing_path) existing_layer = args.get("layer_id") if existing_layer == layer_id: - # Put it in the same position where it was before - # swapping it with the original + # Put it in the same position where it was before when swapping + # it with the original, also take over its order metadata + order = args.get("order") + if order is not None: + order = int(order) + else: + order = None + contribution_path = _format_path(contribution_path, + order=order, + layer_id=layer_id) log.debug( f"Replacing existing layer: {layer.subLayerPaths[index]} " f"-> {contribution_path}" @@ -327,13 +338,17 @@ def add_ordered_sublayer(layer, contribution_path, layer_id, order=None, layer.subLayerPaths[index] = contribution_path return - # If an order is defined and other layers are ordered than place it after - # the last order where we are higher + contribution_path = _format_path(contribution_path, + order=order, + layer_id=layer_id) + + # If an order is defined and other layers are ordered than place it before + # the first order where existing order is higher if order is not None: for index, existing_path in enumerate(layer.subLayerPaths): args = get_sdf_format_args(existing_path) existing_order = args.get("order") - if existing_order is not None and order > int(existing_order): + if existing_order is not None and int(existing_order) > order: log.debug( f"Inserting new layer at {index}: {contribution_path}" ) diff --git a/tests/unit/openpype/lib/test_usdlib.py b/tests/unit/openpype/lib/test_usdlib.py index 62c7a59ffd4..169c8bc3bad 100644 --- a/tests/unit/openpype/lib/test_usdlib.py +++ b/tests/unit/openpype/lib/test_usdlib.py @@ -167,3 +167,92 @@ def test_add_variant_references_to_layer(tmp_path): # Debug print generated file (pytest excludes it by default but will # show it if the -s flag is passed) print(layer.ExportToString()) + + +def test_add_ordered_sublayer(tmp_path): + """Test addinng sublayers by order and uniqueness""" + # TODO: The code doesn't error but the data should still be validated + + layer = Sdf.Layer.CreateAnonymous() + + def get_paths(layer): + paths = layer.subLayerPaths + # Remove stored metadata in string + paths = [path.split(":SDF_FORMAT_ARGS:", 1)[0] for path in paths] + return paths + + orders = [300, 500, 350, 600, 50, 150, 450] + orders_sorted = list(sorted(orders)) + for order in orders: + usdlib.add_ordered_sublayer(layer, + contribution_path=str(order), + layer_id=str(order), + order=order) + + paths = get_paths(layer) + assert paths == [str(i) for i in sorted(orders)] + assert len(paths) == len(orders) + + # This should not add a sublayer but should 
replace by `layer_id` + usdlib.add_ordered_sublayer(layer, + contribution_path="300_v2", + layer_id="300", + order=300) + + paths = get_paths(layer) + assert paths[orders_sorted.index(300)] == "300_v2" + assert len(paths) == len(orders) + + # When replacing a layer with a new 'id' the ordering is preserved from + # before; the new order is not applied. + usdlib.add_ordered_sublayer(layer, + contribution_path=f"500_v2", + layer_id="500", + order=9999) + + paths = get_paths(layer) + assert paths[orders_sorted.index(500)] == "500_v2" + assert len(paths) == len(orders) + + # When replacing a layer with a new 'id' the ordering is preserved from + # before; the new order is not applied even when it is None + usdlib.add_ordered_sublayer(layer, + contribution_path=f"500_v3", + layer_id="500", + order=None) + + paths = get_paths(layer) + assert paths[orders_sorted.index(500)] == "500_v3" + assert len(paths) == len(orders) + + # Adding new layer id should also work to insert the new layer + usdlib.add_ordered_sublayer(layer, + contribution_path=f"75", + layer_id="75", + order=75) + + paths = get_paths(layer) + assert paths[1] == "75" + assert len(paths) == len(orders) + 1 + + # Adding a layer with `order=None` should append at the end + usdlib.add_ordered_sublayer(layer, + contribution_path=f"None", + layer_id="None", + order=None) + paths = get_paths(layer) + assert paths[-1] == "None" + assert len(paths) == len(orders) + 2 + + # Adding a layer with `order=None` should also be replaceable + usdlib.add_ordered_sublayer(layer, + contribution_path=f"None_v2", + layer_id="None", + order=None) + paths = get_paths(layer) + assert paths[-1] == "None_v2" + assert len(paths) == len(orders) + 2 + + # Debug print generated file (pytest excludes it by default but will + # show it if the -s flag is passed) + print(layer.ExportToString()) From 7a809b0e876b3d71c5298b0d5c28cad9db541ce9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 13 Nov 2023 13:29:39 +0100 Subject: [PATCH 36/56] Fix ordering (higher order is stronger opinion) + make tests more explicit by always checking the full list which also helps readability --- openpype/lib/usdlib.py | 18 ++++++++++---- tests/unit/openpype/lib/test_usdlib.py | 33 ++++++++++++-------------- 2 files changed, 29 insertions(+), 22 deletions(-) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 1892791ce54..9b249b4782c 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -281,6 +281,10 @@ def add_ordered_sublayer(layer, contribution_path, layer_id, order=None, There they will then just be unused args that we can parse later again to access our data. + A higher order will appear earlier in the subLayerPaths as a stronger + opinion. An unordered layer (`order=None`) will be stronger than any + ordered opinion and thus will be inserted at the start of the list. + Args: layer (Sdf.Layer): Layer to add sublayers in. contribution_path (str): Path/URI to add. @@ -295,6 +299,8 @@ def add_ordered_sublayer(layer, contribution_path, layer_id, order=None, in the future as intended. 
Returns: + str: The resulting contribution path (which maybe include the + sdf format args metadata if enabled) """ @@ -336,29 +342,33 @@ def _format_path(path, layer_id, order): f"-> {contribution_path}" ) layer.subLayerPaths[index] = contribution_path - return + return contribution_path contribution_path = _format_path(contribution_path, order=order, layer_id=layer_id) # If an order is defined and other layers are ordered than place it before - # the first order where existing order is higher + # the first order where existing order is lower if order is not None: for index, existing_path in enumerate(layer.subLayerPaths): args = get_sdf_format_args(existing_path) existing_order = args.get("order") - if existing_order is not None and int(existing_order) > order: + if existing_order is not None and int(existing_order) < order: log.debug( f"Inserting new layer at {index}: {contribution_path}" ) layer.subLayerPaths.insert(index, contribution_path) return + # Weakest ordered opinion + layer.subLayerPaths.append(contribution_path) + return contribution_path # If no paths found with an order to put it next to # then put the sublayer at the end log.debug(f"Appending new layer: {contribution_path}") - layer.subLayerPaths.append(contribution_path) + layer.subLayerPaths.insert(0, contribution_path) + return contribution_path def add_variant_references_to_layer( diff --git a/tests/unit/openpype/lib/test_usdlib.py b/tests/unit/openpype/lib/test_usdlib.py index 169c8bc3bad..345bce9848c 100644 --- a/tests/unit/openpype/lib/test_usdlib.py +++ b/tests/unit/openpype/lib/test_usdlib.py @@ -175,14 +175,17 @@ def test_add_ordered_sublayer(tmp_path): layer = Sdf.Layer.CreateAnonymous() - def get_paths(layer): + def get_paths(layer, remove_format_args=True): paths = layer.subLayerPaths # Remove stored metadata in string - paths = [path.split(":SDF_FORMAT_ARGS:", 1)[0] for path in paths] + if remove_format_args: + paths = [path.split(":SDF_FORMAT_ARGS:", 1)[0] for path in paths] return paths + # The layer stack should have the higher orders earlier in the list + # because those state "stronger opinions" - as such the order needs to be + # reversed orders = [300, 500, 350, 600, 50, 150, 450] - orders_sorted = list(sorted(orders)) for order in orders: usdlib.add_ordered_sublayer(layer, contribution_path=str(order), @@ -190,8 +193,7 @@ def get_paths(layer): order=order) paths = get_paths(layer) - assert paths == [str(i) for i in sorted(orders)] - assert len(paths) == len(orders) + assert paths == ["600", "500", "450", "350", "300", "150", "50"] # This should not add a sublayer but should replace by `layer_id` usdlib.add_ordered_sublayer(layer, @@ -200,8 +202,7 @@ def get_paths(layer): order=300) paths = get_paths(layer) - assert paths[orders_sorted.index(300)] == "300_v2" - assert len(paths) == len(orders) + assert paths == ["600", "500", "450", "350", "300_v2", "150", "50"] # When replacing a layer with a new 'id' the ordering is preserved from # before; the new order is not applied. 
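In short, the ordering rule these tests pin down (a minimal sketch, assuming
the test module's `usdlib` and `Sdf` imports; paths are illustrative):

    layer = Sdf.Layer.CreateAnonymous()
    usdlib.add_ordered_sublayer(layer, contribution_path="a.usd",
                                layer_id="a", order=100)
    usdlib.add_ordered_sublayer(layer, contribution_path="b.usd",
                                layer_id="b", order=300)
    usdlib.add_ordered_sublayer(layer, contribution_path="c.usd",
                                layer_id="c", order=None)
    # Strongest opinion first: c.usd (unordered), then b.usd (300), then
    # a.usd (100). Each stored path also carries the ":SDF_FORMAT_ARGS:..."
    # suffix unless add_sdf_arguments_metadata is disabled.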
@@ -211,8 +212,7 @@ def get_paths(layer): order=9999) paths = get_paths(layer) - assert paths[orders_sorted.index(500)] == "500_v2" - assert len(paths) == len(orders) + assert paths == ["600", "500_v2", "450", "350", "300_v2", "150", "50"] # When replacing a layer with a new 'id' the ordering is preserved from # before; the new order is not applied even when it is None @@ -222,8 +222,7 @@ def get_paths(layer): order=None) paths = get_paths(layer) - assert paths[orders_sorted.index(500)] == "500_v3" - assert len(paths) == len(orders) + assert paths == ["600", "500_v3", "450", "350", "300_v2", "150", "50"] # Adding new layer id should also work to insert the new layer usdlib.add_ordered_sublayer(layer, @@ -232,17 +231,16 @@ def get_paths(layer): order=75) paths = get_paths(layer) - assert paths[1] == "75" - assert len(paths) == len(orders) + 1 + assert paths == ["600", "500_v3", "450", "350", "300_v2", "150", "75", "50"] # noqa: E501 - # Adding a layer with `order=None` should append at the end + # Adding a layer with `order=None` should append at the start as a + # strongest opinion usdlib.add_ordered_sublayer(layer, contribution_path=f"None", layer_id="None", order=None) paths = get_paths(layer) - assert paths[-1] == "None" - assert len(paths) == len(orders) + 2 + assert paths == ["None", "600", "500_v3", "450", "350", "300_v2", "150", "75", "50"] # noqa: E501 # Adding a layer with `order=None` should also be replaceable usdlib.add_ordered_sublayer(layer, @@ -250,8 +248,7 @@ def get_paths(layer): layer_id="None", order=None) paths = get_paths(layer) - assert paths[-1] == "None_v2" - assert len(paths) == len(orders) + 2 + assert paths == ["None_v2", "600", "500_v3", "450", "350", "300_v2", "150", "75", "50"] # noqa: E501 # Debug print generated file (pytest excludes it by default but will # show it if the -s flag is passed) From 02f7d1a1d08f46ffa185e5b11fb79a1e328032c0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 13 Nov 2023 13:59:34 +0100 Subject: [PATCH 37/56] Allow to initialize as either `asset` or `shot` target USD file if not existent product yet --- .../extract_usd_layer_contributions.py | 72 ++++++++++++------- 1 file changed, 45 insertions(+), 27 deletions(-) diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py index 1b90a95aaf2..b1543fba2b9 100644 --- a/openpype/plugins/publish/extract_usd_layer_contributions.py +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -205,7 +205,7 @@ def process(self, instance): contribution = Contribution( instance=instance, layer_id=attr_values["contribution_layer"], - target_product=attr_values["contribution_asset_subset"], + target_product=attr_values["contribution_target_product"], apply_as_variant=attr_values["contribution_apply_as_variant"], variant_set_name=attr_values["contribution_variant_set_name"], variant_name=attr_values["contribution_variant"], @@ -233,12 +233,15 @@ def process(self, instance): ) # Asset/Shot contribution instance - self.get_or_create_instance( + target_instance = self.get_or_create_instance( subset=asset_subset, variant=asset_subset, source_instance=layer_instance, families=["usd", "usdAsset"], ) + target_instance.data["contribution_target_product_init"] = attr_values[ + "contribution_target_product_init" + ] self.log.info( f"Contributing {instance.data['subset']} to " @@ -296,17 +299,26 @@ def get_attribute_defs(cls): return [ UISeparatorDef("usd_container_settings1"), - UILabelDef(label="USD Container"), + 
UILabelDef(label="USD Contribution"), BoolDef("contribution_enabled", label="Enable", default=True), - TextDef("contribution_asset_subset", - label="USD Asset subset", + TextDef("contribution_target_product", + label="Target subset", default="usdAsset"), + EnumDef("contribution_target_product_init", + label="Initialize as", + tooltip=( + "The target products USD file will be initialized " + "based on this type if there's no existing USD of " + "that product yet." + ), + items=["asset", "shot"], + default="asset"), # Asset layer, e.g. model.usd, look.usd, rig.usd EnumDef("contribution_layer", - label="Department layer", + label="Add to department layer", tooltip="The layer the contribution should be made to in " "the usd asset.", items=list(LAYER_ORDERS.keys()), @@ -354,7 +366,7 @@ def process(self, instance): path = get_instance_uri_path(contribution.instance) if contribution.apply_as_variant: # Add contribution as variants to their layer subsets - self.log.debug("Adding variant") + self.log.debug(f"Adding variant: {contribution}") prim_path = f"/{default_prim}" variant_set_name = contribution.variant_set_name variant_name = contribution.variant_name @@ -373,7 +385,7 @@ def process(self, instance): else: # Sublayer source file - self.log.debug("Adding sublayer") + self.log.debug(f"Adding sublayer: {contribution}") # This replaces existing versions of itself so that # republishing does not continuously add more versions of the @@ -427,7 +439,11 @@ def process(self, instance): asset_layer = Sdf.Layer.OpenAsAnonymous(path) else: - asset_layer, payload_layer = self.init_layer(asset_name=asset) + # If not existing publish of this product yet then we initialize + # the layer as either a default asset or shot structure. + init_type = instance.data["contribution_target_product_init"] + asset_layer, payload_layer = self.init_layer(asset_name=asset, + init_type=init_type) target_layer = payload_layer if payload_layer else asset_layer @@ -480,14 +496,27 @@ def sort_by_order(instance): payload_layer.Export(payload_path, args={"format": "usda"}) self.add_relative_file(instance, payload_path) - def init_layer(self, asset_name): + def init_layer(self, asset_name, init_type): """Initialize layer if no previous version exists""" - asset_layer = Sdf.Layer.CreateAnonymous() - created_layers = setup_asset_layer(asset_layer, asset_name, - force_add_payload=True, - set_payload_path=True) - payload_layer = created_layers[0].layer - return asset_layer, payload_layer + + if init_type == "asset": + asset_layer = Sdf.Layer.CreateAnonymous() + created_layers = setup_asset_layer(asset_layer, asset_name, + force_add_payload=True, + set_payload_path=True) + payload_layer = created_layers[0].layer + return asset_layer, payload_layer + + elif init_type == "shot": + shot_layer = Sdf.Layer.CreateAnonymous() + set_layer_defaults(shot_layer, default_prim=None) + return shot_layer, None + + else: + raise ValueError( + "USD Target Product contribution can only initialize " + "as 'asset' or 'shot', got: '{}'".format(init_type) + ) def add_relative_file(self, instance, source, staging_dir=None): """Add transfer for a relative path form staging to publish dir. 
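Roughly, the two initialization branches above differ as follows (a sketch of
intent, not an exhaustive description):

    # init_type == "asset": setup_asset_layer() builds the regular asset
    #   structure with a payload layer, and that payload layer becomes the
    #   target the department layers are added into.
    # init_type == "shot": set_layer_defaults(default_prim=None) yields a
    #   flat layer without a default prim; department layers are sublayered
    #   into it directly (payload_layer is None).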
@@ -518,14 +547,3 @@ def add_relative_file(self, instance, source, staging_dir=None): transfers = instance.data.setdefault("transfers", []) self.log.debug(f"Adding relative file {source} -> {relative_path}") transfers.append((source, destination)) - - -class ExtractShotContribution(ExtractUSDAssetContribution): - """Shot contributions go into an empty layer as sublayers""" - families = ["usdShot"] - - def init_layer(self, asset_name): - """Initialize layer if no previous version exists""" - layer = Sdf.Layer.CreateAnonymous() - set_layer_defaults(layer, default_prim=None) - return layer From b983779b3778f04cdbf2ee4a72bf036ff876acf1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 13 Nov 2023 14:10:45 +0100 Subject: [PATCH 38/56] Implement validations in the test that data is as we expect --- tests/unit/openpype/lib/test_usdlib.py | 54 ++++++++++++++++++++++++-- 1 file changed, 50 insertions(+), 4 deletions(-) diff --git a/tests/unit/openpype/lib/test_usdlib.py b/tests/unit/openpype/lib/test_usdlib.py index 345bce9848c..5a34cf2b22b 100644 --- a/tests/unit/openpype/lib/test_usdlib.py +++ b/tests/unit/openpype/lib/test_usdlib.py @@ -110,31 +110,45 @@ def test_create_shot(tmp_path): def test_add_variant_references_to_layer(tmp_path): """Test adding variants to a layer, replacing older ones""" - # TODO: The code doesn't error but the data should still be validated + + def get_references(layer, prim_path, variant_set, variant): + """Return prepended reference asset paths for prim in variant set""" + path = Sdf.Path(prim_path).AppendVariantSelection(variant_set, variant) + prim_spec = layer.GetPrimAtPath(path) + references = list(prim_spec.referenceList.prependedItems) + return [ref.assetPath for ref in references] prim_path = "/root" layer = usdlib.add_variant_references_to_layer(variants=[ ("main", "./main.usd"), ("twist", "./twist.usd"), - ("damaged", "./damaged.usd"), ("tall", "./tall.usd"), ], variantset="model", variant_prim=prim_path ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] + assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] # Allow recalling with a layer provided to operate on that layer - # instead; adding more variant definitions + # instead; adding more variant definitions, keeping existing definitions + # as well layer = usdlib.add_variant_references_to_layer(variants=[ ("main", "./look_main.usd"), ("twist", "./look_twist.usd"), - ("damaged", "./look_damaged.usd"), ("tall", "./look_tall.usd"), ], variantset="look", layer=layer, variant_prim=prim_path ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] + assert get_references(layer, prim_path, "look", "main") == ["./look_main.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "twist") == ["./look_twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "tall") == ["./look_tall.usd"] # noqa: E501 # Allow with a layer provided to operate on that layer # instead; adding more variant names to an existing variant set @@ -146,6 +160,30 @@ def test_add_variant_references_to_layer(tmp_path): set_default_variant=False, variant_prim=prim_path ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", 
"twist") == ["./twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] + assert get_references(layer, prim_path, "look", "main") == ["./look_main.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "twist") == ["./look_twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "tall") == ["./look_tall.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "short") == ["./look_short.usd"] # noqa: E501 + + # Allow updating an existing variant with a new file + layer = usdlib.add_variant_references_to_layer(variants=[ + ("short", "./look_short_v02.usd"), + ], + variantset="look", + layer=layer, + set_default_variant=False, + variant_prim=prim_path + ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] + assert get_references(layer, prim_path, "look", "main") == ["./look_main.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "twist") == ["./look_twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "tall") == ["./look_tall.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "short") == ["./look_short_v02.usd"] # noqa: E501 # Applying variants to another prim should not affect first prim layer = usdlib.add_variant_references_to_layer(variants=[ @@ -156,6 +194,14 @@ def test_add_variant_references_to_layer(tmp_path): set_default_variant=False, variant_prim="/other_root" ) + assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] + assert get_references(layer, prim_path, "look", "main") == ["./look_main.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "twist") == ["./look_twist.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "tall") == ["./look_tall.usd"] # noqa: E501 + assert get_references(layer, prim_path, "look", "short") == ["./look_short_v02.usd"] # noqa: E501 + assert get_references(layer, "/other_root", "look", "short") == ["./look_short.usd"] # noqa: E501 # Export layer should work layer.Export( From 7086adb228cd7fc03195ffd852653d273133aa6f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 13 Nov 2023 14:11:40 +0100 Subject: [PATCH 39/56] Remove unused `tmp_path` feature --- tests/unit/openpype/lib/test_usdlib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/openpype/lib/test_usdlib.py b/tests/unit/openpype/lib/test_usdlib.py index 5a34cf2b22b..8e46b24d93b 100644 --- a/tests/unit/openpype/lib/test_usdlib.py +++ b/tests/unit/openpype/lib/test_usdlib.py @@ -215,7 +215,7 @@ def get_references(layer, prim_path, variant_set, variant): print(layer.ExportToString()) -def test_add_ordered_sublayer(tmp_path): +def test_add_ordered_sublayer(): """Test addinng sublayers by order and uniqueness""" # TODO: The code doesn't error but the data should still be validated From 32f1791121b7df20ab77bd27964f31f560a8e0f1 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 13 Nov 2023 15:22:54 +0100 Subject: [PATCH 40/56] Re-enable `BUILD_INTO_LAST_VERSIONS` --- .../plugins/publish/extract_usd_layer_contributions.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git 
a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py index b1543fba2b9..909b665c238 100644 --- a/openpype/plugins/publish/extract_usd_layer_contributions.py +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -52,7 +52,13 @@ "lighting": 600, } -BUILD_INTO_LAST_VERSIONS = False +# This global toggle is here mostly for debugging purposes and should usually +# be True so that new publishes merge and extend on previous contributions. +# With this enabled a new variant model layer publish would e.g. merge with +# the model layer's other variants nicely, so you can build up an asset by +# individual publishes instead of requiring to republish each contribution +# all the time at the same time +BUILD_INTO_LAST_VERSIONS = True @dataclasses.dataclass From c1922f631c41c23082e827c67899efe3d51988ba Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 13 Nov 2023 15:23:49 +0100 Subject: [PATCH 41/56] Reorder imports (cosmetics) --- openpype/plugins/publish/extract_usd_layer_contributions.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py index 909b665c238..4205514fff2 100644 --- a/openpype/plugins/publish/extract_usd_layer_contributions.py +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -3,6 +3,7 @@ import os import pyblish.api +from pxr import Sdf from openpype.lib import ( TextDef, @@ -11,10 +12,6 @@ UILabelDef, EnumDef ) -from openpype.pipeline import publish - - -from pxr import Sdf from openpype.lib.usdlib import ( set_variant_reference, setup_asset_layer, @@ -24,6 +21,7 @@ get_representation_path_by_names, set_layer_defaults ) +from openpype.pipeline import publish # A contribution defines a contribution into a (department) layer which will From 774c089a80b57e25ad53e464c5c59616e9059411 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 13 Nov 2023 17:02:42 +0100 Subject: [PATCH 42/56] Tweak label for readability --- openpype/hosts/maya/plugins/create/create_maya_usd_layer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py index 4f1903de54c..3df8cece189 100644 --- a/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py +++ b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py @@ -6,7 +6,7 @@ class CreateMayaUsdLayer(plugin.MayaCreator): """Create Maya USD Export from `mayaUsdProxyShape` layer""" identifier = "io.openpype.creators.maya.mayausdlayer" - label = "Maya USD Layer Export" + label = "Maya USD Export Layer" family = "usd" icon = "cubes" description = "Create mayaUsdProxyShape layer export" From 53dc0f3c00e486bc8594c97467795d3912eb4ccf Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 13 Nov 2023 19:55:34 +0100 Subject: [PATCH 43/56] Make asset contribution create just a regular usd instance --- .../maya/plugins/create/create_maya_usd.py | 113 +++++------------- 1 file changed, 32 insertions(+), 81 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd.py b/openpype/hosts/maya/plugins/create/create_maya_usd.py index 7e33af84bc7..250818c551c 100644 --- a/openpype/hosts/maya/plugins/create/create_maya_usd.py +++ b/openpype/hosts/maya/plugins/create/create_maya_usd.py @@ -118,13 +118,13 @@ class CreateMayaUsdContribution(CreateMayaUsd): """ - identifier = 
"io.openpype.creators.maya.mayausd.contribution" - label = "Maya USD Contribution" + identifier = "io.openpype.creators.maya.mayausd.assetcontribution" + label = "Maya USD Asset Contribution" family = "usd" icon = "cubes" description = "Create Maya USD Contribution" - default_variants = ["main"] + # default_variants = ["main"] # TODO: Do not include material for model publish # TODO: Do only include material + assignments for material publish # + attribute overrides onto existing geo? (`over`?) @@ -200,28 +200,20 @@ def create(self, subset_name, instance_data, pre_create_data): cmds.select(root, replace=True, noExpand=True) pre_create_data["use_selection"] = True - super(CreateMayaUsdContribution, self).create( + # Create as if we're the other plug-in so that the instance after + # creation thinks it was created by `CreateMayaUsd` and this Creator + # here is solely used to apply different default values + # TODO: Improve this hack + CreateMayaUsd( + project_settings=self.project_settings, + system_settings=None, + create_context=self.create_context + ).create( subset_name, instance_data, pre_create_data ) - def add_transient_instance_data(self, instance_data): - super().add_transient_instance_data(instance_data) - instance_data["usd_bootstrap"] = self.bootstrap - instance_data["usd_contribution"] = "model" - - def remove_transient_instance_data(self, instance_data): - super().remove_transient_instance_data(instance_data) - instance_data.pop("usd_bootstrap", None) - instance_data.pop("usd_contribution", None) - - def get_publish_families(self): - families = ["usd", "mayaUsd", "usd.layered"] - if self.family not in families: - families.append(self.family) - return families - def get_pre_create_attr_defs(self): defs = super(CreateMayaUsdContribution, self).get_pre_create_attr_defs() defs.extend([ @@ -231,65 +223,24 @@ def get_pre_create_attr_defs(self): ]) return defs - def get_instance_attr_defs(self): - - defs = [ - UISeparatorDef("contribution_settings1"), - UILabelDef(label="Contribution"), - UISeparatorDef("contribution_settings2"), - TextDef("contribution_asset", - label="USD Asset subset", - default="usdAsset"), - - # Asset layer, e.g. 
model.usd, look.usd, rig.usd - EnumDef("contribution_asset_layer", - label="Department layer", - tooltip="The layer the contribution should be made to in " - "the usd asset.", - items=["model", "look", "rig"], - hidden=bool(self.contribution_asset_layer), - default=self.contribution_asset_layer), - BoolDef("contribute_as_variant", - label="Use as variant", - default=True), - TextDef("contribution_variant_set_name", - label="Variant Set Name", - default="model"), - TextDef("contribution_variant", - label="Variant Name", - default="{variant}"), - - # Separate the rest of the settings visually - UISeparatorDef("export_settings1"), - UILabelDef(label="Export Settings"), - UISeparatorDef("export_settings2"), - ] - defs += super(CreateMayaUsdContribution, self).get_instance_attr_defs() - - # Remove certain settings that we don't want to expose on asset - # creation - remove = {"stripNamespaces", "mergeTransformAndShape"} - defs = [attr_def for attr_def in defs if attr_def.key not in remove] - return defs - - -class CreateUsdLookContribution(CreateMayaUsdContribution): - """Look layer contribution to the USD Asset""" - identifier = CreateMayaUsdContribution.identifier + ".look" - label = "USD Look" - icon = "paint-brush" - description = "Create USD Look contribution" - family = "usd.look" - - contribution_asset_layer = "look" - - -class CreateUsdModelContribution(CreateMayaUsdContribution): - """Model layer contribution to the USD Asset""" - identifier = CreateMayaUsdContribution.identifier + ".model" - label = "USD Model" - icon = "cube" - description = "Create USD Model contribution" - family = "usd.model" - contribution_asset_layer = "model" +# class CreateUsdLookContribution(CreateMayaUsdContribution): +# """Look layer contribution to the USD Asset""" +# identifier = CreateMayaUsdContribution.identifier + ".look" +# label = "USD Look" +# icon = "paint-brush" +# description = "Create USD Look contribution" +# family = "usd.look" +# +# contribution_asset_layer = "look" +# +# +# class CreateUsdModelContribution(CreateMayaUsdContribution): +# """Model layer contribution to the USD Asset""" +# identifier = CreateMayaUsdContribution.identifier + ".model" +# label = "USD Model" +# icon = "cube" +# description = "Create USD Model contribution" +# family = "usd.model" +# +# contribution_asset_layer = "model" From 4e0d733d25fa8a4255128a7bad1e05aca41c356f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 14 Nov 2023 13:09:14 +0100 Subject: [PATCH 44/56] Fix USD export for draft new publisher --- .../blender/plugins/create/create_usd.py | 40 +++++-------- .../plugins/publish/collect_instance.py | 2 +- .../blender/plugins/publish/extract_usd.py | 56 +++++++++++++++---- 3 files changed, 61 insertions(+), 37 deletions(-) diff --git a/openpype/hosts/blender/plugins/create/create_usd.py b/openpype/hosts/blender/plugins/create/create_usd.py index b0a5069f376..639a0c32d83 100644 --- a/openpype/hosts/blender/plugins/create/create_usd.py +++ b/openpype/hosts/blender/plugins/create/create_usd.py @@ -7,38 +7,28 @@ from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES -class CreateUSD(plugin.Creator): +class CreateUSD(plugin.BaseCreator): """Create USD Export""" + identifier = "io.openpype.creators.blender.usd" name = "usdMain" label = "USD" family = "usd" icon = "gears" - def process(self): - """ Run the creator on Blender main thread""" - mti = ops.MainThreadItem(self._process) - ops.execute_in_main_thread(mti) - - def _process(self): - # Get Instance Container or create it if it does not exist - 
instances = bpy.data.collections.get(AVALON_INSTANCES) - if not instances: - instances = bpy.data.collections.new(name=AVALON_INSTANCES) - bpy.context.scene.collection.children.link(instances) - - # Create instance object - asset = self.data["asset"] - subset = self.data["subset"] - name = plugin.asset_name(asset, subset) - collection = bpy.data.collections.new(name=name) - instances.children.link(collection) - self.data['task'] = get_current_task_name() - lib.imprint(collection, self.data) - - # Add selected objects to instance - if (self.options or {}).get("useSelection"): - for obj in lib.get_selection(): + def create( + self, subset_name: str, instance_data: dict, pre_create_data: dict + ): + # Run parent create method + collection = super().create( + subset_name, instance_data, pre_create_data + ) + + if pre_create_data.get("use_selection"): + objects = lib.get_selection() + for obj in objects: collection.objects.link(obj) + if obj.type == 'EMPTY': + objects.extend(obj.children) return collection diff --git a/openpype/hosts/blender/plugins/publish/collect_instance.py b/openpype/hosts/blender/plugins/publish/collect_instance.py index b170e3d06fd..656a9e4c162 100644 --- a/openpype/hosts/blender/plugins/publish/collect_instance.py +++ b/openpype/hosts/blender/plugins/publish/collect_instance.py @@ -12,7 +12,7 @@ class CollectBlenderInstanceData(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder hosts = ["blender"] families = ["model", "pointcache", "rig", "camera" "layout", "blendScene", - "camera"] + "camera", "usd"] label = "Collect Instance" def process(self, instance): diff --git a/openpype/hosts/blender/plugins/publish/extract_usd.py b/openpype/hosts/blender/plugins/publish/extract_usd.py index 74f6f832dd2..2f057acf44a 100644 --- a/openpype/hosts/blender/plugins/publish/extract_usd.py +++ b/openpype/hosts/blender/plugins/publish/extract_usd.py @@ -4,7 +4,17 @@ from openpype.pipeline import publish from openpype.hosts.blender.api import plugin -from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY + + +def get_all_parents(obj): + """Get all recursive parents of object""" + result = [] + while True: + obj = obj.parent + if not obj: + break + result.append(obj) + return result class ExtractUSD(publish.Extractor): @@ -15,6 +25,12 @@ class ExtractUSD(publish.Extractor): families = ["usd"] def process(self, instance): + + # Ignore runtime instances (e.g. 
USD layers) + # TODO: This is better done via more specific `families` + if not instance.data.get("transientData", {}).get("instance_node"): + return + # Define extract output file path stagingdir = self.staging_dir(instance) filename = f"{instance.name}.usd" @@ -25,21 +41,39 @@ def process(self, instance): # Select all members to "export selected" plugin.deselect_all() + selected = [] - asset_group = None for obj in instance: - if isinstance(obj, bpy.types.Collection): - # TODO: instead include all children - but that's actually - # up to the Collector instead - continue + if isinstance(obj, bpy.types.Object): + obj.select_set(True) + selected.append(obj) + + # The extraction does not work if the active object is a Collection + # so we need to pick an object instead; this should be the highest + # object in the hierarchy + included_objects = {obj.name_full for obj in instance} + num_parents_to_obj = {} + for obj in instance: + if isinstance(obj, bpy.types.Object): + parents = get_all_parents(obj) + # included parents + parents = [parent for parent in parents if + parent.name_full in included_objects] + if not parents: + root = obj + break + + num_parents_to_obj.setdefault(len(parents), obj) + else: + minimum_parent = min(num_parents_to_obj) + root = num_parents_to_obj[minimum_parent] - obj.select_set(True) - selected.append(obj) - if obj.get(AVALON_PROPERTY): - asset_group = obj + if not root: + raise RuntimeError("No root node found") + self.log.debug(f"Exporting using active root: {root.name}") context = plugin.create_blender_context( - active=asset_group, selected=selected) + active=root, selected=selected) # Export USD bpy.ops.wm.usd_export( From c858eefc31b8c534eb5304534258000a730b6cee Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Tue, 14 Nov 2023 14:08:45 +0100 Subject: [PATCH 45/56] Lib cleanup --- openpype/lib/usdlib.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 9b249b4782c..5dc4f00c9b5 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -541,7 +541,7 @@ def get_or_define_prim_spec(layer, prim_path, type_name): def variant_nested_prim_path(prim_path, variant_selections): - """Return the Sdf.Path path for a nested variant selection at prim path. + """Return the Sdf.Path for a nested variant selection at prim path. Examples: >>> prim_path = Sdf.Path("/asset") @@ -587,7 +587,8 @@ def set_variant_reference(sdf_layer, prim_path, variant_selections, path, to prepended items, otherwise it will replace it. Returns: - S + Sdf.PrimSpec: The prim spec for the prim path at the given + variant selection. """ prim_path = Sdf.Path(prim_path) From 547a4efed8410dd6e81982a2a07bb3cc88ac2ebe Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 15 Nov 2023 19:25:00 +0100 Subject: [PATCH 46/56] Re-use duplicated functionality from lib instead + improve docstrings --- openpype/hosts/blender/api/lib.py | 59 +++++++++++++++++++ .../plugins/publish/extract_fbx_animation.py | 43 +++----------- .../blender/plugins/publish/extract_usd.py | 39 ++---------- 3 files changed, 74 insertions(+), 67 deletions(-) diff --git a/openpype/hosts/blender/api/lib.py b/openpype/hosts/blender/api/lib.py index e80ed61bc81..032c8e21c82 100644 --- a/openpype/hosts/blender/api/lib.py +++ b/openpype/hosts/blender/api/lib.py @@ -365,3 +365,62 @@ def maintained_time(): yield finally: bpy.context.scene.frame_current = current_time + + +def get_all_parents(obj): + """Get all recursive parents of object. 
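    For example, for a hierarchy world -> group -> mesh this returns
    [group, world] for mesh: the immediate parent first, the topmost
    parent last.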
+ + Arguments: + obj (bpy.types.Object): Object to get all parents for. + + Returns: + List[bpy.types.Object]: All parents of object + + """ + result = [] + while True: + obj = obj.parent + if not obj: + break + result.append(obj) + return result + + +def get_highest_root(objects): + """Get the highest object (the least parents) among the objects. + + If multiple objects have the same amount of parents (or no parents) the + first object found in the input iterable will be returned. + + Note that this will *not* return objects outside of the input list, as + such it will not return the root of node from a child node. It is purely + intended to find the highest object among a list of objects. To instead + get the root from one object use, e.g. `get_all_parents(obj)[-1]` + + Arguments: + objects (List[bpy.types.Object]): Objects to find the highest root in. + + Returns: + Optional[bpy.types.Object]: First highest root found or None if no + `bpy.types.Object` found in input list. + + """ + included_objects = {obj.name_full for obj in objects} + num_parents_to_obj = {} + for obj in objects: + if isinstance(obj, bpy.types.Object): + parents = get_all_parents(obj) + # included parents + parents = [parent for parent in parents if + parent.name_full in included_objects] + if not parents: + # A node without parents must be a highest root + return obj + + num_parents_to_obj.setdefault(len(parents), obj) + + if not num_parents_to_obj: + return + + minimum_parent = min(num_parents_to_obj) + return num_parents_to_obj[minimum_parent] diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py index a705345edbd..3d75e1e10ee 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -7,40 +7,10 @@ from openpype.pipeline import publish from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.lib import get_highest_root, get_all_parents from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY -def get_all_parents(obj): - """Get all recursive parents of object""" - result = [] - while True: - obj = obj.parent - if not obj: - break - result.append(obj) - return result - - -def get_highest_root(objects): - # Get the highest object that is also in the collection - included_objects = {obj.name_full for obj in objects} - num_parents_to_obj = {} - for obj in objects: - if isinstance(obj, bpy.types.Object): - parents = get_all_parents(obj) - # included parents - parents = [parent for parent in parents if - parent.name_full in included_objects] - if not parents: - # A node without parents must be a highest root - return obj - - num_parents_to_obj.setdefault(len(parents), obj) - - minimum_parent = min(num_parents_to_obj) - return num_parents_to_obj[minimum_parent] - - class ExtractAnimationFBX( publish.Extractor, publish.OptionalPyblishPluginMixin, @@ -68,19 +38,24 @@ def process(self, instance): # and for those objects include the children hierarchy # TODO: Would it make more sense for the Collect Instance collector # to also always retrieve all the children? 
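    # For illustration: given a hierarchy world -> group -> mesh,
    # get_highest_root([mesh, group, world]) returns world, while
    # get_highest_root([mesh, group]) returns group, because parents
    # outside of the passed list are ignored.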
- objects = set(asset_group.objects) # From the direct children of the collection find the 'root' node # that we want to export - it is the 'highest' node in a hierarchy - root = get_highest_root(objects) + root = get_highest_root(asset_group.objects) + if not root: + raise publish.KnownPublishError( + f"No root object found in instance: {asset_group.name}" + f"No objects in asset group: {asset_group.name}" + ) + objects = set(asset_group.objects) for obj in list(objects): objects.update(obj.children_recursive) # Find all armatures among the objects, assume to find only one armatures = [obj for obj in objects if obj.type == "ARMATURE"] if not armatures: - raise RuntimeError( + raise publish.KnownPublishError( f"Unable to find ARMATURE in collection: " f"{asset_group.name}" ) diff --git a/openpype/hosts/blender/plugins/publish/extract_usd.py b/openpype/hosts/blender/plugins/publish/extract_usd.py index 2f057acf44a..02668482b11 100644 --- a/openpype/hosts/blender/plugins/publish/extract_usd.py +++ b/openpype/hosts/blender/plugins/publish/extract_usd.py @@ -3,18 +3,7 @@ import bpy from openpype.pipeline import publish -from openpype.hosts.blender.api import plugin - - -def get_all_parents(obj): - """Get all recursive parents of object""" - result = [] - while True: - obj = obj.parent - if not obj: - break - result.append(obj) - return result +from openpype.hosts.blender.api import plugin, lib class ExtractUSD(publish.Extractor): @@ -48,28 +37,12 @@ def process(self, instance): obj.select_set(True) selected.append(obj) - # The extraction does not work if the active object is a Collection - # so we need to pick an object instead; this should be the highest - # object in the hierarchy - included_objects = {obj.name_full for obj in instance} - num_parents_to_obj = {} - for obj in instance: - if isinstance(obj, bpy.types.Object): - parents = get_all_parents(obj) - # included parents - parents = [parent for parent in parents if - parent.name_full in included_objects] - if not parents: - root = obj - break - - num_parents_to_obj.setdefault(len(parents), obj) - else: - minimum_parent = min(num_parents_to_obj) - root = num_parents_to_obj[minimum_parent] - + root = lib.get_highest_root(objects=instance[:]) if not root: - raise RuntimeError("No root node found") + instance_node = instance.data["transientData"]["instance_node"] + raise publish.KnownPublishError( + f"No root object found in instance: {instance_node.name}" + ) self.log.debug(f"Exporting using active root: {root.name}") context = plugin.create_blender_context( From c47777be790a80069682e938e982c441cfbaa7a9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 15 Nov 2023 21:35:50 +0100 Subject: [PATCH 47/56] Hound + cosmetics --- .../blender/plugins/create/create_usd.py | 6 +---- .../plugins/publish/extract_fbx_animation.py | 2 +- .../blender/plugins/publish/extract_usd.py | 2 +- .../outputprocessors/ayon_uri_processor.py | 1 + .../maya/plugins/create/create_maya_usd.py | 3 ++- .../maya/plugins/load/load_into_maya_usd.py | 2 +- .../hosts/maya/plugins/load/show_usdview.py | 2 +- openpype/lib/usdlib.py | 20 +++++++------- .../extract_usd_layer_contributions.py | 26 ++++++++++++++++++- 9 files changed, 43 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/blender/plugins/create/create_usd.py b/openpype/hosts/blender/plugins/create/create_usd.py index 639a0c32d83..b7e6db077a9 100644 --- a/openpype/hosts/blender/plugins/create/create_usd.py +++ b/openpype/hosts/blender/plugins/create/create_usd.py @@ -1,10 +1,6 @@ """Create a USD 
Export.""" -import bpy - -from openpype.pipeline import get_current_task_name -from openpype.hosts.blender.api import plugin, lib, ops -from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin, lib class CreateUSD(plugin.BaseCreator): diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py index 3d75e1e10ee..ff2a3aac8da 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -7,7 +7,7 @@ from openpype.pipeline import publish from openpype.hosts.blender.api import plugin -from openpype.hosts.blender.api.lib import get_highest_root, get_all_parents +from openpype.hosts.blender.api.lib import get_highest_root from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY diff --git a/openpype/hosts/blender/plugins/publish/extract_usd.py b/openpype/hosts/blender/plugins/publish/extract_usd.py index 02668482b11..de0e880e446 100644 --- a/openpype/hosts/blender/plugins/publish/extract_usd.py +++ b/openpype/hosts/blender/plugins/publish/extract_usd.py @@ -59,7 +59,7 @@ def process(self, instance): export_hair=False, export_uvmaps=True, # TODO: add for new version of Blender (4+?) - #export_mesh_colors=True, + # export_mesh_colors=True, export_normals=True, export_materials=True, use_instancing=True diff --git a/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py index a2f3494626d..cd163c34918 100644 --- a/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py +++ b/openpype/hosts/houdini/startup/husdplugins/outputprocessors/ayon_uri_processor.py @@ -17,6 +17,7 @@ def __init__(self): self._ref_cache = dict() self._publish_context = None self.log = logging.getLogger(__name__) + @staticmethod def name(): return "ayon_uri_processor" diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd.py b/openpype/hosts/maya/plugins/create/create_maya_usd.py index 250818c551c..dce17fc52bf 100644 --- a/openpype/hosts/maya/plugins/create/create_maya_usd.py +++ b/openpype/hosts/maya/plugins/create/create_maya_usd.py @@ -215,7 +215,8 @@ def create(self, subset_name, instance_data, pre_create_data): ) def get_pre_create_attr_defs(self): - defs = super(CreateMayaUsdContribution, self).get_pre_create_attr_defs() + defs = super(CreateMayaUsdContribution, + self).get_pre_create_attr_defs() defs.extend([ BoolDef("createTemplateHierarchy", label="Create template hierarchy", diff --git a/openpype/hosts/maya/plugins/load/load_into_maya_usd.py b/openpype/hosts/maya/plugins/load/load_into_maya_usd.py index 08ca32c0711..f9a725d6c1b 100644 --- a/openpype/hosts/maya/plugins/load/load_into_maya_usd.py +++ b/openpype/hosts/maya/plugins/load/load_into_maya_usd.py @@ -143,7 +143,7 @@ def _get_prim_references(self, prim): continue prepended_items = prim_spec.referenceList.prependedItems - for index, reference in enumerate(prepended_items): + for index, _reference in enumerate(prepended_items): # Override the matching reference identifier # TODO: Make sure we only return the correct reference yield prepended_items, index diff --git a/openpype/hosts/maya/plugins/load/show_usdview.py b/openpype/hosts/maya/plugins/load/show_usdview.py index 16ab28ab21b..1a2e0c76102 100644 --- a/openpype/hosts/maya/plugins/load/show_usdview.py +++ 
b/openpype/hosts/maya/plugins/load/show_usdview.py @@ -27,7 +27,7 @@ class ShowInUsdview(load.LoaderPlugin): def load(self, context, name=None, namespace=None, data=None): try: - import OpenGL + import OpenGL # noqa: F403 except ImportError: self.log.error( "usdview for mayapy requires to have `OpenGL` python library " diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 5dc4f00c9b5..442fcf97f3d 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -121,7 +121,7 @@ def setup_asset_layer( "__class__", Sdf.SpecifierClass, ) - _class_asset_prim = Sdf.PrimSpec( + Sdf.PrimSpec( class_prim, prim_name, Sdf.SpecifierClass, @@ -209,12 +209,12 @@ def create_asset( layer = Layer(layer=sdf_layer, path=filepath) created_layers = setup_asset_layer( - layer=sdf_layer, - asset_name=asset_name, - reference_layers=reference_layers, - kind=kind, - define_class=define_class, - set_payload_path=True + layer=sdf_layer, + asset_name=asset_name, + reference_layers=reference_layers, + kind=kind, + define_class=define_class, + set_payload_path=True ) for created_layer in created_layers: created_layer.anchor = layer @@ -314,9 +314,9 @@ def _format_path(path, layer_id, order): if add_sdf_arguments_metadata: parts = [ contribution_path, - # Special separator for SDF Format Args used in USD - "SDF_FORMAT_ARGS", - "layer_id={}".format(layer_id) + # Special separator for SDF Format Args used in USD + "SDF_FORMAT_ARGS", + "layer_id={}".format(layer_id) ] if order is not None: parts.append("order={}".format(order)) diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py index 4205514fff2..32dfa8fdf32 100644 --- a/openpype/plugins/publish/extract_usd_layer_contributions.py +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -261,7 +261,31 @@ def find_instance(self, context, data, ignore_instance): for key, value in data.items()): return instance - def get_or_create_instance(self, subset, variant, source_instance, families): + def get_or_create_instance(self, + subset, + variant, + source_instance, + families): + """Get or create the instance matching the subset/variant. + + The source instance will be used to do additional matching, like + ensuring it's a subset for the same asset and task. If the instance + already exists in the `context` then the existing one is returned. + + For each source instance this is called the sources will be appended + to a `instance.data["source_instances"]` list on the returned instance. + + Arguments: + subset (str): Subset name + variant (str): Variant name + source_instance (pyblish.api.Instance): Source instance to + be related to for asset, task. + families (list): The families required to be set on the instance. + + Returns: + pyblish.api.Instance: The resulting instance. 
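        Example:
            If two look variant instances both target the "usdAsset"
            product, the first call creates that instance and the second
            call returns the same one, with both sources appended to its
            instance.data["source_instances"] list.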
+ + """ # Potentially the instance already exists due to multiple instances # contributing to the same layer or asset - so we first check for From 4af412ef89519a0303896da741926012c9cf6621 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 15 Nov 2023 21:41:38 +0100 Subject: [PATCH 48/56] Improve docstring + error message --- openpype/hosts/maya/plugins/load/show_usdview.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/show_usdview.py b/openpype/hosts/maya/plugins/load/show_usdview.py index 1a2e0c76102..1e8c6752439 100644 --- a/openpype/hosts/maya/plugins/load/show_usdview.py +++ b/openpype/hosts/maya/plugins/load/show_usdview.py @@ -10,7 +10,15 @@ class ShowInUsdview(load.LoaderPlugin): - """Open USD file in usdview""" + """Open USD file in usdview + + This requires `OpenGL` python package to be available to Maya. For more + details also see the "Using Usdiew in Maya" section in maya documentation. + + See Also: + https://help.autodesk.com/view/MAYAUL/2024/ENU/?guid=GUID-C452B331-7C4D-4B58-A1BA-D03775F266A8 # noqa + + """ label = "Show in usdview" representations = ["*"] @@ -30,8 +38,10 @@ def load(self, context, name=None, namespace=None, data=None): import OpenGL # noqa: F403 except ImportError: self.log.error( - "usdview for mayapy requires to have `OpenGL` python library " - "available. Please make sure to install it." + "usdview for maya requires to have `OpenGL` python library " + "available. Please make sure to install it.\n" + "The `OpenGL` library can be installed for maya using:\n" + "mayapy -m pip install PyOpenGL==3.1.0" ) filepath = self.filepath_from_context(context) filepath = os.path.normpath(filepath) From e4fc6a8de20bab4734a9a7acfa908152e6152e03 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 15 Nov 2023 22:04:22 +0100 Subject: [PATCH 49/56] Add draft USD importing to Blender --- openpype/hosts/blender/api/plugin.py | 3 ++- .../hosts/blender/plugins/load/load_abc.py | 26 ++++++++++++++----- 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py index 7ac12b55493..24b903e2e35 100644 --- a/openpype/hosts/blender/api/plugin.py +++ b/openpype/hosts/blender/api/plugin.py @@ -25,7 +25,8 @@ ) from .lib import imprint -VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"] +VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx", + ".usd", ".usdc", ".usda"] def asset_name( diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py index 8d1863d4d55..e740d6a3eb2 100644 --- a/openpype/hosts/blender/plugins/load/load_abc.py +++ b/openpype/hosts/blender/plugins/load/load_abc.py @@ -26,10 +26,11 @@ class CacheModelLoader(plugin.AssetLoader): Note: At least for now it only supports Alembic files. """ - families = ["model", "pointcache", "animation"] - representations = ["abc"] + families = ["model", "pointcache", "animation", "usd"] + representations = ["abc", "usd"] - label = "Load Alembic" + # TODO: Should USD loader be a separate loader instead? 
+ label = "Load Alembic/USD" icon = "code-fork" color = "orange" @@ -53,10 +54,21 @@ def _process(self, libpath, asset_group, group_name): plugin.deselect_all() relative = bpy.context.preferences.filepaths.use_relative_paths - bpy.ops.wm.alembic_import( - filepath=libpath, - relative_path=relative - ) + + if any(libpath.lower().endswith(ext) + for ext in [".usd", ".usda", ".usdc"]): + # USD + bpy.ops.wm.usd_import( + filepath=libpath, + relative_path=relative + ) + + else: + # Alembic + bpy.ops.wm.alembic_import( + filepath=libpath, + relative_path=relative + ) imported = lib.get_selection() From 885429c1797e16030885bb6af29cb80a90b1f842 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 15 Nov 2023 22:08:08 +0100 Subject: [PATCH 50/56] Hound --- openpype/hosts/maya/plugins/load/show_usdview.py | 2 +- tests/unit/openpype/lib/test_usdlib.py | 11 +++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/show_usdview.py b/openpype/hosts/maya/plugins/load/show_usdview.py index 1e8c6752439..ff5f0df5dc4 100644 --- a/openpype/hosts/maya/plugins/load/show_usdview.py +++ b/openpype/hosts/maya/plugins/load/show_usdview.py @@ -35,7 +35,7 @@ class ShowInUsdview(load.LoaderPlugin): def load(self, context, name=None, namespace=None, data=None): try: - import OpenGL # noqa: F403 + import OpenGL # noqa except ImportError: self.log.error( "usdview for maya requires to have `OpenGL` python library " diff --git a/tests/unit/openpype/lib/test_usdlib.py b/tests/unit/openpype/lib/test_usdlib.py index 8e46b24d93b..69640ba97c9 100644 --- a/tests/unit/openpype/lib/test_usdlib.py +++ b/tests/unit/openpype/lib/test_usdlib.py @@ -119,7 +119,8 @@ def get_references(layer, prim_path, variant_set, variant): return [ref.assetPath for ref in references] prim_path = "/root" - layer = usdlib.add_variant_references_to_layer(variants=[ + layer = usdlib.add_variant_references_to_layer( + variants=[ ("main", "./main.usd"), ("twist", "./twist.usd"), ("tall", "./tall.usd"), @@ -128,13 +129,14 @@ def get_references(layer, prim_path, variant_set, variant): variant_prim=prim_path ) assert get_references(layer, prim_path, "model", "main") == ["./main.usd"] - assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] + assert get_references(layer, prim_path, "model", "twist") == ["./twist.usd"] # noqa: E501 assert get_references(layer, prim_path, "model", "tall") == ["./tall.usd"] # Allow recalling with a layer provided to operate on that layer # instead; adding more variant definitions, keeping existing definitions # as well - layer = usdlib.add_variant_references_to_layer(variants=[ + layer = usdlib.add_variant_references_to_layer( + variants=[ ("main", "./look_main.usd"), ("twist", "./look_twist.usd"), ("tall", "./look_tall.usd"), @@ -186,7 +188,8 @@ def get_references(layer, prim_path, variant_set, variant): assert get_references(layer, prim_path, "look", "short") == ["./look_short_v02.usd"] # noqa: E501 # Applying variants to another prim should not affect first prim - layer = usdlib.add_variant_references_to_layer(variants=[ + layer = usdlib.add_variant_references_to_layer( + variants=[ ("short", "./look_short.usd"), ], variantset="look", From 9b56fe4699ab41fc4f89cef6fbc21d676c8dee96 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 15 Nov 2023 22:08:37 +0100 Subject: [PATCH 51/56] Hound --- openpype/plugins/publish/extract_usd_layer_contributions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py index 32dfa8fdf32..13674c2f99f 100644 --- a/openpype/plugins/publish/extract_usd_layer_contributions.py +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -192,7 +192,7 @@ def process(self, instance): return instance.data["subsetGroup"] = ( - instance.data.get("subsetGroup") or "USD Layer" + instance.data.get("subsetGroup") or "USD Layer" ) # Allow formatting in variant set name and variant name From b2a0014362a8c2c17273ca75c549da250c8a1df0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 15 Nov 2023 22:09:15 +0100 Subject: [PATCH 52/56] Remove todo that has been implemented - even if rudimentary --- openpype/plugins/publish/extract_usd_layer_contributions.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py index 13674c2f99f..831d5d0453d 100644 --- a/openpype/plugins/publish/extract_usd_layer_contributions.py +++ b/openpype/plugins/publish/extract_usd_layer_contributions.py @@ -179,12 +179,6 @@ class CollectUSDLayerContributions(pyblish.api.InstancePlugin, label = "Collect USD Layer Contributions (Asset/Shot)" families = ["usd"] - # TODO: Currently asset and shot contributions both behave as creating - # an "asset" with payload to put the layers in; however, shot-based - # contributions don't need the payload nor the asset structure. We should - # separate it so both can behave independently but still share most of - # the code for easy maintenance - def process(self, instance): attr_values = self.get_attr_values_from_data(instance.data) From 4fc3bf4337a12492f749dcec74db2be73a2cc64e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 15 Nov 2023 22:13:29 +0100 Subject: [PATCH 53/56] Hound --- tests/unit/openpype/lib/test_usdlib.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/unit/openpype/lib/test_usdlib.py b/tests/unit/openpype/lib/test_usdlib.py index 69640ba97c9..a21f498f6e4 100644 --- a/tests/unit/openpype/lib/test_usdlib.py +++ b/tests/unit/openpype/lib/test_usdlib.py @@ -154,7 +154,8 @@ def get_references(layer, prim_path, variant_set, variant): # Allow with a layer provided to operate on that layer # instead; adding more variant names to an existing variant set - layer = usdlib.add_variant_references_to_layer(variants=[ + layer = usdlib.add_variant_references_to_layer( + variants=[ ("short", "./look_short.usd"), ], variantset="look", @@ -171,7 +172,8 @@ def get_references(layer, prim_path, variant_set, variant): assert get_references(layer, prim_path, "look", "short") == ["./look_short.usd"] # noqa: E501 # Allow updating an existing variant with a new file - layer = usdlib.add_variant_references_to_layer(variants=[ + layer = usdlib.add_variant_references_to_layer( + variants=[ ("short", "./look_short_v02.usd"), ], variantset="look", From 856e654474e23921c8c594f7222e45378c5949a9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Thu, 16 Nov 2023 00:22:47 +0100 Subject: [PATCH 54/56] Do not hide unshareable proxies --- openpype/hosts/maya/plugins/create/create_maya_usd_layer.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py index 3df8cece189..cfeae9c8453 100644 --- a/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py +++ 
+++ b/openpype/hosts/maya/plugins/create/create_maya_usd_layer.py
@@ -23,10 +23,6 @@ def get_instance_attr_defs(self):
         # scene and the Sdf.Layer stack of the Usd.Stage per proxy.
         items = []
         for proxy in cmds.ls(type="mayaUsdProxyShape", long=True):
-            # Ignore unsharable proxies
-            if not cmds.getAttr(proxy + ".shareStage"):
-                continue
-
             stage = mayaUsd.ufe.getStage("|world{}".format(proxy))
             if not stage:
                 continue

From 65da9c6062232091c78a21293ff5e49b2b159274 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Fri, 17 Nov 2023 22:11:39 +0100
Subject: [PATCH 55/56] Refactor filenames to be easier to find

---
 ...rence_into_maya_usd.py => load_maya_usd_add_maya_reference.py} | 0
 .../{load_into_maya_usd.py => load_maya_usd_add_reference.py}     | 0
 2 files changed, 0 insertions(+), 0 deletions(-)
 rename openpype/hosts/maya/plugins/load/{load_reference_into_maya_usd.py => load_maya_usd_add_maya_reference.py} (100%)
 rename openpype/hosts/maya/plugins/load/{load_into_maya_usd.py => load_maya_usd_add_reference.py} (100%)

diff --git a/openpype/hosts/maya/plugins/load/load_reference_into_maya_usd.py b/openpype/hosts/maya/plugins/load/load_maya_usd_add_maya_reference.py
similarity index 100%
rename from openpype/hosts/maya/plugins/load/load_reference_into_maya_usd.py
rename to openpype/hosts/maya/plugins/load/load_maya_usd_add_maya_reference.py
diff --git a/openpype/hosts/maya/plugins/load/load_into_maya_usd.py b/openpype/hosts/maya/plugins/load/load_maya_usd_add_reference.py
similarity index 100%
rename from openpype/hosts/maya/plugins/load/load_into_maya_usd.py
rename to openpype/hosts/maya/plugins/load/load_maya_usd_add_reference.py

From 63851260a0f2b071e160c0593aad8903c999e127 Mon Sep 17 00:00:00 2001
From: Roy Nieterau
Date: Sun, 19 Nov 2023 22:14:01 +0100
Subject: [PATCH 56/56] Add tooltips

---
 .../extract_usd_layer_contributions.py        | 48 +++++++++++++++++--
 1 file changed, 43 insertions(+), 5 deletions(-)

diff --git a/openpype/plugins/publish/extract_usd_layer_contributions.py b/openpype/plugins/publish/extract_usd_layer_contributions.py
index 831d5d0453d..3513a2381d3 100644
--- a/openpype/plugins/publish/extract_usd_layer_contributions.py
+++ b/openpype/plugins/publish/extract_usd_layer_contributions.py
@@ -324,16 +324,34 @@ def get_attribute_defs(cls):
             UILabelDef(label="USD Contribution"),
             BoolDef("contribution_enabled",
                     label="Enable",
+                    tooltip=(
+                        "When enabled this publish instance will be added "
+                        "into a department layer of a target product, "
+                        "usually an asset or shot.\n"
+                        "When disabled this publish instance will not be "
+                        "added into another USD file and remain as is.\n"
+                        "In both cases the USD data itself is free to have "
+                        "references and sublayers of its own."
+                    ),
                     default=True),
             TextDef("contribution_target_product",
-                    label="Target subset",
+                    label="Target product",
+                    tooltip=(
+                        "The target product the contribution should be added "
+                        "to. Usually this is the asset or shot product.\nThe "
+                        "department layer will be added to this product, and "
+                        "the contribution itself will be added to the "
+                        "department layer."
+                    ),
                     default="usdAsset"),
             EnumDef("contribution_target_product_init",
                     label="Initialize as",
                     tooltip=(
-                        "The target products USD file will be initialized "
+                        "The target product's USD file will be initialized "
                         "based on this type if there's no existing USD of "
-                        "that product yet."
+                        "that product yet.\nIf there's already an existing "
+                        "product with the name of the 'target product' this "
+                        "setting will do nothing."
                     ),
                     items=["asset", "shot"],
                     default="asset"),
@@ -341,12 +359,24 @@ def get_attribute_defs(cls):
             # Asset layer, e.g. model.usd, look.usd, rig.usd
             EnumDef("contribution_layer",
                     label="Add to department layer",
-                    tooltip="The layer the contribution should be made to in "
-                            "the usd asset.",
+                    tooltip=(
+                        "The layer the contribution should be made to in the "
+                        "target product.\nThe layers have their own "
+                        "predefined ordering.\nA higher order (further down "
+                        "the list) will contribute as a stronger opinion."
+                    ),
                     items=list(LAYER_ORDERS.keys()),
                     default="model"),
             BoolDef("contribution_apply_as_variant",
                     label="Add as variant",
+                    tooltip=(
+                        "When enabled the contribution to the department "
+                        "layer will be added as a variant where the variant "
+                        "on the default root prim will be added as a "
+                        "reference.\nWhen disabled the contribution will be "
+                        "appended as a sublayer to the department layer "
+                        "instead."
+                    ),
                     default=True),
             TextDef("contribution_variant_set_name",
                     label="Variant Set Name",
@@ -356,6 +386,14 @@ def get_attribute_defs(cls):
                     default="{variant}"),
             BoolDef("contribution_variant_is_default",
                     label="Set as default variant selection",
+                    tooltip=(
+                        "Whether to set this instance's variant name as the "
+                        "default selected variant name for the variant set.\n"
+                        "It is always expected to be enabled for only one "
+                        "variant name in the variant set.\n"
+                        "The behavior is unpredictable if multiple instances "
+                        "for the same variant set have this enabled."
+                    ),
                     default=False),
             UISeparatorDef("usd_container_settings3"),
         ]
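
The "Add as variant" option described in the tooltips above boils down to authoring the contribution as a reference inside a variant on the default root prim of the department layer, while the disabled case appends the contribution as a sublayer instead. Below is a minimal sketch of the variant case using the pxr Python API; the "look" variant set name, the /root prim and the ./look_main.usd path are illustrative assumptions, and this is not the pipeline's own usdlib.add_variant_references_to_layer() implementation.

    from pxr import Usd

    # Sketch only: author a "look" variant on /root that references a
    # published layer. Opinions written inside the variant edit context
    # only contribute when that variant is the selected one.
    stage = Usd.Stage.CreateInMemory()
    prim = stage.DefinePrim("/root")
    stage.SetDefaultPrim(prim)

    variant_set = prim.GetVariantSets().AddVariantSet("look")
    variant_set.AddVariant("main")
    variant_set.SetVariantSelection("main")  # acts as the default variant selection
    with variant_set.GetVariantEditContext():
        prim.GetReferences().AddReference("./look_main.usd")

    print(stage.GetRootLayer().ExportToString())

Running this prints a layer whose "look" variant set holds the reference inside its "main" variant, which is the same structure the unit tests earlier in this series assert on via get_references().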