To avoid leaking knowledge between extractors and integrators, consider extracting to an intermediate directory first, and then having your integrator simply move files from this location to the final destination.
That way, the extractors can be dumb, out-of-pipeline and independent, putting files in /tmp
or the current Maya workspace and whatnot. If the extractor stores the staged location in the context,
then the integrator can be the one to figure out the final destination, without needing to interact with Maya, and move the files from there.
As a second step, in order to update the published Maya scene without affecting the current scene, you can search-and-replace the paths currently used in the scene with the new published paths.
I mocked up an example of this here.
from pyblish import api
class CollectModels(api.ContextPlugin):
    """Gather every node in the current Maya scene into a single instance.

    Produces one catch-all instance named "wholeScene" containing the
    result of an unfiltered `cmds.ls()`.
    """
    order = api.CollectorOrder

    def process(self, context):
        from maya import cmds

        # One instance that represents the entire scene contents.
        whole_scene = context.create_instance("wholeScene")
        whole_scene[:] = cmds.ls()
class ExtractModels(api.InstancePlugin):
    """Export the instance's nodes to a Maya ASCII file in the staging dir.

    Files are written to ``<workspaceDir>/stage`` so that the integrator
    can later move them to their final destination without knowing
    anything about Maya or how extraction happened.
    """
    order = api.ExtractorOrder

    def process(self, instance):
        import os
        from maya import cmds

        stagedir = os.path.join(instance.context.data["workspaceDir"], "stage")

        # FIX: the bare `except OSError: pass` also swallowed real failures
        # (permissions, read-only filesystem). Only ignore the error when
        # the directory actually exists.
        try:
            os.makedirs(stagedir)
        except OSError:
            if not os.path.isdir(stagedir):
                raise

        fname = "{name}.ma".format(**instance.data)
        path = os.path.join(stagedir, fname)

        # Export only the nodes belonging to this instance.
        cmds.select(instance, replace=True)
        cmds.file(path, typ="mayaAscii", force=True, exportSelected=True)
class ExtractResources(api.InstancePlugin):
    """Copy external file-texture resources into the staging directory.

    Records a mapping of source path -> staged path in
    ``instance.data["resources"]`` so the integrator can later
    search-and-replace the paths inside the exported scene files.
    """
    order = api.ExtractorOrder

    def process(self, instance):
        import os
        import shutil
        from maya import cmds

        stagedir = os.path.join(instance.context.data["workspaceDir"], "stage")

        # Extractors at the same order may run in any order; make sure the
        # stage exists before copying into it.
        try:
            os.makedirs(stagedir)
        except OSError:
            if not os.path.isdir(stagedir):
                raise

        resources = instance.data.get("resources", {})
        for resource in cmds.ls(type="file"):
            src = cmds.getAttr(resource + ".fileTextureName")

            # FIX: `assert` is stripped under python -O; validate explicitly.
            if not os.path.isfile(src):
                raise RuntimeError("Resource does not exist: %s" % src)

            dst = os.path.join(stagedir, os.path.basename(src))
            shutil.copy(src, dst)

            # Keep tabs on what got remapped; forward slashes, as Maya
            # scene files use them on every platform.
            resources[src] = dst.replace("\\", "/")

        instance.data["resources"] = resources
class IntegrateAssets(api.InstancePlugin):
    """Move staged files into their final, versioned destination.

    Knows nothing of Maya or extraction: it only rewrites resource paths
    inside staged .ma files (using the mapping produced during extraction)
    and copies the whole stage into a version directory.
    """
    order = api.IntegratorOrder

    def process(self, instance):
        import os
        import shutil

        stagedir = os.path.join(instance.context.data["workspaceDir"], "stage")

        # FIX: .get() without a default returned None when no resources were
        # extracted, crashing on .items() below; default to an empty mapping.
        resources = instance.data.get("resources", {})

        # Log the full remapping once, instead of once per line of every
        # file (the original logged inside the per-line loop).
        for src, dst in resources.items():
            self.log.info("Replacing '%s' with '%s'" % (src, dst))

        # Update .ma files with remapped resources
        for fname in os.listdir(stagedir):
            abspath = os.path.join(stagedir, fname)
            self.log.info("Looking at '%s'.." % abspath)

            if not fname.endswith(".ma"):
                continue

            self.log.info("Updating..")
            new_file = list()
            with open(abspath) as f:
                for line in f:
                    for src, dst in resources.items():
                        line = line.replace(src, dst)
                    new_file.append(line)

            # Update file
            with open(abspath, "w") as f:
                f.write("".join(new_file))

            self.log.info("Updated '%s'." % abspath)

        # Write to final location
        versiondir = os.path.join(
            instance.context.data["workspaceDir"], "v001")

        try:
            # Overwrite the version; remove this once real versioning exists.
            shutil.rmtree(versiondir)
        except OSError:
            pass

        shutil.copytree(stagedir, versiondir)
# Make the plug-ins above available to the Pyblish machinery.
for Plugin in (CollectModels,
               ExtractModels,
               ExtractResources,
               IntegrateAssets):
    api.register_plugin(Plugin)

# Setup scene: a fresh scene with a single file node pointing at a
# texture on disk, so there is something to publish.
from maya import cmds
cmds.file(new=True, force=True)

fnode = cmds.createNode("file")
resource = r"C:\Users\marcus\Desktop\temp.png"
cmds.setAttr(fnode + ".fileTextureName", resource, type="string")
Either include the setup at the bottom, or take any scene containing a file
node, and watch it get published into a final destination, with the published Maya scene automatically updated along the way.
Main points to note.
- Files are extracted into a temporary directory first, called “stage”
- Integrator knows nothing of extraction, only files and final destinations
- Integrator updates paths given a mapping created during extraction.
This would also work for alembic caches and other resources referenced into Maya.
Here’s an example integrator that does this in a production environment.