diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index a02a0ce86e..775f7bf9e5 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -6,13 +6,11 @@ """ import json.decoder from abc import abstractmethod -import platform import getpass -from functools import partial -from collections import OrderedDict +import os +import datetime + -import six -import attr import requests import pyblish.api @@ -24,8 +22,7 @@ from ayon_core.pipeline.publish.lib import ( replace_with_published_scene_path ) - -from .lib import get_ayon_render_job_envs, get_instance_job_envs +from ayon_core.lib import is_in_tests JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError) @@ -72,349 +69,11 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) -class DeadlineKeyValueVar(dict): - """ - - Serializes dictionary key values as "{key}={value}" like Deadline uses - for EnvironmentKeyValue. - - As an example: - EnvironmentKeyValue0="A_KEY=VALUE_A" - EnvironmentKeyValue1="OTHER_KEY=VALUE_B" - - The keys are serialized in alphabetical order (sorted). 
- - Example: - >>> var = DeadlineKeyValueVar("EnvironmentKeyValue") - >>> var["my_var"] = "hello" - >>> var["my_other_var"] = "hello2" - >>> var.serialize() - - - """ - def __init__(self, key): - super(DeadlineKeyValueVar, self).__init__() - self.__key = key - - def serialize(self): - key = self.__key - - # Allow custom location for index in serialized string - if "{}" not in key: - key = key + "{}" - - return { - key.format(index): "{}={}".format(var_key, var_value) - for index, (var_key, var_value) in enumerate(sorted(self.items())) - } - - -class DeadlineIndexedVar(dict): - """ - - Allows to set and query values by integer indices: - Query: var[1] or var.get(1) - Set: var[1] = "my_value" - Append: var += "value" - - Note: Iterating the instance is not guarantueed to be the order of the - indices. To do so iterate with `sorted()` - - """ - def __init__(self, key): - super(DeadlineIndexedVar, self).__init__() - self.__key = key - - def serialize(self): - key = self.__key - - # Allow custom location for index in serialized string - if "{}" not in key: - key = key + "{}" - - return { - key.format(index): value for index, value in sorted(self.items()) - } - - def next_available_index(self): - # Add as first unused entry - i = 0 - while i in self.keys(): - i += 1 - return i - - def update(self, data): - # Force the integer key check - for key, value in data.items(): - self.__setitem__(key, value) - - def __iadd__(self, other): - index = self.next_available_index() - self[index] = other - return self - - def __setitem__(self, key, value): - if not isinstance(key, int): - raise TypeError("Key must be an integer: {}".format(key)) - - if key < 0: - raise ValueError("Negative index can't be set: {}".format(key)) - dict.__setitem__(self, key, value) - - -@attr.s -class DeadlineJobInfo(object): - """Mapping of all Deadline *JobInfo* attributes. - - This contains all JobInfo attributes plus their default values. 
- Those attributes set to `None` shouldn't be posted to Deadline as - the only required one is `Plugin`. Their default values used by Deadline - are stated in - comments. - - ..seealso: - https://docs.thinkboxsoftware.com/products/deadline/10.1/1_User%20Manual/manual/manual-submission.html - - """ - - # Required - # ---------------------------------------------- - Plugin = attr.ib() - - # General - Frames = attr.ib(default=None) # default: 0 - Name = attr.ib(default="Untitled") - Comment = attr.ib(default=None) # default: empty - Department = attr.ib(default=None) # default: empty - BatchName = attr.ib(default=None) # default: empty - UserName = attr.ib(default=getpass.getuser()) - MachineName = attr.ib(default=platform.node()) - Pool = attr.ib(default=None) # default: "none" - SecondaryPool = attr.ib(default=None) - Group = attr.ib(default=None) # default: "none" - Priority = attr.ib(default=50) - ChunkSize = attr.ib(default=1) - ConcurrentTasks = attr.ib(default=1) - LimitConcurrentTasksToNumberOfCpus = attr.ib( - default=None) # default: "true" - OnJobComplete = attr.ib(default="Nothing") - SynchronizeAllAuxiliaryFiles = attr.ib(default=None) # default: false - ForceReloadPlugin = attr.ib(default=None) # default: false - Sequential = attr.ib(default=None) # default: false - SuppressEvents = attr.ib(default=None) # default: false - Protected = attr.ib(default=None) # default: false - InitialStatus = attr.ib(default="Active") - NetworkRoot = attr.ib(default=None) - - # Timeouts - # ---------------------------------------------- - MinRenderTimeSeconds = attr.ib(default=None) # Default: 0 - MinRenderTimeMinutes = attr.ib(default=None) # Default: 0 - TaskTimeoutSeconds = attr.ib(default=None) # Default: 0 - TaskTimeoutMinutes = attr.ib(default=None) # Default: 0 - StartJobTimeoutSeconds = attr.ib(default=None) # Default: 0 - StartJobTimeoutMinutes = attr.ib(default=None) # Default: 0 - InitializePluginTimeoutSeconds = attr.ib(default=None) # Default: 0 - # can be one 
of - OnTaskTimeout = attr.ib(default=None) # Default: Error - EnableTimeoutsForScriptTasks = attr.ib(default=None) # Default: false - EnableFrameTimeouts = attr.ib(default=None) # Default: false - EnableAutoTimeout = attr.ib(default=None) # Default: false - - # Interruptible - # ---------------------------------------------- - Interruptible = attr.ib(default=None) # Default: false - InterruptiblePercentage = attr.ib(default=None) - RemTimeThreshold = attr.ib(default=None) - - # Notifications - # ---------------------------------------------- - # can be comma separated list of users - NotificationTargets = attr.ib(default=None) # Default: blank - ClearNotificationTargets = attr.ib(default=None) # Default: false - # A comma separated list of additional email addresses - NotificationEmails = attr.ib(default=None) # Default: blank - OverrideNotificationMethod = attr.ib(default=None) # Default: false - EmailNotification = attr.ib(default=None) # Default: false - PopupNotification = attr.ib(default=None) # Default: false - # String with `[EOL]` used for end of line - NotificationNote = attr.ib(default=None) # Default: blank - - # Machine Limit - # ---------------------------------------------- - MachineLimit = attr.ib(default=None) # Default: 0 - MachineLimitProgress = attr.ib(default=None) # Default: -1.0 - Whitelist = attr.ib(default=None) # Default: blank - Blacklist = attr.ib(default=None) # Default: blank - - # Limits - # ---------------------------------------------- - # comma separated list of limit groups - LimitGroups = attr.ib(default=None) # Default: blank - - # Dependencies - # ---------------------------------------------- - # comma separated list of job IDs - JobDependencies = attr.ib(default=None) # Default: blank - JobDependencyPercentage = attr.ib(default=None) # Default: -1 - IsFrameDependent = attr.ib(default=None) # Default: false - FrameDependencyOffsetStart = attr.ib(default=None) # Default: 0 - FrameDependencyOffsetEnd = attr.ib(default=None) # 
Default: 0 - ResumeOnCompleteDependencies = attr.ib(default=None) # Default: true - ResumeOnDeletedDependencies = attr.ib(default=None) # Default: false - ResumeOnFailedDependencies = attr.ib(default=None) # Default: false - # comma separated list of asset paths - RequiredAssets = attr.ib(default=None) # Default: blank - # comma separated list of script paths - ScriptDependencies = attr.ib(default=None) # Default: blank - - # Failure Detection - # ---------------------------------------------- - OverrideJobFailureDetection = attr.ib(default=None) # Default: false - FailureDetectionJobErrors = attr.ib(default=None) # 0..x - OverrideTaskFailureDetection = attr.ib(default=None) # Default: false - FailureDetectionTaskErrors = attr.ib(default=None) # 0..x - IgnoreBadJobDetection = attr.ib(default=None) # Default: false - SendJobErrorWarning = attr.ib(default=None) # Default: false - - # Cleanup - # ---------------------------------------------- - DeleteOnComplete = attr.ib(default=None) # Default: false - ArchiveOnComplete = attr.ib(default=None) # Default: false - OverrideAutoJobCleanup = attr.ib(default=None) # Default: false - OverrideJobCleanup = attr.ib(default=None) - JobCleanupDays = attr.ib(default=None) # Default: false - # - OverrideJobCleanupType = attr.ib(default=None) - - # Scheduling - # ---------------------------------------------- - # - ScheduledType = attr.ib(default=None) # Default: None - #
- ScheduledStartDateTime = attr.ib(default=None) - ScheduledDays = attr.ib(default=None) # Default: 1 - # - JobDelay = attr.ib(default=None) - # Time= - Scheduled = attr.ib(default=None) - - # Scripts - # ---------------------------------------------- - # all accept path to script - PreJobScript = attr.ib(default=None) # Default: blank - PostJobScript = attr.ib(default=None) # Default: blank - PreTaskScript = attr.ib(default=None) # Default: blank - PostTaskScript = attr.ib(default=None) # Default: blank - - # Event Opt-Ins - # ---------------------------------------------- - # comma separated list of plugins - EventOptIns = attr.ib(default=None) # Default: blank - - # Environment - # ---------------------------------------------- - EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, - "EnvironmentKeyValue")) - - IncludeEnvironment = attr.ib(default=None) # Default: false - UseJobEnvironmentOnly = attr.ib(default=None) # Default: false - CustomPluginDirectory = attr.ib(default=None) # Default: blank - - # Job Extra Info - # ---------------------------------------------- - ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) - ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, - "ExtraInfoKeyValue")) - - # Task Extra Info Names - # ---------------------------------------------- - OverrideTaskExtraInfoNames = attr.ib(default=None) # Default: false - TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar, - "TaskExtraInfoName")) - - # Output - # ---------------------------------------------- - OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputFilename")) - OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputFilename{}Tile")) - OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputDirectory")) - - # Asset Dependency - # ---------------------------------------------- - AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar, - "AssetDependency")) - - # 
Tile Job - # ---------------------------------------------- - TileJob = attr.ib(default=None) # Default: false - TileJobFrame = attr.ib(default=None) # Default: 0 - TileJobTilesInX = attr.ib(default=None) # Default: 0 - TileJobTilesInY = attr.ib(default=None) # Default: 0 - TileJobTileCount = attr.ib(default=None) # Default: 0 - - # Maintenance Job - # ---------------------------------------------- - MaintenanceJob = attr.ib(default=None) # Default: false - MaintenanceJobStartFrame = attr.ib(default=None) # Default: 0 - MaintenanceJobEndFrame = attr.ib(default=None) # Default: 0 - - def serialize(self): - """Return all data serialized as dictionary. - - Returns: - OrderedDict: all serialized data. - - """ - def filter_data(a, v): - if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): - return False - if v is None: - return False - return True - - serialized = attr.asdict( - self, dict_factory=OrderedDict, filter=filter_data) - - # Custom serialize these attributes - for attribute in [ - self.EnvironmentKeyValue, - self.ExtraInfo, - self.ExtraInfoKeyValue, - self.TaskExtraInfoName, - self.OutputFilename, - self.OutputFilenameTile, - self.OutputDirectory, - self.AssetDependency - ]: - serialized.update(attribute.serialize()) - - return serialized - - def update(self, data): - """Update instance with data dict""" - for key, value in data.items(): - setattr(self, key, value) - - def add_render_job_env_var(self): - """Add required env vars for valid render job submission.""" - for key, value in get_ayon_render_job_envs().items(): - self.EnvironmentKeyValue[key] = value - - def add_instance_job_env_vars(self, instance): - """Add all job environments as specified on the instance and context - - Any instance `job_env` vars will override the context `job_env` vars. 
- """ - for key, value in get_instance_job_envs(instance).items(): - self.EnvironmentKeyValue[key] = value - - -@six.add_metaclass(AbstractMetaInstancePlugin) -class AbstractSubmitDeadline(pyblish.api.InstancePlugin, - AYONPyblishPluginMixin): +class AbstractSubmitDeadline( + pyblish.api.InstancePlugin, + AYONPyblishPluginMixin, + metaclass=AbstractMetaInstancePlugin +): """Class abstracting access to Deadline.""" label = "Submit to Deadline" @@ -458,7 +117,8 @@ def process(self, instance): self.scene_path = file_path self.log.info("Using {} for render/export.".format(file_path)) - self.job_info = self.get_job_info() + job_info = self.get_generic_job_info(instance) + self.job_info = self.get_job_info(job_info) self.plugin_info = self.get_plugin_info() self.aux_files = self.get_aux_files() @@ -495,6 +155,34 @@ def process_submission(self): verify = self._instance.data["deadline"]["verify"] return self.submit(payload, auth, verify) + def get_generic_job_info(self, instance): + context = instance.context + + job_info = instance.data["deadline"]["job_info"] + + # Always use the original work file name for the Job name even when + # rendering is done from the published Work File. The original work + # file name is clearer because it can also have subversion strings, + # etc. which are stripped for the published file. 
batch_name = os.path.basename(context.data["currentFile"]) + + if is_in_tests(): + batch_name += datetime.datetime.now().strftime("%d%m%Y%H%M%S") + + job_info.Name = "%s - %s" % (batch_name, instance.name) + job_info.BatchName = batch_name + job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # TODO clean deadlineUser + + first_expected_file = instance.data["expectedFiles"][0] + job_info.OutputFilename += os.path.basename(first_expected_file) + job_info.OutputDirectory += os.path.dirname(first_expected_file) + + # Set job environment variables + job_info.add_instance_job_env_vars(instance) + job_info.add_render_job_env_var() + + return job_info + @abstractmethod def get_job_info(self): """Return filled Deadline JobInfo. diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 75ad830a77..7384ac6538 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,4 +1,8 @@ import os +from dataclasses import dataclass, field, asdict +from functools import partial +from typing import Optional, Dict, Any +import json # describes list of product typed used for plugin filtering for farm publishing FARM_FAMILIES = [ @@ -43,3 +47,338 @@ def get_instance_job_envs(instance) -> "dict[str, str]": env = dict(sorted(env.items())) return env + + +class DeadlineKeyValueVar(dict): + """ + + Serializes dictionary key values as "{key}={value}" like Deadline uses + for EnvironmentKeyValue. + + As an example: + EnvironmentKeyValue0="A_KEY=VALUE_A" + EnvironmentKeyValue1="OTHER_KEY=VALUE_B" + + The keys are serialized in alphabetical order (sorted). 
+ + Example: + >>> var = DeadlineKeyValueVar("EnvironmentKeyValue") + >>> var["my_var"] = "hello" + >>> var["my_other_var"] = "hello2" + >>> var.serialize() + + + """ + def __init__(self, key): + super(DeadlineKeyValueVar, self).__init__() + self.__key = key + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): "{}={}".format(var_key, var_value) + for index, (var_key, var_value) in enumerate(sorted(self.items())) + } + + +class DeadlineIndexedVar(dict): + """ + + Allows to set and query values by integer indices: + Query: var[1] or var.get(1) + Set: var[1] = "my_value" + Append: var += "value" + + Note: Iterating the instance is not guarantueed to be the order of the + indices. To do so iterate with `sorted()` + + """ + def __init__(self, key): + super(DeadlineIndexedVar, self).__init__() + self.__key = key + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): value for index, value in sorted(self.items()) + } + + def next_available_index(self): + # Add as first unused entry + i = 0 + while i in self.keys(): + i += 1 + return i + + def update(self, data): + # Force the integer key check + for key, value in data.items(): + self.__setitem__(key, value) + + def __iadd__(self, other): + index = self.next_available_index() + self[index] = other + return self + + def __setitem__(self, key, value): + if not isinstance(key, int): + raise TypeError("Key must be an integer: {}".format(key)) + + if key < 0: + raise ValueError("Negative index can't be set: {}".format(key)) + dict.__setitem__(self, key, value) + + +@dataclass +class DeadlineJobInfo: + """Mapping of all Deadline JobInfo attributes. + + This contains all JobInfo attributes plus their default values. 
+ Those attributes set to `None` shouldn't be posted to Deadline as + the only required one is `Plugin`. + """ + + # Required + Plugin: str = field(default="Untitled") + + # General + Name: str = field(default="Untitled") + Frames: Optional[int] = field(default=None) # default: 0 + Comment: Optional[str] = field(default=None) # default: empty + Department: Optional[str] = field(default=None) # default: empty + BatchName: Optional[str] = field(default=None) # default: empty + UserName: str = field(default=None) + MachineName: str = field(default=None) + Pool: Optional[str] = field(default=None) # default: "none" + SecondaryPool: Optional[str] = field(default=None) + Group: Optional[str] = field(default=None) # default: "none" + Priority: int = field(default=None) + ChunkSize: int = field(default=None) + ConcurrentTasks: int = field(default=None) + LimitConcurrentTasksToNumberOfCpus: Optional[bool] = field( + default=None) # default: "true" + OnJobComplete: str = field(default=None) + SynchronizeAllAuxiliaryFiles: Optional[bool] = field( + default=None) # default: false + ForceReloadPlugin: Optional[bool] = field(default=None) # default: false + Sequential: Optional[bool] = field(default=None) # default: false + SuppressEvents: Optional[bool] = field(default=None) # default: false + Protected: Optional[bool] = field(default=None) # default: false + InitialStatus: str = field(default="Active") + NetworkRoot: Optional[str] = field(default=None) + + # Timeouts + MinRenderTimeSeconds: Optional[int] = field(default=None) # Default: 0 + MinRenderTimeMinutes: Optional[int] = field(default=None) # Default: 0 + TaskTimeoutSeconds: Optional[int] = field(default=None) # Default: 0 + TaskTimeoutMinutes: Optional[int] = field(default=None) # Default: 0 + StartJobTimeoutSeconds: Optional[int] = field(default=None) # Default: 0 + StartJobTimeoutMinutes: Optional[int] = field(default=None) # Default: 0 + InitializePluginTimeoutSeconds: Optional[int] = field( + default=None) # 
Default: 0 + OnTaskTimeout: Optional[str] = field(default=None) # Default: Error + EnableTimeoutsForScriptTasks: Optional[bool] = field( + default=None) # Default: false + EnableFrameTimeouts: Optional[bool] = field(default=None) # Default: false + EnableAutoTimeout: Optional[bool] = field(default=None) # Default: false + + # Interruptible + Interruptible: Optional[bool] = field(default=None) # Default: false + InterruptiblePercentage: Optional[int] = field(default=None) + RemTimeThreshold: Optional[int] = field(default=None) + + # Notifications + NotificationTargets: Optional[str] = field( + default=None) # Default: blank (comma-separated list of users) + ClearNotificationTargets: Optional[bool] = field( + default=None) # Default: false + NotificationEmails: Optional[str] = field( + default=None) # Default: blank (comma-separated list of email addresses) + OverrideNotificationMethod: Optional[bool] = field( + default=None) # Default: false + EmailNotification: Optional[bool] = field(default=None) # Default: false + PopupNotification: Optional[bool] = field(default=None) # Default: false + NotificationNote: Optional[str] = field(default=None) # Default: blank + + # Machine Limit + MachineLimit: Optional[int] = field(default=None) # Default: 0 + MachineLimitProgress: Optional[float] = field(default=None) # Default -1.0 + Whitelist: Optional[str] = field( + default=None) # Default blank (comma-separated list) + Blacklist: Optional[str] = field( + default=None) # Default blank (comma-separated list) + + # Limits + LimitGroups: Optional[str] = field(default=None) # Default: blank + + # Dependencies + JobDependencies: Optional[str] = field(default=None) # Default: blank + JobDependencyPercentage: Optional[int] = field(default=None) # Default: -1 + IsFrameDependent: Optional[bool] = field(default=None) # Default: false + FrameDependencyOffsetStart: Optional[int] = field(default=None) # Default: 0 + FrameDependencyOffsetEnd: Optional[int] = field(default=None) # Default: 
0 + ResumeOnCompleteDependencies: Optional[bool] = field( + default=True) # Default: true + ResumeOnDeletedDependencies: Optional[bool] = field( + default=False) # Default: false + ResumeOnFailedDependencies: Optional[bool] = field( + default=False) # Default: false + RequiredAssets: Optional[str] = field( + default=None) # Default: blank (comma-separated list) + ScriptDependencies: Optional[str] = field( + default=None) # Default: blank (comma-separated list) + + # Failure Detection + OverrideJobFailureDetection: Optional[bool] = field( + default=False) # Default: false + FailureDetectionJobErrors: Optional[int] = field(default=None) # 0..x + OverrideTaskFailureDetection: Optional[bool] = field( + default=False) # Default: false + FailureDetectionTaskErrors: Optional[int] = field(default=None) # 0..x + IgnoreBadJobDetection: Optional[bool] = field( + default=False) # Default: false + SendJobErrorWarning: Optional[bool] = field( + default=False) # Default: false + + # Cleanup + DeleteOnComplete: Optional[bool] = field(default=False) # Default: false + ArchiveOnComplete: Optional[bool] = field(default=False) # Default: false + OverrideAutoJobCleanup: Optional[bool] = field( + default=False) # Default: false + OverrideJobCleanup: Optional[bool] = field(default=None) + JobCleanupDays: Optional[int] = field( + default=None) # Default: false (not clear) + OverrideJobCleanupType: Optional[str] = field(default=None) + + # Scheduling + ScheduledType: Optional[str] = field( + default=None) # Default: None () + ScheduledStartDateTime: Optional[str] = field( + default=None) #
+ ScheduledDays: Optional[int] = field(default=1) # Default: 1 + JobDelay: Optional[str] = field(default=None) # + Scheduled: Optional[str] = field( + default=None) # Time= + + # Scripts + PreJobScript: Optional[str] = field(default=None) # Default: blank + PostJobScript: Optional[str] = field(default=None) # Default: blank + PreTaskScript: Optional[str] = field(default=None) # Default: blank + PostTaskScript: Optional[str] = field(default=None) # Default: blank + + # Event Opt-Ins + EventOptIns: Optional[str] = field( + default=None) # Default blank (comma-separated list) + + # Environment + EnvironmentKeyValue: Any = field( + default_factory=partial(DeadlineKeyValueVar, "EnvironmentKeyValue")) + IncludeEnvironment: Optional[bool] = field(default=False) # Default: false + UseJobEnvironmentOnly: Optional[bool] = field(default=False) # Default: false + CustomPluginDirectory: Optional[str] = field(default=None) # Default blank + + # Job Extra Info + ExtraInfo: Any = field( + default_factory=partial(DeadlineIndexedVar, "ExtraInfo")) + ExtraInfoKeyValue: Any = field( + default_factory=partial(DeadlineKeyValueVar, "ExtraInfoKeyValue")) + + OverrideTaskExtraInfoNames: Optional[bool] = field( + default=False) # Default false + + TaskExtraInfoName: Any = field( + default_factory=partial(DeadlineIndexedVar, "TaskExtraInfoName")) + + OutputFilename: Any = field( + default_factory=partial(DeadlineIndexedVar, "OutputFilename")) + OutputFilenameTile: str = field( + default_factory=partial(DeadlineIndexedVar, "OutputFilename{}Tile")) + OutputDirectory: str = field( + default_factory=partial(DeadlineIndexedVar, "OutputDirectory")) + + AssetDependency: str = field( + default_factory=partial(DeadlineIndexedVar, "AssetDependency")) + + TileJob: bool = field(default=False) + TileJobFrame: int = field(default=0) + TileJobTilesInX: int = field(default=0) + TileJobTilesInY: int = field(default=0) + TileJobTileCount: int = field(default=0) + + MaintenanceJob: bool = field(default=False) 
+ MaintenanceJobStartFrame: int = field(default=0) + MaintenanceJobEndFrame: int = field(default=0) + + def serialize(self): + """Return all data serialized as dictionary. + + Returns: + OrderedDict: all serialized data. + + """ + def filter_data(a, v): + if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): + return False + if v is None: + return False + return True + + serialized = asdict(self) + serialized = {k: v for k, v in serialized.items() + if filter_data(k, v)} + + # Custom serialize these attributes + for attribute in [ + self.EnvironmentKeyValue, + self.ExtraInfo, + self.ExtraInfoKeyValue, + self.TaskExtraInfoName, + self.OutputFilename, + self.OutputFilenameTile, + self.OutputDirectory, + self.AssetDependency + ]: + serialized.update(attribute.serialize()) + + return serialized + + @classmethod + def from_dict(cls, data: Dict) -> 'JobInfo': + + def capitalize(key): + words = key.split("_") + return "".join(word.capitalize() for word in words) + + # Filter the dictionary to only include keys that are fields in the dataclass + capitalized = {capitalize(k): v for k, v in data.items()} + filtered_data = {k: v for k, v + in capitalized.items() + if k in cls.__annotations__} + return cls(**filtered_data) + + def add_render_job_env_var(self): + """Add required env vars for valid render job submission.""" + for key, value in get_ayon_render_job_envs().items(): + self.EnvironmentKeyValue[key] = value + + def add_instance_job_env_vars(self, instance): + """Add all job environments as specified on the instance and context + + Any instance `job_env` vars will override the context `job_env` vars. 
+ """ + for key, value in get_instance_job_envs(instance).items(): + self.EnvironmentKeyValue[key] = value + + def to_json(self) -> str: + """Serialize the dataclass instance to a JSON string.""" + return json.dumps(asdict(self)) \ No newline at end of file diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index f646df720a..3966a51588 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -1,8 +1,6 @@ import os import attr -import getpass import pyblish.api -from datetime import datetime from ayon_core.lib import ( env_value_to_bool, @@ -10,7 +8,6 @@ is_in_tests, ) from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s @@ -39,46 +36,16 @@ class AfterEffectsSubmitDeadline( targets = ["local"] settings_category = "deadline" - priority = 50 - chunk_size = 1000000 - group = None - department = None - multiprocess = True - - def get_job_info(self): - dln_job_info = DeadlineJobInfo(Plugin="AfterEffects") - - context = self._instance.context - - batch_name = os.path.basename(self._instance.data["source"]) - if is_in_tests(): - batch_name += datetime.now().strftime("%d%m%Y%H%M%S") - dln_job_info.Name = self._instance.data["name"] - dln_job_info.BatchName = batch_name + def get_job_info(self, dln_job_info): dln_job_info.Plugin = "AfterEffects" - dln_job_info.UserName = context.data.get( - "deadlineUser", getpass.getuser()) - # Deadline requires integers in frame range - frame_range = "{}-{}".format( - int(round(self._instance.data["frameStart"])), - int(round(self._instance.data["frameEnd"]))) - dln_job_info.Frames = frame_range - - dln_job_info.Priority = self.priority - dln_job_info.Pool = self._instance.data.get("primaryPool") - 
dln_job_info.SecondaryPool = self._instance.data.get("secondaryPool") - dln_job_info.Group = self.group - dln_job_info.Department = self.department - dln_job_info.ChunkSize = self.chunk_size - dln_job_info.OutputFilename += \ - os.path.basename(self._instance.data["expectedFiles"][0]) - dln_job_info.OutputDirectory += \ - os.path.dirname(self._instance.data["expectedFiles"][0]) - dln_job_info.JobDelay = "00:00:00" - - # Set job environment variables - dln_job_info.add_instance_job_env_vars(self._instance) - dln_job_info.add_render_job_env_var() + + # already collected explicit values for rendered Frames + if not dln_job_info.Frames: + # Deadline requires integers in frame range + frame_range = "{}-{}".format( + int(round(self._instance.data["frameStart"])), + int(round(self._instance.data["frameEnd"]))) + dln_job_info.Frames = frame_range return dln_job_info diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py new file mode 100644 index 0000000000..6213cd8b95 --- /dev/null +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +from collections import OrderedDict + +import ayon_api +import pyblish.api +from ayon_core.lib import ( + BoolDef, + NumberDef, + TextDef, + EnumDef, + is_in_tests, + UISeparatorDef +) +from ayon_core.pipeline.publish import AYONPyblishPluginMixin +from ayon_core.settings import get_project_settings +from ayon_core.lib.profiles_filtering import filter_profiles + +from ayon_deadline.lib import FARM_FAMILIES, DeadlineJobInfo + + +class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): + """Collect variables that belong to Deadline's JobInfo. 
+ + Variables like: + - department + - priority + - chunk size + + """ + + order = pyblish.api.CollectorOrder + 0.420 + label = "Collect Deadline JobInfo" + + families = FARM_FAMILIES + targets = ["local"] + + def process(self, instance): + attr_values = self._get_jobinfo_defaults(instance) + + attr_values.update(self.get_attr_values_from_data(instance.data)) + # do not set empty strings + attr_values = { + key: value + for key,value in attr_values.items() + if value != "" + } + job_info = DeadlineJobInfo.from_dict(attr_values) + instance.data["deadline"]["job_info"] = job_info + + @classmethod + def get_attr_defs_for_instance(cls, create_context, instance): + if not cls.instance_matches_plugin_families(instance): + return [] + + if not instance["active"]: # TODO origin_data seem not right + return [] + + # will be reworked when CreateContext contains settings and task types + project_name = create_context.project_name + project_settings = get_project_settings(project_name) + + host_name = create_context.host_name + + task_name = instance["task"] + folder_path = instance["folderPath"] + folder_entity = ayon_api.get_folder_by_path(project_name,folder_path) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name) + profiles = ( + project_settings["deadline"]["publish"][cls.__name__]["profiles"]) + + if not profiles: + return [] + + profile = filter_profiles( + profiles, + { + "host_names": host_name, + "task_types": task_entity["taskType"], + "task_names": task_name, + # "product_type": product_type + } + ) + overrides = set(profile["overrides"]) if profile else set() + if not profile or not overrides: + return [] + + defs = [] + + defs.extend([ + UISeparatorDef("deadline_defs_starts"), + ]) + + defs.extend(cls._get_artist_overrides(overrides, profile)) + + # explicit + defs.append( + TextDef( + "frames", + label="Frames", + default="", + tooltip="Explicit frames to be rendered. 
(1, 3-4)"
+            )
+        )
+
+        defs.append(
+            UISeparatorDef("deadline_defs_end")
+        )
+
+        return defs
+
+    @classmethod
+    def _get_artist_overrides(cls, overrides, profile):
+        """Provide list of Defs that could be filled by artist"""
+        # should be matching to extract_jobinfo_overrides_enum
+        override_defs = OrderedDict({
+            "chunk_size": NumberDef(
+                "chunk_size",
+                label="Frames Per Task",
+                default=1,
+                decimals=0,
+                minimum=1,
+                maximum=1000
+            ),
+            "priority": NumberDef(
+                "priority",
+                label="Priority",
+                decimals=0
+            ),
+            "department": TextDef(
+                "department",
+                label="Department",
+                default="",
+            ),
+            "limit_groups": TextDef(
+                "limit_groups",
+                label="Limit Groups",
+                default="",
+                placeholder="machine1,machine2"
+            ),
+            "job_delay": TextDef(
+                "job_delay",
+                label="Delay job (timecode dd:hh:mm:ss)",
+                default=""
+            ),
+        })
+        defs = []
+        # The Arguments that can be modified by the Publisher
+        for key, value in override_defs.items():
+            if key not in overrides:
+                continue
+
+            default_value = profile[key]
+            value.default = default_value
+            defs.append(value)
+
+        return defs
+
+    @classmethod
+    def register_create_context_callbacks(cls, create_context):
+        create_context.add_value_changed_callback(cls.on_values_changed)
+
+    @classmethod
+    def on_values_changed(cls, event):
+        for instance_change in event["changes"]:
+            instance = instance_change["instance"]
+            if not cls.instance_matches_plugin_families(instance):
+                continue
+            value_changes = instance_change["changes"]
+            if "enabled" not in value_changes:
+                continue
+            new_attrs = cls.get_attr_defs_for_instance(
+                event["create_context"], instance
+            )
+            instance.set_publish_plugin_attr_defs(cls.__name__, new_attrs)
+
+    def _get_jobinfo_defaults(self, instance):
+        """Queries project setting for profile with default values
+
+        Args:
+            instance (pyblish.api.Instance): Source instance. 
+
+        Returns:
+            (dict)
+        """
+        attr_values = {}
+
+        context_data = instance.context.data
+        host_name = context_data["hostName"]
+        project_settings = context_data["project_settings"]
+        task_entity = context_data["taskEntity"]
+
+        task_name = task_type = ""
+        if task_entity:
+            task_name = task_entity["name"]
+            task_type = task_entity["taskType"]
+        profiles = (
+            project_settings["deadline"]
+            ["publish"]
+            ["CollectJobInfo"]
+            ["profiles"]
+        )
+        if profiles:
+            profile = filter_profiles(
+                profiles,
+                {
+                    "host_names": host_name,
+                    "task_types": task_type,
+                    "task_names": task_name,
+                    # "product_type": product_type
+                }
+            )
+            if profile:
+                attr_values = profile
+        return attr_values
+
+
+class CollectMayaJobInfo(CollectJobInfo):
+    hosts = [
+        "maya",
+    ]
+    @classmethod
+    def get_attr_defs_for_instance(cls, create_context, instance):
+        defs = super().get_attr_defs_for_instance(create_context, instance)
+
+        defs.extend([
+            NumberDef(
+                "tile_priority",
+                label="Tile Assembler Priority",
+                decimals=0,
+                default=cls.tile_priority
+            ),
+            BoolDef(
+                "strict_error_checking",
+                label="Strict Error Checking",
+                default=cls.strict_error_checking
+            ),
+        ])
+
+        return defs
diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py
index 65e4285d50..b6e430b1fd 100644
--- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py
+++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py
@@ -398,7 +398,8 @@ def process(self, instance):
             self.skip_integration_repre_list,
             do_not_add_review,
             instance.context,
-            self
+            self,
+            instance.data["deadline"]["job_info"].Frames
         )
 
         if "representations" not in instance_skeleton_data.keys():
@@ -465,6 +466,7 @@ def process(self, instance):
 
         # Inject deadline url to instances to query DL for job id for overrides
         for inst in instances:
+            instance.data["deadline"].pop("job_info", None)
             inst["deadline"] = instance.data["deadline"]
 
         # publish job file
@@ 
-479,9 +481,10 @@ def process(self, instance): "intent": instance.context.data.get("intent"), "comment": instance.context.data.get("comment"), "job": render_job or None, + # do not carry over unnecessary DL info with large DeadlineJobInfo "instances": instances - } + } if deadline_publish_job_id: publish_job["deadline_publish_job_id"] = deadline_publish_job_id diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index af341bfedd..9e48fdc120 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -4,9 +4,25 @@ BaseSettingsModel, SettingsField, ensure_unique_names, + task_types_enum, ) +class LimitGroupsSubmodel(BaseSettingsModel): + _layout = "expanded" + name: str = SettingsField(title="Name") + value: list[str] = SettingsField( + default_factory=list, + title="Limit Groups" + ) + + +class EnvSearchReplaceSubmodel(BaseSettingsModel): + _layout = "compact" + name: str = SettingsField(title="Name") + value: str = SettingsField(title="Value") + + class CollectDeadlinePoolsModel(BaseSettingsModel): """Settings Deadline default pools.""" @@ -15,6 +31,78 @@ class CollectDeadlinePoolsModel(BaseSettingsModel): secondary_pool: str = SettingsField(title="Secondary Pool") +def extract_jobinfo_overrides_enum(): + return [ + {"label": "Frames per Task", "value": "chunk_size"}, + {"label": "Priority", "value": "priority"}, + {"label": "Department", "value": "department"}, + {"label": "Limit groups", "value": "limit_groups"}, + {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"}, + {"label": "Group", "value": "group"}, + ] + + +class CollectJobInfoItem(BaseSettingsModel): + _layout = "expanded" + host_names: list[str] = SettingsField( + default_factory=list, + title="Host names" + ) + task_types: list[str] = SettingsField( + default_factory=list, + title="Task types", + enum_resolver=task_types_enum + ) + task_names: list[str] = SettingsField( + default_factory=list, + title="Task names" + ) + + 
chunk_size: int = SettingsField(999, title="Frames per Task")
+    priority: int = SettingsField(50, title="Priority")
+    group: str = SettingsField("", title="Group")
+    limit_groups: list[LimitGroupsSubmodel] = SettingsField(
+        default_factory=list,
+        title="Limit Groups",
+    )
+    concurrent_tasks: int = SettingsField(
+        1, title="Number of concurrent tasks")
+    department: str = SettingsField("", title="Department")
+    use_gpu: bool = SettingsField(False, title="Use GPU")
+    job_delay: str = SettingsField(
+        "", title="Delay job",
+        placeholder="dd:hh:mm:ss"
+    )
+    use_published: bool = SettingsField(True, title="Use Published scene")
+    asset_dependencies: bool = SettingsField(True, title="Use Asset dependencies")
+    workfile_dependency: bool = SettingsField(True, title="Workfile Dependency")
+    multiprocess: bool = SettingsField(False, title="Multiprocess")
+
+    env_allowed_keys: list[str] = SettingsField(
+        default_factory=list,
+        title="Allowed environment keys",
+        description="Pass selected environment variables with current value"
+    )
+    env_search_replace_values: list[EnvSearchReplaceSubmodel] = SettingsField(
+        default_factory=list,
+        title="Search & replace in environment values",
+        description="Replace string values in 'Name' with value from 'Value'"
+    )
+    overrides: list[str] = SettingsField(
+        enum_resolver=extract_jobinfo_overrides_enum,
+        title="Exposed Overrides",
+        description=(
+            "Expose the attribute in this list to the user when publishing." 
+ ) + ) + + +class CollectJobInfoModel(BaseSettingsModel): + _isGroup = True + enabled: bool = SettingsField(False) + profiles: list[CollectJobInfoItem] = SettingsField(default_factory=list) + + class ValidateExpectedFilesModel(BaseSettingsModel): enabled: bool = SettingsField(True, title="Enabled") active: bool = SettingsField(True, title="Active") @@ -56,18 +144,11 @@ class MayaSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") import_reference: bool = SettingsField( title="Use Scene with Imported Reference" ) - asset_dependencies: bool = SettingsField(title="Use Asset dependencies") - priority: int = SettingsField(title="Priority") tile_priority: int = SettingsField(title="Tile Priority") - group: str = SettingsField(title="Group") - limit: list[str] = SettingsField( - default_factory=list, - title="Limit Groups" - ) + tile_assembler_plugin: str = SettingsField( title="Tile Assembler Plugin", enum_resolver=tile_assembler_enum, @@ -99,25 +180,6 @@ class MaxSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True) optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Frame per Task") - group: str = SettingsField("", title="Group Name") - - -class EnvSearchReplaceSubmodel(BaseSettingsModel): - _layout = "compact" - name: str = SettingsField(title="Name") - value: str = SettingsField(title="Value") - - -class LimitGroupsSubmodel(BaseSettingsModel): - _layout = "expanded" - name: str = SettingsField(title="Name") - value: list[str] = SettingsField( - default_factory=list, - title="Limit Groups" - ) def fusion_deadline_plugin_enum(): @@ -142,12 +204,9 @@ class 
FusionSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True, title="Enabled") optional: bool = SettingsField(False, title="Optional") active: bool = SettingsField(True, title="Active") - priority: int = SettingsField(50, title="Priority") - chunk_size: int = SettingsField(10, title="Frame per Task") concurrent_tasks: int = SettingsField( 1, title="Number of concurrent tasks" ) - group: str = SettingsField("", title="Group Name") plugin: str = SettingsField("Fusion", enum_resolver=fusion_deadline_plugin_enum, title="Deadline Plugin") @@ -159,38 +218,6 @@ class NukeSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - concurrent_tasks: int = SettingsField(title="Number of concurrent tasks") - group: str = SettingsField(title="Group") - department: str = SettingsField(title="Department") - use_gpu: bool = SettingsField(title="Use GPU") - workfile_dependency: bool = SettingsField(title="Workfile Dependency") - use_published_workfile: bool = SettingsField( - title="Use Published Workfile" - ) - - env_allowed_keys: list[str] = SettingsField( - default_factory=list, - title="Allowed environment keys" - ) - - env_search_replace_values: list[EnvSearchReplaceSubmodel] = SettingsField( - default_factory=list, - title="Search & replace in environment values", - ) - - limit_groups: list[LimitGroupsSubmodel] = SettingsField( - default_factory=list, - title="Limit Groups", - ) - - @validator( - "limit_groups", - "env_search_replace_values") - def validate_unique_names(cls, value): - ensure_unique_names(value) - return value class HarmonySubmitDeadlineModel(BaseSettingsModel): @@ -199,11 +226,6 @@ class HarmonySubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = 
SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - department: str = SettingsField(title="Department") class HoudiniSubmitDeadlineModel(BaseSettingsModel): @@ -212,25 +234,6 @@ class HoudiniSubmitDeadlineModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - limits: str = SettingsField( - title="Limit Groups", - description=( - "Enter a comma separated list of limits.\n" - "Specifies the limit groups that this job is a member of (default = blank)." - ) - ) - machine_limit: int = SettingsField( - title="Machine Limit", - description=( - "Specifies the maximum number of machines this job can be" - " rendered on at the same time (default = 0, which means" - " unlimited)." - ) - ) - export_priority: int = SettingsField(title="Export Priority") export_chunk_size: int = SettingsField(title="Export Chunk Size") export_group: str = SettingsField(title="Export Group") @@ -257,25 +260,6 @@ class HoudiniCacheSubmitDeadlineModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - limits: str = SettingsField( - title="Limit Groups", - description=( - "Enter a comma separated list of limits.\n" - "Specifies the limit groups that this job is a member of (default = blank)." 
- ) - ) - machine_limit: int = SettingsField( - title="Machine Limit", - description=( - "Specifies the maximum number of machines this job can be" - " rendered on at the same time (default = 0, which means" - " unlimited)." - ) - ) - class AfterEffectsSubmitDeadlineModel(BaseSettingsModel): """After Effects deadline submitter settings.""" @@ -283,12 +267,6 @@ class AfterEffectsSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - department: str = SettingsField(title="Department") - multiprocess: bool = SettingsField(title="Optional") class CelactionSubmitDeadlineModel(BaseSettingsModel): @@ -310,14 +288,6 @@ class BlenderSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True) optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - asset_dependencies: bool = SettingsField(title="Use Asset dependencies") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Frame per Task") - group: str = SettingsField("", title="Group Name") - job_delay: str = SettingsField( - "", title="Delay job (timecode dd:hh:mm:ss)" - ) class AOVFilterSubmodel(BaseSettingsModel): @@ -373,6 +343,9 @@ class PublishPluginsModel(BaseSettingsModel): CollectDeadlinePools: CollectDeadlinePoolsModel = SettingsField( default_factory=CollectDeadlinePoolsModel, title="Default Pools") + CollectJobInfo: CollectJobInfoModel = SettingsField( + default_factory=CollectJobInfoModel, + title="Collect JobInfo") ValidateExpectedFiles: ValidateExpectedFilesModel = SettingsField( 
default_factory=ValidateExpectedFilesModel, title="Validate Expected Files" @@ -440,23 +413,11 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "", - "multiprocess": True }, "BlenderSubmitDeadline": { "enabled": True, "optional": False, "active": True, - "use_published": True, - "asset_dependencies": True, - "priority": 50, - "chunk_size": 10, - "group": "none", - "job_delay": "00:00:00:00" }, "CelactionSubmitDeadline": { "enabled": True, @@ -472,40 +433,21 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 10, - "concurrent_tasks": 1, - "group": "" }, "HarmonySubmitDeadline": { "enabled": True, "optional": False, "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "" }, "HoudiniCacheSubmitDeadline": { "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 999999, - "group": "", - "limits": "", - "machine_limit": 0 }, "HoudiniSubmitDeadline": { "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 1, - "group": "", - "limits": "", - "machine_limit": 0, "export_priority": 50, "export_chunk_size": 10, "export_group": "", @@ -516,24 +458,15 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10, - "group": "none" }, "MayaSubmitDeadline": { "enabled": True, "optional": False, "active": True, "tile_assembler_plugin": "DraftTileAssembler", - "use_published": True, "import_reference": False, - "asset_dependencies": True, "strict_error_checking": True, - "priority": 50, "tile_priority": 50, - "group": "none", - "limit": [], # this used to be empty dict "jobInfo": "", # this used to be empty dict @@ -544,17 +477,6 @@ 
class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 10, - "concurrent_tasks": 1, - "group": "", - "department": "", - "use_gpu": True, - "workfile_dependency": True, - "use_published_workfile": True, - "env_allowed_keys": [], - "env_search_replace_values": [], - "limit_groups": [] }, "ProcessSubmittedCacheJobOnFarm": { "enabled": True,