From d1a883be55a1f281937cba404f589a56b1b27eaa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Oct 2024 18:50:19 +0200 Subject: [PATCH 01/21] New Settings for CollectJobInfoItem These settings should be base of generic JobInfo values for Deadline submission. They should contain variables previously contained in Submit* Settings. Some of them should be exposed to Publisher UI as artist overrides. --- server/settings/publish_plugins.py | 263 ++++++++++------------------- 1 file changed, 93 insertions(+), 170 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index af341bfedd..daf99d09f2 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -4,9 +4,25 @@ BaseSettingsModel, SettingsField, ensure_unique_names, + task_types_enum, ) +class LimitGroupsSubmodel(BaseSettingsModel): + _layout = "expanded" + name: str = SettingsField(title="Name") + value: list[str] = SettingsField( + default_factory=list, + title="Limit Groups" + ) + + +class EnvSearchReplaceSubmodel(BaseSettingsModel): + _layout = "compact" + name: str = SettingsField(title="Name") + value: str = SettingsField(title="Value") + + class CollectDeadlinePoolsModel(BaseSettingsModel): """Settings Deadline default pools.""" @@ -15,6 +31,79 @@ class CollectDeadlinePoolsModel(BaseSettingsModel): secondary_pool: str = SettingsField(title="Secondary Pool") +def extract_jobinfo_overrides_enum(): + return [ + {"label": "Frames per Task", "value": "chunk_size"}, + {"label": "Priority", "value": "priority"}, + {"label": "Group", "value": "group"}, + {"label": "Limit groups", "value": "limit_groups"}, + {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"}, + {"label": "Group", "value": "group"}, + + ] + + +class CollectJobInfoItem(BaseSettingsModel): + _layout = "expanded" + host_names: list[str] = SettingsField( + default_factory=list, + title="Host names" + ) + task_types: list[str] = SettingsField( + default_factory=list, + title="Task types", + enum_resolver=task_types_enum + ) + task_names: list[str] = SettingsField( + default_factory=list, + title="Task names" + ) + + chunk_size: int = SettingsField(999, title="Frames per Task") + priority: int = SettingsField(50, title="Priority") + group: str = SettingsField("", title="Group") + limit_groups: list[LimitGroupsSubmodel] = SettingsField( + default_factory=list, + title="Limit Groups", + ) + concurrent_tasks: int = SettingsField( + 1, title="Number of concurrent tasks") + department: str = SettingsField("", title="Department") + use_gpu: bool = SettingsField("", title="Use GPU") + job_delay: str = SettingsField( + "", title="Delay job", + placeholder="dd:hh:mm:ss" + ) + use_published: bool = SettingsField(True, title="Use Published scene") + asset_dependencies: bool = SettingsField(True, title="Use Asset dependencies") + workfile_dependency: bool = SettingsField(True, title="Workfile Dependency") + multiprocess: bool = SettingsField(False, title="Multiprocess") + + env_allowed_keys: list[str] = SettingsField( + default_factory=list, + title="Allowed environment keys", + description="Pass selected environment variables with current value" + ) + env_search_replace_values: list[EnvSearchReplaceSubmodel] = SettingsField( + default_factory=list, + title="Search & replace in environment values", + description="Replace string values in 'Name' with value from 'Value'" + ) + overrides: list[str] = SettingsField( + enum_resolver=extract_jobinfo_overrides_enum, + title="Exposed Overrides", + 
description=( + "Expose the attribute in this list to the user when publishing." + ) + ) + + +class CollectJobInfoModel(BaseSettingsModel): + _isGroup = True + enabled: bool = SettingsField(False) + profiles: list[CollectJobInfoItem] = SettingsField(default_factory=list) + + class ValidateExpectedFilesModel(BaseSettingsModel): enabled: bool = SettingsField(True, title="Enabled") active: bool = SettingsField(True, title="Active") @@ -56,18 +145,11 @@ class MayaSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") import_reference: bool = SettingsField( title="Use Scene with Imported Reference" ) - asset_dependencies: bool = SettingsField(title="Use Asset dependencies") - priority: int = SettingsField(title="Priority") tile_priority: int = SettingsField(title="Tile Priority") - group: str = SettingsField(title="Group") - limit: list[str] = SettingsField( - default_factory=list, - title="Limit Groups" - ) + tile_assembler_plugin: str = SettingsField( title="Tile Assembler Plugin", enum_resolver=tile_assembler_enum, @@ -99,25 +181,6 @@ class MaxSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True) optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Frame per Task") - group: str = SettingsField("", title="Group Name") - - -class EnvSearchReplaceSubmodel(BaseSettingsModel): - _layout = "compact" - name: str = SettingsField(title="Name") - value: str = SettingsField(title="Value") - - -class LimitGroupsSubmodel(BaseSettingsModel): - _layout = "expanded" - name: str = SettingsField(title="Name") - value: list[str] = SettingsField( - default_factory=list, - title="Limit Groups" - ) def fusion_deadline_plugin_enum(): @@ -142,12 +205,9 @@ class FusionSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True, title="Enabled") optional: bool = SettingsField(False, title="Optional") active: bool = SettingsField(True, title="Active") - priority: int = SettingsField(50, title="Priority") - chunk_size: int = SettingsField(10, title="Frame per Task") concurrent_tasks: int = SettingsField( 1, title="Number of concurrent tasks" ) - group: str = SettingsField("", title="Group Name") plugin: str = SettingsField("Fusion", enum_resolver=fusion_deadline_plugin_enum, title="Deadline Plugin") @@ -159,38 +219,6 @@ class NukeSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - concurrent_tasks: int = SettingsField(title="Number of concurrent tasks") - group: str = SettingsField(title="Group") - department: str = SettingsField(title="Department") - use_gpu: bool = SettingsField(title="Use GPU") - workfile_dependency: bool = SettingsField(title="Workfile Dependency") - use_published_workfile: bool = SettingsField( - title="Use Published Workfile" - ) - - env_allowed_keys: list[str] = SettingsField( - default_factory=list, - title="Allowed environment keys" - ) - - env_search_replace_values: list[EnvSearchReplaceSubmodel] = SettingsField( - default_factory=list, - 
title="Search & replace in environment values", - ) - - limit_groups: list[LimitGroupsSubmodel] = SettingsField( - default_factory=list, - title="Limit Groups", - ) - - @validator( - "limit_groups", - "env_search_replace_values") - def validate_unique_names(cls, value): - ensure_unique_names(value) - return value class HarmonySubmitDeadlineModel(BaseSettingsModel): @@ -199,11 +227,6 @@ class HarmonySubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - department: str = SettingsField(title="Department") class HoudiniSubmitDeadlineModel(BaseSettingsModel): @@ -212,25 +235,6 @@ class HoudiniSubmitDeadlineModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - limits: str = SettingsField( - title="Limit Groups", - description=( - "Enter a comma separated list of limits.\n" - "Specifies the limit groups that this job is a member of (default = blank)." - ) - ) - machine_limit: int = SettingsField( - title="Machine Limit", - description=( - "Specifies the maximum number of machines this job can be" - " rendered on at the same time (default = 0, which means" - " unlimited)." - ) - ) - export_priority: int = SettingsField(title="Export Priority") export_chunk_size: int = SettingsField(title="Export Chunk Size") export_group: str = SettingsField(title="Export Group") @@ -257,25 +261,6 @@ class HoudiniCacheSubmitDeadlineModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - limits: str = SettingsField( - title="Limit Groups", - description=( - "Enter a comma separated list of limits.\n" - "Specifies the limit groups that this job is a member of (default = blank)." - ) - ) - machine_limit: int = SettingsField( - title="Machine Limit", - description=( - "Specifies the maximum number of machines this job can be" - " rendered on at the same time (default = 0, which means" - " unlimited)." 
- ) - ) - class AfterEffectsSubmitDeadlineModel(BaseSettingsModel): """After Effects deadline submitter settings.""" @@ -283,12 +268,6 @@ class AfterEffectsSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - department: str = SettingsField(title="Department") - multiprocess: bool = SettingsField(title="Optional") class CelactionSubmitDeadlineModel(BaseSettingsModel): @@ -310,14 +289,6 @@ class BlenderSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True) optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - asset_dependencies: bool = SettingsField(title="Use Asset dependencies") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Frame per Task") - group: str = SettingsField("", title="Group Name") - job_delay: str = SettingsField( - "", title="Delay job (timecode dd:hh:mm:ss)" - ) class AOVFilterSubmodel(BaseSettingsModel): @@ -373,6 +344,9 @@ class PublishPluginsModel(BaseSettingsModel): CollectDeadlinePools: CollectDeadlinePoolsModel = SettingsField( default_factory=CollectDeadlinePoolsModel, title="Default Pools") + CollectJobInfo: CollectJobInfoModel = SettingsField( + default_factory=CollectJobInfoModel, + title="Collect JobInfo") ValidateExpectedFiles: ValidateExpectedFilesModel = SettingsField( default_factory=ValidateExpectedFilesModel, title="Validate Expected Files" @@ -440,23 +414,11 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "", - "multiprocess": True }, "BlenderSubmitDeadline": { "enabled": True, "optional": False, "active": True, - "use_published": True, - "asset_dependencies": True, - "priority": 50, - "chunk_size": 10, - "group": "none", - "job_delay": "00:00:00:00" }, "CelactionSubmitDeadline": { "enabled": True, @@ -472,40 +434,21 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 10, - "concurrent_tasks": 1, - "group": "" }, "HarmonySubmitDeadline": { "enabled": True, "optional": False, "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "" }, "HoudiniCacheSubmitDeadline": { "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 999999, - "group": "", - "limits": "", - "machine_limit": 0 }, "HoudiniSubmitDeadline": { "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 1, - "group": "", - "limits": "", - "machine_limit": 0, "export_priority": 50, "export_chunk_size": 10, "export_group": "", @@ -516,24 +459,15 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10, - "group": "none" }, "MayaSubmitDeadline": { "enabled": True, "optional": False, "active": True, "tile_assembler_plugin": "DraftTileAssembler", - "use_published": True, "import_reference": False, - "asset_dependencies": True, 
"strict_error_checking": True, - "priority": 50, "tile_priority": 50, - "group": "none", - "limit": [], # this used to be empty dict "jobInfo": "", # this used to be empty dict @@ -544,17 +478,6 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 10, - "concurrent_tasks": 1, - "group": "", - "department": "", - "use_gpu": True, - "workfile_dependency": True, - "use_published_workfile": True, - "env_allowed_keys": [], - "env_search_replace_values": [], - "limit_groups": [] }, "ProcessSubmittedCacheJobOnFarm": { "enabled": True, From f72e87f34cee15949f6090b68324c01f672a5dd6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Oct 2024 17:12:12 +0200 Subject: [PATCH 02/21] Matched implemented artist overrides definitions --- .../plugins/publish/global/collect_jobinfo.py | 172 ++++++++++++++++++ server/settings/publish_plugins.py | 2 +- 2 files changed, 173 insertions(+), 1 deletion(-) create mode 100644 client/ayon_deadline/plugins/publish/global/collect_jobinfo.py diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py new file mode 100644 index 0000000000..099182dec2 --- /dev/null +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -0,0 +1,172 @@ +# -*- coding: utf-8 -*- +from collections import OrderedDict + +import ayon_api +import pyblish.api +from ayon_core.lib import ( + BoolDef, + NumberDef, + TextDef, + EnumDef, + is_in_tests, + UISeparatorDef +) +from ayon_core.pipeline.publish import AYONPyblishPluginMixin +from ayon_core.settings import get_project_settings +from ayon_core.lib.profiles_filtering import filter_profiles + +from ayon_deadline.lib import FARM_FAMILIES + + +class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): + """Collect variables that belong to Deadline's JobInfo. 
+ + Variables like: + - department + - priority + - chunk size + + """ + + order = pyblish.api.CollectorOrder + 0.420 + label = "Collect Deadline JobInfo" + + families = FARM_FAMILIES + + def process(self, instance): + attr_values = self.get_attr_values_from_data(instance.data) + self.log.info(attr_values) + + @classmethod + def get_attr_defs_for_instance(cls, create_context, instance): + if not cls.instance_matches_plugin_families(instance): + return [] + + if not instance["active"]: # TODO origin_data seem not right + return [] + + project_name = create_context.project_name + project_settings = get_project_settings(project_name) + + host_name = create_context.host_name + + task_name = instance["task"] + folder_path = instance["folderPath"] + folder_entity = ayon_api.get_folder_by_path(project_name,folder_path) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name) + profiles = ( + project_settings["deadline"]["publish"][cls.__name__]["profiles"]) + + if not profiles: + return [] + + profile = filter_profiles( + profiles, + { + "host_names": host_name, + "task_types": task_entity["taskType"], + "task_names": task_name, + # "product_type": product_type + } + ) + overrides = set(profile["overrides"]) + if not profile or not overrides: + return [] + + defs = [] + + # should be matching to extract_jobinfo_overrides_enum + override_defs = OrderedDict({ + "chunkSize": NumberDef( + "chunkSize", + label="Frames Per Task", + default=1, + decimals=0, + minimum=1, + maximum=1000 + ), + "priority": NumberDef( + "priority", + label="Priority", + decimals=0 + ), + "department": TextDef( + "department", + label="Department", + default="", + ), + "limit_groups": TextDef( + "limit_groups", + label="Limit Groups", + default="", + placeholder="machine1,machine2" + ), + "job_delay": TextDef( + "job_delay", + label="Delay job (timecode dd:hh:mm:ss)", + default="" + ), + }) + + defs.extend([ + UISeparatorDef("options"), + ]) + + # The Arguments that can be modified by the Publisher + for key, value in override_defs.items(): + if key not in overrides: + continue + + default_value = profile[key] + value.default = default_value + defs.append(value) + + defs.append( + UISeparatorDef("sep_alembic_options_end") + ) + + return defs + + @classmethod + def register_create_context_callbacks(cls, create_context): + create_context.add_value_changed_callback(cls.on_values_changed) + + @classmethod + def on_value_change(cls, event): + for instance_change in event["changes"]: + if not cls.instance_matches_plugin_families(instance): + continue + value_changes = instance_change["changes"] + if "enabled" not in value_changes: + continue + instance = instance_change["instance"] + new_attrs = cls.get_attr_defs_for_instance( + event["create_context"], instance + ) + instance.set_publish_plugin_attr_defs(cls.__name__, new_attrs) + + +class CollectMayaJobInfo(CollectJobInfo): + hosts = [ + "maya", + ] + @classmethod + def get_attribute_defs(cls): + defs = super().get_attribute_defs() + + defs.extend([ + NumberDef( + "tile_priority", + label="Tile Assembler Priority", + decimals=0, + default=cls.tile_priorit + ), + BoolDef( + "strict_error_checking", + label="Strict Error Checking", + default=cls.strict_error_checking + ), + ]) + + return defs \ No newline at end of file diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index daf99d09f2..f5cc685e57 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -35,7 +35,7 @@ def 
extract_jobinfo_overrides_enum(): return [ {"label": "Frames per Task", "value": "chunk_size"}, {"label": "Priority", "value": "priority"}, - {"label": "Group", "value": "group"}, + {"label": "Department", "value": "department"}, {"label": "Limit groups", "value": "limit_groups"}, {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"}, {"label": "Group", "value": "group"}, From 2b294037e0224164f4a92f9b4a3b312bc86a88c4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Oct 2024 17:51:43 +0200 Subject: [PATCH 03/21] Added collected jobInfo object --- client/ayon_deadline/lib.py | 26 ++++++++++ .../plugins/publish/global/collect_jobinfo.py | 50 +++++++++++++++++-- 2 files changed, 73 insertions(+), 3 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 75ad830a77..2eff33971a 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,4 +1,6 @@ import os +from dataclasses import dataclass +from typing import List, Dict # describes list of product typed used for plugin filtering for farm publishing FARM_FAMILIES = [ @@ -43,3 +45,27 @@ def get_instance_job_envs(instance) -> "dict[str, str]": env = dict(sorted(env.items())) return env + + +@dataclass +class JobInfo: + priority: int + chunk_size: int + group: str + limit_groups: List[str] + concurrent_tasks: int + department: str + use_gpu: bool + job_delay: str + use_published: bool + asset_dependencies: bool + workfile_dependency: bool + multiprocess: bool + env_allowed_keys: List[str] + env_search_replace_values: Dict[str, str] + + @classmethod + def from_dict(cls, data: Dict) -> 'JobInfo': + # Filter the dictionary to only include keys that are fields in the dataclass + filtered_data = {k: v for k, v in data.items() if k in cls.__annotations__} + return cls(**filtered_data) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 099182dec2..88b7a380f9 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -15,7 +15,7 @@ from ayon_core.settings import get_project_settings from ayon_core.lib.profiles_filtering import filter_profiles -from ayon_deadline.lib import FARM_FAMILIES +from ayon_deadline.lib import FARM_FAMILIES, JobInfo class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): @@ -34,8 +34,11 @@ class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): families = FARM_FAMILIES def process(self, instance): - attr_values = self.get_attr_values_from_data(instance.data) - self.log.info(attr_values) + attr_values = self._get_jobinfo_defaults(instance) + + attr_values.update(self.get_attr_values_from_data(instance.data)) + job_info = JobInfo.from_dict(attr_values) + instance.data["deadline"]["job_info"] = job_info @classmethod def get_attr_defs_for_instance(cls, create_context, instance): @@ -45,6 +48,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): if not instance["active"]: # TODO origin_data seem not right return [] + # will be reworked when CreateContext contains settings and task types project_name = create_context.project_name project_settings = get_project_settings(project_name) @@ -146,6 +150,46 @@ def on_value_change(cls, event): ) instance.set_publish_plugin_attr_defs(cls.__name__, new_attrs) + def _get_jobinfo_defaults(self, instance): + """Queries project setting for profile with default values + + Args: + instance 
(pyblish.api.Instance): Source instance. + + Returns: + (dict) + """ + attr_values = {} + + context_data = instance.context.data + host_name = context_data["hostName"] + project_settings = context_data["project_settings"] + task_entity = context_data["taskEntity"] + + task_name = task_type = "" + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] + profiles = ( + project_settings["deadline"] + ["publish"] + ["CollectJobInfo"] + ["profiles"] + ) + if profiles: + profile = filter_profiles( + profiles, + { + "host_names": host_name, + "task_types": task_type, + "task_names": task_name, + # "product_type": product_type + } + ) + if profile: + attr_values = profile + return attr_values + class CollectMayaJobInfo(CollectJobInfo): hosts = [ From cb0d24d588e1efd12d6ea8b1dfd548d86096650f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 11:36:18 +0200 Subject: [PATCH 04/21] Use DeadlineJobInfo as collector object It was decided that dataclasses should be used instead of attrs. This moves DeadlineJobInfo which is full mapping of JobInfo from abstract submitters to collectors to limit need of new class and necessary remapping later. --- client/ayon_deadline/lib.py | 206 ++++++++++++++++-- .../plugins/publish/global/collect_jobinfo.py | 4 +- 2 files changed, 190 insertions(+), 20 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 2eff33971a..ba6ffdea6d 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,6 +1,6 @@ import os -from dataclasses import dataclass -from typing import List, Dict +from dataclasses import dataclass, field +from typing import Optional, Dict, List # describes list of product typed used for plugin filtering for farm publishing FARM_FAMILIES = [ @@ -48,24 +48,194 @@ def get_instance_job_envs(instance) -> "dict[str, str]": @dataclass -class JobInfo: - priority: int - chunk_size: int - group: str - limit_groups: List[str] - concurrent_tasks: int - department: str - use_gpu: bool - job_delay: str - use_published: bool - asset_dependencies: bool - workfile_dependency: bool - multiprocess: bool - env_allowed_keys: List[str] - env_search_replace_values: Dict[str, str] +class DeadlineJobInfo: + """Mapping of all Deadline JobInfo attributes. + + This contains all JobInfo attributes plus their default values. + Those attributes set to `None` shouldn't be posted to Deadline as + the only required one is `Plugin`. 
+ """ + + # Required + Plugin: str = field(default="Untitled") + + # General + Name: str = field(default="Untitled") + Frames: Optional[int] = field(default=None) # default: 0 + Comment: Optional[str] = field(default=None) # default: empty + Department: Optional[str] = field(default=None) # default: empty + BatchName: Optional[str] = field(default=None) # default: empty + UserName: str = field(default=None) + MachineName: str = field(default=None) + Pool: Optional[str] = field(default=None) # default: "none" + SecondaryPool: Optional[str] = field(default=None) + Group: Optional[str] = field(default=None) # default: "none" + Priority: int = field(default=None) + ChunkSize: int = field(default=None) + ConcurrentTasks: int = field(default=None) + LimitConcurrentTasksToNumberOfCpus: Optional[bool] = field( + default=None) # default: "true" + OnJobComplete: str = field(default=None) + SynchronizeAllAuxiliaryFiles: Optional[bool] = field( + default=None) # default: false + ForceReloadPlugin: Optional[bool] = field(default=None) # default: false + Sequential: Optional[bool] = field(default=None) # default: false + SuppressEvents: Optional[bool] = field(default=None) # default: false + Protected: Optional[bool] = field(default=None) # default: false + InitialStatus: str = field(default="Active") + NetworkRoot: Optional[str] = field(default=None) + + # Timeouts + MinRenderTimeSeconds: Optional[int] = field(default=None) # Default: 0 + MinRenderTimeMinutes: Optional[int] = field(default=None) # Default: 0 + TaskTimeoutSeconds: Optional[int] = field(default=None) # Default: 0 + TaskTimeoutMinutes: Optional[int] = field(default=None) # Default: 0 + StartJobTimeoutSeconds: Optional[int] = field(default=None) # Default: 0 + StartJobTimeoutMinutes: Optional[int] = field(default=None) # Default: 0 + InitializePluginTimeoutSeconds: Optional[int] = field( + default=None) # Default: 0 + OnTaskTimeout: Optional[str] = field(default=None) # Default: Error + EnableTimeoutsForScriptTasks: Optional[bool] = field( + default=None) # Default: false + EnableFrameTimeouts: Optional[bool] = field(default=None) # Default: false + EnableAutoTimeout: Optional[bool] = field(default=None) # Default: false + + # Interruptible + Interruptible: Optional[bool] = field(default=None) # Default: false + InterruptiblePercentage: Optional[int] = field(default=None) + RemTimeThreshold: Optional[int] = field(default=None) + + # Notifications + NotificationTargets: Optional[str] = field( + default=None) # Default: blank (comma-separated list of users) + ClearNotificationTargets: Optional[bool] = field( + default=None) # Default: false + NotificationEmails: Optional[str] = field( + default=None) # Default: blank (comma-separated list of email addresses) + OverrideNotificationMethod: Optional[bool] = field( + default=None) # Default: false + EmailNotification: Optional[bool] = field(default=None) # Default: false + PopupNotification: Optional[bool] = field(default=None) # Default: false + NotificationNote: Optional[str] = field(default=None) # Default: blank + + # Machine Limit + MachineLimit: Optional[int] = field(default=None) # Default: 0 + MachineLimitProgress: Optional[float] = field(default=None) # Default -1.0 + Whitelist: Optional[str] = field( + default=None) # Default blank (comma-separated list) + Blacklist: Optional[str] = field( + default=None) # Default blank (comma-separated list) + + # Limits + LimitGroups: Optional[str] = field(default=None) # Default: blank + + # Dependencies + JobDependencies: Optional[str] = 
field(default=None) # Default: blank + JobDependencyPercentage: Optional[int] = field(default=None) # Default: -1 + IsFrameDependent: Optional[bool] = field(default=None) # Default: false + FrameDependencyOffsetStart: Optional[int] = field(default=None) # Default: 0 + FrameDependencyOffsetEnd: Optional[int] = field(default=None) # Default: 0 + ResumeOnCompleteDependencies: Optional[bool] = field( + default=True) # Default: true + ResumeOnDeletedDependencies: Optional[bool] = field( + default=False) # Default: false + ResumeOnFailedDependencies: Optional[bool] = field( + default=False) # Default: false + RequiredAssets: Optional[str] = field( + default=None) # Default: blank (comma-separated list) + ScriptDependencies: Optional[str] = field( + default=None) # Default: blank (comma-separated list) + + # Failure Detection + OverrideJobFailureDetection: Optional[bool] = field( + default=False) # Default: false + FailureDetectionJobErrors: Optional[int] = field(default=None) # 0..x + OverrideTaskFailureDetection: Optional[bool] = field( + default=False) # Default: false + FailureDetectionTaskErrors: Optional[int] = field(default=None) # 0..x + IgnoreBadJobDetection: Optional[bool] = field( + default=False) # Default: false + SendJobErrorWarning: Optional[bool] = field( + default=False) # Default: false + + # Cleanup + DeleteOnComplete: Optional[bool] = field(default=False) # Default: false + ArchiveOnComplete: Optional[bool] = field(default=False) # Default: false + OverrideAutoJobCleanup: Optional[bool] = field( + default=False) # Default: false + OverrideJobCleanup: Optional[bool] = field(default=None) + JobCleanupDays: Optional[int] = field( + default=None) # Default: false (not clear) + OverrideJobCleanupType: Optional[str] = field(default=None) + + # Scheduling + ScheduledType: Optional[str] = field( + default=None) # Default: None () + ScheduledStartDateTime: Optional[str] = field( + default=None) #
+ ScheduledDays: Optional[int] = field(default=1) # Default: 1 + JobDelay: Optional[str] = field(default=None) # + Scheduled: Optional[str] = field( + default=None) # Time= + + # Scripts + PreJobScript: Optional[str] = field(default=None) # Default: blank + PostJobScript: Optional[str] = field(default=None) # Default: blank + PreTaskScript: Optional[str] = field(default=None) # Default: blank + PostTaskScript: Optional[str] = field(default=None) # Default: blank + + # Event Opt-Ins + EventOptIns: Optional[str] = field( + default=None) # Default blank (comma-separated list) + + # Environment + EnvironmentKeyValue: str = field(default_factory=lambda: "EnvironmentKeyValue") + IncludeEnvironment: Optional[bool] = field(default=False) # Default: false + UseJobEnvironmentOnly: Optional[bool] = field( + default=False) # Default: false + CustomPluginDirectory: Optional[str] = field(default=None) # Default blank + + # Job Extra Info + ExtraInfoKeyValue: str = field(default_factory=lambda: "ExtraInfoKeyValue") + + OverrideTaskExtraInfoNames: Optional[bool] = field( + default=False) # Default false + + TaskExtraInfoName: str = field(default_factory=lambda: "TaskExtraInfoName") + + OutputFilename: str = field(default_factory=lambda: "OutputFilename") + OutputFilenameTile: str = field(default_factory=lambda: "OutputFilename{}Tile") + OutputDirectory: str = field(default_factory=lambda: "OutputDirectory") + + AssetDependency: str = field(default_factory=lambda: "AssetDependency") + + TileJob: bool = field(default=False) + TileJobFrame: int = field(default=0) + TileJobTilesInX: int = field(default=0) + TileJobTilesInY: int = field(default=0) + TileJobTileCount: int = field(default=0) + + MaintenanceJob: bool = field(default=False) + MaintenanceJobStartFrame: int = field(default=0) + MaintenanceJobEndFrame: int = field(default=0) + @classmethod def from_dict(cls, data: Dict) -> 'JobInfo': + + def capitalize(key): + words = key.split("_") + return "".join(word.capitalize() for word in words) + # Filter the dictionary to only include keys that are fields in the dataclass - filtered_data = {k: v for k, v in data.items() if k in cls.__annotations__} + capitalized = {capitalize(k): v for k, v in data.items()} + filtered_data = {k: v for k, v + in capitalized.items() + if k in cls.__annotations__} return cls(**filtered_data) + + + +arr = {"priority": 40} +job = DeadlineJobInfo.from_dict(arr) +print(job.Priority) \ No newline at end of file diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 88b7a380f9..1070baed4e 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -15,7 +15,7 @@ from ayon_core.settings import get_project_settings from ayon_core.lib.profiles_filtering import filter_profiles -from ayon_deadline.lib import FARM_FAMILIES, JobInfo +from ayon_deadline.lib import FARM_FAMILIES, DeadlineJobInfo class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): @@ -37,7 +37,7 @@ def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) attr_values.update(self.get_attr_values_from_data(instance.data)) - job_info = JobInfo.from_dict(attr_values) + job_info = DeadlineJobInfo.from_dict(attr_values) instance.data["deadline"]["job_info"] = job_info @classmethod From 37c0a0d6e80042766de72f5c257e3daffdfe0562 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 14:58:34 +0200 Subject: 
[PATCH 05/21] Removed six dependency --- .../ayon_deadline/abstract_submit_deadline.py | 350 +----------------- 1 file changed, 6 insertions(+), 344 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index a02a0ce86e..9232efa2f9 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -11,7 +11,7 @@ from functools import partial from collections import OrderedDict -import six + import attr import requests @@ -72,349 +72,11 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) -class DeadlineKeyValueVar(dict): - """ - - Serializes dictionary key values as "{key}={value}" like Deadline uses - for EnvironmentKeyValue. - - As an example: - EnvironmentKeyValue0="A_KEY=VALUE_A" - EnvironmentKeyValue1="OTHER_KEY=VALUE_B" - - The keys are serialized in alphabetical order (sorted). - - Example: - >>> var = DeadlineKeyValueVar("EnvironmentKeyValue") - >>> var["my_var"] = "hello" - >>> var["my_other_var"] = "hello2" - >>> var.serialize() - - - """ - def __init__(self, key): - super(DeadlineKeyValueVar, self).__init__() - self.__key = key - - def serialize(self): - key = self.__key - - # Allow custom location for index in serialized string - if "{}" not in key: - key = key + "{}" - - return { - key.format(index): "{}={}".format(var_key, var_value) - for index, (var_key, var_value) in enumerate(sorted(self.items())) - } - - -class DeadlineIndexedVar(dict): - """ - - Allows to set and query values by integer indices: - Query: var[1] or var.get(1) - Set: var[1] = "my_value" - Append: var += "value" - - Note: Iterating the instance is not guarantueed to be the order of the - indices. To do so iterate with `sorted()` - - """ - def __init__(self, key): - super(DeadlineIndexedVar, self).__init__() - self.__key = key - - def serialize(self): - key = self.__key - - # Allow custom location for index in serialized string - if "{}" not in key: - key = key + "{}" - - return { - key.format(index): value for index, value in sorted(self.items()) - } - - def next_available_index(self): - # Add as first unused entry - i = 0 - while i in self.keys(): - i += 1 - return i - - def update(self, data): - # Force the integer key check - for key, value in data.items(): - self.__setitem__(key, value) - - def __iadd__(self, other): - index = self.next_available_index() - self[index] = other - return self - - def __setitem__(self, key, value): - if not isinstance(key, int): - raise TypeError("Key must be an integer: {}".format(key)) - - if key < 0: - raise ValueError("Negative index can't be set: {}".format(key)) - dict.__setitem__(self, key, value) - - -@attr.s -class DeadlineJobInfo(object): - """Mapping of all Deadline *JobInfo* attributes. - - This contains all JobInfo attributes plus their default values. - Those attributes set to `None` shouldn't be posted to Deadline as - the only required one is `Plugin`. Their default values used by Deadline - are stated in - comments. 
- - ..seealso: - https://docs.thinkboxsoftware.com/products/deadline/10.1/1_User%20Manual/manual/manual-submission.html - - """ - - # Required - # ---------------------------------------------- - Plugin = attr.ib() - - # General - Frames = attr.ib(default=None) # default: 0 - Name = attr.ib(default="Untitled") - Comment = attr.ib(default=None) # default: empty - Department = attr.ib(default=None) # default: empty - BatchName = attr.ib(default=None) # default: empty - UserName = attr.ib(default=getpass.getuser()) - MachineName = attr.ib(default=platform.node()) - Pool = attr.ib(default=None) # default: "none" - SecondaryPool = attr.ib(default=None) - Group = attr.ib(default=None) # default: "none" - Priority = attr.ib(default=50) - ChunkSize = attr.ib(default=1) - ConcurrentTasks = attr.ib(default=1) - LimitConcurrentTasksToNumberOfCpus = attr.ib( - default=None) # default: "true" - OnJobComplete = attr.ib(default="Nothing") - SynchronizeAllAuxiliaryFiles = attr.ib(default=None) # default: false - ForceReloadPlugin = attr.ib(default=None) # default: false - Sequential = attr.ib(default=None) # default: false - SuppressEvents = attr.ib(default=None) # default: false - Protected = attr.ib(default=None) # default: false - InitialStatus = attr.ib(default="Active") - NetworkRoot = attr.ib(default=None) - - # Timeouts - # ---------------------------------------------- - MinRenderTimeSeconds = attr.ib(default=None) # Default: 0 - MinRenderTimeMinutes = attr.ib(default=None) # Default: 0 - TaskTimeoutSeconds = attr.ib(default=None) # Default: 0 - TaskTimeoutMinutes = attr.ib(default=None) # Default: 0 - StartJobTimeoutSeconds = attr.ib(default=None) # Default: 0 - StartJobTimeoutMinutes = attr.ib(default=None) # Default: 0 - InitializePluginTimeoutSeconds = attr.ib(default=None) # Default: 0 - # can be one of - OnTaskTimeout = attr.ib(default=None) # Default: Error - EnableTimeoutsForScriptTasks = attr.ib(default=None) # Default: false - EnableFrameTimeouts = attr.ib(default=None) # Default: false - EnableAutoTimeout = attr.ib(default=None) # Default: false - - # Interruptible - # ---------------------------------------------- - Interruptible = attr.ib(default=None) # Default: false - InterruptiblePercentage = attr.ib(default=None) - RemTimeThreshold = attr.ib(default=None) - - # Notifications - # ---------------------------------------------- - # can be comma separated list of users - NotificationTargets = attr.ib(default=None) # Default: blank - ClearNotificationTargets = attr.ib(default=None) # Default: false - # A comma separated list of additional email addresses - NotificationEmails = attr.ib(default=None) # Default: blank - OverrideNotificationMethod = attr.ib(default=None) # Default: false - EmailNotification = attr.ib(default=None) # Default: false - PopupNotification = attr.ib(default=None) # Default: false - # String with `[EOL]` used for end of line - NotificationNote = attr.ib(default=None) # Default: blank - - # Machine Limit - # ---------------------------------------------- - MachineLimit = attr.ib(default=None) # Default: 0 - MachineLimitProgress = attr.ib(default=None) # Default: -1.0 - Whitelist = attr.ib(default=None) # Default: blank - Blacklist = attr.ib(default=None) # Default: blank - - # Limits - # ---------------------------------------------- - # comma separated list of limit groups - LimitGroups = attr.ib(default=None) # Default: blank - - # Dependencies - # ---------------------------------------------- - # comma separated list of job IDs - JobDependencies = 
attr.ib(default=None) # Default: blank - JobDependencyPercentage = attr.ib(default=None) # Default: -1 - IsFrameDependent = attr.ib(default=None) # Default: false - FrameDependencyOffsetStart = attr.ib(default=None) # Default: 0 - FrameDependencyOffsetEnd = attr.ib(default=None) # Default: 0 - ResumeOnCompleteDependencies = attr.ib(default=None) # Default: true - ResumeOnDeletedDependencies = attr.ib(default=None) # Default: false - ResumeOnFailedDependencies = attr.ib(default=None) # Default: false - # comma separated list of asset paths - RequiredAssets = attr.ib(default=None) # Default: blank - # comma separated list of script paths - ScriptDependencies = attr.ib(default=None) # Default: blank - - # Failure Detection - # ---------------------------------------------- - OverrideJobFailureDetection = attr.ib(default=None) # Default: false - FailureDetectionJobErrors = attr.ib(default=None) # 0..x - OverrideTaskFailureDetection = attr.ib(default=None) # Default: false - FailureDetectionTaskErrors = attr.ib(default=None) # 0..x - IgnoreBadJobDetection = attr.ib(default=None) # Default: false - SendJobErrorWarning = attr.ib(default=None) # Default: false - - # Cleanup - # ---------------------------------------------- - DeleteOnComplete = attr.ib(default=None) # Default: false - ArchiveOnComplete = attr.ib(default=None) # Default: false - OverrideAutoJobCleanup = attr.ib(default=None) # Default: false - OverrideJobCleanup = attr.ib(default=None) - JobCleanupDays = attr.ib(default=None) # Default: false - # - OverrideJobCleanupType = attr.ib(default=None) - - # Scheduling - # ---------------------------------------------- - # - ScheduledType = attr.ib(default=None) # Default: None - #
- ScheduledStartDateTime = attr.ib(default=None) - ScheduledDays = attr.ib(default=None) # Default: 1 - # - JobDelay = attr.ib(default=None) - # Time= - Scheduled = attr.ib(default=None) - - # Scripts - # ---------------------------------------------- - # all accept path to script - PreJobScript = attr.ib(default=None) # Default: blank - PostJobScript = attr.ib(default=None) # Default: blank - PreTaskScript = attr.ib(default=None) # Default: blank - PostTaskScript = attr.ib(default=None) # Default: blank - - # Event Opt-Ins - # ---------------------------------------------- - # comma separated list of plugins - EventOptIns = attr.ib(default=None) # Default: blank - - # Environment - # ---------------------------------------------- - EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, - "EnvironmentKeyValue")) - - IncludeEnvironment = attr.ib(default=None) # Default: false - UseJobEnvironmentOnly = attr.ib(default=None) # Default: false - CustomPluginDirectory = attr.ib(default=None) # Default: blank - - # Job Extra Info - # ---------------------------------------------- - ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) - ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, - "ExtraInfoKeyValue")) - - # Task Extra Info Names - # ---------------------------------------------- - OverrideTaskExtraInfoNames = attr.ib(default=None) # Default: false - TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar, - "TaskExtraInfoName")) - - # Output - # ---------------------------------------------- - OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputFilename")) - OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputFilename{}Tile")) - OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputDirectory")) - - # Asset Dependency - # ---------------------------------------------- - AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar, - "AssetDependency")) - - # Tile Job - # ---------------------------------------------- - TileJob = attr.ib(default=None) # Default: false - TileJobFrame = attr.ib(default=None) # Default: 0 - TileJobTilesInX = attr.ib(default=None) # Default: 0 - TileJobTilesInY = attr.ib(default=None) # Default: 0 - TileJobTileCount = attr.ib(default=None) # Default: 0 - - # Maintenance Job - # ---------------------------------------------- - MaintenanceJob = attr.ib(default=None) # Default: false - MaintenanceJobStartFrame = attr.ib(default=None) # Default: 0 - MaintenanceJobEndFrame = attr.ib(default=None) # Default: 0 - - def serialize(self): - """Return all data serialized as dictionary. - - Returns: - OrderedDict: all serialized data. 
- - """ - def filter_data(a, v): - if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): - return False - if v is None: - return False - return True - - serialized = attr.asdict( - self, dict_factory=OrderedDict, filter=filter_data) - - # Custom serialize these attributes - for attribute in [ - self.EnvironmentKeyValue, - self.ExtraInfo, - self.ExtraInfoKeyValue, - self.TaskExtraInfoName, - self.OutputFilename, - self.OutputFilenameTile, - self.OutputDirectory, - self.AssetDependency - ]: - serialized.update(attribute.serialize()) - - return serialized - - def update(self, data): - """Update instance with data dict""" - for key, value in data.items(): - setattr(self, key, value) - - def add_render_job_env_var(self): - """Add required env vars for valid render job submission.""" - for key, value in get_ayon_render_job_envs().items(): - self.EnvironmentKeyValue[key] = value - - def add_instance_job_env_vars(self, instance): - """Add all job environments as specified on the instance and context - - Any instance `job_env` vars will override the context `job_env` vars. - """ - for key, value in get_instance_job_envs(instance).items(): - self.EnvironmentKeyValue[key] = value - - -@six.add_metaclass(AbstractMetaInstancePlugin) -class AbstractSubmitDeadline(pyblish.api.InstancePlugin, - AYONPyblishPluginMixin): +class AbstractSubmitDeadline( + pyblish.api.InstancePlugin, + AYONPyblishPluginMixin, + metaclass=AbstractMetaInstancePlugin +): """Class abstracting access to Deadline.""" label = "Submit to Deadline" From 335883800d6b3fca5024173f7ac04237dcac5816 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:01:04 +0200 Subject: [PATCH 06/21] Implemented new generic method applicable for all DCC Added new argument for old get_job_info (which should be probabaly renamed) to pass base of prepared object to be enhanced with DCC specific fields --- .../ayon_deadline/abstract_submit_deadline.py | 36 ++++++++++++++++--- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 9232efa2f9..e84ee20509 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -12,7 +12,6 @@ from collections import OrderedDict -import attr import requests import pyblish.api @@ -24,8 +23,7 @@ from ayon_core.pipeline.publish.lib import ( replace_with_published_scene_path ) - -from .lib import get_ayon_render_job_envs, get_instance_job_envs +from ayon_core.lib import is_in_tests JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError) @@ -120,7 +118,8 @@ def process(self, instance): self.scene_path = file_path self.log.info("Using {} for render/export.".format(file_path)) - self.job_info = self.get_job_info() + job_info = self.get_generic_job_info(instance) + self.job_info = self.get_job_info(job_info) self.plugin_info = self.get_plugin_info() self.aux_files = self.get_aux_files() @@ -157,6 +156,35 @@ def process_submission(self): verify = self._instance.data["deadline"]["verify"] return self.submit(payload, auth, verify) + def get_generic_job_info(self, instance): + context = instance.context + + job_info = instance.data["deadline"]["job_info"] + + # Always use the original work file name for the Job name even when + # rendering is done from the published Work File. The original work + # file name is clearer because it can also have subversion strings, + # etc. which are stripped for the published file. 
+ src_filepath = context.data["currentFile"] + src_filename = os.path.basename(src_filepath) + + if is_in_tests(): + src_filename += datetime.now().strftime("%d%m%Y%H%M%S") + + job_info.Name = "%s - %s" % (src_filename, instance.name) + job_info.BatchName = src_filename + job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # TODO clean deadlineUser + + first_expected_file = instance.data["expectedFiles"][0] + job_info.OutputFilename += os.path.basename(first_expected_file) + job_info.OutputDirectory += os.path.dirname(first_expected_file) + + # Set job environment variables + job_info.add_instance_job_env_vars(instance) + job_info.add_render_job_env_var() + + return job_info + @abstractmethod def get_job_info(self): """Return filled Deadline JobInfo. From 60da8b939e901a322911cff6f177183a618289ac Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:01:36 +0200 Subject: [PATCH 07/21] Removed unneeded imports --- client/ayon_deadline/abstract_submit_deadline.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index e84ee20509..1046f469c6 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -6,10 +6,9 @@ """ import json.decoder from abc import abstractmethod -import platform import getpass -from functools import partial -from collections import OrderedDict +import os +import datetime import requests From ae3c73917fdff4ab2feb71b2ee1640ee78d8beb9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:02:35 +0200 Subject: [PATCH 08/21] Do not pass empty string to DeadlineJobInfo Empty strings overrides None defaults which might cause issue (it definitely does for job_delay). 
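
A minimal sketch of the intent (hypothetical attribute values; assumes the
DeadlineJobInfo dataclass and its from_dict helper added earlier in this
series, with the ayon_deadline client package importable):

    from ayon_deadline.lib import DeadlineJobInfo

    attr_values = {"priority": 50, "job_delay": "", "group": ""}
    # Drop empty strings so the dataclass None defaults stay untouched
    filtered = {key: value for key, value in attr_values.items()
                if value != ""}

    job_info = DeadlineJobInfo.from_dict(filtered)
    assert job_info.Priority == 50     # explicit override is applied
    assert job_info.JobDelay is None   # default kept instead of ""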
--- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 1070baed4e..18a073a94e 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -37,6 +37,12 @@ def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) attr_values.update(self.get_attr_values_from_data(instance.data)) + # do not set empty strings + attr_values = { + key: value + for key,value in attr_values.items() + if value != "" + } job_info = DeadlineJobInfo.from_dict(attr_values) instance.data["deadline"]["job_info"] = job_info From 1c8990a1a99d2fed13e4a377b39d6931ffb8fe25 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:03:06 +0200 Subject: [PATCH 09/21] Removed unneded imports --- .../publish/aftereffects/submit_aftereffects_deadline.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index f646df720a..9973a4c37a 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -1,8 +1,6 @@ import os import attr -import getpass import pyblish.api -from datetime import datetime from ayon_core.lib import ( env_value_to_bool, @@ -10,7 +8,6 @@ is_in_tests, ) from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s From 57860cee85e3fedee08d39ba19057f37b7c5b72a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:03:39 +0200 Subject: [PATCH 10/21] Reimplemented get_job_info, moved to generic impl --- .../submit_aftereffects_deadline.py | 36 ++----------------- 1 file changed, 2 insertions(+), 34 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index 9973a4c37a..b4da16a5e1 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -36,47 +36,15 @@ class AfterEffectsSubmitDeadline( targets = ["local"] settings_category = "deadline" - priority = 50 - chunk_size = 1000000 - group = None - department = None - multiprocess = True - - def get_job_info(self): - dln_job_info = DeadlineJobInfo(Plugin="AfterEffects") - - context = self._instance.context - - batch_name = os.path.basename(self._instance.data["source"]) - if is_in_tests(): - batch_name += datetime.now().strftime("%d%m%Y%H%M%S") - dln_job_info.Name = self._instance.data["name"] - dln_job_info.BatchName = batch_name + def get_job_info(self, dln_job_info): dln_job_info.Plugin = "AfterEffects" - dln_job_info.UserName = context.data.get( - "deadlineUser", getpass.getuser()) + # Deadline requires integers in frame range frame_range = "{}-{}".format( int(round(self._instance.data["frameStart"])), int(round(self._instance.data["frameEnd"]))) dln_job_info.Frames = frame_range - dln_job_info.Priority = self.priority - dln_job_info.Pool = self._instance.data.get("primaryPool") - dln_job_info.SecondaryPool = 
self._instance.data.get("secondaryPool") - dln_job_info.Group = self.group - dln_job_info.Department = self.department - dln_job_info.ChunkSize = self.chunk_size - dln_job_info.OutputFilename += \ - os.path.basename(self._instance.data["expectedFiles"][0]) - dln_job_info.OutputDirectory += \ - os.path.dirname(self._instance.data["expectedFiles"][0]) - dln_job_info.JobDelay = "00:00:00" - - # Set job environment variables - dln_job_info.add_instance_job_env_vars(self._instance) - dln_job_info.add_render_job_env_var() - return dln_job_info def get_plugin_info(self): From 10f566f19307901394eb8b6fe55272fd91164c66 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:05:01 +0200 Subject: [PATCH 11/21] Removed storign deadline info to instance 'deadline' dictionary wasnt used at all, it contained large DeadlineJobInfo which just enlarged metadata json unnecessary. --- .../plugins/publish/global/submit_publish_job.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 65e4285d50..1e89d936f8 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -479,9 +479,11 @@ def process(self, instance): "intent": instance.context.data.get("intent"), "comment": instance.context.data.get("comment"), "job": render_job or None, - "instances": instances - } + # do not carry over unnecessary DL info with large DeadlineJobInfo + "instances": [{k: v for k, v in inst.items() if k != "deadline"} + for inst in instances] + } if deadline_publish_job_id: publish_job["deadline_publish_job_id"] = deadline_publish_job_id From bbb12b74f8452ffbe8a5d9c6660ee4abab934be4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:06:05 +0200 Subject: [PATCH 12/21] Updates to DeadlineJobInfo to match previous implementation It is handling EnvironmentKey* type of fields --- client/ayon_deadline/lib.py | 175 +++++++++++++++++++++++++++++++++--- 1 file changed, 161 insertions(+), 14 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index ba6ffdea6d..2fcb643386 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,6 +1,8 @@ import os -from dataclasses import dataclass, field -from typing import Optional, Dict, List +from dataclasses import dataclass, field, asdict +from functools import partial +from typing import Optional, Dict, Any +import json # describes list of product typed used for plugin filtering for farm publishing FARM_FAMILIES = [ @@ -47,6 +49,100 @@ def get_instance_job_envs(instance) -> "dict[str, str]": return env +class DeadlineKeyValueVar(dict): + """ + + Serializes dictionary key values as "{key}={value}" like Deadline uses + for EnvironmentKeyValue. + + As an example: + EnvironmentKeyValue0="A_KEY=VALUE_A" + EnvironmentKeyValue1="OTHER_KEY=VALUE_B" + + The keys are serialized in alphabetical order (sorted). 
+ + Example: + >>> var = DeadlineKeyValueVar("EnvironmentKeyValue") + >>> var["my_var"] = "hello" + >>> var["my_other_var"] = "hello2" + >>> var.serialize() + + + """ + def __init__(self, key): + super(DeadlineKeyValueVar, self).__init__() + self.__key = key + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): "{}={}".format(var_key, var_value) + for index, (var_key, var_value) in enumerate(sorted(self.items())) + } + +# def DeadlineKeyValueVar(key: str) -> Any: +# # Placeholder for the actual implementation +# return f"Value for {key}" + + +class DeadlineIndexedVar(dict): + """ + + Allows to set and query values by integer indices: + Query: var[1] or var.get(1) + Set: var[1] = "my_value" + Append: var += "value" + + Note: Iterating the instance is not guarantueed to be the order of the + indices. To do so iterate with `sorted()` + + """ + def __init__(self, key): + super(DeadlineIndexedVar, self).__init__() + self.__key = key + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): value for index, value in sorted(self.items()) + } + + def next_available_index(self): + # Add as first unused entry + i = 0 + while i in self.keys(): + i += 1 + return i + + def update(self, data): + # Force the integer key check + for key, value in data.items(): + self.__setitem__(key, value) + + def __iadd__(self, other): + index = self.next_available_index() + self[index] = other + return self + + def __setitem__(self, key, value): + if not isinstance(key, int): + raise TypeError("Key must be an integer: {}".format(key)) + + if key < 0: + raise ValueError("Negative index can't be set: {}".format(key)) + dict.__setitem__(self, key, value) + + @dataclass class DeadlineJobInfo: """Mapping of all Deadline JobInfo attributes. 
@@ -189,25 +285,33 @@ class DeadlineJobInfo: default=None) # Default blank (comma-separated list) # Environment - EnvironmentKeyValue: str = field(default_factory=lambda: "EnvironmentKeyValue") + EnvironmentKeyValue: Any = field( + default_factory=partial(DeadlineKeyValueVar, "EnvironmentKeyValue")) IncludeEnvironment: Optional[bool] = field(default=False) # Default: false - UseJobEnvironmentOnly: Optional[bool] = field( - default=False) # Default: false + UseJobEnvironmentOnly: Optional[bool] = field(default=False) # Default: false CustomPluginDirectory: Optional[str] = field(default=None) # Default blank # Job Extra Info - ExtraInfoKeyValue: str = field(default_factory=lambda: "ExtraInfoKeyValue") + ExtraInfo: Any = field( + default_factory=partial(DeadlineIndexedVar, "ExtraInfo")) + ExtraInfoKeyValue: Any = field( + default_factory=partial(DeadlineKeyValueVar, "ExtraInfoKeyValue")) OverrideTaskExtraInfoNames: Optional[bool] = field( default=False) # Default false - TaskExtraInfoName: str = field(default_factory=lambda: "TaskExtraInfoName") + TaskExtraInfoName: Any = field( + default_factory=partial(DeadlineIndexedVar, "TaskExtraInfoName")) - OutputFilename: str = field(default_factory=lambda: "OutputFilename") - OutputFilenameTile: str = field(default_factory=lambda: "OutputFilename{}Tile") - OutputDirectory: str = field(default_factory=lambda: "OutputDirectory") + OutputFilename: Any = field( + default_factory=partial(DeadlineIndexedVar, "OutputFilename")) + OutputFilenameTile: str = field( + default_factory=partial(DeadlineIndexedVar, "OutputFilename{}Tile")) + OutputDirectory: str = field( + default_factory=partial(DeadlineIndexedVar, "OutputDirectory")) - AssetDependency: str = field(default_factory=lambda: "AssetDependency") + AssetDependency: str = field( + default_factory=partial(DeadlineIndexedVar, "AssetDependency")) TileJob: bool = field(default=False) TileJobFrame: int = field(default=0) @@ -219,6 +323,38 @@ class DeadlineJobInfo: MaintenanceJobStartFrame: int = field(default=0) MaintenanceJobEndFrame: int = field(default=0) + def serialize(self): + """Return all data serialized as dictionary. + + Returns: + OrderedDict: all serialized data. + + """ + def filter_data(a, v): + if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): + return False + if v is None: + return False + return True + + serialized = asdict(self) + serialized = {k: v for k, v in serialized.items() + if filter_data(k, v)} + + # Custom serialize these attributes + for attribute in [ + self.EnvironmentKeyValue, + self.ExtraInfo, + self.ExtraInfoKeyValue, + self.TaskExtraInfoName, + self.OutputFilename, + self.OutputFilenameTile, + self.OutputDirectory, + self.AssetDependency + ]: + serialized.update(attribute.serialize()) + + return serialized @classmethod def from_dict(cls, data: Dict) -> 'JobInfo': @@ -234,8 +370,19 @@ def capitalize(key): if k in cls.__annotations__} return cls(**filtered_data) + def add_render_job_env_var(self): + """Add required env vars for valid render job submission.""" + for key, value in get_ayon_render_job_envs().items(): + self.EnvironmentKeyValue[key] = value + + def add_instance_job_env_vars(self, instance): + """Add all job environments as specified on the instance and context + Any instance `job_env` vars will override the context `job_env` vars. 
+ """ + for key, value in get_instance_job_envs(instance).items(): + self.EnvironmentKeyValue[key] = value -arr = {"priority": 40} -job = DeadlineJobInfo.from_dict(arr) -print(job.Priority) \ No newline at end of file + def to_json(self) -> str: + """Serialize the dataclass instance to a JSON string.""" + return json.dumps(asdict(self)) \ No newline at end of file From c26c9970741e611e4d1119dbf9666bb03e626ab3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:34:16 +0200 Subject: [PATCH 13/21] Added Frames field for explicit rendering --- .../submit_aftereffects_deadline.py | 12 +++--- .../plugins/publish/global/collect_jobinfo.py | 39 +++++++++++++------ 2 files changed, 35 insertions(+), 16 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index b4da16a5e1..3966a51588 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -39,11 +39,13 @@ class AfterEffectsSubmitDeadline( def get_job_info(self, dln_job_info): dln_job_info.Plugin = "AfterEffects" - # Deadline requires integers in frame range - frame_range = "{}-{}".format( - int(round(self._instance.data["frameStart"])), - int(round(self._instance.data["frameEnd"]))) - dln_job_info.Frames = frame_range + # already collected explicit values for rendered Frames + if not dln_job_info.Frames: + # Deadline requires integers in frame range + frame_range = "{}-{}".format( + int(round(self._instance.data["frameStart"])), + int(round(self._instance.data["frameEnd"]))) + dln_job_info.Frames = frame_range return dln_job_info diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 18a073a94e..b5e8032ea9 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -86,6 +86,31 @@ def get_attr_defs_for_instance(cls, create_context, instance): defs = [] + + defs.extend([ + UISeparatorDef("deadline_defs_starts"), + ]) + + defs.extend(cls._get_artist_overrides(overrides, profile)) + + defs.append( + TextDef( + "frames", + label="Frames", + default="", + tooltip="Explicit frames to be rendered. 
(1, 3-4)" + ) + ) + + defs.append( + UISeparatorDef("deadline_defs_end") + ) + + return defs + + @classmethod + def _get_artist_overrides(cls, overrides, profile): + """Provide list of Defs that could be filled by artist""" # should be matching to extract_jobinfo_overrides_enum override_defs = OrderedDict({ "chunkSize": NumberDef( @@ -118,11 +143,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): default="" ), }) - - defs.extend([ - UISeparatorDef("options"), - ]) - + defs = [] # The Arguments that can be modified by the Publisher for key, value in override_defs.items(): if key not in overrides: @@ -132,10 +153,6 @@ def get_attr_defs_for_instance(cls, create_context, instance): value.default = default_value defs.append(value) - defs.append( - UISeparatorDef("sep_alembic_options_end") - ) - return defs @classmethod @@ -202,8 +219,8 @@ class CollectMayaJobInfo(CollectJobInfo): "maya", ] @classmethod - def get_attribute_defs(cls): - defs = super().get_attribute_defs() + def get_attr_defs_for_instance(cls, create_context, instance): + defs = super().get_attr_defs_for_instance(create_context, instance) defs.extend([ NumberDef( From e1eba7f91937f8c3d5ae6d72ba68428ed85c9ead Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:15:05 +0200 Subject: [PATCH 14/21] Run CollectJobInfo only locally --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index b5e8032ea9..4d416a468e 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -32,6 +32,7 @@ class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): label = "Collect Deadline JobInfo" families = FARM_FAMILIES + targets = ["local"] def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) @@ -93,6 +94,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): defs.extend(cls._get_artist_overrides(overrides, profile)) + # explicit defs.append( TextDef( "frames", From 8062462d8239b15095348e36f237c5c2c2d72406 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:19:32 +0200 Subject: [PATCH 15/21] Implemented explicit frames filtering on simple files Not yet working on AOVs. 
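
A frames string such as "1,3-4" expands to frames 1, 3 and 4, which are then
used to keep only the matching expected files. A minimal sketch of that idea
(the helpers below are illustrative, not the plugin's actual code, and they
assume simple comma/range tokens plus a "name.<frame>.ext" file pattern):

    def expand_frames(frames_str):
        # "1,3-4" -> [1, 3, 4]
        frames = []
        for token in frames_str.replace(" ", "").split(","):
            if not token:
                continue
            if "-" in token:
                start, end = token.split("-")
                frames.extend(range(int(start), int(end) + 1))
            else:
                frames.append(int(token))
        return frames

    def filter_expected_files(expected_files, frames_str):
        # keep only files whose frame number was explicitly requested
        wanted = set(expand_frames(frames_str))
        return [
            path for path in expected_files
            if int(path.rsplit(".", 2)[-2]) in wanted
        ]
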
---
 .../ayon_deadline/plugins/publish/global/submit_publish_job.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py
index 1e89d936f8..77fa112765 100644
--- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py
+++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py
@@ -398,7 +398,8 @@ def process(self, instance):
             self.skip_integration_repre_list,
             do_not_add_review,
             instance.context,
-            self
+            self,
+            instance.data["deadline"]["job_info"].Frames
         )
 
         if "representations" not in instance_skeleton_data.keys():

From ec908e05ea8dea1e055d5b817acd686b8b2995bd Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 18 Oct 2024 18:27:29 +0200
Subject: [PATCH 16/21] Reworked removal of deadline in submit job

The deadline portion must remain there for ValidateExpectedFiles.

---
 .../plugins/publish/global/submit_publish_job.py          | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py
index 77fa112765..b6e430b1fd 100644
--- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py
+++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py
@@ -466,6 +466,7 @@ def process(self, instance):
 
         # Inject deadline url to instances to query DL for job id for overrides
        for inst in instances:
+            instance.data["deadline"].pop("job_info", None)
             inst["deadline"] = instance.data["deadline"]
 
         # publish job file
@@ -481,8 +482,7 @@ def process(self, instance):
             "comment": instance.context.data.get("comment"),
             "job": render_job or None,
             # do not carry over unnecessary DL info with large DeadlineJobInfo
-            "instances": [{k: v for k, v in inst.items() if k != "deadline"}
-                          for inst in instances]
+            "instances": instances
         }
 
         if deadline_publish_job_id:

From 64b22ff2504607ecb1cdcf8cff18e5d96192c5c6 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 18 Oct 2024 18:30:01 +0200
Subject: [PATCH 17/21] Removed empty line

---
 server/settings/publish_plugins.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py
index f5cc685e57..9e48fdc120 100644
--- a/server/settings/publish_plugins.py
+++ b/server/settings/publish_plugins.py
@@ -39,7 +39,6 @@ def extract_jobinfo_overrides_enum():
         {"label": "Limit groups", "value": "limit_groups"},
         {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"},
         {"label": "Group", "value": "group"},
-
     ]

From 0d3125dd025735e94fcc94dc41c52c8f0705f4c8 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 18 Oct 2024 18:30:43 +0200
Subject: [PATCH 18/21] Added empty line at the end

---
 client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py
index 4d416a468e..c64fa3b698 100644
--- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py
+++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py
@@ -238,4 +238,4 @@ def get_attr_defs_for_instance(cls, create_context, instance):
             ),
         ])
 
-        return defs
\ No newline at end of file
+        return defs

From c206b2489821ab9308df1e2094aed1996883b279 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 18 Oct 2024 18:31:27 +0200
Subject: [PATCH 19/21] Formatting

---
client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index c64fa3b698..6213cd8b95 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -87,7 +87,6 @@ def get_attr_defs_for_instance(cls, create_context, instance): defs = [] - defs.extend([ UISeparatorDef("deadline_defs_starts"), ]) From 803c25c81afcca0e6d159ca70fbf9a464f1979a1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:34:16 +0200 Subject: [PATCH 20/21] Removed forgotten dev code --- client/ayon_deadline/lib.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 2fcb643386..7384ac6538 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -85,10 +85,6 @@ def serialize(self): for index, (var_key, var_value) in enumerate(sorted(self.items())) } -# def DeadlineKeyValueVar(key: str) -> Any: -# # Placeholder for the actual implementation -# return f"Value for {key}" - class DeadlineIndexedVar(dict): """ From 201c5b801f5ae8e9dae2d1e7c11dfa06176340a6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 19:50:58 +0200 Subject: [PATCH 21/21] Refactor batchname --- client/ayon_deadline/abstract_submit_deadline.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 1046f469c6..775f7bf9e5 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -164,14 +164,13 @@ def get_generic_job_info(self, instance): # rendering is done from the published Work File. The original work # file name is clearer because it can also have subversion strings, # etc. which are stripped for the published file. - src_filepath = context.data["currentFile"] - src_filename = os.path.basename(src_filepath) + batch_name = os.path.basename(context.data["currentFile"]) if is_in_tests(): - src_filename += datetime.now().strftime("%d%m%Y%H%M%S") + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") - job_info.Name = "%s - %s" % (src_filename, instance.name) - job_info.BatchName = src_filename + job_info.Name = "%s - %s" % (batch_name, instance.name) + job_info.BatchName = batch_name job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # TODO clean deadlineUser first_expected_file = instance.data["expectedFiles"][0]
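        # Worked example with assumed values (illustrative only): if
        # context.data["currentFile"] is "/proj/work/sh010_lighting_v012.ma"
        # and instance.name is "renderMain", then outside of tests this sets
        # job_info.BatchName to "sh010_lighting_v012.ma" and job_info.Name to
        # "sh010_lighting_v012.ma - renderMain".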