allow arbitrary resource definitions #87

Merged
merged 25 commits on Jul 9, 2024
Changes from 1 commit
Commits (25)
2643e5f
allow arbitrary resource definitions
pedohorse Jun 23, 2024
adc5a87
implement arbitrary resource display
pedohorse Jun 23, 2024
0935226
add __repr__ __eq__ to WorkerResources
pedohorse Jun 24, 2024
4fd2f56
adjust tests to new changes
pedohorse Jun 24, 2024
0ec26c0
NodeSerializerBase INTERFACE CHANGE, simplify deserialization, do not…
pedohorse Jun 25, 2024
aaa029e
DESIGN CHANGE: SchedulerConfigProviderBase to provide structured and …
pedohorse Jun 27, 2024
3dde637
move BaseNodeWithTaskRequirements to node_plugin_base module
pedohorse Jun 27, 2024
6f41ffe
add node parameter evaluation context test
pedohorse Jun 30, 2024
97eee93
adjust base node resource tests for arbitrary resource design
pedohorse Jun 30, 2024
593cb13
introduce arbitrary resources WIP
pedohorse Jun 30, 2024
cc73e4b
allow resource definition overrides for testing
pedohorse Jul 1, 2024
25a92f0
NotPerformed operations not added to undo stack
pedohorse Jul 1, 2024
534ce91
appropriate display for locker/readonly parameters
pedohorse Jul 1, 2024
1ad5969
refactor: plugin loader configuration is supplied from config provider
pedohorse Jul 4, 2024
ca8319d
switch to tree model/view for workers
pedohorse Jul 4, 2024
d867993
refactor pluginloader
pedohorse Jul 6, 2024
6aae08e
fix broken config test
pedohorse Jul 6, 2024
9035e16
add default to resource definition
pedohorse Jul 7, 2024
ea38dd3
check db schema upgrade before dealing with resources
pedohorse Jul 7, 2024
9774c14
default for SHARABLE_COMPUTATIONAL_UNIT - int()
pedohorse Jul 7, 2024
c017272
update requirement-related parameters, assume res0 is cpu, res1 is mem
pedohorse Jul 7, 2024
7ba353f
fix resource test to be async
pedohorse Jul 7, 2024
f89bf4c
add cpu/mem requirement migration logic
pedohorse Jul 7, 2024
d9e87ba
add HardwareResources unittests
pedohorse Jul 7, 2024
ef77d76
cleanup
pedohorse Jul 7, 2024
move BaseNodeWithTaskRequirements to node_plugin_base module
pedohorse committed Jun 27, 2024
commit 3dde637115b3b22ad20afc0a77722afc62ece28d
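In effect, plugin code now imports its node base classes from lifeblood.node_plugin_base instead of lifeblood.basenode, as the per-file changes below show. A minimal before/after sketch of the import change (illustrative only):

# before this commit
from lifeblood.basenode import BaseNode, BaseNodeWithTaskRequirements

# after this commit
from lifeblood.node_plugin_base import BaseNode, BaseNodeWithTaskRequirements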
91 changes: 3 additions & 88 deletions src/lifeblood/basenode.py
@@ -1,17 +1,15 @@
import asyncio
import re
from copy import deepcopy
from typing import Dict, Optional, Any
from .nodethings import ProcessingResult
from .uidata import NodeUi, ParameterNotFound, Parameter
from .processingcontext import ProcessingContext
from .logging import get_logger
from .enums import NodeParameterType, WorkerType
from .plugin_info import PluginInfo, empty_plugin_info
from .nodegraph_holder_base import NodeGraphHolderBase

# reexport
from .nodethings import ProcessingError

Check warning on line 12 in src/lifeblood/basenode.py (GitHub Actions / flake8): F401 '.nodethings.ProcessingError' imported but unused

from typing import TYPE_CHECKING, Iterable

@@ -56,6 +54,9 @@
self.__parent = graph_holder
self.__parent_nid = node_id_in_graph

def parent(self) -> Optional[NodeGraphHolderBase]:
return self.__parent

def logger(self) -> "Logger":
return self.__logger

@@ -252,89 +253,3 @@
restore state as given by get_state
"""
pass


class BaseNodeWithTaskRequirements(BaseNode):
def __init__(self, name: str):
super(BaseNodeWithTaskRequirements, self).__init__(name)
ui = self.get_ui()
with ui.initializing_interface_lock():
with ui.collapsable_group_block('main worker requirements', 'worker requirements'):
ui.add_parameter('priority adjustment', 'priority adjustment', NodeParameterType.FLOAT, 0).set_slider_visualization(-100, 100)
with ui.parameters_on_same_line_block():
ui.add_parameter('worker cpu cost', 'min <cpu (cores)> preferred', NodeParameterType.FLOAT, 1.0).set_value_limits(value_min=0)
ui.add_parameter('worker cpu cost preferred', None, NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
with ui.parameters_on_same_line_block():
ui.add_parameter('worker mem cost', 'min <memory (GBs)> preferred', NodeParameterType.FLOAT, 0.5).set_value_limits(value_min=0)
ui.add_parameter('worker mem cost preferred', None, NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
ui.add_parameter('worker groups', 'groups (space or comma separated)', NodeParameterType.STRING, '')
ui.add_parameter('worker type', 'worker type', NodeParameterType.INT, WorkerType.STANDARD.value)\
.add_menu((('standard', WorkerType.STANDARD.value),
('scheduler helper', WorkerType.SCHEDULER_HELPER.value)))
with ui.collapsable_group_block('gpu main worker requirements', 'gpu requirements'):
with ui.parameters_on_same_line_block():
ui.add_parameter('worker gpu cost', 'min <gpus> preferred', NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
ui.add_parameter('worker gpu cost preferred', None, NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
with ui.parameters_on_same_line_block():
ui.add_parameter('worker gpu mem cost', 'min <memory (GBs)> preferred', NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
ui.add_parameter('worker gpu mem cost preferred', None, NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)

def __apply_requirements(self, task_dict: dict, node_config: dict, result: ProcessingResult):
if result.invocation_job is not None:
context = ProcessingContext(self, task_dict, node_config)
raw_groups = context.param_value('worker groups').strip()
reqs = result.invocation_job.requirements()
if raw_groups != '':
reqs.add_groups(re.split(r'[ ,]+', raw_groups))

reqs.set_min_resource('cpu_count', context.param_value('worker cpu cost'))
reqs.set_min_resource('cpu_mem', context.param_value('worker mem cost') * 10**9)
reqs.set_min_resource('gpu_count', context.param_value('worker gpu cost'))
reqs.set_min_resource('gpu_mem', context.param_value('worker gpu mem cost') * 10**9)
# preferred
reqs.set_preferred_resource('cpu_count', context.param_value('worker cpu cost preferred'))
reqs.set_preferred_resource('cpu_mem', context.param_value('worker mem cost preferred') * 10**9)
reqs.set_preferred_resource('gpu_count', context.param_value('worker gpu cost preferred'))
reqs.set_preferred_resource('gpu_mem', context.param_value('worker gpu mem cost preferred') * 10**9)

reqs.set_worker_type(WorkerType(context.param_value('worker type')))
result.invocation_job.set_requirements(reqs)
result.invocation_job.set_priority(context.param_value('priority adjustment'))
return result

def _process_task_wrapper(self, task_dict, node_config) -> ProcessingResult:
result = super(BaseNodeWithTaskRequirements, self)._process_task_wrapper(task_dict, node_config)
return self.__apply_requirements(task_dict, node_config, result)

def _postprocess_task_wrapper(self, task_dict, node_config) -> ProcessingResult:
result = super(BaseNodeWithTaskRequirements, self)._postprocess_task_wrapper(task_dict, node_config)
return self.__apply_requirements(task_dict, node_config, result)


# class BaseNodeWithEnvironmentRequirements(BaseNode):
# def __init__(self, name: str):
# super(BaseNodeWithEnvironmentRequirements, self).__init__(name)
# ui = self.get_ui()
# with ui.initializing_interface_lock():
# with ui.collapsable_group_block('main environment resolver', 'task environment resolver additional requirements'):
# ui.add_parameter('main env resolver name', 'resolver name', NodeParameterType.STRING, 'StandardEnvironmentResolver')
# with ui.multigroup_parameter_block('main env resolver arguments'):
# with ui.parameters_on_same_line_block():
# type_param = ui.add_parameter('main env resolver arg type', '', NodeParameterType.INT, 0)
# type_param.add_menu((('int', NodeParameterType.INT.value),
# ('bool', NodeParameterType.BOOL.value),
# ('float', NodeParameterType.FLOAT.value),
# ('string', NodeParameterType.STRING.value),
# ('json', -1)
# ))
#
# ui.add_parameter('main env resolver arg svalue', 'val', NodeParameterType.STRING, '').append_visibility_condition(type_param, '==', NodeParameterType.STRING.value)
# ui.add_parameter('main env resolver arg ivalue', 'val', NodeParameterType.INT, 0).append_visibility_condition(type_param, '==', NodeParameterType.INT.value)
# ui.add_parameter('main env resolver arg fvalue', 'val', NodeParameterType.FLOAT, 0.0).append_visibility_condition(type_param, '==', NodeParameterType.FLOAT.value)
# ui.add_parameter('main env resolver arg bvalue', 'val', NodeParameterType.BOOL, False).append_visibility_condition(type_param, '==', NodeParameterType.BOOL.value)
# ui.add_parameter('main env resolver arg jvalue', 'val', NodeParameterType.STRING, '').append_visibility_condition(type_param, '==', -1)
#
# def _process_task_wrapper(self, task_dict) -> ProcessingResult:
# result = super(BaseNodeWithEnvironmentRequirements, self)._process_task_wrapper(task_dict)
# result.invocation_job.environment_resolver_arguments()
# return result
3 changes: 2 additions & 1 deletion src/lifeblood/basenode_serializer_v1.py
@@ -2,7 +2,8 @@
from io import BytesIO
from dataclasses import dataclass
from .basenode_serialization import NodeSerializerBase, IncompatibleDeserializationMethod
from .basenode import BaseNode, NodeParameterType
from .basenode import BaseNode
from .enums import NodeParameterType

from typing import Callable, Optional, Tuple, Union

3 changes: 2 additions & 1 deletion src/lifeblood/basenode_serializer_v2.py
@@ -2,7 +2,8 @@
import json
from .common_serialization import AttribSerializer, AttribDeserializer
from .basenode_serialization import NodeSerializerBase, IncompatibleDeserializationMethod, FailedToApplyNodeState, FailedToApplyParameters
from .basenode import BaseNode, NodeParameterType
from .basenode import BaseNode
from .enums import NodeParameterType
from .uidata import ParameterFullValue

from typing import Optional, Tuple, Union
@@ -84,7 +85,7 @@
state = node.get_state()
return None if state is None else json.dumps(state, cls=NodeSerializerV2.Serializer).encode('latin1')

def deserialize(self, node_data_provider: NodeDataProvider, data: bytes, state: Optional[bytes]) -> BaseNode:

Check warning on line 88 in src/lifeblood/basenode_serializer_v2.py (GitHub Actions / flake8): C901 'NodeSerializerV2.deserialize' is too complex (11)
try:
data_dict = json.loads(data.decode('latin1'), cls=NodeSerializerV2.Deserializer)
except json.JSONDecodeError:
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/attribute_splitter.py
@@ -1,5 +1,5 @@
from copy import copy
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.enums import NodeParameterType
from lifeblood.nodethings import ProcessingResult, ProcessingError
import math
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/del_attrib.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult
from lifeblood.enums import NodeParameterType

2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/environment_resolver_setter.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode, ProcessingError
from lifeblood.node_plugin_base import BaseNode, ProcessingError
from lifeblood.nodethings import ProcessingResult
from lifeblood.processingcontext import ProcessingContext
from lifeblood.enums import NodeParameterType
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/kill.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult

from typing import Iterable
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/mod_attrib.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult
from lifeblood.taskspawn import TaskSpawn
from lifeblood.exceptions import NodeNotReadyToProcess
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/null.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult

from typing import Iterable
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/parent_children_waiter.py
@@ -1,7 +1,7 @@
import dataclasses
from dataclasses import dataclass
from lifeblood.attribute_serialization import deserialize_attributes_core
from lifeblood.basenode import BaseNode, ProcessingError
from lifeblood.node_plugin_base import BaseNode, ProcessingError
from lifeblood.nodethings import ProcessingResult
from lifeblood.taskspawn import TaskSpawn
from lifeblood.exceptions import NodeNotReadyToProcess
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/python.py
@@ -1,7 +1,7 @@
import re
import time

from lifeblood.basenode import BaseNodeWithTaskRequirements
from lifeblood.node_plugin_base import BaseNodeWithTaskRequirements
from lifeblood.invocationjob import InvocationJob, InvocationEnvironment
from lifeblood.processingcontext import ProcessingContext
from lifeblood.nodethings import ProcessingResult, ProcessingError
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/rename_attrib.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult
from lifeblood.taskspawn import TaskSpawn
from lifeblood.exceptions import NodeNotReadyToProcess
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/set_attrib.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult
from lifeblood.enums import NodeParameterType

2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/spawn_children.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult
from lifeblood.enums import NodeParameterType
from lifeblood.taskspawn import TaskSpawn
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/split_waiter.py
@@ -1,5 +1,5 @@
from dataclasses import dataclass
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult
from lifeblood.taskspawn import TaskSpawn
from lifeblood.exceptions import NodeNotReadyToProcess
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/switch.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult, ProcessingError
from lifeblood.processingcontext import ProcessingContext
from lifeblood.enums import NodeParameterType
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/test.py
@@ -1,6 +1,6 @@
import time

from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.invocationjob import InvocationJob, InvocationEnvironment
from lifeblood.nodethings import ProcessingResult
from lifeblood.uidata import NodeParameterType
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/wait_for_task.py
@@ -1,6 +1,6 @@
from threading import Lock
import shlex
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult, ProcessingContext
from lifeblood.taskspawn import TaskSpawn
from lifeblood.exceptions import NodeNotReadyToProcess
2 changes: 1 addition & 1 deletion src/lifeblood/core_nodes/wedge.py
@@ -1,4 +1,4 @@
from lifeblood.basenode import BaseNode
from lifeblood.node_plugin_base import BaseNode
from lifeblood.nodethings import ProcessingResult, ProcessingError
from lifeblood.enums import NodeParameterType
from lifeblood.uidata import NodeUi, MultiGroupLayout, Parameter
97 changes: 97 additions & 0 deletions src/lifeblood/node_plugin_base.py
@@ -0,0 +1,97 @@
"""
This high-level module has base classes to be used by plugin nodes instead of directly using BaseNode from lifeblood.basenode
"""

import re
from .basenode import BaseNode
from .enums import NodeParameterType, WorkerType
from .processingcontext import ProcessingContext
from .nodethings import ProcessingResult, ProcessingError # unused import - for easy reexport to plugins
from .scheduler.scheduler import Scheduler


class BaseNodeWithTaskRequirements(BaseNode):
def __init__(self, name: str):
super(BaseNodeWithTaskRequirements, self).__init__(name)

ui = self.get_ui()
with ui.initializing_interface_lock():
with ui.collapsable_group_block('main worker requirements', 'worker requirements'):
ui.add_parameter('priority adjustment', 'priority adjustment', NodeParameterType.FLOAT, 0).set_slider_visualization(-100, 100)
with ui.parameters_on_same_line_block():
ui.add_parameter('worker cpu cost', 'min <cpu (cores)> preferred', NodeParameterType.FLOAT, 1.0).set_value_limits(value_min=0)
ui.add_parameter('worker cpu cost preferred', None, NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
with ui.parameters_on_same_line_block():
ui.add_parameter('worker mem cost', 'min <memory (GBs)> preferred', NodeParameterType.FLOAT, 0.5).set_value_limits(value_min=0)
ui.add_parameter('worker mem cost preferred', None, NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
ui.add_parameter('worker groups', 'groups (space or comma separated)', NodeParameterType.STRING, '')
ui.add_parameter('worker type', 'worker type', NodeParameterType.INT, WorkerType.STANDARD.value)\
.add_menu((('standard', WorkerType.STANDARD.value),
('scheduler helper', WorkerType.SCHEDULER_HELPER.value)))
with ui.collapsable_group_block('gpu main worker requirements', 'gpu requirements'):
with ui.parameters_on_same_line_block():
ui.add_parameter('worker gpu cost', 'min <gpus> preferred', NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
ui.add_parameter('worker gpu cost preferred', None, NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
with ui.parameters_on_same_line_block():
ui.add_parameter('worker gpu mem cost', 'min <memory (GBs)> preferred', NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)
ui.add_parameter('worker gpu mem cost preferred', None, NodeParameterType.FLOAT, 0.0).set_value_limits(value_min=0)

def __apply_requirements(self, task_dict: dict, node_config: dict, result: ProcessingResult):
if result.invocation_job is not None:
context = ProcessingContext(self, task_dict, node_config)
raw_groups = context.param_value('worker groups').strip()
reqs = result.invocation_job.requirements()
if raw_groups != '':
reqs.add_groups(re.split(r'[ ,]+', raw_groups))

reqs.set_min_resource('cpu_count', context.param_value('worker cpu cost'))
reqs.set_min_resource('cpu_mem', context.param_value('worker mem cost') * 10**9)
reqs.set_min_resource('gpu_count', context.param_value('worker gpu cost'))
reqs.set_min_resource('gpu_mem', context.param_value('worker gpu mem cost') * 10**9)
# preferred
reqs.set_preferred_resource('cpu_count', context.param_value('worker cpu cost preferred'))
reqs.set_preferred_resource('cpu_mem', context.param_value('worker mem cost preferred') * 10**9)
reqs.set_preferred_resource('gpu_count', context.param_value('worker gpu cost preferred'))
reqs.set_preferred_resource('gpu_mem', context.param_value('worker gpu mem cost preferred') * 10**9)

reqs.set_worker_type(WorkerType(context.param_value('worker type')))
result.invocation_job.set_requirements(reqs)
result.invocation_job.set_priority(context.param_value('priority adjustment'))
return result

def _process_task_wrapper(self, task_dict, node_config) -> ProcessingResult:
result = super(BaseNodeWithTaskRequirements, self)._process_task_wrapper(task_dict, node_config)
return self.__apply_requirements(task_dict, node_config, result)

def _postprocess_task_wrapper(self, task_dict, node_config) -> ProcessingResult:
result = super(BaseNodeWithTaskRequirements, self)._postprocess_task_wrapper(task_dict, node_config)
return self.__apply_requirements(task_dict, node_config, result)


# class BaseNodeWithEnvironmentRequirements(BaseNode):
# def __init__(self, name: str):
# super(BaseNodeWithEnvironmentRequirements, self).__init__(name)
# ui = self.get_ui()
# with ui.initializing_interface_lock():
# with ui.collapsable_group_block('main environment resolver', 'task environment resolver additional requirements'):
# ui.add_parameter('main env resolver name', 'resolver name', NodeParameterType.STRING, 'StandardEnvironmentResolver')
# with ui.multigroup_parameter_block('main env resolver arguments'):
# with ui.parameters_on_same_line_block():
# type_param = ui.add_parameter('main env resolver arg type', '', NodeParameterType.INT, 0)
# type_param.add_menu((('int', NodeParameterType.INT.value),
# ('bool', NodeParameterType.BOOL.value),
# ('float', NodeParameterType.FLOAT.value),
# ('string', NodeParameterType.STRING.value),
# ('json', -1)
# ))
#
# ui.add_parameter('main env resolver arg svalue', 'val', NodeParameterType.STRING, '').append_visibility_condition(type_param, '==', NodeParameterType.STRING.value)
# ui.add_parameter('main env resolver arg ivalue', 'val', NodeParameterType.INT, 0).append_visibility_condition(type_param, '==', NodeParameterType.INT.value)
# ui.add_parameter('main env resolver arg fvalue', 'val', NodeParameterType.FLOAT, 0.0).append_visibility_condition(type_param, '==', NodeParameterType.FLOAT.value)
# ui.add_parameter('main env resolver arg bvalue', 'val', NodeParameterType.BOOL, False).append_visibility_condition(type_param, '==', NodeParameterType.BOOL.value)
# ui.add_parameter('main env resolver arg jvalue', 'val', NodeParameterType.STRING, '').append_visibility_condition(type_param, '==', -1)
#
# def _process_task_wrapper(self, task_dict) -> ProcessingResult:
# result = super(BaseNodeWithEnvironmentRequirements, self)._process_task_wrapper(task_dict)
# result.invocation_job.environment_resolver_arguments()
# return result
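For context, here is a minimal sketch of a plugin node built on the new module. The class name and command are hypothetical, not part of this PR; it assumes the usual BaseNode.process_task override point and the InvocationJob argument-list constructor, and it omits plugin registration details.

from lifeblood.node_plugin_base import BaseNodeWithTaskRequirements
from lifeblood.nodethings import ProcessingResult
from lifeblood.invocationjob import InvocationJob
from lifeblood.processingcontext import ProcessingContext


class MyRenderNode(BaseNodeWithTaskRequirements):
    # Inherits the 'worker requirements' and 'gpu requirements' parameter groups;
    # BaseNodeWithTaskRequirements._process_task_wrapper applies the cpu/mem/gpu
    # resources, worker groups, worker type and priority to the returned job.
    def __init__(self, name: str):
        super().__init__(name)

    def process_task(self, context: ProcessingContext) -> ProcessingResult:
        job = InvocationJob(['echo', 'render'])  # hypothetical placeholder command
        return ProcessingResult(job)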