Commit

Fix params setting (#1121)
* Fix params setting

* Minor

* PEP 8

* Correct test

* Add comment
YamLyubov authored and kasyanovse committed Jul 31, 2023
1 parent 1c9ecb2 commit 420b191
Showing 6 changed files with 40 additions and 72 deletions.
52 changes: 0 additions & 52 deletions examples/simple/multitask_classification_regression_api.py

This file was deleted.

22 changes: 10 additions & 12 deletions fedot/core/pipelines/node.py
@@ -15,7 +15,7 @@
 from fedot.core.operations.operation import Operation
 from fedot.core.operations.operation_parameters import OperationParameters
 from fedot.core.repository.operation_types_repository import OperationTypesRepository
-from fedot.core.utils import DEFAULT_PARAMS_STUB
+from fedot.core.utils import DEFAULT_PARAMS_STUB, NESTED_PARAMS_LABEL


 @register_serializable
@@ -57,9 +57,6 @@ def __init__(self, operation_type: Optional[Union[str, Operation]] = None,
             # Define operation, based on content dictionary
             operation = self._process_content_init(passed_content)
             params = passed_content.get('params', {})
-            # The check for "default_params" is needed for backward compatibility.
-            if params == DEFAULT_PARAMS_STUB:
-                params = {}
             self.metadata = passed_content.get('metadata', NodeMetadata())
         else:
             # There is no content for node
@@ -72,9 +69,10 @@ def __init__(self, operation_type: Optional[Union[str, Operation]] = None,
         self.fit_time_in_seconds = 0
         self.inference_time_in_seconds = 0

-        self._parameters = OperationParameters.from_operation_type(operation.operation_type, **params)
-        super().__init__(content={'name': operation,
-                                  'params': self._parameters.to_dict()}, nodes_from=nodes_from)
+        super().__init__(content={'name': operation}, nodes_from=nodes_from)
+        # use parameters.setter to process input parameters correctly
+        self.parameters = params

         self.log = default_log(self)
         self._fitted_operation = None
         self.rating = None
@@ -325,18 +323,18 @@ def parameters(self) -> dict:

     @parameters.setter
     def parameters(self, params: dict):
-        """Sets custom parameters of the node
+        """Sets custom parameters of the node or set default

         Args:
             params: new parameters to be placed instead of existing
         """
-        if params:
-            # take nested composer params if they appeared
-            if 'nested_space' in params:
-                params = params['nested_space']
+        if params is not None:
+            # The check for "default_params" is needed for backward compatibility.
+            if params == DEFAULT_PARAMS_STUB:
+                params = {}
+            # take nested params if they appeared (mostly used for tuning)
+            if NESTED_PARAMS_LABEL in params:
+                params = params[NESTED_PARAMS_LABEL]
             self._parameters = OperationParameters.from_operation_type(self.operation.operation_type, **params)
             self.content['params'] = self._parameters.to_dict()
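The setter above is now the single entry point for every parameter format, so construction and later assignment behave identically. A minimal sketch of the resulting behavior, mirroring the new test_init_and_set_pipeline_node_with_params added below (it assumes a FEDOT environment where the 'glm' operation defaults to family='gaussian' and link='identity'):

    # Sketch only: all three accepted forms are normalized by the parameters setter.
    from fedot.core.pipelines.node import PipelineNode
    from fedot.core.utils import DEFAULT_PARAMS_STUB, NESTED_PARAMS_LABEL

    node = PipelineNode('glm')

    # The legacy 'default_params' stub is still accepted for backward
    # compatibility and resolves to the operation's default parameters.
    node.parameters = DEFAULT_PARAMS_STUB
    assert node.parameters == {'family': 'gaussian', 'link': 'identity'}

    # A nested dict (as produced during tuning) is unwrapped before being applied.
    node.parameters = {NESTED_PARAMS_LABEL: {'family': 'gaussian', 'link': 'log'}}
    assert node.parameters == {'family': 'gaussian', 'link': 'log'}

    # __init__ now routes 'params' through the same setter, so both paths agree.
    node = PipelineNode(content={'name': 'glm', 'params': DEFAULT_PARAMS_STUB})
    assert node.parameters == {'family': 'gaussian', 'link': 'identity'}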
4 changes: 3 additions & 1 deletion fedot/core/pipelines/tuning/search_space.py
@@ -4,6 +4,8 @@
 from golem.core.tuning.search_space import SearchSpace, OperationParametersMapping
 from hyperopt import hp

+from fedot.core.utils import NESTED_PARAMS_LABEL
+

 class PipelineSearchSpace(SearchSpace):
     """
@@ -325,7 +327,7 @@ def get_parameters_dict(self):
                     'type': 'continuous'}
             },
             'glm': {
-                'nested_space': {
+                NESTED_PARAMS_LABEL: {
                     'hyperopt-dist': hp.choice,
                     'sampling-scope': [[
                         {
1 change: 1 addition & 0 deletions fedot/core/utils.py
@@ -11,6 +11,7 @@
 from sklearn.model_selection import train_test_split

 DEFAULT_PARAMS_STUB = 'default_params'
+NESTED_PARAMS_LABEL = 'nested_space'


 def fedot_project_root() -> Path:
16 changes: 10 additions & 6 deletions test/integration/pipelines/tuning/test_pipeline_tuning.py
@@ -22,7 +22,7 @@
 from fedot.core.pipelines.tuning.tuner_builder import TunerBuilder
 from fedot.core.repository.quality_metrics_repository import RegressionMetricsEnum, ClassificationMetricsEnum
 from fedot.core.repository.tasks import Task, TaskTypesEnum
-from fedot.core.utils import fedot_project_root
+from fedot.core.utils import fedot_project_root, NESTED_PARAMS_LABEL
 from test.unit.tasks.test_forecasting import get_ts_data

 seed(1)
@@ -489,23 +489,27 @@ def test_complex_search_space():
     space = PipelineSearchSpace()
     for i in range(20):
         operation_parameters = space.parameters_per_operation.get("glm")
-        new_value = hp_sample(operation_parameters["nested_space"])
+        new_value = hp_sample(operation_parameters[NESTED_PARAMS_LABEL])
         for params in new_value['sampling-scope'][0]:
             assert params['link'] in GLMImplementation.family_distribution[params['family']]['available_links']


 @pytest.mark.parametrize('tuner', [SimultaneousTuner, SequentialTuner, IOptTuner])
 def test_complex_search_space_tuning_correct(tuner):
     """ Tests SimultaneousTuner for time series forecasting task with GLM model that has a complex glm search space"""
-    train_data, test_data = get_ts_data(n_steps=200, forecast_length=5)
+    train_data, test_data = get_ts_data(n_steps=700, forecast_length=20)

     glm_pipeline = Pipeline(PipelineNode('glm'))
-    glm_custom_params = glm_pipeline.nodes[0].parameters
+    initial_parameters = glm_pipeline.nodes[0].parameters
     tuner = TunerBuilder(train_data.task) \
         .with_tuner(tuner) \
         .with_metric(RegressionMetricsEnum.MSE) \
         .with_iterations(100) \
         .build(train_data)
     tuned_glm_pipeline = tuner.tune(glm_pipeline)
-    new_custom_params = tuned_glm_pipeline.nodes[0].parameters
-    assert glm_custom_params == new_custom_params
+    found_parameters = tuned_glm_pipeline.nodes[0].parameters
+    if tuner.init_metric == tuner.obtained_metric:
+        # TODO: (YamLyubov) Remove the check when IOptTuner will be able to tune categorical parameters.
+        assert initial_parameters == found_parameters
+    else:
+        assert initial_parameters != found_parameters
17 changes: 16 additions & 1 deletion test/unit/pipelines/test_node.py
@@ -1,6 +1,5 @@
 import numpy as np
 import pytest
-from golem.core.dag.graph_utils import ordered_subnodes_hierarchy
 from golem.core.dag.linked_graph_node import LinkedGraphNode
 from sklearn.datasets import load_iris
 from sklearn.linear_model import LogisticRegression
@@ -12,6 +11,7 @@
 from fedot.core.pipelines.node import PipelineNode
 from fedot.core.repository.dataset_types import DataTypesEnum
 from fedot.core.repository.tasks import Task, TaskTypesEnum
+from fedot.core.utils import DEFAULT_PARAMS_STUB, NESTED_PARAMS_LABEL


 @pytest.fixture()
@@ -108,3 +108,18 @@ def test_node_return_correct_operation_info():

     correct_tags = ["simple", "imputation"]
     assert all(correct_tag in operation_tags for correct_tag in correct_tags)
+
+
+@pytest.mark.parametrize('params, expected_params', [(DEFAULT_PARAMS_STUB, {"family": "gaussian", "link": "identity"}),
+                                                     ({}, {"family": "gaussian", "link": "identity"}),
+                                                     ({NESTED_PARAMS_LABEL: {'family': 'gaussian', 'link': 'log'}},
+                                                      {'family': 'gaussian', 'link': 'log'}),
+                                                     ({'family': 'inverse_gaussian', 'link': 'inverse_power'},
+                                                      {'family': 'inverse_gaussian', 'link': 'inverse_power'})])
+def test_init_and_set_pipeline_node_with_params(params, expected_params):
+    node = PipelineNode(content={'name': 'glm', 'params': params})
+    assert node.parameters == expected_params
+
+    node = PipelineNode('glm')
+    node.parameters = params
+    assert node.parameters == expected_params
