Commit

Merge pull request #682 from Avaiga/feature/#470-rename-configure-default-methods

Refactor #470 - Rename configure_default_..() methods to set_default_..()
trgiangdo authored Jul 18, 2023
2 parents 0cf86b5 + b67c662 commit 07349d6
Showing 11 changed files with 55 additions and 49 deletions.
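
For quick reference, the rename only changes the names of the public Config entry points; the underlying static methods are renamed to match. A minimal before/after sketch, using the import path and calls that appear in the updated test files in this diff:

    from taipy.config.config import Config  # import path as used in the test files below

    # Before this PR:
    #     Config.configure_default_data_node("in_memory")
    # After this PR:
    Config.set_default_data_node_configuration("in_memory")

    dn = Config.configure_data_node(id="input_data")
    assert dn.storage_type == "in_memory"  # the default set above flows into later data node configs
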
11 changes: 7 additions & 4 deletions src/taipy/core/config/__init__.py
@@ -44,7 +44,7 @@
DataNodeConfig.default_config(),
[
("configure_data_node", DataNodeConfig._configure),
("configure_default_data_node", DataNodeConfig._configure_default),
("set_default_data_node_configuration", DataNodeConfig._set_default_configuration),
("configure_csv_data_node", DataNodeConfig._configure_csv),
("configure_json_data_node", DataNodeConfig._configure_json),
("configure_parquet_data_node", DataNodeConfig._configure_parquet),
@@ -61,15 +61,18 @@
TaskConfig,
"tasks",
TaskConfig.default_config(),
[("configure_task", TaskConfig._configure), ("configure_default_task", TaskConfig._configure_default)],
[
("configure_task", TaskConfig._configure),
("set_default_task_configuration", TaskConfig._set_default_configuration),
],
)
_inject_section(
PipelineConfig,
"pipelines",
PipelineConfig.default_config(),
[
("configure_pipeline", PipelineConfig._configure),
("configure_default_pipeline", PipelineConfig._configure_default),
("set_default_pipeline_configuration", PipelineConfig._set_default_configuration),
],
)
_inject_section(
@@ -78,7 +81,7 @@
ScenarioConfig.default_config(),
[
("configure_scenario", ScenarioConfig._configure),
("configure_default_scenario", ScenarioConfig._configure_default),
("set_default_scenario_configuration", ScenarioConfig._set_default_configuration),
("configure_scenario_from_tasks", ScenarioConfig._configure_from_tasks),
],
)
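
The (name, callable) pairs above are what expose the renamed methods on Config. A rough sketch of the wiring, assuming _inject_section simply attaches each callable to Config under the given public name (the real helper lives in taipy.config and may do more than this):

    def _attach_section_methods(config_cls, entries):
        # entries: list of (public_name, function) tuples like the ones registered above
        for public_name, function in entries:
            setattr(config_cls, public_name, staticmethod(function))

    # After injection, Config.set_default_data_node_configuration(...) dispatches to
    # DataNodeConfig._set_default_configuration(...), and likewise for tasks,
    # pipelines, and scenarios.
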
8 changes: 4 additions & 4 deletions src/taipy/core/config/data_node_config.py
@@ -358,10 +358,10 @@ def _update(self, as_dict, default_section=None):
self._properties[optional_property] = default_value

@staticmethod
def _configure_default(
def _set_default_configuration(
storage_type: str, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties
) -> "DataNodeConfig":
"""Configure the default values for data node configurations.
"""Set the default values for data node configurations.
This function creates the _default data node configuration_ object,
where all data node configuration objects will find their default
@@ -404,12 +404,12 @@ def _configure(
storage_type (Optional[str]): The data node configuration storage type. The possible values
are None (which is the default value of *"pickle"*, unless it has been overloaded by the
*storage_type* value set in the default data node configuration
(see `(Config.)configure_default_data_node()^`)), *"pickle"*, *"csv"*, *"excel"*,
(see `(Config.)set_default_data_node_configuration()^`)), *"pickle"*, *"csv"*, *"excel"*,
*"sql_table"*, *"sql"*, *"json"*, *"parquet"*, *"mongo_collection"*, *"in_memory"*, or
*"generic"*.
scope (Optional[Scope^]): The scope of the data node configuration.<br/>
The default value is `Scope.SCENARIO` (or the one specified in
`(Config.)configure_default_data_node()^`).
`(Config.)set_default_data_node_configuration()^`).
validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
considered up-to-date. Once the validity period has passed, the data node is considered stale and
relevant tasks will run even if they are skippable (see the
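
As the updated docstring notes, the default values set here are picked up by every later configure_data_node() call unless explicitly overridden. A short sketch mirroring test_set_default_data_node_configuration further down in this diff:

    from datetime import timedelta

    from taipy.config.config import Config

    Config.set_default_data_node_configuration("json", validity_period=timedelta(1))
    dn = Config.configure_data_node(id="input_data4")
    assert dn.storage_type == "json"
    assert dn.validity_period == timedelta(1)
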
4 changes: 2 additions & 2 deletions src/taipy/core/config/pipeline_config.py
@@ -105,8 +105,8 @@ def _configure(id: str, task_configs: Union[TaskConfig, List[TaskConfig]], **pro
return Config.sections[PipelineConfig.name][id]

@staticmethod
def _configure_default(task_configs: Union[TaskConfig, List[TaskConfig]], **properties) -> "PipelineConfig":
"""Configure the default values for pipeline configurations.
def _set_default_configuration(task_configs: Union[TaskConfig, List[TaskConfig]], **properties) -> "PipelineConfig":
"""Set the default values for pipeline configurations.
This function creates the *default pipeline configuration* object,
where all pipeline configuration objects will find their default
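
A minimal usage sketch for the renamed pipeline default, based on the signature above and on calls visible in the test files below (print stands in for a real task function, as in test_file_config.py; note that the deprecation test later in this diff wraps pipeline configuration calls in pytest.warns(DeprecationWarning)):

    from taipy.config.config import Config

    task_cfg = Config.configure_task("t1", print)  # inputs/outputs are optional
    Config.set_default_pipeline_configuration([task_cfg], cron="daily")  # "cron" is an arbitrary extra property
    pipeline_cfg = Config.configure_pipeline("p1", [task_cfg])
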
4 changes: 2 additions & 2 deletions src/taipy/core/config/scenario_config.py
@@ -209,13 +209,13 @@ def _configure_from_tasks(
return Config.sections[ScenarioConfig.name][id]

@staticmethod
def _configure_default(
def _set_default_configuration(
pipeline_configs: List[PipelineConfig],
frequency: Optional[Frequency] = None,
comparators: Optional[Dict[str, Union[List[Callable], Callable]]] = None,
**properties,
) -> "ScenarioConfig":
"""Configure the default values for scenario configurations.
"""Set the default values for scenario configurations.
This function creates the *default scenario configuration* object,
where all scenario configuration objects will find their default
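
A short sketch of the renamed scenario default, mirroring the call in test_write_configuration_file further down in this diff (the Frequency import path is an assumption; the tests' own imports fall outside the visible hunks):

    from taipy.config.common.frequency import Frequency  # assumed import path
    from taipy.config.config import Config

    Config.set_default_scenario_configuration([], Frequency.QUARTERLY, owner="Michel Platini")
    scenario_cfg = Config.configure_scenario("s1", [], frequency=Frequency.QUARTERLY)
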
8 changes: 4 additions & 4 deletions src/taipy/core/config/task_config.py
@@ -54,8 +54,8 @@ def __init__(
self,
id: str,
function,
inputs: Union[DataNodeConfig, List[DataNodeConfig]] = None,
outputs: Union[DataNodeConfig, List[DataNodeConfig]] = None,
inputs: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
outputs: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
skippable: Optional[bool] = False,
**properties,
):
@@ -185,14 +185,14 @@ def _configure(
return Config.sections[TaskConfig.name][id]

@staticmethod
def _configure_default(
def _set_default_configuration(
function,
input: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
output: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
skippable: Optional[bool] = False,
**properties,
) -> "TaskConfig":
"""Configure the default values for task configurations.
"""Set the default values for task configurations.
This function creates the *default task configuration* object,
where all task configuration objects will find their default
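
A hedged sketch of the renamed task default, following the signature above (the clean() function and data node ids are made up for illustration):

    from taipy.config.config import Config

    def clean(raw):  # hypothetical task function
        return raw

    raw_cfg = Config.configure_data_node(id="raw_data")
    clean_cfg = Config.configure_data_node(id="clean_data")

    Config.set_default_task_configuration(clean, input=raw_cfg, output=clean_cfg, skippable=False)
    task_cfg = Config.configure_task("clean_task", clean)  # later task configs inherit the defaults set above
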
11 changes: 7 additions & 4 deletions tests/conftest.py
@@ -372,7 +372,7 @@ def init_config():
DataNodeConfig.default_config(),
[
("configure_data_node", DataNodeConfig._configure),
("configure_default_data_node", DataNodeConfig._configure_default),
("set_default_data_node_configuration", DataNodeConfig._set_default_configuration),
("configure_csv_data_node", DataNodeConfig._configure_csv),
("configure_json_data_node", DataNodeConfig._configure_json),
("configure_sql_table_data_node", DataNodeConfig._configure_sql_table),
@@ -388,15 +388,18 @@
TaskConfig,
"tasks",
TaskConfig.default_config(),
[("configure_task", TaskConfig._configure), ("configure_default_task", TaskConfig._configure_default)],
[
("configure_task", TaskConfig._configure),
("set_default_task_configuration", TaskConfig._set_default_configuration),
],
)
_inject_section(
PipelineConfig,
"pipelines",
PipelineConfig.default_config(),
[
("configure_pipeline", PipelineConfig._configure),
("configure_default_pipeline", PipelineConfig._configure_default),
("set_default_pipeline_configuration", PipelineConfig._set_default_configuration),
],
)
_inject_section(
@@ -405,7 +408,7 @@
ScenarioConfig.default_config(),
[
("configure_scenario", ScenarioConfig._configure),
("configure_default_scenario", ScenarioConfig._configure_default),
("set_default_scenario_configuration", ScenarioConfig._set_default_configuration),
("configure_scenario_from_tasks", ScenarioConfig._configure_from_tasks),
],
)
48 changes: 24 additions & 24 deletions tests/core/config/test_configure_default_config.py
@@ -17,33 +17,33 @@
from taipy.config.config import Config


def test_configure_default_data_node():
def test_set_default_data_node_configuration():
data_node1 = Config.configure_data_node(id="input_data1")
assert data_node1.storage_type == "pickle"
assert data_node1.scope == Scope.SCENARIO
assert data_node1.validity_period is None

Config.configure_default_data_node("in_memory", scope=Scope.GLOBAL)
Config.set_default_data_node_configuration("in_memory", scope=Scope.GLOBAL)
data_node2 = Config.configure_data_node(id="input_data2")
assert data_node2.storage_type == "in_memory"
assert data_node2.scope == Scope.GLOBAL
assert data_node2.validity_period is None

Config.configure_default_data_node("csv")
Config.set_default_data_node_configuration("csv")
data_node3 = Config.configure_data_node(id="input_data3")
assert data_node3.storage_type == "csv"
assert data_node3.scope == Scope.SCENARIO
assert data_node3.validity_period is None

Config.configure_default_data_node("json", validity_period=timedelta(1))
Config.set_default_data_node_configuration("json", validity_period=timedelta(1))
data_node4 = Config.configure_data_node(id="input_data4")
assert data_node4.storage_type == "json"
assert data_node4.scope == Scope.SCENARIO
assert data_node4.validity_period == timedelta(1)


def test_configure_default_data_node_replace_old_default_config():
Config.configure_default_data_node(
def test_set_default_data_node_configuration_replace_old_default_config():
Config.set_default_data_node_configuration(
"in_memory",
prop1="1",
prop2="2",
@@ -52,7 +52,7 @@ def test_configure_default_data_node_replace_old_default_config():
dn1 = Config.configure_data_node(id="dn1")
assert len(dn1.properties) == 3

Config.configure_default_data_node(
Config.set_default_data_node_configuration(
"csv",
prop4="4",
prop5="5",
@@ -70,7 +70,7 @@


def test_config_storage_type_different_from_default_data_node():
Config.configure_default_data_node(
Config.set_default_data_node_configuration(
storage_type="pickle",
custom_property={"foo": "bar"},
scope=Scope.GLOBAL,
@@ -84,8 +84,8 @@
assert csv_dn.scope == Scope.SCENARIO


def test_configure_default_csv_data_node():
Config.configure_default_data_node(
def test_set_default_csv_data_node_configuration():
Config.set_default_data_node_configuration(
storage_type="csv",
default_path="default.csv",
has_header=False,
@@ -131,14 +131,14 @@ def test_configure_default_csv_data_node():
assert dn3.validity_period == timedelta(1)


def test_configure_default_json_data_node():
def test_set_default_json_data_node_configuration():
class MyCustomEncoder(json.JSONEncoder):
...

class MyCustomDecoder(json.JSONDecoder):
...

Config.configure_default_data_node(
Config.set_default_data_node_configuration(
storage_type="json",
default_path="default.json",
encoder=MyCustomEncoder,
@@ -183,8 +183,8 @@ class MyCustomDecoder(json.JSONDecoder):
assert dn3.validity_period == timedelta(1)


def test_configure_default_parquet_data_node():
Config.configure_default_data_node(
def test_set_default_parquet_data_node_configuration():
Config.set_default_data_node_configuration(
storage_type="parquet",
default_path="default.parquet",
compression="gzip",
@@ -244,8 +244,8 @@ def test_configure_default_parquet_data_node():
assert dn3.validity_period == timedelta(1)


def test_configure_default_excel_data_node():
Config.configure_default_data_node(
def test_set_default_excel_data_node_configuration():
Config.set_default_data_node_configuration(
storage_type="excel",
default_path="default.xlsx",
has_header=False,
@@ -294,8 +294,8 @@ def test_configure_default_excel_data_node():
assert dn3.validity_period == timedelta(1)


def test_configure_default_pickle_data_node():
Config.configure_default_data_node(
def test_set_default_pickle_data_node_configuration():
Config.set_default_data_node_configuration(
storage_type="pickle",
default_data=1,
exposed_type="numpy",
@@ -340,8 +340,8 @@ def test_configure_default_pickle_data_node():
assert dn3.validity_period == timedelta(1)


def test_configure_default_sql_table_data_node():
Config.configure_default_data_node(
def test_set_default_sql_table_data_node_configuration():
Config.set_default_data_node_configuration(
storage_type="sql_table",
db_username="default_user",
db_password="default_pwd",
@@ -419,11 +419,11 @@ def test_configure_default_sql_table_data_node():
assert dn3.validity_period == timedelta(1)


def test_configure_default_sql_data_node():
def test_set_default_sql_data_node_configuration():
def query_builder():
...

Config.configure_default_data_node(
Config.set_default_data_node_configuration(
storage_type="sql",
db_username="default_user",
db_password="default_pwd",
@@ -503,8 +503,8 @@ def query_builder():
assert dn3.validity_period == timedelta(1)


def test_configure_default_mongo_collection_data_node():
Config.configure_default_data_node(
def test_set_default_mongo_collection_data_node_configuration():
Config.set_default_data_node_configuration(
storage_type="mongo_collection",
db_name="default_db_name",
collection_name="default_collection",
4 changes: 2 additions & 2 deletions tests/core/config/test_file_config.py
@@ -102,7 +102,7 @@ def test_write_configuration_file():
):
Config.configure_global_app(clean_entities_enabled=True)
Config.configure_job_executions(mode="standalone", max_nb_of_workers=2)
Config.configure_default_data_node(
Config.set_default_data_node_configuration(
storage_type="in_memory",
custom="default_custom_prop",
validity_period=timedelta(1),
@@ -123,7 +123,7 @@
assert dn2_cfg_v2.scope == Scope.SCENARIO
t1_cfg_v2 = Config.configure_task("t1", print, dn1_cfg_v2, dn2_cfg_v2, description="t1 description")
p1_cfg_v2 = Config.configure_pipeline("p1", t1_cfg_v2, cron="daily")
Config.configure_default_scenario([], Frequency.QUARTERLY, owner="Michel Platini")
Config.set_default_scenario_configuration([], Frequency.QUARTERLY, owner="Michel Platini")
Config.configure_scenario("s1", p1_cfg_v2, frequency=Frequency.QUARTERLY, owner="Raymond Kopa")
Config.backup(tf.filename)
actual_config = tf.read().strip()
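
For context, the surrounding test serializes the configuration and compares it to a reference file; a condensed sketch of that round trip (the backup path is a placeholder for the test's temp-file fixture):

    from datetime import timedelta

    from taipy.config.config import Config

    Config.set_default_data_node_configuration(
        storage_type="in_memory", custom="default_custom_prop", validity_period=timedelta(1)
    )
    Config.backup("config_backup.toml")  # placeholder path; the test uses a temp-file fixture
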
2 changes: 1 addition & 1 deletion tests/core/config/test_pipeline_config.py
@@ -171,4 +171,4 @@ def test_pipeline_config_configure_deprecated():
Config.configure_pipeline("pipeline_id", [])

with pytest.warns(DeprecationWarning):
Config.configure_default_pipeline([])
Config.set_default_pipeline_configuration([])
2 changes: 1 addition & 1 deletion tests/core/config/test_scenario_config.py
@@ -234,7 +234,7 @@ def test_clean_config():


def test_pipeline_config_configure_deprecated():
pipeline_config = Config.configure_default_pipeline([])
pipeline_config = Config.set_default_pipeline_configuration([])
scenario_config = Config.configure_scenario("scenario_id", pipeline_configs=[pipeline_config])
with pytest.warns(DeprecationWarning):
scenario_config.pipelines
2 changes: 1 addition & 1 deletion tests/core/data/test_data_manager.py
@@ -536,7 +536,7 @@ def test_create_dn_from_loaded_config_modified_default_config(self):
from src.taipy import core as tp

Config.load(file_config.filename)
Config.configure_default_data_node(storage_type="csv")
Config.set_default_data_node_configuration(storage_type="csv")
scenario = tp.create_scenario(Config.scenarios["my_scenario"])

# assert isinstance(scenario.input, CSVDataNode) TODO Replace the next line by the commented one.
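
The hunk above applies the same rename in a load-then-override flow; a condensed sketch (the TOML path is a placeholder for the test's file_config fixture):

    from taipy.config.config import Config

    Config.load("my_config.toml")  # placeholder for file_config.filename in the test
    Config.set_default_data_node_configuration(storage_type="csv")  # modify the default after loading
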
