Skip to content

Commit

Permalink
Updated test to use mordred_filtered features. Added arguments to def…
Browse files Browse the repository at this point in the history
…ault true and default false lists
  • Loading branch information
stewarthe6 committed Jan 21, 2025
1 parent 53d3f74 commit 8feea36
Show file tree
Hide file tree
Showing 4 changed files with 18 additions and 12 deletions.
7 changes: 5 additions & 2 deletions atomsci/ddm/pipeline/parameter_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -775,8 +775,11 @@ def dict_to_list(inp_dictionary,replace_spaces=False):
temp_list_to_command_line = []

# Special case handling for arguments that are False or True by default
default_false = ['previously_split','use_shortlist','datastore', 'save_results','verbose', 'hyperparam', 'split_only', 'is_ki', 'production']
default_true = ['transformers','previously_featurized','uncertainty', 'rerun']
default_false = ['previously_split','use_shortlist','datastore', 'save_results','verbose', 'hyperparam', 'split_only', 'is_ki', 'production',
'robustscaler_unit_variance']
default_true = ['transformers','previously_featurized','uncertainty', 'rerun',
'robustscaler_with_centering', 'robustscaler_with_scaling',
'powertransformer_standardize']
for key, value in inp_dictionary.items():
if key in default_false:
true_options = ['True','true','ture','TRUE','Ture']
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@
"system": "LC",
"transformers": "True",
"model_type": "NN",
"featurizer": "ecfp",
"featurizer": "computed_descriptors",
"descriptor_type": "rdkit_raw",
"feature_transform_type": "PowerTransformer",
"learning_rate": ".0007",
"layer_sizes": "20,10",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,12 @@
"system": "LC",
"transformers": "True",
"model_type": "NN",
"featurizer": "ecfp",
"featurizer": "computed_descriptors",
"descriptor_type": "rdkit_raw",
"feature_transform_type": "RobustScaler",
"robustscaler_with_centering": "True",
"robustscaler_with_scaling": "True",
"robustscaler_with_quartile_range": "30.0,80.0",
"robustscaler_quartile_range": "30.0,80.0",
"robustscaler_unit_variance": "True",
"learning_rate": ".0007",
"layer_sizes": "20,10",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -345,9 +345,9 @@ def test_sklearn_transformers():

robustscaler_pipe = make_pipeline(robustscaler_params)
transformers_x = robustscaler_pipe.model_wrapper.transformers_x
assert len(transformers_x)==1
assert isinstance(transformers_x[0], trans.SklearnTransformerWrapper)
assert isinstance(transformers_x[0].sklearn_transformer, RobustScaler)
assert len(transformers_x[0])==1
assert isinstance(transformers_x[0][0], trans.SklearnTransformerWrapper)
assert isinstance(transformers_x[0][0].sklearn_transformer, RobustScaler)

powertransformer_params = read_params(
make_relative_to_file('jsons/PowerTransformer_transformer.json'),
Expand All @@ -357,12 +357,13 @@ def test_sklearn_transformers():

powertransformer_pipe = make_pipeline(powertransformer_params)
transformers_x = powertransformer_pipe.model_wrapper.transformers_x
assert len(transformers_x)==1
assert isinstance(transformers_x[0], trans.SklearnTransformerWrapper)
assert isinstance(transformers_x[0].sklearn_transformer, PowerTransformer)
assert len(transformers_x[0])==1
assert isinstance(transformers_x[0][0], trans.SklearnTransformerWrapper)
assert isinstance(transformers_x[0][0].sklearn_transformer, PowerTransformer)

if __name__ == '__main__':
test_kfold_regression_transformers()
test_sklearn_transformers()
#test_kfold_regression_transformers()
#test_kfold_transformers()
#test_all_transformers()
#test_balancing_transformer()

0 comments on commit 8feea36

Please sign in to comment.