Skip to content

Commit

Permalink
Add LogScaler transformer (#935)
Browse files Browse the repository at this point in the history
  • Loading branch information
rwedge authored Jan 27, 2025
1 parent 2317e28 commit 6583173
Show file tree
Hide file tree
Showing 5 changed files with 509 additions and 0 deletions.
2 changes: 2 additions & 0 deletions rdt/transformers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
ClusterBasedNormalizer,
FloatFormatter,
GaussianNormalizer,
LogScaler,
LogitScaler,
)
from rdt.transformers.pii.anonymizer import (
Expand All @@ -47,6 +48,7 @@
'FrequencyEncoder',
'GaussianNormalizer',
'LabelEncoder',
'LogScaler',
'NullTransformer',
'OneHotEncoder',
'OptimizedTimestampEncoder',
Expand Down
117 changes: 117 additions & 0 deletions rdt/transformers/numerical.py
Original file line number Diff line number Diff line change
Expand Up @@ -731,3 +731,120 @@ def _reverse_transform(self, data):

data[:, 0] = reversed_values
return super()._reverse_transform(data)


class LogScaler(FloatFormatter):
    """Transformer for numerical data using log.

    This transformer scales numerical values using log and an optional constant.
    Null values are replaced using a ``NullTransformer``.

    Args:
        missing_value_replacement (object):
            Indicate what to replace the null values with. If an integer or float is given,
            replace them with the given value. If the strings ``'mean'`` or ``'mode'``
            are given, replace them with the corresponding aggregation and if ``'random'``
            replace each null value with a random value in the data range. Defaults to ``mean``.
        missing_value_generation (str or None):
            The way missing values are being handled. There are three strategies:
                * ``random``: Randomly generates missing values based on the percentage of
                  missing values.
                * ``from_column``: Creates a binary column that describes whether the original
                  value was missing. Then use it to recreate missing values.
                * ``None``: Do nothing with the missing values on the reverse transform. Simply
                  pass whatever data we get through.
        constant (float):
            The constant to set as the 0-value for the log-based transform. Defaults to 0
            (do not modify the 0-value of the data).
        invert (bool):
            Whether to invert the data with respect to the constant value. If False, do not
            invert the data (all values will be greater than the constant value). If True,
            invert the data (all the values will be less than the constant value).
            Defaults to False.
        learn_rounding_scheme (bool):
            Whether or not to learn what place to round to based on the data seen during ``fit``.
            If ``True``, the data returned by ``reverse_transform`` will be rounded to that place.
            Defaults to ``False``.
    """

    def __init__(
        self,
        missing_value_replacement='mean',
        missing_value_generation='random',
        constant: float = 0.0,
        invert: bool = False,
        learn_rounding_scheme: bool = False,
    ):
        # Guard clauses: reject bad parameters before storing anything.
        if not isinstance(constant, (int, float)):
            raise ValueError('The constant parameter must be a float or int.')
        if not isinstance(invert, bool):
            raise ValueError('The invert parameter must be a bool.')

        self.constant = constant
        self.invert = invert
        super().__init__(
            missing_value_replacement=missing_value_replacement,
            missing_value_generation=missing_value_generation,
            learn_rounding_scheme=learn_rounding_scheme,
        )

    def _validate_data(self, data: pd.Series):
        """Raise ``InvalidDataError`` unless every value is on the valid side of the constant.

        The log argument must be strictly positive, so inverted data must lie strictly
        below the constant and non-inverted data strictly above it.
        """
        column_name = self.get_input_column()
        if self.invert:
            within_range = all(data < self.constant)
            issue = 'small'
        else:
            within_range = all(data > self.constant)
            issue = 'large'

        if not within_range:
            raise InvalidDataError(
                f"Unable to apply a log transform to column '{column_name}' due to constant"
                f' being too {issue}.'
            )

    def _fit(self, data):
        """Fit the underlying ``FloatFormatter`` and validate the transformed values."""
        super()._fit(data)
        transformed = super()._transform(data)

        # A 2-D result means a missing-value indicator column was appended; only the
        # first column holds the numerical values to validate.
        values = transformed[:, 0] if transformed.ndim > 1 else transformed
        self._validate_data(values)

    def _log_transform(self, data):
        """Apply the log transform after re-validating the (possibly new) values."""
        self._validate_data(data)
        shifted = self.constant - data if self.invert else data - self.constant
        return np.log(shifted)

    def _transform(self, data):
        """Replace nulls via the parent transformer, then log-scale the numeric column."""
        data = super()._transform(data)
        if data.ndim == 1:
            return self._log_transform(data)

        data[:, 0] = self._log_transform(data[:, 0])
        return data

    def _reverse_log(self, data):
        """Invert the log transform: exponentiate, then undo the constant shift."""
        exponentiated = np.exp(data)
        return self.constant - exponentiated if self.invert else exponentiated + self.constant

    def _reverse_transform(self, data):
        """Undo the log scaling and delegate null recreation to the parent transformer."""
        if not isinstance(data, np.ndarray):
            data = data.to_numpy()

        if data.ndim == 1:
            data = self._reverse_log(data)
        else:
            data[:, 0] = self._reverse_log(data[:, 0])

        return super()._reverse_transform(data)
2 changes: 2 additions & 0 deletions tests/integration/test_transformers.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
TEST_COL = 'test_col'

PRIMARY_SDTYPES = ['boolean', 'categorical', 'datetime', 'numerical']
INT64_MIN = np.iinfo(np.int64).min

# Additional arguments for transformers
TRANSFORMER_ARGS = {
Expand All @@ -24,6 +25,7 @@
'FloatFormatter': {'missing_value_generation': 'from_column'},
'GaussianNormalizer': {'missing_value_generation': 'from_column'},
'ClusterBasedNormalizer': {'missing_value_generation': 'from_column'},
'LogScaler': {'constant': INT64_MIN, 'missing_value_generation': 'from_column'},
'LogitScaler': {
'missing_value_generation': 'from_column',
'FROM_DATA': {
Expand Down
59 changes: 59 additions & 0 deletions tests/integration/transformers/test_numerical.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
FloatFormatter,
GaussianNormalizer,
LogitScaler,
LogScaler,
)


Expand Down Expand Up @@ -617,3 +618,61 @@ def test_missing_value_generation_random(self):

# Assert
np.testing.assert_array_almost_equal(reversed_values, expected)


class TestLogScaler:
    def test_learn_rounding(self):
        """Test that transformer learns rounding scheme from data."""
        # Setup
        transformer = LogScaler(
            missing_value_replacement='mean',
            missing_value_generation=None,
            learn_rounding_scheme=True,
        )
        input_data = pd.DataFrame({'test': [1.0, np.nan, 1.5]})
        # The nan is replaced by the mean (1.25) and rounded to one decimal place.
        expected = pd.DataFrame({'test': [1.0, 1.2, 1.5]})

        # Run
        transformer.fit(input_data, 'test')
        round_tripped = transformer.reverse_transform(transformer.transform(input_data))

        # Assert
        np.testing.assert_array_equal(round_tripped, expected)

    def test_missing_value_generation_from_column(self):
        """Test from_column missing value generation with nans present."""
        # Setup
        transformer = LogScaler(
            missing_value_replacement='mean',
            missing_value_generation='from_column',
        )
        input_data = pd.DataFrame({'test': [1.0, np.nan, 1.5]})

        # Run
        transformer.fit(input_data, 'test')
        round_tripped = transformer.reverse_transform(transformer.transform(input_data))

        # Assert: the indicator column restores the original nan exactly.
        np.testing.assert_array_equal(round_tripped, input_data)

    def test_missing_value_generation_random(self):
        """Test random missing_value_generation with nans present."""
        # Setup
        transformer = LogScaler(
            constant=3.0,
            invert=True,
            missing_value_replacement='mode',
            missing_value_generation='random',
        )
        input_data = pd.DataFrame({'test': [1.0, np.nan, 1.5, 1.5]})
        expected = pd.DataFrame({'test': [np.nan, 1.5, 1.5, 1.5]})

        # Run
        transformer.fit(input_data, 'test')
        round_tripped = transformer.reverse_transform(transformer.transform(input_data))

        # Assert
        np.testing.assert_array_equal(round_tripped, expected)
Loading

0 comments on commit 6583173

Please sign in to comment.