WibbleBias

Bases: Module

A class representing a WibbleBias module.

This module applies a wibble bias to a given signal. The bias is currently constrained to always be a positive effect.
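In rough terms (reading from the source listing below), for each granularity $g$ and time step $t$ inside the configured date window the module produces a multiplicative impact

$$
\text{impact}_{g,t} = \operatorname{softplus}\!\left(\text{slope}_g \cdot \frac{t}{T} + \text{intercept}_g\right),
\qquad T = \text{end\_idx} - \text{start\_idx},
$$

where the softplus is scaled (via `SCALE_SO_SOFTPLUS_0_IS_1`) so that a raw value of 0 maps to a multiplier of 1, and the impact is fixed to 1 outside the window. The slope and intercept are the batch's pre-investment values plus small learned per-granularity deviations bounded by the `buffer_radius` hyperparameter.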

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `signal_name` | `str` | The name of the signal. | *required* |
| `encodings` | `dict[str, dict[str, int] \| str]` | A dictionary containing encodings. | *required* |
| `start_date` | `str \| None` | The start date for applying bias. | `None` |
| `end_date` | `str \| None` | The end date for applying bias. | `None` |
| `hyperparameters` | `Hyperparams \| None` | Hyperparameters for the module. | `None` |
| `name` | `str \| None` | The name of the module. | `None` |

Attributes:

| Name | Type | Description |
| --- | --- | --- |
| `signal_name` | `str` | The name of the signal. |
| `encodings` | `dict[str, dict[str, int] \| str]` | A dictionary containing encodings. |
| `start_idx` | `int` | The index of the start date. |
| `end_idx` | `int` | The index of the end date. |
| `wibble_intercept` | `tf.Variable` | The learned per-granularity wibble intercept variable. |
| `wibble_slope` | `tf.Variable` | The learned per-granularity wibble slope variable. |

Methods:

| Name | Description |
| --- | --- |
| `build` | Builds the module. |
| `__call__` | Executes the module. |
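A minimal construction sketch, assuming `encodings["date"]` and `encodings["granularity"]` map labels to integer indices (the values below are hypothetical; the real mappings come from the surrounding data pipeline):

```python
from wt_ml.layers.wibble_bias import WibbleBias

# Hypothetical encodings: "date" and "granularity" map labels to integer indices,
# matching how __init__ and build() index into them.
encodings = {
    "date": {"2021-01-04": 0, "2021-01-11": 1, "2021-01-18": 2},
    "granularity": {"brand_a|region_1": 0, "brand_a|region_2": 1},
}

wibble_bias = WibbleBias(
    signal_name="wibble",
    encodings=encodings,
    start_date="2021-01-04",  # omit (None) to start from the first date
    end_date="2021-01-18",    # omit (None) to run through the end of the date range
)
# build() and __call__ are invoked by the surrounding model, which supplies the
# InputShapes and WibbleBiasInput batches.
```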

Source code in wt_ml/layers/wibble_bias.py
class WibbleBias(Module):
    """
    A class representing a WibbleBias module.

    This module applies a wibble bias to a given signal. The bias is currently
    constrained to always be a positive effect.

    Args:
        signal_name (str): The name of the signal.
        encodings (dict[str, dict[str, int] | str]): A dictionary containing encodings.
        start_date (str | None, optional): The start date for applying bias. Defaults to None.
        end_date (str | None, optional): The end date for applying bias. Defaults to None.
        hyperparameters (Hyperparams | None, optional): Hyperparameters for the module. Defaults to None.
        name (str | None, optional): The name of the module. Defaults to None.

    Attributes:
        signal_name (str): The name of the signal.
        encodings (dict[str, dict[str, int] | str]): A dictionary containing encodings.
        start_idx (int): The index of the start date.
        end_idx (int): The index of the end date.
        wibble_intercept (tf.Variable): The learned per-granularity wibble intercept variable.
        wibble_slope (tf.Variable): The learned per-granularity wibble slope variable.

    Methods:
        build(self, input_shapes: InputShapes): Builds the module.
        __call__(
            self,
            batch: WibbleBiasInput,
            training: bool = False,
            debug: bool = False,
            skip_metrics: bool = False,
        ) -> WibbleBiasIntermediaries: Executes the module.
    """

    def __init__(
        self,
        signal_name: str,
        encodings: dict[str, dict[str, int] | str],
        start_date: str | None = None,
        end_date: str | None = None,
        hyperparameters: Hyperparams | None = None,
        name: str | None = None,
    ):
        super().__init__(hyperparameters=hyperparameters, name=name)
        self.signal_name = signal_name
        self.encodings = encodings
        self.start_idx = get_date_idx(dates=self.encodings["date"], date=start_date, default=0)
        self.end_idx = get_date_idx(dates=self.encodings["date"], date=end_date, default=len(self.encodings["date"]))

    def build(self, input_shapes: InputShapes):  # noqa: U100
        """
        Builds the wibble bias module.

        Args:
            input_shapes (InputShapes): The input shapes for the module.
        """
        # Investment explains 5-25% of our revenue, so this variable is initialized
        # such that it defaults to a multiplier of ~1.15. Softplus(-1.82) ~= 0.1501.

        self.wibble_intercept = self.create_var(
            name=f"{self.signal_name}_wibble_intercept",
            shape=[len(self.encodings["granularity"])],
            annotated_shape=("granularity",),
        )
        self.wibble_slope = self.create_var(
            name=f"{self.signal_name}_wibble_slope",
            shape=[len(self.encodings["granularity"])],
            annotated_shape=("granularity",),
        )

        self.allowed_center = self.hyperparameters.get_float(
            "allowed_center",
            default=1.01,
            min=0.9,
            max=1.2,
            help="The center of the range of values allowed without penalty.",
        )
        self.allowed_margin = self.hyperparameters.get_float(
            "allowed_margin",
            default=0.01,
            min=0.0,
            max=0.1,
            help="The radius of the range of values allowed without penalty.",
        )
        self.margin_weight = self.hyperparameters.get_float(
            "margin_weight",
            default=10.0,
            min=0.0,
            max=1e4,
            help="The strength of the force to push the impacts into the allowed range of values.",
        )
        self.learning_rate_scale = self.hyperparameters.get_float(
            "learning_rate_scale",
            default=1.0,
            min=1.0,
            max=1e6,
            help="Amount to scale the raw values by so they learn faster",
        )
        self.buffer = self.hyperparameters.get_float(
            "buffer_radius",
            default=0.05,
            min=0.01,
            max=1.0,
            help="Percentage by which it is allowed to differ from the provided defaults",
        )

    def __call__(
        self,
        batch: WibbleBiasInput,
        training: bool = False,  # noqa: U100
        debug: bool = False,
        skip_metrics: bool = False,
    ) -> WibbleBiasIntermediaries:
        """
        Executes the wibble bias module.

        Args:
            batch (WibbleBiasInput): The input batch.
            training (bool, optional): Whether the model is in training mode. Defaults to False.
            debug (bool, optional): Whether to return debug intermediaries (pos_impact, intercept, slope). Defaults to False.
            skip_metrics (bool, optional): Whether to skip adding the margin loss. Defaults to False.

        Returns:
            WibbleBiasIntermediaries: The wibble bias intermediaries.
        """
        mask = create_mask(
            batch.hierarchy["date"],
            self.start_idx,
            self.end_idx,
            tf.shape(batch.hierarchy["granularity"])[0],
            as_float=False,
        )
        lr_scale = tf.constant(self.learning_rate_scale, dtype=tf.float32)
        # shape: batch
        gathered_intercept = (
            tf.gather(self.wibble_intercept, batch.hierarchy["granularity"]) * lr_scale * batch.learning_scales
        ) / self.buffer
        gathered_slope = (
            tf.gather(self.wibble_slope, batch.hierarchy["granularity"])
            * lr_scale
            * batch.learning_scales
            / self.buffer
        )
        gathered_intercept_deviations = self.buffer * transform_softbounded(
            gathered_intercept, self.add_loss, "intercept_devs", fcn=tf.tanh, mult=0.00001, scale=1.0
        )
        gathered_slope_deviations = self.buffer * transform_softbounded(
            gathered_slope, self.add_loss, "slope_devs", fcn=tf.tanh, mult=0.00001, scale=1.0
        )
        intercept = batch.preinvestment_intercept + gathered_intercept_deviations
        slope = batch.preinvestment_slope + gathered_slope_deviations

        # MATH LOGIC:
        # raw impacts = slope * week_num + intercept
        # positive impact = softplus(raw impacts)
        # impact = positive impact if pre investment, otherwise 1

        # shape: batch, time
        raw_impact = (
            slope[:, None] * batch.dates_since_start / tf.constant(self.end_idx - self.start_idx, dtype=tf.float32)
            + intercept[:, None]
        )
        pos_impact = softplus(raw_impact, scale=SCALE_SO_SOFTPLUS_0_IS_1)
        impact = tf.where(mask, pos_impact, tf.ones_like(pos_impact))

        if not skip_metrics:
            extreme_values = softplus(intercept + slope, scale=SCALE_SO_SOFTPLUS_0_IS_1)
            # batch
            margin_breaks = tf.math.maximum(
                tf.abs(extreme_values - tf.constant(self.allowed_center, dtype=tf.float32))
                - tf.constant(self.allowed_margin, dtype=tf.float32),
                0.0,
            )
            self.add_loss(
                "margin_loss",
                tf.math.reduce_sum(tf.math.square(margin_breaks) / batch.learning_scales),
                "aux",
                self.margin_weight,
            )
        # shape: batch, time, signal
        impact_by_signal = tf.expand_dims(impact, -1)
        return WibbleBiasIntermediaries(
            impact_by_signal=impact_by_signal,
            impact=impact,
            signal_names=(self.signal_name,),
            pos_impact=pos_impact if debug else None,
            intercept=tf.expand_dims(intercept, -1) if debug else None,
            slope=tf.expand_dims(slope, -1) if debug else None,
        )

__call__(batch, training=False, debug=False, skip_metrics=False)

Executes the wibble bias module.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `batch` | `WibbleBiasInput` | The input batch. | *required* |
| `training` | `bool` | Whether the model is in training mode. | `False` |
| `debug` | `bool` | Whether to return debug intermediaries (`pos_impact`, `intercept`, `slope`). | `False` |
| `skip_metrics` | `bool` | Whether to skip adding the margin loss. | `False` |

Returns:

| Type | Description |
| --- | --- |
| `WibbleBiasIntermediaries` | The wibble bias intermediaries. |
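For intuition, here is a standalone sketch of the core arithmetic in `__call__` with made-up values; the project's `softplus(..., scale=SCALE_SO_SOFTPLUS_0_IS_1)` helper is approximated by dividing `tf.math.softplus` by `softplus(0)` so that a raw impact of 0 becomes a multiplier of 1:

```python
import tensorflow as tf

# Illustrative values only (batch of 2 granularities, 4 time steps).
slope = tf.constant([0.2, -0.1])              # shape: (batch,)
intercept = tf.constant([0.0, 0.05])          # shape: (batch,)
dates_since_start = tf.constant([[0.0, 1.0, 2.0, 3.0],
                                 [0.0, 1.0, 2.0, 3.0]])  # shape: (batch, time)
window_len = tf.constant(3.0)                 # end_idx - start_idx

raw_impact = slope[:, None] * dates_since_start / window_len + intercept[:, None]
# Approximation of the scaled softplus: raw_impact == 0 gives a multiplier of 1.
pos_impact = tf.math.softplus(raw_impact) / tf.math.softplus(0.0)
mask = tf.constant([[True, True, True, False],
                    [True, True, True, False]])  # True inside the bias window
impact = tf.where(mask, pos_impact, tf.ones_like(pos_impact))
```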

Source code in wt_ml/layers/wibble_bias.py
def __call__(
    self,
    batch: WibbleBiasInput,
    training: bool = False,  # noqa: U100
    debug: bool = False,
    skip_metrics: bool = False,
) -> WibbleBiasIntermediaries:
    """
    Executes the wibble bias module.

    Args:
        batch (WibbleBiasInput): The input batch.
        training (bool, optional): Whether the model is in training mode. Defaults to False.
        debug (bool, optional): Whether to return debug intermediaries (pos_impact, intercept, slope). Defaults to False.
        skip_metrics (bool, optional): Whether to skip adding the margin loss. Defaults to False.

    Returns:
        WibbleBiasIntermediaries: The wibble bias intermediaries.
    """
    mask = create_mask(
        batch.hierarchy["date"],
        self.start_idx,
        self.end_idx,
        tf.shape(batch.hierarchy["granularity"])[0],
        as_float=False,
    )
    lr_scale = tf.constant(self.learning_rate_scale, dtype=tf.float32)
    # shape: batch
    gathered_intercept = (
        tf.gather(self.wibble_intercept, batch.hierarchy["granularity"]) * lr_scale * batch.learning_scales
    ) / self.buffer
    gathered_slope = (
        tf.gather(self.wibble_slope, batch.hierarchy["granularity"])
        * lr_scale
        * batch.learning_scales
        / self.buffer
    )
    gathered_intercept_deviations = self.buffer * transform_softbounded(
        gathered_intercept, self.add_loss, "intercept_devs", fcn=tf.tanh, mult=0.00001, scale=1.0
    )
    gathered_slope_deviations = self.buffer * transform_softbounded(
        gathered_slope, self.add_loss, "slope_devs", fcn=tf.tanh, mult=0.00001, scale=1.0
    )
    intercept = batch.preinvestment_intercept + gathered_intercept_deviations
    slope = batch.preinvestment_slope + gathered_slope_deviations

    # MATH LOGIC:
    # raw impacts = slope * week_num + intercept
    # positive impact = softplus(raw impacts)
    # impact = positive impact if pre investment, otherwise 1

    # shape: batch, time
    raw_impact = (
        slope[:, None] * batch.dates_since_start / tf.constant(self.end_idx - self.start_idx, dtype=tf.float32)
        + intercept[:, None]
    )
    pos_impact = softplus(raw_impact, scale=SCALE_SO_SOFTPLUS_0_IS_1)
    impact = tf.where(mask, pos_impact, tf.ones_like(pos_impact))

    if not skip_metrics:
        extreme_values = softplus(intercept + slope, scale=SCALE_SO_SOFTPLUS_0_IS_1)
        # batch
        margin_breaks = tf.math.maximum(
            tf.abs(extreme_values - tf.constant(self.allowed_center, dtype=tf.float32))
            - tf.constant(self.allowed_margin, dtype=tf.float32),
            0.0,
        )
        self.add_loss(
            "margin_loss",
            tf.math.reduce_sum(tf.math.square(margin_breaks) / batch.learning_scales),
            "aux",
            self.margin_weight,
        )
    # shape: batch, time, signal
    impact_by_signal = tf.expand_dims(impact, -1)
    return WibbleBiasIntermediaries(
        impact_by_signal=impact_by_signal,
        impact=impact,
        signal_names=(self.signal_name,),
        pos_impact=pos_impact if debug else None,
        intercept=tf.expand_dims(intercept, -1) if debug else None,
        slope=tf.expand_dims(slope, -1) if debug else None,
    )

build(input_shapes)

Builds the wibble bias module.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `input_shapes` | `InputShapes` | The input shapes for the module. | *required* |
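The `allowed_center`, `allowed_margin`, and `margin_weight` hyperparameters registered here feed the margin penalty added in `__call__`. A standalone numeric sketch with the default values (the extreme values are made up, and the real loss also divides by `batch.learning_scales`):

```python
import tensorflow as tf

# Defaults from build(): allowed_center=1.01, allowed_margin=0.01, margin_weight=10.0.
extreme_values = tf.constant([1.00, 1.03, 1.10])  # hypothetical softplus(intercept + slope)
margin_breaks = tf.maximum(tf.abs(extreme_values - 1.01) - 0.01, 0.0)
# -> [0.0, 0.01, 0.08]: only values outside [1.00, 1.02] incur a penalty.
margin_loss = tf.reduce_sum(tf.square(margin_breaks))  # weighted by margin_weight when added
```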
Source code in wt_ml/layers/wibble_bias.py
def build(self, input_shapes: InputShapes):  # noqa: U100
    """
    Builds the wibble bias module.

    Args:
        input_shapes (InputShapes): The input shapes for the module.
    """
    # Investment explains 5-25% of our revenue, so this variable is initialized
    # such that it defaults to a multiplier of ~1.15. Softplus(-1.82) ~= 0.1501.

    self.wibble_intercept = self.create_var(
        name=f"{self.signal_name}_wibble_intercept",
        shape=[len(self.encodings["granularity"])],
        annotated_shape=("granularity",),
    )
    self.wibble_slope = self.create_var(
        name=f"{self.signal_name}_wibble_slope",
        shape=[len(self.encodings["granularity"])],
        annotated_shape=("granularity",),
    )

    self.allowed_center = self.hyperparameters.get_float(
        "allowed_center",
        default=1.01,
        min=0.9,
        max=1.2,
        help="The center of the range of values allowed without penalty.",
    )
    self.allowed_margin = self.hyperparameters.get_float(
        "allowed_margin",
        default=0.01,
        min=0.0,
        max=0.1,
        help="The radius of the range of values allowed without penalty.",
    )
    self.margin_weight = self.hyperparameters.get_float(
        "margin_weight",
        default=10.0,
        min=0.0,
        max=1e4,
        help="The strength of the force to push the impacts into the allowed range of values.",
    )
    self.learning_rate_scale = self.hyperparameters.get_float(
        "learning_rate_scale",
        default=1.0,
        min=1.0,
        max=1e6,
        help="Amount to scale the raw values by so they learn faster",
    )
    self.buffer = self.hyperparameters.get_float(
        "buffer_radius",
        default=0.05,
        min=0.01,
        max=1.0,
        help="Percentage by which it is allowed to differ from the provided defaults",
    )