diff --git a/projects/home/recap/model/config.py b/projects/home/recap/model/config.py
index 47d0640..407b7c8 100644
--- a/projects/home/recap/model/config.py
+++ b/projects/home/recap/model/config.py
@@ -18,7 +18,7 @@ class DropoutConfig(base_config.BaseConfig):
 
 
 class LayerNormConfig(base_config.BaseConfig):
-  """Configruation for the layer normalization."""
+  """Configuration for the layer normalization."""
 
   epsilon: float = pydantic.Field(
     1e-3, description="Small float added to variance to avoid dividing by zero."
@@ -110,14 +110,6 @@ class FeaturizationConfig(base_config.BaseConfig):
   )
 
 
-class DropoutConfig(base_config.BaseConfig):
-  """Configuration for the dropout layer."""
-
-  rate: pydantic.PositiveFloat = pydantic.Field(
-    0.1, description="Fraction of inputs to be dropped."
-  )
-
-
 class MlpConfig(base_config.BaseConfig):
   """Configuration for MLP model."""
 
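
Below is a minimal, self-contained sketch (not part of the patch) showing how the two config classes touched by this change can be exercised after the duplicate DropoutConfig is removed. It assumes base_config.BaseConfig is a thin wrapper around pydantic.BaseModel, which is not shown in the diff, so a local stand-in class is used to keep the snippet runnable with only pydantic installed.

# Sketch only: BaseConfig below is a stand-in for base_config.BaseConfig (assumption).
import pydantic


class BaseConfig(pydantic.BaseModel):
  """Stand-in for base_config.BaseConfig, assumed to wrap pydantic.BaseModel."""


class DropoutConfig(BaseConfig):
  """Configuration for the dropout layer."""

  rate: pydantic.PositiveFloat = pydantic.Field(
    0.1, description="Fraction of inputs to be dropped."
  )


class LayerNormConfig(BaseConfig):
  """Configuration for the layer normalization."""

  epsilon: float = pydantic.Field(
    1e-3, description="Small float added to variance to avoid dividing by zero."
  )


if __name__ == "__main__":
  print(DropoutConfig())                # uses the declared default: rate=0.1
  print(LayerNormConfig(epsilon=1e-5))  # overrides the default epsilon
  # Validation is handled by pydantic: DropoutConfig(rate=-0.2) raises an error
  # because rate is declared as PositiveFloat.

With a single DropoutConfig definition remaining near the top of config.py, any module that imports it gets one unambiguous schema; the deleted copy would otherwise have shadowed the original when both were defined in the same file.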