ENH: new methods added to smash.factory.Net (#312)
* ENH: add set, get and forward_pass methods for Net

* ENH: add test net forward pass + fix doc net

* Fix indent docstring net

* add weight/bias_shape into trainable layer

* Change x_train name to x in net

* Generate baseline

* Fix doc net

* Fix raise message net.set_weight_bias

* ENH: add random_state to set_weight and set_bias methods

* Minor fix typos

* FIX: re-generate baseline and diff_baseline

* Apply suggestion changes from FC and PAG review
nghi-truyen authored Sep 10, 2024
1 parent d3581d6 commit f1d6416
Showing 9 changed files with 709 additions and 279 deletions.
5 changes: 5 additions & 0 deletions doc/source/api_reference/sub-packages/factory.rst
@@ -42,6 +42,11 @@ Neural Network Configuration
     smash/smash.factory.Net.add_dropout
     smash/smash.factory.Net.copy
     smash/smash.factory.Net.set_trainable
+    smash/smash.factory.Net.set_weight
+    smash/smash.factory.Net.set_bias
+    smash/smash.factory.Net.get_weight
+    smash/smash.factory.Net.get_bias
+    smash/smash.factory.Net.forward_pass
 
 Sample Generation
 *****************
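Taken together, these five new entries expose the network parameters directly on `Net`. The sketch below illustrates the intended workflow; the signatures are inferred from the standardization helpers added in this commit (a `value` list with one entry per trainable layer, an optional `random_state`, and an `x` matching the first layer's input shape), so treat names and semantics as assumptions rather than documented API.

import numpy as np
import smash

# Hypothetical two-layer graph; layer construction uses the existing Net API.
net = smash.factory.Net()
net.add_dense(neurons=16, input_shape=(8,), activation="relu")
net.add_dense(neurons=2, activation="sigmoid")

# One entry per trainable layer: scalars are broadcast to the layer shape,
# arrays must match it exactly (here (16, 8) for the first Dense layer).
net.set_weight([np.zeros((16, 8)), 0.5])
net.set_bias(value=None, random_state=11)  # None -> bias initializer with a fixed seed

weights = net.get_weight()  # list of np.ndarray, one per trainable layer
biases = net.get_bias()

y = net.forward_pass(np.random.rand(8))  # x.shape must equal the input shape (8,)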
4 changes: 2 additions & 2 deletions smash/core/model/model.py
@@ -2528,7 +2528,7 @@ def set_nn_parameters_weight(
             Random seed used for the initialization in case of using **initializer**.
 
             .. note::
-                If not given, the parameters will be initialized with a random seed.
+                If not given, the neural network parameters will be initialized with a random seed.
 
         See Also
         --------
@@ -2624,7 +2624,7 @@ def set_nn_parameters_bias(
             Random seed used for the initialization in case of using **initializer**.
 
             .. note::
-                If not given, the parameters will be initialized with a random seed.
+                If not given, the neural network parameters will be initialized with a random seed.
 
         See Also
         --------
7 changes: 4 additions & 3 deletions smash/core/simulation/_doc.py
@@ -137,11 +137,12 @@
     `int` or None, default None
     """,
     """
-    A random seed used to initialize neural network weights.
+    A random seed used to initialize neural network parameters.
 
     .. note::
-        If not given, the weights will be initialized with a random seed. This options is only used when
-        **mapping** is ``'ann'``.
+        If not given, the neural network parameters will be initialized with a random seed. This options
+        is only used when **mapping** is ``'ann'``, and the weights and biases of **net** are not yet
+        initialized.
     """,
 ),
 "termination_crit": (
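The reworded note also tightens the optimizer contract: `random_state` now only takes effect when **net** reaches the optimizer with uninitialized weights and biases. A hedged illustration, assuming `net` and `random_state` are passed through `optimize_options` as elsewhere in the smash documentation:

# Weights and biases set beforehand: random_state below has no effect,
# since nothing is left to initialize.
net.set_weight([0.01, 0.01])
net.set_bias([0.0, 0.0])

model.optimize(
    mapping="ann",
    optimize_options={"net": net, "random_state": 21},
)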
22 changes: 8 additions & 14 deletions smash/factory/net/_layers.py
@@ -105,18 +105,7 @@ def _initialize_nn_parameter(n_in: int, n_out: int, initializer: str) -> np.ndar
 
 
 def _set_initialized_wb_to_layer(layer: Layer, kind: str):
-    if layer.layer_name() == "Dense":
-        n_in = layer.input_shape[-1]
-        n_out = layer.neurons
-
-    elif layer.layer_name() == "Conv2D":
-        n_in = layer.input_shape[-1] * np.prod(layer.filter_shape)
-        n_out = layer.filters
-        # The real shape of W in this case is (filters, depth, height, width),
-        # which is simplified as (filters, depth*height*width)
-
-    else:  # Should be unreachable
-        pass
+    n_out, n_in = layer.weight_shape
 
     if kind == "bias":
         initializer = layer.bias_initializer
@@ -150,14 +139,15 @@ def __init__(
         self.trainable = True
 
         self.weight = None
+        self.weight_shape = (neurons, input_shape[-1])
 
         self.bias = None
+        self.bias_shape = (1, neurons)
 
         self.kernel_initializer = kernel_initializer
         self.bias_initializer = bias_initializer
 
-    # TODO TYPE HINT: replace function by Callable
-    def _initialize(self, optimizer: function):  # noqa: F821
+    def _initialize(self, optimizer: callable):
         if self.weight is None:
             _set_initialized_wb_to_layer(self, "weight")
         if self.bias is None:
@@ -212,8 +202,12 @@ def __init__(
         self.trainable = True
 
         self.weight = None
+        self.weight_shape = (filters, input_shape[-1] * np.prod(filter_shape))
+        # The real shape of W in this case is (filters, depth, height, width),
+        # which is simplified as (filters, depth*height*width)
 
         self.bias = None
+        self.bias_shape = (1, filters)
 
         self.kernel_initializer = kernel_initializer
         self.bias_initializer = bias_initializer
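Storing `weight_shape` and `bias_shape` at construction time lets `_set_initialized_wb_to_layer` simply unpack `(n_out, n_in)` instead of re-deriving it per layer type. A standalone sketch of the two conventions (helper names hypothetical):

import numpy as np

def dense_shapes(neurons: int, input_shape: tuple) -> tuple:
    # Dense: W is (neurons, n_in); b is (1, neurons)
    return (neurons, input_shape[-1]), (1, neurons)

def conv2d_shapes(filters: int, filter_shape: tuple, input_shape: tuple) -> tuple:
    # Conv2D: W is conceptually (filters, depth, height, width), stored
    # flattened as (filters, depth*height*width); b is (1, filters)
    return (filters, input_shape[-1] * int(np.prod(filter_shape))), (1, filters)

print(dense_shapes(16, (8,)))                  # ((16, 8), (1, 16))
print(conv2d_shapes(32, (3, 3), (64, 64, 4)))  # ((32, 36), (1, 32))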
95 changes: 94 additions & 1 deletion smash/factory/net/_standardize.py
@@ -8,7 +8,11 @@
 
 if TYPE_CHECKING:
     from smash.factory.net.net import Net
-    from smash.util._typing import AnyTuple, ListLike, Numeric
+    from smash.util._typing import Any, AnyTuple, ListLike, Numeric
+
+from smash.factory.samples._standardize import (
+    _standardize_generate_samples_random_state,
+)
 
 
 def _standardize_add_dense_args(
@@ -108,6 +112,95 @@ def _standardize_set_trainable_args(net: Net, trainable: tuple | list) -> list:
     return trainable
 
 
+def _standardize_set_weight_args(
+    net: Net, value: list[Any] | None, random_state: int | None
+) -> list[np.ndarray]:
+    value = _standardize_set_weight_bias_value(net, value, "weight")
+    random_state = _standardize_generate_samples_random_state(random_state)
+
+    return value, random_state
+
+
+def _standardize_set_bias_args(
+    net: Net, value: list[Any] | None, random_state: int | None
+) -> list[np.ndarray]:
+    value = _standardize_set_weight_bias_value(net, value, "bias")
+    random_state = _standardize_generate_samples_random_state(random_state)
+
+    return value, random_state
+
+
+def _standardize_forward_pass_args(net: Net, x: np.ndarray) -> np.ndarray:
+    if not net.layers:
+        raise ValueError("The graph of the neural network has not been set yet")
+
+    if any(layer.weight is None for layer in net.layers if hasattr(layer, "weight")):
+        raise ValueError("The neural network weight is not set yet")
+
+    if any(layer.bias is None for layer in net.layers if hasattr(layer, "bias")):
+        raise ValueError("The neural network bias is not set yet")
+
+    return _standardize_forward_pass_x(net.layers[0].input_shape, x)
+
+
+def _standardize_set_weight_bias_value(
+    net: Net,
+    value: list[Any] | None,
+    kind: str,
+) -> list[np.ndarray] | None:
+    if not net.layers:
+        raise ValueError("The graph of the neural network has not been set yet")
+
+    else:
+        trainable_layers = [layer for layer in net.layers if hasattr(layer, kind)]
+
+        if value is None:
+            pass
+
+        elif isinstance(value, list):
+            if len(value) != len(trainable_layers):
+                raise ValueError(
+                    f"Inconsistent size between value argument and the number of trainable layers: "
+                    f"{len(value)} != {len(trainable_layers)}"
+                )
+
+            else:
+                for i, arr in enumerate(value):
+                    kind_shape = getattr(trainable_layers[i], f"{kind}_shape")
+
+                    if isinstance(arr, (int, float)):
+                        value[i] = np.full(kind_shape, arr)
+
+                    elif isinstance(arr, np.ndarray):
+                        if arr.shape != kind_shape:
+                            raise ValueError(
+                                f"Invalid shape for value argument. Could not broadcast input array "
+                                f"from shape {arr.shape} into shape {kind_shape}"
+                            )
+                    else:
+                        raise TypeError(
+                            "Each element of value argument must be of Numeric type (int, float) or np.ndarray"
+                        )
+
+        else:
+            raise TypeError("value argument must be a list of a same size with layers")
+
+    return value
+
+
+def _standardize_forward_pass_x(input_shape: tuple, x: np.ndarray) -> np.ndarray:
+    if isinstance(x, np.ndarray):
+        if x.shape != input_shape:
+            raise ValueError(
+                f"Invalid shape for input x. Could not broadcast input array "
+                f"from shape {x.shape} into shape {input_shape}"
+            )
+    else:
+        raise TypeError("Input x must be a np.ndarray")
+
+    return x
+
+
 def _standardize_add_conv2d_filter_shape(filter_shape: Numeric | tuple | list) -> tuple:
     if isinstance(filter_shape, (int, float)):
         filter_shape = (int(filter_shape),) * 2
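In practice, `_standardize_set_weight_bias_value` lets callers of the new `set_weight`/`set_bias` methods mix scalars and exact-shape arrays, one entry per trainable layer. A sketch of accepted and rejected inputs for a net whose weight shapes are (16, 8) and (2, 16), under the same assumptions as above:

import numpy as np

net.set_weight([0.1, np.ones((2, 16))])  # OK: scalar broadcast via np.full + exact-shape array
net.set_weight(None)                     # OK: leaves parameters unset so initializers apply later

net.set_weight([0.1])                    # ValueError: 1 != 2 trainable layers
net.set_weight([np.ones((8, 16)), 0.0])  # ValueError: cannot broadcast (8, 16) into (16, 8)
net.set_weight(("a", "b"))               # TypeError: value argument must be a list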
