# Требуется реализовать на языке Python класс, описывающий полносвязный слой нейронной сети
import numpy as np
from typing import Optional, Tuple, Union
class Dense
"""Implements fully-connected layer""
def __init__(self, n_in: int, n_out: int, use_bias: bool = True)
"""Initializes Dense layer
The weights are initialized using uniformly distributed values in range [-1, 1]. Bias vector is not initialized if `use_bias` is False
Weigths matrix has the shape (`n_in`, `n_out`), bias vector has the shape (`n_out`, )
Arguments
n_in: Positive integer, dimensionality of input space
n_out: Positive integer, dimensionality of output space
use_bias: Whether the layer uses a bias vector.""
pas
@propert
def weights(self) -> tuple[np.ndarray, np.ndarray] | tuple[np.ndarray]
"""Returns weights used by the layer.""
pas
@propert
def input(self) -> np.ndarray
"""Returns the last input received by the layer""
pas
def __call__(self, x: np.ndarray) -> np.ndarray
"""Performs the layer forward pass
Arguments
x: Input array of shape (`batch_size`, `n_in`
Returns
An array of shape (`batch_size`, `n_out`)""
pas
def grad(self, gradOutput: np.ndarray) -> tuple[np.ndarray, tuple[np.ndarray, np.ndarray]] | tuple[np.ndarray, tuple[np.ndarray]]
"""Computes layer gradient
Arguments
gradOutput: Gradient of loss function with respect to the layer output, an array of shape (`batch_size`, `n_out`)
Returns
A tuple object
Gradient of loss function with respect to the layer input, an array of shape (`batch_size`, `n_in`
Gradient of loss function with respect to the layer's weights
An array of shape (`n_in`, `n_out`)
Optional array of shape (`n_out`, ).""
pas
# Для реализации класса разрешено использовать только модуль numpy.