class SHAInet::Network

Defined in:

shainet/basic/exprimental.cr
shainet/basic/network_run.cr
shainet/basic/network_setup.cr

Constant Summary

CONNECTION_TYPES = ["full", "ind_to_ind", "random"]
COST_FUNCTIONS = ["mse", "c_ent"]
LAYER_TYPES = ["input", "hidden", "output"]
Log = ::Log.for(self)

Constructor Detail

def self.new #

First creates an empty shell of the entire network; layers are added afterwards with #add_layer.


[View source]
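
A minimal construction sketch (the require path follows the usual shard convention and is an assumption here):

    require "shainet"

    # Create the empty shell; layers are added afterwards with #add_layer
    net = SHAInet::Network.new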

Instance Method Detail

def add_layer(l_type : Symbol | String, l_size : Int32, n_type : Symbol | String = "memory", activation_function : ActivationFunction = SHAInet.sigmoid) #

Create and populate a layer with neurons.
l_type — the layer type: :input, :hidden or :output
l_size — how many neurons the layer contains
n_type — advanced option for selecting different neuron types


[View source]
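
A sketch of populating the shell created above, assuming a small 2-2-1 topology chosen purely for illustration:

    net.add_layer(:input, 2)                             # 2 input neurons
    net.add_layer(:hidden, 2, :memory, SHAInet.sigmoid)  # defaults for n_type and activation, written out
    net.add_layer(:output, 1)
    net.fully_connect                                    # wire the layers together (see #fully_connect below)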
def all_neurons : Array(SHAInet::Neuron) #

General network parameters


[View source]
def all_synapses : Array(SHAInet::Synapse) #

General network parameters


[View source]
def alpha : Float64 #

Parameters for Adam


[View source]
def alpha=(alpha : Float64) #

Parameters for Adam


[View source]
def b_gradient : Array(Float64) #

[View source]
def beta1 : Float64 #

[View source]
def beta2 : Float64 #

[View source]
def clean_dead_neurons #

[View source]
def connect_ltl(src_layer : Layer, dest_layer : Layer, connection_type : Symbol | String) #

Connect two specific layers with synapses


[View source]
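
For non-standard topologies, layers can be wired by hand instead of with #fully_connect. A sketch using the layer getters documented on this page and a connection type from CONNECTION_TYPES:

    src  = net.input_layers.first
    dest = net.hidden_layers.first
    net.connect_ltl(src, dest, :full)  # also "ind_to_ind" or "random"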
def delta_max : Float64 #

Parameters for Rprop


[View source]
def delta_max=(delta_max : Float64) #

Parameters for Rprop


[View source]
def delta_min : Float64 #

Parameters for Rprop


[View source]
def delta_min=(delta_min : Float64) #

Parameters for Rprop


[View source]
def epsilon : Float64 #

[View source]
def error_signal : Array(Float64) #

[View source]
def etah_minus : Float64 #

Parameters for Rprop


[View source]
def etah_minus=(etah_minus : Float64) #

Parameters for Rprop


[View source]
def etah_plus : Float64 #

Parameters for Rprop


[View source]
def etah_plus=(etah_plus : Float64) #

Parameters for Rprop


[View source]
def evaluate(input_data : Array(GenNum), expected_output : Array(GenNum), cost_function : CostFunction = SHAInet.quadratic_cost) #

Quantifies how well the network performed for a single input compared to the expected output. Returns the actual output and updates the error gradient of the output layer.


[View source]
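
A single-sample sketch with the default quadratic cost (the XOR-style values are illustrative only):

    output = net.evaluate([0.0, 1.0], [1.0])  # forward pass, returns the actual output
    puts net.error_signal                     # error gradient of the output layer, updated by the call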
def evaluate_exp(input_data : Array(GenNum), expected_output : Array(GenNum), cost_function : CostFunction = SHAInet.quadratic_cost, stealth : Bool = true) #

[View source]
def fully_connect #

Connect all the layers in order: input, hidden, output (input layers and output layers are not connected among themselves).


[View source]
def get_cost_proc(function_name : String) : CostFunction #

[View source]
def hidden_layers : Array(SHAInet::Layer) #

General network parameters


[View source]
def input_layers : Array(SHAInet::Layer) #

General network parameters


[View source]
def inspect #
Description copied from class Object

Returns an unambiguous and information-rich string representation of this object, typically intended for developers.

This method should usually not be overridden. It delegates to #inspect(IO) which can be overridden for custom implementations.

Also see #to_s.


[View source]
def learning_rate : Float64 #

Parameters for SGD + Momentum


[View source]
def learning_rate=(learning_rate : Float64) #

Parameters for SGD + Momentum


[View source]
def load_from_file(file_path : String) #

[View source]
def log_summary(e) #

[View source]
def momentum : Float64 #

Parameters for SGD + Momentum


[View source]
def momentum=(momentum : Float64) #

Parameters for SGD + Momentum


[View source]
def mse : Float64 #

[View source]
def output_layers : Array(SHAInet::Layer) #

General network parameters


[View source]
def prev_mse : Float64 #

[View source]
def randomize_all_biases #

[View source]
def randomize_all_weights #

[View source]
def run(input : Array(GenNum), stealth : Bool = false) : Array(Float64) #

Run an input through the network to get an output (weights & biases do not change)


[View source]
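
An inference sketch; the input size must match the input layer, and the values are illustrative:

    output = net.run([0.0, 1.0])                 # => Array(Float64)
    output = net.run([0.0, 1.0], stealth: true)  # stealth presumably quiets logging (assumption from the flag name)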
def run_exp(input : Array(GenNum), stealth : Bool = false) : Array(Float64) #

[View source]
def save_to_file(file_path : String) #

[View source]
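
A save/load round-trip sketch; the file name and extension are arbitrary choices here:

    net.save_to_file("xor_net.nn")

    restored = SHAInet::Network.new
    restored.load_from_file("xor_net.nn")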
def test(test_set) #

Evaluate the network performance on a test set


[View source]
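
The parameter is untyped in the signature; the sketch below assumes test_set uses the same [input, expected] pair layout as the data passed to #train:

    test_set = [
      [[0.0, 0.0], [0.0]],
      [[1.0, 1.0], [0.0]],
    ]
    net.test(test_set)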
def time_step : Int32 #

[View source]
def total_error : Float64 #

[View source]
def train(data : Array(Array(Array(GenNum))) | SHAInet::TrainingData, training_type : Symbol | String, cost_function : Symbol | String | CostFunction = :mse, epochs : Int32 = 1, error_threshold : Float64 = 0.00000001, mini_batch_size : Int32 = 1, log_each : Int32 = 1000, show_slice : Bool = false, autosave : NamedTuple(freq: Int32, path: String) | Nil = nil) #

Train the model.


[View source]
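
A training sketch on XOR-style data. :sgdm is assumed to be a valid training_type (it is the default of #train_batch below); the hyperparameters are illustrative, not recommendations:

    data = [
      [[0.0, 0.0], [0.0]],
      [[0.0, 1.0], [1.0]],
      [[1.0, 0.0], [1.0]],
      [[1.0, 1.0], [0.0]],
    ]

    net.train(
      data: data,
      training_type: :sgdm,  # SGD + momentum; the Adam and Rprop parameters above suggest other types exist
      cost_function: :mse,   # one of COST_FUNCTIONS, or a CostFunction proc
      epochs: 5000,
      error_threshold: 1e-6,
      log_each: 1000
    )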
def train_batch(data : Array(Array(Array(GenNum))) | SHAInet::TrainingData, training_type : Symbol | String = :sgdm, cost_function : Symbol | String | CostFunction = :mse, epochs : Int32 = 1, error_threshold : Float64 = 0.00000001, mini_batch_size : Int32 = 1, log_each : Int32 = 1, show_slice : Bool = false, autosave : NamedTuple(freq: Int32, path: String) | Nil = nil) #

This method is kept to match the syntax of previous versions; the #train method can be used instead.


[View source]
def train_es(data : Array(Array(Array(GenNum))) | SHAInet::TrainingData, pool_size : Int32, learning_rate : Float64, sigma : Float64, cost_function : Symbol | String | CostFunction = :c_ent, epochs : Int32 = 1, mini_batch_size : Int32 = 1, error_threshold : Float64 = 0.0, log_each : Int32 = 1, show_slice : Bool = false, autosave : NamedTuple(freq: Int32, path: String) | Nil = nil) #

Use evolutionary strategies for network optimization instead of a gradient-based approach.


[View source]
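
An evolution-strategies sketch reusing the data array from the #train example above; pool_size, learning_rate and sigma are illustrative guesses:

    net.train_es(
      data: data,
      pool_size: 50,
      learning_rate: 0.1,
      sigma: 0.1,
      cost_function: :c_ent,
      epochs: 100,
      log_each: 10
    )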
def train_es_exp(data : Array(Array(Array(GenNum))) | SHAInet::TrainingData, pool_size : Int32, learning_rate : Float64, sigma : Float64, cost_function : Symbol | String | CostFunction = :c_ent, epochs : Int32 = 1, mini_batch_size : Int32 = 1, error_threshold : Float64 = 0.0, log_each : Int32 = 1, show_slice : Bool = false, autosave : NamedTuple(freq: Int32, path: String) | Nil = nil) #

[View source]
def update_biases(learn_type : Symbol | String, batch : Bool = false) #

Update biases based on the learning type chosen


[View source]
def update_mse #

Calculate MSE from the error signal of the output layer


[View source]
def update_weights(learn_type : Symbol | String, batch : Bool = false) #

Update weights based on the learning type chosen


[View source]
def validate_values(array : Array(Float64), location : String) #

[View source]
def verify_data(data : Array(Array(Array(GenNum)))) #

[View source]
def verify_net_before_train #

[View source]
def w_gradient : Array(Float64) #

[View source]