Batch norm layer #354

Open · wants to merge 11 commits into master
2 changes: 1 addition & 1 deletion src_cpp/common/worker_definitions_ag.h
@@ -5,7 +5,7 @@

namespace nerlnet {

-enum LayerTypeEnum{LAYER_TYPE_DEFAULT=0,LAYER_TYPE_SCALING=1,LAYER_TYPE_CONV=2,LAYER_TYPE_PERCEPTRON=3,LAYER_TYPE_POOLING=4,LAYER_TYPE_PROBABILISTIC=5,LAYER_TYPE_LSTM=6,LAYER_TYPE_RECCURRENT=7,LAYER_TYPE_UNSCALING=8,LAYER_TYPE_FLATTEN=9,LAYER_TYPE_BOUNDING=10};
+enum LayerTypeEnum{LAYER_TYPE_DEFAULT=0,LAYER_TYPE_SCALING=1,LAYER_TYPE_CONV=2,LAYER_TYPE_PERCEPTRON=3,LAYER_TYPE_POOLING=4,LAYER_TYPE_PROBABILISTIC=5,LAYER_TYPE_BATCHNORMALIZATION=6,LAYER_TYPE_LSTM=7,LAYER_TYPE_RECCURRENT=8,LAYER_TYPE_UNSCALING=9,LAYER_TYPE_FLATTEN=10,LAYER_TYPE_BOUNDING=11};
enum ProbabilisticActivationEnum{PROBABILISTIC_ACTIVATION_BINARY=1,PROBABILISTIC_ACTIVATION_LOGISTIC=2,PROBABILISTIC_ACTIVATION_COMPETITIVE=3,PROBABILISTIC_ACTIVATION_SOFTMAX=4};
enum ScalingEnum{SCALING_NONE=1,SCALING_MINMAX=2,SCALING_MEANSTD=3,SCALING_STD=4,SCALING_LOG=5};
enum BoundingEnum{BOUNDING_NONE=1,BOUNDING_BOUNDING=2};
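
Because this enum is mirrored by hand in layers_types_ag.hrl and in nerlPlanner's LayerTypeMap, the renumbering has to land in all three places at once. A minimal drift check, sketched in Python (the dict mirrors the C++ values above; the helper is hypothetical and assumes the planner uses the same display names):

# Hypothetical guard: mirrors LayerTypeEnum after this change.
CPP_LAYER_TYPE_CODES = {
    "Default": 0, "Scaling": 1, "Conv": 2, "Perceptron": 3, "Pooling": 4,
    "Probabilistic": 5, "BatchNormalization": 6, "LSTM": 7, "Reccurrent": 8,
    "Unscaling": 9, "Flatten": 10, "Bounding": 11,
}

def check_against_planner(planner_map):
    # planner_map: e.g. LayerTypeMap from JsonElementWorkerDefinitions.py
    for name, code in planner_map.items():
        assert CPP_LAYER_TYPE_CODES[name] == int(code), f"code drift on {name}"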
9 changes: 9 additions & 0 deletions src_cpp/opennnBridge/nerlWorkerOpenNN.cpp
@@ -486,6 +486,15 @@ namespace nerlnet
}
break;
}
case LAYER_TYPE_BATCHNORMALIZATION:
{
int layer_size = curr_layer->get_dim_size(DIM_X_IDX);
// Allocate on the heap; neural_network_ptr stores this pointer beyond the
// current scope. (An uninitialized pointer here would be undefined behavior.)
BatchNormalizationLayer* newLayer = new BatchNormalizationLayer();
newLayer->set(layer_size);
neural_network_ptr->add_layer(newLayer);

break;
}
}
curr_layer = curr_layer->get_next_layer_ptr();
}
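
For context on what the new case constructs: a batch-normalization layer standardizes each feature across the batch, then applies a learned scale (gamma) and shift (beta). A minimal NumPy sketch of the textbook forward pass (not OpenNN's implementation):

import numpy as np

def batch_norm_forward(x, gamma, beta, eps=1e-5):
    # x: (batch, features); normalize each feature over the batch.
    mean = x.mean(axis=0)
    var = x.var(axis=0)
    x_hat = (x - mean) / np.sqrt(var + eps)
    return gamma * x_hat + beta  # learned affine transform

x = np.random.randn(32, 10)
y = batch_norm_forward(x, gamma=np.ones(10), beta=np.zeros(10))
assert np.allclose(y.mean(axis=0), 0.0, atol=1e-6)  # per-feature mean ~ 0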
11 changes: 6 additions & 5 deletions src_erl/NerlnetApp/src/Bridge/layers_types_ag.hrl
@@ -7,8 +7,9 @@
-define(LAYERS_TYPE_PERCEPTRON_IDX,"3").
-define(LAYERS_TYPE_POOLING_IDX,"4").
-define(LAYERS_TYPE_PROBABILISTIC_IDX,"5").
--define(LAYERS_TYPE_LSTM_IDX,"6").
--define(LAYERS_TYPE_RECCURRENT_IDX,"7").
--define(LAYERS_TYPE_UNSCALING_IDX,"8").
--define(LAYERS_TYPE_FLATTEN_IDX,"9").
--define(LAYERS_TYPE_BOUNDING_IDX,"10").
+-define(LAYERS_TYPE_BATCHNORMALIZATION_IDX,"6").
+-define(LAYERS_TYPE_LSTM_IDX,"7").
+-define(LAYERS_TYPE_RECCURRENT_IDX,"8").
+-define(LAYERS_TYPE_UNSCALING_IDX,"9").
+-define(LAYERS_TYPE_FLATTEN_IDX,"10").
+-define(LAYERS_TYPE_BOUNDING_IDX,"11").
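
Note that any layers-types string written against the old numbering shifts with this change: old codes 6-10 become 7-11. A throwaway migration sketch (hypothetical helper; regenerating configs from nerlPlanner works too):

# Old -> new layer-type codes after BatchNormalization took slot 6.
OLD_TO_NEW = {"6": "7", "7": "8", "8": "9", "9": "10", "10": "11"}

def migrate_layers_types(s):
    return ",".join(OLD_TO_NEW.get(code, code) for code in s.split(","))

# Matches the test updates in this PR:
assert migrate_layers_types("2,4,2,9,3,5") == "2,4,2,10,3,5"            # CNN
assert migrate_layers_types("1,3,3,3,3,3,3,10") == "1,3,3,3,3,3,3,11"   # AE/AEC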
@@ -19,7 +19,7 @@
_ModelTypeCNN = "0",
_ModelArgsCNN = "",
_LayersSizesCNN = "28x28x1k5x5x1x6p0s1t1,28x28x6k2x2p0s2,14x14x6k4x4x6x12p0s1t0,1,32,10",
-_LayersTypesCNN = "2,4,2,9,3,5",
+_LayersTypesCNN = "2,4,2,10,3,5",
_LayersFunctionalityCodesCNN = "6,2,6,6,6,4", % change scaler functionality to 6 to check exception handling
_LearningRateCNN = "0.01",
_EpochsCNN = "50",
@@ -29,11 +29,25 @@
_DistributedSystemTypeCNN = "0",
_DistributedSystemArgCNN = ""} ).

-define(CNN_1D_TESTING_NN,{ _ModelIdCNN_1D = erlang:unique_integer([positive]),
_ModelTypeCNN_1D = "0",
_ModelArgsCNN_1D = "",
_LayersSizesCNN_1D = "70x1x1k5x1x1x128p0s1t0,66x1x128k2x1p0s1,65x1x128k5x1x128x128p0s1t0,61x1x128k2x1p0s1,60x1x128k5x1x128x64p0s1t0,1,64,32,16,9",
_LayersTypesCNN_1D = "2,4,2,4,2,10,3,3,3,5",
_LayersFunctionalityCodesCNN_1D = "6,2,6,2,6,1,6,6,6,4", % change scaler functionality to 6 to check exception handling
_LearningRateCNN_1D = "0.01",
_EpochsCNN_1D = "50",
_OptimizerTypeCNN_1D = "5",
_OptimizerArgsCNN_1D = "",
_LossMethodCNN_1D = "2",
_DistributedSystemTypeCNN_1D = "0",
_DistributedSystemArgCNN_1D = ""} ).
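
The three layer strings are positional: entry i of the sizes, types, and functionality-code strings all describe layer i, so the three must have equal arity (10 entries each in CNN_1D). A quick sanity check, sketched as a hypothetical helper:

def check_layer_strings(sizes, types, codes):
    # Each string must have one comma-separated entry per layer.
    lengths = [len(s.split(",")) for s in (sizes, types, codes)]
    assert len(set(lengths)) == 1, f"layer string arity mismatch: {lengths}"

check_layer_strings(
    "70x1x1k5x1x1x128p0s1t0,66x1x128k2x1p0s1,65x1x128k5x1x128x128p0s1t0,"
    "61x1x128k2x1p0s1,60x1x128k5x1x128x64p0s1t0,1,64,32,16,9",
    "2,4,2,4,2,10,3,3,3,5",
    "6,2,6,2,6,1,6,6,6,4",
)  # passes: 10 entries each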

-define(AEC_TESTING_NN,{ _ModelIdAEC = erlang:unique_integer([positive]),
_ModelTypeAEC = "9",
_ModelArgsAEC = "",
_LayersSizesAEC = "32,16,8,4,8,16,32,32", % last layer (perceptron) should be the same as the input layer , followed by bounding layer
-_LayersTypesAEC = "1,3,3,3,3,3,3,10",
+_LayersTypesAEC = "1,3,3,3,3,3,3,11",
_LayersFunctionalityCodesAEC = "1,11,11,11,11,11,11,1",
_LearningRateAEC = "0.01",
_EpochsAEC = "50",
@@ -47,7 +61,7 @@
_ModelTypeAE = "8",
_ModelArgsAE = "",
_LayersSizesAE = "32,16,8,4,8,16,32,32", % last layer (perceptron) should be the same as the input layer , followed by bounding layer
-_LayersTypesAE = "1,3,3,3,3,3,3,10",
+_LayersTypesAE = "1,3,3,3,3,3,3,11",
_LayersFunctionalityCodesAE = "1,11,11,11,11,11,11,1",
_LearningRateAE = "0.01",
_EpochsAE = "50",
@@ -57,5 +71,5 @@
_DistributedSystemTypeAE = "0",
_DistributedSystemArgAE = ""} ).

--define(NEURAL_NETWORK_TESTING_MODELS_LIST, [?PERCEPTRON_TESTING_NN ,?AEC_TESTING_NN , ?CNN_TESTING_NN]).
--define(NEURAL_NETWORK_TESTING_MODELS_LIST_NAMES, ["Perceptron" ,"AEC" ,"CNN"]).
+-define(NEURAL_NETWORK_TESTING_MODELS_LIST, [?PERCEPTRON_TESTING_NN ,?AEC_TESTING_NN , ?CNN_TESTING_NN,?CNN_1D_TESTING_NN]).
+-define(NEURAL_NETWORK_TESTING_MODELS_LIST_NAMES, ["Perceptron" ,"AEC" ,"CNN","CNN_1D"]).
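
The two defines are kept index-aligned: "CNN_1D" sits at the same position in the names list as ?CNN_1D_TESTING_NN does in the models list.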
2 changes: 1 addition & 1 deletion src_erl/NerlnetApp/src/nerlnetApp.app.src
@@ -1,6 +1,6 @@
{application, nerlnetApp,
[{description, "Nerlnet OTP Application"},
-{vsn, "1.4.0"},
+{vsn, "1.5.1"},
{registered, []},
{mod, {nerlnetApp_app, []}},
{applications,
1 change: 1 addition & 0 deletions src_py/nerlPlanner/JsonElementWorker.py
@@ -206,6 +206,7 @@ def get_as_dict(self, documentation = True):
(KEY_LAYERS_FUNCTIONS_POOLING_DOC, VAL_LAYERS_FUNCTIONS_POOLING_DOC),
(KEY_LAYERS_FUNCTIONS_PROBABILISTIC_DOC, VAL_LAYERS_FUNCTIONS_PROBABILISTIC_DOC),
(KEY_LAYERS_FUNCTIONS_SCALER_DOC, VAL_LAYERS_FUNCTIONS_SCALER_DOC),
(KEY_LAYERS_FUNCTIONS_BOUNDING_DOC, VAL_LAYERS_FUNCTIONS_BOUNDING_DOC),
(KEY_LOSS_METHOD, self.LossMethod),
(KEY_LOSS_METHOD_DOC, VAL_LOSS_METHOD_DOC),
(KEY_LEARNING_RATE, self.LearningRate),
18 changes: 13 additions & 5 deletions src_py/nerlPlanner/JsonElementWorkerDefinitions.py
@@ -12,11 +12,12 @@
("Perceptron" , "3"),
("Pooling" , "4"),
("Probabilistic" , "5"),
("LSTM" , "6"),
("Reccurrent" , "7"),
("Unscaling" , "8"),
("Flatten" , "9"),
("Bounding" , "10"),
("BatchNormalization" , "6"),
("LSTM" , "7"),
("Reccurrent" , "8"),
("Unscaling" , "9"),
("Flatten" , "10"),
("Bounding" , "11"),
]
)

@@ -58,6 +59,11 @@
("Avg" , "3")]
)

BatchNomalizationMap = OrderedDict(
[("none" , "1"),
]
)
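
Batch normalization has no selectable sub-method, so its map carries a single placeholder entry; in a worker's layers-functionality string, a batch-norm layer therefore always takes code "1" ("none").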

ActivationFunctionsMap = OrderedDict(
[("Threshold" , "1"),
("Sign" , "2"),
@@ -154,6 +160,7 @@ def doc_print_dict(d):#define d
KEY_LAYERS_FUNCTIONS_SCALER_DOC = "_doc_layer_functions_scaler"
KEY_LAYERS_FUNCTIONS_POOLING_DOC = "_doc_layer_functions_pooling"
KEY_LAYERS_FUNCTIONS_PROBABILISTIC_DOC = "_doc_layer_functions_probabilistic"
KEY_LAYERS_FUNCTIONS_BOUNDING_DOC = "_doc_layer_functions_bounding"
KEY_LOSS_METHOD = "lossMethod"
KEY_LOSS_METHOD_DOC = "_doc_lossMethod"
KEY_EPOCHS = "epochs"
@@ -182,6 +189,7 @@ def doc_print_dict(d):#define d
VAL_LAYERS_FUNCTIONS_POOLING_DOC = f"{doc_print_dict(PoolingMethodMap)}"
VAL_LAYERS_FUNCTIONS_PROBABILISTIC_DOC = f"{doc_print_dict(ProbabilisticActivationFunctionMap)}"
VAL_LAYERS_FUNCTIONS_ACTIVATION_DOC = f"{doc_print_dict(ActivationFunctionsMap)}"
VAL_LAYERS_FUNCTIONS_BOUNDING_DOC = f"{doc_print_dict(BoundingMethodMap)}"
VAL_LOSS_METHOD_DOC = f"{doc_print_dict(LossMethodMapping)}"
VAL_EPOCHS_DOC = "Positve Integer"
VAL_LEARNING_RATE_DOC = "Positve float"
20 changes: 16 additions & 4 deletions src_py/nerlPlanner/WinWorkerDialog.py
@@ -169,7 +169,8 @@ def ui_update_all_values(WorkerWindow):
FlattenDictStr = f'Flatten:\n{pretty_print_dict(FlattenMethodMap)}'
BoundingDictStr = f'Bounding:\n{pretty_print_dict(BoundingMethodMap)}'
ProbabilisticDictStr = f'Probabilistic:\n{pretty_print_dict(ProbabilisticActivationFunctionMap)}'
sg.popup_ok(f"Layer Functions Codes:\n{ActivationDictStr}\n{PoolingDictStr}\n{ScalerDictStr}\n{FlattenDictStr}\n{BoundingDictStr}\n{ProbabilisticDictStr}", keep_on_top=True, title="Layer Type Codes")
BatchNormalizationDictStr = f'Batch Normalization:\n{pretty_print_dict(BatchNomalizationMap)}'
sg.popup_ok(f"Layer Functions Codes:\n{ActivationDictStr}\n{PoolingDictStr}\n{ScalerDictStr}\n{FlattenDictStr}\n{BoundingDictStr}\n{ProbabilisticDictStr}\n{BatchNormalizationDictStr}", keep_on_top=True, title="Layer Type Codes")

if event == KEY_LEARNING_RATE_INPUT:
LearningRate = values[event]
@@ -269,9 +270,15 @@ def LayerMethodSelection():
sg.Listbox(list(ScalingMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_SCALER)],
[ sg.Text("Bounding",expand_x=True), sg.Text('Flatten', expand_x=True), sg.Text('Probabilistic', expand_x=True)],
[
-sg.Listbox(list(BoundingMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BOUNDING),
-sg.Listbox(list(FlattenMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN),
-sg.Listbox(list(ProbabilisticActivationFunctionMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_PROBABILISTIC)
+sg.Listbox(list(BoundingMethodMap.keys()),size=(20,5), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BOUNDING),
+sg.Listbox(list(FlattenMethodMap.keys()),size=(20,5), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN),
+sg.Listbox(list(ProbabilisticActivationFunctionMap.keys()),size=(20,5), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_PROBABILISTIC)
],
[
sg.Text("BatchNorm",expand_x=True), sg.Text(' ', expand_x=True), sg.Text(' ', expand_x=True)
],
[
sg.Listbox(list(BatchNomalizationMap.keys()),size=(20,5), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BATCH_NORMALIZATION),
],
[sg.Text('Selection', expand_x=True, enable_events=True, key=KEY_LAYER_METHOD_SELECTION_TEXT),sg.Button('Select', expand_x=True, key=KEY_LAYER_METHOD_SELECTION_BUTTON)]]

@@ -311,6 +318,11 @@ def LayerMethodSelection():
global_layer_method_selection_code = FlattenMethodMap[layer_method_selection]
layer_selection_win[KEY_LAYER_METHOD_SELECTION_TEXT].update(f'Selected {layer_method_selection} code: {global_layer_method_selection_code}')

if event == KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BATCH_NORMALIZATION:
layer_method_selection = values[KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BATCH_NORMALIZATION][0]
global_layer_method_selection_code = BatchNomalizationMap[layer_method_selection]
layer_selection_win[KEY_LAYER_METHOD_SELECTION_TEXT].update(f'Selected {layer_method_selection} code: {global_layer_method_selection_code}')

if event == KEY_LAYER_METHOD_SELECTION_BUTTON:
break

1 change: 1 addition & 0 deletions src_py/nerlPlanner/WinWorkerDialogDefnitions.py
@@ -55,6 +55,7 @@
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_SCALER = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-SCALER-'
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-FLATTEN-'
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BOUNDING = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-BOUNDING-'
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BATCH_NORMALIZATION = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-BATCH-NORMALIZATION-'
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_PROBABILISTIC = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-PROBABILISTIC-'
KEY_LAYER_METHOD_SELECTION_TEXT = '-LAYER-METHOD-SELECTION-TEXT-'
KEY_LAYER_METHOD_SELECTION_BUTTON = '-LAYER-METHOD-SELECTION-BUTTON-'