diff --git a/src_cpp/common/worker_definitions_ag.h b/src_cpp/common/worker_definitions_ag.h
index 2e4ea2f5c..17ed216e4 100644
--- a/src_cpp/common/worker_definitions_ag.h
+++ b/src_cpp/common/worker_definitions_ag.h
@@ -5,7 +5,7 @@
 namespace nerlnet
 {
 
-enum LayerTypeEnum{LAYER_TYPE_DEFAULT=0,LAYER_TYPE_SCALING=1,LAYER_TYPE_CONV=2,LAYER_TYPE_PERCEPTRON=3,LAYER_TYPE_POOLING=4,LAYER_TYPE_PROBABILISTIC=5,LAYER_TYPE_LSTM=6,LAYER_TYPE_RECCURRENT=7,LAYER_TYPE_UNSCALING=8,LAYER_TYPE_FLATTEN=9,LAYER_TYPE_BOUNDING=10};
+enum LayerTypeEnum{LAYER_TYPE_DEFAULT=0,LAYER_TYPE_SCALING=1,LAYER_TYPE_CONV=2,LAYER_TYPE_PERCEPTRON=3,LAYER_TYPE_POOLING=4,LAYER_TYPE_PROBABILISTIC=5,LAYER_TYPE_BATCHNORMALIZATION=6,LAYER_TYPE_LSTM=7,LAYER_TYPE_RECCURRENT=8,LAYER_TYPE_UNSCALING=9,LAYER_TYPE_FLATTEN=10,LAYER_TYPE_BOUNDING=11};
 enum ProbabilisticActivationEnum{PROBABILISTIC_ACTIVATION_BINARY=1,PROBABILISTIC_ACTIVATION_LOGISTIC=2,PROBABILISTIC_ACTIVATION_COMPETITIVE=3,PROBABILISTIC_ACTIVATION_SOFTMAX=4};
 enum ScalingEnum{SCALING_NONE=1,SCALING_MINMAX=2,SCALING_MEANSTD=3,SCALING_STD=4,SCALING_LOG=5};
 enum BoundingEnum{BOUNDING_NONE=1,BOUNDING_BOUNDING=2};
diff --git a/src_cpp/opennnBridge/nerlWorkerOpenNN.cpp b/src_cpp/opennnBridge/nerlWorkerOpenNN.cpp
index 031d93075..205584cce 100644
--- a/src_cpp/opennnBridge/nerlWorkerOpenNN.cpp
+++ b/src_cpp/opennnBridge/nerlWorkerOpenNN.cpp
@@ -486,6 +486,15 @@ namespace nerlnet
                {
                }
                break;
            }
+            case LAYER_TYPE_BATCHNORMALIZATION:
+            {
+                int layer_size = curr_layer->get_dim_size(DIM_X_IDX);
+                BatchNormalizationLayer* newLayer = new BatchNormalizationLayer();
+                newLayer->set(layer_size);
+                neural_network_ptr->add_layer(newLayer);
+
+                break;
+            }
            }
            curr_layer = curr_layer->get_next_layer_ptr();
        }
diff --git a/src_erl/NerlnetApp/src/Bridge/layers_types_ag.hrl b/src_erl/NerlnetApp/src/Bridge/layers_types_ag.hrl
index 2f1af62cf..341ee11b8 100644
--- a/src_erl/NerlnetApp/src/Bridge/layers_types_ag.hrl
+++ b/src_erl/NerlnetApp/src/Bridge/layers_types_ag.hrl
@@ -7,8 +7,9 @@
 -define(LAYERS_TYPE_PERCEPTRON_IDX,"3").
 -define(LAYERS_TYPE_POOLING_IDX,"4").
 -define(LAYERS_TYPE_PROBABILISTIC_IDX,"5").
--define(LAYERS_TYPE_LSTM_IDX,"6").
--define(LAYERS_TYPE_RECCURRENT_IDX,"7").
--define(LAYERS_TYPE_UNSCALING_IDX,"8").
--define(LAYERS_TYPE_FLATTEN_IDX,"9").
--define(LAYERS_TYPE_BOUNDING_IDX,"10").
+-define(LAYERS_TYPE_BATCHNORMALIZATION_IDX,"6").
+-define(LAYERS_TYPE_LSTM_IDX,"7").
+-define(LAYERS_TYPE_RECCURRENT_IDX,"8").
+-define(LAYERS_TYPE_UNSCALING_IDX,"9").
+-define(LAYERS_TYPE_FLATTEN_IDX,"10").
+-define(LAYERS_TYPE_BOUNDING_IDX,"11").
diff --git a/src_erl/NerlnetApp/src/Bridge/neural_networks_testing_models.hrl b/src_erl/NerlnetApp/src/Bridge/neural_networks_testing_models.hrl
index 80f70dfac..ddf7f6864 100644
--- a/src_erl/NerlnetApp/src/Bridge/neural_networks_testing_models.hrl
+++ b/src_erl/NerlnetApp/src/Bridge/neural_networks_testing_models.hrl
@@ -19,7 +19,7 @@
                        _ModelTypeCNN = "0",
                        _ModelArgsCNN = "",
                        _LayersSizesCNN = "28x28x1k5x5x1x6p0s1t1,28x28x6k2x2p0s2,14x14x6k4x4x6x12p0s1t0,1,32,10",
-                        _LayersTypesCNN = "2,4,2,9,3,5",
+                        _LayersTypesCNN = "2,4,2,10,3,5",
                        _LayersFunctionalityCodesCNN = "6,2,6,6,6,4", % change scaler functionality to 6 to check exception handling
                        _LearningRateCNN = "0.01",
                        _EpochsCNN = "50",
@@ -29,11 +29,25 @@
                        _DistributedSystemTypeCNN = "0",
                        _DistributedSystemArgCNN = ""} ).
 
+-define(CNN_1D_TESTING_NN,{ _ModelIdCNN_1D = erlang:unique_integer([positive]),
+                        _ModelTypeCNN_1D = "0",
+                        _ModelArgsCNN_1D = "",
+                        _LayersSizesCNN_1D = "70x1x1k5x1x1x128p0s1t0,66x1x128k2x1p0s1,65x1x128k5x1x128x128p0s1t0,61x1x128k2x1p0s1,60x1x128k5x1x128x64p0s1t0,1,64,32,16,9",
+                        _LayersTypesCNN_1D = "2,4,2,4,2,10,3,3,3,5",
+                        _LayersFunctionalityCodesCNN_1D = "6,2,6,2,6,1,6,6,6,4", % change scaler functionality to 6 to check exception handling
+                        _LearningRateCNN_1D = "0.01",
+                        _EpochsCNN_1D = "50",
+                        _OptimizerTypeCNN_1D = "5",
+                        _OptimizerArgsCNN_1D = "",
+                        _LossMethodCNN_1D = "2",
+                        _DistributedSystemTypeCNN_1D = "0",
+                        _DistributedSystemArgCNN_1D = ""} ).
+
 -define(AEC_TESTING_NN,{ _ModelIdAEC = erlang:unique_integer([positive]),
                        _ModelTypeAEC = "9",
                        _ModelArgsAEC = "",
                        _LayersSizesAEC = "32,16,8,4,8,16,32,32", % last layer (perceptron) should be the same as the input layer , followed by bounding layer
-                        _LayersTypesAEC = "1,3,3,3,3,3,3,10",
+                        _LayersTypesAEC = "1,3,3,3,3,3,3,11",
                        _LayersFunctionalityCodesAEC = "1,11,11,11,11,11,11,1",
                        _LearningRateAEC = "0.01",
                        _EpochsAEC = "50",
@@ -47,7 +61,7 @@
                        _ModelTypeAE = "8",
                        _ModelArgsAE = "",
                        _LayersSizesAE = "32,16,8,4,8,16,32,32", % last layer (perceptron) should be the same as the input layer , followed by bounding layer
-                        _LayersTypesAE = "1,3,3,3,3,3,3,10",
+                        _LayersTypesAE = "1,3,3,3,3,3,3,11",
                        _LayersFunctionalityCodesAE = "1,11,11,11,11,11,11,1",
                        _LearningRateAE = "0.01",
                        _EpochsAE = "50",
@@ -57,5 +71,5 @@
                        _DistributedSystemTypeAE = "0",
                        _DistributedSystemArgAE = ""} ).
 
--define(NEURAL_NETWORK_TESTING_MODELS_LIST, [?PERCEPTRON_TESTING_NN ,?AEC_TESTING_NN , ?CNN_TESTING_NN]).
--define(NEURAL_NETWORK_TESTING_MODELS_LIST_NAMES, ["Perceptron" ,"AEC" ,"CNN"]).
\ No newline at end of file
+-define(NEURAL_NETWORK_TESTING_MODELS_LIST, [?PERCEPTRON_TESTING_NN ,?AEC_TESTING_NN , ?CNN_TESTING_NN,?CNN_1D_TESTING_NN]).
+-define(NEURAL_NETWORK_TESTING_MODELS_LIST_NAMES, ["Perceptron" ,"AEC" ,"CNN","CNN_1D"]).
\ No newline at end of file
diff --git a/src_erl/NerlnetApp/src/nerlnetApp.app.src b/src_erl/NerlnetApp/src/nerlnetApp.app.src
index 20c69a4ca..062f5a3cc 100644
--- a/src_erl/NerlnetApp/src/nerlnetApp.app.src
+++ b/src_erl/NerlnetApp/src/nerlnetApp.app.src
@@ -1,6 +1,6 @@
 {application, nerlnetApp,
  [{description, "Nerlnet OTP Application"},
-  {vsn, "1.4.0"},
+  {vsn, "1.5.1"},
   {registered, []},
   {mod, {nerlnetApp_app, []}},
   {applications,
diff --git a/src_py/nerlPlanner/JsonElementWorker.py b/src_py/nerlPlanner/JsonElementWorker.py
index 1eb033cd2..729e27655 100644
--- a/src_py/nerlPlanner/JsonElementWorker.py
+++ b/src_py/nerlPlanner/JsonElementWorker.py
@@ -206,6 +206,7 @@ def get_as_dict(self, documentation = True):
                 (KEY_LAYERS_FUNCTIONS_POOLING_DOC, VAL_LAYERS_FUNCTIONS_POOLING_DOC),
                 (KEY_LAYERS_FUNCTIONS_PROBABILISTIC_DOC, VAL_LAYERS_FUNCTIONS_PROBABILISTIC_DOC),
                 (KEY_LAYERS_FUNCTIONS_SCALER_DOC, VAL_LAYERS_FUNCTIONS_SCALER_DOC),
+                (KEY_LAYERS_FUNCTIONS_BOUNDING_DOC, VAL_LAYERS_FUNCTIONS_BOUNDING_DOC),
                 (KEY_LOSS_METHOD, self.LossMethod),
                 (KEY_LOSS_METHOD_DOC, VAL_LOSS_METHOD_DOC),
                 (KEY_LEARNING_RATE, self.LearningRate),
diff --git a/src_py/nerlPlanner/JsonElementWorkerDefinitions.py b/src_py/nerlPlanner/JsonElementWorkerDefinitions.py
index c5fecfe0f..629f11139 100644
--- a/src_py/nerlPlanner/JsonElementWorkerDefinitions.py
+++ b/src_py/nerlPlanner/JsonElementWorkerDefinitions.py
@@ -12,11 +12,12 @@
     ("Perceptron" , "3"),
     ("Pooling" , "4"),
     ("Probabilistic" , "5"),
-    ("LSTM" , "6"),
-    ("Reccurrent" , "7"),
-    ("Unscaling" , "8"),
-    ("Flatten" , "9"),
-    ("Bounding" , "10"),
+    ("BatchNormalization" , "6"),
+    ("LSTM" , "7"),
+    ("Reccurrent" , "8"),
+    ("Unscaling" , "9"),
+    ("Flatten" , "10"),
+    ("Bounding" , "11"),
     ]
 )
 
@@ -58,6 +59,11 @@
     ("Avg" , "3")]
 )
 
+BatchNomalizationMap = OrderedDict(
+    [("none" , "1"),
+    ]
+)
+
 ActivationFunctionsMap = OrderedDict(
     [("Threshold" , "1"),
     ("Sign" , "2"),
@@ -154,6 +160,7 @@ def doc_print_dict(d):#define d
 KEY_LAYERS_FUNCTIONS_SCALER_DOC = "_doc_layer_functions_scaler"
 KEY_LAYERS_FUNCTIONS_POOLING_DOC = "_doc_layer_functions_pooling"
 KEY_LAYERS_FUNCTIONS_PROBABILISTIC_DOC = "_doc_layer_functions_probabilistic"
+KEY_LAYERS_FUNCTIONS_BOUNDING_DOC = "_doc_layer_functions_bounding"
 KEY_LOSS_METHOD = "lossMethod"
 KEY_LOSS_METHOD_DOC = "_doc_lossMethod"
 KEY_EPOCHS = "epochs"
@@ -182,6 +189,7 @@ def doc_print_dict(d):#define d
 VAL_LAYERS_FUNCTIONS_POOLING_DOC = f"{doc_print_dict(PoolingMethodMap)}"
 VAL_LAYERS_FUNCTIONS_PROBABILISTIC_DOC = f"{doc_print_dict(ProbabilisticActivationFunctionMap)}"
 VAL_LAYERS_FUNCTIONS_ACTIVATION_DOC = f"{doc_print_dict(ActivationFunctionsMap)}"
+VAL_LAYERS_FUNCTIONS_BOUNDING_DOC = f"{doc_print_dict(BoundingMethodMap)}"
 VAL_LOSS_METHOD_DOC = f"{doc_print_dict(LossMethodMapping)}"
 VAL_EPOCHS_DOC = "Positve Integer"
 VAL_LEARNING_RATE_DOC = "Positve float"
diff --git a/src_py/nerlPlanner/WinWorkerDialog.py b/src_py/nerlPlanner/WinWorkerDialog.py
index 95b70f47d..2fb418d8a 100644
--- a/src_py/nerlPlanner/WinWorkerDialog.py
+++ b/src_py/nerlPlanner/WinWorkerDialog.py
@@ -169,7 +169,8 @@ def ui_update_all_values(WorkerWindow):
         FlattenDictStr = f'Flatten:\n{pretty_print_dict(FlattenMethodMap)}'
         BoundingDictStr = f'Bounding:\n{pretty_print_dict(BoundingMethodMap)}'
         ProbabilisticDictStr = f'Probabilistic:\n{pretty_print_dict(ProbabilisticActivationFunctionMap)}'
-        sg.popup_ok(f"Layer Functions Codes:\n{ActivationDictStr}\n{PoolingDictStr}\n{ScalerDictStr}\n{FlattenDictStr}\n{BoundingDictStr}\n{ProbabilisticDictStr}", keep_on_top=True, title="Layer Type Codes")
+        BatchNormalizationDictStr = f'Batch Normalization:\n{pretty_print_dict(BatchNomalizationMap)}'
+        sg.popup_ok(f"Layer Functions Codes:\n{ActivationDictStr}\n{PoolingDictStr}\n{ScalerDictStr}\n{FlattenDictStr}\n{BoundingDictStr}\n{ProbabilisticDictStr}\n{BatchNormalizationDictStr}", keep_on_top=True, title="Layer Type Codes")
 
     if event == KEY_LEARNING_RATE_INPUT:
         LearningRate = values[event]
@@ -269,9 +270,15 @@ def LayerMethodSelection():
                    sg.Listbox(list(ScalingMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_SCALER)],
                  [ sg.Text("Bounding",expand_x=True), sg.Text('Flatten', expand_x=True), sg.Text('Probabilistic', expand_x=True)],
                  [
-                    sg.Listbox(list(BoundingMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BOUNDING),
-                    sg.Listbox(list(FlattenMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN),
-                    sg.Listbox(list(ProbabilisticActivationFunctionMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_PROBABILISTIC)
+                    sg.Listbox(list(BoundingMethodMap.keys()),size=(20,5), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BOUNDING),
+                    sg.Listbox(list(FlattenMethodMap.keys()),size=(20,5), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN),
+                    sg.Listbox(list(ProbabilisticActivationFunctionMap.keys()),size=(20,5), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_PROBABILISTIC)
+                 ],
+                 [
+                    sg.Text("BatchNorm",expand_x=True), sg.Text(' ', expand_x=True), sg.Text(' ', expand_x=True)
+                 ],
+                 [
+                    sg.Listbox(list(BatchNomalizationMap.keys()),size=(20,5), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BATCH_NORMALIZATION),
                  ],
                  [sg.Text('Selection', expand_x=True, enable_events=True, key=KEY_LAYER_METHOD_SELECTION_TEXT),sg.Button('Select', expand_x=True, key=KEY_LAYER_METHOD_SELECTION_BUTTON)]]
 
@@ -311,6 +318,11 @@ def LayerMethodSelection():
                 global_layer_method_selection_code = FlattenMethodMap[layer_method_selection]
                 layer_selection_win[KEY_LAYER_METHOD_SELECTION_TEXT].update(f'Selected {layer_method_selection} code: {global_layer_method_selection_code}')
 
+            if event == KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BATCH_NORMALIZATION:
+                layer_method_selection = values[KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BATCH_NORMALIZATION][0]
+                global_layer_method_selection_code = BatchNomalizationMap[layer_method_selection]
+                layer_selection_win[KEY_LAYER_METHOD_SELECTION_TEXT].update(f'Selected {layer_method_selection} code: {global_layer_method_selection_code}')
+
             if event == KEY_LAYER_METHOD_SELECTION_BUTTON:
                 break
 
diff --git a/src_py/nerlPlanner/WinWorkerDialogDefnitions.py b/src_py/nerlPlanner/WinWorkerDialogDefnitions.py
index cabef3a6e..9ed41c320 100644
--- a/src_py/nerlPlanner/WinWorkerDialogDefnitions.py
+++ b/src_py/nerlPlanner/WinWorkerDialogDefnitions.py
@@ -55,6 +55,7 @@
 KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_SCALER = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-SCALER-'
 KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-FLATTEN-'
 KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BOUNDING = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-BOUNDING-'
+KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BATCH_NORMALIZATION = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-BATCH-NORMALIZATION-'
 KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_PROBABILISTIC = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-PROBABILISTIC-'
 KEY_LAYER_METHOD_SELECTION_TEXT = '-LAYER-METHOD-SELECTION-TEXT-'
 KEY_LAYER_METHOD_SELECTION_BUTTON = '-LAYER-METHOD-SELECTION-BUTTON-'
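
Note on the new LAYER_TYPE_BATCHNORMALIZATION case in nerlWorkerOpenNN.cpp: it relies on OpenNN providing a BatchNormalizationLayer with a default constructor, a set(Index) resizer, and compatibility with NeuralNetwork::add_layer. The snippet below is only a minimal sketch of that assumed usage; the helper name, include path, and comments are illustrative assumptions and are not part of this patch.

    // Sketch only: appends a batch-normalization layer sized to the current
    // layer's X dimension, mirroring the case added above.
    #include "opennn.h"   // umbrella OpenNN header; exact include path depends on the build setup
    using namespace opennn;

    void append_batch_normalization(NeuralNetwork* neural_network_ptr, int layer_size)
    {
        // Heap-allocate the layer; the NeuralNetwork stores the raw pointer.
        BatchNormalizationLayer* new_layer = new BatchNormalizationLayer();
        new_layer->set(layer_size);                // assumed: one normalization unit per input neuron
        neural_network_ptr->add_layer(new_layer);  // appended after the layers built so far
    }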