Skip to content

Commit

Permalink
Merge pull request #286 from leondavi/nerlworker
Browse files Browse the repository at this point in the history
Nerlworker
  • Loading branch information
leondavi committed Mar 13, 2024
2 parents f115621 + 37b9f69 commit 5b9b498
Show file tree
Hide file tree
Showing 33 changed files with 965 additions and 161 deletions.
4 changes: 4 additions & 0 deletions NerlnetBuild.sh
Original file line number Diff line number Diff line change
Expand Up @@ -166,13 +166,17 @@ if command -v python3 >/dev/null 2>&1; then
AUTOGENERATED_DC_DEFINITIONS_PATH_HRL="`pwd`/src_erl/NerlnetApp/src/dc_definitions_ag.hrl"
AUTOGENERATED_SOURCE_DEFINITIONS_PATH_HRL="`pwd`/src_erl/NerlnetApp/src/source_definitions_ag.hrl"
AUTOGENERATED_ROUTER_DEFINITIONS_PATH_HRL="`pwd`/src_erl/NerlnetApp/src/router_definitions_ag.hrl"
AUTOGENERATED_LAYERS_TYPE_INDEX_DEFINITIONS_PATH_HRL="`pwd`/src_erl/NerlnetApp/src/Bridge/layers_types_ag.hrl"
AUTOGENERATED_MODELS_TYPES_INDEX_DEFINITIONS_PATH_HRL="`pwd`/src_erl/NerlnetApp/src/Bridge/models_types_ag.hrl"

echo "$NERLNET_BUILD_PREFIX Generate auto-generated files"
python3 src_py/nerlPlanner/CppHeadersExporter.py --output $AUTOGENERATED_WORKER_DEFINITIONS_PATH #--debug
python3 src_py/nerlPlanner/ErlHeadersExporter.py --gen_worker_fields_hrl --output $AUTOGENERATED_WORKER_DEFINITIONS_PATH_HRL #--debug
python3 src_py/nerlPlanner/ErlHeadersExporter.py --gen_dc_fields_hrl --output $AUTOGENERATED_DC_DEFINITIONS_PATH_HRL #--debug
python3 src_py/nerlPlanner/ErlHeadersExporter.py --gen_source_fields_hrl --output $AUTOGENERATED_SOURCE_DEFINITIONS_PATH_HRL #--debug
python3 src_py/nerlPlanner/ErlHeadersExporter.py --gen_router_fields_hrl --output $AUTOGENERATED_ROUTER_DEFINITIONS_PATH_HRL #--debug
python3 src_py/nerlPlanner/ErlHeadersExporter.py --gen_layers_type_hrl --output $AUTOGENERATED_LAYERS_TYPE_INDEX_DEFINITIONS_PATH_HRL #--debug
python3 src_py/nerlPlanner/ErlHeadersExporter.py --gen_models_types_hrl --output $AUTOGENERATED_MODELS_TYPES_INDEX_DEFINITIONS_PATH_HRL #--debug
set +e
else
echo "$NERLNET_BUILD_PREFIX Python 3 is not installed"
Expand Down
2 changes: 2 additions & 0 deletions src_cpp/common/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
project(common)

set(NIFPP_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../nifpp/")
set(SIMPLE_LOGGER_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../simple-cpp-logger/include")

set(SRC_CODE
"common_definitions.h"
Expand All @@ -19,4 +20,5 @@ add_library(common SHARED ${SRC_CODE})

target_include_directories(common PUBLIC .
${NIFPP_PATH}
${SIMPLE_LOGGER_PATH}
)
2 changes: 1 addition & 1 deletion src_cpp/common/common_definitions.h
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ namespace nerlnet
#define DIM_X_IDX 0
#define DIM_Y_IDX 1
#define DIM_Z_IDX 2

#define DIM_W_IDX 3
#define NERLNIF_ATOM_STR "nerlnif"
#define NERLNIF_NAN_ATOM_STR "nan"

Expand Down
22 changes: 20 additions & 2 deletions src_cpp/common/nerlLayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,33 @@ NerlLayer::~NerlLayer()
{
}


// ----- CNN Layer -----
// Builds a CNN layer: forwards the common fields (type, dims, functionality)
// to the NerlLayer base and stores the convolution-specific geometry.
// type_conv: convolution-type flags; index 0 is what get_type_conv() returns.
// NOTE(review): kernel_size, padding_size and type_conv are taken by value
// while stride_dims is a non-const reference — unifying these to const&
// requires a matching header change, so the signature is left as declared.
NerlLayerCNN::NerlLayerCNN(int layer_type, std::vector<int> &layers_dims, int layer_functionality,
                           std::vector<int> kernel_size, std::vector<int> &stride_dims, std::vector<int> padding_size, std::vector<int> type_conv) :
                           NerlLayer(layer_type, layers_dims, layer_functionality),
                           _kernel_size(std::move(kernel_size)),   // by-value params are moved, not copied again
                           _stride_dims(stride_dims),
                           _padding_size(std::move(padding_size)),
                           _type_conv(std::move(type_conv))
{
}

// Out-of-line defaulted destructor (declared virtual in nerlLayer.h).
NerlLayerCNN::~NerlLayerCNN() = default;

// Builds a pooling layer: forwards the common fields (type, dims,
// functionality) to the NerlLayer base and stores the pooling window,
// stride and padding dimension vectors.
NerlLayerPooling::NerlLayerPooling(int layer_type, std::vector<int> &layers_dims, int layer_functionality,
                                   std::vector<int> &pooling_dims, std::vector<int> &stride_dims, std::vector<int> &padding_dims) :
                                   NerlLayer(layer_type, layers_dims, layer_functionality),
                                   _pooling_dims(pooling_dims),  // init-list avoids default-construct-then-assign
                                   _stride_dims(stride_dims),
                                   _padding_dims(padding_dims)
{
}


// Out-of-line defaulted destructor for the pooling layer.
NerlLayerPooling::~NerlLayerPooling() = default;
}
24 changes: 16 additions & 8 deletions src_cpp/common/nerlLayer.h
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ class NerlLayer
{
public:
NerlLayer(int layer_type, std::vector<int> &layers_dims, int layer_functionality);
~NerlLayer();
virtual ~NerlLayer();

std::shared_ptr<NerlLayer> get_next_layer_ptr() {return _next_layer;};
std::shared_ptr<NerlLayer> get_prev_layer_ptr() {return _prev_layer;};
Expand Down Expand Up @@ -44,29 +44,37 @@ class NerlLayerPooling : public NerlLayer
{
public:

NerlLayerPooling(int layer_type, std::vector<int> &layers_dims, int layer_functionality, std::vector<int> &pooling_dims);
NerlLayerPooling(int layer_type, std::vector<int> &layers_dims, int layer_functionality, std::vector<int> &pooling_dims, std::vector<int> &stride_dims,std::vector<int> &padding_dims);
~NerlLayerPooling();

void get_pooling_dims(std::vector<int> &pooling_dims) {pooling_dims = this->pooling_dims;};
int get_dim_pooling_size(int dim_idx) {return _pooling_dims[dim_idx];}; // index 0 is the first dim
int get_stride(int dim_idx) {return _stride_dims[dim_idx];}; // index 0 is the first dim
int get_padding_size(int dim_idx) {return _padding_dims[dim_idx];}; // index 0 is the first dim

private:
std::vector<int> pooling_dims; //TODO


std::vector<int> _pooling_dims;
std::vector<int> _stride_dims;
std::vector<int> _padding_dims;
};

// CNN (convolutional) layer node. Extends NerlLayer with kernel/stride/padding
// geometry and a convolution-type vector (index 0 holds the conv-type code).
class NerlLayerCNN : public NerlLayer
{
public:

    NerlLayerCNN(int layer_type, std::vector<int> &layers_dims, int layer_functionality,
                 std::vector<int> kernel_size, std::vector<int> &stride_dims, std::vector<int> padding_size, std::vector<int> type_conv);
    virtual ~NerlLayerCNN();

    // `const` on by-value returns was meaningless and has been dropped —
    // callers are unaffected.
    int get_dim_kernel_size(int dim_idx) { return _kernel_size[dim_idx]; };  // index 0 is the first dim
    int get_stride(int dim_idx) { return _stride_dims[dim_idx]; };           // index 0 is the first dim
    int get_type_conv() { return _type_conv[0]; };                           // conv-type code (first element)
    int get_padding_size(int dim_idx) { return _padding_size[dim_idx]; };    // index 0 is the first dim

private:
    std::vector<int> _kernel_size;
    std::vector<int> _stride_dims;
    std::vector<int> _padding_size;
    std::vector<int> _type_conv;
};

} // namespace nerlnet
26 changes: 19 additions & 7 deletions src_cpp/common/nerlWorker.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ NerlWorker::NerlWorker(int model_type, std::string &layer_sizes_str, std::string
_distributed_system_type = distributed_system_type;
_distributed_system_args_str = distributed_system_args_str;
_nerl_layers_linked_list = parse_layers_input(layer_sizes_str,layer_types_list,layers_functionality);
// std::cout << "NerlWorker created" << std::endl;
}

NerlWorker::~NerlWorker()
Expand Down Expand Up @@ -43,27 +44,40 @@ std::shared_ptr<NerlLayer> NerlWorker::parse_layers_input(std::string &layer_siz
std::vector<LayerSizingParams_t> layer_sizes_params;

parse_layer_sizes_str(layer_sizes_str, layer_types_vec, layer_sizes_params);

std::vector<std::shared_ptr<NerlLayer>> nerl_layers_vec;
nerl_layers_vec.resize(layer_sizes_params.size());
for (int i = 0; i < layer_sizes_params.size(); i++)
{
int layer_type = std::stoi(layer_types_strs_vec[i]);
// TODO Ori and Nadav add CNN extension
int layer_size = layer_sizes_params[i].dimx;
int layer_functionality = std::stoi(layers_functionality_strs_vec[i]);

std::vector<int> layer_dims = {layer_size}; //TODO
std::vector<int> layer_dims = {layer_sizes_params[i].dimx,
layer_sizes_params[i].dimy,layer_sizes_params[i].dimz};

switch(layer_type)
{
case LAYER_TYPE_POOLING:
{
break; //TODO Ori and Nadav add pooling layer
LayerSizingParams_t params = layer_sizes_params[i];
std::vector<int>pooling_dims = params.get_ext_params(params.KERNEL_SIZE);
std::vector<int>stride_dims = params.get_ext_params(params.STRIDE_SIZE);
std::vector<int>padding_dims = params.get_ext_params(params.PADDING_SIZE);
nerl_layers_vec[i] = std::make_shared<NerlLayerPooling>(layer_type,layer_dims,layer_functionality,
pooling_dims, stride_dims,padding_dims);
break;
}
case LAYER_TYPE_CNN:
{
break; //TODO Ori and Nadav add CNN layer
LayerSizingParams_t params = layer_sizes_params[i];
std::vector<int>kernel_dims = params.get_ext_params(params.KERNEL_SIZE);
std::vector<int>stride_dims = params.get_ext_params(params.STRIDE_SIZE);
std::vector<int>padding_dims = params.get_ext_params(params.PADDING_SIZE);
std::vector<int>type_conv = params.get_ext_params(params.IS_VALID);
// std::cout << "type_conv 0: " << type_conv[0] << std::endl;
// std::cout << "type_conv 1: " << type_conv[1] << std::endl;
nerl_layers_vec[i] = std::make_shared<NerlLayerCNN>(layer_type, layer_dims, layer_functionality, kernel_dims, stride_dims, padding_dims,type_conv);
break;
}
default:
{
Expand All @@ -72,13 +86,11 @@ std::shared_ptr<NerlLayer> NerlWorker::parse_layers_input(std::string &layer_siz
}
}
}

for (size_t i = 1; i < nerl_layers_vec.size(); i++)
{
nerl_layers_vec[i-1]->set_next_layer(nerl_layers_vec[i]);
nerl_layers_vec[i]->set_prev_layer(nerl_layers_vec[i-1]);
}

return nerl_layers_vec.front();
}

Expand Down
127 changes: 95 additions & 32 deletions src_cpp/common/nerlWorkerFunc.h
Original file line number Diff line number Diff line change
@@ -1,32 +1,42 @@
#pragma once

#include <memory>


#include <unordered_map>
#include <Logger.h>
#include "utilities.h"
#include "worker_definitions_ag.h"

// TODO: consider moving the NERLPLANNER_INPUT macros to utilities.h
#define NERLPLANNER_INPUT_KERNEL_CHAR 'k'
#define NERLPLANNER_INPUT_STRIDE_CHAR 's'
#define NERLPLANNER_SIZE_DIMENSION_SEP "x"
#define NERLPLANNER_INPUT_PADDING_CHAR 'p'



#define SIMPLE_PARSING -1
#define COMPLEX_PARSING -2


namespace nerlnet
{

typedef struct LayerSizingParams
{
enum {KERNEL_SIZE = -1, PADDING_SIZE = -2, STRIDE_SIZE = -3, POOLING_SIZE= -4};
enum {KERNEL_SIZE = -1, PADDING_SIZE = -2,STRIDE_SIZE = -3 ,POOLING_SIZE= -4 , IS_VALID = -5};
int dimx = 1;
int dimy = 1;
int dimz = 1;
std::vector<int> _ext_params;
int dimz = 1;
std::vector<int> _ext_params;

std::vector<int> get_ext_params(int param_type) {
int get_maxdim() { return (dimz > 1 ? 3 : dimy > 1 ? 2 : 1);} // return the maximum dimension of the param;

std::vector<int> get_ext_params(int param_type) {
std::vector<int> res;
int i = 0;
int param_extracted = false;
int param_start = false;
while (!param_extracted){
while (!param_extracted && i < _ext_params.size()){
if(param_start){
param_extracted = _ext_params[i]<0;
if(!param_extracted){
Expand All @@ -37,7 +47,11 @@ typedef struct LayerSizingParams
param_start = true;
}
i++;
}
}
if(!param_extracted){
res.push_back(0);
res.push_back(0);
}
return res;
}
} LayerSizingParams_t;
Expand All @@ -59,34 +73,85 @@ std::shared_ptr<NerlWorkerType> parse_model_params(std::string &model_type_str,s

// Parses the comma-separated layer-sizes string into one LayerSizingParams_t
// per layer.
//
// Simple layers are a bare integer ("32") and set only dimx. Complex layers
// carry up to 3 leading dimensions separated by 'x', followed by extension
// params, each introduced by a code character:
//   k = kernel, p = padding, s = stride, t = conv type
// e.g. "5x5x1k2x2p1x1s1t1". Extension params are flattened into _ext_params
// as: <negative param code>, dim, dim, ...  (codes match the
// LayerSizingParams_t enum: k=-1, p=-2, s=-3, t=-5).
//
// layers_types_vec is kept for interface compatibility; parsing now keys off
// the string shape (pure integer => simple) rather than the layer type.
static void parse_layer_sizes_str(std::string &layer_sizes_str, std::vector<int> &layers_types_vec, std::vector<LayerSizingParams_t> &out_layer_sizes_params)
{
    (void)layers_types_vec; // unused since parsing became shape-driven
    std::vector<std::string> layer_sizes_strs_vec = nerlnet_utilities::split_strings_by_comma(layer_sizes_str);
    out_layer_sizes_params.resize(layer_sizes_strs_vec.size());
    for (size_t i = 0; i < layer_sizes_strs_vec.size(); i++)
    {
        const std::string &layer_str = layer_sizes_strs_vec[i];
        int layer_str_type = nerlnet_utilities::is_integer_number(layer_str) ? SIMPLE_PARSING : COMPLEX_PARSING;
        switch (layer_str_type)
        {
            case SIMPLE_PARSING:
            {
                out_layer_sizes_params[i].dimx = std::stoi(layer_str);
                break;
            }
            case COMPLEX_PARSING:
            {
                // Leading dimensions: numbers not immediately followed by a
                // k/s/t/p code or an 'x' separator.
                // NOTE(review): the loop always reads 3 numbers; if fewer than
                // 3 dims precede the first param code, a param number is
                // consumed as a dim — confirm inputs always supply 3 dims.
                std::regex rgx_dim("[0-9][^kstpx]*");
                std::smatch dim_match;
                std::unordered_map<char, int> param_codes = {
                    {'k', -1}, // KERNEL_SIZE
                    {'p', -2}, // PADDING_SIZE
                    {'s', -3}, // STRIDE_SIZE
                    {'t', -5}  // IS_VALID (conv type)
                };
                std::string::const_iterator searchStartDim(layer_str.cbegin());
                for (size_t k = 0; k < 3; k++)
                {
                    if (!std::regex_search(searchStartDim, layer_str.cend(), dim_match, rgx_dim))
                        break; // malformed input: keep defaults instead of reading a stale match
                    if (k == 0)
                        out_layer_sizes_params[i].dimx = std::stoi(dim_match[0]);
                    else if (k == 1)
                        out_layer_sizes_params[i].dimy = std::stoi(dim_match[0]);
                    else
                        out_layer_sizes_params[i].dimz = std::stoi(dim_match[0]);
                    searchStartDim = dim_match.suffix().first;
                }
                // Extension params: a code char followed by numbers separated
                // by 'x', e.g. "k2x2", "p1", "s1x1", "t1".
                std::regex rgx_rest("[kspt]([0-9]*x?[0-9]*)*");
                std::smatch param_match;
                std::string::const_iterator searchStart(layer_str.cbegin());
                while (std::regex_search(searchStart, layer_str.cend(), param_match, rgx_rest))
                {
                    char param_char = param_match.str()[0];              // the code character
                    std::string dims_str = param_match.str().substr(1);  // the numbers after it
                    out_layer_sizes_params[i]._ext_params.push_back(param_codes[param_char]);
                    std::istringstream dims_stream(dims_str);
                    std::string dimension;
                    while (std::getline(dims_stream, dimension, 'x'))
                    {
                        out_layer_sizes_params[i]._ext_params.push_back(std::stoi(dimension));
                    }
                    // A single value (e.g. "s1") applies to both spatial dims:
                    // duplicate it. 't' is a scalar flag and is not duplicated.
                    if (dims_str.length() == 1 && param_char != 't')
                        out_layer_sizes_params[i]._ext_params.push_back(std::stoi(dimension));
                    searchStart = param_match.suffix().first;
                }
                break;
            }
            default:
            {
                LogError("Error parsing layer size string");
                break;
            }
        }
    }
}
} // Closing brace for namespace nerlnet
// Closing brace for namespace nerlnet









// "5x5k2x2p1s1", 5,5,KERNEL_SIZE_IDX,2,2,PADDING_SIZE_IDX,1 |
// "5x5k2x2p1x1s1", 5,5,KERNEL_SIZE_IDX,2,2,PADDING_SIZE_IDX,1,1,STRIDE_SIZE_IDX,1,1
// "5k2p1", 5,KERNEL_SIZE_IDX,2,PADDING_SIZE_IDX,1 |
// "8", 8

Expand All @@ -98,6 +163,4 @@ static void parse_layer_sizes_str(std::string &layer_sizes_str, std::vector<int>
// 2. Represent in a 1D vector and using a second vector for layer start
// 3. Create class

}

}
//
Loading

0 comments on commit 5b9b498

Please sign in to comment.