Skip to content

Commit

Permalink
Merge pull request #192 from leondavi/haranTests
Browse files Browse the repository at this point in the history
Haran tests
  • Loading branch information
leondavi authored Jun 15, 2023
2 parents 3f1d91c + 960ceb1 commit 4f45f8f
Show file tree
Hide file tree
Showing 15 changed files with 74 additions and 47 deletions.
4 changes: 2 additions & 2 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@ project(nerlnet DESCRIPTION "Nerlnet Distributed Machine Learning Research Platf

set(PROJECT_BINARY_DIR build)
set(CMAKE_CXX_STANDARD 11)
# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -latomic")
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -latomic")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -latomic")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -latomic")
set(CMAKE_CXX_FLAGS_RELEASE "-O3")

#add_compile_definitions(EIGEN_MAX_ALIGN_BYTES=8) #Open this line for RASPI
Expand Down
5 changes: 5 additions & 0 deletions NerlnetRun.sh
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,11 @@ if [[ "$buildNerlnetLibrary" -eq 1 ]] ; then
./NerlnetBuild.sh -j $JobsNum
fi

# only for raspberry
is_rasp="$(grep -c raspbian /etc/os-release)"
if [ $is_rasp -gt "0" ]; then
export LD_PRELOAD=/usr/lib/arm-linux-gnueabihf/libatomic.so.1.2.0
fi

cd src_erl/Communication_Layer/http_Nerlserver
echo "$NERLNET_PREFIX Script CWD: $PWD"
Expand Down
Binary file added examples/ExampleOfAPI.pdf
Binary file not shown.
10 changes: 5 additions & 5 deletions inputJsonFiles/Architecture/arch_1PC3WorkerSynthFed.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"NerlNetSettings":
{
"frequency": "5",
"frequency": "100",
"batchSize": "50"
}
,
Expand Down Expand Up @@ -41,7 +41,7 @@
"scalingMethod": "1",
"_commnet_layer_type": "E_LAYER_TYPE_DEFAULT(perceptron) = 0, E_LAYER_TYPE_SCALING = 1, E_LAYER_TYPE_CONVOLUTIONAL = 2 , E_LAYER_TYPE_PERCEPTRON = 3 , E_LAYER_TYPE_POOLING = 4 , E_LAYER_TYPE_PROBABILISTIC = 5 , E_LAYER_TYPE_LSTM = 6 , E_LAYER_TYPE_RECURRENT = 7 , E_LAYER_TYPE_UNSCALING = 8 , E_LAYER_TYPE_BOUNDING = 9 ",
"layerTypesList": "[3,1,1,1,0,0]",
"layersSizes": "[3,1,1,5,2,3]",
"layersSizes": "[3,1,1,5,4,3]",
"_comment2": "Activation functions explain: Threshold = 1, SymmetricThreshold = 2 ,Logistic = 3 ,HyperbolicTangent = 4 ,Linear = 5,RectifiedLinear = 6 ,ExponentialLinear = 7 ,ScaledExponentialLinear = 8 ,SoftPlus = 9 ,SoftSign = 10 ,HardSigmoid = 11",
"layersActivationFunctions": "[3,1,1,6,11,11]",
"federatedMode": "0",
Expand All @@ -64,7 +64,7 @@
"scalingMethod": "1",
"_commnet_layer_type": "E_LAYER_TYPE_DEFAULT(perceptron) = 0, E_LAYER_TYPE_SCALING = 1, E_LAYER_TYPE_CONVOLUTIONAL = 2 , E_LAYER_TYPE_PERCEPTRON = 3 , E_LAYER_TYPE_POOLING = 4 , E_LAYER_TYPE_PROBABILISTIC = 5 , E_LAYER_TYPE_LSTM = 6 , E_LAYER_TYPE_RECURRENT = 7 , E_LAYER_TYPE_UNSCALING = 8 , E_LAYER_TYPE_BOUNDING = 9 ",
"layerTypesList": "[3,1,1,1,0,0]",
"layersSizes": "[3,1,1,5,2,3]",
"layersSizes": "[3,1,1,5,4,3]",
"_comment2": "Activation functions explain: Threshold = 1, SymmetricThreshold = 2 ,Logistic = 3 ,HyperbolicTangent = 4 ,Linear = 5,RectifiedLinear = 6 ,ExponentialLinear = 7 ,ScaledExponentialLinear = 8 ,SoftPlus = 9 ,SoftSign = 10 ,HardSigmoid = 11",
"layersActivationFunctions": "[3,1,1,6,11,11]",
"federatedMode": "0",
Expand All @@ -87,7 +87,7 @@
"scalingMethod": "1",
"_commnet_layer_type": "E_LAYER_TYPE_DEFAULT(perceptron) = 0, E_LAYER_TYPE_SCALING = 1, E_LAYER_TYPE_CONVOLUTIONAL = 2 , E_LAYER_TYPE_PERCEPTRON = 3 , E_LAYER_TYPE_POOLING = 4 , E_LAYER_TYPE_PROBABILISTIC = 5 , E_LAYER_TYPE_LSTM = 6 , E_LAYER_TYPE_RECURRENT = 7 , E_LAYER_TYPE_UNSCALING = 8 , E_LAYER_TYPE_BOUNDING = 9 ",
"layerTypesList": "[3,1,1,1,0,0]",
"layersSizes": "[3,1,1,5,2,3]",
"layersSizes": "[3,1,1,5,4,3]",
"_comment2": "Activation functions explain: Threshold = 1, SymmetricThreshold = 2 ,Logistic = 3 ,HyperbolicTangent = 4 ,Linear = 5,RectifiedLinear = 6 ,ExponentialLinear = 7 ,ScaledExponentialLinear = 8 ,SoftPlus = 9 ,SoftSign = 10 ,HardSigmoid = 11",
"layersActivationFunctions": "[3,1,1,6,11,11]",
"federatedMode": "0",
Expand Down Expand Up @@ -119,7 +119,7 @@
{
"name": "s1",
"port": "8091",
"method": "1",
"method": "2",
"COMMENTS": "method allowed: '1': sends each exapme to all clients listed, '2': round robin between all clients"
}
],
Expand Down
2 changes: 1 addition & 1 deletion inputJsonFiles/Architecture/arch_3PC3WorkerSynthFed.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"NerlNetSettings":
{
"frequency": "5",
"frequency": "50",
"batchSize": "50"
}
,
Expand Down
2 changes: 0 additions & 2 deletions src_cpp/opennnBridge/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@ set(NIFPP_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../nifpp/")
set(OPENNN_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../opennn/opennn/")
set(SIMPLE_LOGGER_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../simple-cpp-logger/include")
set(CMAKE_VERBOSE_MAKEFILE ON)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -latomic")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -latomic")
set(CMAKE_CXX_FLAGS "-fpic")
set(ERL_NIF_DEFAULT_LOCATION "/usr/local/lib/erlang/usr/include")

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -361,9 +361,10 @@ code_change(_OldVsn, State = #main_genserver_state{}, _Extra) ->


setClientState(StateAtom,ClientName, NerlnetGraph,MyName) ->
{RouterHost,RouterPort} = nerl_tools:getShortPath(MyName,ClientName,NerlnetGraph),
%{RouterHost,RouterPort} =maps:get(ClientName, ConnectionMap),
nerl_tools:http_request(RouterHost,RouterPort,atom_to_list(StateAtom), atom_to_list(ClientName)).
nerl_tools:sendHTTP(MyName, ClientName, atom_to_list(StateAtom), ClientName).
% {RouterHost,RouterPort} = nerl_tools:getShortPath(MyName,ClientName,NerlnetGraph),
% %{RouterHost,RouterPort} =maps:get(ClientName, ConnectionMap),
% nerl_tools:http_request(RouterHost,RouterPort,atom_to_list(StateAtom), atom_to_list(ClientName)).

%%find Router and send message: finds the path for the named machine from the connection map and sends to the right router to forward.
%%findroutAndsend([],_,_,WaitingList)->WaitingList;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ start(_StartType, _StartArgs) ->
%% setup the erlang logger for this module
nerl_tools:setup_logger(?MODULE),
%% make sure nif can be loaded:
nerlNIF:init(),
nerlNIF:nif_preload(),
HostName = nerl_tools:getdeviceIP(),
?LOG_INFO(?LOG_HEADER++"This device IP: ~p~n", [HostName]),
%Create a listener that waits for a message from python about the adresses of the wanted json
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,6 @@
% -define(BUILD_TYPE_DEBUG,"debug").
% -define(BUILD_TYPE_RELEASE,"/build/release").

-record(workerGeneric_state, {clientPid, features, labels, myName, modelId, nextState, currentBatchID=0,ackClient=0, missedSamplesCount = 0, missedTrainSamples= [],optimizer, lossMethod, learningRate, customFunc, workerData}).

%% auto generated definitions:
%% TODO: implement this in python
-define(E_CUSTOMNN, 5).
Expand Down
5 changes: 4 additions & 1 deletion src_erl/erlBridge/nerlNIF.erl
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
-include_lib("kernel/include/logger.hrl").
-include("nerlTensor.hrl").

-export([init/0,create_nif/6,train_nif/6,call_to_train/6,predict_nif/3,call_to_predict/6,get_weights_nif/1,printTensor/2]).
-export([init/0,nif_preload/0,create_nif/6,train_nif/6,call_to_train/6,predict_nif/3,call_to_predict/6,get_weights_nif/1,printTensor/2]).
-export([call_to_get_weights/1,call_to_set_weights/2]).
-export([decode_nif/2, nerltensor_binary_decode/2]).
-export([encode_nif/2, nerltensor_encode/5, nerltensor_conversion/2, get_all_binary_types/0, get_all_nerltensor_list_types/0]).
Expand All @@ -22,6 +22,9 @@ init() ->
RES = erlang:load_nif(NELNET_LIB_PATH, 0),
RES.

%% make sure nif can be loaded (activates on_load)
nif_preload() -> done.

% ModelID - Unique ID of the neural network model
% ModelType - E.g. Regression, Classification
create_nif(_ModelID, _ModelType , _ScalingMethod , _LayerTypesList , _LayersSizes , _LayersActivationFunctions) ->
Expand Down
1 change: 1 addition & 0 deletions src_erl/erlBridge/workers/workerDefinitions.hrl
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,6 @@
-define(ETS_KEYVAL_VAL_IDX, 2).
-define(TENSOR_DATA_IDX, 1).

-record(workerGeneric_state, {clientPid, features, labels, myName, modelId, nextState, currentBatchID=0, ackClient=0, missedBatchesCount = 0, missedTrainBatches= [],optimizer, lossMethod, learningRate, customFunc, workerData}).
-record(workerFederatedClient, {syncCount, syncMaxCount, clientPID, myName, serverName}).
-record(workerFederatedServer, {syncCount, syncMaxCount, clientPID, myName, workersNamesList}).
6 changes: 3 additions & 3 deletions src_erl/erlBridge/workers/workerFederatedServer.erl
Original file line number Diff line number Diff line change
Expand Up @@ -88,8 +88,8 @@ update({GenWorkerEts, WorkerData}) ->
ets:insert(ThisEts, {WorkerName, worker, NerlTensorWeights}),

%% check if there are queued messages, and treat them accordingly
Q = ets:lookup_element(GenWorkerEts, message_q, ?ETS_KEYVAL_VAL_IDX),
[ets:insert(ThisEts, {WorkerName, worker, NerlTensorWeights}) || {Action, WorkerName, To, NerlTensorWeights} <- Q, Action == update],
MessageQueue = ets:lookup_element(GenWorkerEts, message_q, ?ETS_KEYVAL_VAL_IDX),
[ets:insert(ThisEts, {WorkerName, worker, NerlTensorWeights}) || {Action, WorkerName, To, NerlTensorWeights} <- MessageQueue, Action == update],

%% check if got all weights of workers
WorkersList = ets:lookup_element(ThisEts, workers, ?ETS_KEYVAL_VAL_IDX),
Expand All @@ -100,7 +100,7 @@ update({GenWorkerEts, WorkerData}) ->
% io:format("AvgWeights = ~p~n",[AvgWeightsNerlTensor]),
ModelID = ets:lookup_element(GenWorkerEts, model_id, ?ETS_KEYVAL_VAL_IDX),
nerlNIF:call_to_set_weights(ModelID, AvgWeightsNerlTensor), %% update self weights to new model
[ets:delete(ThisEts, WorkerName) || WorkerName <- WorkersList ],%% delete old tensors for next aggregation phase
[ets:delete(ThisEts, OldWorkerName) || OldWorkerName <- WorkersList ],%% delete old tensors for next aggregation phase
ClientPID = ets:lookup_element(GenWorkerEts, client_pid, ?ETS_KEYVAL_VAL_IDX),
gen_statem:cast(ClientPID, {custom_worker_message, WorkersList, AvgWeightsNerlTensor}),
false;
Expand Down
27 changes: 17 additions & 10 deletions src_erl/erlBridge/workers/workerGeneric.erl
Original file line number Diff line number Diff line change
Expand Up @@ -199,28 +199,34 @@ wait(cast, {predict}, State) ->
%logger:notice("Waiting, next state - predict"),
{next_state, wait, State#workerGeneric_state{nextState = predict,ackClient=1}};

wait(cast, {sample, _SampleListTrain}, State = #workerGeneric_state{missedSamplesCount = MissedSamplesCount, missedTrainSamples = _MissedTrainSamples}) ->
?LOG_NOTICE(?LOG_HEADER++"Missed in pid: ~p, Missed batches count: ~p\n",[self(), MissedSamplesCount]),
wait(cast, {sample, _SampleListTrain}, State = #workerGeneric_state{missedBatchesCount = MissedCount, missedTrainBatches = _MissedTrainSamples}) ->
NewMissedCount = MissedCount + 1,
?LOG_NOTICE(?LOG_HEADER++"Missed sample in worker ~p count: ~p\n",[ets:lookup_element(get(generic_worker_ets), worker_name, ?ETS_KEYVAL_VAL_IDX), NewMissedCount]),
% Miss = MissedTrainSamples++SampleListTrain,
{next_state, wait, State#workerGeneric_state{missedSamplesCount = MissedSamplesCount+1}};
{next_state, wait, State#workerGeneric_state{missedBatchesCount = NewMissedCount}};

wait(cast, {sample,_CSVname, _BatchID, _SampleListPredict}, State = #workerGeneric_state{missedSamplesCount = MissedSamplesCount, missedTrainSamples = _MissedTrainSamples}) ->
% throw("got sample while calculating"),
?LOG_NOTICE(?LOG_HEADER++"Missed in pid: ~p, Missed batches count: ~p\n",[self(), MissedSamplesCount]),
% ?LOG_NOTICE(?LOG_HEADER++"Missed in pid: ~p, Missed Samples: ~p\n",[self(), SampleListPredict]),
% wait(cast, {sample,_CSVname, _BatchID, _SampleListPredict}, State = #workerGeneric_state{missedBatchesCount = MissedCount, missedTrainBatches = _MissedTrainSamples}) ->
% % throw("got sample while calculating"),
% NewMissedCount = MissedCount + 1,
% ?LOG_NOTICE(?LOG_HEADER++"Missed in pid: ~p, Missed batches count: ~p\n",[self(), NewMissedCount]),
% % ?LOG_NOTICE(?LOG_HEADER++"Missed in pid: ~p, Missed Samples: ~p\n",[self(), SampleListPredict]),

% Miss = MissedTrainSamples++SampleListTrain,
{next_state, wait, State#workerGeneric_state{missedSamplesCount = MissedSamplesCount+1}};
% % Miss = MissedTrainSamples++SampleListTrain,
% {next_state, wait, State#workerGeneric_state{missedBatchesCount = NewMissedCount}};

wait(cast, Data, State) ->
logger:notice("worker in wait cant treat message: ~p\n",[Data]),
logger:notice("worker ~p in wait cant treat message: ~p\n",[ets:lookup_element(get(generic_worker_ets), worker_name, ?ETS_KEYVAL_VAL_IDX), Data]),
OldQ = ets:lookup_element(get(generic_worker_ets), message_q, ?ETS_KEYVAL_VAL_IDX),
ets:insert(get(generic_worker_ets), {message_q, OldQ++[Data]}),
{keep_state, State}.

update(cast, {update, From, NerltensorWeights}, State = #workerGeneric_state{modelId = ModelId, customFunc = CustomFunc, nextState = NextState}) ->
CustomFunc(update, {get(generic_worker_ets), NerltensorWeights}),
{next_state, NextState, State};

update(cast, {idle}, State = #workerGeneric_state{myName = MyName, modelId = ModelId, customFunc = CustomFunc, nextState = NextState}) ->
gen_statem:cast(get(client_pid),{stateChange,MyName}),
{next_state, idle, State#workerGeneric_state{nextState = idle}};

update(cast, Data, State = #workerGeneric_state{modelId = ModelId, customFunc = CustomFunc, nextState = NextState}) ->
% io:format("worker ~p got ~p~n",[ets:lookup_element(get(generic_worker_ets), worker_name, ?ETS_KEYVAL_VAL_IDX), Data]),
Expand All @@ -242,6 +248,7 @@ update(cast, Data, State = #workerGeneric_state{modelId = ModelId, customFunc =
{sample, Tensor} -> {keep_state, State}
end.


%% State train
train(cast, {sample, {<<>>, _Type}}, State ) ->
?LOG_ERROR("Empty sample received"),
Expand Down
36 changes: 26 additions & 10 deletions src_py/apiServer/apiServer.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ def help(self):
==========Setting experiment========
-showJsons(): shows available arch / conn / exp layouts
-printArchParams(Num) print description of selected arch file
-selectJsons(): get input from user for arch / conn / exp selection
-setJsons(arch, conn, exp): set layout in code
-getUserJsons(): returns the selected arch / conn / exp
Expand Down Expand Up @@ -77,6 +78,7 @@ def initialization(self, arch_json: str, conn_map_json, experiment_flow_json):
globe.experiment_flow_global.set_experiment_flow(expData)
globe.components = NetworkComponents(archData)
globe.components.printComponents()
print(f"Connections:\n\t\t{connData['connectionsMap']}")
globe.experiment_flow_global.printExp()

mainServerIP = globe.components.mainServerIp
Expand Down Expand Up @@ -131,6 +133,13 @@ def sendJsonsToDevices(self):
def showJsons(self):
self.json_dir_parser.print_lists()

def printArchParams(self, arch = ""):
if not arch:
print("\n Enter arch file number:", end = ' ')
arch = input()
selectedArch = self.json_dir_parser.arch_list[int(arch)].get_full_path()
NetworkComponents(self.json_dir_parser.json_from_path(selectedArch)).printComponents()

def selectJsons(self):
self.json_dir_parser.select_arch_connmap_experiment()

Expand All @@ -152,6 +161,12 @@ def getSourcesList(self):
def getTransmitter(self):
return self.transmitter

def tic(self):
return time.time()

def toc(self, start):
return time.time() - start

def stopServer(self):
receiver.stop()
return True
Expand Down Expand Up @@ -415,22 +430,23 @@ def accuracy_matrix(self, expNum):
bacc = (tpr + tnr) / 2
inf = tpr + tnr - 1

print(f"\n{worker}, class #{j}:")
print(f"{worker}, class #{j}:")
print(f"Accuracy acquired (TP+TN / Tot): {round(acc*100, 3)}%.")
print(f"Balanced Accuracy (TPR+TNR / 2): {round(bacc*100, 3)}%.")
print(f"Positive Predictive Rate (Precision of P): {round(ppv*100, 3)}%.")
print(f"True Pos Rate (Sensitivity / Hit Rate): {round(tpr*100, 3)}%.")
print(f"True Neg Rate (Selectivity): {round(tnr*100, 3)}%.")
print(f"Informedness (of making decision): {round(inf*100, 3)}%.\n")
f.write(f"\n{worker}, class #{j}:")
f.write(f"Accuracy acquired (TP+TN / Tot): {round(acc*100, 3)}%.")
f.write(f"Balanced Accuracy (TPR+TNR / 2): {round(bacc*100, 3)}%.")
f.write(f"Positive Predictive Rate (Precision of P): {round(ppv*100, 3)}%.")
f.write(f"True Pos Rate (Sensitivity / Hit Rate): {round(tpr*100, 3)}%.")
f.write(f"True Neg Rate (Selectivity): {round(tnr*100, 3)}%.")
f.write(f"Informedness (of making decision): {round(inf*100, 3)}%.\n")
print(f"Informedness (of making decision): {round(inf*100, 3)}%.\n\n")

f.write(f"{worker}, class #{j}:\n")
f.write(f"Accuracy acquired (TP+TN / Tot): {round(acc*100, 3)}%.\n")
f.write(f"Balanced Accuracy (TPR+TNR / 2): {round(bacc*100, 3)}%.\n")
f.write(f"Positive Predictive Rate (Precision of P): {round(ppv*100, 3)}%.\n")
f.write(f"True Pos Rate (Sensitivity / Hit Rate): {round(tpr*100, 3)}%.\n")
f.write(f"True Neg Rate (Selectivity): {round(tnr*100, 3)}%.\n")
f.write(f"Informedness (of making decision): {round(inf*100, 3)}%.\n\n")
f.close()
print(f'\nstats file saved...')

def communication_stats(self):
self.transmitter.statistics()
Expand Down
12 changes: 5 additions & 7 deletions src_py/apiServer/jsonDirParser.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,14 @@

import os
import json
import globalVars

JSON_DIRECTORY = '/usr/local/lib/nerlnet-lib/NErlNet/config/jsonDir.nerlconfig'

ARCH_IDX = 0
CONN_MAP_IDX = 1
EXPERIMENT_IDX = 2

PREFIX_ARCH = 'arch_'
PREFIX_CONNECTION_MAP = 'conn_'
PREFIX_EXPERIMENT_FLOW = 'exp_'
Expand Down Expand Up @@ -94,9 +99,6 @@ def set_arch_connmap_experiment(self, arch : int, connection_map : int, experime

def get_user_selection_files(self):
if self.user_selection_tuple:
ARCH_IDX = 0
CONN_MAP_IDX = 1
EXPERIMENT_IDX = 2
try:
selectedArch = self.arch_list[self.user_selection_tuple[ARCH_IDX]].get_full_path()
selectedConn = self.conn_map_list[self.user_selection_tuple[CONN_MAP_IDX]].get_full_path()
Expand All @@ -110,10 +112,6 @@ def get_user_selection_files(self):

def get_user_selection_jsons(self):
if self.user_selection_tuple:
ARCH_IDX = 0
CONN_MAP_IDX = 1
EXPERIMENT_IDX = 2

try:
selectedArch = self.arch_list[self.user_selection_tuple[ARCH_IDX]].get_json()
selectedConn = self.conn_map_list[self.user_selection_tuple[CONN_MAP_IDX]].get_json()
Expand Down

0 comments on commit 4f45f8f

Please sign in to comment.