
Clean submodules, delete nnmt, bring onnx2nnv
mldiego committed May 18, 2023
1 parent f0aa8b5 commit ce2ded2
Showing 6 changed files with 256 additions and 9 deletions.
6 changes: 0 additions & 6 deletions .gitmodules
@@ -1,12 +1,6 @@
[submodule "code/nnv/engine/hyst"]
path = code/nnv/engine/hyst
url = https://github.com/verivital/hyst
[submodule "code/nnv/engine/onnx2nnv"]
path = code/nnv/engine/onnx2nnv
url = https://github.com/verivital/onnx2nnv.git
[submodule "code/nnv/engine/nnmt"]
path = code/nnv/engine/nnmt
url = https://github.com/verivital/nnvmt.git
[submodule "code/nnv/engine/cora"]
path = code/nnv/engine/cora
url = https://github.com/verivital/CORA.git
1 change: 0 additions & 1 deletion code/nnv/engine/nnmt
Submodule nnmt deleted from a9918b
1 change: 0 additions & 1 deletion code/nnv/engine/onnx2nnv
Submodule onnx2nnv deleted from 1a0eb5
2 changes: 1 addition & 1 deletion code/nnv/engine/utils/export2vnnlib.m
@@ -60,7 +60,7 @@ function export2vnnlib(lb, ub, outsize, property, name)
fprintf(fID,"))");
else
constraint = halfspaceConstraint2inequality_1(property.G, property.g);
fprintf(fID,"(assert "+constraint+")");
fprintf(fID,"(assert "+constraint);
end

% close and save file
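
For context, a minimal sketch of how this exporter might be called (not part of the commit; the HalfSpace constructor and its arguments are assumptions based on the property.G and property.g fields used above):

% Hypothetical example: export a 2-input, 2-output property to VNN-LIB.
lb = [-1; -1];                    % input lower bounds
ub = [1; 1];                      % input upper bounds
outsize = 2;                      % number of network outputs
property = HalfSpace([1 -1], 0);  % assumed NNV halfspace: y1 - y2 <= 0
export2vnnlib(lb, ub, outsize, property, "prop_example.vnnlib");
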
153 changes: 153 additions & 0 deletions code/nnv/engine/utils/matlab2nnv.m
@@ -0,0 +1,153 @@
function net = matlab2nnv(Mnetwork)

%% Check for correct inputs and process network
ntype = class(Mnetwork); % input type
if ~contains(ntype, ["SeriesNetwork", "LayerGraph", "DAGNetwork", "dlnetwork"])
error('Wrong input type. Input must be a SeriesNetwork, LayerGraph, DAGNetwork, or dlnetwork');
end

%% Process input types
% Input is a MATLAB type neural network (layergraph, seriesNetwork, dlnetwork or dagnetwork)
if ntype == "SeriesNetwork"
conns = layerGraph(Mnetwork).Connections; % get the table of connections
else
conns = Mnetwork.Connections; % get the table of connections
end

Layers = Mnetwork.Layers; % get the list of layers

%% Transform to NNV

n = length(Layers);
nnvLayers = cell(n,1);
names = strings(n,1);

% Parse network layer-by-layer
for i=1:n
L = Layers(i);
% fprintf('\nParsing Layer %d... \n', i);
% Layers with no effect on the reachability analysis
if isa(L, 'nnet.cnn.layer.DropoutLayer') || isa(L, 'nnet.cnn.layer.SoftmaxLayer') || isa(L, 'nnet.cnn.layer.ClassificationOutputLayer') ...
|| isa(L,"nnet.onnx.layer.VerifyBatchSizeLayer") || isa(L, "nnet.cnn.layer.RegressionOutputLayer")
Li = PlaceholderLayer.parse(L);

% Image Input Layer
elseif isa(L, 'nnet.cnn.layer.ImageInputLayer')
Li = ImageInputLayer.parse(L);

% Convolutional 2D layer
elseif isa(L, 'nnet.cnn.layer.Convolution2DLayer')
Li = Conv2DLayer.parse(L);

% ReLU Layer (also referred to as poslin)
elseif isa(L, 'nnet.cnn.layer.ReLULayer')
Li = ReluLayer.parse(L);

% Batch Normalization Layer
elseif isa(L, 'nnet.cnn.layer.BatchNormalizationLayer')
Li = BatchNormalizationLayer.parse(L);

% Max Pooling 2D Layer
elseif isa(L, 'nnet.cnn.layer.MaxPooling2DLayer')
Li = MaxPooling2DLayer.parse(L);

% Average Pooling 2D Layer
elseif isa(L, 'nnet.cnn.layer.AveragePooling2DLayer')
Li = AveragePooling2DLayer.parse(L);

% Fully Connected Layer
elseif isa(L, 'nnet.cnn.layer.FullyConnectedLayer')
Li = FullyConnectedLayer.parse(L);

% Pixel Classification Layer (used for Semantic Segmentation output)
elseif isa(L, 'nnet.cnn.layer.PixelClassificationLayer')
Li = PixelClassificationLayer.parse(L);

% Flatten Layer
elseif isa(L, 'nnet.keras.layer.FlattenCStyleLayer') || isa(L, 'nnet.cnn.layer.FlattenLayer') || isa(L, 'nnet.onnx.layer.FlattenLayer') ...
|| isa(L, 'nnet.onnx.layer.FlattenInto2dLayer')
Li = FlattenLayer.parse(L);

% Sigmoid Layer (also referred to as logsig)
elseif isa(L, 'nnet.keras.layer.SigmoidLayer') || isa(L, 'nnet.onnx.layer.SigmoidLayer')
Li = SigmoidLayer.parse(L);

% ElementWise Affine Layer (often used as a bias layer after FC layers)
elseif isa(L, 'nnet.onnx.layer.ElementwiseAffineLayer')
Li = ElementwiseAffineLayer.parse(L);

% Feature input layer
elseif isa(L, 'nnet.cnn.layer.FeatureInputLayer')
Li = FeatureInputLayer.parse(L);

% Transposed Convolution 2D Layer
elseif isa(L, 'nnet.cnn.layer.TransposedConvolution2DLayer')
Li = TransposedConv2DLayer.parse(L);

% Max Unpooling 2D Layer
elseif isa(L, 'nnet.cnn.layer.MaxUnpooling2DLayer')
Li = MaxUnpooling2DLayer.parse(L, conns);
pairedMaxPoolingName = NN.getPairedMaxPoolingName(conns, Li.Name);
Li.setPairedMaxPoolingName(pairedMaxPoolingName);

% Depth Concatenation Layer (common in U-Nets)
elseif isa(L, 'nnet.cnn.layer.DepthConcatenationLayer')
Li = DepthConcatenationLayer.parse(L);

% Concatenation Layer (concat dim part of layer properties)
elseif isa(L, 'nnet.cnn.layer.ConcatenationLayer')
Li = ConcatenationLayer.parse(L);

% Reshape Layer (custom created after parsing ONNX layers)
elseif contains(class(L), "ReshapeLayer")
Li = ReshapeLayer.parse(L);

% Custom flatten layers (avoid if possible)
elseif contains(class(L), ["flatten"; "Flatten"])
% Check previous layers to see if we can neglect this one in the analysis
for k=i-1:-1:1
if contains(class(nnvLayers{k}), 'Input')
if ~strcmp(nnvLayers{k}.Normalization, 'none')
fprintf('Layer %d is a %s, which is not yet supported in NNV; please consider removing this layer for the analysis.\n', i, class(L));
error('Unsupported Class of Layer');
end
elseif ~isa(nnvLayers{k}, 'PlaceholderLayer')
fprintf('Layer %d is a %s, which is not yet supported in NNV; please consider removing this layer for the analysis.\n', i, class(L));
error('Unsupported Class of Layer');
end
end
% If we can neglect all previous layers, reinitialize layers and parse them again as placeholder layers
nnvLayers = cell(n,1);
% Parse all previous layers again
for li = 1:i-1
L = Layers(li);
Li = PlaceholderLayer.parse(L);
nnvLayers{li} = Li;
end
% Parse current flatten layer
L = Layers(i);
Li = PlaceholderLayer.parse(L);

% All other layers are currently not supported in NNV
else
fprintf('Layer %d is a %s, which is not yet supported in NNV; please consider removing this layer for the analysis.\n', i, class(L));
error('Unsupported Class of Layer');
end

% Add layer name to list
names(i) = string(L.Name);
nnvLayers{i} = Li;
end
indxs = 1:n;

% Assign layer names to corresponding indices
name2number = containers.Map(names,indxs);

% ConnectionsTable = table(new_sources, new_dests, 'VariableNames', {'Source', 'Destination'});

% Create neural network
net = NN(nnvLayers, conns);
net.name2indx = name2number;

end
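
A minimal usage sketch for matlab2nnv (not part of the commit; the .mat file and variable names are hypothetical):

% Convert a trained MATLAB network into NNV's NN class.
data = load("trainedNet.mat");  % hypothetical file holding a SeriesNetwork or DAGNetwork
net = matlab2nnv(data.net);     % parse layers and connections into NNV
disp(keys(net.name2indx));      % layer-name -> index map built by the parser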

102 changes: 102 additions & 0 deletions code/nnv/engine/utils/onnx2nnv.m
@@ -0,0 +1,102 @@
function nn = onnx2nnv(onnxFile, loadOptions)
% nn = onnx2nnv(onnxFile, loadOptions)
% @nn: NNV's neural network
% @onnxFile: neural network to import in ONNX format
% @loadOptions (optional): see importONNXLayers.m for reference on optional arguments

% Import ONNX neural networks into NNV
% Output : NN -> NNV neural network class

%% Step 1. Load the network into MATLAB

switch nargin
% Only onnx network as input
case 1
try
net = importONNXNetwork(onnxFile, 'GenerateCustomLayers', false);
catch
warning('Could not load the neural network without custom layers. Trying other loading options.');
% If disabling custom layer generation fails, try setting the
% input and output formats; if that still errors, just load the layers
try
net = importONNXNetwork(onnxFile, 'GenerateCustomLayers', false, 'InputDataFormats', 'BSSC', 'OutputDataFormats', 'BC');
catch
try
net = importONNXLayers(onnxFile, 'InputDataFormats', 'BSSC', 'OutputDataFormats', 'BC', 'FoldConstants', "deep");
catch
try
net = importONNXLayers(onnxFile, 'OutputDataFormats', 'BC', 'FoldConstants', "deep");
catch
net = importONNXLayers(onnxFile);
end
end
end
end
% Onnx network + loading options as inputs (Parsing inputs)
case 2
if ~isstruct(loadOptions)
error('Wrong input type for input 2. loadOptions must be a struct.')
end
if isfield(loadOptions,'InputDataFormat')
InputDataFormat = loadOptions.InputDataFormat;
else
InputDataFormat = []; % automatically detected by importONNXLayers
end
if isfield(loadOptions, 'OutputDataFormat')
OutputDataFormat = loadOptions.OutputDataFormat;
else
OutputDataFormat = []; % automatically detected by importONNXLayers
end
if isfield(loadOptions, 'TargetNetwork')
targetNetwork = loadOptions.TargetNetwork;
else
targetNetwork = 'dagnetwork'; % default
end
if isfield(loadOptions, 'GenerateCustomLayers')
GenerateCustomLayers = loadOptions.GenerateCustomLayers;
else
GenerateCustomLayers = true;
end
if isfield(loadOptions, 'FoldConstants')
foldConstants = loadOptions.FoldConstants;
else
foldConstants = 'deep';
end
% Inputs have been parsed; now try loading the network
try
net = importONNXLayers(onnxFile, 'GenerateCustomLayers', GenerateCustomLayers, 'FoldConstants', foldConstants, ...
'TargetNetwork', targetNetwork, 'InputDataFormats', InputDataFormat, 'OutputDataFormats', OutputDataFormat);
catch
warning('Could not load the neural network with the given loading options. Trying default options for NNV.');
try
net = importONNXLayers(onnxFile, 'GenerateCustomLayers', false, 'InputDataFormats', 'BSSC', 'OutputDataFormats', 'BC');
catch
net = importONNXLayers(onnxFile, FoldConstants="deep");
end
end
end

% This function may not be perfect yet. We define everything as an NN and
% return a successful or unsuccessful note; it may even work with just the
% list of layers and connections if the network is fully connected
% (layer 1 -> layer 2, layer 2 -> layer 3, ...).

% The main function to be called will be importONNXLayers(...)
% There are different arguments that can be passed to the ONNX importers;
% see importONNXNetwork and importONNXLayers for more info.

%% Step 2. Convert network into NNV format
nn = matlab2nnv(net); % Can use this separate function and add it to utils, or use a NN.parse function

%% Other notes
% Something more complicated, but possibly more useful in the long term,
% is directly using the function
% nnet.internal.cnn.onnx.ModelProto(Filename);
% found inside nnet.internal.cnn.onnx.importONNXLayers.
% This returns the "raw" ONNX model in MATLAB; we can then create the
% model ourselves.
% Cons: harder to debug and understand; it will take longer to develop.
% Pros: we do not rely on MATLAB to add support for some of these operations, so it should be more robust.

end
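
A hedged usage sketch for onnx2nnv (the ONNX file name is a placeholder; the option fields mirror the loadOptions struct parsed in the function):

% Convert an ONNX model into NNV with explicit loading options.
loadOptions = struct;
loadOptions.InputDataFormat = 'BSSC';      % batch, spatial, spatial, channel
loadOptions.OutputDataFormat = 'BC';       % batch, channel
loadOptions.TargetNetwork = 'dagnetwork';
loadOptions.GenerateCustomLayers = true;
loadOptions.FoldConstants = 'deep';
nn = onnx2nnv("model.onnx", loadOptions);  % "model.onnx" is a placeholder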
