Skip to content

Commit

Permalink
Fix Constant padding layer input ending up in DPU runtime (#38)
Browse files Browse the repository at this point in the history
  • Loading branch information
jornt-xilinx authored Apr 27, 2021
1 parent e1a04b9 commit f69c345
Show file tree
Hide file tree
Showing 10 changed files with 85 additions and 61 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ def xgraph_dpu_build_func(xgraph, work_dir=os.getcwd(), data_layout='NCHW', **kw
def xgraph_dpu_optimizer(xgraph, target=None, **kwargs):
    # Annotate and merge patterns (e.g. mul + max = leaky relu)
XGraphPatternAnnotator()(xgraph)
xgraph = XGraphPatternMutator(xgraph)()
xgraph = XGraphPatternMutator()(xgraph)

layout_transform_pass = \
XGraphLayoutTransformationPass('NHWC', target=target)
Expand Down
2 changes: 1 addition & 1 deletion python/pyxir/contrib/target/components/DPUCZDX8G/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def xgraph_dpu_op_support_annotator(xgraph: XGraph, target: Target, **kwargs) ->

def xgraph_dpu_optimizer(xgraph, target=None, **kwargs):
XGraphPatternAnnotator()(xgraph)
xgraph = XGraphPatternMutator(xgraph)()
xgraph = XGraphPatternMutator()(xgraph)

layout_transform_pass = XGraphLayoutTransformationPass('NHWC', target=target)
dpu_xgraph = layout_transform_pass.execute(xgraph, subgraphs_only=False)
Expand Down
55 changes: 29 additions & 26 deletions python/pyxir/frontend/tvm/relay_tools/relay_2_xgraph_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,21 +20,20 @@
from pyxir import graph
from pyxir.shared import fancy_logging
from pyxir.graph.layer import xlayer_factory
from pyxir.graph.optimization.optimizers.transposes_optimizer\
import XGraphTransposesOptimizer
from pyxir.graph.transformers.layout_transformation_pass \
import XGraphLayoutTransformationPass
from pyxir.graph.transformers.remove_unused_ops import RemoveUnusedOps
from pyxir.graph.optimization.optimizers.transposes_optimizer import (
XGraphTransposesOptimizer,
)
from pyxir.graph.transformers.layout_transformation_pass import (
XGraphLayoutTransformationPass,
)

from .util import Schedule
from ..base import BaseConverter
from .relay_2_xlayer_registry import Relay2XLayerRegistry

fancy_logger = fancy_logging.getLogger("pyxir")
logger = logging.getLogger('pyxir')




logger = logging.getLogger("pyxir")


class Relay2XGraphConverter(BaseConverter):
Expand All @@ -43,12 +42,9 @@ class Relay2XGraphConverter(BaseConverter):

RELAY_2_XLAYER = Relay2XLayerRegistry()

def from_relay_to_xgraph(self,
sym,
params,
output_op=None,
postprocessing=None,
cvx_preprocessing=None):
def from_relay_to_xgraph(
self, sym, params, output_op=None, postprocessing=None, cvx_preprocessing=None
):
# type: (tvm.relay.expr.Expr, dict, str, str, list, dict) -> XGraph
"""
Transform a TVM Relay expression to a xfDNN graph and schedule
Expand Down Expand Up @@ -91,14 +87,18 @@ def from_relay_to_xgraph(self,
# CONVERT RELAY EXPRESSION TO XLAYER GRAPH
    # This starts a recursive expression to graph conversion function
X = Relay2XGraphConverter.RELAY_2_XLAYER[sym.__class__.__name__](
sym, params, schedule, net, {},
sym,
params,
schedule,
net,
{},
Relay2XGraphConverter.RELAY_2_XLAYER,
cvx_prep=cvx_preprocessing)
cvx_prep=cvx_preprocessing,
)

# For now only softmax layers can be added to a graph output
OP_2_XLAYER = {
'Softmax': xlayer_factory.get_xop_factory_func('Softmax',
internal=True)
"Softmax": xlayer_factory.get_xop_factory_func("Softmax", internal=True)
}

# Add additional output layers to the network that are not specified
Expand All @@ -114,27 +114,30 @@ def from_relay_to_xgraph(self,
X = OP_2_XLAYER[output](op_name, [X])

if X.name in net:
raise ValueError("This should never happen. Error because the"
" generated output name already exists in the"
" network dictionary used for setup.")
raise ValueError(
"This should never happen. Error because the"
" generated output name already exists in the"
" network dictionary used for setup."
)

schedule.append(X.name)
net[X.name] = X

# Possibly replace Input layers with CvxInput layers
xlayers = [net[op_id] for op_id in schedule]
xgraph = self.xgraph_factory.build_from_xlayer(
net=xlayers,
name='relay_xgraph',
blobs=False
net=xlayers, name="relay_xgraph", blobs=False
)

# TODO remove this layout transformer
layout_transform_pass = XGraphLayoutTransformationPass('NCHW')
layout_transform_pass = XGraphLayoutTransformationPass("NCHW")
xgraph = layout_transform_pass.execute(xgraph, subgraphs_only=False)

# Merge transpose layers
t_optimizer = XGraphTransposesOptimizer(xgraph)
t_optimizer.optimize()

# Remove unused ops
xgraph = RemoveUnusedOps()(xgraph)

return xgraph
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,8 @@ def __base_relay_2_xlayer(expr, params, schedule, net, op_idx,

# !Important: set input layer tops
for iX in iXs:
iX.tops.append(op_name)
if iX.name in X.bottoms:
iX.tops.append(op_name)

return X

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,7 @@

class XGraphBasicOptimizer(XGraphBaseOptimizer):

"""
TODO
"""
"""Basic optimizer used by most targets"""

def __init__(self, xgraph, copy=False):
    # Delegates straight to the base optimizer; no extra state here.
    # NOTE(review): passing copy=True looks broken — in the base class the
    # boolean `copy` parameter shadows the `copy` module right before
    # `copy.deepcopy(xgraph)` is called; confirm before relying on copy=True.
    super(XGraphBasicOptimizer, self).__init__(xgraph, copy)
Expand Down
19 changes: 10 additions & 9 deletions python/pyxir/graph/optimization/xgraph_base_optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

from pyxir.graph import XGraph, XLayer

from .xgraph_optimization_pass import XGraphOptimizationPass
logger = logging.getLogger("pyxir")


Expand All @@ -42,19 +43,16 @@ def __init__(self, xgraph, copy=False):
self.xgraph = xgraph if not copy else copy.deepcopy(xgraph)
self.optimization_passes = {}

def add_optimization_pass(self, level, opt_pass):
def add_optimization_pass(self, level: int, opt_pass) -> None:
    """Register an optimization pass at the given priority level.

    Passes registered at the same level are kept in insertion order; the
    levels themselves are executed in ascending order by ``optimize``.

    Args:
        level: Integer priority bucket for the pass.
        opt_pass: The optimization pass (an ``XGraphOptimizationPass`` or a
            plain callable taking and returning an XGraph) to run.
    """
    assert isinstance(level, int)
    # Create the bucket for this level on first use, then append.
    self.optimization_passes.setdefault(level, []).append(opt_pass)

def optimize(self):
# type: () -> XGraph
"""
Start optimization
"""
def optimize(self) -> XGraph:
"""Start optimization"""

xgraph = self.xgraph

Expand All @@ -65,8 +63,11 @@ def optimize(self):
.format(idx, level, len(opt_passes)))

for opt_pass in opt_passes:
logger.info("--name: {}".format(opt_pass.name))

xgraph = opt_pass.execute(xgraph)
# TODO
if isinstance(opt_pass, XGraphOptimizationPass):
logger.info("--name: {}".format(opt_pass.name))
xgraph = opt_pass.execute(xgraph)
else:
xgraph = opt_pass(xgraph)

return self.xgraph
7 changes: 1 addition & 6 deletions python/pyxir/graph/optimization/xgraph_optimization_pass.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Module containing graph optimization passes
"""
"""Module defining graph optimization passes"""

import logging

Expand Down Expand Up @@ -57,7 +53,6 @@ def add_optimization(self,
})

def execute(self, xgraph):
# type: (XGraph) -> XGraph
"""
"""
condition_funcs = [opt['condition_func'] for opt in self.optimizations]
Expand Down
18 changes: 6 additions & 12 deletions python/pyxir/graph/passing/base_pass.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,23 +55,17 @@ def visit(self, X: XLayer) -> XLayer:

class XGraphMutator(object):

"""
Mutator class for changing XGraph
Arguments
---------
xgraph: XGraph
the XGraph object to be mutated
"""
"""Mutator class for changing XGraph"""

def __init__(self, xgraph: XGraph):
self.xgraph = xgraph
def __init__(self):
self.xgraph = None

def __call__(self):
def __call__(self, xgraph: XGraph):
"""Main method to be called on object to start mutation pass"""
self.xgraph = xgraph
new_xg = XGraph(self.xgraph.get_name())
new_xg.copy_meta_attrs(self.xgraph)
for X in self.xgraph.get_layers():
for X in xgraph.get_layers():
new_X = self.visit(X)
# if new_X not None
if new_X:
Expand Down
4 changes: 2 additions & 2 deletions python/pyxir/graph/pattern/patterns.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,8 @@ def visit(self, X: XLayer) -> XLayer:
class XGraphPatternMutator(XGraphMutator):
"""Mutate patterns in XGraph (for mul + max = leaky_relu)"""

def __init__(self, xgraph: XGraph):
super().__init__(xgraph)
def __init__(self):
    """Initialize the pattern mutator with empty pattern-tracking state."""
    super().__init__()
    # Bookkeeping for layers matched as part of a leaky-relu
    # (mul + max) pattern and their bottom (input) connections.
    # NOTE(review): exact key/value semantics are not visible in this
    # fragment — confirm against the visit implementation.
    self.lr_layers = {}
    self.lr_layers_bottoms = {}

Expand Down
32 changes: 32 additions & 0 deletions python/pyxir/graph/transformers/remove_unused_ops.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Copyright 2020 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for the RemoveUnusedOperators pass"""


from ..passing import XGraphMutator

from .. import XGraph
from ..layer.xlayer import defaultXLayer, XLayer


class RemoveUnusedOps(XGraphMutator):
    """Remove operators that are fully disconnected from the graph.

    A layer with neither inputs (``bottoms``) nor consumers (``tops``) is
    dead — e.g. a constant left dangling after pattern merging — and is
    dropped by returning ``None`` from ``visit``.
    """

    def __init__(self):
        super().__init__()

    def visit(self, X: XLayer) -> XLayer:
        # Idiomatic truthiness check: empty bottoms AND empty tops means
        # the layer is disconnected from the rest of the graph.
        if not X.bottoms and not X.tops:
            # Returning None tells the mutator to drop this layer.
            return None
        return super().visit(X)

0 comments on commit f69c345

Please sign in to comment.