Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 42 additions & 11 deletions include/NeuraDialect/NeuraOps.td
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def Neura_FAddOp : Op<NeuraDialect, "fadd"> {
let arguments = (ins AnyFloat:$lhs, AnyFloat:$rhs, Optional<AnyType>:$predicate);
let results = (outs AnyFloat:$result);
// let assemblyFormat = "$lhs `,` $rhs `,` $predicate attr-dict `:` type($result)";
let traits = [SameOperandsAndResultElementType];
//let traits = [SameOperandsAndResultElementType];
}

// Defines a floating-point multiplication operation.
Expand All @@ -38,7 +38,7 @@ def Neura_FMulOp : Op<NeuraDialect, "fmul"> {
let arguments = (ins AnyFloat:$lhs, AnyFloat:$rhs, Optional<AnyType>:$predicate);
let results = (outs AnyFloat:$result);
// let assemblyFormat = "$lhs `,` $rhs `,` $predicate attr-dict `:` type($result)";
let traits = [SameOperandsAndResultElementType];
// let traits = [SameOperandsAndResultElementType];
}

// Defines a bitwise OR operation.
Expand Down Expand Up @@ -90,7 +90,26 @@ def Neura_CondBr : Op<NeuraDialect, "cond_br", [Terminator, AttrSizedOperandSegm
Variadic<AnyType>:$trueArgs,
Variadic<AnyType>:$falseArgs);
let successors = (successor AnySuccessor:$trueDest, AnySuccessor:$falseDest);
let assemblyFormat = "$condition ($predicate^ `:` type($predicate))? `then` $trueArgs `:` type($trueArgs) `to` $trueDest `else` $falseArgs `:` type($falseArgs) `to` $falseDest attr-dict";
let assemblyFormat = "$condition ($predicate^ `:` type($predicate))? `then` ($trueArgs^)? `:` type($trueArgs) `to` $trueDest `else` ($falseArgs^)? `:` type($falseArgs) `to` $falseDest attr-dict";
}

// Defines an unconditional branch operation.
// Forwards $args as block arguments to the single successor $dest.
// NOTE(review): the optional group `($args^)?` elides the operand list when
// empty, but `type($args)` stays mandatory — confirm round-tripping of a
// zero-arg branch through the custom assembly format.
def Neura_Br : Op<NeuraDialect, "br", [Terminator]> {
let arguments = (ins Variadic<AnyType>:$args);
let successors = (successor AnySuccessor:$dest);
let assemblyFormat = "($args^)? `:` type($args) `to` $dest attr-dict";
}

// Defines a select operation: chooses between $ifTrue and $ifFalse based on
// the i1 $cond. NOTE(review): no trait constrains $result's type to match the
// chosen operand — confirm whether SameOperandsAndResultType is intended.
def Neura_SelOp : Op<NeuraDialect, "sel"> {
let arguments = (ins AnyType:$ifTrue, AnyType:$ifFalse, I1:$cond);
let results = (outs AnyType:$result);
// let assemblyFormat = "$ifTrue `,` $ifFalse `,` $cond attr-dict `:` type($ifTrue)";
}

// Defines a logical NOT on a single i1 value ($input -> $output).
def Neura_NotOp : Op<NeuraDialect, "not"> {
let arguments = (ins I1:$input);
let results = (outs I1:$output);
// let assemblyFormat = "$input attr-dict `:` type($output)";
}

// Defines a return operation.
Expand Down Expand Up @@ -140,12 +159,24 @@ def Neura_FMulFAddOp : Op<NeuraDialect, "fmul_fadd"> {
let traits = [SameOperandsAndResultElementType];
}

// Defines a move operation for data communication.
def Neura_MovOp : Op<NeuraDialect, "mov"> {
let summary = "Move operation";
let opName = "mov";
let arguments = (ins AnyType:$lhs);
let results = (outs AnyType:$result);
// let assemblyFormat = "$lhs `,` $predicate attr-dict `:` type($lhs) `->` type($result)";
// let traits = [Pure];
// Defines base move operation.
// Shared TableGen class for all single-input/single-output move ops
// (mov, data_mov, ctrl_mov); only the mnemonic differs between them.
class NeuraBaseMov<string mnemonic> :
Op<NeuraDialect, mnemonic> {
let arguments = (ins AnyType:$input);
let results = (outs AnyType:$output);
// let assemblyFormat = "$input attr-dict `:` type($input) `->` type($output)";
}

// Generic move operation; see also data_mov / ctrl_mov for the
// data- and control-specific variants derived from the same base.
def Neura_MovOp : NeuraBaseMov<"mov"> {
let summary = "Base move operation";
}

Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What is the difference in functionality between MovOp and DataMovOp?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

DataMov is for data delivery, while I also provide CtrlMov for control delivery.

I initially planned to have both inherit from the same base Op; however, that failed due to a CMake or TableGen issue. Later I found I also need CtrlMov to represent the backward flow, so in the latest commit in this PR I provide two separate MovOps.

// Data movement operation.
// Inserted by the insert-data-mov pass in front of Neura ops to make data
// delivery between operations explicit.
def Neura_DataMovOp : NeuraBaseMov<"data_mov"> {
let summary = "Data movement operation";
}

// Control movement operation.
// Counterpart of data_mov for control delivery; inserted by the
// insert-ctrl-mov pass (per the PR discussion, it also represents
// backward control flow).
def Neura_CtrlMovOp : NeuraBaseMov<"ctrl_mov"> {
let summary = "Control movement operation";
}
4 changes: 3 additions & 1 deletion include/NeuraDialect/NeuraPasses.h
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,11 @@ namespace neura {
// Passes defined in GraphPasses.td
#define GEN_PASS_DECL
#include "NeuraDialect/NeuraPasses.h.inc"
std::unique_ptr<mlir::Pass> createInsertMovPass();
std::unique_ptr<mlir::Pass> createInsertDataMovPass();
std::unique_ptr<mlir::Pass> createInsertCtrlMovPass();
std::unique_ptr<mlir::Pass> createFusePatternsPass();
std::unique_ptr<mlir::Pass> createAssignAcceleratorPass();
std::unique_ptr<mlir::Pass> createTransformCtrlToDataFlowPass();

#define GEN_PASS_REGISTRATION
#include "NeuraDialect/NeuraPasses.h.inc"
Expand Down
22 changes: 18 additions & 4 deletions include/NeuraDialect/NeuraPasses.td
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,25 @@ def FusePatterns : Pass<"fuse-patterns", "ModuleOp"> {
let constructor = "neura::createFusePatternsPass()";
}

def InsertMov : Pass<"insert-mov", "ModuleOp"> {
let summary = "Inserts move operations in the Neura dialect";
def InsertDataMov : Pass<"insert-data-mov", "ModuleOp"> {
let summary = "Inserts data move operations in the Neura dialect";
let description =
[{Insert neura.mov before and after all neura dialect operations.}];
let constructor = "neura::createInsertMovPass()";
[{Insert neura.data_mov before all neura dialect operations.}];
let constructor = "neura::createInsertDataMovPass()";
}

// Declares the insert-ctrl-mov pass (constructor implemented in
// InsertCtrlMovPass.cpp).
def InsertCtrlMov : Pass<"insert-ctrl-mov", "ModuleOp"> {
let summary = "Inserts ctrl move operations in the Neura dialect";
let description =
[{Insert neura.ctrl_mov before all neura dialect operations.}];
let constructor = "neura::createInsertCtrlMovPass()";
}

// Declares the transform-ctrl-to-data-flow pass (constructor implemented in
// TransformCtrlToDataFlowPass.cpp).
def TransformCtrlToDataFlow : Pass<"transform-ctrl-to-data-flow", "ModuleOp"> {
// Fixed: summary was copy-pasted from InsertCtrlMov ("Inserts ctrl move
// operations") and did not describe this pass.
let summary = "Transforms control flow into predicate-based data flow in the Neura dialect";
let description =
[{Transform ctrl to predicate-based data flow.}];
let constructor = "neura::createTransformCtrlToDataFlowPass()";
}

#endif // NEURA_PASSES_TD
18 changes: 18 additions & 0 deletions lib/Conversion/LlvmToNeura/LlvmToNeuraPass.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,23 @@ struct LlvmCondBrToNeuraCondBr : public OpRewritePattern<LLVM::CondBrOp> {
}
};

// Lowers llvm.br to neura.br, preserving the successor block and the
// operands forwarded to it as block arguments.
struct LlvmBrToNeuraBr : public OpRewritePattern<LLVM::BrOp> {
  using OpRewritePattern::OpRewritePattern;

  LogicalResult matchAndRewrite(mlir::LLVM::BrOp op,
                                PatternRewriter &rewriter) const override {
    // Rebuild the branch as the equivalent Neura op: same forwarded
    // operands, same destination block.
    rewriter.replaceOpWithNewOp<neura::Br>(op, op.getDestOperands(),
                                           op.getDest());
    return success();
  }
};

struct LlvmReturnToNeuraReturn : public OpRewritePattern<LLVM::ReturnOp> {
using OpRewritePattern::OpRewritePattern;

Expand Down Expand Up @@ -232,6 +249,7 @@ struct LowerLlvmToNeuraPass
patterns.add<LlvmLoadToNeuraLoad>(&getContext());
patterns.add<LlvmStoreToNeuraStore>(&getContext());
patterns.add<LlvmCondBrToNeuraCondBr>(&getContext());
patterns.add<LlvmBrToNeuraBr>(&getContext());
patterns.add<LlvmReturnToNeuraReturn>(&getContext());

FrozenRewritePatternSet frozen(std::move(patterns));
Expand Down
3 changes: 1 addition & 2 deletions lib/NeuraDialect/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,4 @@ add_mlir_dialect_library(MLIRNeura
MLIRSupport
)

add_subdirectory(Transforms)
# add_subdirectory(Conversion)
add_subdirectory(Transforms)
10 changes: 4 additions & 6 deletions lib/NeuraDialect/Transforms/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,11 @@ get_property(dialect_libs GLOBAL PROPERTY MLIR_DIALECT_LIBS)

add_mlir_library(
MLIRNeuraTransforms
InsertMovPass.cpp
InsertDataMovPass.cpp
InsertCtrlMovPass.cpp
FusePatternsPass.cpp
AssignAcceleratorPass.cpp
TransformCtrlToDataFlowPass.cpp

DEPENDS
MLIRNeuraTransformsIncGen
Expand All @@ -15,9 +17,5 @@ add_mlir_library(
MLIRSupport
MLIRTransforms
MLIRNeura
# MLIRNeuraArithToNeuraPass
# MLIRNeuraLlvmToNeuraPass
${dialect_libs}
)
# add_subdirectory(ArithToNeura)
# add_subdirectory(LlvmToNeura)
)
Original file line number Diff line number Diff line change
Expand Up @@ -9,31 +9,31 @@

using namespace mlir;

#define GEN_PASS_DEF_INSERTMOV
#define GEN_PASS_DEF_InsertCtrlMov
#include "NeuraDialect/NeuraPasses.h.inc"

namespace {
struct InsertMovForNeuraOps : public RewritePattern {
InsertMovForNeuraOps(MLIRContext *context)
struct InsertCtrlMovForNeuraOps : public RewritePattern {
InsertCtrlMovForNeuraOps(MLIRContext *context)
: RewritePattern(/*matchAnyOpTypeTag=*/MatchAnyOpTypeTag(), /*benefit=*/1, context) {}

LogicalResult matchAndRewrite(Operation *op, PatternRewriter &rewriter) const override {
if (op->getDialect()->getNamespace() != "neura" ||
isa<neura::MovOp>(op)) {
isa<neura::CtrlMovOp>(op)) {
return failure();
}

// Skips ops that already being inserted mov on the operands.
bool allInputsAreMov = llvm::all_of(op->getOperands(), [](Value v) {
return isa_and_nonnull<neura::MovOp>(v.getDefiningOp());
return isa_and_nonnull<neura::CtrlMovOp>(v.getDefiningOp());
});
if (allInputsAreMov) {
return failure();
}

// Makes sure none of the operand has being processed.
bool hasAnyMovInput = llvm::any_of(op->getOperands(), [](Value v) {
return isa_and_nonnull<neura::MovOp>(v.getDefiningOp());
return isa_and_nonnull<neura::CtrlMovOp>(v.getDefiningOp());
});
assert(!hasAnyMovInput && "Unexpected: operand already wrapped in neura.mov");

Expand All @@ -42,7 +42,7 @@ struct InsertMovForNeuraOps : public RewritePattern {
// Wraps operands in mov.
SmallVector<Value> newOperands;
for (Value operand : op->getOperands()) {
auto mov = rewriter.create<neura::MovOp>(loc, operand.getType(), operand);
auto mov = rewriter.create<neura::CtrlMovOp>(loc, operand.getType(), operand);
newOperands.push_back(mov);
}

Expand All @@ -58,13 +58,13 @@ struct InsertMovForNeuraOps : public RewritePattern {
}
};

struct InsertMovPass
: public PassWrapper<InsertMovPass, OperationPass<ModuleOp>> {
MLIR_DEFINE_EXPLICIT_INTERNAL_INLINE_TYPE_ID(InsertMovPass)
struct InsertCtrlMovPass
: public PassWrapper<InsertCtrlMovPass, OperationPass<ModuleOp>> {
MLIR_DEFINE_EXPLICIT_INTERNAL_INLINE_TYPE_ID(InsertCtrlMovPass)

StringRef getArgument() const override { return "insert-mov"; }
StringRef getArgument() const override { return "insert-ctrl-mov"; }
StringRef getDescription() const override {
return "Insert neura.mov before and after all neura dialect operations.";
return "Insert neura.ctrl_mov before all neura dialect operations.";
}

void getDependentDialects(DialectRegistry &registry) const override {
Expand All @@ -73,7 +73,7 @@ struct InsertMovPass

void runOnOperation() override {
RewritePatternSet patterns(&getContext());
patterns.add<InsertMovForNeuraOps>(&getContext());
patterns.add<InsertCtrlMovForNeuraOps>(&getContext());
FrozenRewritePatternSet frozen(std::move(patterns));

ModuleOp module_op = getOperation();
Expand All @@ -96,8 +96,8 @@ struct InsertMovPass
namespace mlir {
namespace neura {

std::unique_ptr<Pass> createInsertMovPass() {
return std::make_unique<InsertMovPass>();
std::unique_ptr<Pass> createInsertCtrlMovPass() {
return std::make_unique<InsertCtrlMovPass>();
}

} // namespace neura
Expand Down
104 changes: 104 additions & 0 deletions lib/NeuraDialect/Transforms/InsertDataMovPass.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
#include "NeuraDialect/NeuraDialect.h"
#include "NeuraDialect/NeuraOps.h"
#include "NeuraDialect/NeuraPasses.h"
#include "mlir/Dialect/Func/IR/FuncOps.h"
#include "mlir/Dialect/LLVMIR/LLVMDialect.h"
#include "mlir/IR/PatternMatch.h"
#include "mlir/Pass/Pass.h"
#include "mlir/Transforms/GreedyPatternRewriteDriver.h"

using namespace mlir;

// Pulls in the tablegen'd pass definitions. Fixed: mlir-tblgen emits the
// guard macro as the pass def name in ALL CAPS (GEN_PASS_DEF_INSERTDATAMOV,
// matching the GEN_PASS_DEF_INSERTMOV spelling this file replaced); the
// mixed-case GEN_PASS_DEF_InsertDataMov silently enabled nothing.
#define GEN_PASS_DEF_INSERTDATAMOV
#include "NeuraDialect/NeuraPasses.h.inc"

namespace {
// Rewrite pattern that wraps every operand of a Neura-dialect op in a
// neura.data_mov, making data delivery between operations explicit.
struct InsertDataMovForNeuraOps : public RewritePattern {
  InsertDataMovForNeuraOps(MLIRContext *context)
      : RewritePattern(/*matchAnyOpTypeTag=*/MatchAnyOpTypeTag(), /*benefit=*/1,
                       context) {}

  LogicalResult matchAndRewrite(Operation *op,
                                PatternRewriter &rewriter) const override {
    // Only rewrites Neura ops, and never wraps a data_mov itself — doing so
    // would recurse forever under the greedy driver.
    if (op->getDialect()->getNamespace() != "neura" ||
        isa<neura::DataMovOp>(op)) {
      return failure();
    }

    // Already processed: every operand is produced by a data_mov. This is
    // vacuously true for zero-operand ops, which need no rewriting.
    bool allInputsAreMov = llvm::all_of(op->getOperands(), [](Value v) {
      return isa_and_nonnull<neura::DataMovOp>(v.getDefiningOp());
    });
    if (allInputsAreMov) {
      return failure();
    }

    // The pattern wraps all operands atomically, so a partially wrapped op
    // indicates a logic error.
    // Fixed: the assert message named neura.mov instead of neura.data_mov,
    // and the variable was unused (warning) in NDEBUG builds.
    bool hasAnyMovInput = llvm::any_of(op->getOperands(), [](Value v) {
      return isa_and_nonnull<neura::DataMovOp>(v.getDefiningOp());
    });
    assert(!hasAnyMovInput &&
           "Unexpected: operand already wrapped in neura.data_mov");
    (void)hasAnyMovInput;

    Location loc = op->getLoc();

    // Routes every operand through a freshly created data_mov.
    SmallVector<Value> newOperands;
    for (Value operand : op->getOperands()) {
      auto mov =
          rewriter.create<neura::DataMovOp>(loc, operand.getType(), operand);
      newOperands.push_back(mov);
    }

    // Clones the op with the wrapped operands.
    // Fixed: successors must be copied explicitly — this any-op pattern also
    // matches terminators (neura.br / neura.cond_br), which would otherwise
    // lose their branch targets in the clone.
    OperationState state(loc, op->getName());
    state.addOperands(newOperands);
    state.addTypes(op->getResultTypes());
    state.addAttributes(op->getAttrs());
    state.addSuccessors(op->getSuccessors());

    Operation *newOp = rewriter.create(state);
    rewriter.replaceOp(op, newOp->getResults());
    return success();
  }
};

// Module pass that greedily applies InsertDataMovForNeuraOps to every
// region in the module, regardless of the enclosing function kind
// (mlir func, llvm func, ...).
struct InsertDataMovPass
    : public PassWrapper<InsertDataMovPass, OperationPass<ModuleOp>> {
  MLIR_DEFINE_EXPLICIT_INTERNAL_INLINE_TYPE_ID(InsertDataMovPass)

  StringRef getArgument() const override { return "insert-data-mov"; }
  StringRef getDescription() const override {
    return "Insert neura.data_mov before all neura dialect operations.";
  }

  void getDependentDialects(DialectRegistry &registry) const override {
    registry.insert<mlir::neura::NeuraDialect>();
  }

  void runOnOperation() override {
    RewritePatternSet patternSet(&getContext());
    patternSet.add<InsertDataMovForNeuraOps>(&getContext());
    FrozenRewritePatternSet frozenPatterns(std::move(patternSet));

    // Walks every op in the module and rewrites each of its regions.
    // (Region-less ops simply contribute an empty range here.)
    getOperation().walk([&](Operation *nested) {
      for (Region &region : nested->getRegions()) {
        if (failed(applyPatternsAndFoldGreedily(region, frozenPatterns))) {
          signalPassFailure();
        }
      }
    });
  }
};
} // namespace

namespace mlir {
namespace neura {

/// Factory for the insert-data-mov pass (declared in NeuraPasses.h).
std::unique_ptr<Pass> createInsertDataMovPass() {
  auto pass = std::make_unique<InsertDataMovPass>();
  return pass;
}

} // namespace neura
} // namespace mlir
Loading
Loading