Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions include/NeuraDialect/Mapping/mapping_util.h
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,10 @@ OperationKind getOperationKindFromMlirOp(Operation *op);
// Returns true if the operation does not need CGRA tile placement.
bool is_non_materialized(Operation *op);

// Returns true if the operation is a materialized reserve user, i.e.,
// phi, invariant, carry.
bool isMaterializedReserveUser(Operation *op);

// Represents a recurrence cycle rooted at a reserve operation and closed by
// ctrl_mov.
struct RecurrenceCycle {
Expand Down
78 changes: 78 additions & 0 deletions include/NeuraDialect/NeuraOps.td
Original file line number Diff line number Diff line change
Expand Up @@ -489,4 +489,82 @@ def Neura_LoopControlOp : Op<NeuraDialect, "loop_control">{

// let assemblyFormat =
// " `(``parent_valid` `=` $parentValid `,` `start` `=` $start `,` `end` `=` $end `,` `step` `=` $step`)` attr-dict `:` type($parentValid) `,` type($start) `,` type($end) `,` type($step) `->` type($nextindex) `,` type($valid)";
}

// ----------------------------------------------------
// Defines operations for steering-control based DFG execution.
// ----------------------------------------------------

// Defines the true_steer operation.
// Steering op for dataflow execution: forwards the input token only while the
// condition is true; otherwise no output token is produced.
def Neura_TrueSteerOp : Op<NeuraDialect, "true_steer">{
let summary = "Conditionally pass a value when condition is true.";
let description = [{When the condition is true, the input value is passed to the output; otherwise, the output is empty.
Example:
%out = neura.true_steer %in, %cond : i32, i1 -> i32
}];

// NOTE(review): the result is the forwarded input, so its type is expected to
// match type($input); no trait enforces this yet — confirm before tightening.
let arguments = (ins AnyType:$input, AnyType:$condition);
let results = (outs AnyType:$output);

let assemblyFormat = "$input `,` $condition attr-dict `:` type($input) `,` type($condition) `->` type($output)";
}

// Defines the false_steer operation.
// Complement of true_steer: forwards the input token only while the condition
// is false; otherwise no output token is produced.
def Neura_FalseSteerOp : Op<NeuraDialect, "false_steer">{
let summary = "Conditionally pass a value when condition is false.";
let description = [{When the condition is false, the input value is passed to the output; otherwise, the output is empty.
Example:
%out = neura.false_steer %in, %cond : i32, i1 -> i32
}];

// NOTE(review): as with true_steer, the result type is expected to match
// type($input); no trait enforces this yet — confirm before tightening.
let arguments = (ins AnyType:$input, AnyType:$condition);
let results = (outs AnyType:$output);

let assemblyFormat = "$input `,` $condition attr-dict `:` type($input) `,` type($condition) `->` type($output)";
}

// Defines the carry operation.
// Closes a loop-carried dependency in the dataflow graph: the first firing
// emits the initial value, subsequent firings emit the carried value while the
// condition holds (see description below).
def Neura_CarryOp : Op<NeuraDialect, "carry">{
let summary = "Carry state across iterations.";
let description = [{
Three inputs for carry operation:
- initial value: used in the first execution.
- condition: determines whether to use the carried value.
- carried value: used when condition is true.
The output is the initial value when it is executed for the first time, otherwise it is the carried value when the condition is true.
Example:
%out = neura.carry %init, %cond, %carry_val : i32, i1, i32 -> i32
}];

// NOTE(review): initial, carried, and result are presumably the same type;
// no trait enforces this — confirm before tightening to SameTypeOperands.
let arguments = (ins AnyType:$initial, AnyType:$condition, AnyType:$carried);
let results = (outs AnyType:$result);
let assemblyFormat = "$initial `,` $condition `,` $carried attr-dict `:` type($initial) `,` type($condition) `,` type($carried) `->` type($result)";
}

// Defines the merge operation.
// Select-like steering op: takes a condition plus a true-path and a
// false-path value and yields one result chosen by the condition.
def Neura_MergeOp : Op<NeuraDialect, "merge">{
let summary = "Merge multiple inputs into one output.";
let description = [{
Merges multiple input values into a single output value based on the condition.
Example:
%out = neura.merge %cond, %in1, %in2 : i1, i32, i32 -> i32
}];

// NOTE(review): true_value, false_value, and result are presumably the same
// type; no trait enforces this — confirm before tightening.
let arguments = (ins AnyType:$condition, AnyType:$true_value, AnyType:$false_value);
let results = (outs AnyType:$result);

let assemblyFormat = "$condition `,` $true_value `,` $false_value attr-dict `:` type($condition) `,` type($true_value) `,` type($false_value) `->` type($result)";
}

// Defines the invariant operation.
// Degenerate carry: repeatedly re-emits the loop-invariant initial value each
// iteration while the condition holds.
def Neura_InvariantOp : Op<NeuraDialect, "invariant">{
let summary = "Invariant value across DFG execution.";
let description = [{
Invariant operation is a subset of carry operation where the output is always the initial value.
Example:
%out = neura.invariant %init, %cond : i32, i1 -> i32
}];
let arguments = (ins AnyType:$initial, AnyType:$condition);
let results = (outs AnyType:$result);
let assemblyFormat = "$initial `,` $condition attr-dict `:` type($initial) `,` type($condition) `->` type($result)";
}
2 changes: 2 additions & 0 deletions include/NeuraDialect/NeuraPasses.h
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ std::unique_ptr<mlir::Pass> createMapToAcceleratorPass();
std::unique_ptr<mlir::Pass> createGenerateCodePass();
std::unique_ptr<mlir::Pass> createCanonicalizeLiveInPass();
std::unique_ptr<mlir::Pass> createPromoteFuncArgToConstPass();
std::unique_ptr<mlir::Pass> createTransformToSteerControlPass();
std::unique_ptr<mlir::Pass> createRemovePredicatedTypePass();

// ====================================
// Optimization Passes
Expand Down
19 changes: 19 additions & 0 deletions include/NeuraDialect/NeuraPasses.td
Original file line number Diff line number Diff line change
Expand Up @@ -116,4 +116,23 @@ def FoldConstant : Pass<"fold-constant", "ModuleOp"> {
let constructor = "neura::createFoldConstantPass()";
}

// Per-function pass that lowers Neura control flow to steering-based
// dataflow ops (true_steer, false_steer, carry, merge); registered via
// neura::createTransformToSteerControlPass().
def TransformToSteerControl : Pass<"transform-to-steer-control", "func::FuncOp"> {
let summary = "Transform control flow into data flow using steer control";
let description = [{
This pass transforms Neura control flow graphs (CDFG) into pure dataflow graphs (DFG)
using steer control operations like true_steer, false_steer, carry, and merge.
Unlike predication-based approaches, steer control explicitly directs data through
different paths based on conditions.
}];
let constructor = "neura::createTransformToSteerControlPass()";
}

// Module-level cleanup pass that strips neura.PredicatedValue wrappers,
// restoring the underlying regular types; registered via
// neura::createRemovePredicatedTypePass().
def RemovePredicatedType : Pass<"remove-predicated-type", "ModuleOp"> {
let summary = "Removes predicated types from Neura dialect operations";
let description = [{
This pass removes predicated types from Neura dialect operations,
converting them back to regular types.
}];
let constructor = "neura::createRemovePredicatedTypePass()";
}
#endif // NEURA_PASSES_TD
35 changes: 22 additions & 13 deletions lib/NeuraDialect/Mapping/mapping_util.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -316,12 +316,9 @@ mlir::Operation *mlir::neura::getMaterializedBackwardUser(Operation *op) {
"Expected the user of ctrl_mov target to be a reserve operation");
auto reserve_op = dyn_cast<neura::ReserveOp>(target.getDefiningOp());

// Skip ctrl_mov users of reserve; return the first phi user.
// Skip ctrl_mov users of reserve; return the first materialized user.
for (Operation *user : reserve_op.getResult().getUsers()) {
if (isa<neura::CtrlMovOp>(user)) {
continue; // skip ctrl_mov user
}
if (isa<neura::PhiOp>(user)) {
if (isMaterializedReserveUser(user)) {
return user;
}
}
Expand Down Expand Up @@ -702,6 +699,19 @@ bool mlir::neura::canReachLocInTime(const MappingLoc &src_loc,
return false;
}

// Returns true when `user` is a reserve user that occupies a CGRA tile
// (i.e., must be materialized during placement): phi, invariant, or carry.
// Other reserve users (e.g. ctrl_mov) are non-materialized and return false.
bool mlir::neura::isMaterializedReserveUser(Operation *user) {
  // Variadic isa<> performs all three type checks in a single call.
  return isa<neura::PhiOp, neura::InvariantOp, neura::CarryOp>(user);
}

void mlir::neura::updateAward(std::map<MappingLoc, int> &locs_with_award,
MappingLoc loc, int award) {
// Updates the award of the top element in the priority queue.
Expand Down Expand Up @@ -752,8 +762,9 @@ mlir::neura::calculateAward(Operation *op, std::set<Operation *> &critical_ops,
assert(ctrl_mov && "Expected user to be a CtrlMovOp");
mlir::Operation *materialized_backward_op =
getMaterializedBackwardUser(ctrl_mov);
assert(isa<neura::PhiOp>(materialized_backward_op) &&
"Expected materialized operation of ctrl_mov to be a PhiOp");
assert(isMaterializedReserveUser(materialized_backward_op) &&
"Expected materialized operation of ctrl_mov to be a "
"PhiOp/InvariantOp/CarryOp.");
backward_users.push_back(materialized_backward_op);
}

Expand Down Expand Up @@ -794,10 +805,7 @@ mlir::neura::calculateAward(Operation *op, std::set<Operation *> &critical_ops,
award += op->getOperands().size() -
getPhysicalHops(producers, tile, mapping_state);
}
// llvm::errs() << "[DEBUG] checking range: "
// << earliest_start_time_step << " to "
// << latest_end_time_step << " for tile: "
// << tile->getType() << "#" << tile->getId() << "\n";

for (int t = earliest_start_time_step; t < latest_end_time_step; t += 1) {
MappingLoc tile_loc_candidate = {tile, t};
// If the tile at time `t` is available, we can consider it for mapping.
Expand Down Expand Up @@ -942,8 +950,9 @@ bool mlir::neura::placeAndRoute(Operation *op, const MappingLoc &target_loc,
assert(ctrl_mov && "Expected user to be a CtrlMovOp");
mlir::Operation *materialized_backward_op =
getMaterializedBackwardUser(ctrl_mov);
assert(isa<neura::PhiOp>(materialized_backward_op) &&
"Expected materialized operation of ctrl_mov to be a PhiOp");
assert(isMaterializedReserveUser(materialized_backward_op) &&
"Expected materialized operation of ctrl_mov to be a "
"PhiOp/InvariantOp/CarryOp");
// Gets the last location of the materialized operation.
MappingLoc backward_loc =
mapping_state.getAllLocsOfOp(materialized_backward_op).back();
Expand Down
2 changes: 2 additions & 0 deletions lib/NeuraDialect/Transforms/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ add_mlir_library(
CanonicalizeLiveInPass.cpp
CanonicalizeCastPass.cpp
PromoteFuncArgToConstPass.cpp
TransformToSteerControlPass.cpp
RemovePredicatedTypePass.cpp

DEPENDS
MLIRNeuraTransformsIncGen
Expand Down
6 changes: 3 additions & 3 deletions lib/NeuraDialect/Transforms/InsertDataMovPass.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -77,9 +77,9 @@ struct InsertDataMovForNeuraOps : public RewritePattern {
SmallVector<Value> new_operands;
for (Value operand : op->getOperands()) {
Operation *producer = operand.getDefiningOp();
// Skips adding mov for neura.reserve -> neura.phi.
if (isa<neura::PhiOp>(op) && producer &&
isa<neura::ReserveOp>(producer)) {

// Skips adding mov for any operand that comes from a reserve op.
if (producer && isa<neura::ReserveOp>(producer)) {
new_operands.push_back(operand);
continue;
}
Expand Down
6 changes: 3 additions & 3 deletions lib/NeuraDialect/Transforms/LeveragePredicatedValuePass.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -45,15 +45,15 @@ struct LeveragePredicatedValuePass
}

for (BlockArgument arg : block->getArguments()) {
Type origType = arg.getType();
Type orig_type = arg.getType();

// Avoid double-wrapping if already predicated
if (llvm::isa<neura::PredicatedValue>(origType)) {
if (llvm::isa<neura::PredicatedValue>(orig_type)) {
continue;
}

auto predicated_type = neura::PredicatedValue::get(
func.getContext(), origType,
func.getContext(), orig_type,
IntegerType::get(func.getContext(), 1));
arg.setType(predicated_type);
}
Expand Down
68 changes: 44 additions & 24 deletions lib/NeuraDialect/Transforms/MapToAcceleratorPass.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -63,41 +63,41 @@ struct MapToAcceleratorPass
void runOnOperation() override {
ModuleOp module = getOperation();
std::unique_ptr<Mapping> mapping_strategy;
StringRef mappingStrategy_stringRef(mappingStrategy.getValue());
StringRef backtrackConfig_stringRef(backtrackConfig.getValue());
StringRef mappingMode_stringRef(mappingMode.getValue());
bool is_spatial_only = (mappingMode_stringRef == "spatial-only");
if (is_spatial_only || mappingMode_stringRef == "spatial-temporal" ||
mappingMode_stringRef.empty()) {
if (mappingMode_stringRef.empty()) {
mappingMode_stringRef = "spatial-temporal";
StringRef mapping_strategy_stringRef(mappingStrategy.getValue());
StringRef backtrack_config_stringRef(backtrackConfig.getValue());
StringRef mapping_mode_stringRef(mappingMode.getValue());
bool is_spatial_only = (mapping_mode_stringRef == "spatial-only");
if (is_spatial_only || mapping_mode_stringRef == "spatial-temporal" ||
mapping_mode_stringRef.empty()) {
if (mapping_mode_stringRef.empty()) {
mapping_mode_stringRef = "spatial-temporal";
}
llvm::errs() << "[MapToAcceleratorPass] Using Mapping Mode: "
<< mappingMode_stringRef << "\n";
<< mapping_mode_stringRef << "\n";
} else {
llvm::errs() << "[MapToAcceleratorPass] Unsupported mapping mode: "
<< mappingMode_stringRef << "\n";
<< mapping_mode_stringRef << "\n";
return;
}

if (mappingStrategy_stringRef == "heuristic" ||
mappingStrategy_stringRef.empty()) {
mappingStrategy_stringRef = "heuristic";
if (mapping_strategy_stringRef == "heuristic" ||
mapping_strategy_stringRef.empty()) {
mapping_strategy_stringRef = "heuristic";

if (backtrackConfig_stringRef == "simple") {
if (backtrack_config_stringRef == "simple") {
mapping_strategy = std::make_unique<HeuristicMapping>(1, 1);
} else if (backtrackConfig_stringRef == "greedy") {
} else if (backtrack_config_stringRef == "greedy") {
mapping_strategy = std::make_unique<HeuristicMapping>(INT_MAX, 1);
} else if (backtrackConfig_stringRef == "exhaustive") {
} else if (backtrack_config_stringRef == "exhaustive") {
mapping_strategy = std::make_unique<HeuristicMapping>(INT_MAX, INT_MAX);
} else if (backtrackConfig_stringRef == "customized") {
} else if (backtrack_config_stringRef == "customized") {
mapping_strategy = std::make_unique<HeuristicMapping>(5, 3);
} else if (backtrackConfig_stringRef.starts_with("customized=")) {
} else if (backtrack_config_stringRef.starts_with("customized=")) {
// Used for custom backtrack parameters.
// Example: "customized=5,3" means max_loc=5, max_depth=3
// Extracts the parameters after "customized=".
StringRef paramsRef =
backtrackConfig_stringRef.substr(strlen("customized="));
backtrack_config_stringRef.substr(strlen("customized="));
size_t comma_pos = paramsRef.find(',');

if (comma_pos != StringRef::npos) {
Expand All @@ -116,19 +116,19 @@ struct MapToAcceleratorPass
} else {
llvm::errs() << "[MapToAcceleratorPass] Illegal customized "
"parameters format: "
<< backtrackConfig_stringRef << "\n";
<< backtrack_config_stringRef << "\n";
return;
}
} else {
llvm::errs()
<< "[MapToAcceleratorPass] Illegal customized parameters format: "
<< backtrackConfig_stringRef << "\n";
<< backtrack_config_stringRef << "\n";
return;
}
}
} else {
llvm::errs() << "[MapToAcceleratorPass] Unsupported mapping strategy: "
<< mappingStrategy_stringRef << "\n";
<< mapping_strategy_stringRef << "\n";
return;
}

Expand All @@ -139,6 +139,26 @@ struct MapToAcceleratorPass
return;
}

// Checks the dataflow IR mode.
auto dataflow_mode_attr =
func->getAttrOfType<StringAttr>("dataflow_mode");
bool is_steering_mode =
(dataflow_mode_attr && dataflow_mode_attr.getValue() == "steering");

// If steering mode, enforce spatial-only mapping.
if (is_steering_mode) {
if (mapping_mode_stringRef != "spatial-only") {
func.emitError() << "Steering IR mode requires spatial-only mapping, "
<< "but got mapping mode: "
<< mapping_mode_stringRef;
signalPassFailure();
return;
}
llvm::errs() << "[MapToAcceleratorPass] Using spatial-only mapping for "
"steering mode function: "
<< func.getName() << "\n";
}

// Collects and reports recurrence cycles found in the function.
auto recurrence_cycles = collectRecurrenceCycles(func);
std::set<Operation *> critical_ops;
Expand Down Expand Up @@ -228,9 +248,9 @@ struct MapToAcceleratorPass
IntegerAttr::get(IntegerType::get(ctx, 32),
architecture.getHeight())),
NamedAttribute(StringAttr::get(ctx, "mapping_strategy"),
StringAttr::get(ctx, mappingStrategy_stringRef)),
StringAttr::get(ctx, mapping_strategy_stringRef)),
NamedAttribute(StringAttr::get(ctx, "mapping_mode"),
StringAttr::get(ctx, mappingMode_stringRef)),
StringAttr::get(ctx, mapping_mode_stringRef)),
NamedAttribute(StringAttr::get(ctx, "compiled_ii"),
IntegerAttr::get(IntegerType::get(ctx, 32), ii)),
NamedAttribute(
Expand Down
Loading