Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion include/NeuraDialect/Mapping/BacktrackMapping/BacktrackMapping.h
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

#include "NeuraDialect/Mapping/MappingState.h"
#include "NeuraDialect/Mapping/MappingStrategy.h"
#include <climits>
#include <map>
#include <set>

Expand All @@ -15,7 +16,16 @@ class BacktrackMapping : public MappingStrategy {
bool map(std::vector<Operation *> &sorted_ops,
const Architecture &architecture,
MappingState &mapping_state) override;
std::string getName() const override { return "backtrack_mapping"; }
std::string getName() const override {
  // Reports a human-readable name derived from the configured search limits:
  // depth capped at 1 degenerates to a pure heuristic, fully unbounded search
  // is exhaustive, and anything in between is plain backtracking.
  const bool unlimited_locations = (max_location_to_try == INT_MAX);
  if (unlimited_locations && max_backtrack_depth == 1) {
    return "heuristic";
  }
  if (unlimited_locations && max_backtrack_depth == INT_MAX) {
    return "exhaustive";
  }
  return "backtrack";
}

private:
bool mapWithBacktrack(std::vector<Operation *> &sorted_ops,
Expand Down
21 changes: 0 additions & 21 deletions include/NeuraDialect/Mapping/GreedyMapping/GreedyMapping.h

This file was deleted.

13 changes: 5 additions & 8 deletions include/NeuraDialect/Mapping/MappingStrategy.h
Original file line number Diff line number Diff line change
Expand Up @@ -12,19 +12,16 @@ namespace neura {
class MappingStrategy {
public:
virtual ~MappingStrategy() = default;

// Applies the mapping strategy to map operations onto hardware
virtual bool map(std::vector<Operation*>& sorted_ops,
const Architecture& architecture,
MappingState& mapping_state) = 0;
virtual bool map(std::vector<Operation *> &sorted_ops,
const Architecture &architecture,
MappingState &mapping_state) = 0;

// Gets the name of this strategy
virtual std::string getName() const = 0;
};

// Factory function to create mapping strategies based on name
std::unique_ptr<MappingStrategy> createMappingStrategy(const std::string& name);

} // namespace neura
} // namespace mlir

Expand Down
1 change: 0 additions & 1 deletion lib/NeuraDialect/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ add_mlir_dialect_library(MLIRNeura
Mapping/mapping_util.cpp
Mapping/MappingState.cpp
Mapping/MappingStrategy.cpp
Mapping/GreedyMapping/GreedyMapping.cpp
Mapping/BacktrackMapping/BacktrackMapping.cpp
Architecture/Architecture.cpp

Expand Down
17 changes: 0 additions & 17 deletions lib/NeuraDialect/Mapping/GreedyMapping/GreedyMapping.cpp

This file was deleted.

62 changes: 45 additions & 17 deletions lib/NeuraDialect/Mapping/MappingState.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -121,10 +121,6 @@ MappingState::getCurrentStepLinks(MappingLoc loc) const {
"Current step links can only be queried for tiles");
std::vector<MappingLoc> current_step_links;
const int current_step = loc.time_step;
// llvm::errs() << "getCurrentStepLinks called for loc: "
// << loc.resource->getType() << "#" << loc.resource->getId()
// << " at t=" << current_step << "\n";
// assert(current_step < II * kMaxSteps && "Current step exceeds max steps");
if (!(current_step < II * kMaxSteps)) {
llvm::errs() << "Current step exceeds max steps: " << current_step
<< ", max steps: " << II * kMaxSteps << "\n";
Expand Down Expand Up @@ -186,7 +182,6 @@ void MappingState::dumpOpToLocs(llvm::raw_ostream &os) const {
<< " @t=" << loc.time_step << "\n";
}
}

os << "=== End ===\n";
}

Expand All @@ -198,23 +193,56 @@ void MappingState::encodeMappingState() {
std::string kind_str;
if (loc.resource->getKind() == ResourceKind::Tile) {
kind_str = "tile";
Tile *tile = dyn_cast<Tile>(loc.resource);
auto dict = mlir::DictionaryAttr::get(
ctx, {mlir::NamedAttribute(mlir::StringAttr::get(ctx, "resource"),
mlir::StringAttr::get(ctx, kind_str)),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "id"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
loc.resource->getId())),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "time_step"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
loc.time_step)),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "x"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
tile->getX())),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "y"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
tile->getY()))});
mapping_entries.push_back(dict);
} else if (loc.resource->getKind() == ResourceKind::Link) {
kind_str = "link";
auto dict = mlir::DictionaryAttr::get(
ctx, {mlir::NamedAttribute(mlir::StringAttr::get(ctx, "resource"),
mlir::StringAttr::get(ctx, kind_str)),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "id"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
loc.resource->getId())),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "time_step"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
loc.time_step))});
mapping_entries.push_back(dict);
} else {
kind_str = "unknown";
auto dict = mlir::DictionaryAttr::get(
ctx, {mlir::NamedAttribute(mlir::StringAttr::get(ctx, "resource"),
mlir::StringAttr::get(ctx, kind_str)),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "id"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
loc.resource->getId())),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "time_step"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
loc.time_step))});
mapping_entries.push_back(dict);
}
auto dict = mlir::DictionaryAttr::get(
ctx, {mlir::NamedAttribute(mlir::StringAttr::get(ctx, "resource"),
mlir::StringAttr::get(ctx, kind_str)),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "id"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
loc.resource->getId())),
mlir::NamedAttribute(
mlir::StringAttr::get(ctx, "time_step"),
mlir::IntegerAttr::get(mlir::IntegerType::get(ctx, 32),
loc.time_step))});
mapping_entries.push_back(dict);
}
op->setAttr("mapping_locs", mlir::ArrayAttr::get(ctx, mapping_entries));
}
Expand Down
18 changes: 1 addition & 17 deletions lib/NeuraDialect/Mapping/MappingStrategy.cpp
Original file line number Diff line number Diff line change
@@ -1,18 +1,2 @@
#include "NeuraDialect/Mapping/MappingStrategy.h"
#include "NeuraDialect/Mapping/GreedyMapping/GreedyMapping.h"
#include <memory>

namespace mlir {
namespace neura {

std::unique_ptr<MappingStrategy> createMappingStrategy(const std::string& name) {
if (name == "greedy_mapping") {
return std::make_unique<GreedyMapping>();
} else {
llvm::errs() << "Unsupported mapping strategy: " << name;
return nullptr;
}
}

} // namespace neura
} // namespace mlir
#include <memory>
24 changes: 7 additions & 17 deletions lib/NeuraDialect/Mapping/mapping_util.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -192,10 +192,12 @@ mlir::Operation *mlir::neura::getMaterializedBackwardUser(Operation *op) {

// Skip ctrl_mov users of reserve; return the first phi user.
for (Operation *user : reserve_op.getResult().getUsers()) {
if (isa<neura::CtrlMovOp>(user))
if (isa<neura::CtrlMovOp>(user)) {
continue; // skip ctrl_mov user
if (isa<neura::PhiOp>(user))
}
if (isa<neura::PhiOp>(user)) {
return user;
}
}
assert(false &&
"No materialized backward user (i.e., phi) found for ctrl_mov");
Expand Down Expand Up @@ -297,16 +299,11 @@ bool mlir::neura::tryRouteDataMove(Operation *mov_op, MappingLoc src_loc,
// arrive at the next iteration).
const int deadline_step =
dst_loc.time_step + (is_backward_move ? state.getII() : 0);
// llvm::errs() << "src_tile: " << src_tile->getId()
// << ", dst_tile: " << dst_tile->getId()
// << ", deadline_step: " << deadline_step << "\n";

// BFS-style search for a path from src_tile to dst_tile.
while (!queue.empty()) {
auto [current_tile, current_time, current_path] = queue.front();
queue.pop();
// llvm::errs() << "Visiting tile: " << current_tile->getId()
// << " at time: " << current_time << "\n";

if (current_tile == dst_tile) {
// Confirms path reaches the target tile no later than deadline step.
Expand Down Expand Up @@ -350,13 +347,9 @@ bool mlir::neura::tryRouteDataMove(Operation *mov_op, MappingLoc src_loc,

for (MappingLoc current_step_next_link :
state.getCurrentStepLinks({current_tile, current_time})) {
if (!state.isAvailableAcrossTime(current_step_next_link))
if (!state.isAvailableAcrossTime(current_step_next_link)) {
continue;

// llvm::errs() << "Exploring next link: "
// << current_step_next_link.resource->getType() << "#"
// << current_step_next_link.resource->getId()
// << " at t=" << current_step_next_link.time_step << "\n";
}
Link *next_link = dyn_cast<Link>(current_step_next_link.resource);
Tile *next_tile = next_link->getDstTile();
int next_time = current_time + 1;
Expand All @@ -367,9 +360,6 @@ bool mlir::neura::tryRouteDataMove(Operation *mov_op, MappingLoc src_loc,
std::vector<MappingLoc> extended_path = current_path;
extended_path.push_back(current_step_next_link);
queue.push({next_tile, next_time, std::move(extended_path)});
// llvm::errs() << "Added to queue: " << next_tile->getId()
// << " at time: " << next_time
// << " with path size: " << extended_path.size() << "\n";
}
}

Expand Down Expand Up @@ -707,7 +697,7 @@ bool mlir::neura::placeAndRoute(Operation *op, const MappingLoc &target_loc,
mapping_state.releaseRoute(routed_ctrl_mov);
}

for(Operation *routed_op : routed_operands) {
for (Operation *routed_op : routed_operands) {
llvm::errs() << "[DEBUG] Releasing route for routed operand: "
<< *routed_op << "\n";
mapping_state.releaseRoute(routed_op);
Expand Down
51 changes: 44 additions & 7 deletions lib/NeuraDialect/Transforms/MapToAcceleratorPass.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

#include "NeuraDialect/Architecture/Architecture.h"
#include "NeuraDialect/Mapping/BacktrackMapping/BacktrackMapping.h"
#include "NeuraDialect/Mapping/GreedyMapping/GreedyMapping.h"
#include "NeuraDialect/Mapping/MappingState.h"
#include "NeuraDialect/Mapping/mapping_util.h"
#include "NeuraDialect/NeuraDialect.h"
Expand Down Expand Up @@ -42,18 +41,56 @@ struct MapToAcceleratorPass
Option<std::string> mappingStrategy{
*this, "mapping-strategy",
llvm::cl::desc("Mapping strategy to use for mapping operations to the "
"accelerator. Options: greedy_mapping"),
llvm::cl::init("greedy_mapping")};
"accelerator. Options: exhaustive, "
"backtrack=max_loc,max_depth (default "
"max_loc=5, max_depth=3), heuristic"),
llvm::cl::init("backtrack")};

void runOnOperation() override {
ModuleOp module = getOperation();

StringRef mappingStrategy_stringRef(mappingStrategy.getValue());
// Creates a mapping strategy based on the provided option.
std::unique_ptr<MappingStrategy> mapping_strategy;
if (mappingStrategy == "greedy_mapping") {
mapping_strategy = std::make_unique<GreedyMapping>();
} else if (mappingStrategy == "backtrack_mapping") {
mapping_strategy = std::make_unique<BacktrackMapping>();
if (mappingStrategy_stringRef == "heuristic") {
mapping_strategy = std::make_unique<BacktrackMapping>(INT_MAX, 1);
} else if (mappingStrategy_stringRef == "exhaustive") {
mapping_strategy = std::make_unique<BacktrackMapping>(INT_MAX, INT_MAX);
} else if (mappingStrategy_stringRef == "backtrack") {
mapping_strategy = std::make_unique<BacktrackMapping>(
5, 3); // Randomly picked default values for max_loc and max_depth
} else if (mappingStrategy_stringRef.starts_with("backtrack=")) {
// Used for custom backtrack parameters.
// Example: "backtrack=5,3" means max_loc=5, max_depth=3
// Extracts the parameters after "backtrack=".
StringRef paramsRef =
mappingStrategy_stringRef.substr(strlen("backtrack="));
size_t comma_pos = paramsRef.find(',');

if (comma_pos != StringRef::npos) {
StringRef max_loc_str = paramsRef.substr(0, comma_pos);
StringRef max_depth_str = paramsRef.substr(comma_pos + 1);

int max_loc, max_depth;
if (!max_loc_str.getAsInteger(10, max_loc) &&
!max_depth_str.getAsInteger(10, max_depth)) {
mapping_strategy =
std::make_unique<BacktrackMapping>(max_loc, max_depth);
llvm::errs() << "[MapToAcceleratorPass] Use custom backtrack: "
<< "max_location_to_try=" << max_loc
<< ", max_backtrack_depth=" << max_depth << "\n";
} else {
llvm::errs()
<< "[MapToAcceleratorPass] Illegal backtrack parameters format: "
<< mappingStrategy << "\n";
return;
}
} else {
llvm::errs()
<< "[MapToAcceleratorPass] Illegal backtrack parameters format: "
<< mappingStrategy << "\n";
return;
}
} else {
llvm::errs() << "[MapToAcceleratorPass] Unsupported mapping strategy: "
<< mappingStrategy << "\n";
Expand Down
Loading