Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 23 additions & 2 deletions include/NeuraDialect/Architecture/Architecture.h
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,12 @@
namespace mlir {
namespace neura {

// Enum for identifying resource type.
// Discriminator returned by BasicResource::getKind() and consumed by the
// LLVM-style RTTI hooks (classof) to tell concrete subclasses apart.
enum class ResourceKind {
  Tile = 0,
  Link = 1,
};

//===----------------------------------------------------------------------===//
// BasicResource: abstract base class for Tile, Link, etc.
//===----------------------------------------------------------------------===//
Expand All @@ -20,6 +26,7 @@ class BasicResource {
virtual ~BasicResource() = default;
virtual int getId() const = 0;
virtual std::string getType() const = 0;
virtual ResourceKind getKind() const = 0;
};

//===----------------------------------------------------------------------===//
Expand All @@ -37,6 +44,12 @@ class Tile : public BasicResource {
int getId() const override;
std::string getType() const override { return "tile"; }

ResourceKind getKind() const override { return ResourceKind::Tile; }

static bool classof(const BasicResource *res) {
  // RTTI hook used by llvm::isa/dyn_cast; a null pointer is never a Tile.
  if (res == nullptr)
    return false;
  return res->getKind() == ResourceKind::Tile;
}

int getX() const;
int getY() const;

Expand Down Expand Up @@ -64,8 +77,14 @@ class Link : public BasicResource {
Link(int id);

int getId() const override;

std::string getType() const override { return "link"; }

ResourceKind getKind() const override { return ResourceKind::Link; }

static bool classof(const BasicResource *res) {
  // RTTI hook used by llvm::isa/dyn_cast; a null pointer is never a Link.
  if (res == nullptr)
    return false;
  return res->getKind() == ResourceKind::Link;
}
Tile* getSrcTile() const;
Tile* getDstTile() const;

Expand All @@ -83,7 +102,8 @@ struct PairHash {
}
};

/// Describes the entire CGRA architecture.
// Describes the CGRA architecture template.
// TODO: Model architecture in detail (e.g., registers, ports).
class Architecture {
public:
Architecture(int width, int height);
Expand All @@ -98,8 +118,9 @@ class Architecture {
std::vector<Link*> getAllLinks() const;

private:
// TODO: Model architecture in detail, e.g., ports, registers, crossbars, etc.
// https://github.com/coredac/dataflow/issues/52.
std::vector<std::unique_ptr<Tile>> tile_storage;
// std::vector<Tile*> tiles;
std::vector<std::unique_ptr<Link>> link_storage;
std::unordered_map<int, Tile*> id_to_tile;
std::unordered_map<std::pair<int, int>, Tile*, PairHash> coord_to_tile;
Expand Down
68 changes: 48 additions & 20 deletions include/NeuraDialect/Mapping/MappingState.h
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@
#define NEURA_MAPPING_STATE_H

#include "mlir/IR/Operation.h"
#include "NeuraDialect/Architecture/Architecture.h" // for BasicResource
#include "NeuraDialect/Architecture/Architecture.h"
#include "llvm/Support/raw_ostream.h"
#include <unordered_map>
#include <unordered_set>
#include <vector>
Expand All @@ -19,6 +20,12 @@ struct MappingLoc {
bool operator==(const MappingLoc &other) const {
  // Two locations are equal only when both the resource pointer and the
  // time step agree.
  if (resource != other.resource)
    return false;
  return time_step == other.time_step;
}

bool operator<(const MappingLoc &other) const {
  // Strict weak ordering: primarily by time step, ties broken by resource id.
  // Enables MappingLoc to serve as a key in ordered containers (std::set/map).
  if (time_step == other.time_step)
    return resource->getId() < other.resource->getId();
  return time_step < other.time_step;
}
};

} // namespace neura
Expand All @@ -43,44 +50,65 @@ class MappingState {
public:
MappingState(const Architecture &arch, int II);
// Binds a (tile/link, time_step) location to an operation.
void bindOp(MappingLoc loc, Operation *op);
bool bindOp(const MappingLoc &loc, Operation *op);

// Unbinds an operation from its (tile/link, time_step) location,
// which is useful for backtracking.
void unbindOp(Operation *op);

// Checks if a (tile/link, time_step) is available (unoccupied).
bool isAvailable(const MappingLoc &loc) const;
// Note that the check is performed in II granularity.
// For example, if II is 4, and we want to check (tile 2, step 5), then
// it will check (tile 2, step 1), (tile 2, step 5), (tile 2, step 9), etc.
bool isAvailableAcrossTime(const MappingLoc &loc) const;

// Gets the operation at a specific (tile/link, time_step) location.
std::optional<Operation*> getOpAt(MappingLoc loc) const;

// Counts the number of operations at a specific resource across time steps.
int countOpsAtResource(BasicResource *resource) const;

// Gets all MRRG nodes.
const std::unordered_set<MappingLoc> &getAllLocs() const;
const std::set<MappingLoc> &getAllLocs() const;

// Gets all MRRG nodes allocated to a given op.
const std::vector<MappingLoc> &getAllLocsOfOp(Operation *op) const;

// Reserves links for a move operation.
void reserveRoute(Operation *op, ArrayRef<MappingLoc> path);

// Releases links for a move operation.
void releaseRoute(Operation *op);

// Gets neighboring tiles on next step of a given MappingLoc.
const std::vector<MappingLoc> &getNextStepTiles(MappingLoc loc) const;
std::vector<MappingLoc> getNextStepTiles(MappingLoc loc) const;

// Gets neighboring links on next step of a given MappingLoc.
const std::vector<MappingLoc> &getNextStepLinks(MappingLoc loc) const;
// // Gets neighboring links on next step of a given MappingLoc.
// const std::vector<MappingLoc> &getNextStepLinks(MappingLoc loc) const;

// Gets neighboring tiles on current step of a given MappingLoc.
const std::vector<MappingLoc> &getCurrentStepTiles(MappingLoc loc) const;
// // Gets neighboring tiles on current step of a given MappingLoc.
// const std::vector<MappingLoc> &getCurrentStepTiles(MappingLoc loc) const;

// Gets neighboring links on current step of a given MappingLoc.
const std::vector<MappingLoc> &getCurrentStepLinks(MappingLoc loc) const;
std::vector<MappingLoc> getCurrentStepLinks(MappingLoc loc) const;

// Gets the target initiation interval (II) for the mapping.
int getII() const { return II; }

// Embeds the mapping states onto the mapped operations.
void encodeMappingState();

void dumpOpToLocs(llvm::raw_ostream &os = llvm::errs()) const;

private:
// Initiation interval.
int II;
std::unordered_set<MappingLoc> all_locs;
// current and next step tiles and links for a given MappingLoc. Note that
// the key MappingLoc is either a pair of (tile, time_step) or (link, time_step).
std::unordered_map<MappingLoc, std::vector<MappingLoc>> next_step_tiles;
std::unordered_map<MappingLoc, std::vector<MappingLoc>> next_step_links;
std::unordered_map<MappingLoc, std::vector<MappingLoc>> current_step_tiles;
std::unordered_map<MappingLoc, std::vector<MappingLoc>> current_step_links;

std::unordered_map<MappingLoc, Operation*> loc_to_op;
std::unordered_set<MappingLoc> occupied_locs;
static constexpr int kMaxSteps = 10;

std::set<MappingLoc> all_locs;
std::set<MappingLoc> occupied_locs;
std::map<MappingLoc, Operation*> loc_to_op;
std::map<Operation*, std::vector<MappingLoc>> op_to_locs;
};

} // namespace neura
Expand Down
66 changes: 66 additions & 0 deletions include/NeuraDialect/Mapping/mapping_util.h
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

#include "mlir/IR/Operation.h"
#include "NeuraDialect/Architecture/Architecture.h"
#include "NeuraDialect/Mapping/MappingState.h"

namespace mlir {
namespace neura {
Expand All @@ -18,5 +19,70 @@ SmallVector<RecurrenceCycle, 4> collectRecurrenceCycles(Operation *func_op);
// Calculates ResMII: ceil(#ops / #tiles).
int calculateResMii(Operation *func_op, const Architecture &architecture);

// Returns topologically sorted operations in func_op.
std::vector<Operation *> getTopologicallySortedOps(Operation *func_op);

Operation* getMaterializedProducer(Value operand);

// Collects the real users of an operation, excluding ctrl_mov and data_mov.
llvm::SmallVector<mlir::Operation *> getMaterializedUserOps(Operation *op);

// Gets the last materialized backward user of an operation, which is expected
// to be a phi operation.
Operation *getMaterializedBackwardUser(Operation *op);

// Attempts to map a function operation to the accelerator using heuristics.
bool tryHeuristicMapping(std::vector<Operation *> &sorted_ops,
const Architecture &architecture,
MappingState &mapping_state);

// Attempts to route a data move operation from src_loc to dst_loc.
bool tryRouteDataMove(Operation *mov,
MappingLoc src_loc,
MappingLoc dst_loc,
bool is_backward_move,
const MappingState &mapping_state,
std::vector<MappingLoc> &path_out);

bool tryRouteForwardMove(Operation *mov_op,
MappingLoc src_loc,
MappingLoc dst_loc,
const MappingState &state,
std::vector<MappingLoc> &path_out);

bool tryRouteBackwardMove(Operation *mov_op,
MappingLoc src_loc,
MappingLoc dst_loc,
const MappingState &state,
std::vector<MappingLoc> &path_out);

// Calculates the cost of mapping locations for a given op, the returned locations
// are sorted based on the cost.
std::vector<MappingLoc> calculateCost(Operation *op, const MappingState &mapping_state);

// Gets the ctrl_mov users of an operation, empty vector is returned if no ctrl_mov users found.
llvm::SmallVector<Operation *> getCtrlMovUsers(Operation *op);

// Maps a materialized operation to the accelerator, and routes the dataflow from
// the producers to the given op.
bool placeAndRoute(Operation *op, const MappingLoc &target_loc, MappingState &mapping_state);

std::vector<MappingLoc> calculateAward(Operation *op,
const Architecture &architecture,
const MappingState &mapping_state);

void updateAward(std::map<MappingLoc, int> &locs_with_award,
MappingLoc loc, int award);

bool canReachLocInTime(const MappingLoc &src_loc,
const MappingLoc &dst_loc,
int deadline_step,
const MappingState &mapping_state);

bool canReachLocInTime(const std::vector<Operation *> &producers,
const MappingLoc &target_loc,
int deadline_step,
const MappingState &mapping_state);

} // namespace neura
} // namespace mlir
1 change: 1 addition & 0 deletions include/NeuraDialect/NeuraPasses.h
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ std::unique_ptr<mlir::Pass> createAssignAcceleratorPass();
std::unique_ptr<mlir::Pass> createTransformCtrlToDataFlowPass();
std::unique_ptr<mlir::Pass> createLeveragePredicatedValuePass();
std::unique_ptr<mlir::Pass> createMapToAcceleratorPass();
std::unique_ptr<mlir::Pass> createGenerateCodePass();

#define GEN_PASS_REGISTRATION
#include "NeuraDialect/NeuraPasses.h.inc"
Expand Down
10 changes: 10 additions & 0 deletions include/NeuraDialect/NeuraPasses.td
Original file line number Diff line number Diff line change
Expand Up @@ -57,4 +57,14 @@ def MapToAccelerator : Pass<"map-to-accelerator", "ModuleOp"> {
}];
let constructor = "neura::createMapToAcceleratorPass()";
}

def GenerateCode : Pass<"generate-code", "ModuleOp"> {
let summary = "Generate JSON-formatted instructions from mapped Neura IR";
let description = [{
This pass generates a JSON file containing the instructions.
The instructions can be encoded into configuration signals.
}];
let constructor = "neura::createGenerateCodePass()";
}

#endif // NEURA_PASSES_TD
Loading