forked from tracel-ai/burn

Commit
* Move and redirect GatherElements to new folders/nodes
* Create PyTorch script for gather
* Add onnx file for gather
* Add a gather test to onnx_tests
* Update gather.rs to use select
* Rename codegen test
* Update gather and gather_elements conversion functions
* Validate rank of input node and update output
* Add check for Gather
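The distinction behind these changes: ONNX `Gather` selects whole slices along an axis, which maps to Burn's `select` (hence "Update gather.rs to use select"), while ONNX `GatherElements` picks individual elements by index, which maps to PyTorch's/Burn's `gather`. Below is a minimal sketch of the two Burn calls, not part of the commit, assuming the NdArray backend (ndarray feature) and a recent Burn API where tensor constructors take a device:

use burn::backend::NdArray;
use burn::tensor::{Int, Tensor};

fn main() {
    type B = NdArray<f32>;
    let device = Default::default();
    let x = Tensor::<B, 2>::from_floats([[1.0, 2.0], [3.0, 4.0]], &device);

    // ONNX Gather: pick whole rows along dim 0 -> Burn `select`.
    let rows = Tensor::<B, 1, Int>::from_ints([1, 0], &device);
    let selected = x.clone().select(0, rows); // [[3.0, 4.0], [1.0, 2.0]]

    // ONNX GatherElements: pick per-element values -> Burn `gather`.
    let idx = Tensor::<B, 2, Int>::from_ints([[0, 0], [1, 0]], &device);
    let gathered = x.gather(1, idx); // [[1.0, 1.0], [4.0, 3.0]]

    println!("{selected}\n{gathered}");
}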
Showing 13 changed files with 270 additions and 31 deletions.
18 changes: 18 additions & 0 deletions
crates/burn-import/onnx-tests/tests/gather_elements/gather_elements.onnx
(Binary ONNX protobuf exported by PyTorch 2.1.1: a main_graph containing a single GatherElements node with an axis attribute and two inputs, onnx::GatherElements_0 and onnx::GatherElements_1. Not human-readable as text.)
48 changes: 48 additions & 0 deletions
crates/burn-import/onnx-tests/tests/gather_elements/gather_elements.py
#!/usr/bin/env python3

# used to generate model: onnx-tests/tests/gather_elements/gather_elements.onnx
# note that the ONNX specification for `GatherElements` corresponds to PyTorch's/Burn's `gather` function

import torch
import torch.nn as nn


class Model(nn.Module):
    def __init__(self):
        super(Model, self).__init__()

    def forward(self, x, index):
        x = torch.gather(x, 1, index)
        return x


def main():
    # Set random seed for reproducibility
    torch.manual_seed(0)

    # Export to onnx
    model = Model()
    model.eval()
    device = torch.device("cpu")
    onnx_name = "gather_elements.onnx"
    dummy_input = torch.randn(2, 2, device=device)
    dummy_index = torch.randint(high=2, size=(2, 2), device=device, dtype=torch.int64)

    torch.onnx.export(model, (dummy_input, dummy_index), onnx_name,
                      verbose=False, opset_version=16)

    print("Finished exporting model to {}".format(onnx_name))

    # Output some test data for use in the test
    test_input = torch.tensor([[1.0, 2.0],
                               [3.0, 4.0]])
    test_index = torch.tensor([[0, 0],
                               [1, 0]])

    print("Test input data: {}, {}".format(test_input, test_index))
    output = model.forward(test_input, test_index)
    print("Test output data: {}".format(output))


if __name__ == '__main__':
    main()
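For the test tensors above, torch.gather(x, 1, index) reads x[i][index[i][j]] along dim 1, so the printed test output is [[1., 1.], [4., 3.]].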
112 changes: 112 additions & 0 deletions
crates/burn-import/src/burn/node/gather_elements.rs
use super::{Node, NodeCodegen};
use crate::burn::{TensorType, ToTokens, Type};

use burn::record::PrecisionSettings;
use quote::quote;

#[derive(Debug, Clone, new)]
pub struct GatherElementsNode {
    pub input: TensorType,
    pub index: TensorType,
    pub output: TensorType,
    pub dim: usize,
}

impl<PS: PrecisionSettings> NodeCodegen<PS> for GatherElementsNode {
    fn output_types(&self) -> Vec<Type> {
        vec![Type::Tensor(self.output.clone())]
    }

    fn input_types(&self) -> Vec<crate::burn::Type> {
        vec![
            Type::Tensor(self.input.clone()),
            Type::Tensor(self.index.clone()),
        ]
    }

    fn forward(
        &self,
        scope: &mut crate::burn::Scope,
        node_position: usize,
    ) -> proc_macro2::TokenStream {
        let dim = self.dim.to_tokens();
        let input = scope.tensor_use_owned(&self.input, node_position);
        let index = scope.tensor_use_owned(&self.index, node_position);
        let output = &self.output.name;

        quote! {
            let #output = #input.gather(#dim, #index);
        }
    }

    fn into_node(self) -> super::Node<PS> {
        Node::GatherElements(self)
    }
}

#[cfg(test)]
mod tests {

    use burn::record::FullPrecisionSettings;

    use super::*;
    use crate::burn::{
        graph::BurnGraph,
        node::{gather_elements::GatherElementsNode, test::assert_tokens},
        TensorType,
    };

    #[test]
    fn test_codegen_gather_elements() {
        let mut graph = BurnGraph::<FullPrecisionSettings>::default();

        graph.register(GatherElementsNode::new(
            TensorType::new_float("tensor1", 2),
            TensorType::new_int("tensor2", 2),
            TensorType::new_float("tensor3", 2),
            1,
        ));

        graph.register_input_output(
            vec!["tensor1".to_string(), "tensor2".to_string()],
            vec!["tensor3".to_string()],
        );

        let expected = quote! {
            use burn::tensor::Int;
            use burn::{
                module::Module,
                tensor::{backend::Backend, Tensor},
            };

            #[derive(Module, Debug)]
            pub struct Model<B: Backend> {
                phantom: core::marker::PhantomData<B>,
                device: burn::module::Ignored<B::Device>,
            }

            impl<B: Backend> Model<B> {
                #[allow(unused_variables)]
                pub fn new(device: &B::Device) -> Self {
                    Self {
                        phantom: core::marker::PhantomData,
                        device: burn::module::Ignored(device.clone()),
                    }
                }

                #[allow(clippy::let_and_return, clippy::approx_constant)]
                pub fn forward(
                    &self,
                    tensor1: Tensor<B, 2>,
                    tensor2: Tensor<B, 2, Int>
                ) -> Tensor<B, 2> {
                    let tensor3 = tensor1.gather(1, tensor2);

                    tensor3
                }
            }
        };

        assert_tokens(graph.codegen(), expected);
    }
}
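For reference, here is a standalone sketch of the module the codegen test above expects burn-import to emit, exercised on the data from gather_elements.py. The NdArray backend, the `main` function, and the `from_floats`/`from_ints` constructors are assumptions for illustration, not part of the commit:

use burn::backend::NdArray;
use burn::module::Module;
use burn::tensor::{backend::Backend, Int, Tensor};

// Mirrors the code the test expects burn-import to generate for a GatherElements graph.
#[derive(Module, Debug)]
pub struct Model<B: Backend> {
    phantom: core::marker::PhantomData<B>,
    device: burn::module::Ignored<B::Device>,
}

impl<B: Backend> Model<B> {
    pub fn new(device: &B::Device) -> Self {
        Self {
            phantom: core::marker::PhantomData,
            device: burn::module::Ignored(device.clone()),
        }
    }

    pub fn forward(&self, tensor1: Tensor<B, 2>, tensor2: Tensor<B, 2, Int>) -> Tensor<B, 2> {
        tensor1.gather(1, tensor2)
    }
}

fn main() {
    type B = NdArray<f32>;
    let device = Default::default();
    let model = Model::<B>::new(&device);

    // Same test data as gather_elements.py.
    let input = Tensor::<B, 2>::from_floats([[1.0, 2.0], [3.0, 4.0]], &device);
    let index = Tensor::<B, 2, Int>::from_ints([[0, 0], [1, 0]], &device);

    let output = model.forward(input, index);
    println!("{output}"); // expected: [[1.0, 1.0], [4.0, 3.0]]
}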
(The remaining changed files in this commit did not load and are not shown here.)