Skip to content

Commit

Permalink
Add checks for even padding when kernel size is even
Browse files Browse the repository at this point in the history
  • Loading branch information
laggui committed Jan 9, 2025
1 parent da8de56 commit fec926c
Show file tree
Hide file tree
Showing 9 changed files with 59 additions and 8 deletions.
4 changes: 4 additions & 0 deletions crates/burn-core/src/nn/conv/conv1d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,10 @@ impl<B: Backend> Conv1d<B> {
///
/// - input: `[batch_size, channels_in, length_in]`
/// - output: `[batch_size, channels_out, length_out]`
///
/// ### Panics
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
pub fn forward(&self, input: Tensor<B, 3>) -> Tensor<B, 3> {
let [_batch_size, _channels, length] = input.dims();
let padding = self
Expand Down
4 changes: 4 additions & 0 deletions crates/burn-core/src/nn/conv/conv2d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,10 @@ impl<B: Backend> Conv2d<B> {
///
/// - input: `[batch_size, channels_in, height_in, width_in]`
/// - output: `[batch_size, channels_out, height_out, width_out]`
///
/// ### Panics
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
pub fn forward(&self, input: Tensor<B, 4>) -> Tensor<B, 4> {
let [_batch_size, _channels_in, height_in, width_in] = input.dims();
let padding =
Expand Down
4 changes: 4 additions & 0 deletions crates/burn-core/src/nn/conv/conv3d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,10 @@ impl<B: Backend> Conv3d<B> {
///
/// - input: `[batch_size, channels_in, depth_in, height_in, width_in]`
/// - output: `[batch_size, channels_out, depth_out, height_out, width_out]`
///
/// ### Panics
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
pub fn forward(&self, input: Tensor<B, 5>) -> Tensor<B, 5> {
let [_batch_size, _channels_in, depth_in, height_in, width_in] = input.dims();
let padding = self.padding.calculate_padding_3d(
Expand Down
4 changes: 4 additions & 0 deletions crates/burn-core/src/nn/conv/deform_conv2d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,10 @@ impl<B: Backend> DeformConv2d<B> {
/// - offset: `[batch_size, 2 * offset_groups * kernel_height * kernel_width, height_out, width_out]`
/// - mask: `[batch_size, offset_groups * kernel_height * kernel_width, height_out, width_out]`
/// - output: `[batch_size, channels_out, height_out, width_out]`
///
/// ### Panics
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
pub fn forward(
&self,
input: Tensor<B, 4>,
Expand Down
28 changes: 27 additions & 1 deletion crates/burn-core/src/nn/padding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,24 @@ use crate::tensor::ops::conv::calculate_conv_padding;

use crate::config::Config;

// https://github.com/tracel-ai/burn/issues/2676
/// Asserts that every kernel dimension is odd.
///
/// `Same` padding must be symmetric, which is only possible when each kernel
/// dimension is odd; an even dimension would require asymmetric padding to
/// keep the output size equal to the input size.
///
/// # Panics
/// Panics (via `unimplemented!`) if any element of `kernel_size` is even.
fn check_same_padding_support(kernel_size: &[usize]) {
    if kernel_size.iter().any(|&k| k % 2 == 0) {
        unimplemented!("same padding with an even kernel size is not supported");
    }
}

/// Padding configuration for 1D operators.
#[derive(Config, Debug, PartialEq)]
pub enum PaddingConfig1d {
/// Dynamically calculate the amount of padding necessary to ensure that the output size will be
/// the same as the input.
///
/// **Warning:**
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
Same,
/// Same as no padding.
Valid,
Expand All @@ -23,7 +36,10 @@ impl PaddingConfig1d {
kernel_size: usize,
stride: usize,
) -> usize {
let same_padding = || calculate_conv_padding(kernel_size, stride, length, length);
let same_padding = || {
check_same_padding_support(&[kernel_size]);
calculate_conv_padding(kernel_size, stride, length, length)
};
match self {
Self::Valid => 0,
Self::Same => same_padding(),
Expand All @@ -37,6 +53,10 @@ impl PaddingConfig1d {
pub enum PaddingConfig2d {
/// Dynamically calculate the amount of padding necessary to ensure that the output size will be
/// the same as the input.
///
/// **Warning:**
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
Same,
/// Same as no padding.
Valid,
Expand All @@ -53,6 +73,7 @@ impl PaddingConfig2d {
stride: &[usize; 2],
) -> [usize; 2] {
let same_padding = || {
check_same_padding_support(kernel_size.as_slice());
let p1 = calculate_conv_padding(kernel_size[0], stride[0], height, height);
let p2 = calculate_conv_padding(kernel_size[1], stride[1], width, width);

Expand All @@ -72,6 +93,10 @@ impl PaddingConfig2d {
pub enum PaddingConfig3d {
/// Dynamically calculate the amount of padding necessary to ensure that the output size will be
/// the same as the input.
///
/// **Warning:**
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
Same,
/// Same as no padding.
Valid,
Expand All @@ -89,6 +114,7 @@ impl PaddingConfig3d {
stride: &[usize; 3],
) -> [usize; 3] {
let same_padding = || {
check_same_padding_support(kernel_size.as_slice());
let p1 = calculate_conv_padding(kernel_size[0], stride[0], depth, depth);
let p2 = calculate_conv_padding(kernel_size[1], stride[1], height, height);
let p3 = calculate_conv_padding(kernel_size[2], stride[2], width, width);
Expand Down
8 changes: 4 additions & 4 deletions crates/burn-core/src/nn/pool/avg_pool1d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,6 @@ pub struct AvgPool1dConfig {
/// legitimate values, and they contribute to the denominator
/// when calculating the average. This is equivalent to
/// `torch.nn.AvgPool2d` with `count_include_pad=True`.
///
/// TODO: Add support for `count_include_pad=False`, see
/// [Issue 636](https://github.com/tracel-ai/burn/issues/636)
#[derive(Module, Clone, Debug)]
#[module(custom_display)]
pub struct AvgPool1d {
Expand Down Expand Up @@ -91,6 +87,10 @@ impl AvgPool1d {
///
/// - input: `[batch_size, channels, length_in]`
/// - output: `[batch_size, channels, length_out]`
///
/// ### Panics
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
pub fn forward<B: Backend>(&self, input: Tensor<B, 3>) -> Tensor<B, 3> {
let [_batch_size, _channels, length] = input.dims();
let padding = self
Expand Down
7 changes: 4 additions & 3 deletions crates/burn-core/src/nn/pool/avg_pool2d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,6 @@ pub struct AvgPool2dConfig {
/// legitimate values, and they contribute to the denominator
/// when calculating the average. This is equivalent to
/// `torch.nn.AvgPool2d` with `count_include_pad=True`.
///
/// TODO: Add support for `count_include_pad=False`, see
/// [Issue 636](https://github.com/tracel-ai/burn/issues/636)
#[derive(Module, Clone, Debug)]
#[module(custom_display)]
pub struct AvgPool2d {
Expand Down Expand Up @@ -90,6 +87,10 @@ impl AvgPool2d {
///
/// - input: `[batch_size, channels, height_in, width_in]`
/// - output: `[batch_size, channels, height_out, width_out]`
///
/// ### Panics
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
pub fn forward<B: Backend>(&self, input: Tensor<B, 4>) -> Tensor<B, 4> {
let [_batch_size, _channels_in, height_in, width_in] = input.dims();
let padding =
Expand Down
4 changes: 4 additions & 0 deletions crates/burn-core/src/nn/pool/max_pool1d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,10 @@ impl MaxPool1d {
///
/// - input: `[batch_size, channels, length_in]`
/// - output: `[batch_size, channels, length_out]`
///
/// ### Panics
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
pub fn forward<B: Backend>(&self, input: Tensor<B, 3>) -> Tensor<B, 3> {
let [_batch_size, _channels, length] = input.dims();
let padding = self
Expand Down
4 changes: 4 additions & 0 deletions crates/burn-core/src/nn/pool/max_pool2d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,10 @@ impl MaxPool2d {
///
/// - input: `[batch_size, channels, height_in, width_in]`
/// - output: `[batch_size, channels, height_out, width_out]`
///
/// ### Panics
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
pub fn forward<B: Backend>(&self, input: Tensor<B, 4>) -> Tensor<B, 4> {
let [_batch_size, _channels_in, height_in, width_in] = input.dims();
let padding =
Expand Down

0 comments on commit fec926c

Please sign in to comment.