Add checks for even padding when kernel size is even (#2677)
* Add checks for even padding when kernel size is even

* Move the check to config init
laggui authored Jan 11, 2025
1 parent 95593fc commit 5b3079a
Showing 9 changed files with 127 additions and 7 deletions.
11 changes: 11 additions & 0 deletions crates/burn-core/src/nn/conv/checks.rs
@@ -9,3 +9,14 @@ pub(crate) fn checks_channels_div_groups(channels_in: usize, channels_out: usize
);
}
}

// https://github.com/tracel-ai/burn/issues/2676
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
pub(crate) fn check_same_padding_support(kernel_size: &[usize]) {
for k in kernel_size.iter() {
if k % 2 == 0 {
unimplemented!("Same padding with an even kernel size is not supported");
}
}
}
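
For context (an editor's aside, not part of the commit): with stride 1 and dilation 1, a "same" output length requires `kernel_size - 1` total padded elements, which is odd whenever `kernel_size` is even and therefore has no symmetric split. A minimal standalone Rust sketch of that arithmetic:

fn symmetric_same_padding(kernel_size: usize) -> Option<usize> {
    // Total padding needed for a "same" output length at stride 1, dilation 1.
    let total = kernel_size - 1;
    if total % 2 == 0 {
        Some(total / 2) // pad this many elements on each side
    } else {
        None // odd total (even kernel): no symmetric split exists
    }
}

fn main() {
    assert_eq!(symmetric_same_padding(3), Some(1)); // odd kernel: supported
    assert_eq!(symmetric_same_padding(4), None); // even kernel: rejected by the check above
}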
15 changes: 15 additions & 0 deletions crates/burn-core/src/nn/conv/conv1d.rs
@@ -28,6 +28,10 @@ pub struct Conv1dConfig {
#[config(default = "1")]
pub groups: usize,
/// The padding configuration.
///
/// ### Warning
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
#[config(default = "PaddingConfig1d::Valid")]
pub padding: PaddingConfig1d,
/// If bias should be added to the output.
@@ -87,6 +91,9 @@ impl Conv1dConfig {
/// Initialize a new [conv1d](Conv1d) module.
pub fn init<B: Backend>(&self, device: &B::Device) -> Conv1d<B> {
checks::checks_channels_div_groups(self.channels_in, self.channels_out, self.groups);
if self.padding == PaddingConfig1d::Same {
checks::check_same_padding_support(&[self.kernel_size]);
}

let shape = [
self.channels_out,
@@ -175,6 +182,14 @@ mod tests {
.assert_approx_eq(&TensorData::zeros::<f32, _>(conv.weight.shape()), 3);
}

#[test]
#[should_panic = "Same padding with an even kernel size is not supported"]
fn same_with_even_kernel_is_invalid() {
let device = Default::default();
let config = Conv1dConfig::new(5, 5, 4).with_padding(PaddingConfig1d::Same);
let _ = config.init::<TestBackend>(&device);
}

#[test]
fn display() {
let config = Conv1dConfig::new(5, 5, 5);
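A minimal caller-side sketch of how the new check surfaces (an editor's aside, not part of the commit; the module paths and the `ndarray` backend feature are assumptions):

use burn::backend::NdArray;
use burn::nn::conv::Conv1dConfig;
use burn::nn::PaddingConfig1d;

fn main() {
    let device = Default::default();

    // An odd kernel size with `Same` padding initializes as before.
    let _conv = Conv1dConfig::new(5, 5, 3)
        .with_padding(PaddingConfig1d::Same)
        .init::<NdArray>(&device);

    // An even kernel size now panics inside `init` with
    // "Same padding with an even kernel size is not supported":
    // Conv1dConfig::new(5, 5, 4)
    //     .with_padding(PaddingConfig1d::Same)
    //     .init::<NdArray>(&device);
}
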
15 changes: 15 additions & 0 deletions crates/burn-core/src/nn/conv/conv2d.rs
@@ -30,6 +30,10 @@ pub struct Conv2dConfig {
#[config(default = "1")]
pub groups: usize,
/// The padding configuration.
///
/// ### Warning
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
#[config(default = "PaddingConfig2d::Valid")]
pub padding: PaddingConfig2d,
/// If bias should be added to the output.
@@ -68,6 +72,9 @@ impl Conv2dConfig {
/// Initialize a new [conv2d](Conv2d) module.
pub fn init<B: Backend>(&self, device: &B::Device) -> Conv2d<B> {
checks::checks_channels_div_groups(self.channels[0], self.channels[1], self.groups);
if self.padding == PaddingConfig2d::Same {
checks::check_same_padding_support(&self.kernel_size);
}

let shape = [
self.channels[1],
@@ -228,6 +235,14 @@ mod tests {
let _ = config.init::<TestBackend>(&device);
}

#[test]
#[should_panic = "Same padding with an even kernel size is not supported"]
fn same_with_even_kernel_is_invalid() {
let device = Default::default();
let config = Conv2dConfig::new([4, 4], [2, 2]).with_padding(PaddingConfig2d::Same);
let _ = config.init::<TestBackend>(&device);
}

#[test]
fn display() {
let config = Conv2dConfig::new([5, 1], [5, 5]);
11 changes: 11 additions & 0 deletions crates/burn-core/src/nn/conv/conv3d.rs
@@ -68,6 +68,9 @@ impl Conv3dConfig {
/// Initialize a new [conv3d](Conv3d) module.
pub fn init<B: Backend>(&self, device: &B::Device) -> Conv3d<B> {
checks::checks_channels_div_groups(self.channels[0], self.channels[1], self.groups);
if self.padding == PaddingConfig3d::Same {
checks::check_same_padding_support(&self.kernel_size);
}

let shape = [
self.channels[1],
@@ -228,6 +231,14 @@ mod tests {
assert_eq!(config.initializer, init);
}

#[test]
#[should_panic = "Same padding with an even kernel size is not supported"]
fn same_with_even_kernel_is_invalid() {
let device = Default::default();
let config = Conv3dConfig::new([4, 4], [2, 2, 2]).with_padding(PaddingConfig3d::Same);
let _ = config.init::<TestBackend>(&device);
}

#[test]
fn display() {
let config = Conv3dConfig::new([5, 1], [5, 5, 5]);
15 changes: 15 additions & 0 deletions crates/burn-core/src/nn/conv/deform_conv2d.rs
@@ -33,6 +33,10 @@ pub struct DeformConv2dConfig {
#[config(default = "1")]
pub offset_groups: usize,
/// The padding configuration.
///
/// ### Warning
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
#[config(default = "PaddingConfig2d::Valid")]
pub padding: PaddingConfig2d,
/// If bias should be added to the output.
@@ -73,6 +77,9 @@ impl DeformConv2dConfig {
/// Initialize a new [DeformConv2d](DeformConv2d) module.
pub fn init<B: Backend>(&self, device: &B::Device) -> DeformConv2d<B> {
checks::checks_channels_div_groups(self.channels[0], self.channels[1], self.weight_groups);
if self.padding == PaddingConfig2d::Same {
checks::check_same_padding_support(&self.kernel_size);
}

let shape = [
self.channels[1],
@@ -250,6 +257,14 @@ mod tests {
let _ = config.init::<TestBackend>(&device);
}

#[test]
#[should_panic = "Same padding with an even kernel size is not supported"]
fn same_with_even_kernel_is_invalid() {
let device = Default::default();
let config = DeformConv2dConfig::new([4, 4], [2, 2]).with_padding(PaddingConfig2d::Same);
let _ = config.init::<TestBackend>(&device);
}

#[test]
fn display() {
let config = DeformConv2dConfig::new([5, 1], [5, 5]);
19 changes: 15 additions & 4 deletions crates/burn-core/src/nn/pool/avg_pool1d.rs
@@ -1,4 +1,5 @@
use crate as burn;
use crate::nn::conv::checks::check_same_padding_support;

use crate::config::Config;
use crate::module::{Content, DisplaySettings, ModuleDisplay};
@@ -18,6 +19,10 @@ pub struct AvgPool1dConfig {
#[config(default = "1")]
pub stride: usize,
/// The padding configuration.
///
/// ### Warning
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
#[config(default = "PaddingConfig1d::Valid")]
pub padding: PaddingConfig1d,
/// If the padding is counted in the denominator when computing the average.
@@ -36,10 +41,6 @@ pub struct AvgPool1dConfig {
/// legitimate values, and they contribute to the denominator
/// when calculating the average. This is equivalent to
/// `torch.nn.AvgPool2d` with `count_include_pad=True`.
///
/// TODO: Add support for `count_include_pad=False`, see
/// [Issue 636](https://github.com/tracel-ai/burn/issues/636)
#[derive(Module, Clone, Debug)]
#[module(custom_display)]
pub struct AvgPool1d {
@@ -73,6 +74,9 @@ impl ModuleDisplay for AvgPool1d {
impl AvgPool1dConfig {
/// Initialize a new [avg pool 1d](AvgPool1d) module.
pub fn init(&self) -> AvgPool1d {
if self.padding == PaddingConfig1d::Same {
check_same_padding_support(&[self.kernel_size]);
}
AvgPool1d {
stride: self.stride,
kernel_size: self.kernel_size,
@@ -111,6 +115,13 @@ impl AvgPool1d {
mod tests {
use super::*;

#[test]
#[should_panic = "Same padding with an even kernel size is not supported"]
fn same_with_even_kernel_is_invalid() {
let config = AvgPool1dConfig::new(2).with_padding(PaddingConfig1d::Same);
let _ = config.init();
}

#[test]
fn display() {
let config = AvgPool1dConfig::new(3);
18 changes: 15 additions & 3 deletions crates/burn-core/src/nn/pool/avg_pool2d.rs
@@ -1,4 +1,5 @@
use crate as burn;
use crate::nn::conv::checks::check_same_padding_support;

use crate::config::Config;
use crate::module::{Content, DisplaySettings, ModuleDisplay};
@@ -18,6 +19,10 @@ pub struct AvgPool2dConfig {
#[config(default = "[1, 1]")]
pub strides: [usize; 2],
/// The padding configuration.
///
/// ### Warning
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
#[config(default = "PaddingConfig2d::Valid")]
pub padding: PaddingConfig2d,
/// If the padding is counted in the denominator when computing the average.
@@ -36,9 +41,6 @@ pub struct AvgPool2dConfig {
/// legitimate values, and they contribute to the denominator
/// when calculating the average. This is equivalent to
/// `torch.nn.AvgPool2d` with `count_include_pad=True`.
///
/// TODO: Add support for `count_include_pad=False`, see
/// [Issue 636](https://github.com/tracel-ai/burn/issues/636)
#[derive(Module, Clone, Debug)]
#[module(custom_display)]
pub struct AvgPool2d {
@@ -72,6 +74,9 @@ impl ModuleDisplay for AvgPool2d {
impl AvgPool2dConfig {
/// Initialize a new [avg pool 2d](AvgPool2d) module.
pub fn init(&self) -> AvgPool2d {
if self.padding == PaddingConfig2d::Same {
check_same_padding_support(&self.kernel_size);
}
AvgPool2d {
stride: self.strides,
kernel_size: self.kernel_size,
@@ -110,6 +115,13 @@ impl AvgPool2d {
mod tests {
use super::*;

#[test]
#[should_panic = "Same padding with an even kernel size is not supported"]
fn same_with_even_kernel_is_invalid() {
let config = AvgPool2dConfig::new([2, 2]).with_padding(PaddingConfig2d::Same);
let _ = config.init();
}

#[test]
fn display() {
let config = AvgPool2dConfig::new([3, 3]);
15 changes: 15 additions & 0 deletions crates/burn-core/src/nn/pool/max_pool1d.rs
@@ -1,4 +1,5 @@
use crate as burn;
use crate::nn::conv::checks::check_same_padding_support;

use crate::config::Config;
use crate::module::{Content, DisplaySettings, ModuleDisplay};
@@ -18,6 +19,10 @@ pub struct MaxPool1dConfig {
#[config(default = "1")]
pub stride: usize,
/// The padding configuration.
///
/// ### Warning
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
#[config(default = "PaddingConfig1d::Valid")]
pub padding: PaddingConfig1d,
/// The dilation.
@@ -61,6 +66,9 @@ impl ModuleDisplay for MaxPool1d {
impl MaxPool1dConfig {
/// Initialize a new [max pool 1d](MaxPool1d) module.
pub fn init(&self) -> MaxPool1d {
if self.padding == PaddingConfig1d::Same {
check_same_padding_support(&[self.kernel_size]);
}
MaxPool1d {
stride: self.stride,
kernel_size: self.kernel_size,
@@ -93,6 +101,13 @@ impl MaxPool1d {
mod tests {
use super::*;

#[test]
#[should_panic = "Same padding with an even kernel size is not supported"]
fn same_with_even_kernel_is_invalid() {
let config = MaxPool1dConfig::new(2).with_padding(PaddingConfig1d::Same);
let _ = config.init();
}

#[test]
fn display() {
let config = MaxPool1dConfig::new(3);
15 changes: 15 additions & 0 deletions crates/burn-core/src/nn/pool/max_pool2d.rs
@@ -1,4 +1,5 @@
use crate as burn;
use crate::nn::conv::checks::check_same_padding_support;

use crate::config::Config;
use crate::module::{Content, DisplaySettings, ModuleDisplay};
@@ -18,6 +19,10 @@ pub struct MaxPool2dConfig {
#[config(default = "[1, 1]")]
pub strides: [usize; 2],
/// The padding configuration.
///
/// ### Warning
/// Only symmetric padding is currently supported. As such, using `Same` padding with an even kernel
/// size is not supported as it will not produce the same output size.
#[config(default = "PaddingConfig2d::Valid")]
pub padding: PaddingConfig2d,
/// The dilation.
@@ -61,6 +66,9 @@ impl ModuleDisplay for MaxPool2d {
impl MaxPool2dConfig {
/// Initialize a new [max pool 2d](MaxPool2d) module.
pub fn init(&self) -> MaxPool2d {
if self.padding == PaddingConfig2d::Same {
check_same_padding_support(&self.kernel_size);
}
MaxPool2d {
stride: self.strides,
kernel_size: self.kernel_size,
@@ -93,6 +101,13 @@ impl MaxPool2d {
mod tests {
use super::*;

#[test]
#[should_panic = "Same padding with an even kernel size is not supported"]
fn same_with_even_kernel_is_invalid() {
let config = MaxPool2dConfig::new([2, 2]).with_padding(PaddingConfig2d::Same);
let _ = config.init();
}

#[test]
fn display() {
let config = MaxPool2dConfig::new([3, 3]);
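The pooling configs follow the same pattern, except that their `init` takes no device. A minimal caller-side sketch (an editor's aside, not part of the commit; module paths are assumptions):

use burn::nn::pool::MaxPool2dConfig;
use burn::nn::PaddingConfig2d;

fn main() {
    // An odd kernel size with `Same` padding is still accepted.
    let _pool = MaxPool2dConfig::new([3, 3])
        .with_padding(PaddingConfig2d::Same)
        .init();

    // An even kernel size now panics inside `init` with
    // "Same padding with an even kernel size is not supported":
    // MaxPool2dConfig::new([2, 2])
    //     .with_padding(PaddingConfig2d::Same)
    //     .init();
}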
