Add bool() op for numerical tensor (#1402)
Fixes #1395
antimora authored Mar 4, 2024
1 parent efbe818 commit 4ed90a9
Showing 7 changed files with 37 additions and 1 deletion.
1 change: 1 addition & 0 deletions burn-book/src/building-blocks/tensor.md
@@ -181,6 +181,7 @@ Those operations are available for numeric tensor kinds: `Float` and `Int`.
 | `tensor.all_close(other, atol, rtol)` | `torch.allclose(tensor, other, atol, rtol)` |
 | `tensor.argmax(dim)` | `tensor.argmax(dim)` |
 | `tensor.argmin(dim)` | `tensor.argmin(dim)` |
+| `tensor.bool()` | `tensor.bool()` |
 | `tensor.clamp(min, max)` | `torch.clamp(tensor, min=min, max=max)` |
 | `tensor.clamp_max(max)` | `torch.clamp(tensor, max=max)` |
 | `tensor.clamp_min(min)` | `torch.clamp(tensor, min=min)` |
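For orientation, and not part of the commit itself: a minimal sketch of how the newly documented op is called through the public API, assuming any backend `B` and a 2-D float tensor; the helper name `nonzero_mask` is purely illustrative.

use burn_tensor::backend::Backend;
use burn_tensor::{Bool, Tensor};

// Illustrative helper: build a boolean mask that is `true` wherever the input
// is non-zero, mirroring the `torch.Tensor.bool()` column in the table above.
fn nonzero_mask<B: Backend>(x: Tensor<B, 2>) -> Tensor<B, 2, Bool> {
    x.bool()
}

Elements equal to zero map to `false` and everything else maps to `true`, which is exactly what the new tests further down verify.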
1 change: 1 addition & 0 deletions crates/burn-candle/src/lib.rs
@@ -58,6 +58,7 @@ mod tests {
     burn_tensor::testgen_arange!();
     burn_tensor::testgen_arange_step!();
     burn_tensor::testgen_arg!();
+    burn_tensor::testgen_bool!();
     burn_tensor::testgen_cast!();
     burn_tensor::testgen_cat!();
     burn_tensor::testgen_recip!();
10 changes: 10 additions & 0 deletions crates/burn-tensor/src/tensor/api/numeric.rs
@@ -2,6 +2,7 @@ use crate::{
     backend::Backend, check, check::TensorCheck, BasicOps, Bool, Element, ElementConversion, Float,
     Int, Shape, Tensor, TensorKind,
 };
+use num_traits::Zero;
 
 impl<B, const D: usize, K> Tensor<B, D, K>
 where
@@ -640,6 +641,15 @@
     pub fn all_close(self, other: Self, rtol: Option<f64>, atol: Option<f64>) -> bool {
         self.is_close(other, rtol, atol).all().into_scalar()
     }
+
+    /// Converts the tensor to a boolean tensor by checking if the elements are non-zero.
+    ///
+    /// # Returns
+    ///
+    /// A boolean tensor with the same shape as the input tensor.
+    pub fn bool(self) -> Tensor<B, D, Bool> {
+        K::not_equal_elem::<D>(self.primitive, K::Elem::zero())
+    }
 }
 
 impl<B, K> Tensor<B, 2, K>
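Aside, not part of the diff: the single-line body above compiles for every numeric element type only because of the `Zero` bound that the next file adds to the `Element` trait. A standalone sketch of the same compare-against-the-additive-identity pattern, with a hypothetical generic parameter `E`:

use num_traits::Zero;

// Hypothetical free function illustrating the pattern `bool()` relies on:
// an element counts as `true` exactly when it differs from its type's zero.
fn is_nonzero<E: Zero + PartialEq>(value: E) -> bool {
    value != E::zero()
}

With `Zero` on `Element`, the tensor op can ask for `K::Elem::zero()` without caring whether the concrete element is `f32`, `i64`, or a half-precision float.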
3 changes: 2 additions & 1 deletion crates/burn-tensor/src/tensor/element.rs
@@ -1,11 +1,12 @@
 use crate::Distribution;
 use half::{bf16, f16};
-use num_traits::ToPrimitive;
+use num_traits::{identities::Zero, ToPrimitive};
 use rand::RngCore;
 
 /// Element trait for tensor.
 pub trait Element:
     ToPrimitive
+    + Zero
     + ElementRandom
     + ElementConversion
     + ElementPrecision
1 change: 1 addition & 0 deletions crates/burn-tensor/src/tests/mod.rs
@@ -85,6 +85,7 @@ macro_rules! testgen_all {
         burn_tensor::testgen_powf!();
         burn_tensor::testgen_any!();
         burn_tensor::testgen_all_op!();
+        burn_tensor::testgen_bool!();
         burn_tensor::testgen_argwhere_nonzero!();
 
         // test stats
21 changes: 21 additions & 0 deletions crates/burn-tensor/src/tests/ops/bool.rs
@@ -0,0 +1,21 @@
+#[burn_tensor_testgen::testgen(bool)]
+mod tests {
+    use super::*;
+    use burn_tensor::{Data, Tensor};
+
+    #[test]
+    fn test_from_float() {
+        let tensor1 = TestTensor::from([[0.0, 43.0, 0.0], [2.0, -4.2, 31.33]]);
+        let data_actual = tensor1.bool().into_data();
+        let data_expected = Data::from([[false, true, false], [true, true, true]]);
+        assert_eq!(data_expected, data_actual);
+    }
+
+    #[test]
+    fn test_from_int() {
+        let tensor1 = TestTensorInt::from([[0, 43, 0], [2, -4, 31]]);
+        let data_actual = tensor1.bool().into_data();
+        let data_expected = Data::from([[false, true, false], [true, true, true]]);
+        assert_eq!(data_expected, data_actual);
+    }
+}
1 change: 1 addition & 0 deletions crates/burn-tensor/src/tests/ops/mod.rs
@@ -7,6 +7,7 @@ mod arange;
 mod arange_step;
 mod arg;
 mod argwhere_nonzero;
+mod bool;
 mod cast;
 mod cat;
 mod chunk;
