Tanh nn wrapper (tracel-ai#1903)
DieracDelta committed Jun 18, 2024
1 parent f8a7c54 commit 263add2
Showing 2 changed files with 28 additions and 0 deletions.
2 changes: 2 additions & 0 deletions crates/burn-core/src/nn/mod.rs
@@ -30,6 +30,7 @@ mod relu;
 mod rnn;
 mod rope_encoding;
 mod swiglu;
+mod tanh;
 mod unfold;
 
 pub use dropout::*;
@@ -46,4 +47,5 @@ pub use relu::*;
 pub use rnn::*;
 pub use rope_encoding::*;
 pub use swiglu::*;
+pub use tanh::*;
 pub use unfold::*;
26 changes: 26 additions & 0 deletions crates/burn-core/src/nn/tanh.rs
@@ -0,0 +1,26 @@
+use crate as burn;
+
+use crate::module::Module;
+use crate::tensor::backend::Backend;
+use crate::tensor::Tensor;
+
+/// Applies the tanh activation function element-wise
+/// See also [tanh](burn::tensor::activation::tanh)
+#[derive(Module, Clone, Debug, Default)]
+pub struct Tanh {}
+
+impl Tanh {
+    /// Create the module.
+    pub fn new() -> Self {
+        Self {}
+    }
+    /// Applies the forward pass on the input tensor.
+    ///
+    /// # Shapes
+    ///
+    /// - input: `[..., any]`
+    /// - output: `[..., any]`
+    pub fn forward<B: Backend, const D: usize>(&self, input: Tensor<B, D>) -> Tensor<B, D> {
+        crate::tensor::activation::tanh(input)
+    }
+}
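
For reference, a minimal usage sketch of the new wrapper (not part of the commit). It assumes the burn-ndarray backend as a concrete Backend implementation; any backend would work the same way.

use burn::nn::Tanh;
use burn::tensor::Tensor;
use burn_ndarray::NdArray; // assumed backend crate, chosen for this sketch only

fn main() {
    // Tanh is stateless, so construction takes no configuration.
    let layer = Tanh::new();

    // A 2D tensor; forward works for any dimensionality D.
    let device = Default::default();
    let input = Tensor::<NdArray<f32>, 2>::from_floats([[-2.0, 0.0, 2.0]], &device);

    // Output has the same shape as the input, with values in (-1, 1).
    let output = layer.forward(input);
    println!("{:?}", output.to_data());
}

Because Tanh derives Module, it can also be stored as a field of a larger Module-derived struct and composed with the other nn layers, just like the existing relu and swiglu wrappers.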
