// Targeted by JavaCPP version 1.5.9: DO NOT EDIT THIS FILE

package org.bytedeco.pytorch;

import org.bytedeco.pytorch.Allocator;
import org.bytedeco.pytorch.Function;
import org.bytedeco.pytorch.Module;

import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;

import static org.bytedeco.javacpp.presets.javacpp.*;
import static org.bytedeco.openblas.global.openblas_nolapack.*;
import static org.bytedeco.openblas.global.openblas.*;
import static org.bytedeco.pytorch.global.torch.*;

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ReLU ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/** Applies the ReLU function element-wise.
* See https://pytorch.org/docs/master/nn.html#torch.nn.ReLU to learn
* about the exact behavior of this module.
*
 * See the documentation for the {@code torch::nn::ReLUOptions} class to learn what
* constructor arguments are supported for this module.
*
* Example:
* {@code
* ReLU model(ReLUOptions().inplace(true));
* } */
@Namespace("torch::nn") @NoOffset @Properties(inherit = org.bytedeco.pytorch.presets.torch.class)
public class ReLUImpl extends ReLUImplCloneable {
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public ReLUImpl(Pointer p) { super(p); }
    /** Native array allocator. Access with {@link Pointer#position(long)}. */
    public ReLUImpl(long size) { super((Pointer)null); allocateArray(size); }
    private native void allocateArray(long size);
    @Override public ReLUImpl position(long position) {
        return (ReLUImpl)super.position(position);
    }
    @Override public ReLUImpl getPointer(long i) {
        return new ReLUImpl((Pointer)this).offsetAddress(i);
    }
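    /** Constructs the module from the given {@code options_} (defaults to {@code torch::nn::ReLUOptions{}}). */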
    public ReLUImpl(@Const @ByRef(nullValue = "torch::nn::ReLUOptions{}") ReLUOptions options_) { super((Pointer)null); allocate(options_); }
    @NoDeallocator private native void allocate(@Const @ByRef(nullValue = "torch::nn::ReLUOptions{}") ReLUOptions options_);
    public ReLUImpl() { super((Pointer)null); allocate(); }
    @NoDeallocator private native void allocate();
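    /** Applies the rectified linear unit, {@code max(0, x)}, element-wise to {@code input}
     *  and returns the resulting tensor. */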
    public native @ByVal Tensor forward(@ByVal Tensor input);
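    /** Resets the module's internal state. {@code ReLU} holds no parameters or buffers,
     *  so this is effectively a no-op. */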
    public native void reset();
    /** Pretty prints the {@code ReLU} module into the given {@code stream}. */
    public native void pretty_print(@Cast("std::ostream*") @ByRef Pointer stream);
    /** The options with which this {@code Module} was constructed. */
    public native @ByRef ReLUOptions options(); public native ReLUImpl options(ReLUOptions setter);
}
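
// Usage sketch (illustrative, not part of the generated file): building the module in
// Java and running a forward pass. The ReLUOptions(boolean) constructor and the
// randn(long...) factory in org.bytedeco.pytorch.global.torch are assumed to match
// the installed presets version; verify the exact signatures before relying on them.
//
//   import org.bytedeco.pytorch.*;
//   import static org.bytedeco.pytorch.global.torch.*;
//
//   ReLUImpl relu = new ReLUImpl(new ReLUOptions(/* inplace = */ false));
//   Tensor input = randn(2, 3);           // 2x3 tensor of standard-normal samples
//   Tensor output = relu.forward(input);  // negative entries clamped to zero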