// Targeted by JavaCPP version 1.5.11: DO NOT EDIT THIS FILE
package org.bytedeco.pytorch;
import org.bytedeco.pytorch.Allocator;
import org.bytedeco.pytorch.Function;
import org.bytedeco.pytorch.Module;
import org.bytedeco.javacpp.annotation.Cast;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;
import static org.bytedeco.javacpp.presets.javacpp.*;
import static org.bytedeco.openblas.global.openblas_nolapack.*;
import static org.bytedeco.openblas.global.openblas.*;
import org.bytedeco.javacpp.chrono.*;
import static org.bytedeco.javacpp.global.chrono.*;
import static org.bytedeco.pytorch.global.torch.*;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ CosineEmbeddingLoss ~~~~~~~~~~~~~~~~~~~~~~
/** Creates a criterion that measures the loss given input tensors
* {@code input1}, {@code input2}, and a {@code Tensor} label {@code target} with values 1 or
* -1. This is used for measuring whether two inputs are similar or
* dissimilar, using the cosine distance, and is typically used for learning
* nonlinear embeddings or semi-supervised learning.
* See https://pytorch.org/docs/main/nn.html#torch.nn.CosineEmbeddingLoss to
* learn about the exact behavior of this module.
*
* See the documentation for {@code torch::nn::CosineEmbeddingLossOptions} class to
* learn what constructor arguments are supported for this module.
*
* Example:
* {@code
* CosineEmbeddingLoss model(CosineEmbeddingLossOptions().margin(0.5));
* }
*/
@Namespace("torch::nn") @NoOffset @Properties(inherit = org.bytedeco.pytorch.presets.torch.class)
public class CosineEmbeddingLossImpl extends CosineEmbeddingLossImplCloneable {
    static { Loader.load(); }

    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public CosineEmbeddingLossImpl(Pointer p) { super(p); }

    public CosineEmbeddingLossImpl(@ByVal(nullValue = "torch::nn::CosineEmbeddingLossOptions{}") CosineEmbeddingLossOptions options_) { super((Pointer)null); allocate(options_); }
    @SharedPtr @Name("std::make_shared") private native void allocate(@ByVal(nullValue = "torch::nn::CosineEmbeddingLossOptions{}") CosineEmbeddingLossOptions options_);

    public CosineEmbeddingLossImpl() { super((Pointer)null); allocate(); }
    @SharedPtr @Name("std::make_shared") private native void allocate();
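
    /** Re-initializes the module's state. {@code CosineEmbeddingLoss} holds no learnable
     *  parameters or buffers, so there is nothing to re-initialize here. */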
    public native void reset();

    /** Pretty prints the {@code CosineEmbeddingLoss} module into the given {@code stream}. */
    public native void pretty_print(@Cast("std::ostream*") @ByRef Pointer stream);
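
    /** Computes the cosine embedding loss from {@code input1}, {@code input2}, and a
     *  {@code target} tensor of values 1 (similar) or -1 (dissimilar). */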
    public native @ByVal Tensor forward(
            @Const @ByRef Tensor input1,
            @Const @ByRef Tensor input2,
            @Const @ByRef Tensor target);

    /** The options with which this {@code Module} was constructed. */
    public native @ByRef CosineEmbeddingLossOptions options();
    public native CosineEmbeddingLossImpl options(CosineEmbeddingLossOptions setter);
}
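
// Usage sketch (editorial addition, not part of the generated bindings): builds the
// module with default options and computes the loss for a small batch. The randn/ones
// factory overloads taking long... sizes, and the item()/toDouble() accessors, are
// assumed from the statically imported org.bytedeco.pytorch.global.torch and Scalar
// bindings; a non-default margin would be configured through CosineEmbeddingLossOptions,
// as in the C++ example in the Javadoc above. The class name is purely illustrative.
class CosineEmbeddingLossExample {
    public static void main(String[] args) {
        // Default options: margin = 0, mean reduction.
        CosineEmbeddingLossImpl criterion = new CosineEmbeddingLossImpl();

        // Two batches of 3 embeddings of size 128 and a +1/-1 label per pair.
        Tensor input1 = randn(3, 128);
        Tensor input2 = randn(3, 128);
        Tensor target = ones(3);   // +1 = similar; target.neg() would mark pairs dissimilar

        // forward() returns a scalar Tensor (mean over the batch by default).
        Tensor loss = criterion.forward(input1, input2, target);
        System.out.println(loss.item().toDouble());
    }
}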