// Targeted by JavaCPP version 1.5.9: DO NOT EDIT THIS FILE
package org.bytedeco.pytorch;
import org.bytedeco.pytorch.Allocator;
import org.bytedeco.pytorch.Function;
import org.bytedeco.pytorch.Module;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;
import static org.bytedeco.javacpp.presets.javacpp.*;
import static org.bytedeco.openblas.global.openblas_nolapack.*;
import static org.bytedeco.openblas.global.openblas.*;
import static org.bytedeco.pytorch.global.torch.*;

/** A {@code Sampler} that selects a subset of indices to sample from and defines a
* sampling behavior. In a distributed setting, this selects a subset of the
* indices depending on the provided num_replicas and rank parameters. The
* {@code Sampler} performs a rounding operation based on the {@code allow_duplicates}
* parameter to decide the local sample count. */
@Name("torch::data::samplers::DistributedSampler >") @NoOffset @Properties(inherit = org.bytedeco.pytorch.presets.torch.class)
public class DistributedSampler extends Sampler {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public DistributedSampler(Pointer p) { super(p); }
/** Set the epoch for the current enumeration. This can be used to alter the
* sample selection and shuffling behavior. */
public native void set_epoch(@Cast("size_t") long epoch);
public native @Cast("size_t") long epoch();
}
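
// Sketch of the local-sample-count rounding described in the class comment,
// mirroring the underlying C++ implementation in
// torch/data/samplers/distributed.h. The class and method names here are
// illustrative, not part of the generated binding.
class LocalSampleCountSketch {
  // With allow_duplicates, every replica draws ceil(size / num_replicas)
  // samples, so some indices may be seen by more than one replica; without
  // it, every replica draws floor(size / num_replicas) and trailing indices
  // are dropped.
  static long localSampleCount(long size, long numReplicas, boolean allowDuplicates) {
    return allowDuplicates ? (size + numReplicas - 1) / numReplicas
                           : size / numReplicas;
  }

  public static void main(String[] args) {
    System.out.println(localSampleCount(10, 3, true));  // 4
    System.out.println(localSampleCount(10, 3, false)); // 3
  }
}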
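
// Usage sketch for set_epoch(): DistributedSampler itself is the base type,
// so this uses the concrete DistributedRandomSampler binding generated by the
// same presets. The constructor order (size, num_replicas, rank,
// allow_duplicates) and the next()/has_value() signatures follow the
// underlying C++ API and are assumptions about this binding, not guarantees.
class DistributedSamplerUsageSketch {
  public static void main(String[] args) {
    // Rank 0 of 2 replicas over a 10-element dataset, duplicates allowed,
    // so this rank draws ceil(10 / 2) = 5 indices per epoch.
    DistributedRandomSampler sampler = new DistributedRandomSampler(10, 2, 0, true);
    for (long epoch = 0; epoch < 3; epoch++) {
      sampler.set_epoch(epoch); // reseed shuffling so each epoch differs
      sampler.reset();          // rebuild this rank's index subset
      SizeTVectorOptional batch = sampler.next(4);
      while (batch.has_value()) {
        // batch holds up to 4 dataset indices local to this rank
        batch = sampler.next(4);
      }
    }
  }
}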