org.bytedeco.pytorch.AOTIModelContainerRunnerCpu

// Targeted by JavaCPP version 1.5.11: DO NOT EDIT THIS FILE

package org.bytedeco.pytorch;

import org.bytedeco.pytorch.Allocator;
import org.bytedeco.pytorch.Function;
import org.bytedeco.pytorch.Module;
import org.bytedeco.javacpp.annotation.Cast;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;

import static org.bytedeco.javacpp.presets.javacpp.*;
import static org.bytedeco.openblas.global.openblas_nolapack.*;
import static org.bytedeco.openblas.global.openblas.*;
import org.bytedeco.javacpp.chrono.*;
import static org.bytedeco.javacpp.global.chrono.*;

import static org.bytedeco.pytorch.global.torch.*;

@Namespace("torch::inductor") @Properties(inherit = org.bytedeco.pytorch.presets.torch.class)
public class AOTIModelContainerRunnerCpu extends AOTIModelContainerRunner {
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public AOTIModelContainerRunnerCpu(Pointer p) { super(p); }

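  /** Constructors: load an AOTInductor-compiled model from the shared library at
   *  model_so_path; num_models sets how many model instances the container holds (default 1). */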
  public AOTIModelContainerRunnerCpu(
        @StdString BytePointer model_so_path,
        @Cast("size_t") long num_models/*=1*/) { super((Pointer)null); allocate(model_so_path, num_models); }
  private native void allocate(
        @StdString BytePointer model_so_path,
        @Cast("size_t") long num_models/*=1*/);
  public AOTIModelContainerRunnerCpu(
        @StdString BytePointer model_so_path) { super((Pointer)null); allocate(model_so_path); }
  private native void allocate(
        @StdString BytePointer model_so_path);
  public AOTIModelContainerRunnerCpu(
        @StdString String model_so_path,
        @Cast("size_t") long num_models/*=1*/) { super((Pointer)null); allocate(model_so_path, num_models); }
  private native void allocate(
        @StdString String model_so_path,
        @Cast("size_t") long num_models/*=1*/);
  public AOTIModelContainerRunnerCpu(
        @StdString String model_so_path) { super((Pointer)null); allocate(model_so_path); }
  private native void allocate(
        @StdString String model_so_path);

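  /** Runs the loaded AOT-compiled model on the given inputs and returns its outputs. */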
  public native @ByVal TensorVector run(@ByRef TensorVector inputs);
}
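
For reference, a minimal usage sketch (not part of the generated file above): it assumes a shared library already produced by PyTorch's AOTInductor ahead-of-time compilation, a hypothetical path "model.so", an input shape of 1x3x224x224 chosen only for illustration, and that a varargs ones(long...) factory is available from org.bytedeco.pytorch.global.torch.

import org.bytedeco.pytorch.*;
import static org.bytedeco.pytorch.global.torch.*;

public class AOTIRunnerCpuExample {
    public static void main(String[] args) {
        // Hypothetical path to a shared library produced by AOTInductor ahead-of-time compilation
        String modelSoPath = "model.so";

        // Load the compiled model container for CPU execution (num_models defaults to 1)
        AOTIModelContainerRunnerCpu runner = new AOTIModelContainerRunnerCpu(modelSoPath);

        // Build the inputs; the shape is illustrative and must match what the model was exported with
        // (ones(...) is assumed to be the usual varargs tensor factory from global.torch)
        Tensor input = ones(1, 3, 224, 224);
        TensorVector inputs = new TensorVector(input);

        // Run the compiled model and inspect the outputs
        TensorVector outputs = runner.run(inputs);
        System.out.println("Number of outputs: " + outputs.size());
    }
}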



