// Targeted by JavaCPP version 1.5.7: DO NOT EDIT THIS FILE

package org.bytedeco.tensorflowlite;

import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;

import static org.bytedeco.tensorflowlite.global.tensorflowlite.*;


/** Build an interpreter capable of interpreting {@code model}.
 * 
 *  {@code model}: A model whose lifetime must be at least as long as any
 *    interpreter(s) created by the builder. In principle multiple interpreters
 *    can be made from a single model.
 *  {@code op_resolver}: An instance that implements the {@code OpResolver} interface, which
 *    maps custom op names and builtin op codes to op registrations. The
 *    lifetime of the provided {@code op_resolver} object must be at least as long as
 *    the {@code InterpreterBuilder}; unlike {@code model} and {@code error_reporter}, the
 *    {@code op_resolver} does not need to exist for the duration of any created
 *    {@code Interpreter} objects.
 *  {@code error_reporter}: a functor that is called to report errors and that
 *    handles printf-style var-arg semantics. The lifetime of the
 *    {@code error_reporter} object must be at least as long as that of the
 *    {@code Interpreter} created by {@code operator()}.
 * 
 *  Returns kTfLiteOk when successful and sets {@code interpreter} to a valid
 *  Interpreter. Note: the user must ensure that the lifetime of the model (and
 *  error reporter, if provided) is at least as long as the interpreter's
 *  lifetime; a single model instance may safely be used with multiple
 *  interpreters. */
@Namespace("tflite") @NoOffset @Properties(inherit = org.bytedeco.tensorflowlite.presets.tensorflowlite.class)
public class InterpreterBuilder extends Pointer {
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public InterpreterBuilder(Pointer p) { super(p); }

  /** For this constructor, the ErrorReporter will be extracted from the
   *  FlatBufferModel. */
  public InterpreterBuilder(@Const @ByRef FlatBufferModel model,
                       @Const @ByRef OpResolver op_resolver) { super((Pointer)null); allocate(model, op_resolver); }
  private native void allocate(@Const @ByRef FlatBufferModel model,
                       @Const @ByRef OpResolver op_resolver);
  /** Builds an interpreter given only the raw flatbuffer Model object (instead
   *  of a FlatBufferModel). Mostly used for testing.
   *  If {@code error_reporter} is null, then DefaultErrorReporter() is used. */
  public InterpreterBuilder(@Cast("const tflite::Model*") Pointer model,
                       @Const @ByRef OpResolver op_resolver,
                       ErrorReporter error_reporter/*=tflite::DefaultErrorReporter()*/) { super((Pointer)null); allocate(model, op_resolver, error_reporter); }
  private native void allocate(@Cast("const tflite::Model*") Pointer model,
                       @Const @ByRef OpResolver op_resolver,
                       ErrorReporter error_reporter/*=tflite::DefaultErrorReporter()*/);
  public InterpreterBuilder(@Cast("const tflite::Model*") Pointer model,
                       @Const @ByRef OpResolver op_resolver) { super((Pointer)null); allocate(model, op_resolver); }
  private native void allocate(@Cast("const tflite::Model*") Pointer model,
                       @Const @ByRef OpResolver op_resolver);

  /** Builds an interpreter and stores it in {@code *interpreter}.
   *  On success, returns kTfLiteOk and sets {@code *interpreter} to a valid
   *  Interpreter.
   *  On failure, returns an error status and sets {@code *interpreter} to nullptr. */
  public native @Cast("TfLiteStatus") @Name("operator ()") int apply(@UniquePtr Interpreter interpreter);

  /** Same as above, but also sets the number of CPU threads to use
   *  (overriding any previous call to SetNumThreads).
   *  Deprecated: use the SetNumThreads method instead. */
  public native @Cast("TfLiteStatus") @Name("operator ()") int apply(@UniquePtr Interpreter interpreter,
                            int num_threads);

  /** Sets the number of CPU threads to use for the interpreter.
   *  Returns kTfLiteOk on success, kTfLiteError on error. */
  public native @Cast("TfLiteStatus") int SetNumThreads(int num_threads);

  /** Enables preserving intermediates for debugging. Otherwise, by default
   *  intermediates are undefined due to memory planning and reuse. */
  public native @ByRef InterpreterBuilder PreserveAllTensorsExperimental();
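  // Example (illustrative, not generated): because this method returns the builder
  // itself, calls can be chained, e.g.
  //   builder.PreserveAllTensorsExperimental().SetNumThreads(2);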

  /** Any delegates added with AddDelegate will be applied to the Interpreter
   *  generated by operator(), in the order that they were added.  (The delegate
   *  parameter passed to AddDelegate should be non-null, otherwise an error
   *  will be reported, and the call to AddDelegate will have no other effect.)
   *  The lifetime of the delegate must be at least as long as the lifetime of
   *  any Interpreter generated by this InterpreterBuilder. */
  public native void AddDelegate(TfLiteDelegate delegate);
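
  /* Illustrative sketch (not part of the generated bindings): delegates registered
     here are applied when operator() builds the Interpreter, in the order they were
     added. The delegate instance below is hypothetical and must outlive any
     Interpreter built by this builder:

       TfLiteDelegate delegate = ...; // obtained from a concrete delegate implementation
       builder.AddDelegate(delegate);
       builder.apply(interpreter);    // the delegate is applied during this call
  */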
}
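
// ---------------------------------------------------------------------------
// Usage sketch (illustrative, not part of the JavaCPP-generated bindings above).
// A minimal end-to-end flow assuming a .tflite model path is passed as the first
// command-line argument and that the model uses only builtin ops; the class name,
// thread count, and error handling are assumptions for illustration.
// ---------------------------------------------------------------------------
class InterpreterBuilderExample {
    public static void main(String[] args) {
        String modelPath = args[0];

        // Load the model; it must outlive every Interpreter built from it.
        FlatBufferModel model = FlatBufferModel.BuildFromFile(modelPath);
        if (model == null || model.isNull()) {
            throw new RuntimeException("Failed to load model: " + modelPath);
        }

        // Map builtin op codes to registrations and configure the builder.
        BuiltinOpResolver resolver = new BuiltinOpResolver();
        InterpreterBuilder builder = new InterpreterBuilder(model, resolver);
        builder.SetNumThreads(2); // optional: run CPU kernels on two threads

        // Build the interpreter via operator(), mapped here as apply().
        Interpreter interpreter = new Interpreter((Pointer) null);
        if (builder.apply(interpreter) != kTfLiteOk || interpreter.isNull()) {
            throw new RuntimeException("Failed to build interpreter");
        }

        // Allocate tensor buffers, fill inputs (model-specific, omitted), and run.
        if (interpreter.AllocateTensors() != kTfLiteOk) {
            throw new RuntimeException("Failed to allocate tensors");
        }
        if (interpreter.Invoke() != kTfLiteOk) {
            throw new RuntimeException("Invoke failed");
        }
        // Read outputs here (model-specific, omitted).
    }
}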