com.clarifai.grpc.api.AndOrBuilder

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: proto/clarifai/api/resources.proto

package com.clarifai.grpc.api;

public interface AndOrBuilder extends
    // @@protoc_insertion_point(interface_extends:clarifai.api.And)
    com.google.protobuf.MessageOrBuilder {

  /**
   * 
   * FILTER by input.data... information.
   * This can include human provided concepts, geo location info, metadata, etc.
   * This is effectively searching over only the trusted annotation attached to an input in your
   * app. To search by more specific annotation fields use the Annotation object here.
   * ########## Supported fields ##########
   *  - data.concepts[].id
   *  - data.concepts[].name
   *  - data.concepts[].value
   *  - data.geo.geo_box[].geo_point.latitude
   *  - data.geo.geo_box[].geo_point.longitude
   *  - data.geo.geo_limit.type
   *  - data.geo.geo_limit.value
   *  - data.geo.geo_point.latitude
   *  - data.geo.geo_point.longitude
   *  - data.image.url
   *  - data.metadata.fields - filter by metadata. metadata key&value fields are OR-ed.
   *  - dataset_ids[] - filter by dataset IDs
   *  - id - filter by input ID
   *  - status.code - filter by input status
   * 
   *
   * .clarifai.api.Input input = 1;
   * @return Whether the input field is set.
   */
  boolean hasInput();
  /**
   *
   * FILTER by input.data... information.
   * This can include human provided concepts, geo location info, metadata, etc.
   * This is effectively searching over only the trusted annotation attached to an input in your
   * app. To search by more specific annotation fields use the Annotation object here.
   * ########## Supported fields ##########
   *  - data.concepts[].id
   *  - data.concepts[].name
   *  - data.concepts[].value
   *  - data.geo.geo_box[].geo_point.latitude
   *  - data.geo.geo_box[].geo_point.longitude
   *  - data.geo.geo_limit.type
   *  - data.geo.geo_limit.value
   *  - data.geo.geo_point.latitude
   *  - data.geo.geo_point.longitude
   *  - data.image.url
   *  - data.metadata.fields - filter by metadata. metadata key&value fields are OR-ed.
   *  - dataset_ids[] - filter by dataset IDs
   *  - id - filter by input ID
   *  - status.code - filter by input status
   * 
   *
   * .clarifai.api.Input input = 1;
   * @return The input.
   */
  com.clarifai.grpc.api.Input getInput();
  /**
   *
   * FILTER by input.data... information.
   * This can include human provided concepts, geo location info, metadata, etc.
   * This is effectively searching over only the trusted annotation attached to an input in your
   * app. To search by more specific annotation fields use the Annotation object here.
   * ########## Supported fields ##########
   *  - data.concepts[].id
   *  - data.concepts[].name
   *  - data.concepts[].value
   *  - data.geo.geo_box[].geo_point.latitude
   *  - data.geo.geo_box[].geo_point.longitude
   *  - data.geo.geo_limit.type
   *  - data.geo.geo_limit.value
   *  - data.geo.geo_point.latitude
   *  - data.geo.geo_point.longitude
   *  - data.image.url
   *  - data.metadata.fields - filter by metadata. metadata key&value fields are OR-ed.
   *  - dataset_ids[] - filter by dataset IDs
   *  - id - filter by input ID
   *  - status.code - filter by input status
   * 
   *
   * .clarifai.api.Input input = 1;
   */
  com.clarifai.grpc.api.InputOrBuilder getInputOrBuilder();
  /**
   *
   * RANK based predicted outputs from models such as custom trained models, pre-trained models,
   * etc. This is also where you enter the image url for a visual search because what we're asking
   * the system to do is find output embedding most visually similar to the provided input (that
   * input being in And.output.input.data.image.url for example). This will return the Hits
   * sorted by visual similarity (1.0 being very similar or exact match and 0.0 being very
   * dissimilar). For a search by Output concept, this means we're asking the system to rank
   * the Hits by confidence of our model's predicted Outputs. So, for example, if the model
   * predicts that an image is 0.95 likely to contain a "dog", that should relate directly
   * to the score returned if you search for Output concept "dog" in your query. This provides
   * a natural ranking to search results based on confidence of predictions from the models and
   * is used when ANDing multiple of these types of RANK by Output queries together as well.
   * ########## Supported fields ##########
   *  - data.clusters[].id
   *  - data.concepts[].id
   *  - data.concepts[].name
   *  - data.concepts[].value
   *  - input.data.image.base64[]
   *  - input.data.image.url
   *  - input.id
   * 
   *
   * .clarifai.api.Output output = 2;
   * @return Whether the output field is set.
   */
  boolean hasOutput();
  /**
   *
   * RANK based predicted outputs from models such as custom trained models, pre-trained models,
   * etc. This is also where you enter the image url for a visual search because what we're asking
   * the system to do is find output embedding most visually similar to the provided input (that
   * input being in And.output.input.data.image.url for example). This will return the Hits
   * sorted by visual similarity (1.0 being very similar or exact match and 0.0 being very
   * dissimilar). For a search by Output concept, this means we're asking the system to rank
   * the Hits by confidence of our model's predicted Outputs. So, for example, if the model
   * predicts that an image is 0.95 likely to contain a "dog", that should relate directly
   * to the score returned if you search for Output concept "dog" in your query. This provides
   * a natural ranking to search results based on confidence of predictions from the models and
   * is used when ANDing multiple of these types of RANK by Output queries together as well.
   * ########## Supported fields ##########
   *  - data.clusters[].id
   *  - data.concepts[].id
   *  - data.concepts[].name
   *  - data.concepts[].value
   *  - input.data.image.base64[]
   *  - input.data.image.url
   *  - input.id
   * 
   *
   * .clarifai.api.Output output = 2;
   * @return The output.
   */
  com.clarifai.grpc.api.Output getOutput();
  /**
   *
   * RANK based predicted outputs from models such as custom trained models, pre-trained models,
   * etc. This is also where you enter the image url for a visual search because what we're asking
   * the system to do is find output embedding most visually similar to the provided input (that
   * input being in And.output.input.data.image.url for example). This will return the Hits
   * sorted by visual similarity (1.0 being very similar or exact match and 0.0 being very
   * dissimilar). For a search by Output concept, this means we're asking the system to rank
   * the Hits by confidence of our model's predicted Outputs. So, for example, if the model
   * predicts that an image is 0.95 likely to contain a "dog", that should relate directly
   * to the score returned if you search for Output concept "dog" in your query. This provides
   * a natural ranking to search results based on confidence of predictions from the models and
   * is used when ANDing multiple of these types of RANK by Output queries together as well.
   * ########## Supported fields ##########
   *  - data.clusters[].id
   *  - data.concepts[].id
   *  - data.concepts[].name
   *  - data.concepts[].value
   *  - input.data.image.base64[]
   *  - input.data.image.url
   *  - input.id
   * 
   *
   * .clarifai.api.Output output = 2;
   */
  com.clarifai.grpc.api.OutputOrBuilder getOutputOrBuilder();
  /**
   *
   * If true, this will flip the meaning of this part of the
   * query. This allows for queries such as dog AND ! metadata=={"blah":"value"}
   * 
   *
   * bool negate = 3;
   * @return The negate.
   */
  boolean getNegate();
  /**
   *
   * FILTER by annotation information. This is more flexible than just filtering by
   * Input information because in the general case each input can have several annotations.
   * Some example use cases for filtering by annotations:
   * 1) find all the inputs annotated "dog" by worker_id = "XYZ"
   * 2) find all the annotations associated with embed_model_version_id = "123"
   * 3) find all the annotations that are trusted, etc.
   * Since all the annotations under the hood are joined to the embedding model's annotation
   * using worker_id's, annotations of other models like cluster models or concept models should be
   * combinable with queries like visual search (a query with Output filled in).
   * ########## Supported fields ##########
   *  - annotation_info.fields - filter by annotation info
   *  - data.concepts[].id
   *  - data.concepts[].name
   *  - data.concepts[].value
   *  - data.geo.geo_box[].geo_point.latitude
   *  - data.geo.geo_box[].geo_point.longitude
   *  - data.geo.geo_limit.type
   *  - data.geo.geo_limit.value
   *  - data.geo.geo_point.latitude
   *  - data.geo.geo_point.longitude
   *  - data.image.url
   *  - data.metadata.fields - filter by metadata. metadata key&value fields are OR-ed.
   *  - input_id
   *  - input_level
   *  - model_version_id
   *  - status.code
   *  - task_id
   *  - trusted
   *  - user_id
   * 
   *
   * .clarifai.api.Annotation annotation = 4;
   * @return Whether the annotation field is set.
   */
  boolean hasAnnotation();
  /**
   *
   * FILTER by annotation information. This is more flexible than just filtering by
   * Input information because in the general case each input can have several annotations.
   * Some example use cases for filtering by annotations:
   * 1) find all the inputs annotated "dog" by worker_id = "XYZ"
   * 2) find all the annotations associated with embed_model_version_id = "123"
   * 3) find all the annotations that are trusted, etc.
   * Since all the annotations under the hood are joined to the embedding model's annotation
   * using worker_id's, annotations of other models like cluster models or concept models should be
   * combinable with queries like visual search (a query with Output filled in).
   * ########## Supported fields ##########
   *  - annotation_info.fields - filter by annotation info
   *  - data.concepts[].id
   *  - data.concepts[].name
   *  - data.concepts[].value
   *  - data.geo.geo_box[].geo_point.latitude
   *  - data.geo.geo_box[].geo_point.longitude
   *  - data.geo.geo_limit.type
   *  - data.geo.geo_limit.value
   *  - data.geo.geo_point.latitude
   *  - data.geo.geo_point.longitude
   *  - data.image.url
   *  - data.metadata.fields - filter by metadata. metadata key&value fields are OR-ed.
   *  - input_id
   *  - input_level
   *  - model_version_id
   *  - status.code
   *  - task_id
   *  - trusted
   *  - user_id
   * 
   *
   * .clarifai.api.Annotation annotation = 4;
   * @return The annotation.
   */
  com.clarifai.grpc.api.Annotation getAnnotation();
  /**
   *
   * FILTER by annotation information. This is more flexible than just filtering by
   * Input information because in the general case each input can have several annotations.
   * Some example use cases for filtering by annotations:
   * 1) find all the inputs annotated "dog" by worker_id = "XYZ"
   * 2) find all the annotations associated with embed_model_version_id = "123"
   * 3) find all the annotations that are trusted, etc.
   * Since all the annotations under the hood are joined to the embedding model's annotation
   * using worker_id's, annotations of other models like cluster models or concept models should be
   * combinable with queries like visual search (a query with Output filled in).
   * ########## Supported fields ##########
   *  - annotation_info.fields - filter by annotation info
   *  - data.concepts[].id
   *  - data.concepts[].name
   *  - data.concepts[].value
   *  - data.geo.geo_box[].geo_point.latitude
   *  - data.geo.geo_box[].geo_point.longitude
   *  - data.geo.geo_limit.type
   *  - data.geo.geo_limit.value
   *  - data.geo.geo_point.latitude
   *  - data.geo.geo_point.longitude
   *  - data.image.url
   *  - data.metadata.fields - filter by metadata. metadata key&value fields are OR-ed.
   *  - input_id
   *  - input_level
   *  - model_version_id
   *  - status.code
   *  - task_id
   *  - trusted
   *  - user_id
   * 
   *
   * .clarifai.api.Annotation annotation = 4;
   */
  com.clarifai.grpc.api.AnnotationOrBuilder getAnnotationOrBuilder();
}
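
The interface above only declares read-side accessors. As a rough illustration of how the corresponding And messages might be built, here is a minimal sketch using the builders that the protocol buffer compiler generates alongside this interface (And.newBuilder(), setInput(), setNegate(), and so on). The Input, Data and Concept message types and their concepts/metadata fields are assumed from the same resources.proto rather than shown in this file, so treat this as a sketch under those assumptions, not a definitive usage guide.

import com.clarifai.grpc.api.And;
import com.clarifai.grpc.api.Concept;
import com.clarifai.grpc.api.Data;
import com.clarifai.grpc.api.Input;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;

public class AndFilterSketch {
  public static void main(String[] args) {
    // FILTER by input.data: match inputs annotated with the concept "dog".
    And conceptFilter = And.newBuilder()
        .setInput(Input.newBuilder()
            .setData(Data.newBuilder()
                .addConcepts(Concept.newBuilder()
                    .setName("dog")
                    .setValue(1f))))
        .build();

    // Negated metadata FILTER: the dog AND ! metadata=={"blah":"value"} shape
    // from the negate field comment.
    And negatedMetadataFilter = And.newBuilder()
        .setInput(Input.newBuilder()
            .setData(Data.newBuilder()
                .setMetadata(Struct.newBuilder()
                    .putFields("blah", Value.newBuilder().setStringValue("value").build()))))
        .setNegate(true)  // flips the meaning of this clause
        .build();

    // Read side, via the accessors declared in AndOrBuilder.
    System.out.println(conceptFilter.hasInput());                                    // true
    System.out.println(conceptFilter.getInput().getData().getConcepts(0).getName()); // dog
    System.out.println(negatedMetadataFilter.getNegate());                           // true
  }
}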




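In the same hedged spirit, here is a sketch of the RANK-by-Output and FILTER-by-Annotation clauses described in the field comments. The Output, Image and Annotation builder methods used below (setOutput(), setImage(), setUrl(), setUserId()) are assumed from the Clarifai resources.proto rather than taken from this file, and the resulting And clauses would normally be combined inside the search Query message, which is omitted here.

import com.clarifai.grpc.api.And;
import com.clarifai.grpc.api.Annotation;
import com.clarifai.grpc.api.Concept;
import com.clarifai.grpc.api.Data;
import com.clarifai.grpc.api.Image;
import com.clarifai.grpc.api.Input;
import com.clarifai.grpc.api.Output;

public class AndRankSketch {
  public static void main(String[] args) {
    // RANK by Output: visual search against a query image URL
    // (the And.output.input.data.image.url path mentioned in the field comment).
    And visualSearch = And.newBuilder()
        .setOutput(Output.newBuilder()
            .setInput(Input.newBuilder()
                .setData(Data.newBuilder()
                    .setImage(Image.newBuilder()
                        .setUrl("https://example.com/query.jpg")))))
        .build();

    // RANK by Output concept: hits ordered by the model's confidence for "dog".
    And conceptRank = And.newBuilder()
        .setOutput(Output.newBuilder()
            .setData(Data.newBuilder()
                .addConcepts(Concept.newBuilder()
                    .setName("dog")
                    .setValue(1f))))
        .build();

    // FILTER by Annotation: inputs annotated "dog" by a particular user,
    // mirroring the use case listed in the annotation field comment.
    And annotationFilter = And.newBuilder()
        .setAnnotation(Annotation.newBuilder()
            .setUserId("XYZ")
            .setData(Data.newBuilder()
                .addConcepts(Concept.newBuilder().setName("dog").setValue(1f))))
        .build();

    // Read side, via the accessors declared in AndOrBuilder.
    System.out.println(visualSearch.hasOutput());                                   // true
    System.out.println(conceptRank.getOutput().getData().getConcepts(0).getName()); // dog
    System.out.println(annotationFilter.hasAnnotation());                           // true
  }
}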