com.google.cloud.vision.v1.FaceAnnotation Maven / Gradle / Ivy
GRPC library for grpc-google-cloud-vision-v1
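The listing below is the protoc-generated Java source for the FaceAnnotation message. As a quick orientation before the generated code, here is a minimal read-only sketch; the `response` variable and the surrounding example class are assumptions for illustration and are not part of this artifact:

import com.google.cloud.vision.v1.AnnotateImageResponse;
import com.google.cloud.vision.v1.FaceAnnotation;
import com.google.cloud.vision.v1.Likelihood;

public class FaceAnnotationExample {
  // Prints a few fields from each detected face in a Vision API response.
  static void printFaces(AnnotateImageResponse response) {
    for (FaceAnnotation face : response.getFaceAnnotationsList()) {
      System.out.printf("detection confidence: %.2f%n", face.getDetectionConfidence());
      System.out.printf("roll/pan/tilt: %.1f / %.1f / %.1f%n",
          face.getRollAngle(), face.getPanAngle(), face.getTiltAngle());
      if (face.getJoyLikelihood() == Likelihood.VERY_LIKELY) {
        System.out.println("face appears joyful");
      }
      System.out.println("landmarks detected: " + face.getLandmarksCount());
    }
  }
}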
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1/image_annotator.proto
package com.google.cloud.vision.v1;
/**
*
* A face annotation object contains the results of face detection.
*
*
* Protobuf type {@code google.cloud.vision.v1.FaceAnnotation}
*/
public final class FaceAnnotation extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1.FaceAnnotation)
FaceAnnotationOrBuilder {
// Use FaceAnnotation.newBuilder() to construct.
private FaceAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private FaceAnnotation() {
landmarks_ = java.util.Collections.emptyList();
rollAngle_ = 0F;
panAngle_ = 0F;
tiltAngle_ = 0F;
detectionConfidence_ = 0F;
landmarkingConfidence_ = 0F;
joyLikelihood_ = 0;
sorrowLikelihood_ = 0;
angerLikelihood_ = 0;
surpriseLikelihood_ = 0;
underExposedLikelihood_ = 0;
blurredLikelihood_ = 0;
headwearLikelihood_ = 0;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private FaceAnnotation(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 10: {
com.google.cloud.vision.v1.BoundingPoly.Builder subBuilder = null;
if (boundingPoly_ != null) {
subBuilder = boundingPoly_.toBuilder();
}
boundingPoly_ = input.readMessage(com.google.cloud.vision.v1.BoundingPoly.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(boundingPoly_);
boundingPoly_ = subBuilder.buildPartial();
}
break;
}
case 18: {
com.google.cloud.vision.v1.BoundingPoly.Builder subBuilder = null;
if (fdBoundingPoly_ != null) {
subBuilder = fdBoundingPoly_.toBuilder();
}
fdBoundingPoly_ = input.readMessage(com.google.cloud.vision.v1.BoundingPoly.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(fdBoundingPoly_);
fdBoundingPoly_ = subBuilder.buildPartial();
}
break;
}
case 26: {
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
landmarks_ = new java.util.ArrayList<com.google.cloud.vision.v1.FaceAnnotation.Landmark>();
mutable_bitField0_ |= 0x00000004;
}
landmarks_.add(
input.readMessage(com.google.cloud.vision.v1.FaceAnnotation.Landmark.parser(), extensionRegistry));
break;
}
case 37: {
rollAngle_ = input.readFloat();
break;
}
case 45: {
panAngle_ = input.readFloat();
break;
}
case 53: {
tiltAngle_ = input.readFloat();
break;
}
case 61: {
detectionConfidence_ = input.readFloat();
break;
}
case 69: {
landmarkingConfidence_ = input.readFloat();
break;
}
case 72: {
int rawValue = input.readEnum();
joyLikelihood_ = rawValue;
break;
}
case 80: {
int rawValue = input.readEnum();
sorrowLikelihood_ = rawValue;
break;
}
case 88: {
int rawValue = input.readEnum();
angerLikelihood_ = rawValue;
break;
}
case 96: {
int rawValue = input.readEnum();
surpriseLikelihood_ = rawValue;
break;
}
case 104: {
int rawValue = input.readEnum();
underExposedLikelihood_ = rawValue;
break;
}
case 112: {
int rawValue = input.readEnum();
blurredLikelihood_ = rawValue;
break;
}
case 120: {
int rawValue = input.readEnum();
headwearLikelihood_ = rawValue;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
landmarks_ = java.util.Collections.unmodifiableList(landmarks_);
}
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1.FaceAnnotation.class, com.google.cloud.vision.v1.FaceAnnotation.Builder.class);
}
public interface LandmarkOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.cloud.vision.v1.FaceAnnotation.Landmark)
com.google.protobuf.MessageOrBuilder {
/**
*
* Face landmark type.
*
*
* optional .google.cloud.vision.v1.FaceAnnotation.Landmark.Type type = 3;
*/
int getTypeValue();
/**
*
* Face landmark type.
*
*
* optional .google.cloud.vision.v1.FaceAnnotation.Landmark.Type type = 3;
*/
com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type getType();
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
boolean hasPosition();
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
com.google.cloud.vision.v1.Position getPosition();
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
com.google.cloud.vision.v1.PositionOrBuilder getPositionOrBuilder();
}
/**
*
* A face-specific landmark (for example, a face feature).
* Landmark positions may fall outside the bounds of the image
* when the face is near one or more edges of the image.
* Therefore it is NOT guaranteed that 0 <= x < width or 0 <= y < height.
*
*
* Protobuf type {@code google.cloud.vision.v1.FaceAnnotation.Landmark}
*/
public static final class Landmark extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1.FaceAnnotation.Landmark)
LandmarkOrBuilder {
// Use Landmark.newBuilder() to construct.
private Landmark(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Landmark() {
type_ = 0;
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private Landmark(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 24: {
int rawValue = input.readEnum();
type_ = rawValue;
break;
}
case 34: {
com.google.cloud.vision.v1.Position.Builder subBuilder = null;
if (position_ != null) {
subBuilder = position_.toBuilder();
}
position_ = input.readMessage(com.google.cloud.vision.v1.Position.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(position_);
position_ = subBuilder.buildPartial();
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_Landmark_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_Landmark_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1.FaceAnnotation.Landmark.class, com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder.class);
}
/**
*
* Face landmark (feature) type.
* Left and right are defined from the vantage of the viewer of the image,
* without considering mirror projections typical of photos. So, LEFT_EYE,
* typically, is the person's right eye.
*
*
* Protobuf enum {@code google.cloud.vision.v1.FaceAnnotation.Landmark.Type}
*/
public enum Type
implements com.google.protobuf.ProtocolMessageEnum {
/**
*
* Unknown face landmark detected. Should not be filled.
*
*
* UNKNOWN_LANDMARK = 0;
*/
UNKNOWN_LANDMARK(0),
/**
*
* Left eye.
*
*
* LEFT_EYE = 1;
*/
LEFT_EYE(1),
/**
*
* Right eye.
*
*
* RIGHT_EYE = 2;
*/
RIGHT_EYE(2),
/**
*
* Left of left eyebrow.
*
*
* LEFT_OF_LEFT_EYEBROW = 3;
*/
LEFT_OF_LEFT_EYEBROW(3),
/**
*
* Right of left eyebrow.
*
*
* RIGHT_OF_LEFT_EYEBROW = 4;
*/
RIGHT_OF_LEFT_EYEBROW(4),
/**
*
* Left of right eyebrow.
*
*
* LEFT_OF_RIGHT_EYEBROW = 5;
*/
LEFT_OF_RIGHT_EYEBROW(5),
/**
*
* Right of right eyebrow.
*
*
* RIGHT_OF_RIGHT_EYEBROW = 6;
*/
RIGHT_OF_RIGHT_EYEBROW(6),
/**
*
* Midpoint between eyes.
*
*
* MIDPOINT_BETWEEN_EYES = 7;
*/
MIDPOINT_BETWEEN_EYES(7),
/**
*
* Nose tip.
*
*
* NOSE_TIP = 8;
*/
NOSE_TIP(8),
/**
*
* Upper lip.
*
*
* UPPER_LIP = 9;
*/
UPPER_LIP(9),
/**
*
* Lower lip.
*
*
* LOWER_LIP = 10;
*/
LOWER_LIP(10),
/**
*
* Mouth left.
*
*
* MOUTH_LEFT = 11;
*/
MOUTH_LEFT(11),
/**
*
* Mouth right.
*
*
* MOUTH_RIGHT = 12;
*/
MOUTH_RIGHT(12),
/**
*
* Mouth center.
*
*
* MOUTH_CENTER = 13;
*/
MOUTH_CENTER(13),
/**
*
* Nose, bottom right.
*
*
* NOSE_BOTTOM_RIGHT = 14;
*/
NOSE_BOTTOM_RIGHT(14),
/**
*
* Nose, bottom left.
*
*
* NOSE_BOTTOM_LEFT = 15;
*/
NOSE_BOTTOM_LEFT(15),
/**
*
* Nose, bottom center.
*
*
* NOSE_BOTTOM_CENTER = 16;
*/
NOSE_BOTTOM_CENTER(16),
/**
*
* Left eye, top boundary.
*
*
* LEFT_EYE_TOP_BOUNDARY = 17;
*/
LEFT_EYE_TOP_BOUNDARY(17),
/**
*
* Left eye, right corner.
*
*
* LEFT_EYE_RIGHT_CORNER = 18;
*/
LEFT_EYE_RIGHT_CORNER(18),
/**
*
* Left eye, bottom boundary.
*
*
* LEFT_EYE_BOTTOM_BOUNDARY = 19;
*/
LEFT_EYE_BOTTOM_BOUNDARY(19),
/**
*
* Left eye, left corner.
*
*
* LEFT_EYE_LEFT_CORNER = 20;
*/
LEFT_EYE_LEFT_CORNER(20),
/**
*
* Right eye, top boundary.
*
*
* RIGHT_EYE_TOP_BOUNDARY = 21;
*/
RIGHT_EYE_TOP_BOUNDARY(21),
/**
*
* Right eye, right corner.
*
*
* RIGHT_EYE_RIGHT_CORNER = 22;
*/
RIGHT_EYE_RIGHT_CORNER(22),
/**
*
* Right eye, bottom boundary.
*
*
* RIGHT_EYE_BOTTOM_BOUNDARY = 23;
*/
RIGHT_EYE_BOTTOM_BOUNDARY(23),
/**
*
* Right eye, left corner.
*
*
* RIGHT_EYE_LEFT_CORNER = 24;
*/
RIGHT_EYE_LEFT_CORNER(24),
/**
*
* Left eyebrow, upper midpoint.
*
*
* LEFT_EYEBROW_UPPER_MIDPOINT = 25;
*/
LEFT_EYEBROW_UPPER_MIDPOINT(25),
/**
*
* Right eyebrow, upper midpoint.
*
*
* RIGHT_EYEBROW_UPPER_MIDPOINT = 26;
*/
RIGHT_EYEBROW_UPPER_MIDPOINT(26),
/**
*
* Left ear tragion.
*
*
* LEFT_EAR_TRAGION = 27;
*/
LEFT_EAR_TRAGION(27),
/**
*
* Right ear tragion.
*
*
* RIGHT_EAR_TRAGION = 28;
*/
RIGHT_EAR_TRAGION(28),
/**
*
* Left eye pupil.
*
*
* LEFT_EYE_PUPIL = 29;
*/
LEFT_EYE_PUPIL(29),
/**
*
* Right eye pupil.
*
*
* RIGHT_EYE_PUPIL = 30;
*/
RIGHT_EYE_PUPIL(30),
/**
*
* Forehead glabella.
*
*
* FOREHEAD_GLABELLA = 31;
*/
FOREHEAD_GLABELLA(31),
/**
*
* Chin gnathion.
*
*
* CHIN_GNATHION = 32;
*/
CHIN_GNATHION(32),
/**
*
* Chin left gonion.
*
*
* CHIN_LEFT_GONION = 33;
*/
CHIN_LEFT_GONION(33),
/**
*
* Chin right gonion.
*
*
* CHIN_RIGHT_GONION = 34;
*/
CHIN_RIGHT_GONION(34),
UNRECOGNIZED(-1),
;
/**
*
* Unknown face landmark detected. Should not be filled.
*
*
* UNKNOWN_LANDMARK = 0;
*/
public static final int UNKNOWN_LANDMARK_VALUE = 0;
/**
*
* Left eye.
*
*
* LEFT_EYE = 1;
*/
public static final int LEFT_EYE_VALUE = 1;
/**
*
* Right eye.
*
*
* RIGHT_EYE = 2;
*/
public static final int RIGHT_EYE_VALUE = 2;
/**
*
* Left of left eyebrow.
*
*
* LEFT_OF_LEFT_EYEBROW = 3;
*/
public static final int LEFT_OF_LEFT_EYEBROW_VALUE = 3;
/**
*
* Right of left eyebrow.
*
*
* RIGHT_OF_LEFT_EYEBROW = 4;
*/
public static final int RIGHT_OF_LEFT_EYEBROW_VALUE = 4;
/**
*
* Left of right eyebrow.
*
*
* LEFT_OF_RIGHT_EYEBROW = 5;
*/
public static final int LEFT_OF_RIGHT_EYEBROW_VALUE = 5;
/**
*
* Right of right eyebrow.
*
*
* RIGHT_OF_RIGHT_EYEBROW = 6;
*/
public static final int RIGHT_OF_RIGHT_EYEBROW_VALUE = 6;
/**
*
* Midpoint between eyes.
*
*
* MIDPOINT_BETWEEN_EYES = 7;
*/
public static final int MIDPOINT_BETWEEN_EYES_VALUE = 7;
/**
*
* Nose tip.
*
*
* NOSE_TIP = 8;
*/
public static final int NOSE_TIP_VALUE = 8;
/**
*
* Upper lip.
*
*
* UPPER_LIP = 9;
*/
public static final int UPPER_LIP_VALUE = 9;
/**
*
* Lower lip.
*
*
* LOWER_LIP = 10;
*/
public static final int LOWER_LIP_VALUE = 10;
/**
*
* Mouth left.
*
*
* MOUTH_LEFT = 11;
*/
public static final int MOUTH_LEFT_VALUE = 11;
/**
*
* Mouth right.
*
*
* MOUTH_RIGHT = 12;
*/
public static final int MOUTH_RIGHT_VALUE = 12;
/**
*
* Mouth center.
*
*
* MOUTH_CENTER = 13;
*/
public static final int MOUTH_CENTER_VALUE = 13;
/**
*
* Nose, bottom right.
*
*
* NOSE_BOTTOM_RIGHT = 14;
*/
public static final int NOSE_BOTTOM_RIGHT_VALUE = 14;
/**
*
* Nose, bottom left.
*
*
* NOSE_BOTTOM_LEFT = 15;
*/
public static final int NOSE_BOTTOM_LEFT_VALUE = 15;
/**
*
* Nose, bottom center.
*
*
* NOSE_BOTTOM_CENTER = 16;
*/
public static final int NOSE_BOTTOM_CENTER_VALUE = 16;
/**
*
* Left eye, top boundary.
*
*
* LEFT_EYE_TOP_BOUNDARY = 17;
*/
public static final int LEFT_EYE_TOP_BOUNDARY_VALUE = 17;
/**
*
* Left eye, right corner.
*
*
* LEFT_EYE_RIGHT_CORNER = 18;
*/
public static final int LEFT_EYE_RIGHT_CORNER_VALUE = 18;
/**
*
* Left eye, bottom boundary.
*
*
* LEFT_EYE_BOTTOM_BOUNDARY = 19;
*/
public static final int LEFT_EYE_BOTTOM_BOUNDARY_VALUE = 19;
/**
*
* Left eye, left corner.
*
*
* LEFT_EYE_LEFT_CORNER = 20;
*/
public static final int LEFT_EYE_LEFT_CORNER_VALUE = 20;
/**
*
* Right eye, top boundary.
*
*
* RIGHT_EYE_TOP_BOUNDARY = 21;
*/
public static final int RIGHT_EYE_TOP_BOUNDARY_VALUE = 21;
/**
*
* Right eye, right corner.
*
*
* RIGHT_EYE_RIGHT_CORNER = 22;
*/
public static final int RIGHT_EYE_RIGHT_CORNER_VALUE = 22;
/**
*
* Right eye, bottom boundary.
*
*
* RIGHT_EYE_BOTTOM_BOUNDARY = 23;
*/
public static final int RIGHT_EYE_BOTTOM_BOUNDARY_VALUE = 23;
/**
*
* Right eye, left corner.
*
*
* RIGHT_EYE_LEFT_CORNER = 24;
*/
public static final int RIGHT_EYE_LEFT_CORNER_VALUE = 24;
/**
*
* Left eyebrow, upper midpoint.
*
*
* LEFT_EYEBROW_UPPER_MIDPOINT = 25;
*/
public static final int LEFT_EYEBROW_UPPER_MIDPOINT_VALUE = 25;
/**
*
* Right eyebrow, upper midpoint.
*
*
* RIGHT_EYEBROW_UPPER_MIDPOINT = 26;
*/
public static final int RIGHT_EYEBROW_UPPER_MIDPOINT_VALUE = 26;
/**
*
* Left ear tragion.
*
*
* LEFT_EAR_TRAGION = 27;
*/
public static final int LEFT_EAR_TRAGION_VALUE = 27;
/**
*
* Right ear tragion.
*
*
* RIGHT_EAR_TRAGION = 28;
*/
public static final int RIGHT_EAR_TRAGION_VALUE = 28;
/**
*
* Left eye pupil.
*
*
* LEFT_EYE_PUPIL = 29;
*/
public static final int LEFT_EYE_PUPIL_VALUE = 29;
/**
*
* Right eye pupil.
*
*
* RIGHT_EYE_PUPIL = 30;
*/
public static final int RIGHT_EYE_PUPIL_VALUE = 30;
/**
*
* Forehead glabella.
*
*
* FOREHEAD_GLABELLA = 31;
*/
public static final int FOREHEAD_GLABELLA_VALUE = 31;
/**
*
* Chin gnathion.
*
*
* CHIN_GNATHION = 32;
*/
public static final int CHIN_GNATHION_VALUE = 32;
/**
*
* Chin left gonion.
*
*
* CHIN_LEFT_GONION = 33;
*/
public static final int CHIN_LEFT_GONION_VALUE = 33;
/**
*
* Chin right gonion.
*
*
* CHIN_RIGHT_GONION = 34;
*/
public static final int CHIN_RIGHT_GONION_VALUE = 34;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Type valueOf(int value) {
return forNumber(value);
}
public static Type forNumber(int value) {
switch (value) {
case 0: return UNKNOWN_LANDMARK;
case 1: return LEFT_EYE;
case 2: return RIGHT_EYE;
case 3: return LEFT_OF_LEFT_EYEBROW;
case 4: return RIGHT_OF_LEFT_EYEBROW;
case 5: return LEFT_OF_RIGHT_EYEBROW;
case 6: return RIGHT_OF_RIGHT_EYEBROW;
case 7: return MIDPOINT_BETWEEN_EYES;
case 8: return NOSE_TIP;
case 9: return UPPER_LIP;
case 10: return LOWER_LIP;
case 11: return MOUTH_LEFT;
case 12: return MOUTH_RIGHT;
case 13: return MOUTH_CENTER;
case 14: return NOSE_BOTTOM_RIGHT;
case 15: return NOSE_BOTTOM_LEFT;
case 16: return NOSE_BOTTOM_CENTER;
case 17: return LEFT_EYE_TOP_BOUNDARY;
case 18: return LEFT_EYE_RIGHT_CORNER;
case 19: return LEFT_EYE_BOTTOM_BOUNDARY;
case 20: return LEFT_EYE_LEFT_CORNER;
case 21: return RIGHT_EYE_TOP_BOUNDARY;
case 22: return RIGHT_EYE_RIGHT_CORNER;
case 23: return RIGHT_EYE_BOTTOM_BOUNDARY;
case 24: return RIGHT_EYE_LEFT_CORNER;
case 25: return LEFT_EYEBROW_UPPER_MIDPOINT;
case 26: return RIGHT_EYEBROW_UPPER_MIDPOINT;
case 27: return LEFT_EAR_TRAGION;
case 28: return RIGHT_EAR_TRAGION;
case 29: return LEFT_EYE_PUPIL;
case 30: return RIGHT_EYE_PUPIL;
case 31: return FOREHEAD_GLABELLA;
case 32: return CHIN_GNATHION;
case 33: return CHIN_LEFT_GONION;
case 34: return CHIN_RIGHT_GONION;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Type>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
Type> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Type>() {
public Type findValueByNumber(int number) {
return Type.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.cloud.vision.v1.FaceAnnotation.Landmark.getDescriptor().getEnumTypes().get(0);
}
private static final Type[] VALUES = values();
public static Type valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private Type(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.vision.v1.FaceAnnotation.Landmark.Type)
}
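// Usage note (illustrative, not part of the generated code): because left and right
// are defined from the viewer's vantage, Type.LEFT_EYE usually refers to the
// subject's right eye. A landmark of a given type can be located like this,
// assuming `face` is a FaceAnnotation instance:
//
//   for (com.google.cloud.vision.v1.FaceAnnotation.Landmark lm : face.getLandmarksList()) {
//     if (lm.getType() == com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type.LEFT_EYE) {
//       // found the viewer-left eye landmark
//     }
//   }
//
// Raw wire values can be converted with Type.forNumber(int); the int-based
// Type.valueOf(int) overload above is deprecated in its favor.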
public static final int TYPE_FIELD_NUMBER = 3;
private int type_;
/**
*
* Face landmark type.
*
*
* optional .google.cloud.vision.v1.FaceAnnotation.Landmark.Type type = 3;
*/
public int getTypeValue() {
return type_;
}
/**
*
* Face landmark type.
*
*
* optional .google.cloud.vision.v1.FaceAnnotation.Landmark.Type type = 3;
*/
public com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type getType() {
com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type result = com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type.valueOf(type_);
return result == null ? com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type.UNRECOGNIZED : result;
}
public static final int POSITION_FIELD_NUMBER = 4;
private com.google.cloud.vision.v1.Position position_;
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public boolean hasPosition() {
return position_ != null;
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public com.google.cloud.vision.v1.Position getPosition() {
return position_ == null ? com.google.cloud.vision.v1.Position.getDefaultInstance() : position_;
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public com.google.cloud.vision.v1.PositionOrBuilder getPositionOrBuilder() {
return getPosition();
}
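// Illustrative read (assuming `landmark` is an element of getLandmarksList()):
//
//   com.google.cloud.vision.v1.Position p = landmark.getPosition();
//   float x = p.getX(), y = p.getY(), z = p.getZ();
//   // As noted in the class comment, (x, y) may fall outside the image bounds
//   // when the face touches an edge, so clamp before indexing into pixel data.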
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (type_ != com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type.UNKNOWN_LANDMARK.getNumber()) {
output.writeEnum(3, type_);
}
if (position_ != null) {
output.writeMessage(4, getPosition());
}
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (type_ != com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type.UNKNOWN_LANDMARK.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(3, type_);
}
if (position_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, getPosition());
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.vision.v1.FaceAnnotation.Landmark)) {
return super.equals(obj);
}
com.google.cloud.vision.v1.FaceAnnotation.Landmark other = (com.google.cloud.vision.v1.FaceAnnotation.Landmark) obj;
boolean result = true;
result = result && type_ == other.type_;
result = result && (hasPosition() == other.hasPosition());
if (hasPosition()) {
result = result && getPosition()
.equals(other.getPosition());
}
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + type_;
if (hasPosition()) {
hash = (37 * hash) + POSITION_FIELD_NUMBER;
hash = (53 * hash) + getPosition().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
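// Round-trip sketch (the `someLandmark` variable is assumed; only the parseFrom
// overloads above are part of the generated API):
//
//   byte[] bytes = someLandmark.toByteArray();
//   com.google.cloud.vision.v1.FaceAnnotation.Landmark copy =
//       com.google.cloud.vision.v1.FaceAnnotation.Landmark.parseFrom(bytes);
//   // parseFrom throws InvalidProtocolBufferException on malformed input.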
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.vision.v1.FaceAnnotation.Landmark prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* A face-specific landmark (for example, a face feature).
* Landmark positions may fall outside the bounds of the image
* when the face is near one or more edges of the image.
* Therefore it is NOT guaranteed that 0 <= x < width or 0 <= y < height.
*
*
* Protobuf type {@code google.cloud.vision.v1.FaceAnnotation.Landmark}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.cloud.vision.v1.FaceAnnotation.Landmark)
com.google.cloud.vision.v1.FaceAnnotation.LandmarkOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_Landmark_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_Landmark_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1.FaceAnnotation.Landmark.class, com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder.class);
}
// Construct using com.google.cloud.vision.v1.FaceAnnotation.Landmark.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
type_ = 0;
if (positionBuilder_ == null) {
position_ = null;
} else {
position_ = null;
positionBuilder_ = null;
}
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_Landmark_descriptor;
}
public com.google.cloud.vision.v1.FaceAnnotation.Landmark getDefaultInstanceForType() {
return com.google.cloud.vision.v1.FaceAnnotation.Landmark.getDefaultInstance();
}
public com.google.cloud.vision.v1.FaceAnnotation.Landmark build() {
com.google.cloud.vision.v1.FaceAnnotation.Landmark result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.cloud.vision.v1.FaceAnnotation.Landmark buildPartial() {
com.google.cloud.vision.v1.FaceAnnotation.Landmark result = new com.google.cloud.vision.v1.FaceAnnotation.Landmark(this);
result.type_ = type_;
if (positionBuilder_ == null) {
result.position_ = position_;
} else {
result.position_ = positionBuilder_.build();
}
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.vision.v1.FaceAnnotation.Landmark) {
return mergeFrom((com.google.cloud.vision.v1.FaceAnnotation.Landmark)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.vision.v1.FaceAnnotation.Landmark other) {
if (other == com.google.cloud.vision.v1.FaceAnnotation.Landmark.getDefaultInstance()) return this;
if (other.type_ != 0) {
setTypeValue(other.getTypeValue());
}
if (other.hasPosition()) {
mergePosition(other.getPosition());
}
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.vision.v1.FaceAnnotation.Landmark parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.vision.v1.FaceAnnotation.Landmark) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int type_ = 0;
/**
*
* Face landmark type.
*
*
* optional .google.cloud.vision.v1.FaceAnnotation.Landmark.Type type = 3;
*/
public int getTypeValue() {
return type_;
}
/**
*
* Face landmark type.
*
*
* optional .google.cloud.vision.v1.FaceAnnotation.Landmark.Type type = 3;
*/
public Builder setTypeValue(int value) {
type_ = value;
onChanged();
return this;
}
/**
*
* Face landmark type.
*
*
* optional .google.cloud.vision.v1.FaceAnnotation.Landmark.Type type = 3;
*/
public com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type getType() {
com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type result = com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type.valueOf(type_);
return result == null ? com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type.UNRECOGNIZED : result;
}
/**
*
* Face landmark type.
*
*
* optional .google.cloud.vision.v1.FaceAnnotation.Landmark.Type type = 3;
*/
public Builder setType(com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type value) {
if (value == null) {
throw new NullPointerException();
}
type_ = value.getNumber();
onChanged();
return this;
}
/**
*
* Face landmark type.
*
*
* optional .google.cloud.vision.v1.FaceAnnotation.Landmark.Type type = 3;
*/
public Builder clearType() {
type_ = 0;
onChanged();
return this;
}
private com.google.cloud.vision.v1.Position position_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.Position, com.google.cloud.vision.v1.Position.Builder, com.google.cloud.vision.v1.PositionOrBuilder> positionBuilder_;
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public boolean hasPosition() {
return positionBuilder_ != null || position_ != null;
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public com.google.cloud.vision.v1.Position getPosition() {
if (positionBuilder_ == null) {
return position_ == null ? com.google.cloud.vision.v1.Position.getDefaultInstance() : position_;
} else {
return positionBuilder_.getMessage();
}
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public Builder setPosition(com.google.cloud.vision.v1.Position value) {
if (positionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
position_ = value;
onChanged();
} else {
positionBuilder_.setMessage(value);
}
return this;
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public Builder setPosition(
com.google.cloud.vision.v1.Position.Builder builderForValue) {
if (positionBuilder_ == null) {
position_ = builderForValue.build();
onChanged();
} else {
positionBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public Builder mergePosition(com.google.cloud.vision.v1.Position value) {
if (positionBuilder_ == null) {
if (position_ != null) {
position_ =
com.google.cloud.vision.v1.Position.newBuilder(position_).mergeFrom(value).buildPartial();
} else {
position_ = value;
}
onChanged();
} else {
positionBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public Builder clearPosition() {
if (positionBuilder_ == null) {
position_ = null;
onChanged();
} else {
position_ = null;
positionBuilder_ = null;
}
return this;
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public com.google.cloud.vision.v1.Position.Builder getPositionBuilder() {
onChanged();
return getPositionFieldBuilder().getBuilder();
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
public com.google.cloud.vision.v1.PositionOrBuilder getPositionOrBuilder() {
if (positionBuilder_ != null) {
return positionBuilder_.getMessageOrBuilder();
} else {
return position_ == null ?
com.google.cloud.vision.v1.Position.getDefaultInstance() : position_;
}
}
/**
*
* Face landmark position.
*
*
* optional .google.cloud.vision.v1.Position position = 4;
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.Position, com.google.cloud.vision.v1.Position.Builder, com.google.cloud.vision.v1.PositionOrBuilder>
getPositionFieldBuilder() {
if (positionBuilder_ == null) {
positionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.Position, com.google.cloud.vision.v1.Position.Builder, com.google.cloud.vision.v1.PositionOrBuilder>(
getPosition(),
getParentForChildren(),
isClean());
position_ = null;
}
return positionBuilder_;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:google.cloud.vision.v1.FaceAnnotation.Landmark)
}
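// Builder sketch (illustrative values, not generated code):
//
//   com.google.cloud.vision.v1.FaceAnnotation.Landmark landmark =
//       com.google.cloud.vision.v1.FaceAnnotation.Landmark.newBuilder()
//           .setType(com.google.cloud.vision.v1.FaceAnnotation.Landmark.Type.NOSE_TIP)
//           .setPosition(com.google.cloud.vision.v1.Position.newBuilder()
//               .setX(120.5F).setY(88.0F).setZ(0F))
//           .build();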
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1.FaceAnnotation.Landmark)
private static final com.google.cloud.vision.v1.FaceAnnotation.Landmark DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.vision.v1.FaceAnnotation.Landmark();
}
public static com.google.cloud.vision.v1.FaceAnnotation.Landmark getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Landmark>
PARSER = new com.google.protobuf.AbstractParser<Landmark>() {
public Landmark parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Landmark(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Landmark> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Landmark> getParserForType() {
return PARSER;
}
public com.google.cloud.vision.v1.FaceAnnotation.Landmark getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
private int bitField0_;
public static final int BOUNDING_POLY_FIELD_NUMBER = 1;
private com.google.cloud.vision.v1.BoundingPoly boundingPoly_;
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public boolean hasBoundingPoly() {
return boundingPoly_ != null;
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public com.google.cloud.vision.v1.BoundingPoly getBoundingPoly() {
return boundingPoly_ == null ? com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance() : boundingPoly_;
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public com.google.cloud.vision.v1.BoundingPolyOrBuilder getBoundingPolyOrBuilder() {
return getBoundingPoly();
}
public static final int FD_BOUNDING_POLY_FIELD_NUMBER = 2;
private com.google.cloud.vision.v1.BoundingPoly fdBoundingPoly_;
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public boolean hasFdBoundingPoly() {
return fdBoundingPoly_ != null;
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public com.google.cloud.vision.v1.BoundingPoly getFdBoundingPoly() {
return fdBoundingPoly_ == null ? com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance() : fdBoundingPoly_;
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public com.google.cloud.vision.v1.BoundingPolyOrBuilder getFdBoundingPolyOrBuilder() {
return getFdBoundingPoly();
}
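// Illustrative comparison of the two polygons, assuming `face` is a FaceAnnotation
// (BoundingPoly.getVerticesList() and Vertex.getX()/getY() come from the same package):
//
//   for (com.google.cloud.vision.v1.Vertex v : face.getBoundingPoly().getVerticesList()) {
//     System.out.println(v.getX() + "," + v.getY()); // framing polygon
//   }
//   com.google.cloud.vision.v1.BoundingPoly skinOnly = face.getFdBoundingPoly(); // tighter, skin-only polygon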
public static final int LANDMARKS_FIELD_NUMBER = 3;
private java.util.List<com.google.cloud.vision.v1.FaceAnnotation.Landmark> landmarks_;
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public java.util.List<com.google.cloud.vision.v1.FaceAnnotation.Landmark> getLandmarksList() {
return landmarks_;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public java.util.List<? extends com.google.cloud.vision.v1.FaceAnnotation.LandmarkOrBuilder>
getLandmarksOrBuilderList() {
return landmarks_;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public int getLandmarksCount() {
return landmarks_.size();
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public com.google.cloud.vision.v1.FaceAnnotation.Landmark getLandmarks(int index) {
return landmarks_.get(index);
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public com.google.cloud.vision.v1.FaceAnnotation.LandmarkOrBuilder getLandmarksOrBuilder(
int index) {
return landmarks_.get(index);
}
public static final int ROLL_ANGLE_FIELD_NUMBER = 4;
private float rollAngle_;
/**
*
* Roll angle. Indicates the amount of clockwise/anti-clockwise rotation of
* the face relative to the image vertical, about the axis perpendicular to
* the face. Range [-180,180].
*
*
* optional float roll_angle = 4;
*/
public float getRollAngle() {
return rollAngle_;
}
public static final int PAN_ANGLE_FIELD_NUMBER = 5;
private float panAngle_;
/**
*
* Yaw angle. Indicates the leftward/rightward angle that the face is
* pointing, relative to the vertical plane perpendicular to the image. Range
* [-180,180].
*
*
* optional float pan_angle = 5;
*/
public float getPanAngle() {
return panAngle_;
}
public static final int TILT_ANGLE_FIELD_NUMBER = 6;
private float tiltAngle_;
/**
*
* Pitch angle. Indicates the upwards/downwards angle that the face is
* pointing relative to the image's horizontal plane. Range [-180,180].
*
*
* optional float tilt_angle = 6;
*/
public float getTiltAngle() {
return tiltAngle_;
}
public static final int DETECTION_CONFIDENCE_FIELD_NUMBER = 7;
private float detectionConfidence_;
/**
*
* Detection confidence. Range [0, 1].
*
*
* optional float detection_confidence = 7;
*/
public float getDetectionConfidence() {
return detectionConfidence_;
}
public static final int LANDMARKING_CONFIDENCE_FIELD_NUMBER = 8;
private float landmarkingConfidence_;
/**
*
* Face landmarking confidence. Range [0, 1].
*
*
* optional float landmarking_confidence = 8;
*/
public float getLandmarkingConfidence() {
return landmarkingConfidence_;
}
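// Illustrative filter on pose and confidence (thresholds are arbitrary examples,
// and `face` is an assumed FaceAnnotation instance):
//
//   boolean frontalAndConfident =
//       Math.abs(face.getPanAngle()) < 15F
//       && Math.abs(face.getTiltAngle()) < 15F
//       && face.getDetectionConfidence() > 0.8F;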
public static final int JOY_LIKELIHOOD_FIELD_NUMBER = 9;
private int joyLikelihood_;
/**
*
* Joy likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood joy_likelihood = 9;
*/
public int getJoyLikelihoodValue() {
return joyLikelihood_;
}
/**
*
* Joy likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood joy_likelihood = 9;
*/
public com.google.cloud.vision.v1.Likelihood getJoyLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(joyLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
public static final int SORROW_LIKELIHOOD_FIELD_NUMBER = 10;
private int sorrowLikelihood_;
/**
*
* Sorrow likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood sorrow_likelihood = 10;
*/
public int getSorrowLikelihoodValue() {
return sorrowLikelihood_;
}
/**
*
* Sorrow likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood sorrow_likelihood = 10;
*/
public com.google.cloud.vision.v1.Likelihood getSorrowLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(sorrowLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
public static final int ANGER_LIKELIHOOD_FIELD_NUMBER = 11;
private int angerLikelihood_;
/**
*
* Anger likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood anger_likelihood = 11;
*/
public int getAngerLikelihoodValue() {
return angerLikelihood_;
}
/**
*
* Anger likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood anger_likelihood = 11;
*/
public com.google.cloud.vision.v1.Likelihood getAngerLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(angerLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
public static final int SURPRISE_LIKELIHOOD_FIELD_NUMBER = 12;
private int surpriseLikelihood_;
/**
*
* Surprise likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood surprise_likelihood = 12;
*/
public int getSurpriseLikelihoodValue() {
return surpriseLikelihood_;
}
/**
*
* Surprise likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood surprise_likelihood = 12;
*/
public com.google.cloud.vision.v1.Likelihood getSurpriseLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(surpriseLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
public static final int UNDER_EXPOSED_LIKELIHOOD_FIELD_NUMBER = 13;
private int underExposedLikelihood_;
/**
*
* Under-exposed likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood under_exposed_likelihood = 13;
*/
public int getUnderExposedLikelihoodValue() {
return underExposedLikelihood_;
}
/**
*
* Under-exposed likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood under_exposed_likelihood = 13;
*/
public com.google.cloud.vision.v1.Likelihood getUnderExposedLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(underExposedLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
public static final int BLURRED_LIKELIHOOD_FIELD_NUMBER = 14;
private int blurredLikelihood_;
/**
*
* Blurred likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood blurred_likelihood = 14;
*/
public int getBlurredLikelihoodValue() {
return blurredLikelihood_;
}
/**
*
* Blurred likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood blurred_likelihood = 14;
*/
public com.google.cloud.vision.v1.Likelihood getBlurredLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(blurredLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
public static final int HEADWEAR_LIKELIHOOD_FIELD_NUMBER = 15;
private int headwearLikelihood_;
/**
*
* Headwear likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood headwear_likelihood = 15;
*/
public int getHeadwearLikelihoodValue() {
return headwearLikelihood_;
}
/**
*
* Headwear likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood headwear_likelihood = 15;
*/
public com.google.cloud.vision.v1.Likelihood getHeadwearLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(headwearLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
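// The likelihood fields above are Likelihood enum values (UNKNOWN through
// VERY_LIKELY). An illustrative check, assuming `face` is a FaceAnnotation:
//
//   com.google.cloud.vision.v1.Likelihood joy = face.getJoyLikelihood();
//   boolean smiling = joy == com.google.cloud.vision.v1.Likelihood.LIKELY
//       || joy == com.google.cloud.vision.v1.Likelihood.VERY_LIKELY;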
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (boundingPoly_ != null) {
output.writeMessage(1, getBoundingPoly());
}
if (fdBoundingPoly_ != null) {
output.writeMessage(2, getFdBoundingPoly());
}
for (int i = 0; i < landmarks_.size(); i++) {
output.writeMessage(3, landmarks_.get(i));
}
if (rollAngle_ != 0F) {
output.writeFloat(4, rollAngle_);
}
if (panAngle_ != 0F) {
output.writeFloat(5, panAngle_);
}
if (tiltAngle_ != 0F) {
output.writeFloat(6, tiltAngle_);
}
if (detectionConfidence_ != 0F) {
output.writeFloat(7, detectionConfidence_);
}
if (landmarkingConfidence_ != 0F) {
output.writeFloat(8, landmarkingConfidence_);
}
if (joyLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
output.writeEnum(9, joyLikelihood_);
}
if (sorrowLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
output.writeEnum(10, sorrowLikelihood_);
}
if (angerLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
output.writeEnum(11, angerLikelihood_);
}
if (surpriseLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
output.writeEnum(12, surpriseLikelihood_);
}
if (underExposedLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
output.writeEnum(13, underExposedLikelihood_);
}
if (blurredLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
output.writeEnum(14, blurredLikelihood_);
}
if (headwearLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
output.writeEnum(15, headwearLikelihood_);
}
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (boundingPoly_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getBoundingPoly());
}
if (fdBoundingPoly_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getFdBoundingPoly());
}
for (int i = 0; i < landmarks_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, landmarks_.get(i));
}
if (rollAngle_ != 0F) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(4, rollAngle_);
}
if (panAngle_ != 0F) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(5, panAngle_);
}
if (tiltAngle_ != 0F) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(6, tiltAngle_);
}
if (detectionConfidence_ != 0F) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(7, detectionConfidence_);
}
if (landmarkingConfidence_ != 0F) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(8, landmarkingConfidence_);
}
if (joyLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(9, joyLikelihood_);
}
if (sorrowLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(10, sorrowLikelihood_);
}
if (angerLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(11, angerLikelihood_);
}
if (surpriseLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(12, surpriseLikelihood_);
}
if (underExposedLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(13, underExposedLikelihood_);
}
if (blurredLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(14, blurredLikelihood_);
}
if (headwearLikelihood_ != com.google.cloud.vision.v1.Likelihood.UNKNOWN.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(15, headwearLikelihood_);
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.vision.v1.FaceAnnotation)) {
return super.equals(obj);
}
com.google.cloud.vision.v1.FaceAnnotation other = (com.google.cloud.vision.v1.FaceAnnotation) obj;
boolean result = true;
result = result && (hasBoundingPoly() == other.hasBoundingPoly());
if (hasBoundingPoly()) {
result = result && getBoundingPoly()
.equals(other.getBoundingPoly());
}
result = result && (hasFdBoundingPoly() == other.hasFdBoundingPoly());
if (hasFdBoundingPoly()) {
result = result && getFdBoundingPoly()
.equals(other.getFdBoundingPoly());
}
result = result && getLandmarksList()
.equals(other.getLandmarksList());
result = result && (
java.lang.Float.floatToIntBits(getRollAngle())
== java.lang.Float.floatToIntBits(
other.getRollAngle()));
result = result && (
java.lang.Float.floatToIntBits(getPanAngle())
== java.lang.Float.floatToIntBits(
other.getPanAngle()));
result = result && (
java.lang.Float.floatToIntBits(getTiltAngle())
== java.lang.Float.floatToIntBits(
other.getTiltAngle()));
result = result && (
java.lang.Float.floatToIntBits(getDetectionConfidence())
== java.lang.Float.floatToIntBits(
other.getDetectionConfidence()));
result = result && (
java.lang.Float.floatToIntBits(getLandmarkingConfidence())
== java.lang.Float.floatToIntBits(
other.getLandmarkingConfidence()));
result = result && joyLikelihood_ == other.joyLikelihood_;
result = result && sorrowLikelihood_ == other.sorrowLikelihood_;
result = result && angerLikelihood_ == other.angerLikelihood_;
result = result && surpriseLikelihood_ == other.surpriseLikelihood_;
result = result && underExposedLikelihood_ == other.underExposedLikelihood_;
result = result && blurredLikelihood_ == other.blurredLikelihood_;
result = result && headwearLikelihood_ == other.headwearLikelihood_;
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasBoundingPoly()) {
hash = (37 * hash) + BOUNDING_POLY_FIELD_NUMBER;
hash = (53 * hash) + getBoundingPoly().hashCode();
}
if (hasFdBoundingPoly()) {
hash = (37 * hash) + FD_BOUNDING_POLY_FIELD_NUMBER;
hash = (53 * hash) + getFdBoundingPoly().hashCode();
}
if (getLandmarksCount() > 0) {
hash = (37 * hash) + LANDMARKS_FIELD_NUMBER;
hash = (53 * hash) + getLandmarksList().hashCode();
}
hash = (37 * hash) + ROLL_ANGLE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(
getRollAngle());
hash = (37 * hash) + PAN_ANGLE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(
getPanAngle());
hash = (37 * hash) + TILT_ANGLE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(
getTiltAngle());
hash = (37 * hash) + DETECTION_CONFIDENCE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(
getDetectionConfidence());
hash = (37 * hash) + LANDMARKING_CONFIDENCE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(
getLandmarkingConfidence());
hash = (37 * hash) + JOY_LIKELIHOOD_FIELD_NUMBER;
hash = (53 * hash) + joyLikelihood_;
hash = (37 * hash) + SORROW_LIKELIHOOD_FIELD_NUMBER;
hash = (53 * hash) + sorrowLikelihood_;
hash = (37 * hash) + ANGER_LIKELIHOOD_FIELD_NUMBER;
hash = (53 * hash) + angerLikelihood_;
hash = (37 * hash) + SURPRISE_LIKELIHOOD_FIELD_NUMBER;
hash = (53 * hash) + surpriseLikelihood_;
hash = (37 * hash) + UNDER_EXPOSED_LIKELIHOOD_FIELD_NUMBER;
hash = (53 * hash) + underExposedLikelihood_;
hash = (37 * hash) + BLURRED_LIKELIHOOD_FIELD_NUMBER;
hash = (53 * hash) + blurredLikelihood_;
hash = (37 * hash) + HEADWEAR_LIKELIHOOD_FIELD_NUMBER;
hash = (53 * hash) + headwearLikelihood_;
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.vision.v1.FaceAnnotation parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1.FaceAnnotation parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1.FaceAnnotation parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1.FaceAnnotation parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1.FaceAnnotation parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1.FaceAnnotation parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.cloud.vision.v1.FaceAnnotation parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1.FaceAnnotation parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.cloud.vision.v1.FaceAnnotation parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1.FaceAnnotation parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
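// Usage sketch (not part of the generated API): the static parseFrom
// overloads above deserialize a FaceAnnotation from bytes or a stream.
// The file name below is an illustrative placeholder, not something this
// library defines.
//
//   byte[] data = java.nio.file.Files.readAllBytes(
//       java.nio.file.Paths.get("face_annotation.bin"));  // hypothetical serialized message
//   com.google.cloud.vision.v1.FaceAnnotation face =
//       com.google.cloud.vision.v1.FaceAnnotation.parseFrom(data);
//   float confidence = face.getDetectionConfidence();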
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.vision.v1.FaceAnnotation prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
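// Usage sketch (illustrative only): newBuilder()/toBuilder() above return a
// Builder whose setters are defined later in this file; the field values
// here are made-up examples.
//
//   com.google.cloud.vision.v1.FaceAnnotation face =
//       com.google.cloud.vision.v1.FaceAnnotation.newBuilder()
//           .setRollAngle(2.5F)
//           .setDetectionConfidence(0.93F)
//           .setJoyLikelihood(com.google.cloud.vision.v1.Likelihood.VERY_LIKELY)
//           .build();
//   // toBuilder() copies an existing message so individual fields can be changed:
//   com.google.cloud.vision.v1.FaceAnnotation adjusted =
//       face.toBuilder().setPanAngle(-10F).build();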
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* A face annotation object contains the results of face detection.
*
*
* Protobuf type {@code google.cloud.vision.v1.FaceAnnotation}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.cloud.vision.v1.FaceAnnotation)
com.google.cloud.vision.v1.FaceAnnotationOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1.FaceAnnotation.class, com.google.cloud.vision.v1.FaceAnnotation.Builder.class);
}
// Construct using com.google.cloud.vision.v1.FaceAnnotation.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getLandmarksFieldBuilder();
}
}
public Builder clear() {
super.clear();
if (boundingPolyBuilder_ == null) {
boundingPoly_ = null;
} else {
boundingPoly_ = null;
boundingPolyBuilder_ = null;
}
if (fdBoundingPolyBuilder_ == null) {
fdBoundingPoly_ = null;
} else {
fdBoundingPoly_ = null;
fdBoundingPolyBuilder_ = null;
}
if (landmarksBuilder_ == null) {
landmarks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
landmarksBuilder_.clear();
}
rollAngle_ = 0F;
panAngle_ = 0F;
tiltAngle_ = 0F;
detectionConfidence_ = 0F;
landmarkingConfidence_ = 0F;
joyLikelihood_ = 0;
sorrowLikelihood_ = 0;
angerLikelihood_ = 0;
surpriseLikelihood_ = 0;
underExposedLikelihood_ = 0;
blurredLikelihood_ = 0;
headwearLikelihood_ = 0;
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.cloud.vision.v1.ImageAnnotatorProto.internal_static_google_cloud_vision_v1_FaceAnnotation_descriptor;
}
public com.google.cloud.vision.v1.FaceAnnotation getDefaultInstanceForType() {
return com.google.cloud.vision.v1.FaceAnnotation.getDefaultInstance();
}
public com.google.cloud.vision.v1.FaceAnnotation build() {
com.google.cloud.vision.v1.FaceAnnotation result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.cloud.vision.v1.FaceAnnotation buildPartial() {
com.google.cloud.vision.v1.FaceAnnotation result = new com.google.cloud.vision.v1.FaceAnnotation(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (boundingPolyBuilder_ == null) {
result.boundingPoly_ = boundingPoly_;
} else {
result.boundingPoly_ = boundingPolyBuilder_.build();
}
if (fdBoundingPolyBuilder_ == null) {
result.fdBoundingPoly_ = fdBoundingPoly_;
} else {
result.fdBoundingPoly_ = fdBoundingPolyBuilder_.build();
}
if (landmarksBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004)) {
landmarks_ = java.util.Collections.unmodifiableList(landmarks_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.landmarks_ = landmarks_;
} else {
result.landmarks_ = landmarksBuilder_.build();
}
result.rollAngle_ = rollAngle_;
result.panAngle_ = panAngle_;
result.tiltAngle_ = tiltAngle_;
result.detectionConfidence_ = detectionConfidence_;
result.landmarkingConfidence_ = landmarkingConfidence_;
result.joyLikelihood_ = joyLikelihood_;
result.sorrowLikelihood_ = sorrowLikelihood_;
result.angerLikelihood_ = angerLikelihood_;
result.surpriseLikelihood_ = surpriseLikelihood_;
result.underExposedLikelihood_ = underExposedLikelihood_;
result.blurredLikelihood_ = blurredLikelihood_;
result.headwearLikelihood_ = headwearLikelihood_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.vision.v1.FaceAnnotation) {
return mergeFrom((com.google.cloud.vision.v1.FaceAnnotation)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.vision.v1.FaceAnnotation other) {
if (other == com.google.cloud.vision.v1.FaceAnnotation.getDefaultInstance()) return this;
if (other.hasBoundingPoly()) {
mergeBoundingPoly(other.getBoundingPoly());
}
if (other.hasFdBoundingPoly()) {
mergeFdBoundingPoly(other.getFdBoundingPoly());
}
if (landmarksBuilder_ == null) {
if (!other.landmarks_.isEmpty()) {
if (landmarks_.isEmpty()) {
landmarks_ = other.landmarks_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureLandmarksIsMutable();
landmarks_.addAll(other.landmarks_);
}
onChanged();
}
} else {
if (!other.landmarks_.isEmpty()) {
if (landmarksBuilder_.isEmpty()) {
landmarksBuilder_.dispose();
landmarksBuilder_ = null;
landmarks_ = other.landmarks_;
bitField0_ = (bitField0_ & ~0x00000004);
landmarksBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getLandmarksFieldBuilder() : null;
} else {
landmarksBuilder_.addAllMessages(other.landmarks_);
}
}
}
if (other.getRollAngle() != 0F) {
setRollAngle(other.getRollAngle());
}
if (other.getPanAngle() != 0F) {
setPanAngle(other.getPanAngle());
}
if (other.getTiltAngle() != 0F) {
setTiltAngle(other.getTiltAngle());
}
if (other.getDetectionConfidence() != 0F) {
setDetectionConfidence(other.getDetectionConfidence());
}
if (other.getLandmarkingConfidence() != 0F) {
setLandmarkingConfidence(other.getLandmarkingConfidence());
}
if (other.joyLikelihood_ != 0) {
setJoyLikelihoodValue(other.getJoyLikelihoodValue());
}
if (other.sorrowLikelihood_ != 0) {
setSorrowLikelihoodValue(other.getSorrowLikelihoodValue());
}
if (other.angerLikelihood_ != 0) {
setAngerLikelihoodValue(other.getAngerLikelihoodValue());
}
if (other.surpriseLikelihood_ != 0) {
setSurpriseLikelihoodValue(other.getSurpriseLikelihoodValue());
}
if (other.underExposedLikelihood_ != 0) {
setUnderExposedLikelihoodValue(other.getUnderExposedLikelihoodValue());
}
if (other.blurredLikelihood_ != 0) {
setBlurredLikelihoodValue(other.getBlurredLikelihoodValue());
}
if (other.headwearLikelihood_ != 0) {
setHeadwearLikelihoodValue(other.getHeadwearLikelihoodValue());
}
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.vision.v1.FaceAnnotation parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.vision.v1.FaceAnnotation) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private com.google.cloud.vision.v1.BoundingPoly boundingPoly_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.BoundingPoly, com.google.cloud.vision.v1.BoundingPoly.Builder, com.google.cloud.vision.v1.BoundingPolyOrBuilder> boundingPolyBuilder_;
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public boolean hasBoundingPoly() {
return boundingPolyBuilder_ != null || boundingPoly_ != null;
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public com.google.cloud.vision.v1.BoundingPoly getBoundingPoly() {
if (boundingPolyBuilder_ == null) {
return boundingPoly_ == null ? com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance() : boundingPoly_;
} else {
return boundingPolyBuilder_.getMessage();
}
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public Builder setBoundingPoly(com.google.cloud.vision.v1.BoundingPoly value) {
if (boundingPolyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
boundingPoly_ = value;
onChanged();
} else {
boundingPolyBuilder_.setMessage(value);
}
return this;
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public Builder setBoundingPoly(
com.google.cloud.vision.v1.BoundingPoly.Builder builderForValue) {
if (boundingPolyBuilder_ == null) {
boundingPoly_ = builderForValue.build();
onChanged();
} else {
boundingPolyBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public Builder mergeBoundingPoly(com.google.cloud.vision.v1.BoundingPoly value) {
if (boundingPolyBuilder_ == null) {
if (boundingPoly_ != null) {
boundingPoly_ =
com.google.cloud.vision.v1.BoundingPoly.newBuilder(boundingPoly_).mergeFrom(value).buildPartial();
} else {
boundingPoly_ = value;
}
onChanged();
} else {
boundingPolyBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public Builder clearBoundingPoly() {
if (boundingPolyBuilder_ == null) {
boundingPoly_ = null;
onChanged();
} else {
boundingPoly_ = null;
boundingPolyBuilder_ = null;
}
return this;
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public com.google.cloud.vision.v1.BoundingPoly.Builder getBoundingPolyBuilder() {
onChanged();
return getBoundingPolyFieldBuilder().getBuilder();
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
public com.google.cloud.vision.v1.BoundingPolyOrBuilder getBoundingPolyOrBuilder() {
if (boundingPolyBuilder_ != null) {
return boundingPolyBuilder_.getMessageOrBuilder();
} else {
return boundingPoly_ == null ?
com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance() : boundingPoly_;
}
}
/**
*
* The bounding polygon around the face. The coordinates of the bounding box
* are in the original image's scale, as returned in ImageParams.
* The bounding box is computed to "frame" the face in accordance with human
* expectations. It is based on the landmarker results.
* Note that one or more x and/or y coordinates may not be generated in the
* BoundingPoly (the polygon will be unbounded) if only a partial face appears in
* the image to be annotated.
*
*
* optional .google.cloud.vision.v1.BoundingPoly bounding_poly = 1;
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.BoundingPoly, com.google.cloud.vision.v1.BoundingPoly.Builder, com.google.cloud.vision.v1.BoundingPolyOrBuilder>
getBoundingPolyFieldBuilder() {
if (boundingPolyBuilder_ == null) {
boundingPolyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.BoundingPoly, com.google.cloud.vision.v1.BoundingPoly.Builder, com.google.cloud.vision.v1.BoundingPolyOrBuilder>(
getBoundingPoly(),
getParentForChildren(),
isClean());
boundingPoly_ = null;
}
return boundingPolyBuilder_;
}
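// Usage sketch (illustrative only): setBoundingPoly replaces the field,
// while mergeBoundingPoly merges into any value already present, per the
// accessors above. BoundingPoly.newBuilder(), addVertices and Vertex are
// assumed from the other messages generated in this package; "builder" is a
// placeholder for a FaceAnnotation.Builder instance.
//
//   com.google.cloud.vision.v1.BoundingPoly poly =
//       com.google.cloud.vision.v1.BoundingPoly.newBuilder()
//           .addVertices(com.google.cloud.vision.v1.Vertex.newBuilder().setX(10).setY(20))
//           .build();
//   builder.setBoundingPoly(poly);
//   if (builder.hasBoundingPoly()) {
//     com.google.cloud.vision.v1.BoundingPoly framed = builder.getBoundingPoly();
//   }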
private com.google.cloud.vision.v1.BoundingPoly fdBoundingPoly_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.BoundingPoly, com.google.cloud.vision.v1.BoundingPoly.Builder, com.google.cloud.vision.v1.BoundingPolyOrBuilder> fdBoundingPolyBuilder_;
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public boolean hasFdBoundingPoly() {
return fdBoundingPolyBuilder_ != null || fdBoundingPoly_ != null;
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public com.google.cloud.vision.v1.BoundingPoly getFdBoundingPoly() {
if (fdBoundingPolyBuilder_ == null) {
return fdBoundingPoly_ == null ? com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance() : fdBoundingPoly_;
} else {
return fdBoundingPolyBuilder_.getMessage();
}
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public Builder setFdBoundingPoly(com.google.cloud.vision.v1.BoundingPoly value) {
if (fdBoundingPolyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
fdBoundingPoly_ = value;
onChanged();
} else {
fdBoundingPolyBuilder_.setMessage(value);
}
return this;
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public Builder setFdBoundingPoly(
com.google.cloud.vision.v1.BoundingPoly.Builder builderForValue) {
if (fdBoundingPolyBuilder_ == null) {
fdBoundingPoly_ = builderForValue.build();
onChanged();
} else {
fdBoundingPolyBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public Builder mergeFdBoundingPoly(com.google.cloud.vision.v1.BoundingPoly value) {
if (fdBoundingPolyBuilder_ == null) {
if (fdBoundingPoly_ != null) {
fdBoundingPoly_ =
com.google.cloud.vision.v1.BoundingPoly.newBuilder(fdBoundingPoly_).mergeFrom(value).buildPartial();
} else {
fdBoundingPoly_ = value;
}
onChanged();
} else {
fdBoundingPolyBuilder_.mergeFrom(value);
}
return this;
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public Builder clearFdBoundingPoly() {
if (fdBoundingPolyBuilder_ == null) {
fdBoundingPoly_ = null;
onChanged();
} else {
fdBoundingPoly_ = null;
fdBoundingPolyBuilder_ = null;
}
return this;
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public com.google.cloud.vision.v1.BoundingPoly.Builder getFdBoundingPolyBuilder() {
onChanged();
return getFdBoundingPolyFieldBuilder().getBuilder();
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
public com.google.cloud.vision.v1.BoundingPolyOrBuilder getFdBoundingPolyOrBuilder() {
if (fdBoundingPolyBuilder_ != null) {
return fdBoundingPolyBuilder_.getMessageOrBuilder();
} else {
return fdBoundingPoly_ == null ?
com.google.cloud.vision.v1.BoundingPoly.getDefaultInstance() : fdBoundingPoly_;
}
}
/**
*
* This bounding polygon is tighter than the previous
* <code>boundingPoly</code>, and
* encloses only the skin part of the face. Typically, it is used to
* eliminate the face from any image analysis that detects the
* "amount of skin" visible in an image. It is not based on the
* landmarker results, only on the initial face detection, hence
* the <code>fd</code> (face detection) prefix.
*
*
* optional .google.cloud.vision.v1.BoundingPoly fd_bounding_poly = 2;
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.BoundingPoly, com.google.cloud.vision.v1.BoundingPoly.Builder, com.google.cloud.vision.v1.BoundingPolyOrBuilder>
getFdBoundingPolyFieldBuilder() {
if (fdBoundingPolyBuilder_ == null) {
fdBoundingPolyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.vision.v1.BoundingPoly, com.google.cloud.vision.v1.BoundingPoly.Builder, com.google.cloud.vision.v1.BoundingPolyOrBuilder>(
getFdBoundingPoly(),
getParentForChildren(),
isClean());
fdBoundingPoly_ = null;
}
return fdBoundingPolyBuilder_;
}
private java.util.List<com.google.cloud.vision.v1.FaceAnnotation.Landmark> landmarks_ =
java.util.Collections.emptyList();
private void ensureLandmarksIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
landmarks_ = new java.util.ArrayList<com.google.cloud.vision.v1.FaceAnnotation.Landmark>(landmarks_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.vision.v1.FaceAnnotation.Landmark, com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder, com.google.cloud.vision.v1.FaceAnnotation.LandmarkOrBuilder> landmarksBuilder_;
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public java.util.List<com.google.cloud.vision.v1.FaceAnnotation.Landmark> getLandmarksList() {
if (landmarksBuilder_ == null) {
return java.util.Collections.unmodifiableList(landmarks_);
} else {
return landmarksBuilder_.getMessageList();
}
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public int getLandmarksCount() {
if (landmarksBuilder_ == null) {
return landmarks_.size();
} else {
return landmarksBuilder_.getCount();
}
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public com.google.cloud.vision.v1.FaceAnnotation.Landmark getLandmarks(int index) {
if (landmarksBuilder_ == null) {
return landmarks_.get(index);
} else {
return landmarksBuilder_.getMessage(index);
}
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public Builder setLandmarks(
int index, com.google.cloud.vision.v1.FaceAnnotation.Landmark value) {
if (landmarksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureLandmarksIsMutable();
landmarks_.set(index, value);
onChanged();
} else {
landmarksBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public Builder setLandmarks(
int index, com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder builderForValue) {
if (landmarksBuilder_ == null) {
ensureLandmarksIsMutable();
landmarks_.set(index, builderForValue.build());
onChanged();
} else {
landmarksBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public Builder addLandmarks(com.google.cloud.vision.v1.FaceAnnotation.Landmark value) {
if (landmarksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureLandmarksIsMutable();
landmarks_.add(value);
onChanged();
} else {
landmarksBuilder_.addMessage(value);
}
return this;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public Builder addLandmarks(
int index, com.google.cloud.vision.v1.FaceAnnotation.Landmark value) {
if (landmarksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureLandmarksIsMutable();
landmarks_.add(index, value);
onChanged();
} else {
landmarksBuilder_.addMessage(index, value);
}
return this;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public Builder addLandmarks(
com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder builderForValue) {
if (landmarksBuilder_ == null) {
ensureLandmarksIsMutable();
landmarks_.add(builderForValue.build());
onChanged();
} else {
landmarksBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public Builder addLandmarks(
int index, com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder builderForValue) {
if (landmarksBuilder_ == null) {
ensureLandmarksIsMutable();
landmarks_.add(index, builderForValue.build());
onChanged();
} else {
landmarksBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public Builder addAllLandmarks(
java.lang.Iterable<? extends com.google.cloud.vision.v1.FaceAnnotation.Landmark> values) {
if (landmarksBuilder_ == null) {
ensureLandmarksIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, landmarks_);
onChanged();
} else {
landmarksBuilder_.addAllMessages(values);
}
return this;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public Builder clearLandmarks() {
if (landmarksBuilder_ == null) {
landmarks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
landmarksBuilder_.clear();
}
return this;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public Builder removeLandmarks(int index) {
if (landmarksBuilder_ == null) {
ensureLandmarksIsMutable();
landmarks_.remove(index);
onChanged();
} else {
landmarksBuilder_.remove(index);
}
return this;
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder getLandmarksBuilder(
int index) {
return getLandmarksFieldBuilder().getBuilder(index);
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public com.google.cloud.vision.v1.FaceAnnotation.LandmarkOrBuilder getLandmarksOrBuilder(
int index) {
if (landmarksBuilder_ == null) {
return landmarks_.get(index); } else {
return landmarksBuilder_.getMessageOrBuilder(index);
}
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public java.util.List<? extends com.google.cloud.vision.v1.FaceAnnotation.LandmarkOrBuilder>
getLandmarksOrBuilderList() {
if (landmarksBuilder_ != null) {
return landmarksBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(landmarks_);
}
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder addLandmarksBuilder() {
return getLandmarksFieldBuilder().addBuilder(
com.google.cloud.vision.v1.FaceAnnotation.Landmark.getDefaultInstance());
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder addLandmarksBuilder(
int index) {
return getLandmarksFieldBuilder().addBuilder(
index, com.google.cloud.vision.v1.FaceAnnotation.Landmark.getDefaultInstance());
}
/**
*
* Detected face landmarks.
*
*
* repeated .google.cloud.vision.v1.FaceAnnotation.Landmark landmarks = 3;
*/
public java.util.List<com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder>
getLandmarksBuilderList() {
return getLandmarksFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.vision.v1.FaceAnnotation.Landmark, com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder, com.google.cloud.vision.v1.FaceAnnotation.LandmarkOrBuilder>
getLandmarksFieldBuilder() {
if (landmarksBuilder_ == null) {
landmarksBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.vision.v1.FaceAnnotation.Landmark, com.google.cloud.vision.v1.FaceAnnotation.Landmark.Builder, com.google.cloud.vision.v1.FaceAnnotation.LandmarkOrBuilder>(
landmarks_,
((bitField0_ & 0x00000004) == 0x00000004),
getParentForChildren(),
isClean());
landmarks_ = null;
}
return landmarksBuilder_;
}
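// Usage sketch (illustrative only): landmarks is a repeated field, so the
// Builder exposes add/addAll/set/remove as above. Landmark.newBuilder() is
// assumed from the nested Landmark message generated earlier in this file;
// "builder" is a placeholder for a FaceAnnotation.Builder instance.
//
//   builder.addLandmarks(
//       com.google.cloud.vision.v1.FaceAnnotation.Landmark.newBuilder().build());
//   int count = builder.getLandmarksCount();
//   for (com.google.cloud.vision.v1.FaceAnnotation.Landmark lm : builder.getLandmarksList()) {
//     // inspect each detected landmark
//   }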
private float rollAngle_ ;
/**
*
* Roll angle. Indicates the amount of clockwise/anti-clockwise rotation of
* the
* face relative to the image vertical, about the axis perpendicular to the
* face. Range [-180,180].
*
*
* optional float roll_angle = 4;
*/
public float getRollAngle() {
return rollAngle_;
}
/**
*
* Roll angle. Indicates the amount of clockwise/anti-clockwise rotation of
* the
* face relative to the image vertical, about the axis perpendicular to the
* face. Range [-180,180].
*
*
* optional float roll_angle = 4;
*/
public Builder setRollAngle(float value) {
rollAngle_ = value;
onChanged();
return this;
}
/**
*
* Roll angle. Indicates the amount of clockwise/anti-clockwise rotation of
* the
* face relative to the image vertical, about the axis perpendicular to the
* face. Range [-180,180].
*
*
* optional float roll_angle = 4;
*/
public Builder clearRollAngle() {
rollAngle_ = 0F;
onChanged();
return this;
}
private float panAngle_ ;
/**
*
* Yaw angle. Indicates the leftward/rightward angle that the face is
* pointing, relative to the vertical plane perpendicular to the image. Range
* [-180,180].
*
*
* optional float pan_angle = 5;
*/
public float getPanAngle() {
return panAngle_;
}
/**
*
* Yaw angle. Indicates the leftward/rightward angle that the face is
* pointing, relative to the vertical plane perpendicular to the image. Range
* [-180,180].
*
*
* optional float pan_angle = 5;
*/
public Builder setPanAngle(float value) {
panAngle_ = value;
onChanged();
return this;
}
/**
*
* Yaw angle. Indicates the leftward/rightward angle that the face is
* pointing, relative to the vertical plane perpendicular to the image. Range
* [-180,180].
*
*
* optional float pan_angle = 5;
*/
public Builder clearPanAngle() {
panAngle_ = 0F;
onChanged();
return this;
}
private float tiltAngle_ ;
/**
*
* Pitch angle. Indicates the upwards/downwards angle that the face is
* pointing
* relative to the image's horizontal plane. Range [-180,180].
*
*
* optional float tilt_angle = 6;
*/
public float getTiltAngle() {
return tiltAngle_;
}
/**
*
* Pitch angle. Indicates the upwards/downwards angle that the face is
* pointing
* relative to the image's horizontal plane. Range [-180,180].
*
*
* optional float tilt_angle = 6;
*/
public Builder setTiltAngle(float value) {
tiltAngle_ = value;
onChanged();
return this;
}
/**
*
* Pitch angle. Indicates the upwards/downwards angle that the face is
* pointing
* relative to the image's horizontal plane. Range [-180,180].
*
*
* optional float tilt_angle = 6;
*/
public Builder clearTiltAngle() {
tiltAngle_ = 0F;
onChanged();
return this;
}
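// Usage sketch (illustrative only): roll, pan and tilt above are plain float
// fields in degrees, so a rough frontal-pose check can be written directly
// against the getters. The 15-degree threshold is an arbitrary example value;
// "builder" is a placeholder for a FaceAnnotation.Builder instance.
//
//   boolean roughlyFrontal =
//       Math.abs(builder.getRollAngle()) < 15F
//           && Math.abs(builder.getPanAngle()) < 15F
//           && Math.abs(builder.getTiltAngle()) < 15F;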
private float detectionConfidence_ ;
/**
*
* Detection confidence. Range [0, 1].
*
*
* optional float detection_confidence = 7;
*/
public float getDetectionConfidence() {
return detectionConfidence_;
}
/**
*
* Detection confidence. Range [0, 1].
*
*
* optional float detection_confidence = 7;
*/
public Builder setDetectionConfidence(float value) {
detectionConfidence_ = value;
onChanged();
return this;
}
/**
*
* Detection confidence. Range [0, 1].
*
*
* optional float detection_confidence = 7;
*/
public Builder clearDetectionConfidence() {
detectionConfidence_ = 0F;
onChanged();
return this;
}
private float landmarkingConfidence_ ;
/**
*
* Face landmarking confidence. Range [0, 1].
*
*
* optional float landmarking_confidence = 8;
*/
public float getLandmarkingConfidence() {
return landmarkingConfidence_;
}
/**
*
* Face landmarking confidence. Range [0, 1].
*
*
* optional float landmarking_confidence = 8;
*/
public Builder setLandmarkingConfidence(float value) {
landmarkingConfidence_ = value;
onChanged();
return this;
}
/**
*
* Face landmarking confidence. Range [0, 1].
*
*
* optional float landmarking_confidence = 8;
*/
public Builder clearLandmarkingConfidence() {
landmarkingConfidence_ = 0F;
onChanged();
return this;
}
private int joyLikelihood_ = 0;
/**
*
* Joy likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood joy_likelihood = 9;
*/
public int getJoyLikelihoodValue() {
return joyLikelihood_;
}
/**
*
* Joy likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood joy_likelihood = 9;
*/
public Builder setJoyLikelihoodValue(int value) {
joyLikelihood_ = value;
onChanged();
return this;
}
/**
*
* Joy likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood joy_likelihood = 9;
*/
public com.google.cloud.vision.v1.Likelihood getJoyLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(joyLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
/**
*
* Joy likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood joy_likelihood = 9;
*/
public Builder setJoyLikelihood(com.google.cloud.vision.v1.Likelihood value) {
if (value == null) {
throw new NullPointerException();
}
joyLikelihood_ = value.getNumber();
onChanged();
return this;
}
/**
*
* Joy likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood joy_likelihood = 9;
*/
public Builder clearJoyLikelihood() {
joyLikelihood_ = 0;
onChanged();
return this;
}
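// Usage sketch (illustrative only): each likelihood field stores the raw
// enum number; getJoyLikelihood() maps it to the Likelihood enum and falls
// back to UNRECOGNIZED for numbers this version does not know, as shown
// above. "builder" is a placeholder for a FaceAnnotation.Builder instance.
//
//   com.google.cloud.vision.v1.Likelihood joy = builder.getJoyLikelihood();
//   if (joy == com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED) {
//     int raw = builder.getJoyLikelihoodValue();  // inspect the raw wire value
//   }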
private int sorrowLikelihood_ = 0;
/**
*
* Sorrow likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood sorrow_likelihood = 10;
*/
public int getSorrowLikelihoodValue() {
return sorrowLikelihood_;
}
/**
*
* Sorrow likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood sorrow_likelihood = 10;
*/
public Builder setSorrowLikelihoodValue(int value) {
sorrowLikelihood_ = value;
onChanged();
return this;
}
/**
*
* Sorrow likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood sorrow_likelihood = 10;
*/
public com.google.cloud.vision.v1.Likelihood getSorrowLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(sorrowLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
/**
*
* Sorrow likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood sorrow_likelihood = 10;
*/
public Builder setSorrowLikelihood(com.google.cloud.vision.v1.Likelihood value) {
if (value == null) {
throw new NullPointerException();
}
sorrowLikelihood_ = value.getNumber();
onChanged();
return this;
}
/**
*
* Sorrow likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood sorrow_likelihood = 10;
*/
public Builder clearSorrowLikelihood() {
sorrowLikelihood_ = 0;
onChanged();
return this;
}
private int angerLikelihood_ = 0;
/**
*
* Anger likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood anger_likelihood = 11;
*/
public int getAngerLikelihoodValue() {
return angerLikelihood_;
}
/**
*
* Anger likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood anger_likelihood = 11;
*/
public Builder setAngerLikelihoodValue(int value) {
angerLikelihood_ = value;
onChanged();
return this;
}
/**
*
* Anger likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood anger_likelihood = 11;
*/
public com.google.cloud.vision.v1.Likelihood getAngerLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(angerLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
/**
*
* Anger likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood anger_likelihood = 11;
*/
public Builder setAngerLikelihood(com.google.cloud.vision.v1.Likelihood value) {
if (value == null) {
throw new NullPointerException();
}
angerLikelihood_ = value.getNumber();
onChanged();
return this;
}
/**
*
* Anger likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood anger_likelihood = 11;
*/
public Builder clearAngerLikelihood() {
angerLikelihood_ = 0;
onChanged();
return this;
}
private int surpriseLikelihood_ = 0;
/**
*
* Surprise likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood surprise_likelihood = 12;
*/
public int getSurpriseLikelihoodValue() {
return surpriseLikelihood_;
}
/**
*
* Surprise likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood surprise_likelihood = 12;
*/
public Builder setSurpriseLikelihoodValue(int value) {
surpriseLikelihood_ = value;
onChanged();
return this;
}
/**
*
* Surprise likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood surprise_likelihood = 12;
*/
public com.google.cloud.vision.v1.Likelihood getSurpriseLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(surpriseLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
/**
*
* Surprise likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood surprise_likelihood = 12;
*/
public Builder setSurpriseLikelihood(com.google.cloud.vision.v1.Likelihood value) {
if (value == null) {
throw new NullPointerException();
}
surpriseLikelihood_ = value.getNumber();
onChanged();
return this;
}
/**
*
* Surprise likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood surprise_likelihood = 12;
*/
public Builder clearSurpriseLikelihood() {
surpriseLikelihood_ = 0;
onChanged();
return this;
}
private int underExposedLikelihood_ = 0;
/**
*
* Under-exposed likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood under_exposed_likelihood = 13;
*/
public int getUnderExposedLikelihoodValue() {
return underExposedLikelihood_;
}
/**
*
* Under-exposed likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood under_exposed_likelihood = 13;
*/
public Builder setUnderExposedLikelihoodValue(int value) {
underExposedLikelihood_ = value;
onChanged();
return this;
}
/**
*
* Under-exposed likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood under_exposed_likelihood = 13;
*/
public com.google.cloud.vision.v1.Likelihood getUnderExposedLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(underExposedLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
/**
*
* Under-exposed likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood under_exposed_likelihood = 13;
*/
public Builder setUnderExposedLikelihood(com.google.cloud.vision.v1.Likelihood value) {
if (value == null) {
throw new NullPointerException();
}
underExposedLikelihood_ = value.getNumber();
onChanged();
return this;
}
/**
*
* Under-exposed likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood under_exposed_likelihood = 13;
*/
public Builder clearUnderExposedLikelihood() {
underExposedLikelihood_ = 0;
onChanged();
return this;
}
private int blurredLikelihood_ = 0;
/**
*
* Blurred likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood blurred_likelihood = 14;
*/
public int getBlurredLikelihoodValue() {
return blurredLikelihood_;
}
/**
*
* Blurred likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood blurred_likelihood = 14;
*/
public Builder setBlurredLikelihoodValue(int value) {
blurredLikelihood_ = value;
onChanged();
return this;
}
/**
*
* Blurred likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood blurred_likelihood = 14;
*/
public com.google.cloud.vision.v1.Likelihood getBlurredLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(blurredLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
/**
*
* Blurred likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood blurred_likelihood = 14;
*/
public Builder setBlurredLikelihood(com.google.cloud.vision.v1.Likelihood value) {
if (value == null) {
throw new NullPointerException();
}
blurredLikelihood_ = value.getNumber();
onChanged();
return this;
}
/**
*
* Blurred likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood blurred_likelihood = 14;
*/
public Builder clearBlurredLikelihood() {
blurredLikelihood_ = 0;
onChanged();
return this;
}
private int headwearLikelihood_ = 0;
/**
*
* Headwear likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood headwear_likelihood = 15;
*/
public int getHeadwearLikelihoodValue() {
return headwearLikelihood_;
}
/**
*
* Headwear likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood headwear_likelihood = 15;
*/
public Builder setHeadwearLikelihoodValue(int value) {
headwearLikelihood_ = value;
onChanged();
return this;
}
/**
*
* Headwear likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood headwear_likelihood = 15;
*/
public com.google.cloud.vision.v1.Likelihood getHeadwearLikelihood() {
com.google.cloud.vision.v1.Likelihood result = com.google.cloud.vision.v1.Likelihood.valueOf(headwearLikelihood_);
return result == null ? com.google.cloud.vision.v1.Likelihood.UNRECOGNIZED : result;
}
/**
*
* Headwear likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood headwear_likelihood = 15;
*/
public Builder setHeadwearLikelihood(com.google.cloud.vision.v1.Likelihood value) {
if (value == null) {
throw new NullPointerException();
}
headwearLikelihood_ = value.getNumber();
onChanged();
return this;
}
/**
*
* Headwear likelihood.
*
*
* optional .google.cloud.vision.v1.Likelihood headwear_likelihood = 15;
*/
public Builder clearHeadwearLikelihood() {
headwearLikelihood_ = 0;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:google.cloud.vision.v1.FaceAnnotation)
}
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1.FaceAnnotation)
private static final com.google.cloud.vision.v1.FaceAnnotation DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.vision.v1.FaceAnnotation();
}
public static com.google.cloud.vision.v1.FaceAnnotation getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<FaceAnnotation>
PARSER = new com.google.protobuf.AbstractParser<FaceAnnotation>() {
public FaceAnnotation parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new FaceAnnotation(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<FaceAnnotation> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<FaceAnnotation> getParserForType() {
return PARSER;
}
public com.google.cloud.vision.v1.FaceAnnotation getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}