com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of proto-google-cloud-dialogflow-cx-v3 Show documentation
PROTO library for proto-google-cloud-dialogflow-cx-v3
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/session.proto
package com.google.cloud.dialogflow.cx.v3;
/**
*
*
*
* Contains a speech recognition result corresponding to a portion of the audio
* that is currently being processed or an indication that this is the end
* of the single requested utterance.
* Example:
* 1. transcript: "tube"
* 2. transcript: "to be a"
* 3. transcript: "to be"
* 4. transcript: "to be or not to be"
* is_final: true
* 5. transcript: " that's"
* 6. transcript: " that is"
* 7. message_type: `END_OF_SINGLE_UTTERANCE`
* 8. transcript: " that is the question"
* is_final: true
* Only two of the responses contain final results (#4 and #8 indicated by
* `is_final: true`). Concatenating these generates the full transcript: "to be
* or not to be that is the question".
* In each response we populate:
* * for `TRANSCRIPT`: `transcript` and possibly `is_final`.
* * for `END_OF_SINGLE_UTTERANCE`: only `message_type`.
*
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.StreamingRecognitionResult}
*/
public final class StreamingRecognitionResult extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)
StreamingRecognitionResultOrBuilder {
private static final long serialVersionUID = 0L;
// Use StreamingRecognitionResult.newBuilder() to construct.
// Note: the builder parameter is the wildcard-typed GeneratedMessageV3.Builder<?>;
// the type argument was lost in an HTML extraction of this file and is restored here.
private StreamingRecognitionResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: initializes fields to their proto3 defaults
// (0 enum value, empty strings, empty repeated list).
private StreamingRecognitionResult() {
messageType_ = 0;
transcript_ = "";
speechWordInfo_ = java.util.Collections.emptyList();
languageCode_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new StreamingRecognitionResult();
}
// Exposes fields that were present on the wire but unknown to this schema version.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Each case label is a protobuf tag:
// (field_number << 3) | wire_type. E.g. 18 = field 2, length-delimited.
private StreamingRecognitionResult(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0: // End of the input stream.
done = true;
break;
case 8: // message_type = 1 (varint enum)
{
int rawValue = input.readEnum();
messageType_ = rawValue;
break;
}
case 18: // transcript = 2 (string)
{
java.lang.String s = input.readStringRequireUtf8();
transcript_ = s;
break;
}
case 24: // is_final = 3 (bool)
{
isFinal_ = input.readBool();
break;
}
case 37: // confidence = 4 (fixed32 float)
{
confidence_ = input.readFloat();
break;
}
case 53: // stability = 6 (fixed32 float)
{
stability_ = input.readFloat();
break;
}
case 58: // speech_word_info = 7 (repeated message)
{
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
// Lazily switch from the immutable empty list to a mutable one.
// Type argument restored: the original ArrayList<SpeechWordInfo> had
// its generics stripped by an HTML extraction, leaving a raw type.
speechWordInfo_ =
new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo>();
mutable_bitField0_ |= 0x00000001;
}
speechWordInfo_.add(
input.readMessage(
com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.parser(),
extensionRegistry));
break;
}
case 66: // speech_end_offset = 8 (message); merge with any previous value.
{
com.google.protobuf.Duration.Builder subBuilder = null;
if (speechEndOffset_ != null) {
subBuilder = speechEndOffset_.toBuilder();
}
speechEndOffset_ =
input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(speechEndOffset_);
speechEndOffset_ = subBuilder.buildPartial();
}
break;
}
case 82: // language_code = 10 (string)
{
java.lang.String s = input.readStringRequireUtf8();
languageCode_ = s;
break;
}
default:
{
// Unknown field: preserve it in unknownFields; stop on end-group.
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
// Seal the repeated field and unknown fields even on error, so the
// partially-parsed message attached to the exception is well-formed.
if (((mutable_bitField0_ & 0x00000001) != 0)) {
speechWordInfo_ = java.util.Collections.unmodifiableList(speechWordInfo_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Message descriptor, resolved from the generated file-level SessionProto holder.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_descriptor;
}
// Maps descriptor fields to the generated accessors for reflection support.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.class,
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.Builder.class);
}
/**
 *
 *
 *
 * Type of the response message.
 *
 *
 * Protobuf enum {@code google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType}
 */
public enum MessageType implements com.google.protobuf.ProtocolMessageEnum {
/**
 *
 *
 *
 * Not specified. Should never be used.
 *
 *
 * MESSAGE_TYPE_UNSPECIFIED = 0;
 */
MESSAGE_TYPE_UNSPECIFIED(0),
/**
 *
 *
 *
 * Message contains a (possibly partial) transcript.
 *
 *
 * TRANSCRIPT = 1;
 */
TRANSCRIPT(1),
/**
 *
 *
 *
 * Event indicates that the server has detected the end of the user's speech
 * utterance and expects no additional speech. Therefore, the server will
 * not process additional audio (although it may subsequently return
 * additional results). The client should stop sending additional audio
 * data, half-close the gRPC connection, and wait for any additional results
 * until the server closes the gRPC connection. This message is only sent if
 * [`single_utterance`][google.cloud.dialogflow.cx.v3.InputAudioConfig.single_utterance] was set to
 * `true`, and is not used otherwise.
 *
 *
 * END_OF_SINGLE_UTTERANCE = 2;
 */
END_OF_SINGLE_UTTERANCE(2),
// Wire values not known to this generated code version map to UNRECOGNIZED.
UNRECOGNIZED(-1),
;
/**
 *
 *
 *
 * Not specified. Should never be used.
 *
 *
 * MESSAGE_TYPE_UNSPECIFIED = 0;
 */
public static final int MESSAGE_TYPE_UNSPECIFIED_VALUE = 0;
/**
 *
 *
 *
 * Message contains a (possibly partial) transcript.
 *
 *
 * TRANSCRIPT = 1;
 */
public static final int TRANSCRIPT_VALUE = 1;
/**
 *
 *
 *
 * Event indicates that the server has detected the end of the user's speech
 * utterance and expects no additional speech. Therefore, the server will
 * not process additional audio (although it may subsequently return
 * additional results). The client should stop sending additional audio
 * data, half-close the gRPC connection, and wait for any additional results
 * until the server closes the gRPC connection. This message is only sent if
 * [`single_utterance`][google.cloud.dialogflow.cx.v3.InputAudioConfig.single_utterance] was set to
 * `true`, and is not used otherwise.
 *
 *
 * END_OF_SINGLE_UTTERANCE = 2;
 */
public static final int END_OF_SINGLE_UTTERANCE_VALUE = 2;
// Returns the numeric wire value; UNRECOGNIZED has no wire value by contract.
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static MessageType valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value, or null if unknown.
 */
public static MessageType forNumber(int value) {
switch (value) {
case 0:
return MESSAGE_TYPE_UNSPECIFIED;
case 1:
return TRANSCRIPT;
case 2:
return END_OF_SINGLE_UTTERANCE;
default:
return null;
}
}
// Type argument <MessageType> restored on EnumLiteMap: it was stripped by an
// HTML extraction of this file, leaving raw types that do not match the
// generated-code contract.
public static com.google.protobuf.Internal.EnumLiteMap<MessageType> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<MessageType> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<MessageType>() {
public MessageType findValueByNumber(int number) {
return MessageType.forNumber(number);
}
};
// Descriptor for this enum value; UNRECOGNIZED has none.
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.getDescriptor()
.getEnumTypes()
.get(0);
}
private static final MessageType[] VALUES = values();
// Resolves an enum value from a descriptor; index -1 means an unknown value.
public static MessageType valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private MessageType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType)
}
public static final int MESSAGE_TYPE_FIELD_NUMBER = 1;
// Stored as the raw enum wire value so unknown values survive round-trips.
private int messageType_;
/**
 *
 *
 *
 * Type of the result message.
 *
 *
 * .google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;
 *
 *
 * @return The enum numeric value on the wire for messageType.
 */
@java.lang.Override
public int getMessageTypeValue() {
return messageType_;
}
/**
 *
 *
 *
 * Type of the result message.
 *
 *
 * .google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;
 *
 *
 * @return The messageType, or UNRECOGNIZED if the wire value is unknown.
 */
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType getMessageType() {
@SuppressWarnings("deprecation")
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType result =
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType.valueOf(
messageType_);
return result == null
? com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType.UNRECOGNIZED
: result;
}
public static final int TRANSCRIPT_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; decoded lazily and cached (hence volatile).
private volatile java.lang.Object transcript_;
/**
 *
 *
 *
 * Transcript text representing the words that the user spoke.
 * Populated if and only if `message_type` = `TRANSCRIPT`.
 *
 *
 * string transcript = 2;
 *
 * @return The transcript.
 */
@java.lang.Override
public java.lang.String getTranscript() {
java.lang.Object ref = transcript_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access: decode the ByteString once and cache the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
transcript_ = s;
return s;
}
}
/**
 *
 *
 *
 * Transcript text representing the words that the user spoke.
 * Populated if and only if `message_type` = `TRANSCRIPT`.
 *
 *
 * string transcript = 2;
 *
 * @return The bytes for transcript.
 */
@java.lang.Override
public com.google.protobuf.ByteString getTranscriptBytes() {
java.lang.Object ref = transcript_;
if (ref instanceof java.lang.String) {
// First byte access: encode the String once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
transcript_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int IS_FINAL_FIELD_NUMBER = 3;
private boolean isFinal_;
/**
 *
 *
 *
 * If `false`, the `StreamingRecognitionResult` represents an
 * interim result that may change. If `true`, the recognizer will not return
 * any further hypotheses about this piece of the audio. May only be populated
 * for `message_type` = `TRANSCRIPT`.
 *
 *
 * bool is_final = 3;
 *
 * @return The isFinal.
 */
@java.lang.Override
public boolean getIsFinal() {
return isFinal_;
}
public static final int CONFIDENCE_FIELD_NUMBER = 4;
private float confidence_;
/**
 *
 *
 *
 * The Speech confidence between 0.0 and 1.0 for the current portion of audio.
 * A higher number indicates an estimated greater likelihood that the
 * recognized words are correct. The default of 0.0 is a sentinel value
 * indicating that confidence was not set.
 * This field is typically only provided if `is_final` is true and you should
 * not rely on it being accurate or even set.
 *
 *
 * float confidence = 4;
 *
 * @return The confidence.
 */
@java.lang.Override
public float getConfidence() {
return confidence_;
}
public static final int STABILITY_FIELD_NUMBER = 6;
private float stability_;
/**
 *
 *
 *
 * An estimate of the likelihood that the speech recognizer will
 * not change its guess about this interim recognition result:
 * * If the value is unspecified or 0.0, Dialogflow didn't compute the
 * stability. In particular, Dialogflow will only provide stability for
 * `TRANSCRIPT` results with `is_final = false`.
 * * Otherwise, the value is in (0.0, 1.0] where 0.0 means completely
 * unstable and 1.0 means completely stable.
 *
 *
 * float stability = 6;
 *
 * @return The stability.
 */
@java.lang.Override
public float getStability() {
return stability_;
}
public static final int SPEECH_WORD_INFO_FIELD_NUMBER = 7;
private java.util.List speechWordInfo_;
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript]. Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;
*/
@java.lang.Override
public java.util.List getSpeechWordInfoList() {
return speechWordInfo_;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript]. Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;
*/
@java.lang.Override
public java.util.List extends com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>
getSpeechWordInfoOrBuilderList() {
return speechWordInfo_;
}
/**
 *
 *
 *
 * Word-specific information for the words recognized by Speech in
 * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript]. Populated if and only if `message_type` = `TRANSCRIPT` and
 * [InputAudioConfig.enable_word_info] is set.
 *
 *
 * repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;
 *
 * @return The number of speech_word_info entries.
 */
@java.lang.Override
public int getSpeechWordInfoCount() {
return speechWordInfo_.size();
}
/**
 *
 *
 *
 * Word-specific information for the words recognized by Speech in
 * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript]. Populated if and only if `message_type` = `TRANSCRIPT` and
 * [InputAudioConfig.enable_word_info] is set.
 *
 *
 * repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;
 *
 * @param index Zero-based position in the repeated field.
 * @return The element at {@code index}.
 */
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo getSpeechWordInfo(int index) {
return speechWordInfo_.get(index);
}
/**
 *
 *
 *
 * Word-specific information for the words recognized by Speech in
 * [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript]. Populated if and only if `message_type` = `TRANSCRIPT` and
 * [InputAudioConfig.enable_word_info] is set.
 *
 *
 * repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;
 *
 * @param index Zero-based position in the repeated field.
 * @return A read-only view of the element at {@code index}.
 */
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder getSpeechWordInfoOrBuilder(
int index) {
return speechWordInfo_.get(index);
}
public static final int SPEECH_END_OFFSET_FIELD_NUMBER = 8;
// null means "not set" for this singular message field (see hasSpeechEndOffset()).
private com.google.protobuf.Duration speechEndOffset_;
/**
 *
 *
 *
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 *
 *
 * .google.protobuf.Duration speech_end_offset = 8;
 *
 * @return Whether the speechEndOffset field is set.
 */
@java.lang.Override
public boolean hasSpeechEndOffset() {
return speechEndOffset_ != null;
}
/**
 *
 *
 *
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 *
 *
 * .google.protobuf.Duration speech_end_offset = 8;
 *
 * @return The speechEndOffset, or the default Duration if unset.
 */
@java.lang.Override
public com.google.protobuf.Duration getSpeechEndOffset() {
return speechEndOffset_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: speechEndOffset_;
}
/**
 *
 *
 *
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 *
 *
 * .google.protobuf.Duration speech_end_offset = 8;
 */
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getSpeechEndOffsetOrBuilder() {
return getSpeechEndOffset();
}
public static final int LANGUAGE_CODE_FIELD_NUMBER = 10;
// Holds either a String or a ByteString; decoded lazily and cached (hence volatile).
private volatile java.lang.Object languageCode_;
/**
 *
 *
 *
 * Detected language code for the transcript.
 *
 *
 * string language_code = 10;
 *
 * @return The languageCode.
 */
@java.lang.Override
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access: decode the ByteString once and cache the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
}
}
/**
 *
 *
 *
 * Detected language code for the transcript.
 *
 *
 * string language_code = 10;
 *
 * @return The bytes for languageCode.
 */
@java.lang.Override
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
// First byte access: encode the String once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Always true for this message: it has no required fields.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this message in field-number order, writing a field to the wire
// only when it differs from its proto3 default (0 / "" / false / empty / unset).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (messageType_
!= com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType
.MESSAGE_TYPE_UNSPECIFIED
.getNumber()) {
output.writeEnum(1, messageType_);
}
if (!getTranscriptBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, transcript_);
}
if (isFinal_ != false) {
output.writeBool(3, isFinal_);
}
if (confidence_ != 0F) {
output.writeFloat(4, confidence_);
}
if (stability_ != 0F) {
output.writeFloat(6, stability_);
}
for (int i = 0; i < speechWordInfo_.size(); i++) {
output.writeMessage(7, speechWordInfo_.get(i));
}
if (speechEndOffset_ != null) {
output.writeMessage(8, getSpeechEndOffset());
}
if (!getLanguageCodeBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 10, languageCode_);
}
// Round-trip any fields this schema version did not recognize.
unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size; mirrors writeTo()'s
// default-skipping logic exactly so the two stay consistent.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (messageType_
!= com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType
.MESSAGE_TYPE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, messageType_);
}
if (!getTranscriptBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, transcript_);
}
if (isFinal_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, isFinal_);
}
if (confidence_ != 0F) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(4, confidence_);
}
if (stability_ != 0F) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(6, stability_);
}
for (int i = 0; i < speechWordInfo_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(7, speechWordInfo_.get(i));
}
if (speechEndOffset_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getSpeechEndOffset());
}
if (!getLanguageCodeBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, languageCode_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field structural equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult other =
(com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult) obj;
if (messageType_ != other.messageType_) return false;
if (!getTranscript().equals(other.getTranscript())) return false;
if (getIsFinal() != other.getIsFinal()) return false;
// Floats compared via bit patterns so NaN == NaN and -0.0 != 0.0,
// matching protobuf equality semantics.
if (java.lang.Float.floatToIntBits(getConfidence())
!= java.lang.Float.floatToIntBits(other.getConfidence())) return false;
if (java.lang.Float.floatToIntBits(getStability())
!= java.lang.Float.floatToIntBits(other.getStability())) return false;
if (!getSpeechWordInfoList().equals(other.getSpeechWordInfoList())) return false;
if (hasSpeechEndOffset() != other.hasSpeechEndOffset()) return false;
if (hasSpeechEndOffset()) {
if (!getSpeechEndOffset().equals(other.getSpeechEndOffset())) return false;
}
if (!getLanguageCode().equals(other.getLanguageCode())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash over the same fields equals() compares, keyed by field number.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + MESSAGE_TYPE_FIELD_NUMBER;
hash = (53 * hash) + messageType_;
hash = (37 * hash) + TRANSCRIPT_FIELD_NUMBER;
hash = (53 * hash) + getTranscript().hashCode();
hash = (37 * hash) + IS_FINAL_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getIsFinal());
hash = (37 * hash) + CONFIDENCE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getConfidence());
hash = (37 * hash) + STABILITY_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getStability());
if (getSpeechWordInfoCount() > 0) {
hash = (37 * hash) + SPEECH_WORD_INFO_FIELD_NUMBER;
hash = (53 * hash) + getSpeechWordInfoList().hashCode();
}
if (hasSpeechEndOffset()) {
hash = (37 * hash) + SPEECH_END_OFFSET_FIELD_NUMBER;
hash = (53 * hash) + getSpeechEndOffset().hashCode();
}
hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
hash = (53 * hash) + getLanguageCode().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input source; all delegate to
// the generated PARSER or to the GeneratedMessageV3 stream helpers.
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with the given prototype's fields.
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// The default instance skips the mergeFrom to avoid copying empty state.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
*
* Contains a speech recognition result corresponding to a portion of the audio
* that is currently being processed or an indication that this is the end
* of the single requested utterance.
* Example:
* 1. transcript: "tube"
* 2. transcript: "to be a"
* 3. transcript: "to be"
* 4. transcript: "to be or not to be"
* is_final: true
* 5. transcript: " that's"
* 6. transcript: " that is"
* 7. message_type: `END_OF_SINGLE_UTTERANCE`
* 8. transcript: " that is the question"
* is_final: true
* Only two of the responses contain final results (#4 and #8 indicated by
* `is_final: true`). Concatenating these generates the full transcript: "to be
* or not to be that is the question".
* In each response we populate:
* * for `TRANSCRIPT`: `transcript` and possibly `is_final`.
* * for `END_OF_SINGLE_UTTERANCE`: only `message_type`.
*
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.StreamingRecognitionResult}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResultOrBuilder {
// Message descriptor, resolved from the generated file-level SessionProto holder.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_descriptor;
}
// Maps descriptor fields to the generated accessors for reflection support.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.class,
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested-field builders when the runtime requires it
// (needed for parent/child change-notification plumbing).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSpeechWordInfoFieldBuilder();
}
}
// Resets every field to its proto3 default.
@java.lang.Override
public Builder clear() {
super.clear();
messageType_ = 0;
transcript_ = "";
isFinal_ = false;
confidence_ = 0F;
stability_ = 0F;
if (speechWordInfoBuilder_ == null) {
speechWordInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
speechWordInfoBuilder_.clear();
}
if (speechEndOffsetBuilder_ == null) {
speechEndOffset_ = null;
} else {
speechEndOffset_ = null;
speechEndOffsetBuilder_ = null;
}
languageCode_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3_StreamingRecognitionResult_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.getDefaultInstance();
}
// Builds and verifies initialization (trivially true: no required fields).
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult build() {
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without an initialization check.
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult buildPartial() {
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult result =
new com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult(this);
int from_bitField0_ = bitField0_;
result.messageType_ = messageType_;
result.transcript_ = transcript_;
result.isFinal_ = isFinal_;
result.confidence_ = confidence_;
result.stability_ = stability_;
if (speechWordInfoBuilder_ == null) {
// Seal the repeated field; the builder drops ownership of the list.
if (((bitField0_ & 0x00000001) != 0)) {
speechWordInfo_ = java.util.Collections.unmodifiableList(speechWordInfo_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.speechWordInfo_ = speechWordInfo_;
} else {
result.speechWordInfo_ = speechWordInfoBuilder_.build();
}
if (speechEndOffsetBuilder_ == null) {
result.speechEndOffset_ = speechEndOffset_;
} else {
result.speechEndOffset_ = speechEndOffsetBuilder_.build();
}
result.languageCode_ = languageCode_;
onBuilt();
return result;
}
// Boilerplate overrides that delegate to GeneratedMessageV3.Builder, exposing
// the standard reflective field-mutation API on this concrete builder type.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
/**
 * Merges {@code other} into this builder, routing to the type-specific merge
 * when possible and otherwise falling back to the superclass's reflective merge.
 */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (!(other instanceof com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)) {
    super.mergeFrom(other);
    return this;
  }
  return mergeFrom((com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult) other);
}
// Field-by-field merge from another StreamingRecognitionResult (proto3
// semantics): scalar fields overwrite only when `other` has a non-default
// value, the repeated field is concatenated, and the Duration field is
// merged recursively.
public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult other) {
// Merging the default instance is a no-op.
if (other
== com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.getDefaultInstance())
return this;
if (other.messageType_ != 0) {
setMessageTypeValue(other.getMessageTypeValue());
}
if (!other.getTranscript().isEmpty()) {
transcript_ = other.transcript_;
onChanged();
}
if (other.getIsFinal() != false) {
setIsFinal(other.getIsFinal());
}
if (other.getConfidence() != 0F) {
setConfidence(other.getConfidence());
}
if (other.getStability() != 0F) {
setStability(other.getStability());
}
if (speechWordInfoBuilder_ == null) {
// List mode: adopt other's list wholesale when ours is empty (clearing the
// mutable bit, since the adopted list is the message's immutable list),
// otherwise copy-on-write and append.
if (!other.speechWordInfo_.isEmpty()) {
if (speechWordInfo_.isEmpty()) {
speechWordInfo_ = other.speechWordInfo_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSpeechWordInfoIsMutable();
speechWordInfo_.addAll(other.speechWordInfo_);
}
onChanged();
}
} else {
// Builder mode: if our field builder is empty, discard it and adopt
// other's list directly, re-creating the builder only when the runtime
// forces field builders; otherwise append through the builder.
if (!other.speechWordInfo_.isEmpty()) {
if (speechWordInfoBuilder_.isEmpty()) {
speechWordInfoBuilder_.dispose();
speechWordInfoBuilder_ = null;
speechWordInfo_ = other.speechWordInfo_;
bitField0_ = (bitField0_ & ~0x00000001);
speechWordInfoBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getSpeechWordInfoFieldBuilder()
: null;
} else {
speechWordInfoBuilder_.addAllMessages(other.speechWordInfo_);
}
}
}
if (other.hasSpeechEndOffset()) {
mergeSpeechEndOffset(other.getSpeechEndOffset());
}
if (!other.getLanguageCode().isEmpty()) {
languageCode_ = other.languageCode_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// proto3 message with no required fields: always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from a CodedInputStream, merging whatever fields were successfully
// read even when parsing fails part-way: the `finally` block merges the
// partial message before the unwrapped IOException propagates.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep the partially-parsed message so the finally block can merge it.
parsedMessage =
(com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0 tracks whether speechWordInfo_ is a private mutable copy (see
// ensureSpeechWordInfoIsMutable / buildPartial).
private int bitField0_;

// Wire value of enum field `message_type = 1`; 0 is the proto default.
private int messageType_ = 0;
/**
 * Type of the result message.
 *
 * <code>.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;</code>
 *
 * @return The enum numeric value on the wire for messageType.
 */
@java.lang.Override
public int getMessageTypeValue() {
  return messageType_;
}
/**
 * Sets the raw wire value for message_type; values with no matching enum
 * constant are preserved as-is.
 *
 * @param value The enum numeric value on the wire for messageType to set.
 * @return This builder for chaining.
 */
public Builder setMessageTypeValue(int value) {
  messageType_ = value;
  onChanged();
  return this;
}
/**
 * Type of the result message.
 *
 * <code>.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType message_type = 1;</code>
 *
 * @return The messageType, or {@code UNRECOGNIZED} when the stored wire value
 *     maps to no known constant.
 */
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType
    getMessageType() {
  // forNumber(int) is the non-deprecated replacement for the generated
  // valueOf(int) lookup; behavior (null on unknown value) is identical.
  com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType result =
      com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType.forNumber(
          messageType_);
  return result == null
      ? com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType.UNRECOGNIZED
      : result;
}
/**
 * Sets message_type from an enum constant.
 *
 * @param value The messageType to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setMessageType(
    com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.MessageType value) {
  if (value == null) {
    throw new NullPointerException();
  }
  messageType_ = value.getNumber();
  onChanged();
  return this;
}
/**
 * Resets message_type to its default value (0).
 *
 * @return This builder for chaining.
 */
public Builder clearMessageType() {
  messageType_ = 0;
  onChanged();
  return this;
}
// Backing store for `string transcript = 2`; holds either a String or a
// not-yet-decoded ByteString.
private java.lang.Object transcript_ = "";
/**
 * Transcript text representing the words that the user spoke.
 * Populated if and only if {@code message_type} = {@code TRANSCRIPT}.
 *
 * <code>string transcript = 2;</code>
 *
 * @return The transcript.
 */
public java.lang.String getTranscript() {
  java.lang.Object ref = transcript_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Decode the ByteString form once and cache the resulting String.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) ref;
  java.lang.String decoded = bytes.toStringUtf8();
  transcript_ = decoded;
  return decoded;
}
/**
 * Transcript text representing the words that the user spoke.
 *
 * <code>string transcript = 2;</code>
 *
 * @return The bytes for transcript.
 */
public com.google.protobuf.ByteString getTranscriptBytes() {
  java.lang.Object ref = transcript_;
  if (!(ref instanceof String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the String form once and cache the resulting ByteString.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  transcript_ = encoded;
  return encoded;
}
/**
 * Sets the transcript.
 *
 * @param value The transcript to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setTranscript(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  transcript_ = value;
  onChanged();
  return this;
}
/**
 * Resets the transcript to its default (empty string).
 *
 * @return This builder for chaining.
 */
public Builder clearTranscript() {
  transcript_ = getDefaultInstance().getTranscript();
  onChanged();
  return this;
}
/**
 * Sets the transcript from UTF-8 bytes.
 *
 * @param value The bytes for transcript to set; must be valid UTF-8, non-null.
 * @return This builder for chaining.
 */
public Builder setTranscriptBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  transcript_ = value;
  onChanged();
  return this;
}
// Backing store for `bool is_final = 3`.
private boolean isFinal_;
/**
 * If {@code false}, this {@code StreamingRecognitionResult} is an interim
 * result that may change. If {@code true}, the recognizer will not return any
 * further hypotheses about this piece of the audio. May only be populated for
 * {@code message_type} = {@code TRANSCRIPT}.
 *
 * <code>bool is_final = 3;</code>
 *
 * @return The isFinal.
 */
@java.lang.Override
public boolean getIsFinal() {
  return isFinal_;
}
/**
 * Sets is_final.
 *
 * @param value The isFinal to set.
 * @return This builder for chaining.
 */
public Builder setIsFinal(boolean value) {
  isFinal_ = value;
  onChanged();
  return this;
}
/**
 * Resets is_final to its default ({@code false}).
 *
 * @return This builder for chaining.
 */
public Builder clearIsFinal() {
  isFinal_ = false;
  onChanged();
  return this;
}
// Backing store for `float confidence = 4`.
private float confidence_;
/**
 * The Speech confidence between 0.0 and 1.0 for the current portion of audio.
 * A higher number indicates an estimated greater likelihood that the
 * recognized words are correct. The default of 0.0 is a sentinel meaning the
 * confidence was not set; typically only provided when {@code is_final} is
 * true, and should not be relied upon.
 *
 * <code>float confidence = 4;</code>
 *
 * @return The confidence.
 */
@java.lang.Override
public float getConfidence() {
  return confidence_;
}
/**
 * Sets the confidence.
 *
 * @param value The confidence to set.
 * @return This builder for chaining.
 */
public Builder setConfidence(float value) {
  confidence_ = value;
  onChanged();
  return this;
}
/**
 * Resets the confidence to its default (0.0, meaning "not set").
 *
 * @return This builder for chaining.
 */
public Builder clearConfidence() {
  confidence_ = 0F;
  onChanged();
  return this;
}
// Backing store for `float stability = 6`.
private float stability_;
/**
 * An estimate of the likelihood that the speech recognizer will not change its
 * guess about this interim recognition result. Unspecified or 0.0 means the
 * stability was not computed (it is only provided for {@code TRANSCRIPT}
 * results with {@code is_final = false}); otherwise the value is in
 * (0.0, 1.0], 1.0 being completely stable.
 *
 * <code>float stability = 6;</code>
 *
 * @return The stability.
 */
@java.lang.Override
public float getStability() {
  return stability_;
}
/**
 * Sets the stability.
 *
 * @param value The stability to set.
 * @return This builder for chaining.
 */
public Builder setStability(float value) {
  stability_ = value;
  onChanged();
  return this;
}
/**
 * Resets the stability to its default (0.0, meaning "not computed").
 *
 * @return This builder for chaining.
 */
public Builder clearStability() {
  stability_ = 0F;
  onChanged();
  return this;
}
// Backing list for `repeated SpeechWordInfo speech_word_info = 7`. The stripped
// generic type parameters (lost as HTML tags in the published listing) are
// restored here; without them the raw types fail the generated code's contract.
private java.util.List<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo> speechWordInfo_ =
    java.util.Collections.emptyList();

// Copy-on-write guard: bit 0 of bitField0_ records whether speechWordInfo_ is
// already a private mutable copy owned by this builder.
private void ensureSpeechWordInfoIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    speechWordInfo_ =
        new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo>(
            speechWordInfo_);
    bitField0_ |= 0x00000001;
  }
}

// Lazily-created field builder; once non-null it is the source of truth for
// the repeated field (see getSpeechWordInfoFieldBuilder).
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.dialogflow.cx.v3.SpeechWordInfo,
        com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder,
        com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>
    speechWordInfoBuilder_;
/**
 * Word-specific information for the words recognized by Speech in
 * {@code transcript}. Populated if and only if {@code message_type} =
 * {@code TRANSCRIPT} and {@code InputAudioConfig.enable_word_info} is set.
 * (Restores the generic return type stripped from the published listing.)
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public java.util.List<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo>
    getSpeechWordInfoList() {
  if (speechWordInfoBuilder_ == null) {
    return java.util.Collections.unmodifiableList(speechWordInfo_);
  } else {
    return speechWordInfoBuilder_.getMessageList();
  }
}
/**
 * Number of elements in speech_word_info.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public int getSpeechWordInfoCount() {
  return speechWordInfoBuilder_ == null
      ? speechWordInfo_.size()
      : speechWordInfoBuilder_.getCount();
}
/**
 * Element of speech_word_info at {@code index}.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo getSpeechWordInfo(int index) {
  return speechWordInfoBuilder_ == null
      ? speechWordInfo_.get(index)
      : speechWordInfoBuilder_.getMessage(index);
}
/**
 * Replaces the element of speech_word_info at {@code index}.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public Builder setSpeechWordInfo(
    int index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo value) {
  if (speechWordInfoBuilder_ != null) {
    speechWordInfoBuilder_.setMessage(index, value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSpeechWordInfoIsMutable();
    speechWordInfo_.set(index, value);
    onChanged();
  }
  return this;
}
/**
 * Replaces the element of speech_word_info at {@code index} with
 * {@code builderForValue.build()}.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public Builder setSpeechWordInfo(
    int index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder builderForValue) {
  if (speechWordInfoBuilder_ != null) {
    speechWordInfoBuilder_.setMessage(index, builderForValue.build());
  } else {
    ensureSpeechWordInfoIsMutable();
    speechWordInfo_.set(index, builderForValue.build());
    onChanged();
  }
  return this;
}
/**
 * Appends {@code value} to speech_word_info.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public Builder addSpeechWordInfo(com.google.cloud.dialogflow.cx.v3.SpeechWordInfo value) {
  if (speechWordInfoBuilder_ != null) {
    speechWordInfoBuilder_.addMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSpeechWordInfoIsMutable();
    speechWordInfo_.add(value);
    onChanged();
  }
  return this;
}
/**
 * Inserts {@code value} into speech_word_info at {@code index}.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public Builder addSpeechWordInfo(
    int index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo value) {
  if (speechWordInfoBuilder_ != null) {
    speechWordInfoBuilder_.addMessage(index, value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSpeechWordInfoIsMutable();
    speechWordInfo_.add(index, value);
    onChanged();
  }
  return this;
}
/**
 * Appends {@code builderForValue.build()} to speech_word_info.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public Builder addSpeechWordInfo(
    com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder builderForValue) {
  if (speechWordInfoBuilder_ != null) {
    speechWordInfoBuilder_.addMessage(builderForValue.build());
  } else {
    ensureSpeechWordInfoIsMutable();
    speechWordInfo_.add(builderForValue.build());
    onChanged();
  }
  return this;
}
/**
 * Inserts {@code builderForValue.build()} into speech_word_info at {@code index}.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public Builder addSpeechWordInfo(
    int index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder builderForValue) {
  if (speechWordInfoBuilder_ != null) {
    speechWordInfoBuilder_.addMessage(index, builderForValue.build());
  } else {
    ensureSpeechWordInfoIsMutable();
    speechWordInfo_.add(index, builderForValue.build());
    onChanged();
  }
  return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript]. Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;
*/
public Builder addAllSpeechWordInfo(
java.lang.Iterable extends com.google.cloud.dialogflow.cx.v3.SpeechWordInfo> values) {
if (speechWordInfoBuilder_ == null) {
ensureSpeechWordInfoIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, speechWordInfo_);
onChanged();
} else {
speechWordInfoBuilder_.addAllMessages(values);
}
return this;
}
/**
 * Removes every element of speech_word_info.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public Builder clearSpeechWordInfo() {
  if (speechWordInfoBuilder_ != null) {
    speechWordInfoBuilder_.clear();
  } else {
    // Drop the whole list and the "mutable copy" bit in one step.
    speechWordInfo_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  }
  return this;
}
/**
 * Removes the element of speech_word_info at {@code index}.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public Builder removeSpeechWordInfo(int index) {
  if (speechWordInfoBuilder_ != null) {
    speechWordInfoBuilder_.remove(index);
  } else {
    ensureSpeechWordInfoIsMutable();
    speechWordInfo_.remove(index);
    onChanged();
  }
  return this;
}
/**
 * Mutable builder view of the element at {@code index}; forces creation of the
 * repeated field builder.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder getSpeechWordInfoBuilder(
    int index) {
  return getSpeechWordInfoFieldBuilder().getBuilder(index);
}
/**
 * Read-only view of the element at {@code index}, without forcing a builder.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder getSpeechWordInfoOrBuilder(
    int index) {
  return speechWordInfoBuilder_ != null
      ? speechWordInfoBuilder_.getMessageOrBuilder(index)
      : speechWordInfo_.get(index);
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult.transcript]. Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;
*/
public java.util.List extends com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>
getSpeechWordInfoOrBuilderList() {
if (speechWordInfoBuilder_ != null) {
return speechWordInfoBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(speechWordInfo_);
}
}
/**
 * Appends a default-instance element to speech_word_info and returns its
 * builder (forces creation of the repeated field builder).
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder addSpeechWordInfoBuilder() {
  return getSpeechWordInfoFieldBuilder()
      .addBuilder(com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.getDefaultInstance());
}
/**
 * Inserts a default-instance element at {@code index} and returns its builder.
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder addSpeechWordInfoBuilder(
    int index) {
  return getSpeechWordInfoFieldBuilder()
      .addBuilder(index, com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.getDefaultInstance());
}
/**
 * Mutable builders for every speech_word_info element (forces creation of the
 * repeated field builder). (Restores the generic return type stripped from
 * the published listing.)
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3.SpeechWordInfo speech_word_info = 7;</code>
 */
public java.util.List<com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder>
    getSpeechWordInfoBuilderList() {
  return getSpeechWordInfoFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder. Once created it takes ownership
// of the current list (speechWordInfo_ is nulled afterwards) and becomes the
// single source of truth for the field; the mutable bit is passed so the
// builder knows whether it may reuse the list in place.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.SpeechWordInfo,
com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder,
com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>
getSpeechWordInfoFieldBuilder() {
if (speechWordInfoBuilder_ == null) {
speechWordInfoBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.SpeechWordInfo,
com.google.cloud.dialogflow.cx.v3.SpeechWordInfo.Builder,
com.google.cloud.dialogflow.cx.v3.SpeechWordInfoOrBuilder>(
speechWordInfo_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
speechWordInfo_ = null;
}
return speechWordInfoBuilder_;
}
// Backing store for `.google.protobuf.Duration speech_end_offset = 8`; exactly
// one of the plain field or the single-field builder is authoritative.
private com.google.protobuf.Duration speechEndOffset_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    speechEndOffsetBuilder_;
/**
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for {@code message_type} =
 * {@code TRANSCRIPT}.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 *
 * @return Whether the speechEndOffset field is set.
 */
public boolean hasSpeechEndOffset() {
  return speechEndOffsetBuilder_ != null || speechEndOffset_ != null;
}
/**
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for {@code message_type} =
 * {@code TRANSCRIPT}.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 *
 * @return The speechEndOffset, or the default Duration when unset.
 */
public com.google.protobuf.Duration getSpeechEndOffset() {
  if (speechEndOffsetBuilder_ != null) {
    return speechEndOffsetBuilder_.getMessage();
  }
  return speechEndOffset_ == null
      ? com.google.protobuf.Duration.getDefaultInstance()
      : speechEndOffset_;
}
/**
 * Sets speech_end_offset.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
public Builder setSpeechEndOffset(com.google.protobuf.Duration value) {
  if (speechEndOffsetBuilder_ != null) {
    speechEndOffsetBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    speechEndOffset_ = value;
    onChanged();
  }
  return this;
}
/**
 * Sets speech_end_offset from a builder.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
public Builder setSpeechEndOffset(com.google.protobuf.Duration.Builder builderForValue) {
  if (speechEndOffsetBuilder_ != null) {
    speechEndOffsetBuilder_.setMessage(builderForValue.build());
  } else {
    speechEndOffset_ = builderForValue.build();
    onChanged();
  }
  return this;
}
/**
 * Merges {@code value} into the current speech_end_offset using standard
 * protobuf message-merge semantics.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
public Builder mergeSpeechEndOffset(com.google.protobuf.Duration value) {
  if (speechEndOffsetBuilder_ != null) {
    speechEndOffsetBuilder_.mergeFrom(value);
  } else {
    if (speechEndOffset_ == null) {
      speechEndOffset_ = value;
    } else {
      speechEndOffset_ =
          com.google.protobuf.Duration.newBuilder(speechEndOffset_)
              .mergeFrom(value)
              .buildPartial();
    }
    onChanged();
  }
  return this;
}
/**
 * Clears speech_end_offset back to unset.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
public Builder clearSpeechEndOffset() {
  if (speechEndOffsetBuilder_ != null) {
    // Discard the nested builder as well so hasSpeechEndOffset() reports unset.
    speechEndOffset_ = null;
    speechEndOffsetBuilder_ = null;
  } else {
    speechEndOffset_ = null;
    onChanged();
  }
  return this;
}
/**
 * Mutable builder for speech_end_offset; forces creation of the single-field
 * builder and marks the builder changed.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
public com.google.protobuf.Duration.Builder getSpeechEndOffsetBuilder() {
  onChanged();
  return getSpeechEndOffsetFieldBuilder().getBuilder();
}
/**
 * Read-only view of speech_end_offset without forcing a builder.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
public com.google.protobuf.DurationOrBuilder getSpeechEndOffsetOrBuilder() {
  if (speechEndOffsetBuilder_ != null) {
    return speechEndOffsetBuilder_.getMessageOrBuilder();
  }
  return speechEndOffset_ == null
      ? com.google.protobuf.Duration.getDefaultInstance()
      : speechEndOffset_;
}
// Lazily creates the single-field builder. After creation the plain field is
// nulled: the builder becomes the single source of truth for the field.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    getSpeechEndOffsetFieldBuilder() {
  if (speechEndOffsetBuilder_ == null) {
    speechEndOffsetBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>(
            getSpeechEndOffset(), getParentForChildren(), isClean());
    speechEndOffset_ = null;
  }
  return speechEndOffsetBuilder_;
}
// Backing store for `string language_code = 10`; holds either a String or a
// not-yet-decoded ByteString.
private java.lang.Object languageCode_ = "";
/**
 * Detected language code for the transcript.
 *
 * <code>string language_code = 10;</code>
 *
 * @return The languageCode.
 */
public java.lang.String getLanguageCode() {
  java.lang.Object ref = languageCode_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Decode the ByteString form once and cache the resulting String.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) ref;
  java.lang.String decoded = bytes.toStringUtf8();
  languageCode_ = decoded;
  return decoded;
}
/**
 * Detected language code for the transcript.
 *
 * <code>string language_code = 10;</code>
 *
 * @return The bytes for languageCode.
 */
public com.google.protobuf.ByteString getLanguageCodeBytes() {
  java.lang.Object ref = languageCode_;
  if (!(ref instanceof String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the String form once and cache the resulting ByteString.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  languageCode_ = encoded;
  return encoded;
}
/**
 * Sets the language code.
 *
 * @param value The languageCode to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setLanguageCode(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  languageCode_ = value;
  onChanged();
  return this;
}
/**
 * Resets the language code to its default (empty string).
 *
 * @return This builder for chaining.
 */
public Builder clearLanguageCode() {
  languageCode_ = getDefaultInstance().getLanguageCode();
  onChanged();
  return this;
}
/**
 * Sets the language code from UTF-8 bytes.
 *
 * @param value The bytes for languageCode to set; must be valid UTF-8, non-null.
 * @return This builder for chaining.
 */
public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  languageCode_ = value;
  onChanged();
  return this;
}
// Standard unknown-field plumbing, delegated to the superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.StreamingRecognitionResult)
// Shared singleton default instance, created eagerly at class-initialization
// time and returned by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult();
}
public static com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser for this message type. (Restores the generic type arguments —
// Parser<StreamingRecognitionResult> / AbstractParser<…> — stripped from the
// published listing; the raw forms do not satisfy the override contracts.)
private static final com.google.protobuf.Parser<StreamingRecognitionResult> PARSER =
    new com.google.protobuf.AbstractParser<StreamingRecognitionResult>() {
      @java.lang.Override
      public StreamingRecognitionResult parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new StreamingRecognitionResult(input, extensionRegistry);
      }
    };

/** Static accessor for the shared parser instance. */
public static com.google.protobuf.Parser<StreamingRecognitionResult> parser() {
  return PARSER;
}
/**
 * Parser used by the generic protobuf runtime for this message. (Restores the
 * generic return type stripped from the published listing.)
 */
@java.lang.Override
public com.google.protobuf.Parser<StreamingRecognitionResult> getParserForType() {
  return PARSER;
}

/** Default instance of this message type. */
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.StreamingRecognitionResult getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// NOTE(review): the line "© 2015 - 2025 Weber Informatics LLC | Privacy Policy" is
// web-listing footer text, not part of the generated Java source; it must not remain
// as bare text in the .java file.