com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of proto-google-cloud-dialogflow-cx-v3beta1 Show documentation
PROTO library for proto-google-cloud-dialogflow-cx-v3beta1
The newest version!
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/session.proto
// Protobuf Java Version: 3.25.5
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
*
* Contains a speech recognition result corresponding to a portion of the audio
* that is currently being processed or an indication that this is the end
* of the single requested utterance.
*
* While end-user audio is being processed, Dialogflow sends a series of
* results. Each result may contain a `transcript` value. A transcript
* represents a portion of the utterance. While the recognizer is processing
* audio, transcript values may be interim values or finalized values.
* Once a transcript is finalized, the `is_final` value is set to true and
* processing continues for the next transcript.
*
* If `StreamingDetectIntentRequest.query_input.audio.config.single_utterance`
* was true, and the recognizer has completed processing audio,
* the `message_type` value is set to `END_OF_SINGLE_UTTERANCE` and the
* following (last) result contains the last finalized transcript.
*
* The complete end-user utterance is determined by concatenating the
* finalized transcript values received for the series of results.
*
* In the following example, single utterance is enabled. In the case where
* single utterance is not enabled, result 7 would not occur.
*
* ```
* Num | transcript | message_type | is_final
* --- | ----------------------- | ----------------------- | --------
* 1 | "tube" | TRANSCRIPT | false
* 2 | "to be a" | TRANSCRIPT | false
* 3 | "to be" | TRANSCRIPT | false
* 4 | "to be or not to be" | TRANSCRIPT | true
* 5 | "that's" | TRANSCRIPT | false
* 6 | "that is" | TRANSCRIPT | false
* 7 | unset | END_OF_SINGLE_UTTERANCE | unset
* 8 | " that is the question" | TRANSCRIPT | true
* ```
*
* Concatenating the finalized transcripts with `is_final` set to true,
* the complete utterance becomes "to be or not to be that is the question".
*
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult}
*/
public final class StreamingRecognitionResult extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult)
StreamingRecognitionResultOrBuilder {
private static final long serialVersionUID = 0L;
// Use StreamingRecognitionResult.newBuilder() to construct.
// NOTE(review): the <?> type argument below was missing in the extracted
// source (HTML-stripped generics) and has been restored; without it the
// raw Builder type does not match the GeneratedMessageV3 constructor.
private StreamingRecognitionResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance: initializes every field
// to its proto3 default value (0, "", empty list).
private StreamingRecognitionResult() {
messageType_ = 0;
transcript_ = "";
speechWordInfo_ = java.util.Collections.emptyList();
languageCode_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances
// (e.g. during parsing); the parameter exists only to disambiguate overloads.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new StreamingRecognitionResult();
}
/** Returns the proto descriptor for this message type (from session.proto). */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_StreamingRecognitionResult_descriptor;
}
// Wires this class and its Builder to the reflection-based field accessors
// generated in SessionProto.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_StreamingRecognitionResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.class,
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.Builder.class);
}
/**
 * Type of the response message.
 *
 * Protobuf enum {@code google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType}
 */
public enum MessageType implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * Not specified. Should never be used.
   *
   * <code>MESSAGE_TYPE_UNSPECIFIED = 0;</code>
   */
  MESSAGE_TYPE_UNSPECIFIED(0),
  /**
   * Message contains a (possibly partial) transcript.
   *
   * <code>TRANSCRIPT = 1;</code>
   */
  TRANSCRIPT(1),
  /**
   * This event indicates that the server has detected the end of the user's
   * speech utterance and expects no additional speech. Therefore, the server
   * will not process additional audio (although it may subsequently return
   * additional results). The client should stop sending additional audio
   * data, half-close the gRPC connection, and wait for any additional results
   * until the server closes the gRPC connection. This message is only sent if
   * [`single_utterance`][google.cloud.dialogflow.cx.v3beta1.InputAudioConfig.single_utterance]
   * was set to `true`, and is not used otherwise.
   *
   * <code>END_OF_SINGLE_UTTERANCE = 2;</code>
   */
  END_OF_SINGLE_UTTERANCE(2),
  /** Sentinel constant for wire values not known to this version of the enum. */
  UNRECOGNIZED(-1),
  ;

  /** Numeric wire value of {@code MESSAGE_TYPE_UNSPECIFIED = 0;}. */
  public static final int MESSAGE_TYPE_UNSPECIFIED_VALUE = 0;
  /** Numeric wire value of {@code TRANSCRIPT = 1;}. */
  public static final int TRANSCRIPT_VALUE = 1;
  /** Numeric wire value of {@code END_OF_SINGLE_UTTERANCE = 2;}. */
  public static final int END_OF_SINGLE_UTTERANCE_VALUE = 2;

  /**
   * Returns the numeric wire value of this enum entry.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static MessageType valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or {@code null} if unknown.
   */
  public static MessageType forNumber(int value) {
    switch (value) {
      case 0:
        return MESSAGE_TYPE_UNSPECIFIED;
      case 1:
        return TRANSCRIPT;
      case 2:
        return END_OF_SINGLE_UTTERANCE;
      default:
        return null;
    }
  }

  // NOTE(review): the <MessageType> type arguments on internalGetValueMap and
  // internalValueMap were missing in the extracted source (HTML-stripped
  // generics) and have been restored; the raw types do not compile against
  // the ProtocolMessageEnum contract.
  public static com.google.protobuf.Internal.EnumLiteMap<MessageType> internalGetValueMap() {
    return internalValueMap;
  }

  private static final com.google.protobuf.Internal.EnumLiteMap<MessageType> internalValueMap =
      new com.google.protobuf.Internal.EnumLiteMap<MessageType>() {
        public MessageType findValueByNumber(int number) {
          return MessageType.forNumber(number);
        }
      };

  /**
   * Returns the value descriptor for this entry.
   *
   * @throws java.lang.IllegalStateException if this is {@link #UNRECOGNIZED}.
   */
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  /** Returns the enum descriptor (first nested enum of the enclosing message). */
  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.getDescriptor()
        .getEnumTypes()
        .get(0);
  }

  private static final MessageType[] VALUES = values();

  /** Maps a value descriptor back to its enum constant; index -1 maps to UNRECOGNIZED. */
  public static MessageType valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private MessageType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType)
}
// Presence bits for singular message fields; bit 0x00000001 tracks
// speech_end_offset (see hasSpeechEndOffset()).
private int bitField0_;
public static final int MESSAGE_TYPE_FIELD_NUMBER = 1;
// Stored as the raw numeric wire value, so unknown enum numbers survive
// a parse/serialize round trip.
private int messageType_ = 0;
/**
 * Type of the result message.
 *
 * <code>.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType message_type = 1;</code>
 *
 * @return The enum numeric value on the wire for messageType.
 */
@java.lang.Override
public int getMessageTypeValue() {
return messageType_;
}
/**
 * Type of the result message.
 *
 * <code>.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType message_type = 1;</code>
 *
 * @return The messageType, or {@code UNRECOGNIZED} if the stored wire value is unknown.
 */
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType
getMessageType() {
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType result =
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType.forNumber(
messageType_);
return result == null
? com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType.UNRECOGNIZED
: result;
}
public static final int TRANSCRIPT_FIELD_NUMBER = 2;
// Holds either a java.lang.String or a ByteString; decoded lazily and the
// result cached back into the field (standard generated-message idiom for
// proto string fields).
@SuppressWarnings("serial")
private volatile java.lang.Object transcript_ = "";
/**
 * Transcript text representing the words that the user spoke.
 * Populated if and only if `message_type` = `TRANSCRIPT`.
 *
 * <code>string transcript = 2;</code>
 *
 * @return The transcript.
 */
@java.lang.Override
public java.lang.String getTranscript() {
java.lang.Object ref = transcript_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First access after parsing: decode the ByteString once and cache it.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
transcript_ = s;
return s;
}
}
/**
 * Transcript text representing the words that the user spoke.
 * Populated if and only if `message_type` = `TRANSCRIPT`.
 *
 * <code>string transcript = 2;</code>
 *
 * @return The bytes for transcript.
 */
@java.lang.Override
public com.google.protobuf.ByteString getTranscriptBytes() {
java.lang.Object ref = transcript_;
if (ref instanceof java.lang.String) {
// Encode once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
transcript_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int IS_FINAL_FIELD_NUMBER = 3;
private boolean isFinal_ = false;
/**
 * If `false`, the `StreamingRecognitionResult` represents an
 * interim result that may change. If `true`, the recognizer will not return
 * any further hypotheses about this piece of the audio. May only be populated
 * for `message_type` = `TRANSCRIPT`.
 *
 * <code>bool is_final = 3;</code>
 *
 * @return The isFinal.
 */
@java.lang.Override
public boolean getIsFinal() {
return isFinal_;
}
public static final int CONFIDENCE_FIELD_NUMBER = 4;
private float confidence_ = 0F;
/**
 * The Speech confidence between 0.0 and 1.0 for the current portion of audio.
 * A higher number indicates an estimated greater likelihood that the
 * recognized words are correct. The default of 0.0 is a sentinel value
 * indicating that confidence was not set.
 *
 * This field is typically only provided if `is_final` is true and you should
 * not rely on it being accurate or even set.
 *
 * <code>float confidence = 4;</code>
 *
 * @return The confidence.
 */
@java.lang.Override
public float getConfidence() {
return confidence_;
}
public static final int STABILITY_FIELD_NUMBER = 6;
private float stability_ = 0F;
/**
 * An estimate of the likelihood that the speech recognizer will
 * not change its guess about this interim recognition result:
 * * If the value is unspecified or 0.0, Dialogflow didn't compute the
 *   stability. In particular, Dialogflow will only provide stability for
 *   `TRANSCRIPT` results with `is_final = false`.
 * * Otherwise, the value is in (0.0, 1.0] where 0.0 means completely
 *   unstable and 1.0 means completely stable.
 *
 * <code>float stability = 6;</code>
 *
 * @return The stability.
 */
@java.lang.Override
public float getStability() {
return stability_;
}
public static final int SPEECH_WORD_INFO_FIELD_NUMBER = 7;
@SuppressWarnings("serial")
private java.util.List speechWordInfo_;
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*/
@java.lang.Override
public java.util.List
getSpeechWordInfoList() {
return speechWordInfo_;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*/
@java.lang.Override
public java.util.List extends com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfoOrBuilder>
getSpeechWordInfoOrBuilderList() {
return speechWordInfo_;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*/
@java.lang.Override
public int getSpeechWordInfoCount() {
return speechWordInfo_.size();
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo getSpeechWordInfo(int index) {
return speechWordInfo_.get(index);
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfoOrBuilder getSpeechWordInfoOrBuilder(
int index) {
return speechWordInfo_.get(index);
}
public static final int SPEECH_END_OFFSET_FIELD_NUMBER = 8;
private com.google.protobuf.Duration speechEndOffset_;
/**
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 *
 * @return Whether the speechEndOffset field is set.
 */
@java.lang.Override
public boolean hasSpeechEndOffset() {
// Bit 0x00000001 of bitField0_ is this field's has-bit.
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 *
 * @return The speechEndOffset, or the default Duration if unset.
 */
@java.lang.Override
public com.google.protobuf.Duration getSpeechEndOffset() {
return speechEndOffset_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: speechEndOffset_;
}
/**
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getSpeechEndOffsetOrBuilder() {
return speechEndOffset_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: speechEndOffset_;
}
public static final int LANGUAGE_CODE_FIELD_NUMBER = 10;
// Holds either a java.lang.String or a ByteString; decoded lazily and cached
// (same idiom as transcript_).
@SuppressWarnings("serial")
private volatile java.lang.Object languageCode_ = "";
/**
 * Detected language code for the transcript.
 *
 * <code>string language_code = 10;</code>
 *
 * @return The languageCode.
 */
@java.lang.Override
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First access after parsing: decode the ByteString once and cache it.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
}
}
/**
 * Detected language code for the transcript.
 *
 * <code>string language_code = 10;</code>
 *
 * @return The bytes for languageCode.
 */
@java.lang.Override
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
// Encode once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
// This message has no required fields, so the answer is always true once
// computed.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes the message in ascending field-number order. Per proto3
// semantics, scalar fields equal to their default value are skipped on the
// wire; the sub-message is written only when its has-bit is set.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (messageType_
!= com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType
.MESSAGE_TYPE_UNSPECIFIED
.getNumber()) {
output.writeEnum(1, messageType_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(transcript_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, transcript_);
}
if (isFinal_ != false) {
output.writeBool(3, isFinal_);
}
// Raw-bit comparison distinguishes -0.0f from 0.0f, matching protobuf's
// default-value check for floats.
if (java.lang.Float.floatToRawIntBits(confidence_) != 0) {
output.writeFloat(4, confidence_);
}
if (java.lang.Float.floatToRawIntBits(stability_) != 0) {
output.writeFloat(6, stability_);
}
for (int i = 0; i < speechWordInfo_.size(); i++) {
output.writeMessage(7, speechWordInfo_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(8, getSpeechEndOffset());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 10, languageCode_);
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the serialized byte size; mirrors writeTo()'s
// field-presence checks exactly so the two stay consistent.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (messageType_
!= com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType
.MESSAGE_TYPE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, messageType_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(transcript_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, transcript_);
}
if (isFinal_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, isFinal_);
}
if (java.lang.Float.floatToRawIntBits(confidence_) != 0) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(4, confidence_);
}
if (java.lang.Float.floatToRawIntBits(stability_) != 0) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(6, stability_);
}
for (int i = 0; i < speechWordInfo_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(7, speechWordInfo_.get(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getSpeechEndOffset());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, languageCode_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field structural equality, including unknown fields. Floats are
// compared via their int bit patterns (so NaN == NaN here, unlike ==).
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult other =
(com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult) obj;
if (messageType_ != other.messageType_) return false;
if (!getTranscript().equals(other.getTranscript())) return false;
if (getIsFinal() != other.getIsFinal()) return false;
if (java.lang.Float.floatToIntBits(getConfidence())
!= java.lang.Float.floatToIntBits(other.getConfidence())) return false;
if (java.lang.Float.floatToIntBits(getStability())
!= java.lang.Float.floatToIntBits(other.getStability())) return false;
if (!getSpeechWordInfoList().equals(other.getSpeechWordInfoList())) return false;
// The sub-message participates only when present on both sides.
if (hasSpeechEndOffset() != other.hasSpeechEndOffset()) return false;
if (hasSpeechEndOffset()) {
if (!getSpeechEndOffset().equals(other.getSpeechEndOffset())) return false;
}
if (!getLanguageCode().equals(other.getLanguageCode())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash over the same fields equals() compares (field numbers are
// mixed in as tags), keeping the equals/hashCode contract.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + MESSAGE_TYPE_FIELD_NUMBER;
hash = (53 * hash) + messageType_;
hash = (37 * hash) + TRANSCRIPT_FIELD_NUMBER;
hash = (53 * hash) + getTranscript().hashCode();
hash = (37 * hash) + IS_FINAL_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getIsFinal());
hash = (37 * hash) + CONFIDENCE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getConfidence());
hash = (37 * hash) + STABILITY_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getStability());
if (getSpeechWordInfoCount() > 0) {
hash = (37 * hash) + SPEECH_WORD_INFO_FIELD_NUMBER;
hash = (53 * hash) + getSpeechWordInfoList().hashCode();
}
if (hasSpeechEndOffset()) {
hash = (37 * hash) + SPEECH_END_OFFSET_FIELD_NUMBER;
hash = (53 * hash) + getSpeechEndOffset().hashCode();
}
hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
hash = (53 * hash) + getLanguageCode().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. Byte-based overloads throw
// InvalidProtocolBufferException; stream-based overloads throw IOException.
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
/** Creates a new builder initialized to default field values. */
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
/** Creates a builder pre-populated with {@code prototype}'s field values. */
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Avoids a redundant mergeFrom when called on the default instance.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
*
* Contains a speech recognition result corresponding to a portion of the audio
* that is currently being processed or an indication that this is the end
* of the single requested utterance.
*
* While end-user audio is being processed, Dialogflow sends a series of
* results. Each result may contain a `transcript` value. A transcript
* represents a portion of the utterance. While the recognizer is processing
* audio, transcript values may be interim values or finalized values.
* Once a transcript is finalized, the `is_final` value is set to true and
* processing continues for the next transcript.
*
* If `StreamingDetectIntentRequest.query_input.audio.config.single_utterance`
* was true, and the recognizer has completed processing audio,
* the `message_type` value is set to `END_OF_SINGLE_UTTERANCE` and the
* following (last) result contains the last finalized transcript.
*
* The complete end-user utterance is determined by concatenating the
* finalized transcript values received for the series of results.
*
* In the following example, single utterance is enabled. In the case where
* single utterance is not enabled, result 7 would not occur.
*
* ```
* Num | transcript | message_type | is_final
* --- | ----------------------- | ----------------------- | --------
* 1 | "tube" | TRANSCRIPT | false
* 2 | "to be a" | TRANSCRIPT | false
* 3 | "to be" | TRANSCRIPT | false
* 4 | "to be or not to be" | TRANSCRIPT | true
* 5 | "that's" | TRANSCRIPT | false
* 6 | "that is" | TRANSCRIPT | false
* 7 | unset | END_OF_SINGLE_UTTERANCE | unset
* 8 | " that is the question" | TRANSCRIPT | true
* ```
*
* Concatenating the finalized transcripts with `is_final` set to true,
* the complete utterance becomes "to be or not to be that is the question".
*
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult)
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResultOrBuilder {
/** Returns the proto descriptor for the message this builder constructs. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_StreamingRecognitionResult_descriptor;
}
// Wires the builder to the same reflection-based field accessors as the
// message class.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_StreamingRecognitionResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.class,
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.Builder.class);
}
// Construct using
// com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested field builders when the runtime requires it
// (alwaysUseFieldBuilders is true e.g. under parent/child change tracking).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSpeechWordInfoFieldBuilder();
getSpeechEndOffsetFieldBuilder();
}
}
// Resets every field to its default and clears all builder bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
messageType_ = 0;
transcript_ = "";
isFinal_ = false;
confidence_ = 0F;
stability_ = 0F;
if (speechWordInfoBuilder_ == null) {
speechWordInfo_ = java.util.Collections.emptyList();
} else {
// Delegate the list's contents to its field builder.
speechWordInfo_ = null;
speechWordInfoBuilder_.clear();
}
// Bit 0x20 tracks ownership of the speech_word_info list.
bitField0_ = (bitField0_ & ~0x00000020);
speechEndOffset_ = null;
if (speechEndOffsetBuilder_ != null) {
speechEndOffsetBuilder_.dispose();
speechEndOffsetBuilder_ = null;
}
languageCode_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_StreamingRecognitionResult_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.getDefaultInstance();
}
// Builds the message; throws if required fields are unset (none here, so
// the isInitialized check always passes for this message type).
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult build() {
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builder bit layout: 0x01 messageType, 0x02 transcript, 0x04 isFinal,
// 0x08 confidence, 0x10 stability, 0x20 speechWordInfo list ownership,
// 0x40 speechEndOffset, 0x80 languageCode.
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult buildPartial() {
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult result =
new com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the repeated field: freezes the builder-owned list (making it
// unmodifiable and relinquishing ownership via bit 0x20) or builds from the
// nested field builder.
private void buildPartialRepeatedFields(
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult result) {
if (speechWordInfoBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0)) {
speechWordInfo_ = java.util.Collections.unmodifiableList(speechWordInfo_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.speechWordInfo_ = speechWordInfo_;
} else {
result.speechWordInfo_ = speechWordInfoBuilder_.build();
}
}
// Copies each explicitly-set singular field into the result; only the
// speech_end_offset message field carries a has-bit into result.bitField0_.
private void buildPartial0(
com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.messageType_ = messageType_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.transcript_ = transcript_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.isFinal_ = isFinal_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.confidence_ = confidence_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.stability_ = stability_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000040) != 0)) {
result.speechEndOffset_ =
speechEndOffsetBuilder_ == null ? speechEndOffset_ : speechEndOffsetBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.languageCode_ = languageCode_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
  return super.clone();
}

/** Reflective field setter; delegates to the GeneratedMessageV3 implementation. */
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

/**
 * Merges another message into this builder: uses the fast typed path for
 * StreamingRecognitionResult, otherwise the generic descriptor-based merge.
 */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult) {
    return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
/**
 * Typed merge: copies every field of {@code other} that is set (non-default)
 * into this builder, following proto3 merge semantics (scalars overwrite,
 * repeated fields concatenate, sub-messages merge recursively).
 */
public Builder mergeFrom(
    com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult other) {
  if (other
      == com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.getDefaultInstance())
    return this;
  if (other.messageType_ != 0) {
    setMessageTypeValue(other.getMessageTypeValue());
  }
  if (!other.getTranscript().isEmpty()) {
    // Copy the raw Object (String or ByteString) to avoid forcing a decode.
    transcript_ = other.transcript_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  if (other.getIsFinal() != false) {
    setIsFinal(other.getIsFinal());
  }
  if (other.getConfidence() != 0F) {
    setConfidence(other.getConfidence());
  }
  if (other.getStability() != 0F) {
    setStability(other.getStability());
  }
  if (speechWordInfoBuilder_ == null) {
    if (!other.speechWordInfo_.isEmpty()) {
      if (speechWordInfo_.isEmpty()) {
        // Cheap path: alias other's immutable list; mutability bit is cleared
        // so any later mutation copies first.
        speechWordInfo_ = other.speechWordInfo_;
        bitField0_ = (bitField0_ & ~0x00000020);
      } else {
        ensureSpeechWordInfoIsMutable();
        speechWordInfo_.addAll(other.speechWordInfo_);
      }
      onChanged();
    }
  } else {
    if (!other.speechWordInfo_.isEmpty()) {
      if (speechWordInfoBuilder_.isEmpty()) {
        // Builder holds nothing: discard it and alias other's list directly,
        // recreating the builder only if the runtime demands field builders.
        speechWordInfoBuilder_.dispose();
        speechWordInfoBuilder_ = null;
        speechWordInfo_ = other.speechWordInfo_;
        bitField0_ = (bitField0_ & ~0x00000020);
        speechWordInfoBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getSpeechWordInfoFieldBuilder()
                : null;
      } else {
        speechWordInfoBuilder_.addAllMessages(other.speechWordInfo_);
      }
    }
  }
  if (other.hasSpeechEndOffset()) {
    mergeSpeechEndOffset(other.getSpeechEndOffset());
  }
  if (!other.getLanguageCode().isEmpty()) {
    languageCode_ = other.languageCode_;
    bitField0_ |= 0x00000080;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

/** Always true: proto3 messages have no required fields. */
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
/**
 * Parses a wire-format stream into this builder. Each case label is the full
 * field tag (field_number &lt;&lt; 3 | wire_type); tag 0 marks end of input.
 */
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 8:
          { // field 1, varint: message_type enum
            messageType_ = input.readEnum();
            bitField0_ |= 0x00000001;
            break;
          } // case 8
        case 18:
          { // field 2, length-delimited: transcript (validated UTF-8)
            transcript_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 24:
          { // field 3, varint: is_final
            isFinal_ = input.readBool();
            bitField0_ |= 0x00000004;
            break;
          } // case 24
        case 37:
          { // field 4, fixed32: confidence
            confidence_ = input.readFloat();
            bitField0_ |= 0x00000008;
            break;
          } // case 37
        case 53:
          { // field 6, fixed32: stability
            stability_ = input.readFloat();
            bitField0_ |= 0x00000010;
            break;
          } // case 53
        case 58:
          { // field 7, length-delimited: repeated speech_word_info element
            com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo m =
                input.readMessage(
                    com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.parser(),
                    extensionRegistry);
            if (speechWordInfoBuilder_ == null) {
              ensureSpeechWordInfoIsMutable();
              speechWordInfo_.add(m);
            } else {
              speechWordInfoBuilder_.addMessage(m);
            }
            break;
          } // case 58
        case 66:
          { // field 8, length-delimited: speech_end_offset (merged into sub-builder)
            input.readMessage(getSpeechEndOffsetFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000040;
            break;
          } // case 66
        case 82:
          { // field 10, length-delimited: language_code
            languageCode_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000080;
            break;
          } // case 82
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify the parent even on parse failure: fields read so far were mutated.
    onChanged();
  } // finally
  return this;
}
// Has-bits for this builder's fields (0x01 message_type … 0x80 language_code).
private int bitField0_;

// Stored as the raw wire number so unknown enum values round-trip unchanged.
private int messageType_ = 0;
/**
 * <pre>
 * Type of the result message.
 * </pre>
 *
 * <code>
 * .google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType message_type = 1;
 * </code>
 *
 * @return The enum numeric value on the wire for messageType.
 */
@java.lang.Override
public int getMessageTypeValue() {
  return messageType_;
}
/**
 * <pre>
 * Type of the result message.
 * </pre>
 *
 * <code>
 * .google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType message_type = 1;
 * </code>
 *
 * @param value The enum numeric value on the wire for messageType to set.
 * @return This builder for chaining.
 */
public Builder setMessageTypeValue(int value) {
  messageType_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * <pre>
 * Type of the result message.
 * </pre>
 *
 * <code>
 * .google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType message_type = 1;
 * </code>
 *
 * @return The messageType.
 */
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType
    getMessageType() {
  com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType result =
      com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType.forNumber(
          messageType_);
  // forNumber returns null for wire values this runtime does not know about.
  return result == null
      ? com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType
          .UNRECOGNIZED
      : result;
}
/**
 * <pre>
 * Type of the result message.
 * </pre>
 *
 * <code>
 * .google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType message_type = 1;
 * </code>
 *
 * @param value The messageType to set.
 * @return This builder for chaining.
 */
public Builder setMessageType(
    com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
  messageType_ = value.getNumber();
  onChanged();
  return this;
}
/**
 * <pre>
 * Type of the result message.
 * </pre>
 *
 * <code>
 * .google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.MessageType message_type = 1;
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearMessageType() {
  bitField0_ = (bitField0_ & ~0x00000001);
  messageType_ = 0;
  onChanged();
  return this;
}
// Holds either a String or a lazily-decoded ByteString (protobuf string caching idiom).
private java.lang.Object transcript_ = "";
/**
 * <pre>
 * Transcript text representing the words that the user spoke.
 * Populated if and only if `message_type` = `TRANSCRIPT`.
 * </pre>
 *
 * <code>string transcript = 2;</code>
 *
 * @return The transcript.
 */
public java.lang.String getTranscript() {
  java.lang.Object ref = transcript_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later reads skip UTF-8 decoding.
    transcript_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <pre>
 * Transcript text representing the words that the user spoke.
 * Populated if and only if `message_type` = `TRANSCRIPT`.
 * </pre>
 *
 * <code>string transcript = 2;</code>
 *
 * @return The bytes for transcript.
 */
public com.google.protobuf.ByteString getTranscriptBytes() {
  java.lang.Object ref = transcript_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent byte-level reads.
    transcript_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * <pre>
 * Transcript text representing the words that the user spoke.
 * Populated if and only if `message_type` = `TRANSCRIPT`.
 * </pre>
 *
 * <code>string transcript = 2;</code>
 *
 * @param value The transcript to set.
 * @return This builder for chaining.
 */
public Builder setTranscript(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  transcript_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * <pre>
 * Transcript text representing the words that the user spoke.
 * Populated if and only if `message_type` = `TRANSCRIPT`.
 * </pre>
 *
 * <code>string transcript = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearTranscript() {
  transcript_ = getDefaultInstance().getTranscript();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 * <pre>
 * Transcript text representing the words that the user spoke.
 * Populated if and only if `message_type` = `TRANSCRIPT`.
 * </pre>
 *
 * <code>string transcript = 2;</code>
 *
 * @param value The bytes for transcript to set.
 * @return This builder for chaining.
 */
public Builder setTranscriptBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject bytes that are not valid UTF-8, matching proto3 string semantics.
  checkByteStringIsUtf8(value);
  transcript_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
private boolean isFinal_;
/**
*
*
*
* If `false`, the `StreamingRecognitionResult` represents an
* interim result that may change. If `true`, the recognizer will not return
* any further hypotheses about this piece of the audio. May only be populated
* for `message_type` = `TRANSCRIPT`.
*
*
* bool is_final = 3;
*
* @return The isFinal.
*/
@java.lang.Override
public boolean getIsFinal() {
return isFinal_;
}
/**
*
*
*
* If `false`, the `StreamingRecognitionResult` represents an
* interim result that may change. If `true`, the recognizer will not return
* any further hypotheses about this piece of the audio. May only be populated
* for `message_type` = `TRANSCRIPT`.
*
*
* bool is_final = 3;
*
* @param value The isFinal to set.
* @return This builder for chaining.
*/
public Builder setIsFinal(boolean value) {
isFinal_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
*
* If `false`, the `StreamingRecognitionResult` represents an
* interim result that may change. If `true`, the recognizer will not return
* any further hypotheses about this piece of the audio. May only be populated
* for `message_type` = `TRANSCRIPT`.
*
*
* bool is_final = 3;
*
* @return This builder for chaining.
*/
public Builder clearIsFinal() {
bitField0_ = (bitField0_ & ~0x00000004);
isFinal_ = false;
onChanged();
return this;
}
private float confidence_;
/**
*
*
*
* The Speech confidence between 0.0 and 1.0 for the current portion of audio.
* A higher number indicates an estimated greater likelihood that the
* recognized words are correct. The default of 0.0 is a sentinel value
* indicating that confidence was not set.
*
* This field is typically only provided if `is_final` is true and you should
* not rely on it being accurate or even set.
*
*
* float confidence = 4;
*
* @return The confidence.
*/
@java.lang.Override
public float getConfidence() {
return confidence_;
}
/**
*
*
*
* The Speech confidence between 0.0 and 1.0 for the current portion of audio.
* A higher number indicates an estimated greater likelihood that the
* recognized words are correct. The default of 0.0 is a sentinel value
* indicating that confidence was not set.
*
* This field is typically only provided if `is_final` is true and you should
* not rely on it being accurate or even set.
*
*
* float confidence = 4;
*
* @param value The confidence to set.
* @return This builder for chaining.
*/
public Builder setConfidence(float value) {
confidence_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
*
* The Speech confidence between 0.0 and 1.0 for the current portion of audio.
* A higher number indicates an estimated greater likelihood that the
* recognized words are correct. The default of 0.0 is a sentinel value
* indicating that confidence was not set.
*
* This field is typically only provided if `is_final` is true and you should
* not rely on it being accurate or even set.
*
*
* float confidence = 4;
*
* @return This builder for chaining.
*/
public Builder clearConfidence() {
bitField0_ = (bitField0_ & ~0x00000008);
confidence_ = 0F;
onChanged();
return this;
}
private float stability_;
/**
*
*
*
* An estimate of the likelihood that the speech recognizer will
* not change its guess about this interim recognition result:
* * If the value is unspecified or 0.0, Dialogflow didn't compute the
* stability. In particular, Dialogflow will only provide stability for
* `TRANSCRIPT` results with `is_final = false`.
* * Otherwise, the value is in (0.0, 1.0] where 0.0 means completely
* unstable and 1.0 means completely stable.
*
*
* float stability = 6;
*
* @return The stability.
*/
@java.lang.Override
public float getStability() {
return stability_;
}
/**
*
*
*
* An estimate of the likelihood that the speech recognizer will
* not change its guess about this interim recognition result:
* * If the value is unspecified or 0.0, Dialogflow didn't compute the
* stability. In particular, Dialogflow will only provide stability for
* `TRANSCRIPT` results with `is_final = false`.
* * Otherwise, the value is in (0.0, 1.0] where 0.0 means completely
* unstable and 1.0 means completely stable.
*
*
* float stability = 6;
*
* @param value The stability to set.
* @return This builder for chaining.
*/
public Builder setStability(float value) {
stability_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
*
* An estimate of the likelihood that the speech recognizer will
* not change its guess about this interim recognition result:
* * If the value is unspecified or 0.0, Dialogflow didn't compute the
* stability. In particular, Dialogflow will only provide stability for
* `TRANSCRIPT` results with `is_final = false`.
* * Otherwise, the value is in (0.0, 1.0] where 0.0 means completely
* unstable and 1.0 means completely stable.
*
*
* float stability = 6;
*
* @return This builder for chaining.
*/
public Builder clearStability() {
bitField0_ = (bitField0_ & ~0x00000010);
stability_ = 0F;
onChanged();
return this;
}
// Backing list for repeated field `speech_word_info = 7`. Starts as the shared
// immutable empty list; bit 0x20 of bitField0_ tracks whether we own a mutable copy.
// NOTE: the extracted text had the generic parameters stripped (raw List/ArrayList);
// restored to the types protoc generates for this field.
private java.util.List<com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo> speechWordInfo_ =
    java.util.Collections.emptyList();

/** Copy-on-write guard: replaces the list with a private ArrayList before the first mutation. */
private void ensureSpeechWordInfoIsMutable() {
  if (!((bitField0_ & 0x00000020) != 0)) {
    speechWordInfo_ =
        new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo>(
            speechWordInfo_);
    bitField0_ |= 0x00000020;
  }
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo,
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder,
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfoOrBuilder>
speechWordInfoBuilder_;
/**
 * <pre>
 * Word-specific information for the words recognized by Speech in
 * [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
 * Populated if and only if `message_type` = `TRANSCRIPT` and
 * [InputAudioConfig.enable_word_info] is set.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;</code>
 */
public java.util.List<com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo>
    getSpeechWordInfoList() {
  if (speechWordInfoBuilder_ == null) {
    // Read-only view so callers cannot bypass the copy-on-write bookkeeping.
    return java.util.Collections.unmodifiableList(speechWordInfo_);
  } else {
    return speechWordInfoBuilder_.getMessageList();
  }
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public int getSpeechWordInfoCount() {
if (speechWordInfoBuilder_ == null) {
return speechWordInfo_.size();
} else {
return speechWordInfoBuilder_.getCount();
}
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo getSpeechWordInfo(int index) {
if (speechWordInfoBuilder_ == null) {
return speechWordInfo_.get(index);
} else {
return speechWordInfoBuilder_.getMessage(index);
}
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public Builder setSpeechWordInfo(
int index, com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo value) {
if (speechWordInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSpeechWordInfoIsMutable();
speechWordInfo_.set(index, value);
onChanged();
} else {
speechWordInfoBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public Builder setSpeechWordInfo(
int index, com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder builderForValue) {
if (speechWordInfoBuilder_ == null) {
ensureSpeechWordInfoIsMutable();
speechWordInfo_.set(index, builderForValue.build());
onChanged();
} else {
speechWordInfoBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public Builder addSpeechWordInfo(com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo value) {
if (speechWordInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSpeechWordInfoIsMutable();
speechWordInfo_.add(value);
onChanged();
} else {
speechWordInfoBuilder_.addMessage(value);
}
return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public Builder addSpeechWordInfo(
int index, com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo value) {
if (speechWordInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSpeechWordInfoIsMutable();
speechWordInfo_.add(index, value);
onChanged();
} else {
speechWordInfoBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public Builder addSpeechWordInfo(
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder builderForValue) {
if (speechWordInfoBuilder_ == null) {
ensureSpeechWordInfoIsMutable();
speechWordInfo_.add(builderForValue.build());
onChanged();
} else {
speechWordInfoBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public Builder addSpeechWordInfo(
int index, com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder builderForValue) {
if (speechWordInfoBuilder_ == null) {
ensureSpeechWordInfoIsMutable();
speechWordInfo_.add(index, builderForValue.build());
onChanged();
} else {
speechWordInfoBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public Builder addAllSpeechWordInfo(
java.lang.Iterable extends com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo>
values) {
if (speechWordInfoBuilder_ == null) {
ensureSpeechWordInfoIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, speechWordInfo_);
onChanged();
} else {
speechWordInfoBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public Builder clearSpeechWordInfo() {
if (speechWordInfoBuilder_ == null) {
speechWordInfo_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
onChanged();
} else {
speechWordInfoBuilder_.clear();
}
return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public Builder removeSpeechWordInfo(int index) {
if (speechWordInfoBuilder_ == null) {
ensureSpeechWordInfoIsMutable();
speechWordInfo_.remove(index);
onChanged();
} else {
speechWordInfoBuilder_.remove(index);
}
return this;
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder getSpeechWordInfoBuilder(
int index) {
return getSpeechWordInfoFieldBuilder().getBuilder(index);
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfoOrBuilder
getSpeechWordInfoOrBuilder(int index) {
if (speechWordInfoBuilder_ == null) {
return speechWordInfo_.get(index);
} else {
return speechWordInfoBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public java.util.List extends com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfoOrBuilder>
getSpeechWordInfoOrBuilderList() {
if (speechWordInfoBuilder_ != null) {
return speechWordInfoBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(speechWordInfo_);
}
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder
addSpeechWordInfoBuilder() {
return getSpeechWordInfoFieldBuilder()
.addBuilder(com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.getDefaultInstance());
}
/**
*
*
*
* Word-specific information for the words recognized by Speech in
* [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
* Populated if and only if `message_type` = `TRANSCRIPT` and
* [InputAudioConfig.enable_word_info] is set.
*
*
* repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;
*
*/
public com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder addSpeechWordInfoBuilder(
int index) {
return getSpeechWordInfoFieldBuilder()
.addBuilder(
index, com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.getDefaultInstance());
}
/**
 * <pre>
 * Word-specific information for the words recognized by Speech in
 * [transcript][google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult.transcript].
 * Populated if and only if `message_type` = `TRANSCRIPT` and
 * [InputAudioConfig.enable_word_info] is set.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo speech_word_info = 7;</code>
 */
public java.util.List<com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder>
    getSpeechWordInfoBuilderList() {
  return getSpeechWordInfoFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo,
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder,
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfoOrBuilder>
getSpeechWordInfoFieldBuilder() {
if (speechWordInfoBuilder_ == null) {
speechWordInfoBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo,
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfo.Builder,
com.google.cloud.dialogflow.cx.v3beta1.SpeechWordInfoOrBuilder>(
speechWordInfo_,
((bitField0_ & 0x00000020) != 0),
getParentForChildren(),
isClean());
speechWordInfo_ = null;
}
return speechWordInfoBuilder_;
}
// speech_end_offset field state: either the plain message or a single-field
// builder holds the value; once the builder exists it is the source of truth.
private com.google.protobuf.Duration speechEndOffset_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    speechEndOffsetBuilder_;
/**
 * <pre>
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 * </pre>
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 *
 * @return Whether the speechEndOffset field is set.
 */
public boolean hasSpeechEndOffset() {
  return ((bitField0_ & 0x00000040) != 0);
}
/**
*
*
*
* Time offset of the end of this Speech recognition result relative to the
* beginning of the audio. Only populated for `message_type` =
* `TRANSCRIPT`.
*
*
* .google.protobuf.Duration speech_end_offset = 8;
*
* @return The speechEndOffset.
*/
public com.google.protobuf.Duration getSpeechEndOffset() {
if (speechEndOffsetBuilder_ == null) {
return speechEndOffset_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: speechEndOffset_;
} else {
return speechEndOffsetBuilder_.getMessage();
}
}
/**
*
*
*
* Time offset of the end of this Speech recognition result relative to the
* beginning of the audio. Only populated for `message_type` =
* `TRANSCRIPT`.
*
*
* .google.protobuf.Duration speech_end_offset = 8;
*/
public Builder setSpeechEndOffset(com.google.protobuf.Duration value) {
if (speechEndOffsetBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
speechEndOffset_ = value;
} else {
speechEndOffsetBuilder_.setMessage(value);
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
*
*
* Time offset of the end of this Speech recognition result relative to the
* beginning of the audio. Only populated for `message_type` =
* `TRANSCRIPT`.
*
*
* .google.protobuf.Duration speech_end_offset = 8;
*/
public Builder setSpeechEndOffset(com.google.protobuf.Duration.Builder builderForValue) {
if (speechEndOffsetBuilder_ == null) {
speechEndOffset_ = builderForValue.build();
} else {
speechEndOffsetBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
 * <pre>
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 * </pre>
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
public Builder mergeSpeechEndOffset(com.google.protobuf.Duration value) {
  if (speechEndOffsetBuilder_ == null) {
    // Merge into the existing value only when one is actually present:
    // has-bit set, non-null, and not the shared default instance (compared
    // by reference on purpose — default instances are singletons).
    if (((bitField0_ & 0x00000040) != 0)
        && speechEndOffset_ != null
        && speechEndOffset_ != com.google.protobuf.Duration.getDefaultInstance()) {
      getSpeechEndOffsetBuilder().mergeFrom(value);
    } else {
      speechEndOffset_ = value;
    }
  } else {
    speechEndOffsetBuilder_.mergeFrom(value);
  }
  if (speechEndOffset_ != null) {
    bitField0_ |= 0x00000040;
    onChanged();
  }
  return this;
}
/**
*
*
*
* Time offset of the end of this Speech recognition result relative to the
* beginning of the audio. Only populated for `message_type` =
* `TRANSCRIPT`.
*
*
* .google.protobuf.Duration speech_end_offset = 8;
*/
public Builder clearSpeechEndOffset() {
bitField0_ = (bitField0_ & ~0x00000040);
speechEndOffset_ = null;
if (speechEndOffsetBuilder_ != null) {
speechEndOffsetBuilder_.dispose();
speechEndOffsetBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
*
* Time offset of the end of this Speech recognition result relative to the
* beginning of the audio. Only populated for `message_type` =
* `TRANSCRIPT`.
*
*
* .google.protobuf.Duration speech_end_offset = 8;
*/
public com.google.protobuf.Duration.Builder getSpeechEndOffsetBuilder() {
bitField0_ |= 0x00000040;
onChanged();
return getSpeechEndOffsetFieldBuilder().getBuilder();
}
/**
 * <pre>
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 * </pre>
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
public com.google.protobuf.DurationOrBuilder getSpeechEndOffsetOrBuilder() {
  // Prefer the live nested builder when one exists; otherwise fall back to the
  // cached message, substituting the default instance if nothing is set.
  if (speechEndOffsetBuilder_ == null) {
    if (speechEndOffset_ == null) {
      return com.google.protobuf.Duration.getDefaultInstance();
    }
    return speechEndOffset_;
  }
  return speechEndOffsetBuilder_.getMessageOrBuilder();
}
/**
 * <pre>
 * Time offset of the end of this Speech recognition result relative to the
 * beginning of the audio. Only populated for `message_type` =
 * `TRANSCRIPT`.
 * </pre>
 *
 * <code>.google.protobuf.Duration speech_end_offset = 8;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    getSpeechEndOffsetFieldBuilder() {
  // Lazily create the field builder; from then on it owns the field's state,
  // so the plain speechEndOffset_ reference is nulled out.
  if (speechEndOffsetBuilder_ == null) {
    speechEndOffsetBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>(
            getSpeechEndOffset(), getParentForChildren(), isClean());
    speechEndOffset_ = null;
  }
  return speechEndOffsetBuilder_;
}
private java.lang.Object languageCode_ = "";
/**
 * <pre>
 * Detected language code for the transcript.
 * </pre>
 *
 * <code>string language_code = 10;</code>
 *
 * @return The languageCode.
 */
public java.lang.String getLanguageCode() {
  java.lang.Object ref = languageCode_;
  // Fast path: the field already holds a decoded String.
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Slow path: decode the wire-format ByteString and cache the result.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) ref;
  java.lang.String decoded = bytes.toStringUtf8();
  languageCode_ = decoded;
  return decoded;
}
/**
 * <pre>
 * Detected language code for the transcript.
 * </pre>
 *
 * <code>string language_code = 10;</code>
 *
 * @return The bytes for languageCode.
 */
public com.google.protobuf.ByteString getLanguageCodeBytes() {
  java.lang.Object ref = languageCode_;
  // Fast path: the field already holds an encoded ByteString.
  if (ref instanceof com.google.protobuf.ByteString) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Slow path: encode the cached String as UTF-8 and cache the bytes.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  languageCode_ = encoded;
  return encoded;
}
/**
 * <pre>
 * Detected language code for the transcript.
 * </pre>
 *
 * <code>string language_code = 10;</code>
 *
 * @param value The languageCode to set.
 * @return This builder for chaining.
 */
public Builder setLanguageCode(java.lang.String value) {
  // Proto3 string fields are null-hostile.
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000080; // mark language_code as explicitly set
  languageCode_ = value;
  onChanged();
  return this;
}
/**
 * <pre>
 * Detected language code for the transcript.
 * </pre>
 *
 * <code>string language_code = 10;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearLanguageCode() {
  // Drop the has-bit and restore the field's default value.
  bitField0_ = (bitField0_ & ~0x00000080);
  languageCode_ = getDefaultInstance().getLanguageCode();
  onChanged();
  return this;
}
/**
 * <pre>
 * Detected language code for the transcript.
 * </pre>
 *
 * <code>string language_code = 10;</code>
 *
 * @param value The bytes for languageCode to set.
 * @return This builder for chaining.
 */
public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
  // Proto3 string fields are null-hostile and must carry valid UTF-8.
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  bitField0_ |= 0x00000080; // mark language_code as explicitly set
  languageCode_ = value;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegates to the generated superclass; replaces the unknown-field set wholesale.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegates to the generated superclass; merges into the existing unknown-field set.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult)
// Singleton default instance: immutable message with every field at its
// proto3 default, created once at class-initialization time.
private static final com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult();
}
// Returns the shared immutable default instance of this message type.
public static com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser for this message type. The generic type parameters were
// restored here: the raw `Parser` / `AbstractParser` forms trigger unchecked
// warnings and lose compile-time type safety for callers of parser().
private static final com.google.protobuf.Parser<StreamingRecognitionResult> PARSER =
    new com.google.protobuf.AbstractParser<StreamingRecognitionResult>() {
      @java.lang.Override
      public StreamingRecognitionResult parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach the partially-parsed message so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type callers expect.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Returns the shared parser for this message type. Restored the generic
// return type `Parser<StreamingRecognitionResult>` (was a raw `Parser`),
// which is what protoc emits and what typed callers rely on.
public static com.google.protobuf.Parser<StreamingRecognitionResult> parser() {
  return PARSER;
}
// Instance accessor for the shared parser. Restored the generic return type
// `Parser<StreamingRecognitionResult>` (was a raw `Parser`), matching the
// covariant override protoc generates.
@java.lang.Override
public com.google.protobuf.Parser<StreamingRecognitionResult> getParserForType() {
  return PARSER;
}
// Instance accessor for the shared immutable default instance.
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.StreamingRecognitionResult
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}