com.google.cloud.speech.v1.StreamingRecognizeResponse Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of proto-google-cloud-speech-v1 · Show documentation
PROTO library for proto-google-cloud-speech-v1
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v1/cloud_speech.proto
// Protobuf Java Version: 3.25.3
package com.google.cloud.speech.v1;
/**
*
*
*
* `StreamingRecognizeResponse` is the only message returned to the client by
* `StreamingRecognize`. A series of zero or more `StreamingRecognizeResponse`
* messages are streamed back to the client. If there is no recognizable
* audio, and `single_utterance` is set to false, then no messages are streamed
* back to the client.
*
* Here's an example of a series of `StreamingRecognizeResponse`s that might be
* returned while processing audio:
*
* 1. results { alternatives { transcript: "tube" } stability: 0.01 }
*
* 2. results { alternatives { transcript: "to be a" } stability: 0.01 }
*
* 3. results { alternatives { transcript: "to be" } stability: 0.9 }
* results { alternatives { transcript: " or not to be" } stability: 0.01 }
*
* 4. results { alternatives { transcript: "to be or not to be"
* confidence: 0.92 }
* alternatives { transcript: "to bee or not to bee" }
* is_final: true }
*
* 5. results { alternatives { transcript: " that's" } stability: 0.01 }
*
* 6. results { alternatives { transcript: " that is" } stability: 0.9 }
* results { alternatives { transcript: " the question" } stability: 0.01 }
*
* 7. results { alternatives { transcript: " that is the question"
* confidence: 0.98 }
* alternatives { transcript: " that was the question" }
* is_final: true }
*
* Notes:
*
* - Only two of the above responses #4 and #7 contain final results; they are
* indicated by `is_final: true`. Concatenating these together generates the
* full transcript: "to be or not to be that is the question".
*
* - The others contain interim `results`. #3 and #6 contain two interim
* `results`: the first portion has a high stability and is less likely to
* change; the second portion has a low stability and is very likely to
* change. A UI designer might choose to show only high stability `results`.
*
* - The specific `stability` and `confidence` values shown above are only for
* illustrative purposes. Actual values may vary.
*
* - In each response, only one of these fields will be set:
* `error`,
* `speech_event_type`, or
* one or more (repeated) `results`.
*
*
* Protobuf type {@code google.cloud.speech.v1.StreamingRecognizeResponse}
*/
public final class StreamingRecognizeResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.speech.v1.StreamingRecognizeResponse)
StreamingRecognizeResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use StreamingRecognizeResponse.newBuilder() to construct.
// NOTE: the extraction dropped the wildcard type argument here ("Builder>"),
// which is not valid Java; restored to the standard generated signature.
private StreamingRecognizeResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor: initializes fields to their proto3 defaults — the
// repeated `results` field to an immutable empty list and the speech event
// type to its default wire value (SPEECH_EVENT_UNSPECIFIED = 0).
private StreamingRecognizeResponse() {
  speechEventType_ = 0;
  results_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
// Reflection hook used by the protobuf runtime to allocate fresh instances;
// the UnusedPrivateParameter only disambiguates this overload.
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new StreamingRecognizeResponse();
}
// Returns the shared message descriptor for this type (file-level metadata).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v1.SpeechProto
.internal_static_google_cloud_speech_v1_StreamingRecognizeResponse_descriptor;
}
@java.lang.Override
// Wires up reflective field access for this message class and its Builder.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v1.SpeechProto
.internal_static_google_cloud_speech_v1_StreamingRecognizeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v1.StreamingRecognizeResponse.class,
com.google.cloud.speech.v1.StreamingRecognizeResponse.Builder.class);
}
/**
 * <pre>
 * Indicates the type of speech event.
 * </pre>
 *
 * Protobuf enum {@code google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType}
 */
public enum SpeechEventType implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * <pre>
   * No speech event specified.
   * </pre>
   *
   * <code>SPEECH_EVENT_UNSPECIFIED = 0;</code>
   */
  SPEECH_EVENT_UNSPECIFIED(0),
  /**
   * <pre>
   * This event indicates that the server has detected the end of the user's
   * speech utterance and expects no additional speech. Therefore, the server
   * will not process additional audio (although it may subsequently return
   * additional results). The client should stop sending additional audio
   * data, half-close the gRPC connection, and wait for any additional results
   * until the server closes the gRPC connection. This event is only sent if
   * `single_utterance` was set to `true`, and is not used otherwise.
   * </pre>
   *
   * <code>END_OF_SINGLE_UTTERANCE = 1;</code>
   */
  END_OF_SINGLE_UTTERANCE(1),
  /**
   * <pre>
   * This event indicates that the server has detected the beginning of human
   * voice activity in the stream. This event can be returned multiple times
   * if speech starts and stops repeatedly throughout the stream. This event
   * is only sent if `voice_activity_events` is set to true.
   * </pre>
   *
   * <code>SPEECH_ACTIVITY_BEGIN = 2;</code>
   */
  SPEECH_ACTIVITY_BEGIN(2),
  /**
   * <pre>
   * This event indicates that the server has detected the end of human voice
   * activity in the stream. This event can be returned multiple times if
   * speech starts and stops repeatedly throughout the stream. This event is
   * only sent if `voice_activity_events` is set to true.
   * </pre>
   *
   * <code>SPEECH_ACTIVITY_END = 3;</code>
   */
  SPEECH_ACTIVITY_END(3),
  /**
   * <pre>
   * This event indicates that the user-set timeout for speech activity begin
   * or end has exceeded. Upon receiving this event, the client is expected to
   * send a half close. Further audio will not be processed.
   * </pre>
   *
   * <code>SPEECH_ACTIVITY_TIMEOUT = 4;</code>
   */
  SPEECH_ACTIVITY_TIMEOUT(4),
  /** Sentinel for wire values not known to this version of the generated code. */
  UNRECOGNIZED(-1),
  ;

  /** Numeric wire value. <code>SPEECH_EVENT_UNSPECIFIED = 0;</code> */
  public static final int SPEECH_EVENT_UNSPECIFIED_VALUE = 0;
  /** Numeric wire value. <code>END_OF_SINGLE_UTTERANCE = 1;</code> */
  public static final int END_OF_SINGLE_UTTERANCE_VALUE = 1;
  /** Numeric wire value. <code>SPEECH_ACTIVITY_BEGIN = 2;</code> */
  public static final int SPEECH_ACTIVITY_BEGIN_VALUE = 2;
  /** Numeric wire value. <code>SPEECH_ACTIVITY_END = 3;</code> */
  public static final int SPEECH_ACTIVITY_END_VALUE = 3;
  /** Numeric wire value. <code>SPEECH_ACTIVITY_TIMEOUT = 4;</code> */
  public static final int SPEECH_ACTIVITY_TIMEOUT_VALUE = 4;

  /**
   * Returns the numeric wire value of this enum entry.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED},
   *     which has no defined wire value.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static SpeechEventType valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or
   *     {@code null} if the value is unknown.
   */
  public static SpeechEventType forNumber(int value) {
    switch (value) {
      case 0:
        return SPEECH_EVENT_UNSPECIFIED;
      case 1:
        return END_OF_SINGLE_UTTERANCE;
      case 2:
        return SPEECH_ACTIVITY_BEGIN;
      case 3:
        return SPEECH_ACTIVITY_END;
      case 4:
        return SPEECH_ACTIVITY_TIMEOUT;
      default:
        return null;
    }
  }

  // NOTE: the extraction stripped the <SpeechEventType> type arguments from
  // EnumLiteMap below (leaving raw types); restored to the standard generated form.
  public static com.google.protobuf.Internal.EnumLiteMap<SpeechEventType> internalGetValueMap() {
    return internalValueMap;
  }

  // Lookup used by the protobuf runtime to resolve wire values to entries.
  private static final com.google.protobuf.Internal.EnumLiteMap<SpeechEventType>
      internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<SpeechEventType>() {
            public SpeechEventType findValueByNumber(int number) {
              return SpeechEventType.forNumber(number);
            }
          };

  /**
   * Returns the descriptor of this value; throws for {@link #UNRECOGNIZED},
   * which has no descriptor.
   */
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  // This enum is the first (index 0) nested enum of StreamingRecognizeResponse.
  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.speech.v1.StreamingRecognizeResponse.getDescriptor()
        .getEnumTypes()
        .get(0);
  }

  private static final SpeechEventType[] VALUES = values();

  /** Resolves an entry from its descriptor; index -1 maps to {@link #UNRECOGNIZED}. */
  public static SpeechEventType valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private SpeechEventType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType)
}
// Tracks which optional message fields are explicitly set:
// 0x1 = error, 0x2 = speech_event_time, 0x4 = total_billed_time,
// 0x8 = speech_adaptation_info (see hasError() etc.).
private int bitField0_;
public static final int ERROR_FIELD_NUMBER = 1;
// Error status for the operation; null when unset (guarded by bitField0_ 0x1).
private com.google.rpc.Status error_;
/**
 * Reports whether a [google.rpc.Status][google.rpc.Status] error for the
 * operation has been set on this response.
 *
 * <code>.google.rpc.Status error = 1;</code>
 *
 * @return Whether the error field is set.
 */
@java.lang.Override
public boolean hasError() {
  // Single-bit mask: equality with the mask is equivalent to a non-zero test.
  return (bitField0_ & 0x00000001) == 0x00000001;
}
/**
 * Returns the [google.rpc.Status][google.rpc.Status] error for the operation,
 * or the default Status instance when the field is unset.
 *
 * <code>.google.rpc.Status error = 1;</code>
 *
 * @return The error.
 */
@java.lang.Override
public com.google.rpc.Status getError() {
  com.google.rpc.Status current = error_;
  if (current == null) {
    current = com.google.rpc.Status.getDefaultInstance();
  }
  return current;
}
/**
 * Returns the error as its OrBuilder view, or the default Status instance
 * when the field is unset.
 *
 * <code>.google.rpc.Status error = 1;</code>
 */
@java.lang.Override
public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
  com.google.rpc.Status current = error_;
  if (current == null) {
    current = com.google.rpc.Status.getDefaultInstance();
  }
  return current;
}
public static final int RESULTS_FIELD_NUMBER = 2;

@SuppressWarnings("serial")
// Repeated field 2: consecutive streaming recognition results.
// NOTE: the extraction stripped the element type argument here (raw
// java.util.List); restored to the standard generated declaration.
private java.util.List<com.google.cloud.speech.v1.StreamingRecognitionResult> results_;
/**
 * This repeated list contains zero or more results that
 * correspond to consecutive portions of the audio currently being processed.
 * It contains zero or one `is_final=true` result (the newly settled portion),
 * followed by zero or more `is_final=false` results (the interim results).
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
// NOTE: the extraction dropped the generic return type (raw java.util.List);
// restored to the standard generated signature.
@java.lang.Override
public java.util.List<com.google.cloud.speech.v1.StreamingRecognitionResult> getResultsList() {
  return results_;
}
/**
*
*
*
* This repeated list contains zero or more results that
* correspond to consecutive portions of the audio currently being processed.
* It contains zero or one `is_final=true` result (the newly settled portion),
* followed by zero or more `is_final=false` results (the interim results).
*
*
* repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;
*/
@java.lang.Override
public java.util.List extends com.google.cloud.speech.v1.StreamingRecognitionResultOrBuilder>
getResultsOrBuilderList() {
return results_;
}
/**
 * Returns the number of entries in the repeated `results` field.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
@java.lang.Override
public int getResultsCount() {
  final int count = results_.size();
  return count;
}
/**
 * Returns the result at the given position in the repeated `results` field.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
@java.lang.Override
public com.google.cloud.speech.v1.StreamingRecognitionResult getResults(int index) {
  final com.google.cloud.speech.v1.StreamingRecognitionResult element = results_.get(index);
  return element;
}
/**
 * Returns the result at the given position as its OrBuilder view.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
@java.lang.Override
public com.google.cloud.speech.v1.StreamingRecognitionResultOrBuilder getResultsOrBuilder(
    int index) {
  final com.google.cloud.speech.v1.StreamingRecognitionResultOrBuilder element =
      results_.get(index);
  return element;
}
public static final int SPEECH_EVENT_TYPE_FIELD_NUMBER = 4;
// Raw wire value of the speech event type; 0 = SPEECH_EVENT_UNSPECIFIED.
private int speechEventType_ = 0;
/**
 * Indicates the type of speech event.
 *
 * <code>.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType speech_event_type = 4;</code>
 *
 * @return The enum numeric value on the wire for speechEventType.
 */
@java.lang.Override
public int getSpeechEventTypeValue() {
return speechEventType_;
}
/**
 * Indicates the type of speech event, mapped from the raw wire value.
 * Wire values unknown to this generated code map to {@code UNRECOGNIZED}.
 *
 * <code>.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType speech_event_type = 4;</code>
 *
 * @return The speechEventType.
 */
@java.lang.Override
public com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType
    getSpeechEventType() {
  com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType mapped =
      com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType.forNumber(
          speechEventType_);
  if (mapped != null) {
    return mapped;
  }
  return com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType.UNRECOGNIZED;
}
public static final int SPEECH_EVENT_TIME_FIELD_NUMBER = 8;
// Offset between the beginning of the audio and event emission; null when unset.
private com.google.protobuf.Duration speechEventTime_;
/**
 * Reports whether the `speech_event_time` field is set.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 *
 * @return Whether the speechEventTime field is set.
 */
@java.lang.Override
public boolean hasSpeechEventTime() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * Returns the time offset between the beginning of the audio and event
 * emission, or the default Duration instance when unset.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 *
 * @return The speechEventTime.
 */
@java.lang.Override
public com.google.protobuf.Duration getSpeechEventTime() {
  com.google.protobuf.Duration current = speechEventTime_;
  if (current == null) {
    current = com.google.protobuf.Duration.getDefaultInstance();
  }
  return current;
}
/**
 * Returns the speech event time as its OrBuilder view, or the default
 * Duration instance when unset.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 */
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getSpeechEventTimeOrBuilder() {
  com.google.protobuf.Duration current = speechEventTime_;
  if (current == null) {
    current = com.google.protobuf.Duration.getDefaultInstance();
  }
  return current;
}
public static final int TOTAL_BILLED_TIME_FIELD_NUMBER = 5;
// Billed audio duration; only populated on the last response of a stream.
private com.google.protobuf.Duration totalBilledTime_;
/**
 * Reports whether the `total_billed_time` field is set.
 * Set only if this is the last response in the stream.
 *
 * <code>.google.protobuf.Duration total_billed_time = 5;</code>
 *
 * @return Whether the totalBilledTime field is set.
 */
@java.lang.Override
public boolean hasTotalBilledTime() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
 * Returns the billed audio duration for the stream (populated only on the
 * last response), or the default Duration instance when unset.
 *
 * <code>.google.protobuf.Duration total_billed_time = 5;</code>
 *
 * @return The totalBilledTime.
 */
@java.lang.Override
public com.google.protobuf.Duration getTotalBilledTime() {
  com.google.protobuf.Duration current = totalBilledTime_;
  if (current == null) {
    current = com.google.protobuf.Duration.getDefaultInstance();
  }
  return current;
}
/**
 * Returns the total billed time as its OrBuilder view, or the default
 * Duration instance when unset.
 *
 * <code>.google.protobuf.Duration total_billed_time = 5;</code>
 */
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getTotalBilledTimeOrBuilder() {
  com.google.protobuf.Duration current = totalBilledTime_;
  if (current == null) {
    current = com.google.protobuf.Duration.getDefaultInstance();
  }
  return current;
}
public static final int SPEECH_ADAPTATION_INFO_FIELD_NUMBER = 9;
// Adaptation behavior info for this response; null when unset.
private com.google.cloud.speech.v1.SpeechAdaptationInfo speechAdaptationInfo_;
/**
 * Reports whether the `speech_adaptation_info` field is set.
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @return Whether the speechAdaptationInfo field is set.
 */
@java.lang.Override
public boolean hasSpeechAdaptationInfo() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
 * Returns information on adaptation behavior in this response, or the
 * default instance when unset.
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @return The speechAdaptationInfo.
 */
@java.lang.Override
public com.google.cloud.speech.v1.SpeechAdaptationInfo getSpeechAdaptationInfo() {
  com.google.cloud.speech.v1.SpeechAdaptationInfo current = speechAdaptationInfo_;
  if (current == null) {
    current = com.google.cloud.speech.v1.SpeechAdaptationInfo.getDefaultInstance();
  }
  return current;
}
/**
 * Returns the speech adaptation info as its OrBuilder view, or the default
 * instance when unset.
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 */
@java.lang.Override
public com.google.cloud.speech.v1.SpeechAdaptationInfoOrBuilder
    getSpeechAdaptationInfoOrBuilder() {
  com.google.cloud.speech.v1.SpeechAdaptationInfo current = speechAdaptationInfo_;
  if (current == null) {
    current = com.google.cloud.speech.v1.SpeechAdaptationInfo.getDefaultInstance();
  }
  return current;
}
public static final int REQUEST_ID_FIELD_NUMBER = 10;
// Unique ID associated with the request; 0 is the unset/default value.
private long requestId_ = 0L;
/**
 * The ID associated with the request. This is a unique ID specific only to
 * the given request.
 *
 * <code>int64 request_id = 10;</code>
 *
 * @return The requestId.
 */
@java.lang.Override
public long getRequestId() {
return requestId_;
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
// This message has no required fields, so it is always initialized; the
// answer is cached after the first call.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
// Serializes set fields in ascending field-number order:
// 1 error, 2 results, 4 speech_event_type, 5 total_billed_time,
// 8 speech_event_time, 9 speech_adaptation_info, 10 request_id.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getError());
}
for (int i = 0; i < results_.size(); i++) {
output.writeMessage(2, results_.get(i));
}
// proto3 scalar/enum fields are only written when different from the default.
if (speechEventType_
!= com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType
.SPEECH_EVENT_UNSPECIFIED
.getNumber()) {
output.writeEnum(4, speechEventType_);
}
if (((bitField0_ & 0x00000004) != 0)) {
output.writeMessage(5, getTotalBilledTime());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(8, getSpeechEventTime());
}
if (((bitField0_ & 0x00000008) != 0)) {
output.writeMessage(9, getSpeechAdaptationInfo());
}
if (requestId_ != 0L) {
output.writeInt64(10, requestId_);
}
// Preserve any fields that were on the wire but unknown to this version.
getUnknownFields().writeTo(output);
}
@java.lang.Override
// Computes (and memoizes) the serialized byte size; the per-field conditions
// must mirror writeTo() exactly so size and output stay in sync.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getError());
}
for (int i = 0; i < results_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, results_.get(i));
}
if (speechEventType_
!= com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType
.SPEECH_EVENT_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, speechEventType_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getTotalBilledTime());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getSpeechEventTime());
}
if (((bitField0_ & 0x00000008) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(9, getSpeechAdaptationInfo());
}
if (requestId_ != 0L) {
size += com.google.protobuf.CodedOutputStream.computeInt64Size(10, requestId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
// Field-by-field equality: optional message fields compare presence first,
// then value; unknown fields are included so round-tripped messages compare equal.
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.speech.v1.StreamingRecognizeResponse)) {
return super.equals(obj);
}
com.google.cloud.speech.v1.StreamingRecognizeResponse other =
(com.google.cloud.speech.v1.StreamingRecognizeResponse) obj;
if (hasError() != other.hasError()) return false;
if (hasError()) {
if (!getError().equals(other.getError())) return false;
}
if (!getResultsList().equals(other.getResultsList())) return false;
if (speechEventType_ != other.speechEventType_) return false;
if (hasSpeechEventTime() != other.hasSpeechEventTime()) return false;
if (hasSpeechEventTime()) {
if (!getSpeechEventTime().equals(other.getSpeechEventTime())) return false;
}
if (hasTotalBilledTime() != other.hasTotalBilledTime()) return false;
if (hasTotalBilledTime()) {
if (!getTotalBilledTime().equals(other.getTotalBilledTime())) return false;
}
if (hasSpeechAdaptationInfo() != other.hasSpeechAdaptationInfo()) return false;
if (hasSpeechAdaptationInfo()) {
if (!getSpeechAdaptationInfo().equals(other.getSpeechAdaptationInfo())) return false;
}
if (getRequestId() != other.getRequestId()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
// Memoized hash consistent with equals(); mixes field numbers and values
// using the protobuf generator's standard 19/37/53/29 multipliers.
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasError()) {
hash = (37 * hash) + ERROR_FIELD_NUMBER;
hash = (53 * hash) + getError().hashCode();
}
if (getResultsCount() > 0) {
hash = (37 * hash) + RESULTS_FIELD_NUMBER;
hash = (53 * hash) + getResultsList().hashCode();
}
hash = (37 * hash) + SPEECH_EVENT_TYPE_FIELD_NUMBER;
hash = (53 * hash) + speechEventType_;
if (hasSpeechEventTime()) {
hash = (37 * hash) + SPEECH_EVENT_TIME_FIELD_NUMBER;
hash = (53 * hash) + getSpeechEventTime().hashCode();
}
if (hasTotalBilledTime()) {
hash = (37 * hash) + TOTAL_BILLED_TIME_FIELD_NUMBER;
hash = (53 * hash) + getTotalBilledTime().hashCode();
}
if (hasSpeechAdaptationInfo()) {
hash = (37 * hash) + SPEECH_ADAPTATION_INFO_FIELD_NUMBER;
hash = (53 * hash) + getSpeechAdaptationInfo().hashCode();
}
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getRequestId());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// ---------------------------------------------------------------------------
// Static parsing entry points. All overloads delegate to PARSER (directly, or
// via the GeneratedMessageV3 IO helpers) and differ only in the input source:
// ByteBuffer, ByteString, byte[], InputStream, or CodedInputStream, each with
// an optional ExtensionRegistryLite variant. The *delimited* overloads read a
// length-prefixed message from the stream.
// ---------------------------------------------------------------------------
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v1.StreamingRecognizeResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
// Creates a fresh Builder for this message type.
public Builder newBuilderForType() {
return newBuilder();
}
// Creates a Builder initialized with default field values.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a Builder pre-populated from an existing message.
public static Builder newBuilder(
com.google.cloud.speech.v1.StreamingRecognizeResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
// Avoids a mergeFrom pass when converting the shared default instance.
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
// Runtime hook: builds a Builder attached to a parent for change notification.
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
*
* `StreamingRecognizeResponse` is the only message returned to the client by
* `StreamingRecognize`. A series of zero or more `StreamingRecognizeResponse`
* messages are streamed back to the client. If there is no recognizable
* audio, and `single_utterance` is set to false, then no messages are streamed
* back to the client.
*
* Here's an example of a series of `StreamingRecognizeResponse`s that might be
* returned while processing audio:
*
* 1. results { alternatives { transcript: "tube" } stability: 0.01 }
*
* 2. results { alternatives { transcript: "to be a" } stability: 0.01 }
*
* 3. results { alternatives { transcript: "to be" } stability: 0.9 }
* results { alternatives { transcript: " or not to be" } stability: 0.01 }
*
* 4. results { alternatives { transcript: "to be or not to be"
* confidence: 0.92 }
* alternatives { transcript: "to bee or not to bee" }
* is_final: true }
*
* 5. results { alternatives { transcript: " that's" } stability: 0.01 }
*
* 6. results { alternatives { transcript: " that is" } stability: 0.9 }
* results { alternatives { transcript: " the question" } stability: 0.01 }
*
* 7. results { alternatives { transcript: " that is the question"
* confidence: 0.98 }
* alternatives { transcript: " that was the question" }
* is_final: true }
*
* Notes:
*
* - Only two of the above responses #4 and #7 contain final results; they are
* indicated by `is_final: true`. Concatenating these together generates the
* full transcript: "to be or not to be that is the question".
*
* - The others contain interim `results`. #3 and #6 contain two interim
* `results`: the first portion has a high stability and is less likely to
* change; the second portion has a low stability and is very likely to
* change. A UI designer might choose to show only high stability `results`.
*
* - The specific `stability` and `confidence` values shown above are only for
* illustrative purposes. Actual values may vary.
*
* - In each response, only one of these fields will be set:
* `error`,
* `speech_event_type`, or
* one or more (repeated) `results`.
*
*
* Protobuf type {@code google.cloud.speech.v1.StreamingRecognizeResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder
implements
// @@protoc_insertion_point(builder_implements:google.cloud.speech.v1.StreamingRecognizeResponse)
com.google.cloud.speech.v1.StreamingRecognizeResponseOrBuilder {
// Returns the message descriptor (same descriptor as the message class).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v1.SpeechProto
.internal_static_google_cloud_speech_v1_StreamingRecognizeResponse_descriptor;
}
@java.lang.Override
// Wires up reflective field access for the message and this Builder.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v1.SpeechProto
.internal_static_google_cloud_speech_v1_StreamingRecognizeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v1.StreamingRecognizeResponse.class,
com.google.cloud.speech.v1.StreamingRecognizeResponse.Builder.class);
}
// Construct using com.google.cloud.speech.v1.StreamingRecognizeResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
// Parent-attached variant used by the runtime for nested-builder change propagation.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested field builders when the runtime requires it
// (alwaysUseFieldBuilders is a protobuf runtime flag, normally false).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getErrorFieldBuilder();
getResultsFieldBuilder();
getSpeechEventTimeFieldBuilder();
getTotalBilledTimeFieldBuilder();
getSpeechAdaptationInfoFieldBuilder();
}
}
@java.lang.Override
// Resets every field to its default and disposes any nested single-field
// builders so they detach from this Builder.
public Builder clear() {
super.clear();
bitField0_ = 0;
error_ = null;
if (errorBuilder_ != null) {
errorBuilder_.dispose();
errorBuilder_ = null;
}
// Repeated field: reset the plain list, or clear through the repeated-field
// builder when one is active (the builder then owns the list storage).
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
} else {
results_ = null;
resultsBuilder_.clear();
}
// Clear the results "is-mutable" bit (0x2 in the Builder's bit field).
bitField0_ = (bitField0_ & ~0x00000002);
speechEventType_ = 0;
speechEventTime_ = null;
if (speechEventTimeBuilder_ != null) {
speechEventTimeBuilder_.dispose();
speechEventTimeBuilder_ = null;
}
totalBilledTime_ = null;
if (totalBilledTimeBuilder_ != null) {
totalBilledTimeBuilder_.dispose();
totalBilledTimeBuilder_ = null;
}
speechAdaptationInfo_ = null;
if (speechAdaptationInfoBuilder_ != null) {
speechAdaptationInfoBuilder_.dispose();
speechAdaptationInfoBuilder_ = null;
}
requestId_ = 0L;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.speech.v1.SpeechProto
      .internal_static_google_cloud_speech_v1_StreamingRecognizeResponse_descriptor;
}

@java.lang.Override
public com.google.cloud.speech.v1.StreamingRecognizeResponse getDefaultInstanceForType() {
  return com.google.cloud.speech.v1.StreamingRecognizeResponse.getDefaultInstance();
}

@java.lang.Override
public com.google.cloud.speech.v1.StreamingRecognizeResponse build() {
  com.google.cloud.speech.v1.StreamingRecognizeResponse result = buildPartial();
  // Proto3 messages have no required fields, so this check cannot fail here;
  // it is kept for API parity with proto2-generated builders.
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.speech.v1.StreamingRecognizeResponse buildPartial() {
  com.google.cloud.speech.v1.StreamingRecognizeResponse result =
      new com.google.cloud.speech.v1.StreamingRecognizeResponse(this);
  // Repeated fields are transferred unconditionally; scalar/message fields
  // only when their presence bit is set.
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Transfers the repeated `results` field into the message under construction.
// If the builder owns a mutable list, it is frozen (made unmodifiable) and the
// ownership bit cleared so later mutations copy-on-write.
private void buildPartialRepeatedFields(
    com.google.cloud.speech.v1.StreamingRecognizeResponse result) {
  if (resultsBuilder_ == null) {
    if (((bitField0_ & 0x00000002) != 0)) {
      results_ = java.util.Collections.unmodifiableList(results_);
      bitField0_ = (bitField0_ & ~0x00000002);
    }
    result.results_ = results_;
  } else {
    result.results_ = resultsBuilder_.build();
  }
}

// Copies singular fields whose builder presence bit is set into `result`, and
// computes the message's own bitField0_ (presence bits for the message fields
// error/speechEventTime/totalBilledTime/speechAdaptationInfo). Note the
// builder bits and message bits use different positions: builder 0x01 -> 0x01,
// 0x08 -> 0x02, 0x10 -> 0x04, 0x20 -> 0x08; the enum (0x04) and int64 (0x40)
// fields carry no message-level presence bit in proto3.
private void buildPartial0(com.google.cloud.speech.v1.StreamingRecognizeResponse result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.error_ = errorBuilder_ == null ? error_ : errorBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.speechEventType_ = speechEventType_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.speechEventTime_ =
        speechEventTimeBuilder_ == null ? speechEventTime_ : speechEventTimeBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  if (((from_bitField0_ & 0x00000010) != 0)) {
    result.totalBilledTime_ =
        totalBilledTimeBuilder_ == null ? totalBilledTime_ : totalBilledTimeBuilder_.build();
    to_bitField0_ |= 0x00000004;
  }
  if (((from_bitField0_ & 0x00000020) != 0)) {
    result.speechAdaptationInfo_ =
        speechAdaptationInfoBuilder_ == null
            ? speechAdaptationInfo_
            : speechAdaptationInfoBuilder_.build();
    to_bitField0_ |= 0x00000008;
  }
  if (((from_bitField0_ & 0x00000040) != 0)) {
    result.requestId_ = requestId_;
  }
  result.bitField0_ |= to_bitField0_;
}
// The following overrides delegate to GeneratedMessageV3.Builder; they exist
// only to narrow the return type to this Builder for call chaining.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for same-type merges; otherwise fall back to reflective merge.
  if (other instanceof com.google.cloud.speech.v1.StreamingRecognizeResponse) {
    return mergeFrom((com.google.cloud.speech.v1.StreamingRecognizeResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Field-by-field merge: message fields are recursively merged when set in
// `other`; the repeated results are appended; scalar fields are overwritten
// only when non-default in `other` (proto3 semantics).
public Builder mergeFrom(com.google.cloud.speech.v1.StreamingRecognizeResponse other) {
  if (other == com.google.cloud.speech.v1.StreamingRecognizeResponse.getDefaultInstance())
    return this;
  if (other.hasError()) {
    mergeError(other.getError());
  }
  if (resultsBuilder_ == null) {
    if (!other.results_.isEmpty()) {
      if (results_.isEmpty()) {
        // Share other's immutable list until a mutation forces a copy.
        results_ = other.results_;
        bitField0_ = (bitField0_ & ~0x00000002);
      } else {
        ensureResultsIsMutable();
        results_.addAll(other.results_);
      }
      onChanged();
    }
  } else {
    if (!other.results_.isEmpty()) {
      if (resultsBuilder_.isEmpty()) {
        // Drop the empty builder and adopt other's list directly; re-create
        // the builder only if the runtime forces field builders.
        resultsBuilder_.dispose();
        resultsBuilder_ = null;
        results_ = other.results_;
        bitField0_ = (bitField0_ & ~0x00000002);
        resultsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getResultsFieldBuilder()
                : null;
      } else {
        resultsBuilder_.addAllMessages(other.results_);
      }
    }
  }
  if (other.speechEventType_ != 0) {
    setSpeechEventTypeValue(other.getSpeechEventTypeValue());
  }
  if (other.hasSpeechEventTime()) {
    mergeSpeechEventTime(other.getSpeechEventTime());
  }
  if (other.hasTotalBilledTime()) {
    mergeTotalBilledTime(other.getTotalBilledTime());
  }
  if (other.hasSpeechAdaptationInfo()) {
    mergeSpeechAdaptationInfo(other.getSpeechAdaptationInfo());
  }
  if (other.getRequestId() != 0L) {
    setRequestId(other.getRequestId());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  // Proto3 message: no required fields, always initialized.
  return true;
}
// Wire-format parse loop: reads tag/value pairs until EOF (tag 0) or an
// end-group tag. Tag = (field_number << 3) | wire_type.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: // field 1: error (google.rpc.Status)
          {
            input.readMessage(getErrorFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18: // field 2: results (repeated StreamingRecognitionResult)
          {
            com.google.cloud.speech.v1.StreamingRecognitionResult m =
                input.readMessage(
                    com.google.cloud.speech.v1.StreamingRecognitionResult.parser(),
                    extensionRegistry);
            if (resultsBuilder_ == null) {
              ensureResultsIsMutable();
              results_.add(m);
            } else {
              resultsBuilder_.addMessage(m);
            }
            break;
          } // case 18
        case 32: // field 4: speech_event_type (enum, varint)
          {
            speechEventType_ = input.readEnum();
            bitField0_ |= 0x00000004;
            break;
          } // case 32
        case 42: // field 5: total_billed_time (Duration)
          {
            input.readMessage(getTotalBilledTimeFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000010;
            break;
          } // case 42
        case 66: // field 8: speech_event_time (Duration)
          {
            input.readMessage(getSpeechEventTimeFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000008;
            break;
          } // case 66
        case 74: // field 9: speech_adaptation_info
          {
            input.readMessage(
                getSpeechAdaptationInfoFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000020;
            break;
          } // case 74
        case 80: // field 10: request_id (int64, varint)
          {
            requestId_ = input.readInt64();
            bitField0_ |= 0x00000040;
            break;
          } // case 80
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even on parse failure: fields read before the
    // error remain set.
    onChanged();
  } // finally
  return this;
}
// Presence/ownership bits for the builder's fields (see buildPartial0 for the
// bit assignments).
private int bitField0_;

// Field 1: `error` — held directly until a field builder is created, after
// which errorBuilder_ owns the value and error_ is nulled.
private com.google.rpc.Status error_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
    errorBuilder_;
/**
 * <pre>
 * If set, returns a [google.rpc.Status][google.rpc.Status] message that
 * specifies the error for the operation.
 * </pre>
 *
 * <code>.google.rpc.Status error = 1;</code>
 *
 * @return Whether the error field is set.
 */
public boolean hasError() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <pre>
 * If set, returns a [google.rpc.Status][google.rpc.Status] message that
 * specifies the error for the operation.
 * </pre>
 *
 * <code>.google.rpc.Status error = 1;</code>
 *
 * @return The error, or the default instance if unset.
 */
public com.google.rpc.Status getError() {
  if (errorBuilder_ == null) {
    return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
  } else {
    return errorBuilder_.getMessage();
  }
}
/**
 * Sets the error status for the operation.
 *
 * <code>.google.rpc.Status error = 1;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setError(com.google.rpc.Status value) {
  if (errorBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    error_ = value;
  } else {
    errorBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * Sets the error status for the operation from a builder.
 *
 * <code>.google.rpc.Status error = 1;</code>
 */
public Builder setError(com.google.rpc.Status.Builder builderForValue) {
  if (errorBuilder_ == null) {
    error_ = builderForValue.build();
  } else {
    errorBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * Merges {@code value} into the current error: if an error is already set and
 * non-default, the fields are merged; otherwise {@code value} replaces it.
 *
 * <code>.google.rpc.Status error = 1;</code>
 */
public Builder mergeError(com.google.rpc.Status value) {
  if (errorBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)
        && error_ != null
        && error_ != com.google.rpc.Status.getDefaultInstance()) {
      getErrorBuilder().mergeFrom(value);
    } else {
      error_ = value;
    }
  } else {
    errorBuilder_.mergeFrom(value);
  }
  if (error_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}
/**
 * Clears the error field and disposes any nested builder.
 *
 * <code>.google.rpc.Status error = 1;</code>
 */
public Builder clearError() {
  bitField0_ = (bitField0_ & ~0x00000001);
  error_ = null;
  if (errorBuilder_ != null) {
    errorBuilder_.dispose();
    errorBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Returns a builder for the error field, marking the field present.
 *
 * <code>.google.rpc.Status error = 1;</code>
 */
public com.google.rpc.Status.Builder getErrorBuilder() {
  bitField0_ |= 0x00000001;
  onChanged();
  return getErrorFieldBuilder().getBuilder();
}
/**
 * Returns a read-only view of the error field without forcing builder
 * creation.
 *
 * <code>.google.rpc.Status error = 1;</code>
 */
public com.google.rpc.StatusOrBuilder getErrorOrBuilder() {
  if (errorBuilder_ != null) {
    return errorBuilder_.getMessageOrBuilder();
  } else {
    return error_ == null ? com.google.rpc.Status.getDefaultInstance() : error_;
  }
}
/**
 * Lazily creates the single-field builder for error; once created, the
 * builder owns the value and {@code error_} is nulled.
 *
 * <code>.google.rpc.Status error = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
    getErrorFieldBuilder() {
  if (errorBuilder_ == null) {
    errorBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.rpc.Status,
            com.google.rpc.Status.Builder,
            com.google.rpc.StatusOrBuilder>(getError(), getParentForChildren(), isClean());
    error_ = null;
  }
  return errorBuilder_;
}
// Field 2: `results` — stored raw until a repeated-field builder is created.
// Starts as the shared immutable empty list; copied into a private ArrayList
// on first mutation (copy-on-write), tracked by bit 0x00000002.
// NOTE(review): the extracted source had the generic type parameters stripped
// (raw List/ArrayList); restored here to match protoc output.
private java.util.List<com.google.cloud.speech.v1.StreamingRecognitionResult> results_ =
    java.util.Collections.emptyList();

/** Ensures {@code results_} is a privately-owned mutable list before an in-place edit. */
private void ensureResultsIsMutable() {
  if (!((bitField0_ & 0x00000002) != 0)) {
    results_ =
        new java.util.ArrayList<com.google.cloud.speech.v1.StreamingRecognitionResult>(
            results_);
    bitField0_ |= 0x00000002;
  }
}
// Repeated-field builder for `results`; once created it owns the list and
// results_ is nulled (see getResultsFieldBuilder()).
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.speech.v1.StreamingRecognitionResult,
        com.google.cloud.speech.v1.StreamingRecognitionResult.Builder,
        com.google.cloud.speech.v1.StreamingRecognitionResultOrBuilder>
    resultsBuilder_;
/**
 * <pre>
 * This repeated list contains zero or more results that
 * correspond to consecutive portions of the audio currently being processed.
 * It contains zero or one `is_final=true` result (the newly settled portion),
 * followed by zero or more `is_final=false` results (the interim results).
 * </pre>
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 *
 * @return an unmodifiable view of the current results list.
 */
// NOTE(review): return type restored from raw java.util.List (generics were
// stripped in extraction) to the parameterized type emitted by protoc.
public java.util.List<com.google.cloud.speech.v1.StreamingRecognitionResult> getResultsList() {
  if (resultsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(results_);
  } else {
    return resultsBuilder_.getMessageList();
  }
}
/**
 * Returns the number of elements in the repeated `results` field.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public int getResultsCount() {
  if (resultsBuilder_ == null) {
    return results_.size();
  } else {
    return resultsBuilder_.getCount();
  }
}
/**
 * Returns the result at {@code index}.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public com.google.cloud.speech.v1.StreamingRecognitionResult getResults(int index) {
  if (resultsBuilder_ == null) {
    return results_.get(index);
  } else {
    return resultsBuilder_.getMessage(index);
  }
}
/**
 * Replaces the result at {@code index}.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setResults(
    int index, com.google.cloud.speech.v1.StreamingRecognitionResult value) {
  if (resultsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResultsIsMutable();
    results_.set(index, value);
    onChanged();
  } else {
    resultsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * Replaces the result at {@code index} with the built value of
 * {@code builderForValue}.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public Builder setResults(
    int index, com.google.cloud.speech.v1.StreamingRecognitionResult.Builder builderForValue) {
  if (resultsBuilder_ == null) {
    ensureResultsIsMutable();
    results_.set(index, builderForValue.build());
    onChanged();
  } else {
    resultsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends a result to the repeated `results` field.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder addResults(com.google.cloud.speech.v1.StreamingRecognitionResult value) {
  if (resultsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResultsIsMutable();
    results_.add(value);
    onChanged();
  } else {
    resultsBuilder_.addMessage(value);
  }
  return this;
}
/**
 * Inserts a result at {@code index}, shifting later elements right.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder addResults(
    int index, com.google.cloud.speech.v1.StreamingRecognitionResult value) {
  if (resultsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResultsIsMutable();
    results_.add(index, value);
    onChanged();
  } else {
    resultsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * Appends the built value of {@code builderForValue}.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public Builder addResults(
    com.google.cloud.speech.v1.StreamingRecognitionResult.Builder builderForValue) {
  if (resultsBuilder_ == null) {
    ensureResultsIsMutable();
    results_.add(builderForValue.build());
    onChanged();
  } else {
    resultsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * Inserts the built value of {@code builderForValue} at {@code index}.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public Builder addResults(
    int index, com.google.cloud.speech.v1.StreamingRecognitionResult.Builder builderForValue) {
  if (resultsBuilder_ == null) {
    ensureResultsIsMutable();
    results_.add(index, builderForValue.build());
    onChanged();
  } else {
    resultsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
*
*
*
* This repeated list contains zero or more results that
* correspond to consecutive portions of the audio currently being processed.
* It contains zero or one `is_final=true` result (the newly settled portion),
* followed by zero or more `is_final=false` results (the interim results).
*
*
* repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;
*/
public Builder addAllResults(
java.lang.Iterable extends com.google.cloud.speech.v1.StreamingRecognitionResult>
values) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, results_);
onChanged();
} else {
resultsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * Removes all elements from the repeated `results` field.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public Builder clearResults() {
  if (resultsBuilder_ == null) {
    results_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
  } else {
    resultsBuilder_.clear();
  }
  return this;
}
/**
 * Removes the element at {@code index}, shifting later elements left.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public Builder removeResults(int index) {
  if (resultsBuilder_ == null) {
    ensureResultsIsMutable();
    results_.remove(index);
    onChanged();
  } else {
    resultsBuilder_.remove(index);
  }
  return this;
}
/**
 * Returns a builder for the result at {@code index}; forces creation of the
 * repeated-field builder.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public com.google.cloud.speech.v1.StreamingRecognitionResult.Builder getResultsBuilder(
    int index) {
  return getResultsFieldBuilder().getBuilder(index);
}
/**
 * Returns a read-only view of the result at {@code index} without forcing
 * builder creation.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public com.google.cloud.speech.v1.StreamingRecognitionResultOrBuilder getResultsOrBuilder(
    int index) {
  if (resultsBuilder_ == null) {
    return results_.get(index);
  } else {
    return resultsBuilder_.getMessageOrBuilder(index);
  }
}
/**
*
*
*
* This repeated list contains zero or more results that
* correspond to consecutive portions of the audio currently being processed.
* It contains zero or one `is_final=true` result (the newly settled portion),
* followed by zero or more `is_final=false` results (the interim results).
*
*
* repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;
*/
public java.util.List extends com.google.cloud.speech.v1.StreamingRecognitionResultOrBuilder>
getResultsOrBuilderList() {
if (resultsBuilder_ != null) {
return resultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(results_);
}
}
/**
 * Appends a default-initialized result and returns its builder.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public com.google.cloud.speech.v1.StreamingRecognitionResult.Builder addResultsBuilder() {
  return getResultsFieldBuilder()
      .addBuilder(com.google.cloud.speech.v1.StreamingRecognitionResult.getDefaultInstance());
}
/**
 * Inserts a default-initialized result at {@code index} and returns its
 * builder.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
public com.google.cloud.speech.v1.StreamingRecognitionResult.Builder addResultsBuilder(
    int index) {
  return getResultsFieldBuilder()
      .addBuilder(
          index, com.google.cloud.speech.v1.StreamingRecognitionResult.getDefaultInstance());
}
/**
 * Returns builders for every element of the repeated `results` field; forces
 * creation of the repeated-field builder.
 *
 * <code>repeated .google.cloud.speech.v1.StreamingRecognitionResult results = 2;</code>
 */
// NOTE(review): return type restored from raw java.util.List (generics were
// stripped in extraction) to the parameterized type emitted by protoc.
public java.util.List<com.google.cloud.speech.v1.StreamingRecognitionResult.Builder>
    getResultsBuilderList() {
  return getResultsFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder for `results`; once created it
// owns the list (ownership bit 0x00000002 transfers) and results_ is nulled.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.speech.v1.StreamingRecognitionResult,
        com.google.cloud.speech.v1.StreamingRecognitionResult.Builder,
        com.google.cloud.speech.v1.StreamingRecognitionResultOrBuilder>
    getResultsFieldBuilder() {
  if (resultsBuilder_ == null) {
    resultsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.speech.v1.StreamingRecognitionResult,
            com.google.cloud.speech.v1.StreamingRecognitionResult.Builder,
            com.google.cloud.speech.v1.StreamingRecognitionResultOrBuilder>(
            results_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
    results_ = null;
  }
  return resultsBuilder_;
}
// Field 4: `speech_event_type` — stored as the raw wire enum number so
// unknown values survive a parse/serialize round trip.
private int speechEventType_ = 0;
/**
 * Indicates the type of speech event.
 *
 * <code>.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType speech_event_type = 4;</code>
 *
 * @return The enum numeric value on the wire for speechEventType.
 */
@java.lang.Override
public int getSpeechEventTypeValue() {
  return speechEventType_;
}
/**
 * Sets the speech event type from its raw wire number (accepts values with
 * no matching enum constant).
 *
 * <code>.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType speech_event_type = 4;</code>
 *
 * @param value The enum numeric value on the wire for speechEventType to set.
 * @return This builder for chaining.
 */
public Builder setSpeechEventTypeValue(int value) {
  speechEventType_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * Indicates the type of speech event.
 *
 * <code>.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType speech_event_type = 4;</code>
 *
 * @return The speechEventType, or UNRECOGNIZED if the stored wire number has
 *     no matching constant.
 */
@java.lang.Override
public com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType
    getSpeechEventType() {
  com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType result =
      com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType.forNumber(
          speechEventType_);
  return result == null
      ? com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType.UNRECOGNIZED
      : result;
}
/**
 * Sets the speech event type.
 *
 * <code>.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType speech_event_type = 4;</code>
 *
 * @param value The speechEventType to set.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null
 */
public Builder setSpeechEventType(
    com.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
  speechEventType_ = value.getNumber();
  onChanged();
  return this;
}
/**
 * Resets the speech event type to the default (0).
 *
 * <code>.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType speech_event_type = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearSpeechEventType() {
  bitField0_ = (bitField0_ & ~0x00000004);
  speechEventType_ = 0;
  onChanged();
  return this;
}
// Field 8: `speech_event_time` — held directly until a field builder is
// created, after which speechEventTimeBuilder_ owns the value.
private com.google.protobuf.Duration speechEventTime_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    speechEventTimeBuilder_;
/**
 * <pre>
 * Time offset between the beginning of the audio and event emission.
 * </pre>
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 *
 * @return Whether the speechEventTime field is set.
 */
public boolean hasSpeechEventTime() {
  return ((bitField0_ & 0x00000008) != 0);
}
/**
 * <pre>
 * Time offset between the beginning of the audio and event emission.
 * </pre>
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 *
 * @return The speechEventTime, or the default instance if unset.
 */
public com.google.protobuf.Duration getSpeechEventTime() {
  if (speechEventTimeBuilder_ == null) {
    return speechEventTime_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : speechEventTime_;
  } else {
    return speechEventTimeBuilder_.getMessage();
  }
}
/**
 * Sets the speech event time.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setSpeechEventTime(com.google.protobuf.Duration value) {
  if (speechEventTimeBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    speechEventTime_ = value;
  } else {
    speechEventTimeBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 * Sets the speech event time from a builder.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 */
public Builder setSpeechEventTime(com.google.protobuf.Duration.Builder builderForValue) {
  if (speechEventTimeBuilder_ == null) {
    speechEventTime_ = builderForValue.build();
  } else {
    speechEventTimeBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 * Merges {@code value} into the current speech event time: merged field-wise
 * when already set and non-default, otherwise replaced.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 */
public Builder mergeSpeechEventTime(com.google.protobuf.Duration value) {
  if (speechEventTimeBuilder_ == null) {
    if (((bitField0_ & 0x00000008) != 0)
        && speechEventTime_ != null
        && speechEventTime_ != com.google.protobuf.Duration.getDefaultInstance()) {
      getSpeechEventTimeBuilder().mergeFrom(value);
    } else {
      speechEventTime_ = value;
    }
  } else {
    speechEventTimeBuilder_.mergeFrom(value);
  }
  if (speechEventTime_ != null) {
    bitField0_ |= 0x00000008;
    onChanged();
  }
  return this;
}
/**
 * Clears the speech event time and disposes any nested builder.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 */
public Builder clearSpeechEventTime() {
  bitField0_ = (bitField0_ & ~0x00000008);
  speechEventTime_ = null;
  if (speechEventTimeBuilder_ != null) {
    speechEventTimeBuilder_.dispose();
    speechEventTimeBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Returns a builder for the speech event time, marking the field present.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 */
public com.google.protobuf.Duration.Builder getSpeechEventTimeBuilder() {
  bitField0_ |= 0x00000008;
  onChanged();
  return getSpeechEventTimeFieldBuilder().getBuilder();
}
/**
 * Returns a read-only view of the speech event time without forcing builder
 * creation.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 */
public com.google.protobuf.DurationOrBuilder getSpeechEventTimeOrBuilder() {
  if (speechEventTimeBuilder_ != null) {
    return speechEventTimeBuilder_.getMessageOrBuilder();
  } else {
    return speechEventTime_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : speechEventTime_;
  }
}
/**
 * Lazily creates the single-field builder for speech_event_time; once
 * created, the builder owns the value and {@code speechEventTime_} is nulled.
 *
 * <code>.google.protobuf.Duration speech_event_time = 8;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    getSpeechEventTimeFieldBuilder() {
  if (speechEventTimeBuilder_ == null) {
    speechEventTimeBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>(
            getSpeechEventTime(), getParentForChildren(), isClean());
    speechEventTime_ = null;
  }
  return speechEventTimeBuilder_;
}
private com.google.protobuf.Duration totalBilledTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
totalBilledTimeBuilder_;
/**
*
*
*
* When available, billed audio seconds for the stream.
* Set only if this is the last response in the stream.
*
*
* .google.protobuf.Duration total_billed_time = 5;
*
* @return Whether the totalBilledTime field is set.
*/
public boolean hasTotalBilledTime() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
*
*
*
* When available, billed audio seconds for the stream.
* Set only if this is the last response in the stream.
*
*
* .google.protobuf.Duration total_billed_time = 5;
*
* @return The totalBilledTime.
*/
public com.google.protobuf.Duration getTotalBilledTime() {
if (totalBilledTimeBuilder_ == null) {
return totalBilledTime_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: totalBilledTime_;
} else {
return totalBilledTimeBuilder_.getMessage();
}
}
/**
*
*
*
* When available, billed audio seconds for the stream.
* Set only if this is the last response in the stream.
*
*
* .google.protobuf.Duration total_billed_time = 5;
*/
public Builder setTotalBilledTime(com.google.protobuf.Duration value) {
if (totalBilledTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
totalBilledTime_ = value;
} else {
totalBilledTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
*
* When available, billed audio seconds for the stream.
* Set only if this is the last response in the stream.
*
*
* .google.protobuf.Duration total_billed_time = 5;
*/
public Builder setTotalBilledTime(com.google.protobuf.Duration.Builder builderForValue) {
if (totalBilledTimeBuilder_ == null) {
totalBilledTime_ = builderForValue.build();
} else {
totalBilledTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
*
* When available, billed audio seconds for the stream.
* Set only if this is the last response in the stream.
*
*
* .google.protobuf.Duration total_billed_time = 5;
*/
public Builder mergeTotalBilledTime(com.google.protobuf.Duration value) {
if (totalBilledTimeBuilder_ == null) {
if (((bitField0_ & 0x00000010) != 0)
&& totalBilledTime_ != null
&& totalBilledTime_ != com.google.protobuf.Duration.getDefaultInstance()) {
getTotalBilledTimeBuilder().mergeFrom(value);
} else {
totalBilledTime_ = value;
}
} else {
totalBilledTimeBuilder_.mergeFrom(value);
}
if (totalBilledTime_ != null) {
bitField0_ |= 0x00000010;
onChanged();
}
return this;
}
/**
 * <pre>
 * When available, billed audio seconds for the stream.
 * Set only if this is the last response in the stream.
 * </pre>
 *
 * <code>.google.protobuf.Duration total_billed_time = 5;</code>
 *
 * @return this builder for chaining
 */
public Builder clearTotalBilledTime() {
  // Release the nested builder (if any), drop the stored message, and clear
  // the has-bit; these three resets are independent of one another.
  if (totalBilledTimeBuilder_ != null) {
    totalBilledTimeBuilder_.dispose();
    totalBilledTimeBuilder_ = null;
  }
  totalBilledTime_ = null;
  bitField0_ = (bitField0_ & ~0x00000010);
  onChanged();
  return this;
}
/**
 * <pre>
 * When available, billed audio seconds for the stream.
 * Set only if this is the last response in the stream.
 * </pre>
 *
 * <code>.google.protobuf.Duration total_billed_time = 5;</code>
 *
 * @return a mutable builder for the field (marks the field as set)
 */
public com.google.protobuf.Duration.Builder getTotalBilledTimeBuilder() {
  // Handing out a mutable builder counts as setting the field.
  bitField0_ |= 0x00000010;
  onChanged();
  com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.Duration,
          com.google.protobuf.Duration.Builder,
          com.google.protobuf.DurationOrBuilder>
      fieldBuilder = getTotalBilledTimeFieldBuilder();
  return fieldBuilder.getBuilder();
}
/**
 * <pre>
 * When available, billed audio seconds for the stream.
 * Set only if this is the last response in the stream.
 * </pre>
 *
 * <code>.google.protobuf.Duration total_billed_time = 5;</code>
 *
 * @return a read view of the field without forcing builder creation
 */
public com.google.protobuf.DurationOrBuilder getTotalBilledTimeOrBuilder() {
  if (totalBilledTimeBuilder_ == null) {
    com.google.protobuf.Duration current = totalBilledTime_;
    return current == null ? com.google.protobuf.Duration.getDefaultInstance() : current;
  }
  return totalBilledTimeBuilder_.getMessageOrBuilder();
}
/**
 * <pre>
 * When available, billed audio seconds for the stream.
 * Set only if this is the last response in the stream.
 * </pre>
 *
 * <code>.google.protobuf.Duration total_billed_time = 5;</code>
 *
 * @return the lazily created single-field builder for this field
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    getTotalBilledTimeFieldBuilder() {
  if (totalBilledTimeBuilder_ == null) {
    // Seed the builder with the current value (getTotalBilledTime() reads
    // totalBilledTime_ here, so it must run BEFORE the field is nulled);
    // afterwards the builder is the single source of truth for the field.
    totalBilledTimeBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>(
            getTotalBilledTime(), getParentForChildren(), isClean());
    totalBilledTime_ = null;
  }
  return totalBilledTimeBuilder_;
}
// Stored message for speech_adaptation_info; null while unset or while the
// nested builder below owns the value.
private com.google.cloud.speech.v1.SpeechAdaptationInfo speechAdaptationInfo_;
// Lazily created nested builder for speech_adaptation_info (see
// getSpeechAdaptationInfoFieldBuilder()); null until first builder access.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.speech.v1.SpeechAdaptationInfo,
        com.google.cloud.speech.v1.SpeechAdaptationInfo.Builder,
        com.google.cloud.speech.v1.SpeechAdaptationInfoOrBuilder>
    speechAdaptationInfoBuilder_;
/**
 * <pre>
 * Provides information on adaptation behavior in response
 * </pre>
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @return Whether the speechAdaptationInfo field is set.
 */
public boolean hasSpeechAdaptationInfo() {
  // Bit 0x20 of bitField0_ tracks presence of this field.
  return (bitField0_ & 0x00000020) != 0;
}
/**
 * <pre>
 * Provides information on adaptation behavior in response
 * </pre>
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @return The speechAdaptationInfo.
 */
public com.google.cloud.speech.v1.SpeechAdaptationInfo getSpeechAdaptationInfo() {
  // Prefer the nested builder's live view; otherwise map an unset (null)
  // field to the default instance.
  if (speechAdaptationInfoBuilder_ != null) {
    return speechAdaptationInfoBuilder_.getMessage();
  }
  com.google.cloud.speech.v1.SpeechAdaptationInfo current = speechAdaptationInfo_;
  return current == null
      ? com.google.cloud.speech.v1.SpeechAdaptationInfo.getDefaultInstance()
      : current;
}
/**
 * <pre>
 * Provides information on adaptation behavior in response
 * </pre>
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @param value the adaptation info to store; must not be null
 * @return this builder for chaining
 */
public Builder setSpeechAdaptationInfo(com.google.cloud.speech.v1.SpeechAdaptationInfo value) {
  if (speechAdaptationInfoBuilder_ != null) {
    // The field builder performs its own null check.
    speechAdaptationInfoBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    speechAdaptationInfo_ = value;
  }
  bitField0_ |= 0x00000020;
  onChanged();
  return this;
}
/**
 * <pre>
 * Provides information on adaptation behavior in response
 * </pre>
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @param builderForValue builder whose built message becomes the field value
 * @return this builder for chaining
 */
public Builder setSpeechAdaptationInfo(
    com.google.cloud.speech.v1.SpeechAdaptationInfo.Builder builderForValue) {
  com.google.cloud.speech.v1.SpeechAdaptationInfo built = builderForValue.build();
  if (speechAdaptationInfoBuilder_ != null) {
    speechAdaptationInfoBuilder_.setMessage(built);
  } else {
    speechAdaptationInfo_ = built;
  }
  bitField0_ |= 0x00000020;
  onChanged();
  return this;
}
/**
 * <pre>
 * Provides information on adaptation behavior in response
 * </pre>
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @param value the adaptation info to merge into the current field value
 * @return this builder for chaining
 */
public Builder mergeSpeechAdaptationInfo(
    com.google.cloud.speech.v1.SpeechAdaptationInfo value) {
  if (speechAdaptationInfoBuilder_ == null) {
    // Merge field-by-field only when the field is already set to a real
    // (non-default) message; the `!=` is a deliberate identity comparison
    // against the shared default-instance singleton.
    if (((bitField0_ & 0x00000020) != 0)
        && speechAdaptationInfo_ != null
        && speechAdaptationInfo_
            != com.google.cloud.speech.v1.SpeechAdaptationInfo.getDefaultInstance()) {
      getSpeechAdaptationInfoBuilder().mergeFrom(value);
    } else {
      // Unset or default: plain replacement is equivalent to a merge.
      speechAdaptationInfo_ = value;
    }
  } else {
    speechAdaptationInfoBuilder_.mergeFrom(value);
  }
  // When the nested builder is active, speechAdaptationInfo_ is null and the
  // builder propagates change notifications itself, so this guard only
  // fires in the message-stored case.
  if (speechAdaptationInfo_ != null) {
    bitField0_ |= 0x00000020;
    onChanged();
  }
  return this;
}
/**
 * <pre>
 * Provides information on adaptation behavior in response
 * </pre>
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @return this builder for chaining
 */
public Builder clearSpeechAdaptationInfo() {
  // Release the nested builder (if any), drop the stored message, and clear
  // the has-bit; these three resets are independent of one another.
  if (speechAdaptationInfoBuilder_ != null) {
    speechAdaptationInfoBuilder_.dispose();
    speechAdaptationInfoBuilder_ = null;
  }
  speechAdaptationInfo_ = null;
  bitField0_ = (bitField0_ & ~0x00000020);
  onChanged();
  return this;
}
/**
 * <pre>
 * Provides information on adaptation behavior in response
 * </pre>
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @return a mutable builder for the field (marks the field as set)
 */
public com.google.cloud.speech.v1.SpeechAdaptationInfo.Builder
    getSpeechAdaptationInfoBuilder() {
  // Handing out a mutable builder counts as setting the field.
  bitField0_ |= 0x00000020;
  onChanged();
  com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.speech.v1.SpeechAdaptationInfo,
          com.google.cloud.speech.v1.SpeechAdaptationInfo.Builder,
          com.google.cloud.speech.v1.SpeechAdaptationInfoOrBuilder>
      fieldBuilder = getSpeechAdaptationInfoFieldBuilder();
  return fieldBuilder.getBuilder();
}
/**
 * <pre>
 * Provides information on adaptation behavior in response
 * </pre>
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @return a read view of the field without forcing builder creation
 */
public com.google.cloud.speech.v1.SpeechAdaptationInfoOrBuilder
    getSpeechAdaptationInfoOrBuilder() {
  if (speechAdaptationInfoBuilder_ == null) {
    com.google.cloud.speech.v1.SpeechAdaptationInfo current = speechAdaptationInfo_;
    return current == null
        ? com.google.cloud.speech.v1.SpeechAdaptationInfo.getDefaultInstance()
        : current;
  }
  return speechAdaptationInfoBuilder_.getMessageOrBuilder();
}
/**
 * <pre>
 * Provides information on adaptation behavior in response
 * </pre>
 *
 * <code>.google.cloud.speech.v1.SpeechAdaptationInfo speech_adaptation_info = 9;</code>
 *
 * @return the lazily created single-field builder for this field
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.speech.v1.SpeechAdaptationInfo,
        com.google.cloud.speech.v1.SpeechAdaptationInfo.Builder,
        com.google.cloud.speech.v1.SpeechAdaptationInfoOrBuilder>
    getSpeechAdaptationInfoFieldBuilder() {
  if (speechAdaptationInfoBuilder_ == null) {
    // Seed the builder with the current value (getSpeechAdaptationInfo()
    // reads speechAdaptationInfo_ here, so it must run BEFORE the field is
    // nulled); afterwards the builder is the single source of truth.
    speechAdaptationInfoBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.speech.v1.SpeechAdaptationInfo,
            com.google.cloud.speech.v1.SpeechAdaptationInfo.Builder,
            com.google.cloud.speech.v1.SpeechAdaptationInfoOrBuilder>(
            getSpeechAdaptationInfo(), getParentForChildren(), isClean());
    speechAdaptationInfo_ = null;
  }
  return speechAdaptationInfoBuilder_;
}
// Value of the request_id field (proto3 int64); 0L until explicitly set.
private long requestId_;
/**
 * <pre>
 * The ID associated with the request. This is a unique ID specific only to
 * the given request.
 * </pre>
 *
 * <code>int64 request_id = 10;</code>
 *
 * @return The requestId.
 */
@java.lang.Override
public long getRequestId() {
  return requestId_;
}
/**
 * <pre>
 * The ID associated with the request. This is a unique ID specific only to
 * the given request.
 * </pre>
 *
 * <code>int64 request_id = 10;</code>
 *
 * @param value The requestId to set.
 * @return This builder for chaining.
 */
public Builder setRequestId(long value) {
  // Record presence (bit 0x40), store the value, then notify listeners.
  bitField0_ |= 0x00000040;
  requestId_ = value;
  onChanged();
  return this;
}
/**
 * <pre>
 * The ID associated with the request. This is a unique ID specific only to
 * the given request.
 * </pre>
 *
 * <code>int64 request_id = 10;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearRequestId() {
  // Reset the value to the proto3 default and drop the presence bit.
  requestId_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000040);
  onChanged();
  return this;
}
// Replaces this message's unknown fields; delegates to the generated
// superclass implementation unchanged.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
// Merges additional unknown fields into this message; delegates to the
// generated superclass implementation unchanged.
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.speech.v1.StreamingRecognizeResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognizeResponse)
// Shared immutable default instance, created once at class-load time and
// returned by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.speech.v1.StreamingRecognizeResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.speech.v1.StreamingRecognizeResponse();
}
/**
 * Returns the singleton default (all-fields-unset) instance of
 * {@code StreamingRecognizeResponse}.
 */
public static com.google.cloud.speech.v1.StreamingRecognizeResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
/**
 * Stateless parser for {@code StreamingRecognizeResponse} messages.
 *
 * <p>NOTE(review): the extracted text had raw {@code Parser}/{@code AbstractParser}
 * types — the generic type arguments were stripped by HTML extraction. They are
 * restored here to match protoc's generated output; no behavior change.
 */
private static final com.google.protobuf.Parser<StreamingRecognizeResponse> PARSER =
    new com.google.protobuf.AbstractParser<StreamingRecognizeResponse>() {
      @java.lang.Override
      public StreamingRecognizeResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap generic I/O failures in the protobuf-specific exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/**
 * Returns the shared parser for {@code StreamingRecognizeResponse}.
 *
 * <p>NOTE(review): return type re-parameterized — the {@code <StreamingRecognizeResponse>}
 * type argument was stripped by HTML extraction; restored to match protoc output.
 */
public static com.google.protobuf.Parser<StreamingRecognizeResponse> parser() {
  return PARSER;
}
/**
 * Returns the shared parser for this message type.
 *
 * <p>NOTE(review): return type re-parameterized — the {@code <StreamingRecognizeResponse>}
 * type argument was stripped by HTML extraction; restored to match protoc output.
 */
@java.lang.Override
public com.google.protobuf.Parser<StreamingRecognizeResponse> getParserForType() {
  return PARSER;
}
// Instance-level accessor for the shared default instance, required by the
// MessageLite contract.
@java.lang.Override
public com.google.cloud.speech.v1.StreamingRecognizeResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// © 2015 - 2024 Weber Informatics LLC | Privacy Policy
// (site footer captured during HTML extraction; not part of the generated source)