com.google.cloud.speech.v1.RecognitionMetadata Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of proto-google-cloud-speech-v1 Show documentation
PROTO library for proto-google-cloud-speech-v1
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v1/cloud_speech.proto
// Protobuf Java Version: 3.25.2
package com.google.cloud.speech.v1;
/**
*
*
*
* Description of audio data to be recognized.
*
*
* Protobuf type {@code google.cloud.speech.v1.RecognitionMetadata}
*/
@java.lang.Deprecated
public final class RecognitionMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.speech.v1.RecognitionMetadata)
RecognitionMetadataOrBuilder {
private static final long serialVersionUID = 0L;
// Use RecognitionMetadata.newBuilder() to construct.
// NOTE: the wildcard type argument below was stripped during extraction ("Builder>") and has
// been restored; protoc emits GeneratedMessageV3.Builder<?> for this constructor.
private RecognitionMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Initializes every field to its proto3 default: 0 for enum-backed ints, "" for strings.
private RecognitionMetadata() {
  interactionType_ = 0;
  microphoneDistance_ = 0;
  originalMediaType_ = 0;
  recordingDeviceType_ = 0;
  recordingDeviceName_ = "";
  originalMimeType_ = "";
  audioTopic_ = "";
}
// Invoked reflectively by the protobuf runtime when it needs a fresh instance (e.g. during
// parsing); the parameter exists only to disambiguate the overload and is never read.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new RecognitionMetadata();
}
// Returns the message descriptor for google.cloud.speech.v1.RecognitionMetadata, as
// registered by the generated SpeechProto file descriptor.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.speech.v1.SpeechProto
      .internal_static_google_cloud_speech_v1_RecognitionMetadata_descriptor;
}
// Supplies the reflection table mapping proto field numbers to the generated Java
// accessors; lazily initialized on first use by ensureFieldAccessorsInitialized.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.speech.v1.SpeechProto
      .internal_static_google_cloud_speech_v1_RecognitionMetadata_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.speech.v1.RecognitionMetadata.class,
          com.google.cloud.speech.v1.RecognitionMetadata.Builder.class);
}
/**
 * Use case categories that the audio recognition request can be described by.
 *
 * Protobuf enum {@code google.cloud.speech.v1.RecognitionMetadata.InteractionType}
 */
public enum InteractionType implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * Use case is either unknown or is something other than one of the other values below.
   *
   * <code>INTERACTION_TYPE_UNSPECIFIED = 0;</code>
   */
  INTERACTION_TYPE_UNSPECIFIED(0),
  /**
   * Multiple people in a conversation or discussion. For example in a meeting with two or more
   * people actively participating. Typically all the primary people speaking would be in the
   * same room (if not, see PHONE_CALL).
   *
   * <code>DISCUSSION = 1;</code>
   */
  DISCUSSION(1),
  /**
   * One or more persons lecturing or presenting to others, mostly uninterrupted.
   *
   * <code>PRESENTATION = 2;</code>
   */
  PRESENTATION(2),
  /**
   * A phone-call or video-conference in which two or more people, who are not in the same room,
   * are actively participating.
   *
   * <code>PHONE_CALL = 3;</code>
   */
  PHONE_CALL(3),
  /**
   * A recorded message intended for another person to listen to.
   *
   * <code>VOICEMAIL = 4;</code>
   */
  VOICEMAIL(4),
  /**
   * Professionally produced audio (eg. TV Show, Podcast).
   *
   * <code>PROFESSIONALLY_PRODUCED = 5;</code>
   */
  PROFESSIONALLY_PRODUCED(5),
  /**
   * Transcribe spoken questions and queries into text.
   *
   * <code>VOICE_SEARCH = 6;</code>
   */
  VOICE_SEARCH(6),
  /**
   * Transcribe voice commands, such as for controlling a device.
   *
   * <code>VOICE_COMMAND = 7;</code>
   */
  VOICE_COMMAND(7),
  /**
   * Transcribe speech to text to create a written document, such as a text-message, email or
   * report.
   *
   * <code>DICTATION = 8;</code>
   */
  DICTATION(8),
  /** A wire value not known to this version of the generated code. */
  UNRECOGNIZED(-1),
  ;

  /** Numeric wire value of {@link #INTERACTION_TYPE_UNSPECIFIED}. */
  public static final int INTERACTION_TYPE_UNSPECIFIED_VALUE = 0;
  /** Numeric wire value of {@link #DISCUSSION}. */
  public static final int DISCUSSION_VALUE = 1;
  /** Numeric wire value of {@link #PRESENTATION}. */
  public static final int PRESENTATION_VALUE = 2;
  /** Numeric wire value of {@link #PHONE_CALL}. */
  public static final int PHONE_CALL_VALUE = 3;
  /** Numeric wire value of {@link #VOICEMAIL}. */
  public static final int VOICEMAIL_VALUE = 4;
  /** Numeric wire value of {@link #PROFESSIONALLY_PRODUCED}. */
  public static final int PROFESSIONALLY_PRODUCED_VALUE = 5;
  /** Numeric wire value of {@link #VOICE_SEARCH}. */
  public static final int VOICE_SEARCH_VALUE = 6;
  /** Numeric wire value of {@link #VOICE_COMMAND}. */
  public static final int VOICE_COMMAND_VALUE = 7;
  /** Numeric wire value of {@link #DICTATION}. */
  public static final int DICTATION_VALUE = 8;

  /**
   * Returns the numeric wire value of this enum entry.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}, which carries
   *     no single wire value.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static InteractionType valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or {@code null} if unknown.
   */
  public static InteractionType forNumber(int value) {
    switch (value) {
      case 0:
        return INTERACTION_TYPE_UNSPECIFIED;
      case 1:
        return DISCUSSION;
      case 2:
        return PRESENTATION;
      case 3:
        return PHONE_CALL;
      case 4:
        return VOICEMAIL;
      case 5:
        return PROFESSIONALLY_PRODUCED;
      case 6:
        return VOICE_SEARCH;
      case 7:
        return VOICE_COMMAND;
      case 8:
        return DICTATION;
      default:
        return null;
    }
  }

  /** Returns the map the protobuf runtime uses to resolve wire values to entries. */
  public static com.google.protobuf.Internal.EnumLiteMap<InteractionType> internalGetValueMap() {
    return internalValueMap;
  }

  // Generic type arguments below were stripped to raw types during extraction and have been
  // restored to the form protoc emits.
  private static final com.google.protobuf.Internal.EnumLiteMap<InteractionType>
      internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<InteractionType>() {
            public InteractionType findValueByNumber(int number) {
              return InteractionType.forNumber(number);
            }
          };

  /**
   * Returns the value descriptor for this entry.
   *
   * @throws java.lang.IllegalStateException if this is {@link #UNRECOGNIZED}.
   */
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.speech.v1.RecognitionMetadata.getDescriptor().getEnumTypes().get(0);
  }

  private static final InteractionType[] VALUES = values();

  /**
   * Returns the enum entry for the given value descriptor; descriptor index -1 maps to
   * {@link #UNRECOGNIZED}.
   *
   * @throws java.lang.IllegalArgumentException if the descriptor belongs to another enum type.
   */
  public static InteractionType valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private InteractionType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1.RecognitionMetadata.InteractionType)
}
/**
 * Enumerates the types of capture settings describing an audio file.
 *
 * Protobuf enum {@code google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance}
 */
public enum MicrophoneDistance implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * Audio type is not known.
   *
   * <code>MICROPHONE_DISTANCE_UNSPECIFIED = 0;</code>
   */
  MICROPHONE_DISTANCE_UNSPECIFIED(0),
  /**
   * The audio was captured from a closely placed microphone. Eg. phone, dictaphone, or handheld
   * microphone. Generally if the speaker is within 1 meter of the microphone.
   *
   * <code>NEARFIELD = 1;</code>
   */
  NEARFIELD(1),
  /**
   * The speaker is within 3 meters of the microphone.
   *
   * <code>MIDFIELD = 2;</code>
   */
  MIDFIELD(2),
  /**
   * The speaker is more than 3 meters away from the microphone.
   *
   * <code>FARFIELD = 3;</code>
   */
  FARFIELD(3),
  /** A wire value not known to this version of the generated code. */
  UNRECOGNIZED(-1),
  ;

  /** Numeric wire value of {@link #MICROPHONE_DISTANCE_UNSPECIFIED}. */
  public static final int MICROPHONE_DISTANCE_UNSPECIFIED_VALUE = 0;
  /** Numeric wire value of {@link #NEARFIELD}. */
  public static final int NEARFIELD_VALUE = 1;
  /** Numeric wire value of {@link #MIDFIELD}. */
  public static final int MIDFIELD_VALUE = 2;
  /** Numeric wire value of {@link #FARFIELD}. */
  public static final int FARFIELD_VALUE = 3;

  /**
   * Returns the numeric wire value of this enum entry.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}, which carries
   *     no single wire value.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static MicrophoneDistance valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or {@code null} if unknown.
   */
  public static MicrophoneDistance forNumber(int value) {
    switch (value) {
      case 0:
        return MICROPHONE_DISTANCE_UNSPECIFIED;
      case 1:
        return NEARFIELD;
      case 2:
        return MIDFIELD;
      case 3:
        return FARFIELD;
      default:
        return null;
    }
  }

  /** Returns the map the protobuf runtime uses to resolve wire values to entries. */
  public static com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance>
      internalGetValueMap() {
    return internalValueMap;
  }

  // Generic type arguments below were stripped to raw types during extraction and have been
  // restored to the form protoc emits.
  private static final com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance>
      internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance>() {
            public MicrophoneDistance findValueByNumber(int number) {
              return MicrophoneDistance.forNumber(number);
            }
          };

  /**
   * Returns the value descriptor for this entry.
   *
   * @throws java.lang.IllegalStateException if this is {@link #UNRECOGNIZED}.
   */
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.speech.v1.RecognitionMetadata.getDescriptor().getEnumTypes().get(1);
  }

  private static final MicrophoneDistance[] VALUES = values();

  /**
   * Returns the enum entry for the given value descriptor; descriptor index -1 maps to
   * {@link #UNRECOGNIZED}.
   *
   * @throws java.lang.IllegalArgumentException if the descriptor belongs to another enum type.
   */
  public static MicrophoneDistance valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private MicrophoneDistance(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance)
}
/**
 * The original media the speech was recorded on.
 *
 * Protobuf enum {@code google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType}
 */
public enum OriginalMediaType implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * Unknown original media type.
   *
   * <code>ORIGINAL_MEDIA_TYPE_UNSPECIFIED = 0;</code>
   */
  ORIGINAL_MEDIA_TYPE_UNSPECIFIED(0),
  /**
   * The speech data is an audio recording.
   *
   * <code>AUDIO = 1;</code>
   */
  AUDIO(1),
  /**
   * The speech data originally recorded on a video.
   *
   * <code>VIDEO = 2;</code>
   */
  VIDEO(2),
  /** A wire value not known to this version of the generated code. */
  UNRECOGNIZED(-1),
  ;

  /** Numeric wire value of {@link #ORIGINAL_MEDIA_TYPE_UNSPECIFIED}. */
  public static final int ORIGINAL_MEDIA_TYPE_UNSPECIFIED_VALUE = 0;
  /** Numeric wire value of {@link #AUDIO}. */
  public static final int AUDIO_VALUE = 1;
  /** Numeric wire value of {@link #VIDEO}. */
  public static final int VIDEO_VALUE = 2;

  /**
   * Returns the numeric wire value of this enum entry.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}, which carries
   *     no single wire value.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static OriginalMediaType valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or {@code null} if unknown.
   */
  public static OriginalMediaType forNumber(int value) {
    switch (value) {
      case 0:
        return ORIGINAL_MEDIA_TYPE_UNSPECIFIED;
      case 1:
        return AUDIO;
      case 2:
        return VIDEO;
      default:
        return null;
    }
  }

  /** Returns the map the protobuf runtime uses to resolve wire values to entries. */
  public static com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType>
      internalGetValueMap() {
    return internalValueMap;
  }

  // Generic type arguments below were stripped to raw types during extraction and have been
  // restored to the form protoc emits.
  private static final com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType>
      internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType>() {
            public OriginalMediaType findValueByNumber(int number) {
              return OriginalMediaType.forNumber(number);
            }
          };

  /**
   * Returns the value descriptor for this entry.
   *
   * @throws java.lang.IllegalStateException if this is {@link #UNRECOGNIZED}.
   */
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.speech.v1.RecognitionMetadata.getDescriptor().getEnumTypes().get(2);
  }

  private static final OriginalMediaType[] VALUES = values();

  /**
   * Returns the enum entry for the given value descriptor; descriptor index -1 maps to
   * {@link #UNRECOGNIZED}.
   *
   * @throws java.lang.IllegalArgumentException if the descriptor belongs to another enum type.
   */
  public static OriginalMediaType valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private OriginalMediaType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType)
}
/**
 * The type of device the speech was recorded with.
 *
 * Protobuf enum {@code google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType}
 */
public enum RecordingDeviceType implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * The recording device is unknown.
   *
   * <code>RECORDING_DEVICE_TYPE_UNSPECIFIED = 0;</code>
   */
  RECORDING_DEVICE_TYPE_UNSPECIFIED(0),
  /**
   * Speech was recorded on a smartphone.
   *
   * <code>SMARTPHONE = 1;</code>
   */
  SMARTPHONE(1),
  /**
   * Speech was recorded using a personal computer or tablet.
   *
   * <code>PC = 2;</code>
   */
  PC(2),
  /**
   * Speech was recorded over a phone line.
   *
   * <code>PHONE_LINE = 3;</code>
   */
  PHONE_LINE(3),
  /**
   * Speech was recorded in a vehicle.
   *
   * <code>VEHICLE = 4;</code>
   */
  VEHICLE(4),
  /**
   * Speech was recorded outdoors.
   *
   * <code>OTHER_OUTDOOR_DEVICE = 5;</code>
   */
  OTHER_OUTDOOR_DEVICE(5),
  /**
   * Speech was recorded indoors.
   *
   * <code>OTHER_INDOOR_DEVICE = 6;</code>
   */
  OTHER_INDOOR_DEVICE(6),
  /** A wire value not known to this version of the generated code. */
  UNRECOGNIZED(-1),
  ;

  /** Numeric wire value of {@link #RECORDING_DEVICE_TYPE_UNSPECIFIED}. */
  public static final int RECORDING_DEVICE_TYPE_UNSPECIFIED_VALUE = 0;
  /** Numeric wire value of {@link #SMARTPHONE}. */
  public static final int SMARTPHONE_VALUE = 1;
  /** Numeric wire value of {@link #PC}. */
  public static final int PC_VALUE = 2;
  /** Numeric wire value of {@link #PHONE_LINE}. */
  public static final int PHONE_LINE_VALUE = 3;
  /** Numeric wire value of {@link #VEHICLE}. */
  public static final int VEHICLE_VALUE = 4;
  /** Numeric wire value of {@link #OTHER_OUTDOOR_DEVICE}. */
  public static final int OTHER_OUTDOOR_DEVICE_VALUE = 5;
  /** Numeric wire value of {@link #OTHER_INDOOR_DEVICE}. */
  public static final int OTHER_INDOOR_DEVICE_VALUE = 6;

  /**
   * Returns the numeric wire value of this enum entry.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}, which carries
   *     no single wire value.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static RecordingDeviceType valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or {@code null} if unknown.
   */
  public static RecordingDeviceType forNumber(int value) {
    switch (value) {
      case 0:
        return RECORDING_DEVICE_TYPE_UNSPECIFIED;
      case 1:
        return SMARTPHONE;
      case 2:
        return PC;
      case 3:
        return PHONE_LINE;
      case 4:
        return VEHICLE;
      case 5:
        return OTHER_OUTDOOR_DEVICE;
      case 6:
        return OTHER_INDOOR_DEVICE;
      default:
        return null;
    }
  }

  /** Returns the map the protobuf runtime uses to resolve wire values to entries. */
  public static com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType>
      internalGetValueMap() {
    return internalValueMap;
  }

  // Generic type arguments below were stripped to raw types during extraction and have been
  // restored to the form protoc emits.
  private static final com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType>
      internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType>() {
            public RecordingDeviceType findValueByNumber(int number) {
              return RecordingDeviceType.forNumber(number);
            }
          };

  /**
   * Returns the value descriptor for this entry.
   *
   * @throws java.lang.IllegalStateException if this is {@link #UNRECOGNIZED}.
   */
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.speech.v1.RecognitionMetadata.getDescriptor().getEnumTypes().get(3);
  }

  private static final RecordingDeviceType[] VALUES = values();

  /**
   * Returns the enum entry for the given value descriptor; descriptor index -1 maps to
   * {@link #UNRECOGNIZED}.
   *
   * @throws java.lang.IllegalArgumentException if the descriptor belongs to another enum type.
   */
  public static RecordingDeviceType valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private RecordingDeviceType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType)
}
public static final int INTERACTION_TYPE_FIELD_NUMBER = 1;
private int interactionType_ = 0;

/**
 * The use case most closely describing the audio content to be recognized.
 *
 * <code>.google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;</code>
 *
 * @return The enum numeric value on the wire for interactionType.
 */
@java.lang.Override
public int getInteractionTypeValue() {
  return interactionType_;
}

/**
 * The use case most closely describing the audio content to be recognized.
 *
 * <code>.google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;</code>
 *
 * @return The interactionType.
 */
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata.InteractionType getInteractionType() {
  com.google.cloud.speech.v1.RecognitionMetadata.InteractionType mapped =
      com.google.cloud.speech.v1.RecognitionMetadata.InteractionType.forNumber(interactionType_);
  if (mapped != null) {
    return mapped;
  }
  // Wire value not known to this generated code version.
  return com.google.cloud.speech.v1.RecognitionMetadata.InteractionType.UNRECOGNIZED;
}
public static final int INDUSTRY_NAICS_CODE_OF_AUDIO_FIELD_NUMBER = 3;
// proto3 uint32, stored as a Java int; 0 is the unset/default value.
private int industryNaicsCodeOfAudio_ = 0;

/**
 * The industry vertical to which this speech recognition request most
 * closely applies. This is most indicative of the topics contained
 * in the audio. Use the 6-digit NAICS code to identify the industry
 * vertical - see https://www.naics.com/search/.
 *
 * <code>uint32 industry_naics_code_of_audio = 3;</code>
 *
 * @return The industryNaicsCodeOfAudio.
 */
@java.lang.Override
public int getIndustryNaicsCodeOfAudio() {
  return industryNaicsCodeOfAudio_;
}
public static final int MICROPHONE_DISTANCE_FIELD_NUMBER = 4;
private int microphoneDistance_ = 0;

/**
 * The audio type that most closely describes the audio being recognized.
 *
 * <code>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;</code>
 *
 * @return The enum numeric value on the wire for microphoneDistance.
 */
@java.lang.Override
public int getMicrophoneDistanceValue() {
  return microphoneDistance_;
}

/**
 * The audio type that most closely describes the audio being recognized.
 *
 * <code>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;</code>
 *
 * @return The microphoneDistance.
 */
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance getMicrophoneDistance() {
  com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance mapped =
      com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance.forNumber(
          microphoneDistance_);
  if (mapped != null) {
    return mapped;
  }
  // Wire value not known to this generated code version.
  return com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance.UNRECOGNIZED;
}
public static final int ORIGINAL_MEDIA_TYPE_FIELD_NUMBER = 5;
private int originalMediaType_ = 0;

/**
 * The original media the speech was recorded on.
 *
 * <code>.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;</code>
 *
 * @return The enum numeric value on the wire for originalMediaType.
 */
@java.lang.Override
public int getOriginalMediaTypeValue() {
  return originalMediaType_;
}

/**
 * The original media the speech was recorded on.
 *
 * <code>.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;</code>
 *
 * @return The originalMediaType.
 */
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType getOriginalMediaType() {
  com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType mapped =
      com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType.forNumber(
          originalMediaType_);
  if (mapped != null) {
    return mapped;
  }
  // Wire value not known to this generated code version.
  return com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType.UNRECOGNIZED;
}
public static final int RECORDING_DEVICE_TYPE_FIELD_NUMBER = 6;
private int recordingDeviceType_ = 0;

/**
 * The type of device the speech was recorded with.
 *
 * <code>.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;</code>
 *
 * @return The enum numeric value on the wire for recordingDeviceType.
 */
@java.lang.Override
public int getRecordingDeviceTypeValue() {
  return recordingDeviceType_;
}

/**
 * The type of device the speech was recorded with.
 *
 * <code>.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;</code>
 *
 * @return The recordingDeviceType.
 */
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType
    getRecordingDeviceType() {
  com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType mapped =
      com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType.forNumber(
          recordingDeviceType_);
  if (mapped != null) {
    return mapped;
  }
  // Wire value not known to this generated code version.
  return com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType.UNRECOGNIZED;
}
public static final int RECORDING_DEVICE_NAME_FIELD_NUMBER = 7;

// Holds either a String (already decoded) or a ByteString (raw wire bytes); decoded lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object recordingDeviceName_ = "";

/**
 * The device used to make the recording. Examples 'Nexus 5X' or
 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or 'Cardioid Microphone'.
 *
 * <code>string recording_device_name = 7;</code>
 *
 * @return The recordingDeviceName.
 */
@java.lang.Override
public java.lang.String getRecordingDeviceName() {
  java.lang.Object raw = recordingDeviceName_;
  if (!(raw instanceof java.lang.String)) {
    // First access since parsing: decode the UTF-8 bytes once and cache the String.
    java.lang.String decoded = ((com.google.protobuf.ByteString) raw).toStringUtf8();
    recordingDeviceName_ = decoded;
    return decoded;
  }
  return (java.lang.String) raw;
}

/**
 * The device used to make the recording. Examples 'Nexus 5X' or
 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or 'Cardioid Microphone'.
 *
 * <code>string recording_device_name = 7;</code>
 *
 * @return The bytes for recordingDeviceName.
 */
@java.lang.Override
public com.google.protobuf.ByteString getRecordingDeviceNameBytes() {
  java.lang.Object raw = recordingDeviceName_;
  if (!(raw instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) raw;
  }
  // Encode once and cache the ByteString for subsequent byte-level access.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) raw);
  recordingDeviceName_ = encoded;
  return encoded;
}
public static final int ORIGINAL_MIME_TYPE_FIELD_NUMBER = 8;

// Holds either a String (already decoded) or a ByteString (raw wire bytes); decoded lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object originalMimeType_ = "";

/**
 * Mime type of the original audio file. For example `audio/m4a`,
 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
 * A list of possible audio mime types is maintained at
 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
 *
 * <code>string original_mime_type = 8;</code>
 *
 * @return The originalMimeType.
 */
@java.lang.Override
public java.lang.String getOriginalMimeType() {
  java.lang.Object raw = originalMimeType_;
  if (!(raw instanceof java.lang.String)) {
    // First access since parsing: decode the UTF-8 bytes once and cache the String.
    java.lang.String decoded = ((com.google.protobuf.ByteString) raw).toStringUtf8();
    originalMimeType_ = decoded;
    return decoded;
  }
  return (java.lang.String) raw;
}

/**
 * Mime type of the original audio file. For example `audio/m4a`,
 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
 * A list of possible audio mime types is maintained at
 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
 *
 * <code>string original_mime_type = 8;</code>
 *
 * @return The bytes for originalMimeType.
 */
@java.lang.Override
public com.google.protobuf.ByteString getOriginalMimeTypeBytes() {
  java.lang.Object raw = originalMimeType_;
  if (!(raw instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) raw;
  }
  // Encode once and cache the ByteString for subsequent byte-level access.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) raw);
  originalMimeType_ = encoded;
  return encoded;
}
public static final int AUDIO_TOPIC_FIELD_NUMBER = 10;

// Holds either a String (already decoded) or a ByteString (raw wire bytes); decoded lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object audioTopic_ = "";

/**
 * Description of the content. Eg. "Recordings of federal supreme court
 * hearings from 2012".
 *
 * <code>string audio_topic = 10;</code>
 *
 * @return The audioTopic.
 */
@java.lang.Override
public java.lang.String getAudioTopic() {
  java.lang.Object raw = audioTopic_;
  if (!(raw instanceof java.lang.String)) {
    // First access since parsing: decode the UTF-8 bytes once and cache the String.
    java.lang.String decoded = ((com.google.protobuf.ByteString) raw).toStringUtf8();
    audioTopic_ = decoded;
    return decoded;
  }
  return (java.lang.String) raw;
}

/**
 * Description of the content. Eg. "Recordings of federal supreme court
 * hearings from 2012".
 *
 * <code>string audio_topic = 10;</code>
 *
 * @return The bytes for audioTopic.
 */
@java.lang.Override
public com.google.protobuf.ByteString getAudioTopicBytes() {
  java.lang.Object raw = audioTopic_;
  if (!(raw instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) raw;
  }
  // Encode once and cache the ByteString for subsequent byte-level access.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) raw);
  audioTopic_ = encoded;
  return encoded;
}
// Tri-state memo: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

// This proto3 message has no required fields, so it is always initialized; the memo byte
// is kept to match the generated-code contract.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes this message. Per proto3 semantics, a scalar field is written only when it
// differs from its default (enum 0 / uint32 0 / empty string). Fields are emitted in
// field-number order (1, 3, 4, 5, 6, 7, 8, 10 — numbers 2 and 9 are not used by this
// message), followed by any unknown fields preserved from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (interactionType_
      != com.google.cloud.speech.v1.RecognitionMetadata.InteractionType
          .INTERACTION_TYPE_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(1, interactionType_);
  }
  if (industryNaicsCodeOfAudio_ != 0) {
    output.writeUInt32(3, industryNaicsCodeOfAudio_);
  }
  if (microphoneDistance_
      != com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance
          .MICROPHONE_DISTANCE_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(4, microphoneDistance_);
  }
  if (originalMediaType_
      != com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType
          .ORIGINAL_MEDIA_TYPE_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(5, originalMediaType_);
  }
  if (recordingDeviceType_
      != com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType
          .RECORDING_DEVICE_TYPE_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(6, recordingDeviceType_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recordingDeviceName_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 7, recordingDeviceName_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(originalMimeType_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 8, originalMimeType_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(audioTopic_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 10, audioTopic_);
  }
  getUnknownFields().writeTo(output);
}
// Computes the serialized byte size of this message. Must mirror writeTo exactly: the same
// fields are counted under the same default-skipping conditions. The result is memoized in
// memoizedSize (inherited; -1 means not yet computed) since the message is immutable.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (interactionType_
      != com.google.cloud.speech.v1.RecognitionMetadata.InteractionType
          .INTERACTION_TYPE_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, interactionType_);
  }
  if (industryNaicsCodeOfAudio_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeUInt32Size(3, industryNaicsCodeOfAudio_);
  }
  if (microphoneDistance_
      != com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance
          .MICROPHONE_DISTANCE_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, microphoneDistance_);
  }
  if (originalMediaType_
      != com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType
          .ORIGINAL_MEDIA_TYPE_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(5, originalMediaType_);
  }
  if (recordingDeviceType_
      != com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType
          .RECORDING_DEVICE_TYPE_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(6, recordingDeviceType_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recordingDeviceName_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, recordingDeviceName_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(originalMimeType_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, originalMimeType_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(audioTopic_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, audioTopic_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Two RecognitionMetadata messages are equal when every field — the four enum-backed ints,
 * the NAICS code, the three strings, and the preserved unknown fields — matches.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.speech.v1.RecognitionMetadata)) {
    return super.equals(obj);
  }
  com.google.cloud.speech.v1.RecognitionMetadata that =
      (com.google.cloud.speech.v1.RecognitionMetadata) obj;
  // Enum fields compare by raw wire value so unrecognized values are handled uniformly.
  return interactionType_ == that.interactionType_
      && getIndustryNaicsCodeOfAudio() == that.getIndustryNaicsCodeOfAudio()
      && microphoneDistance_ == that.microphoneDistance_
      && originalMediaType_ == that.originalMediaType_
      && recordingDeviceType_ == that.recordingDeviceType_
      && getRecordingDeviceName().equals(that.getRecordingDeviceName())
      && getOriginalMimeType().equals(that.getOriginalMimeType())
      && getAudioTopic().equals(that.getAudioTopic())
      && getUnknownFields().equals(that.getUnknownFields());
}
@java.lang.Override
public int hashCode() {
// The hash is memoized; 0 means "not computed yet".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
// Standard protobuf-generated mixing: seed with the descriptor hash, then for
// each field fold in (37 * hash + fieldNumber) followed by (53 * hash + value).
// The field order and constants must stay in lockstep with equals().
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + INTERACTION_TYPE_FIELD_NUMBER;
hash = (53 * hash) + interactionType_;
hash = (37 * hash) + INDUSTRY_NAICS_CODE_OF_AUDIO_FIELD_NUMBER;
hash = (53 * hash) + getIndustryNaicsCodeOfAudio();
hash = (37 * hash) + MICROPHONE_DISTANCE_FIELD_NUMBER;
hash = (53 * hash) + microphoneDistance_;
hash = (37 * hash) + ORIGINAL_MEDIA_TYPE_FIELD_NUMBER;
hash = (53 * hash) + originalMediaType_;
hash = (37 * hash) + RECORDING_DEVICE_TYPE_FIELD_NUMBER;
hash = (53 * hash) + recordingDeviceType_;
hash = (37 * hash) + RECORDING_DEVICE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getRecordingDeviceName().hashCode();
hash = (37 * hash) + ORIGINAL_MIME_TYPE_FIELD_NUMBER;
hash = (53 * hash) + getOriginalMimeType().hashCode();
hash = (37 * hash) + AUDIO_TOPIC_FIELD_NUMBER;
hash = (53 * hash) + getAudioTopic().hashCode();
// Unknown fields participate so equal messages (per equals()) hash equally.
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Static parse entry points. ---
// In-memory sources (ByteBuffer / ByteString / byte[]) delegate to PARSER;
// stream sources delegate to the GeneratedMessageV3 IO helpers, which wrap
// IOExceptions consistently. Overloads taking an ExtensionRegistryLite allow
// extension resolution during parsing.
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message,
// allowing multiple messages on one stream.
public static com.google.cloud.speech.v1.RecognitionMetadata parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.speech.v1.RecognitionMetadata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Creates a fresh builder with every field at its proto3 default.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the given message's field values.
public static Builder newBuilder(com.google.cloud.speech.v1.RecognitionMetadata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the mergeFrom copy when converting the shared default instance.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Description of audio data to be recognized.
 * </pre>
 *
 * Protobuf type {@code google.cloud.speech.v1.RecognitionMetadata}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.speech.v1.RecognitionMetadata)
com.google.cloud.speech.v1.RecognitionMetadataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.speech.v1.SpeechProto
.internal_static_google_cloud_speech_v1_RecognitionMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v1.SpeechProto
.internal_static_google_cloud_speech_v1_RecognitionMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v1.RecognitionMetadata.class,
com.google.cloud.speech.v1.RecognitionMetadata.Builder.class);
}
// Construct using com.google.cloud.speech.v1.RecognitionMetadata.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets every field to its proto3 default and clears all has-bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
interactionType_ = 0;
industryNaicsCodeOfAudio_ = 0;
microphoneDistance_ = 0;
originalMediaType_ = 0;
recordingDeviceType_ = 0;
recordingDeviceName_ = "";
originalMimeType_ = "";
audioTopic_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.speech.v1.SpeechProto
.internal_static_google_cloud_speech_v1_RecognitionMetadata_descriptor;
}
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata getDefaultInstanceForType() {
return com.google.cloud.speech.v1.RecognitionMetadata.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata build() {
com.google.cloud.speech.v1.RecognitionMetadata result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata buildPartial() {
com.google.cloud.speech.v1.RecognitionMetadata result =
new com.google.cloud.speech.v1.RecognitionMetadata(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose has-bit is set into the result; unset fields
// keep the message's defaults.
private void buildPartial0(com.google.cloud.speech.v1.RecognitionMetadata result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.interactionType_ = interactionType_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.industryNaicsCodeOfAudio_ = industryNaicsCodeOfAudio_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.microphoneDistance_ = microphoneDistance_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.originalMediaType_ = originalMediaType_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.recordingDeviceType_ = recordingDeviceType_;
}
if (((from_bitField0_ & 0x00000020) != 0)) {
result.recordingDeviceName_ = recordingDeviceName_;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.originalMimeType_ = originalMimeType_;
}
if (((from_bitField0_ & 0x00000080) != 0)) {
result.audioTopic_ = audioTopic_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.speech.v1.RecognitionMetadata) {
return mergeFrom((com.google.cloud.speech.v1.RecognitionMetadata) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: any non-default scalar/enum/string value in "other"
// overwrites the corresponding field on this builder.
public Builder mergeFrom(com.google.cloud.speech.v1.RecognitionMetadata other) {
if (other == com.google.cloud.speech.v1.RecognitionMetadata.getDefaultInstance()) return this;
if (other.interactionType_ != 0) {
setInteractionTypeValue(other.getInteractionTypeValue());
}
if (other.getIndustryNaicsCodeOfAudio() != 0) {
setIndustryNaicsCodeOfAudio(other.getIndustryNaicsCodeOfAudio());
}
if (other.microphoneDistance_ != 0) {
setMicrophoneDistanceValue(other.getMicrophoneDistanceValue());
}
if (other.originalMediaType_ != 0) {
setOriginalMediaTypeValue(other.getOriginalMediaTypeValue());
}
if (other.recordingDeviceType_ != 0) {
setRecordingDeviceTypeValue(other.getRecordingDeviceTypeValue());
}
if (!other.getRecordingDeviceName().isEmpty()) {
recordingDeviceName_ = other.recordingDeviceName_;
bitField0_ |= 0x00000020;
onChanged();
}
if (!other.getOriginalMimeType().isEmpty()) {
originalMimeType_ = other.originalMimeType_;
bitField0_ |= 0x00000040;
onChanged();
}
if (!other.getAudioTopic().isEmpty()) {
audioTopic_ = other.audioTopic_;
bitField0_ |= 0x00000080;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse loop: dispatches on the field tag until end of input
// (tag 0) or an end-group tag; unrecognized fields are preserved in the
// unknown-field set.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
interactionType_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 24:
{
industryNaicsCodeOfAudio_ = input.readUInt32();
bitField0_ |= 0x00000002;
break;
} // case 24
case 32:
{
microphoneDistance_ = input.readEnum();
bitField0_ |= 0x00000004;
break;
} // case 32
case 40:
{
originalMediaType_ = input.readEnum();
bitField0_ |= 0x00000008;
break;
} // case 40
case 48:
{
recordingDeviceType_ = input.readEnum();
bitField0_ |= 0x00000010;
break;
} // case 48
case 58:
{
recordingDeviceName_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000020;
break;
} // case 58
case 66:
{
originalMimeType_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000040;
break;
} // case 66
case 82:
{
audioTopic_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000080;
break;
} // case 82
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Bit i set => the i-th field has been explicitly assigned on this builder.
private int bitField0_;
private int interactionType_ = 0;
/**
 *
 *
 *
 * The use case most closely describing the audio content to be recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;
 *
 *
 * @return The enum numeric value on the wire for interactionType.
 */
@java.lang.Override
public int getInteractionTypeValue() {
return interactionType_;
}
/**
 *
 *
 *
 * The use case most closely describing the audio content to be recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;
 *
 *
 * @param value The enum numeric value on the wire for interactionType to set.
 * @return This builder for chaining.
 */
public Builder setInteractionTypeValue(int value) {
interactionType_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 *
 *
 *
 * The use case most closely describing the audio content to be recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;
 *
 *
 * @return The interactionType.
 */
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata.InteractionType getInteractionType() {
com.google.cloud.speech.v1.RecognitionMetadata.InteractionType result =
com.google.cloud.speech.v1.RecognitionMetadata.InteractionType.forNumber(
interactionType_);
// forNumber returns null for wire values with no matching constant.
return result == null
? com.google.cloud.speech.v1.RecognitionMetadata.InteractionType.UNRECOGNIZED
: result;
}
/**
 *
 *
 *
 * The use case most closely describing the audio content to be recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;
 *
 *
 * @param value The interactionType to set.
 * @return This builder for chaining.
 */
public Builder setInteractionType(
com.google.cloud.speech.v1.RecognitionMetadata.InteractionType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
interactionType_ = value.getNumber();
onChanged();
return this;
}
/**
 *
 *
 *
 * The use case most closely describing the audio content to be recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.InteractionType interaction_type = 1;
 *
 *
 * @return This builder for chaining.
 */
public Builder clearInteractionType() {
bitField0_ = (bitField0_ & ~0x00000001);
interactionType_ = 0;
onChanged();
return this;
}
private int industryNaicsCodeOfAudio_;
/**
 *
 *
 *
 * The industry vertical to which this speech recognition request most
 * closely applies. This is most indicative of the topics contained
 * in the audio. Use the 6-digit NAICS code to identify the industry
 * vertical - see https://www.naics.com/search/.
 *
 *
 * uint32 industry_naics_code_of_audio = 3;
 *
 * @return The industryNaicsCodeOfAudio.
 */
@java.lang.Override
public int getIndustryNaicsCodeOfAudio() {
return industryNaicsCodeOfAudio_;
}
/**
 *
 *
 *
 * The industry vertical to which this speech recognition request most
 * closely applies. This is most indicative of the topics contained
 * in the audio. Use the 6-digit NAICS code to identify the industry
 * vertical - see https://www.naics.com/search/.
 *
 *
 * uint32 industry_naics_code_of_audio = 3;
 *
 * @param value The industryNaicsCodeOfAudio to set.
 * @return This builder for chaining.
 */
public Builder setIndustryNaicsCodeOfAudio(int value) {
industryNaicsCodeOfAudio_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 *
 * The industry vertical to which this speech recognition request most
 * closely applies. This is most indicative of the topics contained
 * in the audio. Use the 6-digit NAICS code to identify the industry
 * vertical - see https://www.naics.com/search/.
 *
 *
 * uint32 industry_naics_code_of_audio = 3;
 *
 * @return This builder for chaining.
 */
public Builder clearIndustryNaicsCodeOfAudio() {
bitField0_ = (bitField0_ & ~0x00000002);
industryNaicsCodeOfAudio_ = 0;
onChanged();
return this;
}
private int microphoneDistance_ = 0;
/**
 *
 *
 *
 * The audio type that most closely describes the audio being recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
 *
 *
 * @return The enum numeric value on the wire for microphoneDistance.
 */
@java.lang.Override
public int getMicrophoneDistanceValue() {
return microphoneDistance_;
}
/**
 *
 *
 *
 * The audio type that most closely describes the audio being recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
 *
 *
 * @param value The enum numeric value on the wire for microphoneDistance to set.
 * @return This builder for chaining.
 */
public Builder setMicrophoneDistanceValue(int value) {
microphoneDistance_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 *
 *
 *
 * The audio type that most closely describes the audio being recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
 *
 *
 * @return The microphoneDistance.
 */
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance
getMicrophoneDistance() {
com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance result =
com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance.forNumber(
microphoneDistance_);
return result == null
? com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance.UNRECOGNIZED
: result;
}
/**
 *
 *
 *
 * The audio type that most closely describes the audio being recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
 *
 *
 * @param value The microphoneDistance to set.
 * @return This builder for chaining.
 */
public Builder setMicrophoneDistance(
com.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
microphoneDistance_ = value.getNumber();
onChanged();
return this;
}
/**
 *
 *
 *
 * The audio type that most closely describes the audio being recognized.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
 *
 *
 * @return This builder for chaining.
 */
public Builder clearMicrophoneDistance() {
bitField0_ = (bitField0_ & ~0x00000004);
microphoneDistance_ = 0;
onChanged();
return this;
}
private int originalMediaType_ = 0;
/**
 *
 *
 *
 * The original media the speech was recorded on.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
 *
 *
 * @return The enum numeric value on the wire for originalMediaType.
 */
@java.lang.Override
public int getOriginalMediaTypeValue() {
return originalMediaType_;
}
/**
 *
 *
 *
 * The original media the speech was recorded on.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
 *
 *
 * @param value The enum numeric value on the wire for originalMediaType to set.
 * @return This builder for chaining.
 */
public Builder setOriginalMediaTypeValue(int value) {
originalMediaType_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
 *
 *
 *
 * The original media the speech was recorded on.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
 *
 *
 * @return The originalMediaType.
 */
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType getOriginalMediaType() {
com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType result =
com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType.forNumber(
originalMediaType_);
return result == null
? com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType.UNRECOGNIZED
: result;
}
/**
 *
 *
 *
 * The original media the speech was recorded on.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
 *
 *
 * @param value The originalMediaType to set.
 * @return This builder for chaining.
 */
public Builder setOriginalMediaType(
com.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
originalMediaType_ = value.getNumber();
onChanged();
return this;
}
/**
 *
 *
 *
 * The original media the speech was recorded on.
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
 *
 *
 * @return This builder for chaining.
 */
public Builder clearOriginalMediaType() {
bitField0_ = (bitField0_ & ~0x00000008);
originalMediaType_ = 0;
onChanged();
return this;
}
private int recordingDeviceType_ = 0;
/**
 *
 *
 *
 * The type of device the speech was recorded with.
 *
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
 *
 *
 * @return The enum numeric value on the wire for recordingDeviceType.
 */
@java.lang.Override
public int getRecordingDeviceTypeValue() {
return recordingDeviceType_;
}
/**
 *
 *
 *
 * The type of device the speech was recorded with.
 *
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
 *
 *
 * @param value The enum numeric value on the wire for recordingDeviceType to set.
 * @return This builder for chaining.
 */
public Builder setRecordingDeviceTypeValue(int value) {
recordingDeviceType_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
 *
 *
 *
 * The type of device the speech was recorded with.
 *
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
 *
 *
 * @return The recordingDeviceType.
 */
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType
getRecordingDeviceType() {
com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType result =
com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType.forNumber(
recordingDeviceType_);
return result == null
? com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType.UNRECOGNIZED
: result;
}
/**
 *
 *
 *
 * The type of device the speech was recorded with.
 *
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
 *
 *
 * @param value The recordingDeviceType to set.
 * @return This builder for chaining.
 */
public Builder setRecordingDeviceType(
com.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000010;
recordingDeviceType_ = value.getNumber();
onChanged();
return this;
}
/**
 *
 *
 *
 * The type of device the speech was recorded with.
 *
 *
 *
 * .google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
 *
 *
 * @return This builder for chaining.
 */
public Builder clearRecordingDeviceType() {
bitField0_ = (bitField0_ & ~0x00000010);
recordingDeviceType_ = 0;
onChanged();
return this;
}
// Holds either a String or a ByteString; lazily converted on access (standard
// protobuf-generated string-field representation).
private java.lang.Object recordingDeviceName_ = "";
/**
 *
 *
 *
 * The device used to make the recording. Examples 'Nexus 5X' or
 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
 * 'Cardioid Microphone'.
 *
 *
 * string recording_device_name = 7;
 *
 * @return The recordingDeviceName.
 */
public java.lang.String getRecordingDeviceName() {
java.lang.Object ref = recordingDeviceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
recordingDeviceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 *
 * The device used to make the recording. Examples 'Nexus 5X' or
 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
 * 'Cardioid Microphone'.
 *
 *
 * string recording_device_name = 7;
 *
 * @return The bytes for recordingDeviceName.
 */
public com.google.protobuf.ByteString getRecordingDeviceNameBytes() {
java.lang.Object ref = recordingDeviceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
recordingDeviceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 *
 * The device used to make the recording. Examples 'Nexus 5X' or
 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
 * 'Cardioid Microphone'.
 *
 *
 * string recording_device_name = 7;
 *
 * @param value The recordingDeviceName to set.
 * @return This builder for chaining.
 */
public Builder setRecordingDeviceName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
recordingDeviceName_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
 *
 *
 *
 * The device used to make the recording. Examples 'Nexus 5X' or
 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
 * 'Cardioid Microphone'.
 *
 *
 * string recording_device_name = 7;
 *
 * @return This builder for chaining.
 */
public Builder clearRecordingDeviceName() {
recordingDeviceName_ = getDefaultInstance().getRecordingDeviceName();
bitField0_ = (bitField0_ & ~0x00000020);
onChanged();
return this;
}
/**
 *
 *
 *
 * The device used to make the recording. Examples 'Nexus 5X' or
 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
 * 'Cardioid Microphone'.
 *
 *
 * string recording_device_name = 7;
 *
 * @param value The bytes for recordingDeviceName to set.
 * @return This builder for chaining.
 */
public Builder setRecordingDeviceNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
recordingDeviceName_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
private java.lang.Object originalMimeType_ = "";
/**
 *
 *
 *
 * Mime type of the original audio file. For example `audio/m4a`,
 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
 * A list of possible audio mime types is maintained at
 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
 *
 *
 * string original_mime_type = 8;
 *
 * @return The originalMimeType.
 */
public java.lang.String getOriginalMimeType() {
java.lang.Object ref = originalMimeType_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
originalMimeType_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 *
 * Mime type of the original audio file. For example `audio/m4a`,
 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
 * A list of possible audio mime types is maintained at
 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
 *
 *
 * string original_mime_type = 8;
 *
 * @return The bytes for originalMimeType.
 */
public com.google.protobuf.ByteString getOriginalMimeTypeBytes() {
java.lang.Object ref = originalMimeType_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
originalMimeType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 *
 * Mime type of the original audio file. For example `audio/m4a`,
 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
 * A list of possible audio mime types is maintained at
 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
 *
 *
 * string original_mime_type = 8;
 *
 * @param value The originalMimeType to set.
 * @return This builder for chaining.
 */
public Builder setOriginalMimeType(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
originalMimeType_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
 *
 *
 *
 * Mime type of the original audio file. For example `audio/m4a`,
 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
 * A list of possible audio mime types is maintained at
 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
 *
 *
 * string original_mime_type = 8;
 *
 * @return This builder for chaining.
 */
public Builder clearOriginalMimeType() {
originalMimeType_ = getDefaultInstance().getOriginalMimeType();
bitField0_ = (bitField0_ & ~0x00000040);
onChanged();
return this;
}
/**
 *
 *
 *
 * Mime type of the original audio file. For example `audio/m4a`,
 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
 * A list of possible audio mime types is maintained at
 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
 *
 *
 * string original_mime_type = 8;
 *
 * @param value The bytes for originalMimeType to set.
 * @return This builder for chaining.
 */
public Builder setOriginalMimeTypeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
originalMimeType_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
private java.lang.Object audioTopic_ = "";
/**
 *
 *
 *
 * Description of the content. Eg. "Recordings of federal supreme court
 * hearings from 2012".
 *
 *
 * string audio_topic = 10;
 *
 * @return The audioTopic.
 */
public java.lang.String getAudioTopic() {
java.lang.Object ref = audioTopic_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
audioTopic_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 *
 * Description of the content. Eg. "Recordings of federal supreme court
 * hearings from 2012".
 *
 *
 * string audio_topic = 10;
 *
 * @return The bytes for audioTopic.
 */
public com.google.protobuf.ByteString getAudioTopicBytes() {
java.lang.Object ref = audioTopic_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
audioTopic_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 *
 * Description of the content. Eg. "Recordings of federal supreme court
 * hearings from 2012".
 *
 *
 * string audio_topic = 10;
 *
 * @param value The audioTopic to set.
 * @return This builder for chaining.
 */
public Builder setAudioTopic(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
audioTopic_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
 *
 *
 *
 * Description of the content. Eg. "Recordings of federal supreme court
 * hearings from 2012".
 *
 *
 * string audio_topic = 10;
 *
 * @return This builder for chaining.
 */
public Builder clearAudioTopic() {
audioTopic_ = getDefaultInstance().getAudioTopic();
bitField0_ = (bitField0_ & ~0x00000080);
onChanged();
return this;
}
/**
 *
 *
 *
 * Description of the content. Eg. "Recordings of federal supreme court
 * hearings from 2012".
 *
 *
 * string audio_topic = 10;
 *
 * @param value The bytes for audioTopic to set.
 * @return This builder for chaining.
 */
public Builder setAudioTopicBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
audioTopic_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.speech.v1.RecognitionMetadata)
}
// @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognitionMetadata)
// Shared immutable default instance; proto3 messages reuse one singleton for
// "all fields at defaults".
private static final com.google.cloud.speech.v1.RecognitionMetadata DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.speech.v1.RecognitionMetadata();
}
public static com.google.cloud.speech.v1.RecognitionMetadata getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Shared parser singleton. Delegates to {@code Builder.mergeFrom} and, on any
 * failure, attaches the partially built message so callers can inspect what
 * was parsed before the error.
 *
 * <p>Restores the generic type arguments ({@code Parser<RecognitionMetadata>} /
 * {@code AbstractParser<RecognitionMetadata>}) that were stripped from the raw
 * declaration; protoc-generated code is parameterized, and raw types here
 * produce unchecked warnings and defeat the {@code getParserForType} contract.
 */
private static final com.google.protobuf.Parser<RecognitionMetadata> PARSER =
    new com.google.protobuf.AbstractParser<RecognitionMetadata>() {
      @java.lang.Override
      public RecognitionMetadata parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Preserve the partial result on the exception.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain IO failures in the protobuf exception type callers expect.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/**
 * Returns the shared parser for this message type. The generic type argument
 * ({@code Parser<RecognitionMetadata>}), stripped in the raw declaration, is
 * restored here to match the protoc-generated signature.
 */
public static com.google.protobuf.Parser<RecognitionMetadata> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<RecognitionMetadata> getParserForType() {
  return PARSER;
}

/** Returns the shared immutable default instance of this message type. */
@java.lang.Override
public com.google.cloud.speech.v1.RecognitionMetadata getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}