com.google.cloud.speech.v1p1beta1.RecognitionMetadata Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of proto-google-cloud-speech-v1p1beta1 Show documentation
PROTO library for proto-google-cloud-speech-v1p1beta1
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v1p1beta1/cloud_speech.proto
package com.google.cloud.speech.v1p1beta1;
/**
*
* Description of audio data to be recognized.
*
*
* Protobuf type {@code google.cloud.speech.v1p1beta1.RecognitionMetadata}
*/
public final class RecognitionMetadata extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.cloud.speech.v1p1beta1.RecognitionMetadata)
RecognitionMetadataOrBuilder {
private static final long serialVersionUID = 0L;
// Use RecognitionMetadata.newBuilder() to construct.
// NOTE: the wildcard bound on Builder<?> was lost in transit; restored here so the
// signature matches the protoc-generated form and compiles without raw-type warnings.
private RecognitionMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
/** Default-instance constructor: every field gets its proto3 default value. */
private RecognitionMetadata() {
  // Enum-typed fields default to the *_UNSPECIFIED wire value (0).
  interactionType_ = 0;
  microphoneDistance_ = 0;
  originalMediaType_ = 0;
  recordingDeviceType_ = 0;
  // Numeric scalars default to zero.
  industryNaicsCodeOfAudio_ = 0;
  obfuscatedId_ = 0L;
  // String fields default to the empty string.
  recordingDeviceName_ = "";
  originalMimeType_ = "";
  audioTopic_ = "";
}
/** Returns fields that were present on the wire but are unknown to this message's schema. */
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Wire-format parsing constructor used by the message parser.
 *
 * Reads tag/value pairs from {@code input} until end of stream; known fields are stored
 * directly and anything else is preserved in {@code unknownFields} so the message
 * round-trips losslessly.
 *
 * @throws com.google.protobuf.InvalidProtocolBufferException on malformed input; the
 *     partially parsed message is attached as the unfinished message.
 */
private RecognitionMetadata(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:  // end of input
          done = true;
          break;
        case 8:  // interaction_type = 1 (varint enum; raw value kept for UNRECOGNIZED support)
          interactionType_ = input.readEnum();
          break;
        case 24:  // industry_naics_code_of_audio = 3 (uint32)
          industryNaicsCodeOfAudio_ = input.readUInt32();
          break;
        case 32:  // microphone_distance = 4 (varint enum)
          microphoneDistance_ = input.readEnum();
          break;
        case 40:  // original_media_type = 5 (varint enum)
          originalMediaType_ = input.readEnum();
          break;
        case 48:  // recording_device_type = 6 (varint enum)
          recordingDeviceType_ = input.readEnum();
          break;
        case 58:  // recording_device_name = 7 (string, must be valid UTF-8)
          recordingDeviceName_ = input.readStringRequireUtf8();
          break;
        case 66:  // original_mime_type = 8 (string)
          originalMimeType_ = input.readStringRequireUtf8();
          break;
        case 72:  // obfuscated_id = 9 (int64)
          obfuscatedId_ = input.readInt64();
          break;
        case 82:  // audio_topic = 10 (string)
          audioTopic_ = input.readStringRequireUtf8();
          break;
        default:
          // Unrecognized tag: stash it in unknownFields; a false return signals end of data.
          if (!parseUnknownFieldProto3(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
/** Returns the shared proto descriptor for this message type, owned by {@code SpeechProto}. */
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.speech.v1p1beta1.SpeechProto.internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_descriptor;
}
/** Wires up the reflective field accessors for this message type. */
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.speech.v1p1beta1.SpeechProto.internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.speech.v1p1beta1.RecognitionMetadata.class, com.google.cloud.speech.v1p1beta1.RecognitionMetadata.Builder.class);
}
/**
 * Use case categories that the audio recognition request can be described by.
 *
 * Protobuf enum {@code google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType}
 */
public enum InteractionType
    implements com.google.protobuf.ProtocolMessageEnum {
  /** Use case is either unknown or is something other than one of the other values below. */
  INTERACTION_TYPE_UNSPECIFIED(0),
  /**
   * Multiple people in a conversation or discussion. For example in a meeting with two or
   * more people actively participating. Typically all the primary people speaking would be
   * in the same room (if not, see PHONE_CALL).
   */
  DISCUSSION(1),
  /** One or more persons lecturing or presenting to others, mostly uninterrupted. */
  PRESENTATION(2),
  /**
   * A phone-call or video-conference in which two or more people, who are not in the same
   * room, are actively participating.
   */
  PHONE_CALL(3),
  /** A recorded message intended for another person to listen to. */
  VOICEMAIL(4),
  /** Professionally produced audio (e.g. TV show, podcast). */
  PROFESSIONALLY_PRODUCED(5),
  /** Transcribe spoken questions and queries into text. */
  VOICE_SEARCH(6),
  /** Transcribe voice commands, such as for controlling a device. */
  VOICE_COMMAND(7),
  /**
   * Transcribe speech to text to create a written document, such as a text-message, email
   * or report.
   */
  DICTATION(8),
  /** Sentinel for wire values this generated code version does not know about. */
  UNRECOGNIZED(-1),
  ;

  /** Wire value of {@link #INTERACTION_TYPE_UNSPECIFIED}. */
  public static final int INTERACTION_TYPE_UNSPECIFIED_VALUE = 0;
  /** Wire value of {@link #DISCUSSION}. */
  public static final int DISCUSSION_VALUE = 1;
  /** Wire value of {@link #PRESENTATION}. */
  public static final int PRESENTATION_VALUE = 2;
  /** Wire value of {@link #PHONE_CALL}. */
  public static final int PHONE_CALL_VALUE = 3;
  /** Wire value of {@link #VOICEMAIL}. */
  public static final int VOICEMAIL_VALUE = 4;
  /** Wire value of {@link #PROFESSIONALLY_PRODUCED}. */
  public static final int PROFESSIONALLY_PRODUCED_VALUE = 5;
  /** Wire value of {@link #VOICE_SEARCH}. */
  public static final int VOICE_SEARCH_VALUE = 6;
  /** Wire value of {@link #VOICE_COMMAND}. */
  public static final int VOICE_COMMAND_VALUE = 7;
  /** Wire value of {@link #DICTATION}. */
  public static final int DICTATION_VALUE = 8;

  /**
   * Returns the proto wire value of this constant.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static InteractionType valueOf(int value) {
    return forNumber(value);
  }

  /** Maps a wire value to its enum constant, or returns {@code null} if unknown. */
  public static InteractionType forNumber(int value) {
    switch (value) {
      case 0: return INTERACTION_TYPE_UNSPECIFIED;
      case 1: return DISCUSSION;
      case 2: return PRESENTATION;
      case 3: return PHONE_CALL;
      case 4: return VOICEMAIL;
      case 5: return PROFESSIONALLY_PRODUCED;
      case 6: return VOICE_SEARCH;
      case 7: return VOICE_COMMAND;
      case 8: return DICTATION;
      default: return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<InteractionType>
      internalGetValueMap() {
    return internalValueMap;
  }
  // Typed EnumLiteMap<InteractionType>; the raw form here had lost its type argument.
  private static final com.google.protobuf.Internal.EnumLiteMap<
      InteractionType> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<InteractionType>() {
            public InteractionType findValueByNumber(int number) {
              return InteractionType.forNumber(number);
            }
          };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(ordinal());
  }
  public final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDescriptor().getEnumTypes().get(0);
  }

  private static final InteractionType[] VALUES = values();

  /** Maps an {@code EnumValueDescriptor} back to a constant of this enum. */
  public static InteractionType valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private InteractionType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType)
}
/**
 * Enumerates the types of capture settings describing an audio file.
 *
 * Protobuf enum {@code google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance}
 */
public enum MicrophoneDistance
    implements com.google.protobuf.ProtocolMessageEnum {
  /** Audio type is not known. */
  MICROPHONE_DISTANCE_UNSPECIFIED(0),
  /**
   * The audio was captured from a closely placed microphone, e.g. phone, dictaphone, or
   * handheld microphone. Generally the speaker is within 1 meter of the microphone.
   */
  NEARFIELD(1),
  /** The speaker is within 3 meters of the microphone. */
  MIDFIELD(2),
  /** The speaker is more than 3 meters away from the microphone. */
  FARFIELD(3),
  /** Sentinel for wire values this generated code version does not know about. */
  UNRECOGNIZED(-1),
  ;

  /** Wire value of {@link #MICROPHONE_DISTANCE_UNSPECIFIED}. */
  public static final int MICROPHONE_DISTANCE_UNSPECIFIED_VALUE = 0;
  /** Wire value of {@link #NEARFIELD}. */
  public static final int NEARFIELD_VALUE = 1;
  /** Wire value of {@link #MIDFIELD}. */
  public static final int MIDFIELD_VALUE = 2;
  /** Wire value of {@link #FARFIELD}. */
  public static final int FARFIELD_VALUE = 3;

  /**
   * Returns the proto wire value of this constant.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static MicrophoneDistance valueOf(int value) {
    return forNumber(value);
  }

  /** Maps a wire value to its enum constant, or returns {@code null} if unknown. */
  public static MicrophoneDistance forNumber(int value) {
    switch (value) {
      case 0: return MICROPHONE_DISTANCE_UNSPECIFIED;
      case 1: return NEARFIELD;
      case 2: return MIDFIELD;
      case 3: return FARFIELD;
      default: return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance>
      internalGetValueMap() {
    return internalValueMap;
  }
  // Typed EnumLiteMap<MicrophoneDistance>; the raw form here had lost its type argument.
  private static final com.google.protobuf.Internal.EnumLiteMap<
      MicrophoneDistance> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance>() {
            public MicrophoneDistance findValueByNumber(int number) {
              return MicrophoneDistance.forNumber(number);
            }
          };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(ordinal());
  }
  public final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDescriptor().getEnumTypes().get(1);
  }

  private static final MicrophoneDistance[] VALUES = values();

  /** Maps an {@code EnumValueDescriptor} back to a constant of this enum. */
  public static MicrophoneDistance valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private MicrophoneDistance(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance)
}
/**
 * The original media the speech was recorded on.
 *
 * Protobuf enum {@code google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType}
 */
public enum OriginalMediaType
    implements com.google.protobuf.ProtocolMessageEnum {
  /** Unknown original media type. */
  ORIGINAL_MEDIA_TYPE_UNSPECIFIED(0),
  /** The speech data is an audio recording. */
  AUDIO(1),
  /** The speech data originally recorded on a video. */
  VIDEO(2),
  /** Sentinel for wire values this generated code version does not know about. */
  UNRECOGNIZED(-1),
  ;

  /** Wire value of {@link #ORIGINAL_MEDIA_TYPE_UNSPECIFIED}. */
  public static final int ORIGINAL_MEDIA_TYPE_UNSPECIFIED_VALUE = 0;
  /** Wire value of {@link #AUDIO}. */
  public static final int AUDIO_VALUE = 1;
  /** Wire value of {@link #VIDEO}. */
  public static final int VIDEO_VALUE = 2;

  /**
   * Returns the proto wire value of this constant.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static OriginalMediaType valueOf(int value) {
    return forNumber(value);
  }

  /** Maps a wire value to its enum constant, or returns {@code null} if unknown. */
  public static OriginalMediaType forNumber(int value) {
    switch (value) {
      case 0: return ORIGINAL_MEDIA_TYPE_UNSPECIFIED;
      case 1: return AUDIO;
      case 2: return VIDEO;
      default: return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType>
      internalGetValueMap() {
    return internalValueMap;
  }
  // Typed EnumLiteMap<OriginalMediaType>; the raw form here had lost its type argument.
  private static final com.google.protobuf.Internal.EnumLiteMap<
      OriginalMediaType> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType>() {
            public OriginalMediaType findValueByNumber(int number) {
              return OriginalMediaType.forNumber(number);
            }
          };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(ordinal());
  }
  public final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDescriptor().getEnumTypes().get(2);
  }

  private static final OriginalMediaType[] VALUES = values();

  /** Maps an {@code EnumValueDescriptor} back to a constant of this enum. */
  public static OriginalMediaType valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private OriginalMediaType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType)
}
/**
 * The type of device the speech was recorded with.
 *
 * Protobuf enum {@code google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType}
 */
public enum RecordingDeviceType
    implements com.google.protobuf.ProtocolMessageEnum {
  /** The recording device is unknown. */
  RECORDING_DEVICE_TYPE_UNSPECIFIED(0),
  /** Speech was recorded on a smartphone. */
  SMARTPHONE(1),
  /** Speech was recorded using a personal computer or tablet. */
  PC(2),
  /** Speech was recorded over a phone line. */
  PHONE_LINE(3),
  /** Speech was recorded in a vehicle. */
  VEHICLE(4),
  /** Speech was recorded outdoors. */
  OTHER_OUTDOOR_DEVICE(5),
  /** Speech was recorded indoors. */
  OTHER_INDOOR_DEVICE(6),
  /** Sentinel for wire values this generated code version does not know about. */
  UNRECOGNIZED(-1),
  ;

  /** Wire value of {@link #RECORDING_DEVICE_TYPE_UNSPECIFIED}. */
  public static final int RECORDING_DEVICE_TYPE_UNSPECIFIED_VALUE = 0;
  /** Wire value of {@link #SMARTPHONE}. */
  public static final int SMARTPHONE_VALUE = 1;
  /** Wire value of {@link #PC}. */
  public static final int PC_VALUE = 2;
  /** Wire value of {@link #PHONE_LINE}. */
  public static final int PHONE_LINE_VALUE = 3;
  /** Wire value of {@link #VEHICLE}. */
  public static final int VEHICLE_VALUE = 4;
  /** Wire value of {@link #OTHER_OUTDOOR_DEVICE}. */
  public static final int OTHER_OUTDOOR_DEVICE_VALUE = 5;
  /** Wire value of {@link #OTHER_INDOOR_DEVICE}. */
  public static final int OTHER_INDOOR_DEVICE_VALUE = 6;

  /**
   * Returns the proto wire value of this constant.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED}.
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static RecordingDeviceType valueOf(int value) {
    return forNumber(value);
  }

  /** Maps a wire value to its enum constant, or returns {@code null} if unknown. */
  public static RecordingDeviceType forNumber(int value) {
    switch (value) {
      case 0: return RECORDING_DEVICE_TYPE_UNSPECIFIED;
      case 1: return SMARTPHONE;
      case 2: return PC;
      case 3: return PHONE_LINE;
      case 4: return VEHICLE;
      case 5: return OTHER_OUTDOOR_DEVICE;
      case 6: return OTHER_INDOOR_DEVICE;
      default: return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType>
      internalGetValueMap() {
    return internalValueMap;
  }
  // Typed EnumLiteMap<RecordingDeviceType>; the raw form here had lost its type argument.
  private static final com.google.protobuf.Internal.EnumLiteMap<
      RecordingDeviceType> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType>() {
            public RecordingDeviceType findValueByNumber(int number) {
              return RecordingDeviceType.forNumber(number);
            }
          };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(ordinal());
  }
  public final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final com.google.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDescriptor().getEnumTypes().get(3);
  }

  private static final RecordingDeviceType[] VALUES = values();

  /** Maps an {@code EnumValueDescriptor} back to a constant of this enum. */
  public static RecordingDeviceType valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private RecordingDeviceType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType)
}
public static final int INTERACTION_TYPE_FIELD_NUMBER = 1;
// Raw wire value; preserved as an int so unknown enum values survive round-trips.
private int interactionType_;
/**
 * The use case most closely describing the audio content to be recognized.
 *
 * {@code .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;}
 *
 * @return the raw wire value, which may not correspond to a known enum constant.
 */
public int getInteractionTypeValue() {
  return interactionType_;
}
/**
 * The use case most closely describing the audio content to be recognized.
 *
 * {@code .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;}
 *
 * @return the enum constant, or {@code UNRECOGNIZED} if the wire value is unknown.
 */
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType getInteractionType() {
  // forNumber is the non-deprecated equivalent of the generated valueOf(int).
  com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType result = com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.forNumber(interactionType_);
  return result == null ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.UNRECOGNIZED : result;
}
public static final int INDUSTRY_NAICS_CODE_OF_AUDIO_FIELD_NUMBER = 3;
private int industryNaicsCodeOfAudio_;
/**
 * The industry vertical to which this speech recognition request most
 * closely applies. This is most indicative of the topics contained
 * in the audio. Use the 6-digit NAICS code to identify the industry
 * vertical - see https://www.naics.com/search/.
 *
 * uint32 industry_naics_code_of_audio = 3;
 *
 * @return the NAICS code as an unsigned 32-bit value stored in a Java int.
 */
public int getIndustryNaicsCodeOfAudio() {
return industryNaicsCodeOfAudio_;
}
public static final int MICROPHONE_DISTANCE_FIELD_NUMBER = 4;
// Raw wire value; preserved as an int so unknown enum values survive round-trips.
private int microphoneDistance_;
/**
 * The audio type that most closely describes the audio being recognized.
 *
 * {@code .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;}
 *
 * @return the raw wire value, which may not correspond to a known enum constant.
 */
public int getMicrophoneDistanceValue() {
  return microphoneDistance_;
}
/**
 * The audio type that most closely describes the audio being recognized.
 *
 * {@code .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;}
 *
 * @return the enum constant, or {@code UNRECOGNIZED} if the wire value is unknown.
 */
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance getMicrophoneDistance() {
  // forNumber is the non-deprecated equivalent of the generated valueOf(int).
  com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance result = com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.forNumber(microphoneDistance_);
  return result == null ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.UNRECOGNIZED : result;
}
public static final int ORIGINAL_MEDIA_TYPE_FIELD_NUMBER = 5;
// Raw wire value; preserved as an int so unknown enum values survive round-trips.
private int originalMediaType_;
/**
 * The original media the speech was recorded on.
 *
 * {@code .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;}
 *
 * @return the raw wire value, which may not correspond to a known enum constant.
 */
public int getOriginalMediaTypeValue() {
  return originalMediaType_;
}
/**
 * The original media the speech was recorded on.
 *
 * {@code .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;}
 *
 * @return the enum constant, or {@code UNRECOGNIZED} if the wire value is unknown.
 */
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType getOriginalMediaType() {
  // forNumber is the non-deprecated equivalent of the generated valueOf(int).
  com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType result = com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.forNumber(originalMediaType_);
  return result == null ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.UNRECOGNIZED : result;
}
public static final int RECORDING_DEVICE_TYPE_FIELD_NUMBER = 6;
// Raw wire value; preserved as an int so unknown enum values survive round-trips.
private int recordingDeviceType_;
/**
 * The type of device the speech was recorded with.
 *
 * {@code .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;}
 *
 * @return the raw wire value, which may not correspond to a known enum constant.
 */
public int getRecordingDeviceTypeValue() {
  return recordingDeviceType_;
}
/**
 * The type of device the speech was recorded with.
 *
 * {@code .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;}
 *
 * @return the enum constant, or {@code UNRECOGNIZED} if the wire value is unknown.
 */
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType getRecordingDeviceType() {
  // forNumber is the non-deprecated equivalent of the generated valueOf(int).
  com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType result = com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.forNumber(recordingDeviceType_);
  return result == null ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.UNRECOGNIZED : result;
}
public static final int RECORDING_DEVICE_NAME_FIELD_NUMBER = 7;
// Holds either a String or a ByteString; converted lazily and cached in place on access.
private volatile java.lang.Object recordingDeviceName_;
/**
 * The device used to make the recording. Examples 'Nexus 5X' or
 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
 * 'Cardioid Microphone'.
 *
 * string recording_device_name = 7;
 *
 * @return the field as a String; a ByteString backing value is decoded from UTF-8 and cached.
 */
public java.lang.String getRecordingDeviceName() {
java.lang.Object ref = recordingDeviceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
recordingDeviceName_ = s;
return s;
}
}
/**
 * The device used to make the recording. Examples 'Nexus 5X' or
 * 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
 * 'Cardioid Microphone'.
 *
 * string recording_device_name = 7;
 *
 * @return the field as a UTF-8 ByteString; a String backing value is encoded and cached.
 */
public com.google.protobuf.ByteString
getRecordingDeviceNameBytes() {
java.lang.Object ref = recordingDeviceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls skip the conversion.
recordingDeviceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORIGINAL_MIME_TYPE_FIELD_NUMBER = 8;
// Holds either a String or a ByteString; converted lazily and cached in place on access.
private volatile java.lang.Object originalMimeType_;
/**
 * Mime type of the original audio file. For example `audio/m4a`,
 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
 * A list of possible audio mime types is maintained at
 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
 *
 * string original_mime_type = 8;
 *
 * @return the field as a String; a ByteString backing value is decoded from UTF-8 and cached.
 */
public java.lang.String getOriginalMimeType() {
java.lang.Object ref = originalMimeType_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
originalMimeType_ = s;
return s;
}
}
/**
 * Mime type of the original audio file. For example `audio/m4a`,
 * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
 * A list of possible audio mime types is maintained at
 * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
 *
 * string original_mime_type = 8;
 *
 * @return the field as a UTF-8 ByteString; a String backing value is encoded and cached.
 */
public com.google.protobuf.ByteString
getOriginalMimeTypeBytes() {
java.lang.Object ref = originalMimeType_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls skip the conversion.
originalMimeType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int OBFUSCATED_ID_FIELD_NUMBER = 9;
private long obfuscatedId_;
/**
 * Obfuscated (privacy-protected) ID of the user, to identify number of
 * unique users using the service.
 *
 * int64 obfuscated_id = 9;
 *
 * @return the obfuscated user ID, or 0 if unset (proto3 default).
 */
public long getObfuscatedId() {
return obfuscatedId_;
}
public static final int AUDIO_TOPIC_FIELD_NUMBER = 10;
// Holds either a String or a ByteString; converted lazily and cached in place on access.
private volatile java.lang.Object audioTopic_;
/**
 * Description of the content. Eg. "Recordings of federal supreme court
 * hearings from 2012".
 *
 * string audio_topic = 10;
 *
 * @return the field as a String; a ByteString backing value is decoded from UTF-8 and cached.
 */
public java.lang.String getAudioTopic() {
java.lang.Object ref = audioTopic_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
audioTopic_ = s;
return s;
}
}
/**
 * Description of the content. Eg. "Recordings of federal supreme court
 * hearings from 2012".
 *
 * string audio_topic = 10;
 *
 * @return the field as a UTF-8 ByteString; a String backing value is encoded and cached.
 */
public com.google.protobuf.ByteString
getAudioTopicBytes() {
java.lang.Object ref = audioTopic_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls skip the conversion.
audioTopic_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/** Always returns {@code true}: this message declares no required fields. */
public final boolean isInitialized() {
  byte cached = memoizedIsInitialized;
  if (cached == 1) {
    return true;
  }
  if (cached == 0) {
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Serializes this message to {@code output} in field-number order.
 * Proto3 semantics: fields equal to their default value (0 / empty string) are skipped.
 * Unknown fields captured at parse time are re-emitted last.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (interactionType_ != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.INTERACTION_TYPE_UNSPECIFIED.getNumber()) {
output.writeEnum(1, interactionType_);
}
if (industryNaicsCodeOfAudio_ != 0) {
output.writeUInt32(3, industryNaicsCodeOfAudio_);
}
if (microphoneDistance_ != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.MICROPHONE_DISTANCE_UNSPECIFIED.getNumber()) {
output.writeEnum(4, microphoneDistance_);
}
if (originalMediaType_ != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.ORIGINAL_MEDIA_TYPE_UNSPECIFIED.getNumber()) {
output.writeEnum(5, originalMediaType_);
}
if (recordingDeviceType_ != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.RECORDING_DEVICE_TYPE_UNSPECIFIED.getNumber()) {
output.writeEnum(6, recordingDeviceType_);
}
if (!getRecordingDeviceNameBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 7, recordingDeviceName_);
}
if (!getOriginalMimeTypeBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 8, originalMimeType_);
}
if (obfuscatedId_ != 0L) {
output.writeInt64(9, obfuscatedId_);
}
if (!getAudioTopicBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 10, audioTopic_);
}
unknownFields.writeTo(output);
}
/**
 * Computes (and memoizes in {@code memoizedSize}) the serialized byte size.
 * Mirrors writeTo(): default-valued fields contribute nothing; unknown fields are included.
 */
public int getSerializedSize() {
int size = memoizedSize;
// -1 means "not yet computed"; any other value is a cached result.
if (size != -1) return size;
size = 0;
if (interactionType_ != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.INTERACTION_TYPE_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, interactionType_);
}
if (industryNaicsCodeOfAudio_ != 0) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(3, industryNaicsCodeOfAudio_);
}
if (microphoneDistance_ != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.MICROPHONE_DISTANCE_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(4, microphoneDistance_);
}
if (originalMediaType_ != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.ORIGINAL_MEDIA_TYPE_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(5, originalMediaType_);
}
if (recordingDeviceType_ != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.RECORDING_DEVICE_TYPE_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(6, recordingDeviceType_);
}
if (!getRecordingDeviceNameBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, recordingDeviceName_);
}
if (!getOriginalMimeTypeBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, originalMimeType_);
}
if (obfuscatedId_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(9, obfuscatedId_);
}
if (!getAudioTopicBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, audioTopic_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Field-by-field equality: two messages are equal when every declared field
 * and the preserved unknown-field set compare equal.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.speech.v1p1beta1.RecognitionMetadata)) {
    return super.equals(obj);
  }
  com.google.cloud.speech.v1p1beta1.RecognitionMetadata other =
      (com.google.cloud.speech.v1p1beta1.RecognitionMetadata) obj;
  // Enum fields compare by raw wire value, so UNRECOGNIZED values still compare correctly.
  return interactionType_ == other.interactionType_
      && getIndustryNaicsCodeOfAudio() == other.getIndustryNaicsCodeOfAudio()
      && microphoneDistance_ == other.microphoneDistance_
      && originalMediaType_ == other.originalMediaType_
      && recordingDeviceType_ == other.recordingDeviceType_
      && getRecordingDeviceName().equals(other.getRecordingDeviceName())
      && getOriginalMimeType().equals(other.getOriginalMimeType())
      && getObfuscatedId() == other.getObfuscatedId()
      && getAudioTopic().equals(other.getAudioTopic())
      && unknownFields.equals(other.unknownFields);
}
/**
 * Hash over every field (consistent with equals), memoized in {@code memoizedHashCode}.
 * The 37/53 multiplier pattern is the standard protobuf-generated scheme; do not alter
 * the constants, since serialized hashes may be compared across generated classes.
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + INTERACTION_TYPE_FIELD_NUMBER;
hash = (53 * hash) + interactionType_;
hash = (37 * hash) + INDUSTRY_NAICS_CODE_OF_AUDIO_FIELD_NUMBER;
hash = (53 * hash) + getIndustryNaicsCodeOfAudio();
hash = (37 * hash) + MICROPHONE_DISTANCE_FIELD_NUMBER;
hash = (53 * hash) + microphoneDistance_;
hash = (37 * hash) + ORIGINAL_MEDIA_TYPE_FIELD_NUMBER;
hash = (53 * hash) + originalMediaType_;
hash = (37 * hash) + RECORDING_DEVICE_TYPE_FIELD_NUMBER;
hash = (53 * hash) + recordingDeviceType_;
hash = (37 * hash) + RECORDING_DEVICE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getRecordingDeviceName().hashCode();
hash = (37 * hash) + ORIGINAL_MIME_TYPE_FIELD_NUMBER;
hash = (53 * hash) + getOriginalMimeType().hashCode();
hash = (37 * hash) + OBFUSCATED_ID_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getObfuscatedId());
hash = (37 * hash) + AUDIO_TOPIC_FIELD_NUMBER;
hash = (53 * hash) + getAudioTopic().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
/** Parses a {@code RecognitionMetadata} from a ByteBuffer; throws on malformed input. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
/** Parses a {@code RecognitionMetadata} from a ByteBuffer using the given extension registry. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
/** Parses a {@code RecognitionMetadata} from a ByteString; throws on malformed input. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
/** Parses a {@code RecognitionMetadata} from a ByteString using the given extension registry. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
/** Parses a {@code RecognitionMetadata} from a byte array; throws on malformed input. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
/** Parses a {@code RecognitionMetadata} from a byte array using the given extension registry. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
/** Parses a {@code RecognitionMetadata} from an InputStream; wraps parse failures as IOException. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
/** Parses a {@code RecognitionMetadata} from an InputStream using the given extension registry. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
/** Parses one length-delimited {@code RecognitionMetadata} message from the stream. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
/** Parses one length-delimited {@code RecognitionMetadata} using the given extension registry. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
/** Parses a {@code RecognitionMetadata} from an already-wrapped CodedInputStream. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
/** Parses a {@code RecognitionMetadata} from a CodedInputStream using the given extension registry. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
/** Creates a fresh Builder for this message type (required Message override). */
public Builder newBuilderForType() { return newBuilder(); }
/** Returns a new Builder initialized with default (empty) field values. */
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
/** Returns a new Builder pre-populated by merging {@code prototype}'s fields. */
public static Builder newBuilder(com.google.cloud.speech.v1p1beta1.RecognitionMetadata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
/** Converts this message back to a Builder; the default instance yields an empty Builder. */
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
// Parent lets the runtime propagate change notifications up the builder tree.
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Description of audio data to be recognized.
*
*
* Protobuf type {@code google.cloud.speech.v1p1beta1.RecognitionMetadata}
*/
// NOTE(review): protoc-generated builder — change the .proto and regenerate
// rather than hand-editing this class.
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:google.cloud.speech.v1p1beta1.RecognitionMetadata)
com.google.cloud.speech.v1p1beta1.RecognitionMetadataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.speech.v1p1beta1.SpeechProto.internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.speech.v1p1beta1.SpeechProto.internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.speech.v1p1beta1.RecognitionMetadata.class, com.google.cloud.speech.v1p1beta1.RecognitionMetadata.Builder.class);
}
// Construct using com.google.cloud.speech.v1p1beta1.RecognitionMetadata.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Body intentionally empty: this message has no nested-message fields,
// so there are no sub-field builders to eagerly initialize.
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its proto3 default (0 for enums/ints, 0L, "").
public Builder clear() {
super.clear();
interactionType_ = 0;
industryNaicsCodeOfAudio_ = 0;
microphoneDistance_ = 0;
originalMediaType_ = 0;
recordingDeviceType_ = 0;
recordingDeviceName_ = "";
originalMimeType_ = "";
obfuscatedId_ = 0L;
audioTopic_ = "";
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.cloud.speech.v1p1beta1.SpeechProto.internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_descriptor;
}
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata getDefaultInstanceForType() {
return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDefaultInstance();
}
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata build() {
com.google.cloud.speech.v1p1beta1.RecognitionMetadata result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder's current field values into a new immutable message.
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata buildPartial() {
com.google.cloud.speech.v1p1beta1.RecognitionMetadata result = new com.google.cloud.speech.v1p1beta1.RecognitionMetadata(this);
result.interactionType_ = interactionType_;
result.industryNaicsCodeOfAudio_ = industryNaicsCodeOfAudio_;
result.microphoneDistance_ = microphoneDistance_;
result.originalMediaType_ = originalMediaType_;
result.recordingDeviceType_ = recordingDeviceType_;
result.recordingDeviceName_ = recordingDeviceName_;
result.originalMimeType_ = originalMimeType_;
result.obfuscatedId_ = obfuscatedId_;
result.audioTopic_ = audioTopic_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.speech.v1p1beta1.RecognitionMetadata) {
return mergeFrom((com.google.cloud.speech.v1p1beta1.RecognitionMetadata)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Proto3 merge semantics: only fields whose value in 'other' differs from the
// default (non-zero / non-empty) overwrite this builder's fields.
public Builder mergeFrom(com.google.cloud.speech.v1p1beta1.RecognitionMetadata other) {
if (other == com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDefaultInstance()) return this;
if (other.interactionType_ != 0) {
setInteractionTypeValue(other.getInteractionTypeValue());
}
if (other.getIndustryNaicsCodeOfAudio() != 0) {
setIndustryNaicsCodeOfAudio(other.getIndustryNaicsCodeOfAudio());
}
if (other.microphoneDistance_ != 0) {
setMicrophoneDistanceValue(other.getMicrophoneDistanceValue());
}
if (other.originalMediaType_ != 0) {
setOriginalMediaTypeValue(other.getOriginalMediaTypeValue());
}
if (other.recordingDeviceType_ != 0) {
setRecordingDeviceTypeValue(other.getRecordingDeviceTypeValue());
}
if (!other.getRecordingDeviceName().isEmpty()) {
recordingDeviceName_ = other.recordingDeviceName_;
onChanged();
}
if (!other.getOriginalMimeType().isEmpty()) {
originalMimeType_ = other.originalMimeType_;
onChanged();
}
if (other.getObfuscatedId() != 0L) {
setObfuscatedId(other.getObfuscatedId());
}
if (!other.getAudioTopic().isEmpty()) {
audioTopic_ = other.audioTopic_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
// No required fields in proto3, so any state is valid.
return true;
}
// Parses from the stream; on failure, whatever was successfully parsed is
// still merged in (see the finally block) before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.speech.v1p1beta1.RecognitionMetadata parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.speech.v1p1beta1.RecognitionMetadata) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int interactionType_ = 0;
/**
*
* The use case most closely describing the audio content to be recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
*/
public int getInteractionTypeValue() {
return interactionType_;
}
/**
*
* The use case most closely describing the audio content to be recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
*/
public Builder setInteractionTypeValue(int value) {
interactionType_ = value;
onChanged();
return this;
}
/**
*
* The use case most closely describing the audio content to be recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
*/
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType getInteractionType() {
// Unknown wire values map to UNRECOGNIZED rather than null.
com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType result = com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.valueOf(interactionType_);
return result == null ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.UNRECOGNIZED : result;
}
/**
*
* The use case most closely describing the audio content to be recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
*/
public Builder setInteractionType(com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType value) {
if (value == null) {
throw new NullPointerException();
}
interactionType_ = value.getNumber();
onChanged();
return this;
}
/**
*
* The use case most closely describing the audio content to be recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
*/
public Builder clearInteractionType() {
interactionType_ = 0;
onChanged();
return this;
}
private int industryNaicsCodeOfAudio_ ;
/**
*
* The industry vertical to which this speech recognition request most
* closely applies. This is most indicative of the topics contained
* in the audio. Use the 6-digit NAICS code to identify the industry
* vertical - see https://www.naics.com/search/.
*
*
* uint32 industry_naics_code_of_audio = 3;
*/
public int getIndustryNaicsCodeOfAudio() {
return industryNaicsCodeOfAudio_;
}
/**
*
* The industry vertical to which this speech recognition request most
* closely applies. This is most indicative of the topics contained
* in the audio. Use the 6-digit NAICS code to identify the industry
* vertical - see https://www.naics.com/search/.
*
*
* uint32 industry_naics_code_of_audio = 3;
*/
public Builder setIndustryNaicsCodeOfAudio(int value) {
industryNaicsCodeOfAudio_ = value;
onChanged();
return this;
}
/**
*
* The industry vertical to which this speech recognition request most
* closely applies. This is most indicative of the topics contained
* in the audio. Use the 6-digit NAICS code to identify the industry
* vertical - see https://www.naics.com/search/.
*
*
* uint32 industry_naics_code_of_audio = 3;
*/
public Builder clearIndustryNaicsCodeOfAudio() {
industryNaicsCodeOfAudio_ = 0;
onChanged();
return this;
}
private int microphoneDistance_ = 0;
/**
*
* The audio type that most closely describes the audio being recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
*/
public int getMicrophoneDistanceValue() {
return microphoneDistance_;
}
/**
*
* The audio type that most closely describes the audio being recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
*/
public Builder setMicrophoneDistanceValue(int value) {
microphoneDistance_ = value;
onChanged();
return this;
}
/**
*
* The audio type that most closely describes the audio being recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
*/
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance getMicrophoneDistance() {
// Unknown wire values map to UNRECOGNIZED rather than null.
com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance result = com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.valueOf(microphoneDistance_);
return result == null ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.UNRECOGNIZED : result;
}
/**
*
* The audio type that most closely describes the audio being recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
*/
public Builder setMicrophoneDistance(com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance value) {
if (value == null) {
throw new NullPointerException();
}
microphoneDistance_ = value.getNumber();
onChanged();
return this;
}
/**
*
* The audio type that most closely describes the audio being recognized.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
*/
public Builder clearMicrophoneDistance() {
microphoneDistance_ = 0;
onChanged();
return this;
}
private int originalMediaType_ = 0;
/**
*
* The original media the speech was recorded on.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
*/
public int getOriginalMediaTypeValue() {
return originalMediaType_;
}
/**
*
* The original media the speech was recorded on.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
*/
public Builder setOriginalMediaTypeValue(int value) {
originalMediaType_ = value;
onChanged();
return this;
}
/**
*
* The original media the speech was recorded on.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
*/
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType getOriginalMediaType() {
// Unknown wire values map to UNRECOGNIZED rather than null.
com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType result = com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.valueOf(originalMediaType_);
return result == null ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.UNRECOGNIZED : result;
}
/**
*
* The original media the speech was recorded on.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
*/
public Builder setOriginalMediaType(com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType value) {
if (value == null) {
throw new NullPointerException();
}
originalMediaType_ = value.getNumber();
onChanged();
return this;
}
/**
*
* The original media the speech was recorded on.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
*/
public Builder clearOriginalMediaType() {
originalMediaType_ = 0;
onChanged();
return this;
}
private int recordingDeviceType_ = 0;
/**
*
* The type of device the speech was recorded with.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
*/
public int getRecordingDeviceTypeValue() {
return recordingDeviceType_;
}
/**
*
* The type of device the speech was recorded with.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
*/
public Builder setRecordingDeviceTypeValue(int value) {
recordingDeviceType_ = value;
onChanged();
return this;
}
/**
*
* The type of device the speech was recorded with.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
*/
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType getRecordingDeviceType() {
// Unknown wire values map to UNRECOGNIZED rather than null.
com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType result = com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.valueOf(recordingDeviceType_);
return result == null ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.UNRECOGNIZED : result;
}
/**
*
* The type of device the speech was recorded with.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
*/
public Builder setRecordingDeviceType(com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType value) {
if (value == null) {
throw new NullPointerException();
}
recordingDeviceType_ = value.getNumber();
onChanged();
return this;
}
/**
*
* The type of device the speech was recorded with.
*
*
* .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
*/
public Builder clearRecordingDeviceType() {
recordingDeviceType_ = 0;
onChanged();
return this;
}
private java.lang.Object recordingDeviceName_ = "";
/**
*
* The device used to make the recording. Examples 'Nexus 5X' or
* 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
* 'Cardioid Microphone'.
*
*
* string recording_device_name = 7;
*/
public java.lang.String getRecordingDeviceName() {
// Field may hold either a String or a ByteString; decode lazily and
// cache the String form back into the field.
java.lang.Object ref = recordingDeviceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
recordingDeviceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* The device used to make the recording. Examples 'Nexus 5X' or
* 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
* 'Cardioid Microphone'.
*
*
* string recording_device_name = 7;
*/
public com.google.protobuf.ByteString
getRecordingDeviceNameBytes() {
java.lang.Object ref = recordingDeviceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
recordingDeviceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
* The device used to make the recording. Examples 'Nexus 5X' or
* 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
* 'Cardioid Microphone'.
*
*
* string recording_device_name = 7;
*/
public Builder setRecordingDeviceName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
recordingDeviceName_ = value;
onChanged();
return this;
}
/**
*
* The device used to make the recording. Examples 'Nexus 5X' or
* 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
* 'Cardioid Microphone'.
*
*
* string recording_device_name = 7;
*/
public Builder clearRecordingDeviceName() {
recordingDeviceName_ = getDefaultInstance().getRecordingDeviceName();
onChanged();
return this;
}
/**
*
* The device used to make the recording. Examples 'Nexus 5X' or
* 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or
* 'Cardioid Microphone'.
*
*
* string recording_device_name = 7;
*/
public Builder setRecordingDeviceNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
recordingDeviceName_ = value;
onChanged();
return this;
}
private java.lang.Object originalMimeType_ = "";
/**
*
* Mime type of the original audio file. For example `audio/m4a`,
* `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
* A list of possible audio mime types is maintained at
* http://www.iana.org/assignments/media-types/media-types.xhtml#audio
*
*
* string original_mime_type = 8;
*/
public java.lang.String getOriginalMimeType() {
// Lazy ByteString-to-String decode with caching, as above.
java.lang.Object ref = originalMimeType_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
originalMimeType_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* Mime type of the original audio file. For example `audio/m4a`,
* `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
* A list of possible audio mime types is maintained at
* http://www.iana.org/assignments/media-types/media-types.xhtml#audio
*
*
* string original_mime_type = 8;
*/
public com.google.protobuf.ByteString
getOriginalMimeTypeBytes() {
java.lang.Object ref = originalMimeType_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
originalMimeType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
* Mime type of the original audio file. For example `audio/m4a`,
* `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
* A list of possible audio mime types is maintained at
* http://www.iana.org/assignments/media-types/media-types.xhtml#audio
*
*
* string original_mime_type = 8;
*/
public Builder setOriginalMimeType(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
originalMimeType_ = value;
onChanged();
return this;
}
/**
*
* Mime type of the original audio file. For example `audio/m4a`,
* `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
* A list of possible audio mime types is maintained at
* http://www.iana.org/assignments/media-types/media-types.xhtml#audio
*
*
* string original_mime_type = 8;
*/
public Builder clearOriginalMimeType() {
originalMimeType_ = getDefaultInstance().getOriginalMimeType();
onChanged();
return this;
}
/**
*
* Mime type of the original audio file. For example `audio/m4a`,
* `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
* A list of possible audio mime types is maintained at
* http://www.iana.org/assignments/media-types/media-types.xhtml#audio
*
*
* string original_mime_type = 8;
*/
public Builder setOriginalMimeTypeBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
originalMimeType_ = value;
onChanged();
return this;
}
private long obfuscatedId_ ;
/**
*
* Obfuscated (privacy-protected) ID of the user, to identify number of
* unique users using the service.
*
*
* int64 obfuscated_id = 9;
*/
public long getObfuscatedId() {
return obfuscatedId_;
}
/**
*
* Obfuscated (privacy-protected) ID of the user, to identify number of
* unique users using the service.
*
*
* int64 obfuscated_id = 9;
*/
public Builder setObfuscatedId(long value) {
obfuscatedId_ = value;
onChanged();
return this;
}
/**
*
* Obfuscated (privacy-protected) ID of the user, to identify number of
* unique users using the service.
*
*
* int64 obfuscated_id = 9;
*/
public Builder clearObfuscatedId() {
obfuscatedId_ = 0L;
onChanged();
return this;
}
private java.lang.Object audioTopic_ = "";
/**
*
* Description of the content. Eg. "Recordings of federal supreme court
* hearings from 2012".
*
*
* string audio_topic = 10;
*/
public java.lang.String getAudioTopic() {
// Lazy ByteString-to-String decode with caching, as above.
java.lang.Object ref = audioTopic_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
audioTopic_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
* Description of the content. Eg. "Recordings of federal supreme court
* hearings from 2012".
*
*
* string audio_topic = 10;
*/
public com.google.protobuf.ByteString
getAudioTopicBytes() {
java.lang.Object ref = audioTopic_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
audioTopic_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
* Description of the content. Eg. "Recordings of federal supreme court
* hearings from 2012".
*
*
* string audio_topic = 10;
*/
public Builder setAudioTopic(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
audioTopic_ = value;
onChanged();
return this;
}
/**
*
* Description of the content. Eg. "Recordings of federal supreme court
* hearings from 2012".
*
*
* string audio_topic = 10;
*/
public Builder clearAudioTopic() {
audioTopic_ = getDefaultInstance().getAudioTopic();
onChanged();
return this;
}
/**
*
* Description of the content. Eg. "Recordings of federal supreme court
* hearings from 2012".
*
*
* string audio_topic = 10;
*/
public Builder setAudioTopicBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
audioTopic_ = value;
onChanged();
return this;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata)
}
// @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata)
// Shared singleton holding every field at its proto3 default value.
private static final com.google.cloud.speech.v1p1beta1.RecognitionMetadata DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.speech.v1p1beta1.RecognitionMetadata();
}
/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser that delegates to the stream-reading constructor above.
private static final com.google.protobuf.Parser
PARSER = new com.google.protobuf.AbstractParser() {
public RecognitionMetadata parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RecognitionMetadata(input, extensionRegistry);
}
};
/** Static accessor for the shared parser instance. */
public static com.google.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser getParserForType() {
return PARSER;
}
/** Instance-level accessor for the shared default instance (Message contract). */
public com.google.cloud.speech.v1p1beta1.RecognitionMetadata getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}