/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/video/livestream/v1/outputs.proto
// Protobuf Java Version: 3.25.5
package com.google.cloud.video.livestream.v1;
/**
*
*
*
* Audio stream resource.
*
*
* Protobuf type {@code google.cloud.video.livestream.v1.AudioStream}
*/
public final class AudioStream extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.video.livestream.v1.AudioStream)
AudioStreamOrBuilder {
private static final long serialVersionUID = 0L;
// Use AudioStream.newBuilder() to construct.
private AudioStream(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AudioStream() {
codec_ = "";
channelLayout_ = com.google.protobuf.LazyStringArrayList.emptyList();
mapping_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new AudioStream();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.livestream.v1.AudioStream.class,
com.google.cloud.video.livestream.v1.AudioStream.Builder.class);
}
public interface AudioMappingOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.video.livestream.v1.AudioStream.AudioMapping)
com.google.protobuf.MessageOrBuilder {
/**
*
*
*
* Required. The `Channel`
* [InputAttachment.key][google.cloud.video.livestream.v1.InputAttachment.key]
* that identifies the input that this audio mapping applies to. If an
* active input doesn't have an audio mapping, the primary audio track in
* the input stream will be selected.
*
*
* string input_key = 6 [(.google.api.field_behavior) = REQUIRED];
*
* @return The inputKey.
*/
java.lang.String getInputKey();
/**
*
*
*
* Required. The `Channel`
* [InputAttachment.key][google.cloud.video.livestream.v1.InputAttachment.key]
* that identifies the input that this audio mapping applies to. If an
* active input doesn't have an audio mapping, the primary audio track in
* the input stream will be selected.
*
*
* string input_key = 6 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bytes for inputKey.
*/
com.google.protobuf.ByteString getInputKeyBytes();
/**
*
*
*
* Required. The zero-based index of the track in the input stream.
* All [mapping][google.cloud.video.livestream.v1.AudioStream.mapping]s in
* the same [AudioStream][google.cloud.video.livestream.v1.AudioStream] must
* have the same input track.
*
*
* int32 input_track = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The inputTrack.
*/
int getInputTrack();
/**
*
*
*
* Required. The zero-based index of the channel in the input stream.
*
*
* int32 input_channel = 3 [(.google.api.field_behavior) = REQUIRED];
*
* @return The inputChannel.
*/
int getInputChannel();
/**
*
*
*
* Required. The zero-based index of the channel in the output audio stream.
* Must be consistent with the
* [input_channel][google.cloud.video.livestream.v1.AudioStream.AudioMapping.input_channel].
*
*
* int32 output_channel = 4 [(.google.api.field_behavior) = REQUIRED];
*
* @return The outputChannel.
*/
int getOutputChannel();
/**
*
*
*
* Audio volume control in dB. Negative values decrease volume,
* positive values increase. The default is 0.
*
*
* double gain_db = 5;
*
* @return The gainDb.
*/
double getGainDb();
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* Protobuf type {@code google.cloud.video.livestream.v1.AudioStream.AudioMapping}
*/
public static final class AudioMapping extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.video.livestream.v1.AudioStream.AudioMapping)
AudioMappingOrBuilder {
private static final long serialVersionUID = 0L;
// Use AudioMapping.newBuilder() to construct.
private AudioMapping(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AudioMapping() {
inputKey_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new AudioMapping();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_AudioMapping_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_AudioMapping_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.class,
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder.class);
}
public static final int INPUT_KEY_FIELD_NUMBER = 6;
@SuppressWarnings("serial")
private volatile java.lang.Object inputKey_ = "";
/**
*
*
*
* Required. The `Channel`
* [InputAttachment.key][google.cloud.video.livestream.v1.InputAttachment.key]
* that identifies the input that this audio mapping applies to. If an
* active input doesn't have an audio mapping, the primary audio track in
* the input stream will be selected.
*
*
* string input_key = 6 [(.google.api.field_behavior) = REQUIRED];
*
* @return The inputKey.
*/
@java.lang.Override
public java.lang.String getInputKey() {
java.lang.Object ref = inputKey_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
inputKey_ = s;
return s;
}
}
/**
*
*
*
* Required. The `Channel`
* [InputAttachment.key][google.cloud.video.livestream.v1.InputAttachment.key]
* that identifies the input that this audio mapping applies to. If an
* active input doesn't have an audio mapping, the primary audio track in
* the input stream will be selected.
*
*
* string input_key = 6 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bytes for inputKey.
*/
@java.lang.Override
public com.google.protobuf.ByteString getInputKeyBytes() {
java.lang.Object ref = inputKey_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
inputKey_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INPUT_TRACK_FIELD_NUMBER = 2;
private int inputTrack_ = 0;
/**
*
*
*
* Required. The zero-based index of the track in the input stream.
* All [mapping][google.cloud.video.livestream.v1.AudioStream.mapping]s in
* the same [AudioStream][google.cloud.video.livestream.v1.AudioStream] must
* have the same input track.
*
*
* int32 input_track = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The inputTrack.
*/
@java.lang.Override
public int getInputTrack() {
return inputTrack_;
}
public static final int INPUT_CHANNEL_FIELD_NUMBER = 3;
private int inputChannel_ = 0;
/**
*
*
*
* Required. The zero-based index of the channel in the input stream.
*
*
* int32 input_channel = 3 [(.google.api.field_behavior) = REQUIRED];
*
* @return The inputChannel.
*/
@java.lang.Override
public int getInputChannel() {
return inputChannel_;
}
public static final int OUTPUT_CHANNEL_FIELD_NUMBER = 4;
private int outputChannel_ = 0;
/**
*
*
*
* Required. The zero-based index of the channel in the output audio stream.
* Must be consistent with the
* [input_channel][google.cloud.video.livestream.v1.AudioStream.AudioMapping.input_channel].
*
*
* int32 output_channel = 4 [(.google.api.field_behavior) = REQUIRED];
*
* @return The outputChannel.
*/
@java.lang.Override
public int getOutputChannel() {
return outputChannel_;
}
public static final int GAIN_DB_FIELD_NUMBER = 5;
private double gainDb_ = 0D;
/**
*
*
*
* Audio volume control in dB. Negative values decrease volume,
* positive values increase. The default is 0.
*
*
* double gain_db = 5;
*
* @return The gainDb.
*/
@java.lang.Override
public double getGainDb() {
return gainDb_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (inputTrack_ != 0) {
output.writeInt32(2, inputTrack_);
}
if (inputChannel_ != 0) {
output.writeInt32(3, inputChannel_);
}
if (outputChannel_ != 0) {
output.writeInt32(4, outputChannel_);
}
if (java.lang.Double.doubleToRawLongBits(gainDb_) != 0) {
output.writeDouble(5, gainDb_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputKey_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 6, inputKey_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (inputTrack_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, inputTrack_);
}
if (inputChannel_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, inputChannel_);
}
if (outputChannel_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, outputChannel_);
}
if (java.lang.Double.doubleToRawLongBits(gainDb_) != 0) {
size += com.google.protobuf.CodedOutputStream.computeDoubleSize(5, gainDb_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputKey_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, inputKey_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.video.livestream.v1.AudioStream.AudioMapping)) {
return super.equals(obj);
}
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping other =
(com.google.cloud.video.livestream.v1.AudioStream.AudioMapping) obj;
if (!getInputKey().equals(other.getInputKey())) return false;
if (getInputTrack() != other.getInputTrack()) return false;
if (getInputChannel() != other.getInputChannel()) return false;
if (getOutputChannel() != other.getOutputChannel()) return false;
if (java.lang.Double.doubleToLongBits(getGainDb())
!= java.lang.Double.doubleToLongBits(other.getGainDb())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + INPUT_KEY_FIELD_NUMBER;
hash = (53 * hash) + getInputKey().hashCode();
hash = (37 * hash) + INPUT_TRACK_FIELD_NUMBER;
hash = (53 * hash) + getInputTrack();
hash = (37 * hash) + INPUT_CHANNEL_FIELD_NUMBER;
hash = (53 * hash) + getInputChannel();
hash = (37 * hash) + OUTPUT_CHANNEL_FIELD_NUMBER;
hash = (53 * hash) + getOutputChannel();
hash = (37 * hash) + GAIN_DB_FIELD_NUMBER;
hash =
(53 * hash)
+ com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getGainDb()));
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* Protobuf type {@code google.cloud.video.livestream.v1.AudioStream.AudioMapping}
*/
public static final class Builder
extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.video.livestream.v1.AudioStream.AudioMapping)
com.google.cloud.video.livestream.v1.AudioStream.AudioMappingOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_AudioMapping_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_AudioMapping_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.class,
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder.class);
}
// Construct using com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
inputKey_ = "";
inputTrack_ = 0;
inputChannel_ = 0;
outputChannel_ = 0;
gainDb_ = 0D;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_AudioMapping_descriptor;
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream.AudioMapping
getDefaultInstanceForType() {
return com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream.AudioMapping build() {
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream.AudioMapping buildPartial() {
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping result =
new com.google.cloud.video.livestream.v1.AudioStream.AudioMapping(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.inputKey_ = inputKey_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.inputTrack_ = inputTrack_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.inputChannel_ = inputChannel_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.outputChannel_ = outputChannel_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
result.gainDb_ = gainDb_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index,
java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.video.livestream.v1.AudioStream.AudioMapping) {
return mergeFrom((com.google.cloud.video.livestream.v1.AudioStream.AudioMapping) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping other) {
if (other
== com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.getDefaultInstance())
return this;
if (!other.getInputKey().isEmpty()) {
inputKey_ = other.inputKey_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getInputTrack() != 0) {
setInputTrack(other.getInputTrack());
}
if (other.getInputChannel() != 0) {
setInputChannel(other.getInputChannel());
}
if (other.getOutputChannel() != 0) {
setOutputChannel(other.getOutputChannel());
}
if (other.getGainDb() != 0D) {
setGainDb(other.getGainDb());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 16:
{
inputTrack_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 24:
{
inputChannel_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 32:
{
outputChannel_ = input.readInt32();
bitField0_ |= 0x00000008;
break;
} // case 32
case 41:
{
gainDb_ = input.readDouble();
bitField0_ |= 0x00000010;
break;
} // case 41
case 50:
{
inputKey_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 50
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object inputKey_ = "";
/**
*
*
*
* Required. The `Channel`
* [InputAttachment.key][google.cloud.video.livestream.v1.InputAttachment.key]
* that identifies the input that this audio mapping applies to. If an
* active input doesn't have an audio mapping, the primary audio track in
* the input stream will be selected.
*
*
* string input_key = 6 [(.google.api.field_behavior) = REQUIRED];
*
* @return The inputKey.
*/
public java.lang.String getInputKey() {
java.lang.Object ref = inputKey_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
inputKey_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
*
* Required. The `Channel`
* [InputAttachment.key][google.cloud.video.livestream.v1.InputAttachment.key]
* that identifies the input that this audio mapping applies to. If an
* active input doesn't have an audio mapping, the primary audio track in
* the input stream will be selected.
*
*
* string input_key = 6 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bytes for inputKey.
*/
public com.google.protobuf.ByteString getInputKeyBytes() {
java.lang.Object ref = inputKey_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
inputKey_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
*
* Required. The `Channel`
* [InputAttachment.key][google.cloud.video.livestream.v1.InputAttachment.key]
* that identifies the input that this audio mapping applies to. If an
* active input doesn't have an audio mapping, the primary audio track in
* the input stream will be selected.
*
*
* string input_key = 6 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The inputKey to set.
* @return This builder for chaining.
*/
public Builder setInputKey(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
inputKey_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
*
* Required. The `Channel`
* [InputAttachment.key][google.cloud.video.livestream.v1.InputAttachment.key]
* that identifies the input that this audio mapping applies to. If an
* active input doesn't have an audio mapping, the primary audio track in
* the input stream will be selected.
*
*
* string input_key = 6 [(.google.api.field_behavior) = REQUIRED];
*
* @return This builder for chaining.
*/
public Builder clearInputKey() {
inputKey_ = getDefaultInstance().getInputKey();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
*
* Required. The `Channel`
* [InputAttachment.key][google.cloud.video.livestream.v1.InputAttachment.key]
* that identifies the input that this audio mapping applies to. If an
* active input doesn't have an audio mapping, the primary audio track in
* the input stream will be selected.
*
*
* string input_key = 6 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The bytes for inputKey to set.
* @return This builder for chaining.
*/
public Builder setInputKeyBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
inputKey_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int inputTrack_;
/**
*
*
*
* Required. The zero-based index of the track in the input stream.
* All [mapping][google.cloud.video.livestream.v1.AudioStream.mapping]s in
* the same [AudioStream][google.cloud.video.livestream.v1.AudioStream] must
* have the same input track.
*
*
* int32 input_track = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The inputTrack.
*/
@java.lang.Override
public int getInputTrack() {
return inputTrack_;
}
/**
*
*
*
* Required. The zero-based index of the track in the input stream.
* All [mapping][google.cloud.video.livestream.v1.AudioStream.mapping]s in
* the same [AudioStream][google.cloud.video.livestream.v1.AudioStream] must
* have the same input track.
*
*
* int32 input_track = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The inputTrack to set.
* @return This builder for chaining.
*/
public Builder setInputTrack(int value) {
inputTrack_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
*
* Required. The zero-based index of the track in the input stream.
* All [mapping][google.cloud.video.livestream.v1.AudioStream.mapping]s in
* the same [AudioStream][google.cloud.video.livestream.v1.AudioStream] must
* have the same input track.
*
*
* int32 input_track = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return This builder for chaining.
*/
public Builder clearInputTrack() {
bitField0_ = (bitField0_ & ~0x00000002);
inputTrack_ = 0;
onChanged();
return this;
}
private int inputChannel_;
/**
*
*
*
* Required. The zero-based index of the channel in the input stream.
*
*
* int32 input_channel = 3 [(.google.api.field_behavior) = REQUIRED];
*
* @return The inputChannel.
*/
@java.lang.Override
public int getInputChannel() {
return inputChannel_;
}
/**
*
*
*
* Required. The zero-based index of the channel in the input stream.
*
*
* int32 input_channel = 3 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The inputChannel to set.
* @return This builder for chaining.
*/
public Builder setInputChannel(int value) {
inputChannel_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
*
* Required. The zero-based index of the channel in the input stream.
*
*
* int32 input_channel = 3 [(.google.api.field_behavior) = REQUIRED];
*
* @return This builder for chaining.
*/
public Builder clearInputChannel() {
bitField0_ = (bitField0_ & ~0x00000004);
inputChannel_ = 0;
onChanged();
return this;
}
private int outputChannel_;
/**
*
*
*
* Required. The zero-based index of the channel in the output audio stream.
* Must be consistent with the
* [input_channel][google.cloud.video.livestream.v1.AudioStream.AudioMapping.input_channel].
*
*
* int32 output_channel = 4 [(.google.api.field_behavior) = REQUIRED];
*
* @return The outputChannel.
*/
@java.lang.Override
public int getOutputChannel() {
return outputChannel_;
}
/**
*
*
*
* Required. The zero-based index of the channel in the output audio stream.
* Must be consistent with the
* [input_channel][google.cloud.video.livestream.v1.AudioStream.AudioMapping.input_channel].
*
*
* int32 output_channel = 4 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The outputChannel to set.
* @return This builder for chaining.
*/
public Builder setOutputChannel(int value) {
outputChannel_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
*
* Required. The zero-based index of the channel in the output audio stream.
* Must be consistent with the
* [input_channel][google.cloud.video.livestream.v1.AudioStream.AudioMapping.input_channel].
*
*
* int32 output_channel = 4 [(.google.api.field_behavior) = REQUIRED];
*
* @return This builder for chaining.
*/
public Builder clearOutputChannel() {
bitField0_ = (bitField0_ & ~0x00000008);
outputChannel_ = 0;
onChanged();
return this;
}
private double gainDb_;
/**
*
*
*
* Audio volume control in dB. Negative values decrease volume,
* positive values increase. The default is 0.
*
*
* double gain_db = 5;
*
* @return The gainDb.
*/
@java.lang.Override
public double getGainDb() {
return gainDb_;
}
/**
*
*
*
* Audio volume control in dB. Negative values decrease volume,
* positive values increase. The default is 0.
*
*
* double gain_db = 5;
*
* @param value The gainDb to set.
* @return This builder for chaining.
*/
public Builder setGainDb(double value) {
gainDb_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
*
* Audio volume control in dB. Negative values decrease volume,
* positive values increase. The default is 0.
*
*
* double gain_db = 5;
*
* @return This builder for chaining.
*/
public Builder clearGainDb() {
bitField0_ = (bitField0_ & ~0x00000010);
gainDb_ = 0D;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.video.livestream.v1.AudioStream.AudioMapping)
}
// @@protoc_insertion_point(class_scope:google.cloud.video.livestream.v1.AudioStream.AudioMapping)
private static final com.google.cloud.video.livestream.v1.AudioStream.AudioMapping
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.video.livestream.v1.AudioStream.AudioMapping();
}
public static com.google.cloud.video.livestream.v1.AudioStream.AudioMapping
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<AudioMapping> PARSER =
new com.google.protobuf.AbstractParser<AudioMapping>() {
@java.lang.Override
public AudioMapping parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException()
.setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<AudioMapping> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<AudioMapping> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream.AudioMapping
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
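// Illustrative sketch (editor's note, not generated code): one way to build an
// AudioMapping with the generated builder shown above. The input key and indexes
// below are hypothetical values, not defaults taken from the API.
//
//   AudioStream.AudioMapping mapping =
//       AudioStream.AudioMapping.newBuilder()
//           .setInputKey("my-input")   // hypothetical InputAttachment key
//           .setInputTrack(0)          // first audio track in the input
//           .setInputChannel(0)        // input channel index
//           .setOutputChannel(0)       // output channel index
//           .setGainDb(0)              // leave volume unchanged
//           .build();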
public static final int TRANSMUX_FIELD_NUMBER = 8;
private boolean transmux_ = false;
/**
*
*
*
* Specifies whether pass through (transmuxing) is enabled or not.
* If set to `true`, the rest of the settings, other than `mapping`, will be
* ignored. The default is `false`.
*
*
* bool transmux = 8;
*
* @return The transmux.
*/
@java.lang.Override
public boolean getTransmux() {
return transmux_;
}
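// Illustrative sketch (editor's note): enabling pass-through as described above.
// With transmux set to true, every other setting except `mapping` is ignored.
//
//   AudioStream passThrough = AudioStream.newBuilder().setTransmux(true).build();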
public static final int CODEC_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object codec_ = "";
/**
*
*
*
* The codec for this audio stream. The default is `aac`.
*
* Supported audio codecs:
*
* - `aac`
*
*
* string codec = 1;
*
* @return The codec.
*/
@java.lang.Override
public java.lang.String getCodec() {
java.lang.Object ref = codec_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
codec_ = s;
return s;
}
}
/**
*
*
*
* The codec for this audio stream. The default is `aac`.
*
* Supported audio codecs:
*
* - `aac`
*
*
* string codec = 1;
*
* @return The bytes for codec.
*/
@java.lang.Override
public com.google.protobuf.ByteString getCodecBytes() {
java.lang.Object ref = codec_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
codec_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BITRATE_BPS_FIELD_NUMBER = 2;
private int bitrateBps_ = 0;
/**
*
*
*
* Required. Audio bitrate in bits per second. Must be between 1 and
* 10,000,000.
*
*
* int32 bitrate_bps = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bitrateBps.
*/
@java.lang.Override
public int getBitrateBps() {
return bitrateBps_;
}
public static final int CHANNEL_COUNT_FIELD_NUMBER = 3;
private int channelCount_ = 0;
/**
*
*
*
* Number of audio channels. Must be between 1 and 6. The default is 2.
*
*
* int32 channel_count = 3;
*
* @return The channelCount.
*/
@java.lang.Override
public int getChannelCount() {
return channelCount_;
}
public static final int CHANNEL_LAYOUT_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList channelLayout_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @return A list containing the channelLayout.
*/
public com.google.protobuf.ProtocolStringList getChannelLayoutList() {
return channelLayout_;
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @return The count of channelLayout.
*/
public int getChannelLayoutCount() {
return channelLayout_.size();
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @param index The index of the element to return.
* @return The channelLayout at the given index.
*/
public java.lang.String getChannelLayout(int index) {
return channelLayout_.get(index);
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @param index The index of the value to return.
* @return The bytes of the channelLayout at the given index.
*/
public com.google.protobuf.ByteString getChannelLayoutBytes(int index) {
return channelLayout_.getByteString(index);
}
public static final int MAPPING_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.video.livestream.v1.AudioStream.AudioMapping> mapping_;
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
@java.lang.Override
public java.util.List<com.google.cloud.video.livestream.v1.AudioStream.AudioMapping>
getMappingList() {
return mapping_;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
@java.lang.Override
public java.util.List<
? extends com.google.cloud.video.livestream.v1.AudioStream.AudioMappingOrBuilder>
getMappingOrBuilderList() {
return mapping_;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
@java.lang.Override
public int getMappingCount() {
return mapping_.size();
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream.AudioMapping getMapping(int index) {
return mapping_.get(index);
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream.AudioMappingOrBuilder getMappingOrBuilder(
int index) {
return mapping_.get(index);
}
public static final int SAMPLE_RATE_HERTZ_FIELD_NUMBER = 6;
private int sampleRateHertz_ = 0;
/**
*
*
*
* The audio sample rate in Hertz. The default is 48000 Hertz.
*
*
* int32 sample_rate_hertz = 6;
*
* @return The sampleRateHertz.
*/
@java.lang.Override
public int getSampleRateHertz() {
return sampleRateHertz_;
}
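// Illustrative sketch (editor's note): assembling a complete AudioStream from the
// fields documented above. Values are hypothetical; addChannelLayout and addMapping
// are the generated repeated-field adders on the Builder.
//
//   AudioStream audio =
//       AudioStream.newBuilder()
//           .setCodec("aac")             // only `aac` is listed as supported
//           .setBitrateBps(128000)       // required, between 1 and 10,000,000
//           .setChannelCount(2)
//           .addChannelLayout("fl")
//           .addChannelLayout("fr")
//           .setSampleRateHertz(48000)
//           .addMapping(mapping)         // an AudioMapping built as sketched earlier
//           .build();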
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(codec_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, codec_);
}
if (bitrateBps_ != 0) {
output.writeInt32(2, bitrateBps_);
}
if (channelCount_ != 0) {
output.writeInt32(3, channelCount_);
}
for (int i = 0; i < channelLayout_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, channelLayout_.getRaw(i));
}
for (int i = 0; i < mapping_.size(); i++) {
output.writeMessage(5, mapping_.get(i));
}
if (sampleRateHertz_ != 0) {
output.writeInt32(6, sampleRateHertz_);
}
if (transmux_ != false) {
output.writeBool(8, transmux_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(codec_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, codec_);
}
if (bitrateBps_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, bitrateBps_);
}
if (channelCount_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, channelCount_);
}
{
int dataSize = 0;
for (int i = 0; i < channelLayout_.size(); i++) {
dataSize += computeStringSizeNoTag(channelLayout_.getRaw(i));
}
size += dataSize;
size += 1 * getChannelLayoutList().size();
}
for (int i = 0; i < mapping_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, mapping_.get(i));
}
if (sampleRateHertz_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(6, sampleRateHertz_);
}
if (transmux_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(8, transmux_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.video.livestream.v1.AudioStream)) {
return super.equals(obj);
}
com.google.cloud.video.livestream.v1.AudioStream other =
(com.google.cloud.video.livestream.v1.AudioStream) obj;
if (getTransmux() != other.getTransmux()) return false;
if (!getCodec().equals(other.getCodec())) return false;
if (getBitrateBps() != other.getBitrateBps()) return false;
if (getChannelCount() != other.getChannelCount()) return false;
if (!getChannelLayoutList().equals(other.getChannelLayoutList())) return false;
if (!getMappingList().equals(other.getMappingList())) return false;
if (getSampleRateHertz() != other.getSampleRateHertz()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + TRANSMUX_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getTransmux());
hash = (37 * hash) + CODEC_FIELD_NUMBER;
hash = (53 * hash) + getCodec().hashCode();
hash = (37 * hash) + BITRATE_BPS_FIELD_NUMBER;
hash = (53 * hash) + getBitrateBps();
hash = (37 * hash) + CHANNEL_COUNT_FIELD_NUMBER;
hash = (53 * hash) + getChannelCount();
if (getChannelLayoutCount() > 0) {
hash = (37 * hash) + CHANNEL_LAYOUT_FIELD_NUMBER;
hash = (53 * hash) + getChannelLayoutList().hashCode();
}
if (getMappingCount() > 0) {
hash = (37 * hash) + MAPPING_FIELD_NUMBER;
hash = (53 * hash) + getMappingList().hashCode();
}
hash = (37 * hash) + SAMPLE_RATE_HERTZ_FIELD_NUMBER;
hash = (53 * hash) + getSampleRateHertz();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.livestream.v1.AudioStream parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
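// Illustrative sketch (editor's note): a serialize/parse round trip using the
// byte[] overload above. toByteArray() is inherited from the protobuf message base
// class; `audio` is assumed to have been built as sketched earlier.
//
//   byte[] bytes = audio.toByteArray();
//   AudioStream parsed = AudioStream.parseFrom(bytes);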
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.video.livestream.v1.AudioStream prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
*
* Audio stream resource.
*
*
* Protobuf type {@code google.cloud.video.livestream.v1.AudioStream}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.video.livestream.v1.AudioStream)
com.google.cloud.video.livestream.v1.AudioStreamOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.livestream.v1.AudioStream.class,
com.google.cloud.video.livestream.v1.AudioStream.Builder.class);
}
// Construct using com.google.cloud.video.livestream.v1.AudioStream.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
transmux_ = false;
codec_ = "";
bitrateBps_ = 0;
channelCount_ = 0;
channelLayout_ = com.google.protobuf.LazyStringArrayList.emptyList();
if (mappingBuilder_ == null) {
mapping_ = java.util.Collections.emptyList();
} else {
mapping_ = null;
mappingBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000020);
sampleRateHertz_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.video.livestream.v1.OutputsProto
.internal_static_google_cloud_video_livestream_v1_AudioStream_descriptor;
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream getDefaultInstanceForType() {
return com.google.cloud.video.livestream.v1.AudioStream.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream build() {
com.google.cloud.video.livestream.v1.AudioStream result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream buildPartial() {
com.google.cloud.video.livestream.v1.AudioStream result =
new com.google.cloud.video.livestream.v1.AudioStream(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.video.livestream.v1.AudioStream result) {
if (mappingBuilder_ == null) {
if (((bitField0_ & 0x00000020) != 0)) {
mapping_ = java.util.Collections.unmodifiableList(mapping_);
bitField0_ = (bitField0_ & ~0x00000020);
}
result.mapping_ = mapping_;
} else {
result.mapping_ = mappingBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.video.livestream.v1.AudioStream result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.transmux_ = transmux_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.codec_ = codec_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.bitrateBps_ = bitrateBps_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.channelCount_ = channelCount_;
}
if (((from_bitField0_ & 0x00000010) != 0)) {
channelLayout_.makeImmutable();
result.channelLayout_ = channelLayout_;
}
if (((from_bitField0_ & 0x00000040) != 0)) {
result.sampleRateHertz_ = sampleRateHertz_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.video.livestream.v1.AudioStream) {
return mergeFrom((com.google.cloud.video.livestream.v1.AudioStream) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.video.livestream.v1.AudioStream other) {
if (other == com.google.cloud.video.livestream.v1.AudioStream.getDefaultInstance())
return this;
if (other.getTransmux() != false) {
setTransmux(other.getTransmux());
}
if (!other.getCodec().isEmpty()) {
codec_ = other.codec_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getBitrateBps() != 0) {
setBitrateBps(other.getBitrateBps());
}
if (other.getChannelCount() != 0) {
setChannelCount(other.getChannelCount());
}
if (!other.channelLayout_.isEmpty()) {
if (channelLayout_.isEmpty()) {
channelLayout_ = other.channelLayout_;
bitField0_ |= 0x00000010;
} else {
ensureChannelLayoutIsMutable();
channelLayout_.addAll(other.channelLayout_);
}
onChanged();
}
if (mappingBuilder_ == null) {
if (!other.mapping_.isEmpty()) {
if (mapping_.isEmpty()) {
mapping_ = other.mapping_;
bitField0_ = (bitField0_ & ~0x00000020);
} else {
ensureMappingIsMutable();
mapping_.addAll(other.mapping_);
}
onChanged();
}
} else {
if (!other.mapping_.isEmpty()) {
if (mappingBuilder_.isEmpty()) {
mappingBuilder_.dispose();
mappingBuilder_ = null;
mapping_ = other.mapping_;
bitField0_ = (bitField0_ & ~0x00000020);
mappingBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getMappingFieldBuilder()
: null;
} else {
mappingBuilder_.addAllMessages(other.mapping_);
}
}
}
if (other.getSampleRateHertz() != 0) {
setSampleRateHertz(other.getSampleRateHertz());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
codec_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 10
case 16:
{
bitrateBps_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 16
case 24:
{
channelCount_ = input.readInt32();
bitField0_ |= 0x00000008;
break;
} // case 24
case 34:
{
java.lang.String s = input.readStringRequireUtf8();
ensureChannelLayoutIsMutable();
channelLayout_.add(s);
break;
} // case 34
case 42:
{
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping m =
input.readMessage(
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.parser(),
extensionRegistry);
if (mappingBuilder_ == null) {
ensureMappingIsMutable();
mapping_.add(m);
} else {
mappingBuilder_.addMessage(m);
}
break;
} // case 42
case 48:
{
sampleRateHertz_ = input.readInt32();
bitField0_ |= 0x00000040;
break;
} // case 48
case 64:
{
transmux_ = input.readBool();
bitField0_ |= 0x00000001;
break;
} // case 64
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private boolean transmux_;
/**
*
*
*
* Specifies whether pass through (transmuxing) is enabled or not.
* If set to `true`, the rest of the settings, other than `mapping`, will be
* ignored. The default is `false`.
*
*
* bool transmux = 8;
*
* @return The transmux.
*/
@java.lang.Override
public boolean getTransmux() {
return transmux_;
}
/**
*
*
*
* Specifies whether pass through (transmuxing) is enabled or not.
* If set to `true`, the rest of the settings, other than `mapping`, will be
* ignored. The default is `false`.
*
*
* bool transmux = 8;
*
* @param value The transmux to set.
* @return This builder for chaining.
*/
public Builder setTransmux(boolean value) {
transmux_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
*
* Specifies whether pass through (transmuxing) is enabled or not.
* If set to `true`, the rest of the settings, other than `mapping`, will be
* ignored. The default is `false`.
*
*
* bool transmux = 8;
*
* @return This builder for chaining.
*/
public Builder clearTransmux() {
bitField0_ = (bitField0_ & ~0x00000001);
transmux_ = false;
onChanged();
return this;
}
private java.lang.Object codec_ = "";
/**
*
*
*
* The codec for this audio stream. The default is `aac`.
*
* Supported audio codecs:
*
* - `aac`
*
*
* string codec = 1;
*
* @return The codec.
*/
public java.lang.String getCodec() {
java.lang.Object ref = codec_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
codec_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
*
* The codec for this audio stream. The default is `aac`.
*
* Supported audio codecs:
*
* - `aac`
*
*
* string codec = 1;
*
* @return The bytes for codec.
*/
public com.google.protobuf.ByteString getCodecBytes() {
java.lang.Object ref = codec_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
codec_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
*
* The codec for this audio stream. The default is `aac`.
*
* Supported audio codecs:
*
* - `aac`
*
*
* string codec = 1;
*
* @param value The codec to set.
* @return This builder for chaining.
*/
public Builder setCodec(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
codec_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
*
* The codec for this audio stream. The default is `aac`.
*
* Supported audio codecs:
*
* - `aac`
*
*
* string codec = 1;
*
* @return This builder for chaining.
*/
public Builder clearCodec() {
codec_ = getDefaultInstance().getCodec();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
*
* The codec for this audio stream. The default is `aac`.
*
* Supported audio codecs:
*
* - `aac`
*
*
* string codec = 1;
*
* @param value The bytes for codec to set.
* @return This builder for chaining.
*/
public Builder setCodecBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
codec_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int bitrateBps_;
/**
*
*
*
* Required. Audio bitrate in bits per second. Must be between 1 and
* 10,000,000.
*
*
* int32 bitrate_bps = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return The bitrateBps.
*/
@java.lang.Override
public int getBitrateBps() {
return bitrateBps_;
}
/**
*
*
*
* Required. Audio bitrate in bits per second. Must be between 1 and
* 10,000,000.
*
*
* int32 bitrate_bps = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @param value The bitrateBps to set.
* @return This builder for chaining.
*/
public Builder setBitrateBps(int value) {
bitrateBps_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
*
* Required. Audio bitrate in bits per second. Must be between 1 and
* 10,000,000.
*
*
* int32 bitrate_bps = 2 [(.google.api.field_behavior) = REQUIRED];
*
* @return This builder for chaining.
*/
public Builder clearBitrateBps() {
bitField0_ = (bitField0_ & ~0x00000004);
bitrateBps_ = 0;
onChanged();
return this;
}
private int channelCount_;
/**
*
*
*
* Number of audio channels. Must be between 1 and 6. The default is 2.
*
*
* int32 channel_count = 3;
*
* @return The channelCount.
*/
@java.lang.Override
public int getChannelCount() {
return channelCount_;
}
/**
*
*
*
* Number of audio channels. Must be between 1 and 6. The default is 2.
*
*
* int32 channel_count = 3;
*
* @param value The channelCount to set.
* @return This builder for chaining.
*/
public Builder setChannelCount(int value) {
channelCount_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
*
* Number of audio channels. Must be between 1 and 6. The default is 2.
*
*
* int32 channel_count = 3;
*
* @return This builder for chaining.
*/
public Builder clearChannelCount() {
bitField0_ = (bitField0_ & ~0x00000008);
channelCount_ = 0;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList channelLayout_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureChannelLayoutIsMutable() {
if (!channelLayout_.isModifiable()) {
channelLayout_ = new com.google.protobuf.LazyStringArrayList(channelLayout_);
}
bitField0_ |= 0x00000010;
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @return A list containing the channelLayout.
*/
public com.google.protobuf.ProtocolStringList getChannelLayoutList() {
channelLayout_.makeImmutable();
return channelLayout_;
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @return The count of channelLayout.
*/
public int getChannelLayoutCount() {
return channelLayout_.size();
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @param index The index of the element to return.
* @return The channelLayout at the given index.
*/
public java.lang.String getChannelLayout(int index) {
return channelLayout_.get(index);
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @param index The index of the value to return.
* @return The bytes of the channelLayout at the given index.
*/
public com.google.protobuf.ByteString getChannelLayoutBytes(int index) {
return channelLayout_.getByteString(index);
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @param index The index to set the value at.
* @param value The channelLayout to set.
* @return This builder for chaining.
*/
public Builder setChannelLayout(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureChannelLayoutIsMutable();
channelLayout_.set(index, value);
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @param value The channelLayout to add.
* @return This builder for chaining.
*/
public Builder addChannelLayout(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureChannelLayoutIsMutable();
channelLayout_.add(value);
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @param values The channelLayout to add.
* @return This builder for chaining.
*/
public Builder addAllChannelLayout(java.lang.Iterable<java.lang.String> values) {
ensureChannelLayoutIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, channelLayout_);
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @return This builder for chaining.
*/
public Builder clearChannelLayout() {
channelLayout_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000010);
onChanged();
return this;
}
/**
*
*
*
* A list of channel names specifying layout of the audio channels.
* This only affects the metadata embedded in the container headers, if
* supported by the specified format. The default is `[fl, fr]`.
*
* Supported channel names:
*
* - `fl` - Front left channel
* - `fr` - Front right channel
* - `sl` - Side left channel
* - `sr` - Side right channel
* - `fc` - Front center channel
* - `lfe` - Low frequency
*
*
* repeated string channel_layout = 4;
*
* @param value The bytes of the channelLayout to add.
* @return This builder for chaining.
*/
public Builder addChannelLayoutBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureChannelLayoutIsMutable();
channelLayout_.add(value);
bitField0_ |= 0x00000010;
onChanged();
return this;
}
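/*
 * Illustrative sketch (not generated code): configuring a 5.1 layout with the
 * accessors above. The channel names and the 1-6 channel limit come from the
 * field documentation; the concrete values here are only an example.
 *
 *   builder
 *       .setChannelCount(6)
 *       .addAllChannelLayout(
 *           java.util.Arrays.asList("fl", "fr", "fc", "lfe", "sl", "sr"));
 */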
private java.util.List<com.google.cloud.video.livestream.v1.AudioStream.AudioMapping> mapping_ =
java.util.Collections.emptyList();
private void ensureMappingIsMutable() {
if (!((bitField0_ & 0x00000020) != 0)) {
mapping_ =
new java.util.ArrayList<com.google.cloud.video.livestream.v1.AudioStream.AudioMapping>(
mapping_);
bitField0_ |= 0x00000020;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping,
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder,
com.google.cloud.video.livestream.v1.AudioStream.AudioMappingOrBuilder>
mappingBuilder_;
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public java.util.List<com.google.cloud.video.livestream.v1.AudioStream.AudioMapping>
getMappingList() {
if (mappingBuilder_ == null) {
return java.util.Collections.unmodifiableList(mapping_);
} else {
return mappingBuilder_.getMessageList();
}
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public int getMappingCount() {
if (mappingBuilder_ == null) {
return mapping_.size();
} else {
return mappingBuilder_.getCount();
}
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public com.google.cloud.video.livestream.v1.AudioStream.AudioMapping getMapping(int index) {
if (mappingBuilder_ == null) {
return mapping_.get(index);
} else {
return mappingBuilder_.getMessage(index);
}
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public Builder setMapping(
int index, com.google.cloud.video.livestream.v1.AudioStream.AudioMapping value) {
if (mappingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMappingIsMutable();
mapping_.set(index, value);
onChanged();
} else {
mappingBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public Builder setMapping(
int index,
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder builderForValue) {
if (mappingBuilder_ == null) {
ensureMappingIsMutable();
mapping_.set(index, builderForValue.build());
onChanged();
} else {
mappingBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public Builder addMapping(com.google.cloud.video.livestream.v1.AudioStream.AudioMapping value) {
if (mappingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMappingIsMutable();
mapping_.add(value);
onChanged();
} else {
mappingBuilder_.addMessage(value);
}
return this;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public Builder addMapping(
int index, com.google.cloud.video.livestream.v1.AudioStream.AudioMapping value) {
if (mappingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMappingIsMutable();
mapping_.add(index, value);
onChanged();
} else {
mappingBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public Builder addMapping(
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder builderForValue) {
if (mappingBuilder_ == null) {
ensureMappingIsMutable();
mapping_.add(builderForValue.build());
onChanged();
} else {
mappingBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public Builder addMapping(
int index,
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder builderForValue) {
if (mappingBuilder_ == null) {
ensureMappingIsMutable();
mapping_.add(index, builderForValue.build());
onChanged();
} else {
mappingBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public Builder addAllMapping(
java.lang.Iterable<? extends com.google.cloud.video.livestream.v1.AudioStream.AudioMapping>
values) {
if (mappingBuilder_ == null) {
ensureMappingIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, mapping_);
onChanged();
} else {
mappingBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public Builder clearMapping() {
if (mappingBuilder_ == null) {
mapping_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000020);
onChanged();
} else {
mappingBuilder_.clear();
}
return this;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public Builder removeMapping(int index) {
if (mappingBuilder_ == null) {
ensureMappingIsMutable();
mapping_.remove(index);
onChanged();
} else {
mappingBuilder_.remove(index);
}
return this;
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder getMappingBuilder(
int index) {
return getMappingFieldBuilder().getBuilder(index);
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public com.google.cloud.video.livestream.v1.AudioStream.AudioMappingOrBuilder
getMappingOrBuilder(int index) {
if (mappingBuilder_ == null) {
return mapping_.get(index);
} else {
return mappingBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public java.util.List<
? extends com.google.cloud.video.livestream.v1.AudioStream.AudioMappingOrBuilder>
getMappingOrBuilderList() {
if (mappingBuilder_ != null) {
return mappingBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(mapping_);
}
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder
addMappingBuilder() {
return getMappingFieldBuilder()
.addBuilder(
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.getDefaultInstance());
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder addMappingBuilder(
int index) {
return getMappingFieldBuilder()
.addBuilder(
index,
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.getDefaultInstance());
}
/**
*
*
*
* The mapping for the input streams and audio channels.
*
*
* repeated .google.cloud.video.livestream.v1.AudioStream.AudioMapping mapping = 5;
*/
public java.util.List<com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder>
getMappingBuilderList() {
return getMappingFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping,
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder,
com.google.cloud.video.livestream.v1.AudioStream.AudioMappingOrBuilder>
getMappingFieldBuilder() {
if (mappingBuilder_ == null) {
mappingBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping,
com.google.cloud.video.livestream.v1.AudioStream.AudioMapping.Builder,
com.google.cloud.video.livestream.v1.AudioStream.AudioMappingOrBuilder>(
mapping_, ((bitField0_ & 0x00000020) != 0), getParentForChildren(), isClean());
mapping_ = null;
}
return mappingBuilder_;
}
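/*
 * Illustrative sketch (not generated code): attaching an input via the nested
 * AudioMapping builder accessor above. setInputKey corresponds to the
 * `input_key` field declared in this file's AudioMappingOrBuilder interface;
 * setInputTrack is assumed to follow the standard generated naming for the
 * `input_track` field, and "my-input" is a placeholder key.
 *
 *   builder
 *       .addMappingBuilder()
 *       .setInputKey("my-input")
 *       .setInputTrack(0);
 */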
private int sampleRateHertz_;
/**
*
*
*
* The audio sample rate in Hertz. The default is 48000 Hertz.
*
*
* int32 sample_rate_hertz = 6;
*
* @return The sampleRateHertz.
*/
@java.lang.Override
public int getSampleRateHertz() {
return sampleRateHertz_;
}
/**
*
*
*
* The audio sample rate in Hertz. The default is 48000 Hertz.
*
*
* int32 sample_rate_hertz = 6;
*
* @param value The sampleRateHertz to set.
* @return This builder for chaining.
*/
public Builder setSampleRateHertz(int value) {
sampleRateHertz_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
*
*
* The audio sample rate in Hertz. The default is 48000 Hertz.
*
*
* int32 sample_rate_hertz = 6;
*
* @return This builder for chaining.
*/
public Builder clearSampleRateHertz() {
bitField0_ = (bitField0_ & ~0x00000040);
sampleRateHertz_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.video.livestream.v1.AudioStream)
}
// @@protoc_insertion_point(class_scope:google.cloud.video.livestream.v1.AudioStream)
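/*
 * Illustrative sketch (not generated code): building a basic AAC stream with
 * the Builder defined above. All setters used here appear in this file; the
 * numeric values are examples only, chosen within the documented limits.
 *
 *   AudioStream audio =
 *       AudioStream.newBuilder()
 *           .setCodec("aac")
 *           .setBitrateBps(128000)      // required, 1 to 10,000,000
 *           .setChannelCount(2)         // default is 2
 *           .setSampleRateHertz(48000)  // default is 48000
 *           .build();
 */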
private static final com.google.cloud.video.livestream.v1.AudioStream DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.video.livestream.v1.AudioStream();
}
public static com.google.cloud.video.livestream.v1.AudioStream getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<AudioStream> PARSER =
new com.google.protobuf.AbstractParser<AudioStream>() {
@java.lang.Override
public AudioStream parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<AudioStream> parser() {
return PARSER;
}
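/*
 * Illustrative sketch (not generated code): deserializing with the parser
 * returned above. parseFrom(byte[]) is the standard static method on
 * GeneratedMessageV3 subclasses and throws InvalidProtocolBufferException;
 * `bytes` is a placeholder for previously serialized AudioStream data.
 *
 *   byte[] bytes = audio.toByteArray();
 *   AudioStream parsed = AudioStream.parseFrom(bytes);
 *   // or equivalently: AudioStream.parser().parseFrom(bytes);
 */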
@java.lang.Override
public com.google.protobuf.Parser<AudioStream> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.AudioStream getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}