// software.amazon.awssdk.services.mediaconvert.model.VideoSelector — Maven / Gradle / Ivy artifact page header
// (artifact: mediaconvert; see the published documentation for all versions)
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package software.amazon.awssdk.services.mediaconvert.model;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import software.amazon.awssdk.annotations.Generated;
import software.amazon.awssdk.core.SdkField;
import software.amazon.awssdk.core.SdkPojo;
import software.amazon.awssdk.core.protocol.MarshallLocation;
import software.amazon.awssdk.core.protocol.MarshallingType;
import software.amazon.awssdk.core.traits.LocationTrait;
import software.amazon.awssdk.utils.ToString;
import software.amazon.awssdk.utils.builder.CopyableBuilder;
import software.amazon.awssdk.utils.builder.ToCopyableBuilder;
/**
* Input video selectors contain the video settings for the input. Each of your inputs can have up to one video
* selector.
*/
@Generated("software.amazon.awssdk:codegen")
public final class VideoSelector implements SdkPojo, Serializable, ToCopyableBuilder {
private static final SdkField ALPHA_BEHAVIOR_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("AlphaBehavior").getter(getter(VideoSelector::alphaBehaviorAsString))
.setter(setter(Builder::alphaBehavior))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("alphaBehavior").build()).build();
private static final SdkField COLOR_SPACE_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("ColorSpace").getter(getter(VideoSelector::colorSpaceAsString)).setter(setter(Builder::colorSpace))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("colorSpace").build()).build();
private static final SdkField COLOR_SPACE_USAGE_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("ColorSpaceUsage").getter(getter(VideoSelector::colorSpaceUsageAsString))
.setter(setter(Builder::colorSpaceUsage))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("colorSpaceUsage").build()).build();
private static final SdkField EMBEDDED_TIMECODE_OVERRIDE_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("EmbeddedTimecodeOverride").getter(getter(VideoSelector::embeddedTimecodeOverrideAsString))
.setter(setter(Builder::embeddedTimecodeOverride))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("embeddedTimecodeOverride").build())
.build();
private static final SdkField HDR10_METADATA_FIELD = SdkField
. builder(MarshallingType.SDK_POJO).memberName("Hdr10Metadata")
.getter(getter(VideoSelector::hdr10Metadata)).setter(setter(Builder::hdr10Metadata))
.constructor(Hdr10Metadata::builder)
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("hdr10Metadata").build()).build();
private static final SdkField MAX_LUMINANCE_FIELD = SdkField. builder(MarshallingType.INTEGER)
.memberName("MaxLuminance").getter(getter(VideoSelector::maxLuminance)).setter(setter(Builder::maxLuminance))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("maxLuminance").build()).build();
private static final SdkField PAD_VIDEO_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("PadVideo").getter(getter(VideoSelector::padVideoAsString)).setter(setter(Builder::padVideo))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("padVideo").build()).build();
private static final SdkField PID_FIELD = SdkField. builder(MarshallingType.INTEGER).memberName("Pid")
.getter(getter(VideoSelector::pid)).setter(setter(Builder::pid))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("pid").build()).build();
private static final SdkField PROGRAM_NUMBER_FIELD = SdkField. builder(MarshallingType.INTEGER)
.memberName("ProgramNumber").getter(getter(VideoSelector::programNumber)).setter(setter(Builder::programNumber))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("programNumber").build()).build();
private static final SdkField ROTATE_FIELD = SdkField. builder(MarshallingType.STRING).memberName("Rotate")
.getter(getter(VideoSelector::rotateAsString)).setter(setter(Builder::rotate))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("rotate").build()).build();
private static final SdkField SAMPLE_RANGE_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("SampleRange").getter(getter(VideoSelector::sampleRangeAsString)).setter(setter(Builder::sampleRange))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("sampleRange").build()).build();
private static final List> SDK_FIELDS = Collections.unmodifiableList(Arrays.asList(ALPHA_BEHAVIOR_FIELD,
COLOR_SPACE_FIELD, COLOR_SPACE_USAGE_FIELD, EMBEDDED_TIMECODE_OVERRIDE_FIELD, HDR10_METADATA_FIELD,
MAX_LUMINANCE_FIELD, PAD_VIDEO_FIELD, PID_FIELD, PROGRAM_NUMBER_FIELD, ROTATE_FIELD, SAMPLE_RANGE_FIELD));
private static final long serialVersionUID = 1L;
// Member values captured from the builder. Enum-backed members (alphaBehavior, colorSpace,
// colorSpaceUsage, embeddedTimecodeOverride, padVideo, rotate, sampleRange) are stored in their
// raw String form so unrecognized service values survive an SDK-version mismatch.
private final String alphaBehavior;
private final String colorSpace;
private final String colorSpaceUsage;
private final String embeddedTimecodeOverride;
private final Hdr10Metadata hdr10Metadata;
private final Integer maxLuminance;
private final String padVideo;
private final Integer pid;
private final Integer programNumber;
private final String rotate;
private final String sampleRange;
/**
 * Copies every member value out of the builder. Instances are created only via
 * {@code Builder#build()}; all fields may be null when unset.
 */
private VideoSelector(BuilderImpl builder) {
this.alphaBehavior = builder.alphaBehavior;
this.colorSpace = builder.colorSpace;
this.colorSpaceUsage = builder.colorSpaceUsage;
this.embeddedTimecodeOverride = builder.embeddedTimecodeOverride;
this.hdr10Metadata = builder.hdr10Metadata;
this.maxLuminance = builder.maxLuminance;
this.padVideo = builder.padVideo;
this.pid = builder.pid;
this.programNumber = builder.programNumber;
this.rotate = builder.rotate;
this.sampleRange = builder.sampleRange;
}
/**
 * Alpha-channel handling for QuickTime animation inputs: DISCARD (default) keeps the video and
 * drops the alpha channel; REMAP_TO_LUMA drops the video and maps alpha to the output luma channel.
 *
 * If the service returns a value unknown to this SDK version, this method returns
 * {@link AlphaBehavior#UNKNOWN_TO_SDK_VERSION}; the raw value is available from
 * {@link #alphaBehaviorAsString}.
 *
 * @return the alpha behavior as a typed enum value
 * @see AlphaBehavior
 */
public final AlphaBehavior alphaBehavior() {
    return AlphaBehavior.fromValue(this.alphaBehavior);
}
/**
 * Alpha-channel handling for QuickTime animation inputs, as the raw service string. Unlike
 * {@link #alphaBehavior()}, values unknown to this SDK version are returned verbatim rather than
 * collapsed to {@link AlphaBehavior#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the raw alpha behavior string, or null if unset
 * @see AlphaBehavior
 */
public final String alphaBehaviorAsString() {
    return this.alphaBehavior;
}
/**
 * The input color space. Keep the default, Follow, to auto-detect; specify a value when input
 * metadata is wrong or missing. For HDR 10 inputs missing (or with inaccurate) SMPTE ST 2086
 * mastering metadata, choose Force HDR 10 and supply correct values in the HDR 10 metadata
 * settings. See https://docs.aws.amazon.com/console/mediaconvert/hdr. Explicit choices imply
 * these primaries/transfer/matrix triples: HDR 10 = BT.2020/PQ/BT.2020 non-constant;
 * HLG 2020 = BT.2020/HLG/BT.2020 non-constant; P3DCI (Theater) = DCIP3/SMPTE 428M/BT.709;
 * P3D65 (SDR) = Display P3/sRGB/BT.709; P3D65 (HDR) = Display P3/PQ/BT.709.
 *
 * If the service returns a value unknown to this SDK version, this method returns
 * {@link ColorSpace#UNKNOWN_TO_SDK_VERSION}; the raw value is available from
 * {@link #colorSpaceAsString}.
 *
 * @return the input color space as a typed enum value
 * @see ColorSpace
 */
public final ColorSpace colorSpace() {
    return ColorSpace.fromValue(this.colorSpace);
}
/**
 * The input color space as the raw service string (see {@link #colorSpace()} for the full
 * description of each choice). Unlike the typed accessor, values unknown to this SDK version
 * are returned verbatim rather than collapsed to {@link ColorSpace#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the raw color space string, or null if unset
 * @see ColorSpace
 */
public final String colorSpaceAsString() {
    return this.colorSpace;
}
/**
 * Determines which color metadata source takes precedence: the input file, or the job input
 * settings (Color space and HDR master display information). Force uses the job input settings,
 * falling back to input metadata when those settings are unspecified. FALLBACK uses input
 * metadata when present, falling back to the job input settings when it is not.
 *
 * If the service returns a value unknown to this SDK version, this method returns
 * {@link ColorSpaceUsage#UNKNOWN_TO_SDK_VERSION}; the raw value is available from
 * {@link #colorSpaceUsageAsString}.
 *
 * @return the color space usage as a typed enum value
 * @see ColorSpaceUsage
 */
public final ColorSpaceUsage colorSpaceUsage() {
    return ColorSpaceUsage.fromValue(this.colorSpaceUsage);
}
/**
 * The color space usage setting as the raw service string (see {@link #colorSpaceUsage()} for
 * the description of each choice). Unlike the typed accessor, values unknown to this SDK version
 * are returned verbatim rather than collapsed to {@link ColorSpaceUsage#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the raw color space usage string, or null if unset
 * @see ColorSpaceUsage
 */
public final String colorSpaceUsageAsString() {
    return this.colorSpaceUsage;
}
/**
 * Set to Use MDPM when your AVCHD input carries timecode tag data in the Modified Digital Video
 * Pack Metadata (and, recommended, set Timecode source to Embedded). Leave blank or set to None
 * when the input has no MDPM timecode.
 *
 * If the service returns a value unknown to this SDK version, this method returns
 * {@link EmbeddedTimecodeOverride#UNKNOWN_TO_SDK_VERSION}; the raw value is available from
 * {@link #embeddedTimecodeOverrideAsString}.
 *
 * @return the embedded timecode override as a typed enum value
 * @see EmbeddedTimecodeOverride
 */
public final EmbeddedTimecodeOverride embeddedTimecodeOverride() {
    return EmbeddedTimecodeOverride.fromValue(this.embeddedTimecodeOverride);
}
/**
 * The embedded timecode override as the raw service string (see
 * {@link #embeddedTimecodeOverride()}). Unlike the typed accessor, values unknown to this SDK
 * version are returned verbatim rather than collapsed to
 * {@link EmbeddedTimecodeOverride#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the raw embedded timecode override string, or null if unset
 * @see EmbeddedTimecodeOverride
 */
public final String embeddedTimecodeOverrideAsString() {
    return this.embeddedTimecodeOverride;
}
/**
 * HDR 10 metadata to supply when it is missing or inaccurate in the input video. Values come
 * from a color grader's HDR 10 mastering process; each setting ranges 0 to 50,000, in increments
 * of 0.00002 CIE1931 color coordinate. When you set these, also set Color space to HDR 10; use
 * Color space usage to control precedence over input-file metadata, and Color metadata to control
 * whether metadata is written to outputs. See https://docs.aws.amazon.com/console/mediaconvert/hdr.
 *
 * @return the HDR 10 metadata settings, or null if unset
 */
public final Hdr10Metadata hdr10Metadata() {
    return this.hdr10Metadata;
}
/**
 * Maximum mastering display luminance, an integer from 0 to 2147483647 in units of 0.0001 nits
 * (e.g. 10000000 for 1000 nits).
 *
 * @return the maximum luminance, or null if unset
 */
public final Integer maxLuminance() {
    return this.maxLuminance;
}
/**
 * Use when input video and audio durations don't align and the output or player requires strict
 * alignment (e.g. delayed audio start, video ending before audio). Black makes MediaConvert pad
 * with black video frames at the beginning or end so durations match; Disabled (or blank) keeps
 * the default behavior of generating no black video.
 *
 * If the service returns a value unknown to this SDK version, this method returns
 * {@link PadVideo#UNKNOWN_TO_SDK_VERSION}; the raw value is available from
 * {@link #padVideoAsString}.
 *
 * @return the pad video setting as a typed enum value
 * @see PadVideo
 */
public final PadVideo padVideo() {
    return PadVideo.fromValue(this.padVideo);
}
/**
 * The pad video setting as the raw service string (see {@link #padVideo()} for the description
 * of each choice). Unlike the typed accessor, values unknown to this SDK version are returned
 * verbatim rather than collapsed to {@link PadVideo#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the raw pad video string, or null if unset
 * @see PadVideo
 */
public final String padVideoAsString() {
    return this.padVideo;
}
/**
 * Packet identifier selecting specific video data from the input file; identifies a data set in
 * an MPEG-2 transport stream container. Given as a decimal integer that the system converts to
 * hexadecimal (e.g. 257 selects PID 0x101).
 *
 * @return the video PID, or null if unset
 */
public final Integer pid() {
    return this.pid;
}
/**
 * Selects a specific program within a multi-program transport stream. Quad 4K is not currently
 * supported.
 *
 * @return the program number, or null if unset
 */
public final Integer programNumber() {
    return this.programNumber;
}
/**
 * How the service rotates your video: a fixed clockwise rotation of 0, 90, 180, or 270 degrees,
 * or Automatic (for .mov/.mp4 inputs with rotation metadata) to rotate per the metadata when it
 * is within one degree of 90, 180, or 270 — otherwise no rotation is applied. By default the
 * service does not rotate, even when rotation metadata is present, and rotation metadata is not
 * passed through.
 *
 * If the service returns a value unknown to this SDK version, this method returns
 * {@link InputRotate#UNKNOWN_TO_SDK_VERSION}; the raw value is available from
 * {@link #rotateAsString}.
 *
 * @return the rotate setting as a typed enum value
 * @see InputRotate
 */
public final InputRotate rotate() {
    return InputRotate.fromValue(this.rotate);
}
/**
 * The rotate setting as the raw service string (see {@link #rotate()} for the description of
 * each choice). Unlike the typed accessor, values unknown to this SDK version are returned
 * verbatim rather than collapsed to {@link InputRotate#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the raw rotate string, or null if unset
 * @see InputRotate
 */
public final String rotateAsString() {
    return this.rotate;
}
/**
 * The input sample range. Keep the default, Follow, to auto-detect from input metadata; specify
 * a value when input metadata indicates the wrong sample range, in which case MediaConvert
 * ignores the input metadata. Whichever sample range is used, MediaConvert applies it for
 * transcoding and writes it to the output metadata.
 *
 * If the service returns a value unknown to this SDK version, this method returns
 * {@link InputSampleRange#UNKNOWN_TO_SDK_VERSION}; the raw value is available from
 * {@link #sampleRangeAsString}.
 *
 * @return the sample range as a typed enum value
 * @see InputSampleRange
 */
public final InputSampleRange sampleRange() {
    return InputSampleRange.fromValue(this.sampleRange);
}
/**
 * The sample range setting as the raw service string (see {@link #sampleRange()} for the
 * description of each choice). Unlike the typed accessor, values unknown to this SDK version
 * are returned verbatim rather than collapsed to {@link InputSampleRange#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the raw sample range string, or null if unset
 * @see InputSampleRange
 */
public final String sampleRangeAsString() {
    return this.sampleRange;
}
@Override
public Builder toBuilder() {
return new BuilderImpl(this);
}
public static Builder builder() {
return new BuilderImpl();
}
public static Class extends Builder> serializableBuilderClass() {
return BuilderImpl.class;
}
@Override
public final int hashCode() {
int hashCode = 1;
hashCode = 31 * hashCode + Objects.hashCode(alphaBehaviorAsString());
hashCode = 31 * hashCode + Objects.hashCode(colorSpaceAsString());
hashCode = 31 * hashCode + Objects.hashCode(colorSpaceUsageAsString());
hashCode = 31 * hashCode + Objects.hashCode(embeddedTimecodeOverrideAsString());
hashCode = 31 * hashCode + Objects.hashCode(hdr10Metadata());
hashCode = 31 * hashCode + Objects.hashCode(maxLuminance());
hashCode = 31 * hashCode + Objects.hashCode(padVideoAsString());
hashCode = 31 * hashCode + Objects.hashCode(pid());
hashCode = 31 * hashCode + Objects.hashCode(programNumber());
hashCode = 31 * hashCode + Objects.hashCode(rotateAsString());
hashCode = 31 * hashCode + Objects.hashCode(sampleRangeAsString());
return hashCode;
}
@Override
public final boolean equals(Object obj) {
return equalsBySdkFields(obj);
}
@Override
public final boolean equalsBySdkFields(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof VideoSelector)) {
return false;
}
VideoSelector other = (VideoSelector) obj;
return Objects.equals(alphaBehaviorAsString(), other.alphaBehaviorAsString())
&& Objects.equals(colorSpaceAsString(), other.colorSpaceAsString())
&& Objects.equals(colorSpaceUsageAsString(), other.colorSpaceUsageAsString())
&& Objects.equals(embeddedTimecodeOverrideAsString(), other.embeddedTimecodeOverrideAsString())
&& Objects.equals(hdr10Metadata(), other.hdr10Metadata()) && Objects.equals(maxLuminance(), other.maxLuminance())
&& Objects.equals(padVideoAsString(), other.padVideoAsString()) && Objects.equals(pid(), other.pid())
&& Objects.equals(programNumber(), other.programNumber())
&& Objects.equals(rotateAsString(), other.rotateAsString())
&& Objects.equals(sampleRangeAsString(), other.sampleRangeAsString());
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*/
@Override
public final String toString() {
return ToString.builder("VideoSelector").add("AlphaBehavior", alphaBehaviorAsString())
.add("ColorSpace", colorSpaceAsString()).add("ColorSpaceUsage", colorSpaceUsageAsString())
.add("EmbeddedTimecodeOverride", embeddedTimecodeOverrideAsString()).add("Hdr10Metadata", hdr10Metadata())
.add("MaxLuminance", maxLuminance()).add("PadVideo", padVideoAsString()).add("Pid", pid())
.add("ProgramNumber", programNumber()).add("Rotate", rotateAsString()).add("SampleRange", sampleRangeAsString())
.build();
}
public final Optional getValueForField(String fieldName, Class clazz) {
switch (fieldName) {
case "AlphaBehavior":
return Optional.ofNullable(clazz.cast(alphaBehaviorAsString()));
case "ColorSpace":
return Optional.ofNullable(clazz.cast(colorSpaceAsString()));
case "ColorSpaceUsage":
return Optional.ofNullable(clazz.cast(colorSpaceUsageAsString()));
case "EmbeddedTimecodeOverride":
return Optional.ofNullable(clazz.cast(embeddedTimecodeOverrideAsString()));
case "Hdr10Metadata":
return Optional.ofNullable(clazz.cast(hdr10Metadata()));
case "MaxLuminance":
return Optional.ofNullable(clazz.cast(maxLuminance()));
case "PadVideo":
return Optional.ofNullable(clazz.cast(padVideoAsString()));
case "Pid":
return Optional.ofNullable(clazz.cast(pid()));
case "ProgramNumber":
return Optional.ofNullable(clazz.cast(programNumber()));
case "Rotate":
return Optional.ofNullable(clazz.cast(rotateAsString()));
case "SampleRange":
return Optional.ofNullable(clazz.cast(sampleRangeAsString()));
default:
return Optional.empty();
}
}
@Override
public final List> sdkFields() {
return SDK_FIELDS;
}
private static Function