All downloads are free. The search and download functionality uses the official Maven repository.

software.amazon.awssdk.services.mediaconvert.model.VideoSelector Maven / Gradle / Ivy

Go to download

The AWS Java SDK for AWS Elemental MediaConvert module holds the client classes that are used for communicating with AWS Elemental MediaConvert Service

There is a newer version: 2.29.15
Show newest version
/*
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 * 
 * http://aws.amazon.com/apache2.0
 * 
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */

package software.amazon.awssdk.services.mediaconvert.model;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import software.amazon.awssdk.annotations.Generated;
import software.amazon.awssdk.core.SdkField;
import software.amazon.awssdk.core.SdkPojo;
import software.amazon.awssdk.core.protocol.MarshallLocation;
import software.amazon.awssdk.core.protocol.MarshallingType;
import software.amazon.awssdk.core.traits.LocationTrait;
import software.amazon.awssdk.utils.ToString;
import software.amazon.awssdk.utils.builder.CopyableBuilder;
import software.amazon.awssdk.utils.builder.ToCopyableBuilder;

/**
 * Input video selectors contain the video settings for the input. Each of your inputs can have up to one video
 * selector.
 */
@Generated("software.amazon.awssdk:codegen")
public final class VideoSelector implements SdkPojo, Serializable, ToCopyableBuilder {
    private static final SdkField ALPHA_BEHAVIOR_FIELD = SdkField. builder(MarshallingType.STRING)
            .memberName("AlphaBehavior").getter(getter(VideoSelector::alphaBehaviorAsString))
            .setter(setter(Builder::alphaBehavior))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("alphaBehavior").build()).build();

    private static final SdkField COLOR_SPACE_FIELD = SdkField. builder(MarshallingType.STRING)
            .memberName("ColorSpace").getter(getter(VideoSelector::colorSpaceAsString)).setter(setter(Builder::colorSpace))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("colorSpace").build()).build();

    private static final SdkField COLOR_SPACE_USAGE_FIELD = SdkField. builder(MarshallingType.STRING)
            .memberName("ColorSpaceUsage").getter(getter(VideoSelector::colorSpaceUsageAsString))
            .setter(setter(Builder::colorSpaceUsage))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("colorSpaceUsage").build()).build();

    private static final SdkField EMBEDDED_TIMECODE_OVERRIDE_FIELD = SdkField. builder(MarshallingType.STRING)
            .memberName("EmbeddedTimecodeOverride").getter(getter(VideoSelector::embeddedTimecodeOverrideAsString))
            .setter(setter(Builder::embeddedTimecodeOverride))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("embeddedTimecodeOverride").build())
            .build();

    private static final SdkField HDR10_METADATA_FIELD = SdkField
            . builder(MarshallingType.SDK_POJO).memberName("Hdr10Metadata")
            .getter(getter(VideoSelector::hdr10Metadata)).setter(setter(Builder::hdr10Metadata))
            .constructor(Hdr10Metadata::builder)
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("hdr10Metadata").build()).build();

    private static final SdkField MAX_LUMINANCE_FIELD = SdkField. builder(MarshallingType.INTEGER)
            .memberName("MaxLuminance").getter(getter(VideoSelector::maxLuminance)).setter(setter(Builder::maxLuminance))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("maxLuminance").build()).build();

    private static final SdkField PAD_VIDEO_FIELD = SdkField. builder(MarshallingType.STRING)
            .memberName("PadVideo").getter(getter(VideoSelector::padVideoAsString)).setter(setter(Builder::padVideo))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("padVideo").build()).build();

    private static final SdkField PID_FIELD = SdkField. builder(MarshallingType.INTEGER).memberName("Pid")
            .getter(getter(VideoSelector::pid)).setter(setter(Builder::pid))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("pid").build()).build();

    private static final SdkField PROGRAM_NUMBER_FIELD = SdkField. builder(MarshallingType.INTEGER)
            .memberName("ProgramNumber").getter(getter(VideoSelector::programNumber)).setter(setter(Builder::programNumber))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("programNumber").build()).build();

    private static final SdkField ROTATE_FIELD = SdkField. builder(MarshallingType.STRING).memberName("Rotate")
            .getter(getter(VideoSelector::rotateAsString)).setter(setter(Builder::rotate))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("rotate").build()).build();

    private static final SdkField SAMPLE_RANGE_FIELD = SdkField. builder(MarshallingType.STRING)
            .memberName("SampleRange").getter(getter(VideoSelector::sampleRangeAsString)).setter(setter(Builder::sampleRange))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("sampleRange").build()).build();

    private static final List> SDK_FIELDS = Collections.unmodifiableList(Arrays.asList(ALPHA_BEHAVIOR_FIELD,
            COLOR_SPACE_FIELD, COLOR_SPACE_USAGE_FIELD, EMBEDDED_TIMECODE_OVERRIDE_FIELD, HDR10_METADATA_FIELD,
            MAX_LUMINANCE_FIELD, PAD_VIDEO_FIELD, PID_FIELD, PROGRAM_NUMBER_FIELD, ROTATE_FIELD, SAMPLE_RANGE_FIELD));

    private static final long serialVersionUID = 1L;

    // Raw service value of the AlphaBehavior enum (see alphaBehaviorAsString()).
    private final String alphaBehavior;

    // Raw service value of the ColorSpace enum (see colorSpaceAsString()).
    private final String colorSpace;

    // Raw service value of the ColorSpaceUsage enum (see colorSpaceUsageAsString()).
    private final String colorSpaceUsage;

    // Raw service value of the EmbeddedTimecodeOverride enum (see embeddedTimecodeOverrideAsString()).
    private final String embeddedTimecodeOverride;

    // Optional HDR 10 static metadata used to correct or supply missing input metadata.
    private final Hdr10Metadata hdr10Metadata;

    // Maximum mastering display luminance, in units of 0.0001 nits; may be null.
    private final Integer maxLuminance;

    // Raw service value of the PadVideo enum (see padVideoAsString()).
    private final String padVideo;

    // MPEG-2 transport stream packet identifier selecting the video data; may be null.
    private final Integer pid;

    // Program number within a multi-program transport stream; may be null.
    private final Integer programNumber;

    // Raw service value of the InputRotate enum (see rotateAsString()).
    private final String rotate;

    // Raw service value of the InputSampleRange enum (see sampleRangeAsString()).
    private final String sampleRange;

    /**
     * Constructs an immutable {@code VideoSelector} by copying every configured
     * value out of the builder. Private: instances are created only through
     * {@link #builder()} / {@code toBuilder()}.
     */
    private VideoSelector(BuilderImpl builder) {
        this.alphaBehavior = builder.alphaBehavior;
        this.colorSpace = builder.colorSpace;
        this.colorSpaceUsage = builder.colorSpaceUsage;
        this.embeddedTimecodeOverride = builder.embeddedTimecodeOverride;
        this.hdr10Metadata = builder.hdr10Metadata;
        this.maxLuminance = builder.maxLuminance;
        this.padVideo = builder.padVideo;
        this.pid = builder.pid;
        this.programNumber = builder.programNumber;
        this.rotate = builder.rotate;
        this.sampleRange = builder.sampleRange;
    }

    /**
     * Ignore this setting unless this input is a QuickTime animation with an alpha channel. Use this setting to
     * create separate Key and Fill outputs. In each output, specify which part of the input MediaConvert uses.
     * Leave this setting at the default value DISCARD to delete the alpha channel and preserve the video. Set it
     * to REMAP_TO_LUMA to delete the video and map the alpha channel to the luma channel of your outputs.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, this method returns
     * {@link AlphaBehavior#UNKNOWN_TO_SDK_VERSION}. The raw value is available from {@link #alphaBehaviorAsString}.
     *
     * @return The alpha-channel behavior for this input.
     * @see AlphaBehavior
     */
    public final AlphaBehavior alphaBehavior() {
        return AlphaBehavior.fromValue(alphaBehavior);
    }

    /**
     * Raw string form of {@link #alphaBehavior()}, exactly as returned by the service.
     *
     * @return The alpha-channel behavior as a raw string.
     * @see AlphaBehavior
     */
    public final String alphaBehaviorAsString() {
        return alphaBehavior;
    }

    /**
     * If your input video has accurate color space metadata, or if you don't know about color space: keep the
     * default value, Follow, and MediaConvert automatically detects your input color space. If your input video
     * has wrong or missing metadata, specify the accurate color space here. If your input is HDR 10 and the
     * SMPTE ST 2086 Mastering Display Color Volume static metadata is missing or inaccurate, choose Force HDR 10
     * and specify correct values in the input HDR 10 metadata settings. See
     * https://docs.aws.amazon.com/console/mediaconvert/hdr. Color space metadata used per choice (primaries,
     * transfer characteristics, matrix coefficients): HDR 10: BT.2020, PQ, BT.2020 non-constant; HLG 2020:
     * BT.2020, HLG, BT.2020 non-constant; P3DCI (Theater): DCIP3, SMPTE 428M, BT.709; P3D65 (SDR): Display P3,
     * sRGB, BT.709; P3D65 (HDR): Display P3, PQ, BT.709.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, this method returns
     * {@link ColorSpace#UNKNOWN_TO_SDK_VERSION}. The raw value is available from {@link #colorSpaceAsString}.
     *
     * @return The input color space.
     * @see ColorSpace
     */
    public final ColorSpace colorSpace() {
        return ColorSpace.fromValue(colorSpace);
    }

    /**
     * Raw string form of {@link #colorSpace()}, exactly as returned by the service.
     *
     * @return The input color space as a raw string.
     * @see ColorSpace
     */
    public final String colorSpaceAsString() {
        return colorSpace;
    }

    /**
     * There are two sources for color metadata: the input file and the job input settings Color space and HDR
     * master display information settings. The Color space usage setting determines which takes precedence.
     * Choose Force to use color metadata from the input job settings; if you don't specify values for those
     * settings, the service defaults to using metadata from your input. Choose Fallback to use color metadata
     * from the source when present; if there's no color metadata in your input file, the service defaults to
     * the values you specify in the input settings.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, this method returns
     * {@link ColorSpaceUsage#UNKNOWN_TO_SDK_VERSION}. The raw value is available from
     * {@link #colorSpaceUsageAsString}.
     *
     * @return Which color metadata source takes precedence.
     * @see ColorSpaceUsage
     */
    public final ColorSpaceUsage colorSpaceUsage() {
        return ColorSpaceUsage.fromValue(colorSpaceUsage);
    }

    /**
     * Raw string form of {@link #colorSpaceUsage()}, exactly as returned by the service.
     *
     * @return The color space usage as a raw string.
     * @see ColorSpaceUsage
     */
    public final String colorSpaceUsageAsString() {
        return colorSpaceUsage;
    }

    /**
     * Set Embedded timecode override to Use MDPM when your AVCHD input contains timecode tag data in the
     * Modified Digital Video Pack Metadata. When you do, we recommend you also set Timecode source to Embedded.
     * Leave Embedded timecode override blank, or set to None, when your input does not contain MDPM timecode.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, this method returns
     * {@link EmbeddedTimecodeOverride#UNKNOWN_TO_SDK_VERSION}. The raw value is available from
     * {@link #embeddedTimecodeOverrideAsString}.
     *
     * @return The embedded timecode override setting.
     * @see EmbeddedTimecodeOverride
     */
    public final EmbeddedTimecodeOverride embeddedTimecodeOverride() {
        return EmbeddedTimecodeOverride.fromValue(embeddedTimecodeOverride);
    }

    /**
     * Raw string form of {@link #embeddedTimecodeOverride()}, exactly as returned by the service.
     *
     * @return The embedded timecode override as a raw string.
     * @see EmbeddedTimecodeOverride
     */
    public final String embeddedTimecodeOverrideAsString() {
        return embeddedTimecodeOverride;
    }

    /**
     * Use these settings to provide HDR 10 metadata that is missing or inaccurate in your input video.
     * Appropriate values vary depending on the input video and must be provided by a color grader, who generates
     * them during the HDR 10 mastering process. The valid range for each setting is 0 to 50,000; each increment
     * represents 0.00002 in CIE1931 color coordinate. Related settings: when you specify these values, you must
     * also set Color space to HDR 10. To specify whether the values you specify here take precedence over the
     * values in your input file's metadata, set Color space usage. To specify whether color metadata is included
     * in an output, set Color metadata. See https://docs.aws.amazon.com/console/mediaconvert/hdr.
     *
     * @return The HDR 10 metadata overrides, or null if not configured.
     */
    public final Hdr10Metadata hdr10Metadata() {
        return hdr10Metadata;
    }

    /**
     * Specify the maximum mastering display luminance. Enter an integer from 0 to 2147483647, in units of
     * 0.0001 nits. For example, enter 10000000 for 1000 nits.
     *
     * @return The maximum mastering display luminance, or null if not configured.
     */
    public final Integer maxLuminance() {
        return maxLuminance;
    }

    /**
     * Use this setting if your input has video and audio durations that don't align, and your output or player
     * has strict alignment requirements. Examples: input audio track has a delayed start; input video track ends
     * before audio ends. When you set Pad video to Black, MediaConvert generates black video frames so that
     * output video and audio durations match; frames are added at the beginning or end depending on your input.
     * To keep the default behavior and not generate black video, set Pad video to Disabled or leave blank.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, this method returns
     * {@link PadVideo#UNKNOWN_TO_SDK_VERSION}. The raw value is available from {@link #padVideoAsString}.
     *
     * @return The pad-video behavior.
     * @see PadVideo
     */
    public final PadVideo padVideo() {
        return PadVideo.fromValue(padVideo);
    }

    /**
     * Raw string form of {@link #padVideo()}, exactly as returned by the service.
     *
     * @return The pad-video behavior as a raw string.
     * @see PadVideo
     */
    public final String padVideoAsString() {
        return padVideo;
    }

    /**
     * Use PID to select specific video data from an input file. Specify this value as an integer; the system
     * automatically converts it to the hexadecimal value. For example, 257 selects PID 0x101. A PID, or packet
     * identifier, is an identifier for a set of data in an MPEG-2 transport stream container.
     *
     * @return The packet identifier, or null if not configured.
     */
    public final Integer pid() {
        return pid;
    }

    /**
     * Selects a specific program from within a multi-program transport stream. Note that Quad 4K is not
     * currently supported.
     *
     * @return The program number, or null if not configured.
     */
    public final Integer programNumber() {
        return programNumber;
    }

    /**
     * Use Rotate to specify how the service rotates your video. You can choose automatic rotation or specify a
     * clockwise rotation of 0, 90, 180, or 270 degrees. If your input container is .mov or .mp4 and has rotation
     * metadata, choose Automatic to have the service rotate the video according to that metadata; the rotation
     * must be within one degree of 90, 180, or 270 degrees, otherwise the service defaults to no rotation. By
     * default, the service does no rotation, even if your input has rotation metadata. The service doesn't pass
     * through rotation metadata.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, this method returns
     * {@link InputRotate#UNKNOWN_TO_SDK_VERSION}. The raw value is available from {@link #rotateAsString}.
     *
     * @return The rotation setting.
     * @see InputRotate
     */
    public final InputRotate rotate() {
        return InputRotate.fromValue(rotate);
    }

    /**
     * Raw string form of {@link #rotate()}, exactly as returned by the service.
     *
     * @return The rotation setting as a raw string.
     * @see InputRotate
     */
    public final String rotateAsString() {
        return rotate;
    }

    /**
     * If the sample range metadata in your input video is accurate, or if you don't know about sample range,
     * keep the default value, Follow, and the service automatically detects your input sample range. If your
     * input video has metadata indicating the wrong sample range, specify the accurate sample range here; the
     * service then ignores any sample range information in the input metadata. Whichever sample range is used,
     * MediaConvert applies it for transcoding and also writes it to the output metadata.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, this method returns
     * {@link InputSampleRange#UNKNOWN_TO_SDK_VERSION}. The raw value is available from
     * {@link #sampleRangeAsString}.
     *
     * @return The input sample range.
     * @see InputSampleRange
     */
    public final InputSampleRange sampleRange() {
        return InputSampleRange.fromValue(sampleRange);
    }

    /**
     * Raw string form of {@link #sampleRange()}, exactly as returned by the service.
     *
     * @return The input sample range as a raw string.
     * @see InputSampleRange
     */
    public final String sampleRangeAsString() {
        return sampleRange;
    }

    /** @return A mutable builder initialized with this object's values. */
    @Override
    public Builder toBuilder() {
        return new BuilderImpl(this);
    }

    /** @return A new, empty builder for {@code VideoSelector}. */
    public static Builder builder() {
        return new BuilderImpl();
    }

    /** @return The concrete builder class used for serialization. */
    public static Class<? extends Builder> serializableBuilderClass() {
        return BuilderImpl.class;
    }

    /**
     * Hash code computed over every member, using the raw string form for enum-backed members so that unknown
     * enum values hash consistently with {@link #equalsBySdkFields(Object)}.
     */
    @Override
    public final int hashCode() {
        int hashCode = 1;
        hashCode = 31 * hashCode + Objects.hashCode(alphaBehaviorAsString());
        hashCode = 31 * hashCode + Objects.hashCode(colorSpaceAsString());
        hashCode = 31 * hashCode + Objects.hashCode(colorSpaceUsageAsString());
        hashCode = 31 * hashCode + Objects.hashCode(embeddedTimecodeOverrideAsString());
        hashCode = 31 * hashCode + Objects.hashCode(hdr10Metadata());
        hashCode = 31 * hashCode + Objects.hashCode(maxLuminance());
        hashCode = 31 * hashCode + Objects.hashCode(padVideoAsString());
        hashCode = 31 * hashCode + Objects.hashCode(pid());
        hashCode = 31 * hashCode + Objects.hashCode(programNumber());
        hashCode = 31 * hashCode + Objects.hashCode(rotateAsString());
        hashCode = 31 * hashCode + Objects.hashCode(sampleRangeAsString());
        return hashCode;
    }

    /** Equality delegates to the SDK-field comparison below. */
    @Override
    public final boolean equals(Object obj) {
        return equalsBySdkFields(obj);
    }

    @Override
    public final boolean equalsBySdkFields(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof VideoSelector)) {
return false; } VideoSelector other = (VideoSelector) obj; return Objects.equals(alphaBehaviorAsString(), other.alphaBehaviorAsString()) && Objects.equals(colorSpaceAsString(), other.colorSpaceAsString()) && Objects.equals(colorSpaceUsageAsString(), other.colorSpaceUsageAsString()) && Objects.equals(embeddedTimecodeOverrideAsString(), other.embeddedTimecodeOverrideAsString()) && Objects.equals(hdr10Metadata(), other.hdr10Metadata()) && Objects.equals(maxLuminance(), other.maxLuminance()) && Objects.equals(padVideoAsString(), other.padVideoAsString()) && Objects.equals(pid(), other.pid()) && Objects.equals(programNumber(), other.programNumber()) && Objects.equals(rotateAsString(), other.rotateAsString()) && Objects.equals(sampleRangeAsString(), other.sampleRangeAsString()); } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. */ @Override public final String toString() { return ToString.builder("VideoSelector").add("AlphaBehavior", alphaBehaviorAsString()) .add("ColorSpace", colorSpaceAsString()).add("ColorSpaceUsage", colorSpaceUsageAsString()) .add("EmbeddedTimecodeOverride", embeddedTimecodeOverrideAsString()).add("Hdr10Metadata", hdr10Metadata()) .add("MaxLuminance", maxLuminance()).add("PadVideo", padVideoAsString()).add("Pid", pid()) .add("ProgramNumber", programNumber()).add("Rotate", rotateAsString()).add("SampleRange", sampleRangeAsString()) .build(); } public final Optional getValueForField(String fieldName, Class clazz) { switch (fieldName) { case "AlphaBehavior": return Optional.ofNullable(clazz.cast(alphaBehaviorAsString())); case "ColorSpace": return Optional.ofNullable(clazz.cast(colorSpaceAsString())); case "ColorSpaceUsage": return Optional.ofNullable(clazz.cast(colorSpaceUsageAsString())); case "EmbeddedTimecodeOverride": return Optional.ofNullable(clazz.cast(embeddedTimecodeOverrideAsString())); case "Hdr10Metadata": return 
Optional.ofNullable(clazz.cast(hdr10Metadata())); case "MaxLuminance": return Optional.ofNullable(clazz.cast(maxLuminance())); case "PadVideo": return Optional.ofNullable(clazz.cast(padVideoAsString())); case "Pid": return Optional.ofNullable(clazz.cast(pid())); case "ProgramNumber": return Optional.ofNullable(clazz.cast(programNumber())); case "Rotate": return Optional.ofNullable(clazz.cast(rotateAsString())); case "SampleRange": return Optional.ofNullable(clazz.cast(sampleRangeAsString())); default: return Optional.empty(); } } @Override public final List> sdkFields() { return SDK_FIELDS; } private static Function getter(Function g) { return obj -> g.apply((VideoSelector) obj); } private static BiConsumer setter(BiConsumer s) { return (obj, val) -> s.accept((Builder) obj, val); } public interface Builder extends SdkPojo, CopyableBuilder { /** * Ignore this setting unless this input is a QuickTime animation with an alpha channel. Use this setting to * create separate Key and Fill outputs. In each output, specify which part of the input MediaConvert uses. * Leave this setting at the default value DISCARD to delete the alpha channel and preserve the video. Set it to * REMAP_TO_LUMA to delete the video and map the alpha channel to the luma channel of your outputs. * * @param alphaBehavior * Ignore this setting unless this input is a QuickTime animation with an alpha channel. Use this setting * to create separate Key and Fill outputs. In each output, specify which part of the input MediaConvert * uses. Leave this setting at the default value DISCARD to delete the alpha channel and preserve the * video. Set it to REMAP_TO_LUMA to delete the video and map the alpha channel to the luma channel of * your outputs. * @see AlphaBehavior * @return Returns a reference to this object so that method calls can be chained together. 
* @see AlphaBehavior */ Builder alphaBehavior(String alphaBehavior); /** * Ignore this setting unless this input is a QuickTime animation with an alpha channel. Use this setting to * create separate Key and Fill outputs. In each output, specify which part of the input MediaConvert uses. * Leave this setting at the default value DISCARD to delete the alpha channel and preserve the video. Set it to * REMAP_TO_LUMA to delete the video and map the alpha channel to the luma channel of your outputs. * * @param alphaBehavior * Ignore this setting unless this input is a QuickTime animation with an alpha channel. Use this setting * to create separate Key and Fill outputs. In each output, specify which part of the input MediaConvert * uses. Leave this setting at the default value DISCARD to delete the alpha channel and preserve the * video. Set it to REMAP_TO_LUMA to delete the video and map the alpha channel to the luma channel of * your outputs. * @see AlphaBehavior * @return Returns a reference to this object so that method calls can be chained together. * @see AlphaBehavior */ Builder alphaBehavior(AlphaBehavior alphaBehavior); /** * If your input video has accurate color space metadata, or if you don't know about color space: Keep the * default value, Follow. MediaConvert will automatically detect your input color space. If your input video has * metadata indicating the wrong color space, or has missing metadata: Specify the accurate color space here. If * your input video is HDR 10 and the SMPTE ST 2086 Mastering Display Color Volume static metadata isn't present * in your video stream, or if that metadata is present but not accurate: Choose Force HDR 10. Specify correct * values in the input HDR 10 metadata settings. For more information about HDR jobs, see * https://docs.aws.amazon.com/console/mediaconvert/hdr. 
When you specify an input color space, MediaConvert * uses the following color space metadata, which includes color primaries, transfer characteristics, and matrix * coefficients: * HDR 10: BT.2020, PQ, BT.2020 non-constant * HLG 2020: BT.2020, HLG, BT.2020 non-constant * * P3DCI (Theater): DCIP3, SMPTE 428M, BT.709 * P3D65 (SDR): Display P3, sRGB, BT.709 * P3D65 (HDR): Display P3, * PQ, BT.709 * * @param colorSpace * If your input video has accurate color space metadata, or if you don't know about color space: Keep * the default value, Follow. MediaConvert will automatically detect your input color space. If your * input video has metadata indicating the wrong color space, or has missing metadata: Specify the * accurate color space here. If your input video is HDR 10 and the SMPTE ST 2086 Mastering Display Color * Volume static metadata isn't present in your video stream, or if that metadata is present but not * accurate: Choose Force HDR 10. Specify correct values in the input HDR 10 metadata settings. For more * information about HDR jobs, see https://docs.aws.amazon.com/console/mediaconvert/hdr. When you specify * an input color space, MediaConvert uses the following color space metadata, which includes color * primaries, transfer characteristics, and matrix coefficients: * HDR 10: BT.2020, PQ, BT.2020 * non-constant * HLG 2020: BT.2020, HLG, BT.2020 non-constant * P3DCI (Theater): DCIP3, SMPTE 428M, * BT.709 * P3D65 (SDR): Display P3, sRGB, BT.709 * P3D65 (HDR): Display P3, PQ, BT.709 * @see ColorSpace * @return Returns a reference to this object so that method calls can be chained together. * @see ColorSpace */ Builder colorSpace(String colorSpace); /** * If your input video has accurate color space metadata, or if you don't know about color space: Keep the * default value, Follow. MediaConvert will automatically detect your input color space. 
If your input video has * metadata indicating the wrong color space, or has missing metadata: Specify the accurate color space here. If * your input video is HDR 10 and the SMPTE ST 2086 Mastering Display Color Volume static metadata isn't present * in your video stream, or if that metadata is present but not accurate: Choose Force HDR 10. Specify correct * values in the input HDR 10 metadata settings. For more information about HDR jobs, see * https://docs.aws.amazon.com/console/mediaconvert/hdr. When you specify an input color space, MediaConvert * uses the following color space metadata, which includes color primaries, transfer characteristics, and matrix * coefficients: * HDR 10: BT.2020, PQ, BT.2020 non-constant * HLG 2020: BT.2020, HLG, BT.2020 non-constant * * P3DCI (Theater): DCIP3, SMPTE 428M, BT.709 * P3D65 (SDR): Display P3, sRGB, BT.709 * P3D65 (HDR): Display P3, * PQ, BT.709 * * @param colorSpace * If your input video has accurate color space metadata, or if you don't know about color space: Keep * the default value, Follow. MediaConvert will automatically detect your input color space. If your * input video has metadata indicating the wrong color space, or has missing metadata: Specify the * accurate color space here. If your input video is HDR 10 and the SMPTE ST 2086 Mastering Display Color * Volume static metadata isn't present in your video stream, or if that metadata is present but not * accurate: Choose Force HDR 10. Specify correct values in the input HDR 10 metadata settings. For more * information about HDR jobs, see https://docs.aws.amazon.com/console/mediaconvert/hdr. 
When you specify * an input color space, MediaConvert uses the following color space metadata, which includes color * primaries, transfer characteristics, and matrix coefficients: * HDR 10: BT.2020, PQ, BT.2020 * non-constant * HLG 2020: BT.2020, HLG, BT.2020 non-constant * P3DCI (Theater): DCIP3, SMPTE 428M, * BT.709 * P3D65 (SDR): Display P3, sRGB, BT.709 * P3D65 (HDR): Display P3, PQ, BT.709 * @see ColorSpace * @return Returns a reference to this object so that method calls can be chained together. * @see ColorSpace */ Builder colorSpace(ColorSpace colorSpace); /** * There are two sources for color metadata, the input file and the job input settings Color space and HDR * master display information settings. The Color space usage setting determines which takes precedence. Choose * Force to use color metadata from the input job settings. If you don't specify values for those settings, the * service defaults to using metadata from your input. FALLBACK - Choose Fallback to use color metadata from the * source when it is present. If there's no color metadata in your input file, the service defaults to using * values you specify in the input settings. * * @param colorSpaceUsage * There are two sources for color metadata, the input file and the job input settings Color space and * HDR master display information settings. The Color space usage setting determines which takes * precedence. Choose Force to use color metadata from the input job settings. If you don't specify * values for those settings, the service defaults to using metadata from your input. FALLBACK - Choose * Fallback to use color metadata from the source when it is present. If there's no color metadata in * your input file, the service defaults to using values you specify in the input settings. * @see ColorSpaceUsage * @return Returns a reference to this object so that method calls can be chained together. 
* @see ColorSpaceUsage */ Builder colorSpaceUsage(String colorSpaceUsage); /** * There are two sources for color metadata, the input file and the job input settings Color space and HDR * master display information settings. The Color space usage setting determines which takes precedence. Choose * Force to use color metadata from the input job settings. If you don't specify values for those settings, the * service defaults to using metadata from your input. FALLBACK - Choose Fallback to use color metadata from the * source when it is present. If there's no color metadata in your input file, the service defaults to using * values you specify in the input settings. * * @param colorSpaceUsage * There are two sources for color metadata, the input file and the job input settings Color space and * HDR master display information settings. The Color space usage setting determines which takes * precedence. Choose Force to use color metadata from the input job settings. If you don't specify * values for those settings, the service defaults to using metadata from your input. FALLBACK - Choose * Fallback to use color metadata from the source when it is present. If there's no color metadata in * your input file, the service defaults to using values you specify in the input settings. * @see ColorSpaceUsage * @return Returns a reference to this object so that method calls can be chained together. * @see ColorSpaceUsage */ Builder colorSpaceUsage(ColorSpaceUsage colorSpaceUsage); /** * Set Embedded timecode override to Use MDPM when your AVCHD input contains timecode tag data in the Modified * Digital Video Pack Metadata. When you do, we recommend you also set Timecode source to Embedded. Leave * Embedded timecode override blank, or set to None, when your input does not contain MDPM timecode. * * @param embeddedTimecodeOverride * Set Embedded timecode override to Use MDPM when your AVCHD input contains timecode tag data in the * Modified Digital Video Pack Metadata. 
When you do, we recommend you also set Timecode source to * Embedded. Leave Embedded timecode override blank, or set to None, when your input does not contain * MDPM timecode. * @see EmbeddedTimecodeOverride * @return Returns a reference to this object so that method calls can be chained together. * @see EmbeddedTimecodeOverride */ Builder embeddedTimecodeOverride(String embeddedTimecodeOverride); /** * Set Embedded timecode override to Use MDPM when your AVCHD input contains timecode tag data in the Modified * Digital Video Pack Metadata. When you do, we recommend you also set Timecode source to Embedded. Leave * Embedded timecode override blank, or set to None, when your input does not contain MDPM timecode. * * @param embeddedTimecodeOverride * Set Embedded timecode override to Use MDPM when your AVCHD input contains timecode tag data in the * Modified Digital Video Pack Metadata. When you do, we recommend you also set Timecode source to * Embedded. Leave Embedded timecode override blank, or set to None, when your input does not contain * MDPM timecode. * @see EmbeddedTimecodeOverride * @return Returns a reference to this object so that method calls can be chained together. * @see EmbeddedTimecodeOverride */ Builder embeddedTimecodeOverride(EmbeddedTimecodeOverride embeddedTimecodeOverride); /** * Use these settings to provide HDR 10 metadata that is missing or inaccurate in your input video. Appropriate * values vary depending on the input video and must be provided by a color grader. The color grader generates * these values during the HDR 10 mastering process. The valid range for each of these settings is 0 to 50,000. * Each increment represents 0.00002 in CIE1931 color coordinate. Related settings - When you specify these * values, you must also set Color space to HDR 10. To specify whether the the values you specify here take * precedence over the values in the metadata of your input file, set Color space usage. 
To specify whether * color metadata is included in an output, set Color metadata. For more information about MediaConvert HDR * jobs, see https://docs.aws.amazon.com/console/mediaconvert/hdr. * * @param hdr10Metadata * Use these settings to provide HDR 10 metadata that is missing or inaccurate in your input video. * Appropriate values vary depending on the input video and must be provided by a color grader. The color * grader generates these values during the HDR 10 mastering process. The valid range for each of these * settings is 0 to 50,000. Each increment represents 0.00002 in CIE1931 color coordinate. Related * settings - When you specify these values, you must also set Color space to HDR 10. To specify whether * the the values you specify here take precedence over the values in the metadata of your input file, * set Color space usage. To specify whether color metadata is included in an output, set Color metadata. * For more information about MediaConvert HDR jobs, see * https://docs.aws.amazon.com/console/mediaconvert/hdr. * @return Returns a reference to this object so that method calls can be chained together. */ Builder hdr10Metadata(Hdr10Metadata hdr10Metadata); /** * Use these settings to provide HDR 10 metadata that is missing or inaccurate in your input video. Appropriate * values vary depending on the input video and must be provided by a color grader. The color grader generates * these values during the HDR 10 mastering process. The valid range for each of these settings is 0 to 50,000. * Each increment represents 0.00002 in CIE1931 color coordinate. Related settings - When you specify these * values, you must also set Color space to HDR 10. To specify whether the the values you specify here take * precedence over the values in the metadata of your input file, set Color space usage. To specify whether * color metadata is included in an output, set Color metadata. 
For more information about MediaConvert HDR * jobs, see https://docs.aws.amazon.com/console/mediaconvert/hdr. This is a convenience method that creates an * instance of the {@link Hdr10Metadata.Builder} avoiding the need to create one manually via * {@link Hdr10Metadata#builder()}. * *

* When the {@link Consumer} completes, {@link Hdr10Metadata.Builder#build()} is called immediately and its * result is passed to {@link #hdr10Metadata(Hdr10Metadata)}. * * @param hdr10Metadata * a consumer that will call methods on {@link Hdr10Metadata.Builder} * @return Returns a reference to this object so that method calls can be chained together. * @see #hdr10Metadata(Hdr10Metadata) */ default Builder hdr10Metadata(Consumer hdr10Metadata) { return hdr10Metadata(Hdr10Metadata.builder().applyMutation(hdr10Metadata).build()); } /** * Specify the maximum mastering display luminance. Enter an integer from 0 to 2147483647, in units of 0.0001 * nits. For example, enter 10000000 for 1000 nits. * * @param maxLuminance * Specify the maximum mastering display luminance. Enter an integer from 0 to 2147483647, in units of * 0.0001 nits. For example, enter 10000000 for 1000 nits. * @return Returns a reference to this object so that method calls can be chained together. */ Builder maxLuminance(Integer maxLuminance); /** * Use this setting if your input has video and audio durations that don't align, and your output or player has * strict alignment requirements. Examples: Input audio track has a delayed start. Input video track ends before * audio ends. When you set Pad video to Black, MediaConvert generates black video frames so that output video * and audio durations match. Black video frames are added at the beginning or end, depending on your input. To * keep the default behavior and not generate black video, set Pad video to Disabled or leave blank. * * @param padVideo * Use this setting if your input has video and audio durations that don't align, and your output or * player has strict alignment requirements. Examples: Input audio track has a delayed start. Input video * track ends before audio ends. When you set Pad video to Black, MediaConvert generates black video * frames so that output video and audio durations match. 
Black video frames are added at the beginning * or end, depending on your input. To keep the default behavior and not generate black video, set Pad * video to Disabled or leave blank. * @see PadVideo * @return Returns a reference to this object so that method calls can be chained together. * @see PadVideo */ Builder padVideo(String padVideo); /** * Use this setting if your input has video and audio durations that don't align, and your output or player has * strict alignment requirements. Examples: Input audio track has a delayed start. Input video track ends before * audio ends. When you set Pad video to Black, MediaConvert generates black video frames so that output video * and audio durations match. Black video frames are added at the beginning or end, depending on your input. To * keep the default behavior and not generate black video, set Pad video to Disabled or leave blank. * * @param padVideo * Use this setting if your input has video and audio durations that don't align, and your output or * player has strict alignment requirements. Examples: Input audio track has a delayed start. Input video * track ends before audio ends. When you set Pad video to Black, MediaConvert generates black video * frames so that output video and audio durations match. Black video frames are added at the beginning * or end, depending on your input. To keep the default behavior and not generate black video, set Pad * video to Disabled or leave blank. * @see PadVideo * @return Returns a reference to this object so that method calls can be chained together. * @see PadVideo */ Builder padVideo(PadVideo padVideo); /** * Use PID to select specific video data from an input file. Specify this value as an integer; the system * automatically converts it to the hexidecimal value. For example, 257 selects PID 0x101. A PID, or packet * identifier, is an identifier for a set of data in an MPEG-2 transport stream container. 
* * @param pid * Use PID to select specific video data from an input file. Specify this value as an integer; the system * automatically converts it to the hexidecimal value. For example, 257 selects PID 0x101. A PID, or * packet identifier, is an identifier for a set of data in an MPEG-2 transport stream container. * @return Returns a reference to this object so that method calls can be chained together. */ Builder pid(Integer pid); /** * Selects a specific program from within a multi-program transport stream. Note that Quad 4K is not currently * supported. * * @param programNumber * Selects a specific program from within a multi-program transport stream. Note that Quad 4K is not * currently supported. * @return Returns a reference to this object so that method calls can be chained together. */ Builder programNumber(Integer programNumber); /** * Use Rotate to specify how the service rotates your video. You can choose automatic rotation or specify a * rotation. You can specify a clockwise rotation of 0, 90, 180, or 270 degrees. If your input video container * is .mov or .mp4 and your input has rotation metadata, you can choose Automatic to have the service rotate * your video according to the rotation specified in the metadata. The rotation must be within one degree of 90, * 180, or 270 degrees. If the rotation metadata specifies any other rotation, the service will default to no * rotation. By default, the service does no rotation, even if your input video has rotation metadata. The * service doesn't pass through rotation metadata. * * @param rotate * Use Rotate to specify how the service rotates your video. You can choose automatic rotation or specify * a rotation. You can specify a clockwise rotation of 0, 90, 180, or 270 degrees. If your input video * container is .mov or .mp4 and your input has rotation metadata, you can choose Automatic to have the * service rotate your video according to the rotation specified in the metadata. 
The rotation must be * within one degree of 90, 180, or 270 degrees. If the rotation metadata specifies any other rotation, * the service will default to no rotation. By default, the service does no rotation, even if your input * video has rotation metadata. The service doesn't pass through rotation metadata. * @see InputRotate * @return Returns a reference to this object so that method calls can be chained together. * @see InputRotate */ Builder rotate(String rotate); /** * Use Rotate to specify how the service rotates your video. You can choose automatic rotation or specify a * rotation. You can specify a clockwise rotation of 0, 90, 180, or 270 degrees. If your input video container * is .mov or .mp4 and your input has rotation metadata, you can choose Automatic to have the service rotate * your video according to the rotation specified in the metadata. The rotation must be within one degree of 90, * 180, or 270 degrees. If the rotation metadata specifies any other rotation, the service will default to no * rotation. By default, the service does no rotation, even if your input video has rotation metadata. The * service doesn't pass through rotation metadata. * * @param rotate * Use Rotate to specify how the service rotates your video. You can choose automatic rotation or specify * a rotation. You can specify a clockwise rotation of 0, 90, 180, or 270 degrees. If your input video * container is .mov or .mp4 and your input has rotation metadata, you can choose Automatic to have the * service rotate your video according to the rotation specified in the metadata. The rotation must be * within one degree of 90, 180, or 270 degrees. If the rotation metadata specifies any other rotation, * the service will default to no rotation. By default, the service does no rotation, even if your input * video has rotation metadata. The service doesn't pass through rotation metadata. 
* @see InputRotate * @return Returns a reference to this object so that method calls can be chained together. * @see InputRotate */ Builder rotate(InputRotate rotate); /** * If the sample range metadata in your input video is accurate, or if you don't know about sample range, keep * the default value, Follow, for this setting. When you do, the service automatically detects your input sample * range. If your input video has metadata indicating the wrong sample range, specify the accurate sample range * here. When you do, MediaConvert ignores any sample range information in the input metadata. Regardless of * whether MediaConvert uses the input sample range or the sample range that you specify, MediaConvert uses the * sample range for transcoding and also writes it to the output metadata. * * @param sampleRange * If the sample range metadata in your input video is accurate, or if you don't know about sample range, * keep the default value, Follow, for this setting. When you do, the service automatically detects your * input sample range. If your input video has metadata indicating the wrong sample range, specify the * accurate sample range here. When you do, MediaConvert ignores any sample range information in the * input metadata. Regardless of whether MediaConvert uses the input sample range or the sample range * that you specify, MediaConvert uses the sample range for transcoding and also writes it to the output * metadata. * @see InputSampleRange * @return Returns a reference to this object so that method calls can be chained together. * @see InputSampleRange */ Builder sampleRange(String sampleRange); /** * If the sample range metadata in your input video is accurate, or if you don't know about sample range, keep * the default value, Follow, for this setting. When you do, the service automatically detects your input sample * range. If your input video has metadata indicating the wrong sample range, specify the accurate sample range * here. 
When you do, MediaConvert ignores any sample range information in the input metadata. Regardless of * whether MediaConvert uses the input sample range or the sample range that you specify, MediaConvert uses the * sample range for transcoding and also writes it to the output metadata. * * @param sampleRange * If the sample range metadata in your input video is accurate, or if you don't know about sample range, * keep the default value, Follow, for this setting. When you do, the service automatically detects your * input sample range. If your input video has metadata indicating the wrong sample range, specify the * accurate sample range here. When you do, MediaConvert ignores any sample range information in the * input metadata. Regardless of whether MediaConvert uses the input sample range or the sample range * that you specify, MediaConvert uses the sample range for transcoding and also writes it to the output * metadata. * @see InputSampleRange * @return Returns a reference to this object so that method calls can be chained together. 
* @see InputSampleRange */ Builder sampleRange(InputSampleRange sampleRange); } static final class BuilderImpl implements Builder { private String alphaBehavior; private String colorSpace; private String colorSpaceUsage; private String embeddedTimecodeOverride; private Hdr10Metadata hdr10Metadata; private Integer maxLuminance; private String padVideo; private Integer pid; private Integer programNumber; private String rotate; private String sampleRange; private BuilderImpl() { } private BuilderImpl(VideoSelector model) { alphaBehavior(model.alphaBehavior); colorSpace(model.colorSpace); colorSpaceUsage(model.colorSpaceUsage); embeddedTimecodeOverride(model.embeddedTimecodeOverride); hdr10Metadata(model.hdr10Metadata); maxLuminance(model.maxLuminance); padVideo(model.padVideo); pid(model.pid); programNumber(model.programNumber); rotate(model.rotate); sampleRange(model.sampleRange); } public final String getAlphaBehavior() { return alphaBehavior; } public final void setAlphaBehavior(String alphaBehavior) { this.alphaBehavior = alphaBehavior; } @Override public final Builder alphaBehavior(String alphaBehavior) { this.alphaBehavior = alphaBehavior; return this; } @Override public final Builder alphaBehavior(AlphaBehavior alphaBehavior) { this.alphaBehavior(alphaBehavior == null ? null : alphaBehavior.toString()); return this; } public final String getColorSpace() { return colorSpace; } public final void setColorSpace(String colorSpace) { this.colorSpace = colorSpace; } @Override public final Builder colorSpace(String colorSpace) { this.colorSpace = colorSpace; return this; } @Override public final Builder colorSpace(ColorSpace colorSpace) { this.colorSpace(colorSpace == null ? 
null : colorSpace.toString()); return this; } public final String getColorSpaceUsage() { return colorSpaceUsage; } public final void setColorSpaceUsage(String colorSpaceUsage) { this.colorSpaceUsage = colorSpaceUsage; } @Override public final Builder colorSpaceUsage(String colorSpaceUsage) { this.colorSpaceUsage = colorSpaceUsage; return this; } @Override public final Builder colorSpaceUsage(ColorSpaceUsage colorSpaceUsage) { this.colorSpaceUsage(colorSpaceUsage == null ? null : colorSpaceUsage.toString()); return this; } public final String getEmbeddedTimecodeOverride() { return embeddedTimecodeOverride; } public final void setEmbeddedTimecodeOverride(String embeddedTimecodeOverride) { this.embeddedTimecodeOverride = embeddedTimecodeOverride; } @Override public final Builder embeddedTimecodeOverride(String embeddedTimecodeOverride) { this.embeddedTimecodeOverride = embeddedTimecodeOverride; return this; } @Override public final Builder embeddedTimecodeOverride(EmbeddedTimecodeOverride embeddedTimecodeOverride) { this.embeddedTimecodeOverride(embeddedTimecodeOverride == null ? null : embeddedTimecodeOverride.toString()); return this; } public final Hdr10Metadata.Builder getHdr10Metadata() { return hdr10Metadata != null ? hdr10Metadata.toBuilder() : null; } public final void setHdr10Metadata(Hdr10Metadata.BuilderImpl hdr10Metadata) { this.hdr10Metadata = hdr10Metadata != null ? 
hdr10Metadata.build() : null; } @Override public final Builder hdr10Metadata(Hdr10Metadata hdr10Metadata) { this.hdr10Metadata = hdr10Metadata; return this; } public final Integer getMaxLuminance() { return maxLuminance; } public final void setMaxLuminance(Integer maxLuminance) { this.maxLuminance = maxLuminance; } @Override public final Builder maxLuminance(Integer maxLuminance) { this.maxLuminance = maxLuminance; return this; } public final String getPadVideo() { return padVideo; } public final void setPadVideo(String padVideo) { this.padVideo = padVideo; } @Override public final Builder padVideo(String padVideo) { this.padVideo = padVideo; return this; } @Override public final Builder padVideo(PadVideo padVideo) { this.padVideo(padVideo == null ? null : padVideo.toString()); return this; } public final Integer getPid() { return pid; } public final void setPid(Integer pid) { this.pid = pid; } @Override public final Builder pid(Integer pid) { this.pid = pid; return this; } public final Integer getProgramNumber() { return programNumber; } public final void setProgramNumber(Integer programNumber) { this.programNumber = programNumber; } @Override public final Builder programNumber(Integer programNumber) { this.programNumber = programNumber; return this; } public final String getRotate() { return rotate; } public final void setRotate(String rotate) { this.rotate = rotate; } @Override public final Builder rotate(String rotate) { this.rotate = rotate; return this; } @Override public final Builder rotate(InputRotate rotate) { this.rotate(rotate == null ? null : rotate.toString()); return this; } public final String getSampleRange() { return sampleRange; } public final void setSampleRange(String sampleRange) { this.sampleRange = sampleRange; } @Override public final Builder sampleRange(String sampleRange) { this.sampleRange = sampleRange; return this; } @Override public final Builder sampleRange(InputSampleRange sampleRange) { this.sampleRange(sampleRange == null ? 
null : sampleRange.toString()); return this; } @Override public VideoSelector build() { return new VideoSelector(this); } @Override public List> sdkFields() { return SDK_FIELDS; } } }





© 2015 - 2024 Weber Informatics LLC | Privacy Policy