software.amazon.awssdk.services.mediaconvert.model.Mpeg2Settings Maven / Gradle / Ivy

The AWS Java SDK for AWS Elemental MediaConvert module holds the client classes that are used for communicating with AWS Elemental MediaConvert Service
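
As a quick orientation before the generated source, here is a hedged wiring sketch. Every type used comes from the software.amazon.awssdk.services.mediaconvert.model package; the surrounding classes (VideoDescription, VideoCodecSettings, VideoCodec) are assumptions drawn from that package rather than anything defined in this file, so verify them against your SDK version.

// Sketch: build MPEG-2 codec settings and attach them to a job's video description.
Mpeg2Settings mpeg2 = Mpeg2Settings.builder()
        .rateControlMode(Mpeg2RateControlMode.CBR) // constant bitrate
        .bitrate(5_000_000)                        // 5 Mb/s average bitrate
        .build();

VideoDescription video = VideoDescription.builder()      // assumed sibling model class
        .codecSettings(VideoCodecSettings.builder()
                .codec(VideoCodec.MPEG2)                  // selects this settings object
                .mpeg2Settings(mpeg2)
                .build())
        .build();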

/*
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * 
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 * 
 * http://aws.amazon.com/apache2.0
 * 
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */

package software.amazon.awssdk.services.mediaconvert.model;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Function;
import software.amazon.awssdk.annotations.Generated;
import software.amazon.awssdk.core.SdkField;
import software.amazon.awssdk.core.SdkPojo;
import software.amazon.awssdk.core.protocol.MarshallLocation;
import software.amazon.awssdk.core.protocol.MarshallingType;
import software.amazon.awssdk.core.traits.LocationTrait;
import software.amazon.awssdk.utils.ToString;
import software.amazon.awssdk.utils.builder.CopyableBuilder;
import software.amazon.awssdk.utils.builder.ToCopyableBuilder;

/**
 * Required when you set Codec to the value MPEG2.
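 * <p>
 * A minimal construction sketch (illustrative only; the specific enum values and numbers below are assumptions for
 * the example, not recommendations):
 * 
 * <pre>{@code
 * Mpeg2Settings mpeg2Settings = Mpeg2Settings.builder()
 *         .codecProfile(Mpeg2CodecProfile.MAIN)
 *         .codecLevel(Mpeg2CodecLevel.MAIN)
 *         .rateControlMode(Mpeg2RateControlMode.VBR)
 *         .maxBitrate(8_000_000)
 *         .gopSize(90.0)
 *         .build();
 * }</pre>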
 */
@Generated("software.amazon.awssdk:codegen")
public final class Mpeg2Settings implements SdkPojo, Serializable, ToCopyableBuilder<Mpeg2Settings.Builder, Mpeg2Settings> {
    private static final SdkField<String> ADAPTIVE_QUANTIZATION_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("AdaptiveQuantization").getter(getter(Mpeg2Settings::adaptiveQuantizationAsString))
            .setter(setter(Builder::adaptiveQuantization))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("adaptiveQuantization").build())
            .build();

    private static final SdkField<Integer> BITRATE_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("Bitrate").getter(getter(Mpeg2Settings::bitrate)).setter(setter(Builder::bitrate))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("bitrate").build()).build();

    private static final SdkField<String> CODEC_LEVEL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("CodecLevel").getter(getter(Mpeg2Settings::codecLevelAsString)).setter(setter(Builder::codecLevel))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("codecLevel").build()).build();

    private static final SdkField<String> CODEC_PROFILE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("CodecProfile").getter(getter(Mpeg2Settings::codecProfileAsString)).setter(setter(Builder::codecProfile))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("codecProfile").build()).build();

    private static final SdkField<String> DYNAMIC_SUB_GOP_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("DynamicSubGop").getter(getter(Mpeg2Settings::dynamicSubGopAsString))
            .setter(setter(Builder::dynamicSubGop))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("dynamicSubGop").build()).build();

    private static final SdkField<String> FRAMERATE_CONTROL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("FramerateControl").getter(getter(Mpeg2Settings::framerateControlAsString))
            .setter(setter(Builder::framerateControl))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("framerateControl").build()).build();

    private static final SdkField<String> FRAMERATE_CONVERSION_ALGORITHM_FIELD = SdkField
            .<String> builder(MarshallingType.STRING)
            .memberName("FramerateConversionAlgorithm")
            .getter(getter(Mpeg2Settings::framerateConversionAlgorithmAsString))
            .setter(setter(Builder::framerateConversionAlgorithm))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("framerateConversionAlgorithm")
                    .build()).build();

    private static final SdkField<Integer> FRAMERATE_DENOMINATOR_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("FramerateDenominator").getter(getter(Mpeg2Settings::framerateDenominator))
            .setter(setter(Builder::framerateDenominator))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("framerateDenominator").build())
            .build();

    private static final SdkField<Integer> FRAMERATE_NUMERATOR_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("FramerateNumerator").getter(getter(Mpeg2Settings::framerateNumerator))
            .setter(setter(Builder::framerateNumerator))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("framerateNumerator").build())
            .build();

    private static final SdkField<Integer> GOP_CLOSED_CADENCE_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("GopClosedCadence").getter(getter(Mpeg2Settings::gopClosedCadence))
            .setter(setter(Builder::gopClosedCadence))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("gopClosedCadence").build()).build();

    private static final SdkField<Double> GOP_SIZE_FIELD = SdkField.<Double> builder(MarshallingType.DOUBLE)
            .memberName("GopSize").getter(getter(Mpeg2Settings::gopSize)).setter(setter(Builder::gopSize))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("gopSize").build()).build();

    private static final SdkField<String> GOP_SIZE_UNITS_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("GopSizeUnits").getter(getter(Mpeg2Settings::gopSizeUnitsAsString)).setter(setter(Builder::gopSizeUnits))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("gopSizeUnits").build()).build();

    private static final SdkField<Integer> HRD_BUFFER_FINAL_FILL_PERCENTAGE_FIELD = SdkField
            .<Integer> builder(MarshallingType.INTEGER)
            .memberName("HrdBufferFinalFillPercentage")
            .getter(getter(Mpeg2Settings::hrdBufferFinalFillPercentage))
            .setter(setter(Builder::hrdBufferFinalFillPercentage))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("hrdBufferFinalFillPercentage")
                    .build()).build();

    private static final SdkField<Integer> HRD_BUFFER_INITIAL_FILL_PERCENTAGE_FIELD = SdkField
            .<Integer> builder(MarshallingType.INTEGER)
            .memberName("HrdBufferInitialFillPercentage")
            .getter(getter(Mpeg2Settings::hrdBufferInitialFillPercentage))
            .setter(setter(Builder::hrdBufferInitialFillPercentage))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("hrdBufferInitialFillPercentage")
                    .build()).build();

    private static final SdkField<Integer> HRD_BUFFER_SIZE_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("HrdBufferSize").getter(getter(Mpeg2Settings::hrdBufferSize)).setter(setter(Builder::hrdBufferSize))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("hrdBufferSize").build()).build();

    private static final SdkField<String> INTERLACE_MODE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("InterlaceMode").getter(getter(Mpeg2Settings::interlaceModeAsString))
            .setter(setter(Builder::interlaceMode))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("interlaceMode").build()).build();

    private static final SdkField<String> INTRA_DC_PRECISION_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("IntraDcPrecision").getter(getter(Mpeg2Settings::intraDcPrecisionAsString))
            .setter(setter(Builder::intraDcPrecision))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("intraDcPrecision").build()).build();

    private static final SdkField<Integer> MAX_BITRATE_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("MaxBitrate").getter(getter(Mpeg2Settings::maxBitrate)).setter(setter(Builder::maxBitrate))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("maxBitrate").build()).build();

    private static final SdkField<Integer> MIN_I_INTERVAL_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("MinIInterval").getter(getter(Mpeg2Settings::minIInterval)).setter(setter(Builder::minIInterval))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("minIInterval").build()).build();

    private static final SdkField<Integer> NUMBER_B_FRAMES_BETWEEN_REFERENCE_FRAMES_FIELD = SdkField
            .<Integer> builder(MarshallingType.INTEGER)
            .memberName("NumberBFramesBetweenReferenceFrames")
            .getter(getter(Mpeg2Settings::numberBFramesBetweenReferenceFrames))
            .setter(setter(Builder::numberBFramesBetweenReferenceFrames))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD)
                    .locationName("numberBFramesBetweenReferenceFrames").build()).build();

    private static final SdkField<String> PAR_CONTROL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("ParControl").getter(getter(Mpeg2Settings::parControlAsString)).setter(setter(Builder::parControl))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("parControl").build()).build();

    private static final SdkField<Integer> PAR_DENOMINATOR_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("ParDenominator").getter(getter(Mpeg2Settings::parDenominator)).setter(setter(Builder::parDenominator))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("parDenominator").build()).build();

    private static final SdkField<Integer> PAR_NUMERATOR_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("ParNumerator").getter(getter(Mpeg2Settings::parNumerator)).setter(setter(Builder::parNumerator))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("parNumerator").build()).build();

    private static final SdkField<String> QUALITY_TUNING_LEVEL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("QualityTuningLevel").getter(getter(Mpeg2Settings::qualityTuningLevelAsString))
            .setter(setter(Builder::qualityTuningLevel))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("qualityTuningLevel").build())
            .build();

    private static final SdkField<String> RATE_CONTROL_MODE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("RateControlMode").getter(getter(Mpeg2Settings::rateControlModeAsString))
            .setter(setter(Builder::rateControlMode))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("rateControlMode").build()).build();

    private static final SdkField<String> SCAN_TYPE_CONVERSION_MODE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("ScanTypeConversionMode").getter(getter(Mpeg2Settings::scanTypeConversionModeAsString))
            .setter(setter(Builder::scanTypeConversionMode))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("scanTypeConversionMode").build())
            .build();

    private static final SdkField<String> SCENE_CHANGE_DETECT_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("SceneChangeDetect").getter(getter(Mpeg2Settings::sceneChangeDetectAsString))
            .setter(setter(Builder::sceneChangeDetect))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("sceneChangeDetect").build()).build();

    private static final SdkField<String> SLOW_PAL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("SlowPal").getter(getter(Mpeg2Settings::slowPalAsString)).setter(setter(Builder::slowPal))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("slowPal").build()).build();

    private static final SdkField<Integer> SOFTNESS_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
            .memberName("Softness").getter(getter(Mpeg2Settings::softness)).setter(setter(Builder::softness))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("softness").build()).build();

    private static final SdkField<String> SPATIAL_ADAPTIVE_QUANTIZATION_FIELD = SdkField
            .<String> builder(MarshallingType.STRING)
            .memberName("SpatialAdaptiveQuantization")
            .getter(getter(Mpeg2Settings::spatialAdaptiveQuantizationAsString))
            .setter(setter(Builder::spatialAdaptiveQuantization))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("spatialAdaptiveQuantization")
                    .build()).build();

    private static final SdkField<String> SYNTAX_FIELD = SdkField.<String> builder(MarshallingType.STRING).memberName("Syntax")
            .getter(getter(Mpeg2Settings::syntaxAsString)).setter(setter(Builder::syntax))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("syntax").build()).build();

    private static final SdkField<String> TELECINE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
            .memberName("Telecine").getter(getter(Mpeg2Settings::telecineAsString)).setter(setter(Builder::telecine))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("telecine").build()).build();

    private static final SdkField<String> TEMPORAL_ADAPTIVE_QUANTIZATION_FIELD = SdkField
            .<String> builder(MarshallingType.STRING)
            .memberName("TemporalAdaptiveQuantization")
            .getter(getter(Mpeg2Settings::temporalAdaptiveQuantizationAsString))
            .setter(setter(Builder::temporalAdaptiveQuantization))
            .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("temporalAdaptiveQuantization")
                    .build()).build();

    private static final List<SdkField<?>> SDK_FIELDS = Collections.unmodifiableList(Arrays.asList(ADAPTIVE_QUANTIZATION_FIELD,
            BITRATE_FIELD, CODEC_LEVEL_FIELD, CODEC_PROFILE_FIELD, DYNAMIC_SUB_GOP_FIELD, FRAMERATE_CONTROL_FIELD,
            FRAMERATE_CONVERSION_ALGORITHM_FIELD, FRAMERATE_DENOMINATOR_FIELD, FRAMERATE_NUMERATOR_FIELD,
            GOP_CLOSED_CADENCE_FIELD, GOP_SIZE_FIELD, GOP_SIZE_UNITS_FIELD, HRD_BUFFER_FINAL_FILL_PERCENTAGE_FIELD,
            HRD_BUFFER_INITIAL_FILL_PERCENTAGE_FIELD, HRD_BUFFER_SIZE_FIELD, INTERLACE_MODE_FIELD, INTRA_DC_PRECISION_FIELD,
            MAX_BITRATE_FIELD, MIN_I_INTERVAL_FIELD, NUMBER_B_FRAMES_BETWEEN_REFERENCE_FRAMES_FIELD, PAR_CONTROL_FIELD,
            PAR_DENOMINATOR_FIELD, PAR_NUMERATOR_FIELD, QUALITY_TUNING_LEVEL_FIELD, RATE_CONTROL_MODE_FIELD,
            SCAN_TYPE_CONVERSION_MODE_FIELD, SCENE_CHANGE_DETECT_FIELD, SLOW_PAL_FIELD, SOFTNESS_FIELD,
            SPATIAL_ADAPTIVE_QUANTIZATION_FIELD, SYNTAX_FIELD, TELECINE_FIELD, TEMPORAL_ADAPTIVE_QUANTIZATION_FIELD));

    private static final long serialVersionUID = 1L;

    private final String adaptiveQuantization;

    private final Integer bitrate;

    private final String codecLevel;

    private final String codecProfile;

    private final String dynamicSubGop;

    private final String framerateControl;

    private final String framerateConversionAlgorithm;

    private final Integer framerateDenominator;

    private final Integer framerateNumerator;

    private final Integer gopClosedCadence;

    private final Double gopSize;

    private final String gopSizeUnits;

    private final Integer hrdBufferFinalFillPercentage;

    private final Integer hrdBufferInitialFillPercentage;

    private final Integer hrdBufferSize;

    private final String interlaceMode;

    private final String intraDcPrecision;

    private final Integer maxBitrate;

    private final Integer minIInterval;

    private final Integer numberBFramesBetweenReferenceFrames;

    private final String parControl;

    private final Integer parDenominator;

    private final Integer parNumerator;

    private final String qualityTuningLevel;

    private final String rateControlMode;

    private final String scanTypeConversionMode;

    private final String sceneChangeDetect;

    private final String slowPal;

    private final Integer softness;

    private final String spatialAdaptiveQuantization;

    private final String syntax;

    private final String telecine;

    private final String temporalAdaptiveQuantization;

    private Mpeg2Settings(BuilderImpl builder) {
        this.adaptiveQuantization = builder.adaptiveQuantization;
        this.bitrate = builder.bitrate;
        this.codecLevel = builder.codecLevel;
        this.codecProfile = builder.codecProfile;
        this.dynamicSubGop = builder.dynamicSubGop;
        this.framerateControl = builder.framerateControl;
        this.framerateConversionAlgorithm = builder.framerateConversionAlgorithm;
        this.framerateDenominator = builder.framerateDenominator;
        this.framerateNumerator = builder.framerateNumerator;
        this.gopClosedCadence = builder.gopClosedCadence;
        this.gopSize = builder.gopSize;
        this.gopSizeUnits = builder.gopSizeUnits;
        this.hrdBufferFinalFillPercentage = builder.hrdBufferFinalFillPercentage;
        this.hrdBufferInitialFillPercentage = builder.hrdBufferInitialFillPercentage;
        this.hrdBufferSize = builder.hrdBufferSize;
        this.interlaceMode = builder.interlaceMode;
        this.intraDcPrecision = builder.intraDcPrecision;
        this.maxBitrate = builder.maxBitrate;
        this.minIInterval = builder.minIInterval;
        this.numberBFramesBetweenReferenceFrames = builder.numberBFramesBetweenReferenceFrames;
        this.parControl = builder.parControl;
        this.parDenominator = builder.parDenominator;
        this.parNumerator = builder.parNumerator;
        this.qualityTuningLevel = builder.qualityTuningLevel;
        this.rateControlMode = builder.rateControlMode;
        this.scanTypeConversionMode = builder.scanTypeConversionMode;
        this.sceneChangeDetect = builder.sceneChangeDetect;
        this.slowPal = builder.slowPal;
        this.softness = builder.softness;
        this.spatialAdaptiveQuantization = builder.spatialAdaptiveQuantization;
        this.syntax = builder.syntax;
        this.telecine = builder.telecine;
        this.temporalAdaptiveQuantization = builder.temporalAdaptiveQuantization;
    }

    /**
     * Specify the strength of any adaptive quantization filters that you enable. The value that you choose here applies
     * to the following settings: Spatial adaptive quantization, and Temporal adaptive quantization.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version,
     * {@link #adaptiveQuantization} will return {@link Mpeg2AdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}. The raw value
     * returned by the service is available from {@link #adaptiveQuantizationAsString}.
     * </p>
     * 
     * @return Specify the strength of any adaptive quantization filters that you enable. The value that you choose here
     *         applies to the following settings: Spatial adaptive quantization, and Temporal adaptive quantization.
     * @see Mpeg2AdaptiveQuantization
     */
    public final Mpeg2AdaptiveQuantization adaptiveQuantization() {
        return Mpeg2AdaptiveQuantization.fromValue(adaptiveQuantization);
    }

    /**
     * Specify the strength of any adaptive quantization filters that you enable. The value that you choose here applies
     * to the following settings: Spatial adaptive quantization, and Temporal adaptive quantization.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version,
     * {@link #adaptiveQuantization} will return {@link Mpeg2AdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}. The raw value
     * returned by the service is available from {@link #adaptiveQuantizationAsString}.
     * </p>
     * 
     * @return Specify the strength of any adaptive quantization filters that you enable. The value that you choose here
     *         applies to the following settings: Spatial adaptive quantization, and Temporal adaptive quantization.
     * @see Mpeg2AdaptiveQuantization
     */
    public final String adaptiveQuantizationAsString() {
        return adaptiveQuantization;
    }

    /**
     * Specify the average bitrate in bits per second. Required for VBR and CBR. For MS Smooth outputs, bitrates must be
     * unique when rounded down to the nearest multiple of 1000.
     * 
     * @return Specify the average bitrate in bits per second. Required for VBR and CBR. For MS Smooth outputs, bitrates
     *         must be unique when rounded down to the nearest multiple of 1000.
     */
    public final Integer bitrate() {
        return bitrate;
    }

    /**
     * Use Level to set the MPEG-2 level for the video output.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, {@link #codecLevel} will
     * return {@link Mpeg2CodecLevel#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
     * {@link #codecLevelAsString}.
     * </p>
     * 
     * @return Use Level to set the MPEG-2 level for the video output.
     * @see Mpeg2CodecLevel
     */
    public final Mpeg2CodecLevel codecLevel() {
        return Mpeg2CodecLevel.fromValue(codecLevel);
    }

    /**
     * Use Level to set the MPEG-2 level for the video output.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, {@link #codecLevel} will
     * return {@link Mpeg2CodecLevel#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
     * {@link #codecLevelAsString}.
     * </p>
     * 
     * @return Use Level to set the MPEG-2 level for the video output.
     * @see Mpeg2CodecLevel
     */
    public final String codecLevelAsString() {
        return codecLevel;
    }

    /**
     * Use Profile to set the MPEG-2 profile for the video output.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, {@link #codecProfile} will
     * return {@link Mpeg2CodecProfile#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
     * {@link #codecProfileAsString}.
     * </p>
     * 
     * @return Use Profile to set the MPEG-2 profile for the video output.
     * @see Mpeg2CodecProfile
     */
    public final Mpeg2CodecProfile codecProfile() {
        return Mpeg2CodecProfile.fromValue(codecProfile);
    }

    /**
     * Use Profile to set the MPEG-2 profile for the video output.
     * <p>
     * If the service returns an enum value that is not available in the current SDK version, {@link #codecProfile} will
     * return {@link Mpeg2CodecProfile#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
     * {@link #codecProfileAsString}.
     * </p>
     * 
     * @return Use Profile to set the MPEG-2 profile for the video output.
     * @see Mpeg2CodecProfile
     */
    public final String codecProfileAsString() {
        return codecProfile;
    }

    /**
     * Choose Adaptive to improve subjective video quality for high-motion content. This will cause the service to use
     * fewer B-frames (which infer information based on other frames) for high-motion portions of the video and more
     * B-frames for low-motion portions. The maximum number of B-frames is limited by the value you provide for the
     * setting B frames between reference frames.
     * <p>

* If the service returns an enum value that is not available in the current SDK version, {@link #dynamicSubGop} * will return {@link Mpeg2DynamicSubGop#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available * from {@link #dynamicSubGopAsString}. *

* * @return Choose Adaptive to improve subjective video quality for high-motion content. This will cause the service * to use fewer B-frames (which infer information based on other frames) for high-motion portions of the * video and more B-frames for low-motion portions. The maximum number of B-frames is limited by the value * you provide for the setting B frames between reference frames. * @see Mpeg2DynamicSubGop */ public final Mpeg2DynamicSubGop dynamicSubGop() { return Mpeg2DynamicSubGop.fromValue(dynamicSubGop); } /** * Choose Adaptive to improve subjective video quality for high-motion content. This will cause the service to use * fewer B-frames (which infer information based on other frames) for high-motion portions of the video and more * B-frames for low-motion portions. The maximum number of B-frames is limited by the value you provide for the * setting B frames between reference frames. *

* If the service returns an enum value that is not available in the current SDK version, {@link #dynamicSubGop} * will return {@link Mpeg2DynamicSubGop#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available * from {@link #dynamicSubGopAsString}. *

* * @return Choose Adaptive to improve subjective video quality for high-motion content. This will cause the service * to use fewer B-frames (which infer information based on other frames) for high-motion portions of the * video and more B-frames for low-motion portions. The maximum number of B-frames is limited by the value * you provide for the setting B frames between reference frames. * @see Mpeg2DynamicSubGop */ public final String dynamicSubGopAsString() { return dynamicSubGop; } /** * If you are using the console, use the Framerate setting to specify the frame rate for this output. If you want to * keep the same frame rate as the input video, choose Follow source. If you want to do frame rate conversion, * choose a frame rate from the dropdown list or choose Custom. The framerates shown in the dropdown list are * decimal approximations of fractions. If you choose Custom, specify your frame rate as a fraction. *

* If the service returns an enum value that is not available in the current SDK version, {@link #framerateControl} * will return {@link Mpeg2FramerateControl#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #framerateControlAsString}. *

* * @return If you are using the console, use the Framerate setting to specify the frame rate for this output. If you * want to keep the same frame rate as the input video, choose Follow source. If you want to do frame rate * conversion, choose a frame rate from the dropdown list or choose Custom. The framerates shown in the * dropdown list are decimal approximations of fractions. If you choose Custom, specify your frame rate as a * fraction. * @see Mpeg2FramerateControl */ public final Mpeg2FramerateControl framerateControl() { return Mpeg2FramerateControl.fromValue(framerateControl); } /** * If you are using the console, use the Framerate setting to specify the frame rate for this output. If you want to * keep the same frame rate as the input video, choose Follow source. If you want to do frame rate conversion, * choose a frame rate from the dropdown list or choose Custom. The framerates shown in the dropdown list are * decimal approximations of fractions. If you choose Custom, specify your frame rate as a fraction. *

* If the service returns an enum value that is not available in the current SDK version, {@link #framerateControl} * will return {@link Mpeg2FramerateControl#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #framerateControlAsString}. *

* * @return If you are using the console, use the Framerate setting to specify the frame rate for this output. If you * want to keep the same frame rate as the input video, choose Follow source. If you want to do frame rate * conversion, choose a frame rate from the dropdown list or choose Custom. The framerates shown in the * dropdown list are decimal approximations of fractions. If you choose Custom, specify your frame rate as a * fraction. * @see Mpeg2FramerateControl */ public final String framerateControlAsString() { return framerateControl; } /** * Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For numerically * simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value, Drop duplicate. For * numerically complex conversions, to avoid stutter: Choose Interpolate. This results in a smooth picture, but * might introduce undesirable video artifacts. For complex frame rate conversions, especially if your source video * has already been converted from its original cadence: Choose FrameFormer to do motion-compensated interpolation. * FrameFormer uses the best conversion method frame by frame. Note that using FrameFormer increases the transcoding * time and incurs a significant add-on cost. When you choose FrameFormer, your input video resolution must be at * least 128x96. *

* If the service returns an enum value that is not available in the current SDK version, * {@link #framerateConversionAlgorithm} will return * {@link Mpeg2FramerateConversionAlgorithm#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #framerateConversionAlgorithmAsString}. *

* * @return Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For * numerically simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value, * Drop duplicate. For numerically complex conversions, to avoid stutter: Choose Interpolate. This results * in a smooth picture, but might introduce undesirable video artifacts. For complex frame rate conversions, * especially if your source video has already been converted from its original cadence: Choose FrameFormer * to do motion-compensated interpolation. FrameFormer uses the best conversion method frame by frame. Note * that using FrameFormer increases the transcoding time and incurs a significant add-on cost. When you * choose FrameFormer, your input video resolution must be at least 128x96. * @see Mpeg2FramerateConversionAlgorithm */ public final Mpeg2FramerateConversionAlgorithm framerateConversionAlgorithm() { return Mpeg2FramerateConversionAlgorithm.fromValue(framerateConversionAlgorithm); } /** * Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For numerically * simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value, Drop duplicate. For * numerically complex conversions, to avoid stutter: Choose Interpolate. This results in a smooth picture, but * might introduce undesirable video artifacts. For complex frame rate conversions, especially if your source video * has already been converted from its original cadence: Choose FrameFormer to do motion-compensated interpolation. * FrameFormer uses the best conversion method frame by frame. Note that using FrameFormer increases the transcoding * time and incurs a significant add-on cost. When you choose FrameFormer, your input video resolution must be at * least 128x96. *

* If the service returns an enum value that is not available in the current SDK version, * {@link #framerateConversionAlgorithm} will return * {@link Mpeg2FramerateConversionAlgorithm#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #framerateConversionAlgorithmAsString}. *

* * @return Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For * numerically simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value, * Drop duplicate. For numerically complex conversions, to avoid stutter: Choose Interpolate. This results * in a smooth picture, but might introduce undesirable video artifacts. For complex frame rate conversions, * especially if your source video has already been converted from its original cadence: Choose FrameFormer * to do motion-compensated interpolation. FrameFormer uses the best conversion method frame by frame. Note * that using FrameFormer increases the transcoding time and incurs a significant add-on cost. When you * choose FrameFormer, your input video resolution must be at least 128x96. * @see Mpeg2FramerateConversionAlgorithm */ public final String framerateConversionAlgorithmAsString() { return framerateConversionAlgorithm; } /** * When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a fraction. For * example, 24000 / 1001 = 23.976 fps. Use FramerateDenominator to specify the denominator of this fraction. In this * example, use 1001 for the value of FramerateDenominator. When you use the console for transcode jobs that use * frame rate conversion, provide the value as a decimal number for Framerate. In this example, specify 23.976. * * @return When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a * fraction. For example, 24000 / 1001 = 23.976 fps. Use FramerateDenominator to specify the denominator of * this fraction. In this example, use 1001 for the value of FramerateDenominator. When you use the console * for transcode jobs that use frame rate conversion, provide the value as a decimal number for Framerate. * In this example, specify 23.976. */ public final Integer framerateDenominator() { return framerateDenominator; } /** * When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a fraction. For * example, 24000 / 1001 = 23.976 fps. Use FramerateNumerator to specify the numerator of this fraction. In this * example, use 24000 for the value of FramerateNumerator. When you use the console for transcode jobs that use * frame rate conversion, provide the value as a decimal number for Framerate. In this example, specify 23.976. * * @return When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a * fraction. For example, 24000 / 1001 = 23.976 fps. Use FramerateNumerator to specify the numerator of this * fraction. In this example, use 24000 for the value of FramerateNumerator. When you use the console for * transcode jobs that use frame rate conversion, provide the value as a decimal number for Framerate. In * this example, specify 23.976. */ public final Integer framerateNumerator() { return framerateNumerator; } /** * Specify the relative frequency of open to closed GOPs in this output. For example, if you want to allow four open * GOPs and then require a closed GOP, set this value to 5. When you create a streaming output, we recommend that * you keep the default value, 1, so that players starting mid-stream receive an IDR frame as quickly as possible. * Don't set this value to 0; that would break output segmenting. * * @return Specify the relative frequency of open to closed GOPs in this output. 
For example, if you want to allow * four open GOPs and then require a closed GOP, set this value to 5. When you create a streaming output, we * recommend that you keep the default value, 1, so that players starting mid-stream receive an IDR frame as * quickly as possible. Don't set this value to 0; that would break output segmenting. */ public final Integer gopClosedCadence() { return gopClosedCadence; } /** * Specify the interval between keyframes, in seconds or frames, for this output. Default: 12 Related settings: When * you specify the GOP size in seconds, set GOP mode control to Specified, seconds. The default value for GOP mode * control is Frames. * * @return Specify the interval between keyframes, in seconds or frames, for this output. Default: 12 Related * settings: When you specify the GOP size in seconds, set GOP mode control to Specified, seconds. The * default value for GOP mode control is Frames. */ public final Double gopSize() { return gopSize; } /** * Specify the units for GOP size. If you don't specify a value here, by default the encoder measures GOP size in * frames. *

* If the service returns an enum value that is not available in the current SDK version, {@link #gopSizeUnits} will * return {@link Mpeg2GopSizeUnits#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #gopSizeUnitsAsString}. *

* * @return Specify the units for GOP size. If you don't specify a value here, by default the encoder measures GOP * size in frames. * @see Mpeg2GopSizeUnits */ public final Mpeg2GopSizeUnits gopSizeUnits() { return Mpeg2GopSizeUnits.fromValue(gopSizeUnits); } /** * Specify the units for GOP size. If you don't specify a value here, by default the encoder measures GOP size in * frames. *

* If the service returns an enum value that is not available in the current SDK version, {@link #gopSizeUnits} will * return {@link Mpeg2GopSizeUnits#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #gopSizeUnitsAsString}. *

* * @return Specify the units for GOP size. If you don't specify a value here, by default the encoder measures GOP * size in frames. * @see Mpeg2GopSizeUnits */ public final String gopSizeUnitsAsString() { return gopSizeUnits; } /** * If your downstream systems have strict buffer requirements: Specify the minimum percentage of the HRD buffer * that's available at the end of each encoded video segment. For the best video quality: Set to 0 or leave blank to * automatically determine the final buffer fill percentage. * * @return If your downstream systems have strict buffer requirements: Specify the minimum percentage of the HRD * buffer that's available at the end of each encoded video segment. For the best video quality: Set to 0 or * leave blank to automatically determine the final buffer fill percentage. */ public final Integer hrdBufferFinalFillPercentage() { return hrdBufferFinalFillPercentage; } /** * Percentage of the buffer that should initially be filled (HRD buffer model). * * @return Percentage of the buffer that should initially be filled (HRD buffer model). */ public final Integer hrdBufferInitialFillPercentage() { return hrdBufferInitialFillPercentage; } /** * Size of buffer (HRD buffer model) in bits. For example, enter five megabits as 5000000. * * @return Size of buffer (HRD buffer model) in bits. For example, enter five megabits as 5000000. */ public final Integer hrdBufferSize() { return hrdBufferSize; } /** * Choose the scan line type for the output. Keep the default value, Progressive to create a progressive output, * regardless of the scan type of your input. Use Top field first or Bottom field first to create an output that's * interlaced with the same field polarity throughout. Use Follow, default top or Follow, default bottom to produce * outputs with the same field polarity as the source. For jobs that have multiple inputs, the output field polarity * might change over the course of the output. Follow behavior depends on the input scan type. If the source is * interlaced, the output will be interlaced with the same polarity as the source. If the source is progressive, the * output will be interlaced with top field bottom field first, depending on which of the Follow options you choose. *

* If the service returns an enum value that is not available in the current SDK version, {@link #interlaceMode} * will return {@link Mpeg2InterlaceMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available * from {@link #interlaceModeAsString}. *

* * @return Choose the scan line type for the output. Keep the default value, Progressive to create a progressive * output, regardless of the scan type of your input. Use Top field first or Bottom field first to create an * output that's interlaced with the same field polarity throughout. Use Follow, default top or Follow, * default bottom to produce outputs with the same field polarity as the source. For jobs that have multiple * inputs, the output field polarity might change over the course of the output. Follow behavior depends on * the input scan type. If the source is interlaced, the output will be interlaced with the same polarity as * the source. If the source is progressive, the output will be interlaced with top field bottom field * first, depending on which of the Follow options you choose. * @see Mpeg2InterlaceMode */ public final Mpeg2InterlaceMode interlaceMode() { return Mpeg2InterlaceMode.fromValue(interlaceMode); } /** * Choose the scan line type for the output. Keep the default value, Progressive to create a progressive output, * regardless of the scan type of your input. Use Top field first or Bottom field first to create an output that's * interlaced with the same field polarity throughout. Use Follow, default top or Follow, default bottom to produce * outputs with the same field polarity as the source. For jobs that have multiple inputs, the output field polarity * might change over the course of the output. Follow behavior depends on the input scan type. If the source is * interlaced, the output will be interlaced with the same polarity as the source. If the source is progressive, the * output will be interlaced with top field bottom field first, depending on which of the Follow options you choose. *

* If the service returns an enum value that is not available in the current SDK version, {@link #interlaceMode} * will return {@link Mpeg2InterlaceMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available * from {@link #interlaceModeAsString}. *

* * @return Choose the scan line type for the output. Keep the default value, Progressive to create a progressive * output, regardless of the scan type of your input. Use Top field first or Bottom field first to create an * output that's interlaced with the same field polarity throughout. Use Follow, default top or Follow, * default bottom to produce outputs with the same field polarity as the source. For jobs that have multiple * inputs, the output field polarity might change over the course of the output. Follow behavior depends on * the input scan type. If the source is interlaced, the output will be interlaced with the same polarity as * the source. If the source is progressive, the output will be interlaced with top field bottom field * first, depending on which of the Follow options you choose. * @see Mpeg2InterlaceMode */ public final String interlaceModeAsString() { return interlaceMode; } /** * Use Intra DC precision to set quantization precision for intra-block DC coefficients. If you choose the value * auto, the service will automatically select the precision based on the per-frame compression ratio. *

* If the service returns an enum value that is not available in the current SDK version, {@link #intraDcPrecision} * will return {@link Mpeg2IntraDcPrecision#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #intraDcPrecisionAsString}. *

* * @return Use Intra DC precision to set quantization precision for intra-block DC coefficients. If you choose the * value auto, the service will automatically select the precision based on the per-frame compression ratio. * @see Mpeg2IntraDcPrecision */ public final Mpeg2IntraDcPrecision intraDcPrecision() { return Mpeg2IntraDcPrecision.fromValue(intraDcPrecision); } /** * Use Intra DC precision to set quantization precision for intra-block DC coefficients. If you choose the value * auto, the service will automatically select the precision based on the per-frame compression ratio. *

* If the service returns an enum value that is not available in the current SDK version, {@link #intraDcPrecision} * will return {@link Mpeg2IntraDcPrecision#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #intraDcPrecisionAsString}. *

* * @return Use Intra DC precision to set quantization precision for intra-block DC coefficients. If you choose the * value auto, the service will automatically select the precision based on the per-frame compression ratio. * @see Mpeg2IntraDcPrecision */ public final String intraDcPrecisionAsString() { return intraDcPrecision; } /** * Maximum bitrate in bits/second. For example, enter five megabits per second as 5000000. * * @return Maximum bitrate in bits/second. For example, enter five megabits per second as 5000000. */ public final Integer maxBitrate() { return maxBitrate; } /** * Specify the minimum number of frames allowed between two IDR-frames in your output. This includes frames created * at the start of a GOP or a scene change. Use Min I-Interval to improve video compression by varying GOP size when * two IDR-frames would be created near each other. For example, if a regular cadence-driven IDR-frame would fall * within 5 frames of a scene-change IDR-frame, and you set Min I-interval to 5, then the encoder would only write * an IDR-frame for the scene-change. In this way, one GOP is shortened or extended. If a cadence-driven IDR-frame * would be further than 5 frames from a scene-change IDR-frame, then the encoder leaves all IDR-frames in place. To * manually specify an interval: Enter a value from 1 to 30. Use when your downstream systems have specific GOP size * requirements. To disable GOP size variance: Enter 0. MediaConvert will only create IDR-frames at the start of * your output's cadence-driven GOP. Use when your downstream systems require a regular GOP size. * * @return Specify the minimum number of frames allowed between two IDR-frames in your output. This includes frames * created at the start of a GOP or a scene change. Use Min I-Interval to improve video compression by * varying GOP size when two IDR-frames would be created near each other. For example, if a regular * cadence-driven IDR-frame would fall within 5 frames of a scene-change IDR-frame, and you set Min * I-interval to 5, then the encoder would only write an IDR-frame for the scene-change. In this way, one * GOP is shortened or extended. If a cadence-driven IDR-frame would be further than 5 frames from a * scene-change IDR-frame, then the encoder leaves all IDR-frames in place. To manually specify an interval: * Enter a value from 1 to 30. Use when your downstream systems have specific GOP size requirements. To * disable GOP size variance: Enter 0. MediaConvert will only create IDR-frames at the start of your * output's cadence-driven GOP. Use when your downstream systems require a regular GOP size. */ public final Integer minIInterval() { return minIInterval; } /** * Specify the number of B-frames that MediaConvert puts between reference frames in this output. Valid values are * whole numbers from 0 through 7. When you don't specify a value, MediaConvert defaults to 2. * * @return Specify the number of B-frames that MediaConvert puts between reference frames in this output. Valid * values are whole numbers from 0 through 7. When you don't specify a value, MediaConvert defaults to 2. */ public final Integer numberBFramesBetweenReferenceFrames() { return numberBFramesBetweenReferenceFrames; } /** * Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default behavior, * Follow source, uses the PAR from your input video for your output. To specify a different PAR in the console, * choose any value other than Follow source. 
When you choose SPECIFIED for this setting, you must also specify * values for the parNumerator and parDenominator settings. *

* If the service returns an enum value that is not available in the current SDK version, {@link #parControl} will * return {@link Mpeg2ParControl#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #parControlAsString}. *

* * @return Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default * behavior, Follow source, uses the PAR from your input video for your output. To specify a different PAR * in the console, choose any value other than Follow source. When you choose SPECIFIED for this setting, * you must also specify values for the parNumerator and parDenominator settings. * @see Mpeg2ParControl */ public final Mpeg2ParControl parControl() { return Mpeg2ParControl.fromValue(parControl); } /** * Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default behavior, * Follow source, uses the PAR from your input video for your output. To specify a different PAR in the console, * choose any value other than Follow source. When you choose SPECIFIED for this setting, you must also specify * values for the parNumerator and parDenominator settings. *

* If the service returns an enum value that is not available in the current SDK version, {@link #parControl} will * return {@link Mpeg2ParControl#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #parControlAsString}. *

* * @return Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default * behavior, Follow source, uses the PAR from your input video for your output. To specify a different PAR * in the console, choose any value other than Follow source. When you choose SPECIFIED for this setting, * you must also specify values for the parNumerator and parDenominator settings. * @see Mpeg2ParControl */ public final String parControlAsString() { return parControl; } /** * Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value other than * Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your input video PAR, * provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would specify the ratio 40:33. In * this example, the value for parDenominator is 33. * * @return Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value * other than Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your * input video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would * specify the ratio 40:33. In this example, the value for parDenominator is 33. */ public final Integer parDenominator() { return parDenominator; } /** * Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value other than * Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your input video PAR, * provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would specify the ratio 40:33. In * this example, the value for parNumerator is 40. * * @return Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value * other than Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your * input video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would * specify the ratio 40:33. In this example, the value for parNumerator is 40. */ public final Integer parNumerator() { return parNumerator; } /** * Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video quality. * The default behavior is faster, lower quality, single-pass encoding. *

* If the service returns an enum value that is not available in the current SDK version, * {@link #qualityTuningLevel} will return {@link Mpeg2QualityTuningLevel#UNKNOWN_TO_SDK_VERSION}. The raw value * returned by the service is available from {@link #qualityTuningLevelAsString}. *

* * @return Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video * quality. The default behavior is faster, lower quality, single-pass encoding. * @see Mpeg2QualityTuningLevel */ public final Mpeg2QualityTuningLevel qualityTuningLevel() { return Mpeg2QualityTuningLevel.fromValue(qualityTuningLevel); } /** * Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video quality. * The default behavior is faster, lower quality, single-pass encoding. *

* If the service returns an enum value that is not available in the current SDK version, * {@link #qualityTuningLevel} will return {@link Mpeg2QualityTuningLevel#UNKNOWN_TO_SDK_VERSION}. The raw value * returned by the service is available from {@link #qualityTuningLevelAsString}. *

* * @return Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video * quality. The default behavior is faster, lower quality, single-pass encoding. * @see Mpeg2QualityTuningLevel */ public final String qualityTuningLevelAsString() { return qualityTuningLevel; } /** * Use Rate control mode to specify whether the bitrate is variable (vbr) or constant (cbr). *

* If the service returns an enum value that is not available in the current SDK version, {@link #rateControlMode} * will return {@link Mpeg2RateControlMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #rateControlModeAsString}. *

* * @return Use Rate control mode to specify whether the bitrate is variable (vbr) or constant (cbr). * @see Mpeg2RateControlMode */ public final Mpeg2RateControlMode rateControlMode() { return Mpeg2RateControlMode.fromValue(rateControlMode); } /** * Use Rate control mode to specify whether the bitrate is variable (vbr) or constant (cbr). *

* If the service returns an enum value that is not available in the current SDK version, {@link #rateControlMode} * will return {@link Mpeg2RateControlMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #rateControlModeAsString}. *

* * @return Use Rate control mode to specify whether the bitrate is variable (vbr) or constant (cbr). * @see Mpeg2RateControlMode */ public final String rateControlModeAsString() { return rateControlMode; } /** * Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In this * situation, choose Optimized interlacing to create a better quality interlaced output. In this case, each * progressive frame from the input corresponds to an interlaced field in the output. Keep the default value, Basic * interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs any frame rate * conversion first and then interlaces the frames. When you choose Optimized interlacing and you set your output * frame rate to a value that isn't suitable for optimized interlacing, MediaConvert automatically falls back to * basic interlacing. Required settings: To use optimized interlacing, you must set Telecine to None or Soft. You * can't use optimized interlacing for hard telecine outputs. You must also set Interlace mode to a value other than * Progressive. *

* If the service returns an enum value that is not available in the current SDK version, * {@link #scanTypeConversionMode} will return {@link Mpeg2ScanTypeConversionMode#UNKNOWN_TO_SDK_VERSION}. The raw * value returned by the service is available from {@link #scanTypeConversionModeAsString}. *

* * @return Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In * this situation, choose Optimized interlacing to create a better quality interlaced output. In this case, * each progressive frame from the input corresponds to an interlaced field in the output. Keep the default * value, Basic interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs * any frame rate conversion first and then interlaces the frames. When you choose Optimized interlacing and * you set your output frame rate to a value that isn't suitable for optimized interlacing, MediaConvert * automatically falls back to basic interlacing. Required settings: To use optimized interlacing, you must * set Telecine to None or Soft. You can't use optimized interlacing for hard telecine outputs. You must * also set Interlace mode to a value other than Progressive. * @see Mpeg2ScanTypeConversionMode */ public final Mpeg2ScanTypeConversionMode scanTypeConversionMode() { return Mpeg2ScanTypeConversionMode.fromValue(scanTypeConversionMode); } /** * Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In this * situation, choose Optimized interlacing to create a better quality interlaced output. In this case, each * progressive frame from the input corresponds to an interlaced field in the output. Keep the default value, Basic * interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs any frame rate * conversion first and then interlaces the frames. When you choose Optimized interlacing and you set your output * frame rate to a value that isn't suitable for optimized interlacing, MediaConvert automatically falls back to * basic interlacing. Required settings: To use optimized interlacing, you must set Telecine to None or Soft. You * can't use optimized interlacing for hard telecine outputs. You must also set Interlace mode to a value other than * Progressive. *

* If the service returns an enum value that is not available in the current SDK version, * {@link #scanTypeConversionMode} will return {@link Mpeg2ScanTypeConversionMode#UNKNOWN_TO_SDK_VERSION}. The raw * value returned by the service is available from {@link #scanTypeConversionModeAsString}. *

* * @return Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In * this situation, choose Optimized interlacing to create a better quality interlaced output. In this case, * each progressive frame from the input corresponds to an interlaced field in the output. Keep the default * value, Basic interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs * any frame rate conversion first and then interlaces the frames. When you choose Optimized interlacing and * you set your output frame rate to a value that isn't suitable for optimized interlacing, MediaConvert * automatically falls back to basic interlacing. Required settings: To use optimized interlacing, you must * set Telecine to None or Soft. You can't use optimized interlacing for hard telecine outputs. You must * also set Interlace mode to a value other than Progressive. * @see Mpeg2ScanTypeConversionMode */ public final String scanTypeConversionModeAsString() { return scanTypeConversionMode; } /** * Enable this setting to insert I-frames at scene changes that the service automatically detects. This improves * video quality and is enabled by default. *
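     * <p>
     * Illustrative usage (not part of the generated documentation): a minimal sketch of turning automatic scene-change
     * I-frame insertion off, for example when a downstream system needs a strictly regular GOP. The builder setter and
     * enum constant name are assumptions; verify them against {@link Mpeg2SceneChangeDetect} in your SDK version.
     *
     * <pre>{@code
     * // Disable scene-change detection (setter and constant name assumed)
     * Mpeg2Settings fixedGop = Mpeg2Settings.builder()
     *         .sceneChangeDetect(Mpeg2SceneChangeDetect.DISABLED)
     *         .build();
     * }</pre>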

* If the service returns an enum value that is not available in the current SDK version, {@link #sceneChangeDetect} * will return {@link Mpeg2SceneChangeDetect#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #sceneChangeDetectAsString}. *

* * @return Enable this setting to insert I-frames at scene changes that the service automatically detects. This * improves video quality and is enabled by default. * @see Mpeg2SceneChangeDetect */ public final Mpeg2SceneChangeDetect sceneChangeDetect() { return Mpeg2SceneChangeDetect.fromValue(sceneChangeDetect); } /** * Enable this setting to insert I-frames at scene changes that the service automatically detects. This improves * video quality and is enabled by default. *

* If the service returns an enum value that is not available in the current SDK version, {@link #sceneChangeDetect} * will return {@link Mpeg2SceneChangeDetect#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #sceneChangeDetectAsString}. *

* * @return Enable this setting to insert I-frames at scene changes that the service automatically detects. This * improves video quality and is enabled by default. * @see Mpeg2SceneChangeDetect */ public final String sceneChangeDetectAsString() { return sceneChangeDetect; } /** * Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL to * create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and resamples * your audio to keep it synchronized with the video. Note that enabling this setting will slightly reduce the * duration of your video. Required settings: You must also set Framerate to 25. *
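     * <p>
     * Illustrative usage (not part of the generated documentation): a minimal sketch of the slow-PAL combination
     * described above, pairing the enable flag with a specified 25 fps output frame rate. The builder setter and enum
     * constant names are assumptions; verify them against {@link Mpeg2SlowPal} and {@link Mpeg2FramerateControl} in
     * your SDK version.
     *
     * <pre>{@code
     * // 23.976/24 fps source relabeled to 25 fps (setter and constant names assumed)
     * Mpeg2Settings slowPalOutput = Mpeg2Settings.builder()
     *         .slowPal(Mpeg2SlowPal.ENABLED)
     *         .framerateControl(Mpeg2FramerateControl.SPECIFIED)
     *         .framerateNumerator(25)
     *         .framerateDenominator(1)
     *         .build();
     * }</pre>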

* If the service returns an enum value that is not available in the current SDK version, {@link #slowPal} will * return {@link Mpeg2SlowPal#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #slowPalAsString}. *

* * @return Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL * to create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and * resamples your audio to keep it synchronized with the video. Note that enabling this setting will * slightly reduce the duration of your video. Required settings: You must also set Framerate to 25. * @see Mpeg2SlowPal */ public final Mpeg2SlowPal slowPal() { return Mpeg2SlowPal.fromValue(slowPal); } /** * Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL to * create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and resamples * your audio to keep it synchronized with the video. Note that enabling this setting will slightly reduce the * duration of your video. Required settings: You must also set Framerate to 25. *

* If the service returns an enum value that is not available in the current SDK version, {@link #slowPal} will * return {@link Mpeg2SlowPal#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #slowPalAsString}. *

* * @return Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL * to create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and * resamples your audio to keep it synchronized with the video. Note that enabling this setting will * slightly reduce the duration of your video. Required settings: You must also set Framerate to 25. * @see Mpeg2SlowPal */ public final String slowPalAsString() { return slowPal; } /** * Ignore this setting unless you need to comply with a specification that requires a specific value. If you don't * have a specification requirement, we recommend that you adjust the softness of your output by using a lower value * for the setting Sharpness or by enabling a noise reducer filter. The Softness setting specifies the quantization * matrices that the encoder uses. Keep the default value, 0, to use the AWS Elemental default matrices. Choose a * value from 17 to 128 to use planar interpolation. Increasing values from 17 to 128 result in increasing reduction * of high-frequency data. The value 128 results in the softest video. * * @return Ignore this setting unless you need to comply with a specification that requires a specific value. If you * don't have a specification requirement, we recommend that you adjust the softness of your output by using * a lower value for the setting Sharpness or by enabling a noise reducer filter. The Softness setting * specifies the quantization matrices that the encoder uses. Keep the default value, 0, to use the AWS * Elemental default matrices. Choose a value from 17 to 128 to use planar interpolation. Increasing values * from 17 to 128 result in increasing reduction of high-frequency data. The value 128 results in the * softest video. */ public final Integer softness() { return softness; } /** * Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of content * complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain more distortion * with no noticeable visual degradation and uses more bits on areas where any small distortion will be noticeable. * For example, complex textured blocks are encoded with fewer bits and smooth textured blocks are encoded with more * bits. Enabling this feature will almost always improve your video quality. Note, though, that this feature * doesn't take into account where the viewer's attention is likely to be. If viewers are likely to be focusing * their attention on a part of the screen with a lot of complex texture, you might choose to disable this feature. * Related setting: When you enable spatial adaptive quantization, set the value for Adaptive quantization depending * on your content. For homogeneous content, such as cartoons and video games, set it to Low. For content with a * wider variety of textures, set it to High or Higher. *
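     * <p>
     * Illustrative usage (not part of the generated documentation): a minimal sketch of pairing spatial adaptive
     * quantization with a low overall adaptive-quantization strength for homogeneous content such as cartoons, as the
     * description above suggests. The builder setters and enum constant names are assumptions; verify them against
     * {@link Mpeg2SpatialAdaptiveQuantization} and {@link Mpeg2AdaptiveQuantization} in your SDK version.
     *
     * <pre>{@code
     * // Spatial AQ with a low filter strength for flat, homogeneous content (constant names assumed)
     * Mpeg2Settings cartoonTuned = Mpeg2Settings.builder()
     *         .spatialAdaptiveQuantization(Mpeg2SpatialAdaptiveQuantization.ENABLED)
     *         .adaptiveQuantization(Mpeg2AdaptiveQuantization.LOW)
     *         .build();
     * }</pre>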

* If the service returns an enum value that is not available in the current SDK version, * {@link #spatialAdaptiveQuantization} will return {@link Mpeg2SpatialAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}. * The raw value returned by the service is available from {@link #spatialAdaptiveQuantizationAsString}. *

* * @return Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of * content complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain * more distortion with no noticeable visual degradation and uses more bits on areas where any small * distortion will be noticeable. For example, complex textured blocks are encoded with fewer bits and * smooth textured blocks are encoded with more bits. Enabling this feature will almost always improve your * video quality. Note, though, that this feature doesn't take into account where the viewer's attention is * likely to be. If viewers are likely to be focusing their attention on a part of the screen with a lot of * complex texture, you might choose to disable this feature. Related setting: When you enable spatial * adaptive quantization, set the value for Adaptive quantization depending on your content. For homogeneous * content, such as cartoons and video games, set it to Low. For content with a wider variety of textures, * set it to High or Higher. * @see Mpeg2SpatialAdaptiveQuantization */ public final Mpeg2SpatialAdaptiveQuantization spatialAdaptiveQuantization() { return Mpeg2SpatialAdaptiveQuantization.fromValue(spatialAdaptiveQuantization); } /** * Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of content * complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain more distortion * with no noticeable visual degradation and uses more bits on areas where any small distortion will be noticeable. * For example, complex textured blocks are encoded with fewer bits and smooth textured blocks are encoded with more * bits. Enabling this feature will almost always improve your video quality. Note, though, that this feature * doesn't take into account where the viewer's attention is likely to be. If viewers are likely to be focusing * their attention on a part of the screen with a lot of complex texture, you might choose to disable this feature. * Related setting: When you enable spatial adaptive quantization, set the value for Adaptive quantization depending * on your content. For homogeneous content, such as cartoons and video games, set it to Low. For content with a * wider variety of textures, set it to High or Higher. *

* If the service returns an enum value that is not available in the current SDK version, * {@link #spatialAdaptiveQuantization} will return {@link Mpeg2SpatialAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}. * The raw value returned by the service is available from {@link #spatialAdaptiveQuantizationAsString}. *

* * @return Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of * content complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain * more distortion with no noticeable visual degradation and uses more bits on areas where any small * distortion will be noticeable. For example, complex textured blocks are encoded with fewer bits and * smooth textured blocks are encoded with more bits. Enabling this feature will almost always improve your * video quality. Note, though, that this feature doesn't take into account where the viewer's attention is * likely to be. If viewers are likely to be focusing their attention on a part of the screen with a lot of * complex texture, you might choose to disable this feature. Related setting: When you enable spatial * adaptive quantization, set the value for Adaptive quantization depending on your content. For homogeneous * content, such as cartoons and video games, set it to Low. For content with a wider variety of textures, * set it to High or Higher. * @see Mpeg2SpatialAdaptiveQuantization */ public final String spatialAdaptiveQuantizationAsString() { return spatialAdaptiveQuantization; } /** * Specify whether this output's video uses the D10 syntax. Keep the default value to not use the syntax. Related * settings: When you choose D10 for your MXF profile, you must also set this value to D10. *
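     * <p>
     * Illustrative usage (not part of the generated documentation): a minimal sketch of selecting the D10 syntax for
     * this output. The matching D10 MXF profile is configured elsewhere in the job settings, not on this object. The
     * builder setter and enum constant name are assumptions; verify them against {@link Mpeg2Syntax} in your SDK
     * version.
     *
     * <pre>{@code
     * // D10 elementary-stream syntax (constant name assumed; often spelled D_10 in the SDK enums)
     * Mpeg2Settings d10 = Mpeg2Settings.builder()
     *         .syntax(Mpeg2Syntax.D_10)
     *         .build();
     * }</pre>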

* If the service returns an enum value that is not available in the current SDK version, {@link #syntax} will * return {@link Mpeg2Syntax#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #syntaxAsString}. *

* * @return Specify whether this output's video uses the D10 syntax. Keep the default value to not use the syntax. * Related settings: When you choose D10 for your MXF profile, you must also set this value to D10. * @see Mpeg2Syntax */ public final Mpeg2Syntax syntax() { return Mpeg2Syntax.fromValue(syntax); } /** * Specify whether this output's video uses the D10 syntax. Keep the default value to not use the syntax. Related * settings: When you choose D10 for your MXF profile, you must also set this value to D10. *

* If the service returns an enum value that is not available in the current SDK version, {@link #syntax} will * return {@link Mpeg2Syntax#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #syntaxAsString}. *

     *
     * @return Specify whether this output's video uses the D10 syntax. Keep the default value to not use the syntax.
     *         Related settings: When you choose D10 for your MXF profile, you must also set this value to D10.
     * @see Mpeg2Syntax
     */
    public final String syntaxAsString() {
        return syntax;
    }

    /**
     * When you do frame rate conversion from 23.976 frames per second (fps) to 29.97 fps, and your output scan type is
     * interlaced, you can optionally enable hard or soft telecine to create a smoother picture. Hard telecine produces
     * a 29.97i output. Soft telecine produces a 23.976 output that signals to the video player device to do the
     * conversion during playback. When you keep the default value, None, MediaConvert does a standard frame rate
     * conversion to 29.97 without doing anything with the field polarity to create a smoother picture.
     *
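     * <p>
     * Illustrative usage (not part of the generated documentation): a minimal sketch of requesting soft telecine for a
     * 23.976 fps source delivered as a 29.97 interlaced output, as described above. The builder setters and enum
     * constant names are assumptions; verify them against {@link Mpeg2Telecine} and {@link Mpeg2InterlaceMode} in your
     * SDK version.
     *
     * <pre>{@code
     * // Soft telecine: flag the output for pulldown on playback instead of converting frames (constant names assumed)
     * Mpeg2Settings softTelecine = Mpeg2Settings.builder()
     *         .telecine(Mpeg2Telecine.SOFT)
     *         .interlaceMode(Mpeg2InterlaceMode.TOP_FIELD)
     *         .build();
     * }</pre>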

* If the service returns an enum value that is not available in the current SDK version, {@link #telecine} will * return {@link Mpeg2Telecine#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #telecineAsString}. *

     *
     * @return When you do frame rate conversion from 23.976 frames per second (fps) to 29.97 fps, and your output scan
     *         type is interlaced, you can optionally enable hard or soft telecine to create a smoother picture. Hard
     *         telecine produces a 29.97i output. Soft telecine produces a 23.976 output that signals to the video
     *         player device to do the conversion during playback. When you keep the default value, None, MediaConvert
     *         does a standard frame rate conversion to 29.97 without doing anything with the field polarity to create
     *         a smoother picture.
     * @see Mpeg2Telecine
     */
    public final Mpeg2Telecine telecine() {
        return Mpeg2Telecine.fromValue(telecine);
    }

    /**
     * When you do frame rate conversion from 23.976 frames per second (fps) to 29.97 fps, and your output scan type is
     * interlaced, you can optionally enable hard or soft telecine to create a smoother picture. Hard telecine produces
     * a 29.97i output. Soft telecine produces a 23.976 output that signals to the video player device to do the
     * conversion during playback. When you keep the default value, None, MediaConvert does a standard frame rate
     * conversion to 29.97 without doing anything with the field polarity to create a smoother picture.
     *

* If the service returns an enum value that is not available in the current SDK version, {@link #telecine} will * return {@link Mpeg2Telecine#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from * {@link #telecineAsString}. *

     *
     * @return When you do frame rate conversion from 23.976 frames per second (fps) to 29.97 fps, and your output scan
     *         type is interlaced, you can optionally enable hard or soft telecine to create a smoother picture. Hard
     *         telecine produces a 29.97i output. Soft telecine produces a 23.976 output that signals to the video
     *         player device to do the conversion during playback. When you keep the default value, None, MediaConvert
     *         does a standard frame rate conversion to 29.97 without doing anything with the field polarity to create
     *         a smoother picture.
     * @see Mpeg2Telecine
     */
    public final String telecineAsString() {
        return telecine;
    }

    /**
     * Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of content
     * complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that aren't moving
     * and uses more bits on complex objects with sharp edges that move a lot. For example, this feature improves the
     * readability of text tickers on newscasts and scoreboards on sports matches. Enabling this feature will almost
     * always improve your video quality. Note, though, that this feature doesn't take into account where the viewer's
     * attention is likely to be. If viewers are likely to be focusing their attention on a part of the screen that
     * doesn't have moving objects with sharp edges, such as sports athletes' faces, you might choose to disable this
     * feature. Related setting: When you enable temporal quantization, adjust the strength of the filter with the
     * setting Adaptive quantization.
     *
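     * <p>
     * Illustrative usage (not part of the generated documentation): a minimal sketch of disabling temporal adaptive
     * quantization for content where viewers focus on moving, sharp-edged detail, as the description above suggests.
     * The builder setter and enum constant name are assumptions; verify them against
     * {@link Mpeg2TemporalAdaptiveQuantization} in your SDK version.
     *
     * <pre>{@code
     * // Turn temporal AQ off when moving detail must stay crisp (setter and constant name assumed)
     * Mpeg2Settings sportsTuned = Mpeg2Settings.builder()
     *         .temporalAdaptiveQuantization(Mpeg2TemporalAdaptiveQuantization.DISABLED)
     *         .build();
     * }</pre>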

* If the service returns an enum value that is not available in the current SDK version, * {@link #temporalAdaptiveQuantization} will return * {@link Mpeg2TemporalAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #temporalAdaptiveQuantizationAsString}. *

* * @return Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of * content complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that * aren't moving and uses more bits on complex objects with sharp edges that move a lot. For example, this * feature improves the readability of text tickers on newscasts and scoreboards on sports matches. Enabling * this feature will almost always improve your video quality. Note, though, that this feature doesn't take * into account where the viewer's attention is likely to be. If viewers are likely to be focusing their * attention on a part of the screen that doesn't have moving objects with sharp edges, such as sports * athletes' faces, you might choose to disable this feature. Related setting: When you enable temporal * quantization, adjust the strength of the filter with the setting Adaptive quantization. * @see Mpeg2TemporalAdaptiveQuantization */ public final Mpeg2TemporalAdaptiveQuantization temporalAdaptiveQuantization() { return Mpeg2TemporalAdaptiveQuantization.fromValue(temporalAdaptiveQuantization); } /** * Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of content * complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that aren't moving * and uses more bits on complex objects with sharp edges that move a lot. For example, this feature improves the * readability of text tickers on newscasts and scoreboards on sports matches. Enabling this feature will almost * always improve your video quality. Note, though, that this feature doesn't take into account where the viewer's * attention is likely to be. If viewers are likely to be focusing their attention on a part of the screen that * doesn't have moving objects with sharp edges, such as sports athletes' faces, you might choose to disable this * feature. Related setting: When you enable temporal quantization, adjust the strength of the filter with the * setting Adaptive quantization. *

* If the service returns an enum value that is not available in the current SDK version, * {@link #temporalAdaptiveQuantization} will return * {@link Mpeg2TemporalAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is * available from {@link #temporalAdaptiveQuantizationAsString}. *

* * @return Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of * content complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that * aren't moving and uses more bits on complex objects with sharp edges that move a lot. For example, this * feature improves the readability of text tickers on newscasts and scoreboards on sports matches. Enabling * this feature will almost always improve your video quality. Note, though, that this feature doesn't take * into account where the viewer's attention is likely to be. If viewers are likely to be focusing their * attention on a part of the screen that doesn't have moving objects with sharp edges, such as sports * athletes' faces, you might choose to disable this feature. Related setting: When you enable temporal * quantization, adjust the strength of the filter with the setting Adaptive quantization. * @see Mpeg2TemporalAdaptiveQuantization */ public final String temporalAdaptiveQuantizationAsString() { return temporalAdaptiveQuantization; } @Override public Builder toBuilder() { return new BuilderImpl(this); } public static Builder builder() { return new BuilderImpl(); } public static Class serializableBuilderClass() { return BuilderImpl.class; } @Override public final int hashCode() { int hashCode = 1; hashCode = 31 * hashCode + Objects.hashCode(adaptiveQuantizationAsString()); hashCode = 31 * hashCode + Objects.hashCode(bitrate()); hashCode = 31 * hashCode + Objects.hashCode(codecLevelAsString()); hashCode = 31 * hashCode + Objects.hashCode(codecProfileAsString()); hashCode = 31 * hashCode + Objects.hashCode(dynamicSubGopAsString()); hashCode = 31 * hashCode + Objects.hashCode(framerateControlAsString()); hashCode = 31 * hashCode + Objects.hashCode(framerateConversionAlgorithmAsString()); hashCode = 31 * hashCode + Objects.hashCode(framerateDenominator()); hashCode = 31 * hashCode + Objects.hashCode(framerateNumerator()); hashCode = 31 * hashCode + Objects.hashCode(gopClosedCadence()); hashCode = 31 * hashCode + Objects.hashCode(gopSize()); hashCode = 31 * hashCode + Objects.hashCode(gopSizeUnitsAsString()); hashCode = 31 * hashCode + Objects.hashCode(hrdBufferFinalFillPercentage()); hashCode = 31 * hashCode + Objects.hashCode(hrdBufferInitialFillPercentage()); hashCode = 31 * hashCode + Objects.hashCode(hrdBufferSize()); hashCode = 31 * hashCode + Objects.hashCode(interlaceModeAsString()); hashCode = 31 * hashCode + Objects.hashCode(intraDcPrecisionAsString()); hashCode = 31 * hashCode + Objects.hashCode(maxBitrate()); hashCode = 31 * hashCode + Objects.hashCode(minIInterval()); hashCode = 31 * hashCode + Objects.hashCode(numberBFramesBetweenReferenceFrames()); hashCode = 31 * hashCode + Objects.hashCode(parControlAsString()); hashCode = 31 * hashCode + Objects.hashCode(parDenominator()); hashCode = 31 * hashCode + Objects.hashCode(parNumerator()); hashCode = 31 * hashCode + Objects.hashCode(qualityTuningLevelAsString()); hashCode = 31 * hashCode + Objects.hashCode(rateControlModeAsString()); hashCode = 31 * hashCode + Objects.hashCode(scanTypeConversionModeAsString()); hashCode = 31 * hashCode + Objects.hashCode(sceneChangeDetectAsString()); hashCode = 31 * hashCode + Objects.hashCode(slowPalAsString()); hashCode = 31 * hashCode + Objects.hashCode(softness()); hashCode = 31 * hashCode + Objects.hashCode(spatialAdaptiveQuantizationAsString()); hashCode = 31 * hashCode + Objects.hashCode(syntaxAsString()); hashCode = 31 * hashCode + 
Objects.hashCode(telecineAsString()); hashCode = 31 * hashCode + Objects.hashCode(temporalAdaptiveQuantizationAsString()); return hashCode; } @Override public final boolean equals(Object obj) { return equalsBySdkFields(obj); } @Override public final boolean equalsBySdkFields(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof Mpeg2Settings)) { return false; } Mpeg2Settings other = (Mpeg2Settings) obj; return Objects.equals(adaptiveQuantizationAsString(), other.adaptiveQuantizationAsString()) && Objects.equals(bitrate(), other.bitrate()) && Objects.equals(codecLevelAsString(), other.codecLevelAsString()) && Objects.equals(codecProfileAsString(), other.codecProfileAsString()) && Objects.equals(dynamicSubGopAsString(), other.dynamicSubGopAsString()) && Objects.equals(framerateControlAsString(), other.framerateControlAsString()) && Objects.equals(framerateConversionAlgorithmAsString(), other.framerateConversionAlgorithmAsString()) && Objects.equals(framerateDenominator(), other.framerateDenominator()) && Objects.equals(framerateNumerator(), other.framerateNumerator()) && Objects.equals(gopClosedCadence(), other.gopClosedCadence()) && Objects.equals(gopSize(), other.gopSize()) && Objects.equals(gopSizeUnitsAsString(), other.gopSizeUnitsAsString()) && Objects.equals(hrdBufferFinalFillPercentage(), other.hrdBufferFinalFillPercentage()) && Objects.equals(hrdBufferInitialFillPercentage(), other.hrdBufferInitialFillPercentage()) && Objects.equals(hrdBufferSize(), other.hrdBufferSize()) && Objects.equals(interlaceModeAsString(), other.interlaceModeAsString()) && Objects.equals(intraDcPrecisionAsString(), other.intraDcPrecisionAsString()) && Objects.equals(maxBitrate(), other.maxBitrate()) && Objects.equals(minIInterval(), other.minIInterval()) && Objects.equals(numberBFramesBetweenReferenceFrames(), other.numberBFramesBetweenReferenceFrames()) && Objects.equals(parControlAsString(), other.parControlAsString()) && Objects.equals(parDenominator(), other.parDenominator()) && Objects.equals(parNumerator(), other.parNumerator()) && Objects.equals(qualityTuningLevelAsString(), other.qualityTuningLevelAsString()) && Objects.equals(rateControlModeAsString(), other.rateControlModeAsString()) && Objects.equals(scanTypeConversionModeAsString(), other.scanTypeConversionModeAsString()) && Objects.equals(sceneChangeDetectAsString(), other.sceneChangeDetectAsString()) && Objects.equals(slowPalAsString(), other.slowPalAsString()) && Objects.equals(softness(), other.softness()) && Objects.equals(spatialAdaptiveQuantizationAsString(), other.spatialAdaptiveQuantizationAsString()) && Objects.equals(syntaxAsString(), other.syntaxAsString()) && Objects.equals(telecineAsString(), other.telecineAsString()) && Objects.equals(temporalAdaptiveQuantizationAsString(), other.temporalAdaptiveQuantizationAsString()); } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. 
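     * <p>
     * Illustrative usage (not part of the generated documentation): {@link #toBuilder()} copies an existing instance
     * into a builder so that individual fields can be changed, and this method renders the result for logging. Both
     * methods are declared on this class; the field value shown is arbitrary.
     *
     * <pre>{@code
     * // Copy an existing configuration, adjust one field, and log the result
     * Mpeg2Settings adjusted = original.toBuilder()
     *         .bitrate(3_500_000)
     *         .build();
     * System.out.println(adjusted); // uses this toString()
     * }</pre>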
*/ @Override public final String toString() { return ToString.builder("Mpeg2Settings").add("AdaptiveQuantization", adaptiveQuantizationAsString()) .add("Bitrate", bitrate()).add("CodecLevel", codecLevelAsString()).add("CodecProfile", codecProfileAsString()) .add("DynamicSubGop", dynamicSubGopAsString()).add("FramerateControl", framerateControlAsString()) .add("FramerateConversionAlgorithm", framerateConversionAlgorithmAsString()) .add("FramerateDenominator", framerateDenominator()).add("FramerateNumerator", framerateNumerator()) .add("GopClosedCadence", gopClosedCadence()).add("GopSize", gopSize()) .add("GopSizeUnits", gopSizeUnitsAsString()).add("HrdBufferFinalFillPercentage", hrdBufferFinalFillPercentage()) .add("HrdBufferInitialFillPercentage", hrdBufferInitialFillPercentage()).add("HrdBufferSize", hrdBufferSize()) .add("InterlaceMode", interlaceModeAsString()).add("IntraDcPrecision", intraDcPrecisionAsString()) .add("MaxBitrate", maxBitrate()).add("MinIInterval", minIInterval()) .add("NumberBFramesBetweenReferenceFrames", numberBFramesBetweenReferenceFrames()) .add("ParControl", parControlAsString()).add("ParDenominator", parDenominator()) .add("ParNumerator", parNumerator()).add("QualityTuningLevel", qualityTuningLevelAsString()) .add("RateControlMode", rateControlModeAsString()) .add("ScanTypeConversionMode", scanTypeConversionModeAsString()) .add("SceneChangeDetect", sceneChangeDetectAsString()).add("SlowPal", slowPalAsString()) .add("Softness", softness()).add("SpatialAdaptiveQuantization", spatialAdaptiveQuantizationAsString()) .add("Syntax", syntaxAsString()).add("Telecine", telecineAsString()) .add("TemporalAdaptiveQuantization", temporalAdaptiveQuantizationAsString()).build(); } public final Optional getValueForField(String fieldName, Class clazz) { switch (fieldName) { case "AdaptiveQuantization": return Optional.ofNullable(clazz.cast(adaptiveQuantizationAsString())); case "Bitrate": return Optional.ofNullable(clazz.cast(bitrate())); case "CodecLevel": return Optional.ofNullable(clazz.cast(codecLevelAsString())); case "CodecProfile": return Optional.ofNullable(clazz.cast(codecProfileAsString())); case "DynamicSubGop": return Optional.ofNullable(clazz.cast(dynamicSubGopAsString())); case "FramerateControl": return Optional.ofNullable(clazz.cast(framerateControlAsString())); case "FramerateConversionAlgorithm": return Optional.ofNullable(clazz.cast(framerateConversionAlgorithmAsString())); case "FramerateDenominator": return Optional.ofNullable(clazz.cast(framerateDenominator())); case "FramerateNumerator": return Optional.ofNullable(clazz.cast(framerateNumerator())); case "GopClosedCadence": return Optional.ofNullable(clazz.cast(gopClosedCadence())); case "GopSize": return Optional.ofNullable(clazz.cast(gopSize())); case "GopSizeUnits": return Optional.ofNullable(clazz.cast(gopSizeUnitsAsString())); case "HrdBufferFinalFillPercentage": return Optional.ofNullable(clazz.cast(hrdBufferFinalFillPercentage())); case "HrdBufferInitialFillPercentage": return Optional.ofNullable(clazz.cast(hrdBufferInitialFillPercentage())); case "HrdBufferSize": return Optional.ofNullable(clazz.cast(hrdBufferSize())); case "InterlaceMode": return Optional.ofNullable(clazz.cast(interlaceModeAsString())); case "IntraDcPrecision": return Optional.ofNullable(clazz.cast(intraDcPrecisionAsString())); case "MaxBitrate": return Optional.ofNullable(clazz.cast(maxBitrate())); case "MinIInterval": return Optional.ofNullable(clazz.cast(minIInterval())); case "NumberBFramesBetweenReferenceFrames": return 
                Optional.ofNullable(clazz.cast(numberBFramesBetweenReferenceFrames()));
            case "ParControl":
                return Optional.ofNullable(clazz.cast(parControlAsString()));
            case "ParDenominator":
                return Optional.ofNullable(clazz.cast(parDenominator()));
            case "ParNumerator":
                return Optional.ofNullable(clazz.cast(parNumerator()));
            case "QualityTuningLevel":
                return Optional.ofNullable(clazz.cast(qualityTuningLevelAsString()));
            case "RateControlMode":
                return Optional.ofNullable(clazz.cast(rateControlModeAsString()));
            case "ScanTypeConversionMode":
                return Optional.ofNullable(clazz.cast(scanTypeConversionModeAsString()));
            case "SceneChangeDetect":
                return Optional.ofNullable(clazz.cast(sceneChangeDetectAsString()));
            case "SlowPal":
                return Optional.ofNullable(clazz.cast(slowPalAsString()));
            case "Softness":
                return Optional.ofNullable(clazz.cast(softness()));
            case "SpatialAdaptiveQuantization":
                return Optional.ofNullable(clazz.cast(spatialAdaptiveQuantizationAsString()));
            case "Syntax":
                return Optional.ofNullable(clazz.cast(syntaxAsString()));
            case "Telecine":
                return Optional.ofNullable(clazz.cast(telecineAsString()));
            case "TemporalAdaptiveQuantization":
                return Optional.ofNullable(clazz.cast(temporalAdaptiveQuantizationAsString()));
            default:
                return Optional.empty();
        }
    }

    @Override
    public final List<SdkField<?>> sdkFields() {
        return SDK_FIELDS;
    }

    private static <T> Function<Object, T> getter(Function<Mpeg2Settings, T> g) {
        return obj -> g.apply((Mpeg2Settings) obj);
    }

    private static <T> BiConsumer<Object, T> setter(BiConsumer<Builder, T> s) {
        return (obj, val) -> s.accept((Builder) obj, val);
    }

    public interface Builder extends SdkPojo, CopyableBuilder<Builder, Mpeg2Settings> {
        /**
         * Specify the strength of any adaptive quantization filters that you enable. The value that you choose here
         * applies to the following settings: Spatial adaptive quantization, and Temporal adaptive quantization.
         *
         * @param adaptiveQuantization
         *        Specify the strength of any adaptive quantization filters that you enable. The value that you choose
         *        here applies to the following settings: Spatial adaptive quantization, and Temporal adaptive
         *        quantization.
         * @see Mpeg2AdaptiveQuantization
         * @return Returns a reference to this object so that method calls can be chained together.
         * @see Mpeg2AdaptiveQuantization
         */
        Builder adaptiveQuantization(String adaptiveQuantization);

        /**
         * Specify the strength of any adaptive quantization filters that you enable. The value that you choose here
         * applies to the following settings: Spatial adaptive quantization, and Temporal adaptive quantization.
         *
         * @param adaptiveQuantization
         *        Specify the strength of any adaptive quantization filters that you enable. The value that you choose
         *        here applies to the following settings: Spatial adaptive quantization, and Temporal adaptive
         *        quantization.
         * @see Mpeg2AdaptiveQuantization
         * @return Returns a reference to this object so that method calls can be chained together.
         * @see Mpeg2AdaptiveQuantization
         */
        Builder adaptiveQuantization(Mpeg2AdaptiveQuantization adaptiveQuantization);

        /**
         * Specify the average bitrate in bits per second. Required for VBR and CBR. For MS Smooth outputs, bitrates
         * must be unique when rounded down to the nearest multiple of 1000.
         *
         * @param bitrate
         *        Specify the average bitrate in bits per second. Required for VBR and CBR. For MS Smooth outputs,
         *        bitrates must be unique when rounded down to the nearest multiple of 1000.
         * @return Returns a reference to this object so that method calls can be chained together.
         */
        Builder bitrate(Integer bitrate);

        /**
         * Use Level to set the MPEG-2 level for the video output.
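         * <p>
         * Illustrative usage (not part of the generated documentation): a minimal sketch of setting the level together
         * with the profile on this builder. The enum constant names are assumptions; verify them against
         * {@link Mpeg2CodecLevel} and {@link Mpeg2CodecProfile} in your SDK version.
         *
         * <pre>{@code
         * // Main profile at high level (constant names assumed)
         * Mpeg2Settings.Builder levelAndProfile = Mpeg2Settings.builder()
         *         .codecProfile(Mpeg2CodecProfile.MAIN)
         *         .codecLevel(Mpeg2CodecLevel.HIGH);
         * }</pre>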
* * @param codecLevel * Use Level to set the MPEG-2 level for the video output. * @see Mpeg2CodecLevel * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2CodecLevel */ Builder codecLevel(String codecLevel); /** * Use Level to set the MPEG-2 level for the video output. * * @param codecLevel * Use Level to set the MPEG-2 level for the video output. * @see Mpeg2CodecLevel * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2CodecLevel */ Builder codecLevel(Mpeg2CodecLevel codecLevel); /** * Use Profile to set the MPEG-2 profile for the video output. * * @param codecProfile * Use Profile to set the MPEG-2 profile for the video output. * @see Mpeg2CodecProfile * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2CodecProfile */ Builder codecProfile(String codecProfile); /** * Use Profile to set the MPEG-2 profile for the video output. * * @param codecProfile * Use Profile to set the MPEG-2 profile for the video output. * @see Mpeg2CodecProfile * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2CodecProfile */ Builder codecProfile(Mpeg2CodecProfile codecProfile); /** * Choose Adaptive to improve subjective video quality for high-motion content. This will cause the service to * use fewer B-frames (which infer information based on other frames) for high-motion portions of the video and * more B-frames for low-motion portions. The maximum number of B-frames is limited by the value you provide for * the setting B frames between reference frames. * * @param dynamicSubGop * Choose Adaptive to improve subjective video quality for high-motion content. This will cause the * service to use fewer B-frames (which infer information based on other frames) for high-motion portions * of the video and more B-frames for low-motion portions. The maximum number of B-frames is limited by * the value you provide for the setting B frames between reference frames. * @see Mpeg2DynamicSubGop * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2DynamicSubGop */ Builder dynamicSubGop(String dynamicSubGop); /** * Choose Adaptive to improve subjective video quality for high-motion content. This will cause the service to * use fewer B-frames (which infer information based on other frames) for high-motion portions of the video and * more B-frames for low-motion portions. The maximum number of B-frames is limited by the value you provide for * the setting B frames between reference frames. * * @param dynamicSubGop * Choose Adaptive to improve subjective video quality for high-motion content. This will cause the * service to use fewer B-frames (which infer information based on other frames) for high-motion portions * of the video and more B-frames for low-motion portions. The maximum number of B-frames is limited by * the value you provide for the setting B frames between reference frames. * @see Mpeg2DynamicSubGop * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2DynamicSubGop */ Builder dynamicSubGop(Mpeg2DynamicSubGop dynamicSubGop); /** * If you are using the console, use the Framerate setting to specify the frame rate for this output. If you * want to keep the same frame rate as the input video, choose Follow source. If you want to do frame rate * conversion, choose a frame rate from the dropdown list or choose Custom. 
The framerates shown in the dropdown * list are decimal approximations of fractions. If you choose Custom, specify your frame rate as a fraction. * * @param framerateControl * If you are using the console, use the Framerate setting to specify the frame rate for this output. If * you want to keep the same frame rate as the input video, choose Follow source. If you want to do frame * rate conversion, choose a frame rate from the dropdown list or choose Custom. The framerates shown in * the dropdown list are decimal approximations of fractions. If you choose Custom, specify your frame * rate as a fraction. * @see Mpeg2FramerateControl * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2FramerateControl */ Builder framerateControl(String framerateControl); /** * If you are using the console, use the Framerate setting to specify the frame rate for this output. If you * want to keep the same frame rate as the input video, choose Follow source. If you want to do frame rate * conversion, choose a frame rate from the dropdown list or choose Custom. The framerates shown in the dropdown * list are decimal approximations of fractions. If you choose Custom, specify your frame rate as a fraction. * * @param framerateControl * If you are using the console, use the Framerate setting to specify the frame rate for this output. If * you want to keep the same frame rate as the input video, choose Follow source. If you want to do frame * rate conversion, choose a frame rate from the dropdown list or choose Custom. The framerates shown in * the dropdown list are decimal approximations of fractions. If you choose Custom, specify your frame * rate as a fraction. * @see Mpeg2FramerateControl * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2FramerateControl */ Builder framerateControl(Mpeg2FramerateControl framerateControl); /** * Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For * numerically simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value, Drop * duplicate. For numerically complex conversions, to avoid stutter: Choose Interpolate. This results in a * smooth picture, but might introduce undesirable video artifacts. For complex frame rate conversions, * especially if your source video has already been converted from its original cadence: Choose FrameFormer to * do motion-compensated interpolation. FrameFormer uses the best conversion method frame by frame. Note that * using FrameFormer increases the transcoding time and incurs a significant add-on cost. When you choose * FrameFormer, your input video resolution must be at least 128x96. * * @param framerateConversionAlgorithm * Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For * numerically simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default * value, Drop duplicate. For numerically complex conversions, to avoid stutter: Choose Interpolate. This * results in a smooth picture, but might introduce undesirable video artifacts. For complex frame rate * conversions, especially if your source video has already been converted from its original cadence: * Choose FrameFormer to do motion-compensated interpolation. FrameFormer uses the best conversion method * frame by frame. Note that using FrameFormer increases the transcoding time and incurs a significant * add-on cost. 
When you choose FrameFormer, your input video resolution must be at least 128x96. * @see Mpeg2FramerateConversionAlgorithm * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2FramerateConversionAlgorithm */ Builder framerateConversionAlgorithm(String framerateConversionAlgorithm); /** * Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For * numerically simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value, Drop * duplicate. For numerically complex conversions, to avoid stutter: Choose Interpolate. This results in a * smooth picture, but might introduce undesirable video artifacts. For complex frame rate conversions, * especially if your source video has already been converted from its original cadence: Choose FrameFormer to * do motion-compensated interpolation. FrameFormer uses the best conversion method frame by frame. Note that * using FrameFormer increases the transcoding time and incurs a significant add-on cost. When you choose * FrameFormer, your input video resolution must be at least 128x96. * * @param framerateConversionAlgorithm * Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For * numerically simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default * value, Drop duplicate. For numerically complex conversions, to avoid stutter: Choose Interpolate. This * results in a smooth picture, but might introduce undesirable video artifacts. For complex frame rate * conversions, especially if your source video has already been converted from its original cadence: * Choose FrameFormer to do motion-compensated interpolation. FrameFormer uses the best conversion method * frame by frame. Note that using FrameFormer increases the transcoding time and incurs a significant * add-on cost. When you choose FrameFormer, your input video resolution must be at least 128x96. * @see Mpeg2FramerateConversionAlgorithm * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2FramerateConversionAlgorithm */ Builder framerateConversionAlgorithm(Mpeg2FramerateConversionAlgorithm framerateConversionAlgorithm); /** * When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a fraction. * For example, 24000 / 1001 = 23.976 fps. Use FramerateDenominator to specify the denominator of this fraction. * In this example, use 1001 for the value of FramerateDenominator. When you use the console for transcode jobs * that use frame rate conversion, provide the value as a decimal number for Framerate. In this example, specify * 23.976. * * @param framerateDenominator * When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a * fraction. For example, 24000 / 1001 = 23.976 fps. Use FramerateDenominator to specify the denominator * of this fraction. In this example, use 1001 for the value of FramerateDenominator. When you use the * console for transcode jobs that use frame rate conversion, provide the value as a decimal number for * Framerate. In this example, specify 23.976. * @return Returns a reference to this object so that method calls can be chained together. */ Builder framerateDenominator(Integer framerateDenominator); /** * When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a fraction. * For example, 24000 / 1001 = 23.976 fps. 
Use FramerateNumerator to specify the numerator of this fraction. In * this example, use 24000 for the value of FramerateNumerator. When you use the console for transcode jobs that * use frame rate conversion, provide the value as a decimal number for Framerate. In this example, specify * 23.976. * * @param framerateNumerator * When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a * fraction. For example, 24000 / 1001 = 23.976 fps. Use FramerateNumerator to specify the numerator of * this fraction. In this example, use 24000 for the value of FramerateNumerator. When you use the * console for transcode jobs that use frame rate conversion, provide the value as a decimal number for * Framerate. In this example, specify 23.976. * @return Returns a reference to this object so that method calls can be chained together. */ Builder framerateNumerator(Integer framerateNumerator); /** * Specify the relative frequency of open to closed GOPs in this output. For example, if you want to allow four * open GOPs and then require a closed GOP, set this value to 5. When you create a streaming output, we * recommend that you keep the default value, 1, so that players starting mid-stream receive an IDR frame as * quickly as possible. Don't set this value to 0; that would break output segmenting. * * @param gopClosedCadence * Specify the relative frequency of open to closed GOPs in this output. For example, if you want to * allow four open GOPs and then require a closed GOP, set this value to 5. When you create a streaming * output, we recommend that you keep the default value, 1, so that players starting mid-stream receive * an IDR frame as quickly as possible. Don't set this value to 0; that would break output segmenting. * @return Returns a reference to this object so that method calls can be chained together. */ Builder gopClosedCadence(Integer gopClosedCadence); /** * Specify the interval between keyframes, in seconds or frames, for this output. Default: 12 Related settings: * When you specify the GOP size in seconds, set GOP mode control to Specified, seconds. The default value for * GOP mode control is Frames. * * @param gopSize * Specify the interval between keyframes, in seconds or frames, for this output. Default: 12 Related * settings: When you specify the GOP size in seconds, set GOP mode control to Specified, seconds. The * default value for GOP mode control is Frames. * @return Returns a reference to this object so that method calls can be chained together. */ Builder gopSize(Double gopSize); /** * Specify the units for GOP size. If you don't specify a value here, by default the encoder measures GOP size * in frames. * * @param gopSizeUnits * Specify the units for GOP size. If you don't specify a value here, by default the encoder measures GOP * size in frames. * @see Mpeg2GopSizeUnits * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2GopSizeUnits */ Builder gopSizeUnits(String gopSizeUnits); /** * Specify the units for GOP size. If you don't specify a value here, by default the encoder measures GOP size * in frames. * * @param gopSizeUnits * Specify the units for GOP size. If you don't specify a value here, by default the encoder measures GOP * size in frames. * @see Mpeg2GopSizeUnits * @return Returns a reference to this object so that method calls can be chained together. 
* @see Mpeg2GopSizeUnits */ Builder gopSizeUnits(Mpeg2GopSizeUnits gopSizeUnits); /** * If your downstream systems have strict buffer requirements: Specify the minimum percentage of the HRD buffer * that's available at the end of each encoded video segment. For the best video quality: Set to 0 or leave * blank to automatically determine the final buffer fill percentage. * * @param hrdBufferFinalFillPercentage * If your downstream systems have strict buffer requirements: Specify the minimum percentage of the HRD * buffer that's available at the end of each encoded video segment. For the best video quality: Set to 0 * or leave blank to automatically determine the final buffer fill percentage. * @return Returns a reference to this object so that method calls can be chained together. */ Builder hrdBufferFinalFillPercentage(Integer hrdBufferFinalFillPercentage); /** * Percentage of the buffer that should initially be filled (HRD buffer model). * * @param hrdBufferInitialFillPercentage * Percentage of the buffer that should initially be filled (HRD buffer model). * @return Returns a reference to this object so that method calls can be chained together. */ Builder hrdBufferInitialFillPercentage(Integer hrdBufferInitialFillPercentage); /** * Size of buffer (HRD buffer model) in bits. For example, enter five megabits as 5000000. * * @param hrdBufferSize * Size of buffer (HRD buffer model) in bits. For example, enter five megabits as 5000000. * @return Returns a reference to this object so that method calls can be chained together. */ Builder hrdBufferSize(Integer hrdBufferSize); /** * Choose the scan line type for the output. Keep the default value, Progressive to create a progressive output, * regardless of the scan type of your input. Use Top field first or Bottom field first to create an output * that's interlaced with the same field polarity throughout. Use Follow, default top or Follow, default bottom * to produce outputs with the same field polarity as the source. For jobs that have multiple inputs, the output * field polarity might change over the course of the output. Follow behavior depends on the input scan type. If * the source is interlaced, the output will be interlaced with the same polarity as the source. If the source * is progressive, the output will be interlaced with top field bottom field first, depending on which of the * Follow options you choose. * * @param interlaceMode * Choose the scan line type for the output. Keep the default value, Progressive to create a progressive * output, regardless of the scan type of your input. Use Top field first or Bottom field first to create * an output that's interlaced with the same field polarity throughout. Use Follow, default top or * Follow, default bottom to produce outputs with the same field polarity as the source. For jobs that * have multiple inputs, the output field polarity might change over the course of the output. Follow * behavior depends on the input scan type. If the source is interlaced, the output will be interlaced * with the same polarity as the source. If the source is progressive, the output will be interlaced with * top field bottom field first, depending on which of the Follow options you choose. * @see Mpeg2InterlaceMode * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2InterlaceMode */ Builder interlaceMode(String interlaceMode); /** * Choose the scan line type for the output. 
Keep the default value, Progressive to create a progressive output, * regardless of the scan type of your input. Use Top field first or Bottom field first to create an output * that's interlaced with the same field polarity throughout. Use Follow, default top or Follow, default bottom * to produce outputs with the same field polarity as the source. For jobs that have multiple inputs, the output * field polarity might change over the course of the output. Follow behavior depends on the input scan type. If * the source is interlaced, the output will be interlaced with the same polarity as the source. If the source * is progressive, the output will be interlaced with top field bottom field first, depending on which of the * Follow options you choose. * * @param interlaceMode * Choose the scan line type for the output. Keep the default value, Progressive to create a progressive * output, regardless of the scan type of your input. Use Top field first or Bottom field first to create * an output that's interlaced with the same field polarity throughout. Use Follow, default top or * Follow, default bottom to produce outputs with the same field polarity as the source. For jobs that * have multiple inputs, the output field polarity might change over the course of the output. Follow * behavior depends on the input scan type. If the source is interlaced, the output will be interlaced * with the same polarity as the source. If the source is progressive, the output will be interlaced with * top field bottom field first, depending on which of the Follow options you choose. * @see Mpeg2InterlaceMode * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2InterlaceMode */ Builder interlaceMode(Mpeg2InterlaceMode interlaceMode); /** * Use Intra DC precision to set quantization precision for intra-block DC coefficients. If you choose the value * auto, the service will automatically select the precision based on the per-frame compression ratio. * * @param intraDcPrecision * Use Intra DC precision to set quantization precision for intra-block DC coefficients. If you choose * the value auto, the service will automatically select the precision based on the per-frame compression * ratio. * @see Mpeg2IntraDcPrecision * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2IntraDcPrecision */ Builder intraDcPrecision(String intraDcPrecision); /** * Use Intra DC precision to set quantization precision for intra-block DC coefficients. If you choose the value * auto, the service will automatically select the precision based on the per-frame compression ratio. * * @param intraDcPrecision * Use Intra DC precision to set quantization precision for intra-block DC coefficients. If you choose * the value auto, the service will automatically select the precision based on the per-frame compression * ratio. * @see Mpeg2IntraDcPrecision * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2IntraDcPrecision */ Builder intraDcPrecision(Mpeg2IntraDcPrecision intraDcPrecision); /** * Maximum bitrate in bits/second. For example, enter five megabits per second as 5000000. * * @param maxBitrate * Maximum bitrate in bits/second. For example, enter five megabits per second as 5000000. * @return Returns a reference to this object so that method calls can be chained together. 
*/ Builder maxBitrate(Integer maxBitrate); /** * Specify the minimum number of frames allowed between two IDR-frames in your output. This includes frames * created at the start of a GOP or a scene change. Use Min I-Interval to improve video compression by varying * GOP size when two IDR-frames would be created near each other. For example, if a regular cadence-driven * IDR-frame would fall within 5 frames of a scene-change IDR-frame, and you set Min I-interval to 5, then the * encoder would only write an IDR-frame for the scene-change. In this way, one GOP is shortened or extended. If * a cadence-driven IDR-frame would be further than 5 frames from a scene-change IDR-frame, then the encoder * leaves all IDR-frames in place. To manually specify an interval: Enter a value from 1 to 30. Use when your * downstream systems have specific GOP size requirements. To disable GOP size variance: Enter 0. MediaConvert * will only create IDR-frames at the start of your output's cadence-driven GOP. Use when your downstream * systems require a regular GOP size. * * @param minIInterval * Specify the minimum number of frames allowed between two IDR-frames in your output. This includes * frames created at the start of a GOP or a scene change. Use Min I-Interval to improve video * compression by varying GOP size when two IDR-frames would be created near each other. For example, if * a regular cadence-driven IDR-frame would fall within 5 frames of a scene-change IDR-frame, and you set * Min I-interval to 5, then the encoder would only write an IDR-frame for the scene-change. In this way, * one GOP is shortened or extended. If a cadence-driven IDR-frame would be further than 5 frames from a * scene-change IDR-frame, then the encoder leaves all IDR-frames in place. To manually specify an * interval: Enter a value from 1 to 30. Use when your downstream systems have specific GOP size * requirements. To disable GOP size variance: Enter 0. MediaConvert will only create IDR-frames at the * start of your output's cadence-driven GOP. Use when your downstream systems require a regular GOP * size. * @return Returns a reference to this object so that method calls can be chained together. */ Builder minIInterval(Integer minIInterval); /** * Specify the number of B-frames that MediaConvert puts between reference frames in this output. Valid values * are whole numbers from 0 through 7. When you don't specify a value, MediaConvert defaults to 2. * * @param numberBFramesBetweenReferenceFrames * Specify the number of B-frames that MediaConvert puts between reference frames in this output. Valid * values are whole numbers from 0 through 7. When you don't specify a value, MediaConvert defaults to 2. * @return Returns a reference to this object so that method calls can be chained together. */ Builder numberBFramesBetweenReferenceFrames(Integer numberBFramesBetweenReferenceFrames); /** * Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default * behavior, Follow source, uses the PAR from your input video for your output. To specify a different PAR in * the console, choose any value other than Follow source. When you choose SPECIFIED for this setting, you must * also specify values for the parNumerator and parDenominator settings. * * @param parControl * Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default * behavior, Follow source, uses the PAR from your input video for your output. 
To specify a different * PAR in the console, choose any value other than Follow source. When you choose SPECIFIED for this * setting, you must also specify values for the parNumerator and parDenominator settings. * @see Mpeg2ParControl * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2ParControl */ Builder parControl(String parControl); /** * Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default * behavior, Follow source, uses the PAR from your input video for your output. To specify a different PAR in * the console, choose any value other than Follow source. When you choose SPECIFIED for this setting, you must * also specify values for the parNumerator and parDenominator settings. * * @param parControl * Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default * behavior, Follow source, uses the PAR from your input video for your output. To specify a different * PAR in the console, choose any value other than Follow source. When you choose SPECIFIED for this * setting, you must also specify values for the parNumerator and parDenominator settings. * @see Mpeg2ParControl * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2ParControl */ Builder parControl(Mpeg2ParControl parControl); /** * Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value other * than Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your input * video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would specify the * ratio 40:33. In this example, the value for parDenominator is 33. * * @param parDenominator * Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value * other than Follow source. When you specify an output pixel aspect ratio (PAR) that is different from * your input video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you * would specify the ratio 40:33. In this example, the value for parDenominator is 33. * @return Returns a reference to this object so that method calls can be chained together. */ Builder parDenominator(Integer parDenominator); /** * Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value other * than Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your input * video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would specify the * ratio 40:33. In this example, the value for parNumerator is 40. * * @param parNumerator * Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value * other than Follow source. When you specify an output pixel aspect ratio (PAR) that is different from * your input video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you * would specify the ratio 40:33. In this example, the value for parNumerator is 40. * @return Returns a reference to this object so that method calls can be chained together. */ Builder parNumerator(Integer parNumerator); /** * Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video * quality. The default behavior is faster, lower quality, single-pass encoding. * * @param qualityTuningLevel * Optional. 
Use Quality tuning level to choose how you want to trade off encoding speed for output video * quality. The default behavior is faster, lower quality, single-pass encoding. * @see Mpeg2QualityTuningLevel * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2QualityTuningLevel */ Builder qualityTuningLevel(String qualityTuningLevel); /** * Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video * quality. The default behavior is faster, lower quality, single-pass encoding. * * @param qualityTuningLevel * Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video * quality. The default behavior is faster, lower quality, single-pass encoding. * @see Mpeg2QualityTuningLevel * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2QualityTuningLevel */ Builder qualityTuningLevel(Mpeg2QualityTuningLevel qualityTuningLevel); /** * Use Rate control mode to specify whether the bitrate is variable (vbr) or constant (cbr). * * @param rateControlMode * Use Rate control mode to specify whether the bitrate is variable (vbr) or constant (cbr). * @see Mpeg2RateControlMode * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2RateControlMode */ Builder rateControlMode(String rateControlMode); /** * Use Rate control mode to specify whether the bitrate is variable (vbr) or constant (cbr). * * @param rateControlMode * Use Rate control mode to specify whether the bitrate is variable (vbr) or constant (cbr). * @see Mpeg2RateControlMode * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2RateControlMode */ Builder rateControlMode(Mpeg2RateControlMode rateControlMode); /** * Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In * this situation, choose Optimized interlacing to create a better quality interlaced output. In this case, each * progressive frame from the input corresponds to an interlaced field in the output. Keep the default value, * Basic interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs any frame * rate conversion first and then interlaces the frames. When you choose Optimized interlacing and you set your * output frame rate to a value that isn't suitable for optimized interlacing, MediaConvert automatically falls * back to basic interlacing. Required settings: To use optimized interlacing, you must set Telecine to None or * Soft. You can't use optimized interlacing for hard telecine outputs. You must also set Interlace mode to a * value other than Progressive. * * @param scanTypeConversionMode * Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. * In this situation, choose Optimized interlacing to create a better quality interlaced output. In this * case, each progressive frame from the input corresponds to an interlaced field in the output. Keep the * default value, Basic interlacing, for all other output frame rates. With basic interlacing, * MediaConvert performs any frame rate conversion first and then interlaces the frames. When you choose * Optimized interlacing and you set your output frame rate to a value that isn't suitable for optimized * interlacing, MediaConvert automatically falls back to basic interlacing. 
Required settings: To use * optimized interlacing, you must set Telecine to None or Soft. You can't use optimized interlacing for * hard telecine outputs. You must also set Interlace mode to a value other than Progressive. * @see Mpeg2ScanTypeConversionMode * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2ScanTypeConversionMode */ Builder scanTypeConversionMode(String scanTypeConversionMode); /** * Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In * this situation, choose Optimized interlacing to create a better quality interlaced output. In this case, each * progressive frame from the input corresponds to an interlaced field in the output. Keep the default value, * Basic interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs any frame * rate conversion first and then interlaces the frames. When you choose Optimized interlacing and you set your * output frame rate to a value that isn't suitable for optimized interlacing, MediaConvert automatically falls * back to basic interlacing. Required settings: To use optimized interlacing, you must set Telecine to None or * Soft. You can't use optimized interlacing for hard telecine outputs. You must also set Interlace mode to a * value other than Progressive. * * @param scanTypeConversionMode * Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. * In this situation, choose Optimized interlacing to create a better quality interlaced output. In this * case, each progressive frame from the input corresponds to an interlaced field in the output. Keep the * default value, Basic interlacing, for all other output frame rates. With basic interlacing, * MediaConvert performs any frame rate conversion first and then interlaces the frames. When you choose * Optimized interlacing and you set your output frame rate to a value that isn't suitable for optimized * interlacing, MediaConvert automatically falls back to basic interlacing. Required settings: To use * optimized interlacing, you must set Telecine to None or Soft. You can't use optimized interlacing for * hard telecine outputs. You must also set Interlace mode to a value other than Progressive. * @see Mpeg2ScanTypeConversionMode * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2ScanTypeConversionMode */ Builder scanTypeConversionMode(Mpeg2ScanTypeConversionMode scanTypeConversionMode); /** * Enable this setting to insert I-frames at scene changes that the service automatically detects. This improves * video quality and is enabled by default. * * @param sceneChangeDetect * Enable this setting to insert I-frames at scene changes that the service automatically detects. This * improves video quality and is enabled by default. * @see Mpeg2SceneChangeDetect * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2SceneChangeDetect */ Builder sceneChangeDetect(String sceneChangeDetect); /** * Enable this setting to insert I-frames at scene changes that the service automatically detects. This improves * video quality and is enabled by default. * * @param sceneChangeDetect * Enable this setting to insert I-frames at scene changes that the service automatically detects. This * improves video quality and is enabled by default. 
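         * <p>
         * A minimal sketch (assuming the enum exposes ENABLED and DISABLED constants):
         * {@code .sceneChangeDetect(Mpeg2SceneChangeDetect.ENABLED)}; the String overload accepts the equivalent
         * value "ENABLED".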
* @see Mpeg2SceneChangeDetect * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2SceneChangeDetect */ Builder sceneChangeDetect(Mpeg2SceneChangeDetect sceneChangeDetect); /** * Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL to * create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and * resamples your audio to keep it synchronized with the video. Note that enabling this setting will slightly * reduce the duration of your video. Required settings: You must also set Framerate to 25. * * @param slowPal * Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow * PAL to create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 * fps and resamples your audio to keep it synchronized with the video. Note that enabling this setting * will slightly reduce the duration of your video. Required settings: You must also set Framerate to 25. * @see Mpeg2SlowPal * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2SlowPal */ Builder slowPal(String slowPal); /** * Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL to * create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and * resamples your audio to keep it synchronized with the video. Note that enabling this setting will slightly * reduce the duration of your video. Required settings: You must also set Framerate to 25. * * @param slowPal * Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow * PAL to create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 * fps and resamples your audio to keep it synchronized with the video. Note that enabling this setting * will slightly reduce the duration of your video. Required settings: You must also set Framerate to 25. * @see Mpeg2SlowPal * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2SlowPal */ Builder slowPal(Mpeg2SlowPal slowPal); /** * Ignore this setting unless you need to comply with a specification that requires a specific value. If you * don't have a specification requirement, we recommend that you adjust the softness of your output by using a * lower value for the setting Sharpness or by enabling a noise reducer filter. The Softness setting specifies * the quantization matrices that the encoder uses. Keep the default value, 0, to use the AWS Elemental default * matrices. Choose a value from 17 to 128 to use planar interpolation. Increasing values from 17 to 128 result * in increasing reduction of high-frequency data. The value 128 results in the softest video. * * @param softness * Ignore this setting unless you need to comply with a specification that requires a specific value. If * you don't have a specification requirement, we recommend that you adjust the softness of your output * by using a lower value for the setting Sharpness or by enabling a noise reducer filter. The Softness * setting specifies the quantization matrices that the encoder uses. Keep the default value, 0, to use * the AWS Elemental default matrices. Choose a value from 17 to 128 to use planar interpolation. * Increasing values from 17 to 128 result in increasing reduction of high-frequency data. 
The value 128 * results in the softest video. * @return Returns a reference to this object so that method calls can be chained together. */ Builder softness(Integer softness); /** * Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of * content complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain more * distortion with no noticeable visual degradation and uses more bits on areas where any small distortion will * be noticeable. For example, complex textured blocks are encoded with fewer bits and smooth textured blocks * are encoded with more bits. Enabling this feature will almost always improve your video quality. Note, * though, that this feature doesn't take into account where the viewer's attention is likely to be. If viewers * are likely to be focusing their attention on a part of the screen with a lot of complex texture, you might * choose to disable this feature. Related setting: When you enable spatial adaptive quantization, set the value * for Adaptive quantization depending on your content. For homogeneous content, such as cartoons and video * games, set it to Low. For content with a wider variety of textures, set it to High or Higher. * * @param spatialAdaptiveQuantization * Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation * of content complexity. When you enable this feature, the encoder uses fewer bits on areas that can * sustain more distortion with no noticeable visual degradation and uses more bits on areas where any * small distortion will be noticeable. For example, complex textured blocks are encoded with fewer bits * and smooth textured blocks are encoded with more bits. Enabling this feature will almost always * improve your video quality. Note, though, that this feature doesn't take into account where the * viewer's attention is likely to be. If viewers are likely to be focusing their attention on a part of * the screen with a lot of complex texture, you might choose to disable this feature. Related setting: * When you enable spatial adaptive quantization, set the value for Adaptive quantization depending on * your content. For homogeneous content, such as cartoons and video games, set it to Low. For content * with a wider variety of textures, set it to High or Higher. * @see Mpeg2SpatialAdaptiveQuantization * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2SpatialAdaptiveQuantization */ Builder spatialAdaptiveQuantization(String spatialAdaptiveQuantization); /** * Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of * content complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain more * distortion with no noticeable visual degradation and uses more bits on areas where any small distortion will * be noticeable. For example, complex textured blocks are encoded with fewer bits and smooth textured blocks * are encoded with more bits. Enabling this feature will almost always improve your video quality. Note, * though, that this feature doesn't take into account where the viewer's attention is likely to be. If viewers * are likely to be focusing their attention on a part of the screen with a lot of complex texture, you might * choose to disable this feature. Related setting: When you enable spatial adaptive quantization, set the value * for Adaptive quantization depending on your content. 
For homogeneous content, such as cartoons and video * games, set it to Low. For content with a wider variety of textures, set it to High or Higher. * * @param spatialAdaptiveQuantization * Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation * of content complexity. When you enable this feature, the encoder uses fewer bits on areas that can * sustain more distortion with no noticeable visual degradation and uses more bits on areas where any * small distortion will be noticeable. For example, complex textured blocks are encoded with fewer bits * and smooth textured blocks are encoded with more bits. Enabling this feature will almost always * improve your video quality. Note, though, that this feature doesn't take into account where the * viewer's attention is likely to be. If viewers are likely to be focusing their attention on a part of * the screen with a lot of complex texture, you might choose to disable this feature. Related setting: * When you enable spatial adaptive quantization, set the value for Adaptive quantization depending on * your content. For homogeneous content, such as cartoons and video games, set it to Low. For content * with a wider variety of textures, set it to High or Higher. * @see Mpeg2SpatialAdaptiveQuantization * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2SpatialAdaptiveQuantization */ Builder spatialAdaptiveQuantization(Mpeg2SpatialAdaptiveQuantization spatialAdaptiveQuantization); /** * Specify whether this output's video uses the D10 syntax. Keep the default value to not use the syntax. * Related settings: When you choose D10 for your MXF profile, you must also set this value to D10. * * @param syntax * Specify whether this output's video uses the D10 syntax. Keep the default value to not use the syntax. * Related settings: When you choose D10 for your MXF profile, you must also set this value to D10. * @see Mpeg2Syntax * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2Syntax */ Builder syntax(String syntax); /** * Specify whether this output's video uses the D10 syntax. Keep the default value to not use the syntax. * Related settings: When you choose D10 for your MXF profile, you must also set this value to D10. * * @param syntax * Specify whether this output's video uses the D10 syntax. Keep the default value to not use the syntax. * Related settings: When you choose D10 for your MXF profile, you must also set this value to D10. * @see Mpeg2Syntax * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2Syntax */ Builder syntax(Mpeg2Syntax syntax); /** * When you do frame rate conversion from 23.976 frames per second (fps) to 29.97 fps, and your output scan type * is interlaced, you can optionally enable hard or soft telecine to create a smoother picture. Hard telecine * produces a 29.97i output. Soft telecine produces an output with a 23.976 output that signals to the video * player device to do the conversion during play back. When you keep the default value, None, MediaConvert does * a standard frame rate conversion to 29.97 without doing anything with the field polarity to create a smoother * picture. * * @param telecine * When you do frame rate conversion from 23.976 frames per second (fps) to 29.97 fps, and your output * scan type is interlaced, you can optionally enable hard or soft telecine to create a smoother picture. 
* Hard telecine produces a 29.97i output. Soft telecine produces an output with a 23.976 output that * signals to the video player device to do the conversion during play back. When you keep the default * value, None, MediaConvert does a standard frame rate conversion to 29.97 without doing anything with * the field polarity to create a smoother picture. * @see Mpeg2Telecine * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2Telecine */ Builder telecine(String telecine); /** * When you do frame rate conversion from 23.976 frames per second (fps) to 29.97 fps, and your output scan type * is interlaced, you can optionally enable hard or soft telecine to create a smoother picture. Hard telecine * produces a 29.97i output. Soft telecine produces an output with a 23.976 output that signals to the video * player device to do the conversion during play back. When you keep the default value, None, MediaConvert does * a standard frame rate conversion to 29.97 without doing anything with the field polarity to create a smoother * picture. * * @param telecine * When you do frame rate conversion from 23.976 frames per second (fps) to 29.97 fps, and your output * scan type is interlaced, you can optionally enable hard or soft telecine to create a smoother picture. * Hard telecine produces a 29.97i output. Soft telecine produces an output with a 23.976 output that * signals to the video player device to do the conversion during play back. When you keep the default * value, None, MediaConvert does a standard frame rate conversion to 29.97 without doing anything with * the field polarity to create a smoother picture. * @see Mpeg2Telecine * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2Telecine */ Builder telecine(Mpeg2Telecine telecine); /** * Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of * content complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that * aren't moving and uses more bits on complex objects with sharp edges that move a lot. For example, this * feature improves the readability of text tickers on newscasts and scoreboards on sports matches. Enabling * this feature will almost always improve your video quality. Note, though, that this feature doesn't take into * account where the viewer's attention is likely to be. If viewers are likely to be focusing their attention on * a part of the screen that doesn't have moving objects with sharp edges, such as sports athletes' faces, you * might choose to disable this feature. Related setting: When you enable temporal quantization, adjust the * strength of the filter with the setting Adaptive quantization. * * @param temporalAdaptiveQuantization * Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation * of content complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame * that aren't moving and uses more bits on complex objects with sharp edges that move a lot. For * example, this feature improves the readability of text tickers on newscasts and scoreboards on sports * matches. Enabling this feature will almost always improve your video quality. Note, though, that this * feature doesn't take into account where the viewer's attention is likely to be. 
If viewers are likely * to be focusing their attention on a part of the screen that doesn't have moving objects with sharp * edges, such as sports athletes' faces, you might choose to disable this feature. Related setting: When * you enable temporal quantization, adjust the strength of the filter with the setting Adaptive * quantization. * @see Mpeg2TemporalAdaptiveQuantization * @return Returns a reference to this object so that method calls can be chained together. * @see Mpeg2TemporalAdaptiveQuantization */ Builder temporalAdaptiveQuantization(String temporalAdaptiveQuantization); /** * Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of * content complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that * aren't moving and uses more bits on complex objects with sharp edges that move a lot. For example, this * feature improves the readability of text tickers on newscasts and scoreboards on sports matches. Enabling * this feature will almost always improve your video quality. Note, though, that this feature doesn't take into * account where the viewer's attention is likely to be. If viewers are likely to be focusing their attention on * a part of the screen that doesn't have moving objects with sharp edges, such as sports athletes' faces, you * might choose to disable this feature. Related setting: When you enable temporal quantization, adjust the * strength of the filter with the setting Adaptive quantization. * * @param temporalAdaptiveQuantization * Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation * of content complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame * that aren't moving and uses more bits on complex objects with sharp edges that move a lot. For * example, this feature improves the readability of text tickers on newscasts and scoreboards on sports * matches. Enabling this feature will almost always improve your video quality. Note, though, that this * feature doesn't take into account where the viewer's attention is likely to be. If viewers are likely * to be focusing their attention on a part of the screen that doesn't have moving objects with sharp * edges, such as sports athletes' faces, you might choose to disable this feature. Related setting: When * you enable temporal quantization, adjust the strength of the filter with the setting Adaptive * quantization. * @see Mpeg2TemporalAdaptiveQuantization * @return Returns a reference to this object so that method calls can be chained together. 
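         * <p>
         * A sketch of the related-setting pairing described above (constant names are assumed):
         * {@code .temporalAdaptiveQuantization(Mpeg2TemporalAdaptiveQuantization.ENABLED).adaptiveQuantization(Mpeg2AdaptiveQuantization.HIGH)}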
* @see Mpeg2TemporalAdaptiveQuantization */ Builder temporalAdaptiveQuantization(Mpeg2TemporalAdaptiveQuantization temporalAdaptiveQuantization); } static final class BuilderImpl implements Builder { private String adaptiveQuantization; private Integer bitrate; private String codecLevel; private String codecProfile; private String dynamicSubGop; private String framerateControl; private String framerateConversionAlgorithm; private Integer framerateDenominator; private Integer framerateNumerator; private Integer gopClosedCadence; private Double gopSize; private String gopSizeUnits; private Integer hrdBufferFinalFillPercentage; private Integer hrdBufferInitialFillPercentage; private Integer hrdBufferSize; private String interlaceMode; private String intraDcPrecision; private Integer maxBitrate; private Integer minIInterval; private Integer numberBFramesBetweenReferenceFrames; private String parControl; private Integer parDenominator; private Integer parNumerator; private String qualityTuningLevel; private String rateControlMode; private String scanTypeConversionMode; private String sceneChangeDetect; private String slowPal; private Integer softness; private String spatialAdaptiveQuantization; private String syntax; private String telecine; private String temporalAdaptiveQuantization; private BuilderImpl() { } private BuilderImpl(Mpeg2Settings model) { adaptiveQuantization(model.adaptiveQuantization); bitrate(model.bitrate); codecLevel(model.codecLevel); codecProfile(model.codecProfile); dynamicSubGop(model.dynamicSubGop); framerateControl(model.framerateControl); framerateConversionAlgorithm(model.framerateConversionAlgorithm); framerateDenominator(model.framerateDenominator); framerateNumerator(model.framerateNumerator); gopClosedCadence(model.gopClosedCadence); gopSize(model.gopSize); gopSizeUnits(model.gopSizeUnits); hrdBufferFinalFillPercentage(model.hrdBufferFinalFillPercentage); hrdBufferInitialFillPercentage(model.hrdBufferInitialFillPercentage); hrdBufferSize(model.hrdBufferSize); interlaceMode(model.interlaceMode); intraDcPrecision(model.intraDcPrecision); maxBitrate(model.maxBitrate); minIInterval(model.minIInterval); numberBFramesBetweenReferenceFrames(model.numberBFramesBetweenReferenceFrames); parControl(model.parControl); parDenominator(model.parDenominator); parNumerator(model.parNumerator); qualityTuningLevel(model.qualityTuningLevel); rateControlMode(model.rateControlMode); scanTypeConversionMode(model.scanTypeConversionMode); sceneChangeDetect(model.sceneChangeDetect); slowPal(model.slowPal); softness(model.softness); spatialAdaptiveQuantization(model.spatialAdaptiveQuantization); syntax(model.syntax); telecine(model.telecine); temporalAdaptiveQuantization(model.temporalAdaptiveQuantization); } public final String getAdaptiveQuantization() { return adaptiveQuantization; } public final void setAdaptiveQuantization(String adaptiveQuantization) { this.adaptiveQuantization = adaptiveQuantization; } @Override public final Builder adaptiveQuantization(String adaptiveQuantization) { this.adaptiveQuantization = adaptiveQuantization; return this; } @Override public final Builder adaptiveQuantization(Mpeg2AdaptiveQuantization adaptiveQuantization) { this.adaptiveQuantization(adaptiveQuantization == null ? 
null : adaptiveQuantization.toString()); return this; } public final Integer getBitrate() { return bitrate; } public final void setBitrate(Integer bitrate) { this.bitrate = bitrate; } @Override public final Builder bitrate(Integer bitrate) { this.bitrate = bitrate; return this; } public final String getCodecLevel() { return codecLevel; } public final void setCodecLevel(String codecLevel) { this.codecLevel = codecLevel; } @Override public final Builder codecLevel(String codecLevel) { this.codecLevel = codecLevel; return this; } @Override public final Builder codecLevel(Mpeg2CodecLevel codecLevel) { this.codecLevel(codecLevel == null ? null : codecLevel.toString()); return this; } public final String getCodecProfile() { return codecProfile; } public final void setCodecProfile(String codecProfile) { this.codecProfile = codecProfile; } @Override public final Builder codecProfile(String codecProfile) { this.codecProfile = codecProfile; return this; } @Override public final Builder codecProfile(Mpeg2CodecProfile codecProfile) { this.codecProfile(codecProfile == null ? null : codecProfile.toString()); return this; } public final String getDynamicSubGop() { return dynamicSubGop; } public final void setDynamicSubGop(String dynamicSubGop) { this.dynamicSubGop = dynamicSubGop; } @Override public final Builder dynamicSubGop(String dynamicSubGop) { this.dynamicSubGop = dynamicSubGop; return this; } @Override public final Builder dynamicSubGop(Mpeg2DynamicSubGop dynamicSubGop) { this.dynamicSubGop(dynamicSubGop == null ? null : dynamicSubGop.toString()); return this; } public final String getFramerateControl() { return framerateControl; } public final void setFramerateControl(String framerateControl) { this.framerateControl = framerateControl; } @Override public final Builder framerateControl(String framerateControl) { this.framerateControl = framerateControl; return this; } @Override public final Builder framerateControl(Mpeg2FramerateControl framerateControl) { this.framerateControl(framerateControl == null ? null : framerateControl.toString()); return this; } public final String getFramerateConversionAlgorithm() { return framerateConversionAlgorithm; } public final void setFramerateConversionAlgorithm(String framerateConversionAlgorithm) { this.framerateConversionAlgorithm = framerateConversionAlgorithm; } @Override public final Builder framerateConversionAlgorithm(String framerateConversionAlgorithm) { this.framerateConversionAlgorithm = framerateConversionAlgorithm; return this; } @Override public final Builder framerateConversionAlgorithm(Mpeg2FramerateConversionAlgorithm framerateConversionAlgorithm) { this.framerateConversionAlgorithm(framerateConversionAlgorithm == null ? 
null : framerateConversionAlgorithm .toString()); return this; } public final Integer getFramerateDenominator() { return framerateDenominator; } public final void setFramerateDenominator(Integer framerateDenominator) { this.framerateDenominator = framerateDenominator; } @Override public final Builder framerateDenominator(Integer framerateDenominator) { this.framerateDenominator = framerateDenominator; return this; } public final Integer getFramerateNumerator() { return framerateNumerator; } public final void setFramerateNumerator(Integer framerateNumerator) { this.framerateNumerator = framerateNumerator; } @Override public final Builder framerateNumerator(Integer framerateNumerator) { this.framerateNumerator = framerateNumerator; return this; } public final Integer getGopClosedCadence() { return gopClosedCadence; } public final void setGopClosedCadence(Integer gopClosedCadence) { this.gopClosedCadence = gopClosedCadence; } @Override public final Builder gopClosedCadence(Integer gopClosedCadence) { this.gopClosedCadence = gopClosedCadence; return this; } public final Double getGopSize() { return gopSize; } public final void setGopSize(Double gopSize) { this.gopSize = gopSize; } @Override public final Builder gopSize(Double gopSize) { this.gopSize = gopSize; return this; } public final String getGopSizeUnits() { return gopSizeUnits; } public final void setGopSizeUnits(String gopSizeUnits) { this.gopSizeUnits = gopSizeUnits; } @Override public final Builder gopSizeUnits(String gopSizeUnits) { this.gopSizeUnits = gopSizeUnits; return this; } @Override public final Builder gopSizeUnits(Mpeg2GopSizeUnits gopSizeUnits) { this.gopSizeUnits(gopSizeUnits == null ? null : gopSizeUnits.toString()); return this; } public final Integer getHrdBufferFinalFillPercentage() { return hrdBufferFinalFillPercentage; } public final void setHrdBufferFinalFillPercentage(Integer hrdBufferFinalFillPercentage) { this.hrdBufferFinalFillPercentage = hrdBufferFinalFillPercentage; } @Override public final Builder hrdBufferFinalFillPercentage(Integer hrdBufferFinalFillPercentage) { this.hrdBufferFinalFillPercentage = hrdBufferFinalFillPercentage; return this; } public final Integer getHrdBufferInitialFillPercentage() { return hrdBufferInitialFillPercentage; } public final void setHrdBufferInitialFillPercentage(Integer hrdBufferInitialFillPercentage) { this.hrdBufferInitialFillPercentage = hrdBufferInitialFillPercentage; } @Override public final Builder hrdBufferInitialFillPercentage(Integer hrdBufferInitialFillPercentage) { this.hrdBufferInitialFillPercentage = hrdBufferInitialFillPercentage; return this; } public final Integer getHrdBufferSize() { return hrdBufferSize; } public final void setHrdBufferSize(Integer hrdBufferSize) { this.hrdBufferSize = hrdBufferSize; } @Override public final Builder hrdBufferSize(Integer hrdBufferSize) { this.hrdBufferSize = hrdBufferSize; return this; } public final String getInterlaceMode() { return interlaceMode; } public final void setInterlaceMode(String interlaceMode) { this.interlaceMode = interlaceMode; } @Override public final Builder interlaceMode(String interlaceMode) { this.interlaceMode = interlaceMode; return this; } @Override public final Builder interlaceMode(Mpeg2InterlaceMode interlaceMode) { this.interlaceMode(interlaceMode == null ? 
null : interlaceMode.toString()); return this; } public final String getIntraDcPrecision() { return intraDcPrecision; } public final void setIntraDcPrecision(String intraDcPrecision) { this.intraDcPrecision = intraDcPrecision; } @Override public final Builder intraDcPrecision(String intraDcPrecision) { this.intraDcPrecision = intraDcPrecision; return this; } @Override public final Builder intraDcPrecision(Mpeg2IntraDcPrecision intraDcPrecision) { this.intraDcPrecision(intraDcPrecision == null ? null : intraDcPrecision.toString()); return this; } public final Integer getMaxBitrate() { return maxBitrate; } public final void setMaxBitrate(Integer maxBitrate) { this.maxBitrate = maxBitrate; } @Override public final Builder maxBitrate(Integer maxBitrate) { this.maxBitrate = maxBitrate; return this; } public final Integer getMinIInterval() { return minIInterval; } public final void setMinIInterval(Integer minIInterval) { this.minIInterval = minIInterval; } @Override public final Builder minIInterval(Integer minIInterval) { this.minIInterval = minIInterval; return this; } public final Integer getNumberBFramesBetweenReferenceFrames() { return numberBFramesBetweenReferenceFrames; } public final void setNumberBFramesBetweenReferenceFrames(Integer numberBFramesBetweenReferenceFrames) { this.numberBFramesBetweenReferenceFrames = numberBFramesBetweenReferenceFrames; } @Override public final Builder numberBFramesBetweenReferenceFrames(Integer numberBFramesBetweenReferenceFrames) { this.numberBFramesBetweenReferenceFrames = numberBFramesBetweenReferenceFrames; return this; } public final String getParControl() { return parControl; } public final void setParControl(String parControl) { this.parControl = parControl; } @Override public final Builder parControl(String parControl) { this.parControl = parControl; return this; } @Override public final Builder parControl(Mpeg2ParControl parControl) { this.parControl(parControl == null ? null : parControl.toString()); return this; } public final Integer getParDenominator() { return parDenominator; } public final void setParDenominator(Integer parDenominator) { this.parDenominator = parDenominator; } @Override public final Builder parDenominator(Integer parDenominator) { this.parDenominator = parDenominator; return this; } public final Integer getParNumerator() { return parNumerator; } public final void setParNumerator(Integer parNumerator) { this.parNumerator = parNumerator; } @Override public final Builder parNumerator(Integer parNumerator) { this.parNumerator = parNumerator; return this; } public final String getQualityTuningLevel() { return qualityTuningLevel; } public final void setQualityTuningLevel(String qualityTuningLevel) { this.qualityTuningLevel = qualityTuningLevel; } @Override public final Builder qualityTuningLevel(String qualityTuningLevel) { this.qualityTuningLevel = qualityTuningLevel; return this; } @Override public final Builder qualityTuningLevel(Mpeg2QualityTuningLevel qualityTuningLevel) { this.qualityTuningLevel(qualityTuningLevel == null ? null : qualityTuningLevel.toString()); return this; } public final String getRateControlMode() { return rateControlMode; } public final void setRateControlMode(String rateControlMode) { this.rateControlMode = rateControlMode; } @Override public final Builder rateControlMode(String rateControlMode) { this.rateControlMode = rateControlMode; return this; } @Override public final Builder rateControlMode(Mpeg2RateControlMode rateControlMode) { this.rateControlMode(rateControlMode == null ? 
null : rateControlMode.toString()); return this; } public final String getScanTypeConversionMode() { return scanTypeConversionMode; } public final void setScanTypeConversionMode(String scanTypeConversionMode) { this.scanTypeConversionMode = scanTypeConversionMode; } @Override public final Builder scanTypeConversionMode(String scanTypeConversionMode) { this.scanTypeConversionMode = scanTypeConversionMode; return this; } @Override public final Builder scanTypeConversionMode(Mpeg2ScanTypeConversionMode scanTypeConversionMode) { this.scanTypeConversionMode(scanTypeConversionMode == null ? null : scanTypeConversionMode.toString()); return this; } public final String getSceneChangeDetect() { return sceneChangeDetect; } public final void setSceneChangeDetect(String sceneChangeDetect) { this.sceneChangeDetect = sceneChangeDetect; } @Override public final Builder sceneChangeDetect(String sceneChangeDetect) { this.sceneChangeDetect = sceneChangeDetect; return this; } @Override public final Builder sceneChangeDetect(Mpeg2SceneChangeDetect sceneChangeDetect) { this.sceneChangeDetect(sceneChangeDetect == null ? null : sceneChangeDetect.toString()); return this; } public final String getSlowPal() { return slowPal; } public final void setSlowPal(String slowPal) { this.slowPal = slowPal; } @Override public final Builder slowPal(String slowPal) { this.slowPal = slowPal; return this; } @Override public final Builder slowPal(Mpeg2SlowPal slowPal) { this.slowPal(slowPal == null ? null : slowPal.toString()); return this; } public final Integer getSoftness() { return softness; } public final void setSoftness(Integer softness) { this.softness = softness; } @Override public final Builder softness(Integer softness) { this.softness = softness; return this; } public final String getSpatialAdaptiveQuantization() { return spatialAdaptiveQuantization; } public final void setSpatialAdaptiveQuantization(String spatialAdaptiveQuantization) { this.spatialAdaptiveQuantization = spatialAdaptiveQuantization; } @Override public final Builder spatialAdaptiveQuantization(String spatialAdaptiveQuantization) { this.spatialAdaptiveQuantization = spatialAdaptiveQuantization; return this; } @Override public final Builder spatialAdaptiveQuantization(Mpeg2SpatialAdaptiveQuantization spatialAdaptiveQuantization) { this.spatialAdaptiveQuantization(spatialAdaptiveQuantization == null ? null : spatialAdaptiveQuantization.toString()); return this; } public final String getSyntax() { return syntax; } public final void setSyntax(String syntax) { this.syntax = syntax; } @Override public final Builder syntax(String syntax) { this.syntax = syntax; return this; } @Override public final Builder syntax(Mpeg2Syntax syntax) { this.syntax(syntax == null ? null : syntax.toString()); return this; } public final String getTelecine() { return telecine; } public final void setTelecine(String telecine) { this.telecine = telecine; } @Override public final Builder telecine(String telecine) { this.telecine = telecine; return this; } @Override public final Builder telecine(Mpeg2Telecine telecine) { this.telecine(telecine == null ? 
null : telecine.toString());
            return this;
        }

        public final String getTemporalAdaptiveQuantization() {
            return temporalAdaptiveQuantization;
        }

        public final void setTemporalAdaptiveQuantization(String temporalAdaptiveQuantization) {
            this.temporalAdaptiveQuantization = temporalAdaptiveQuantization;
        }

        @Override
        public final Builder temporalAdaptiveQuantization(String temporalAdaptiveQuantization) {
            this.temporalAdaptiveQuantization = temporalAdaptiveQuantization;
            return this;
        }

        @Override
        public final Builder temporalAdaptiveQuantization(Mpeg2TemporalAdaptiveQuantization temporalAdaptiveQuantization) {
            this.temporalAdaptiveQuantization(temporalAdaptiveQuantization == null ? null : temporalAdaptiveQuantization
                    .toString());
            return this;
        }

        @Override
        public Mpeg2Settings build() {
            return new Mpeg2Settings(this);
        }

        @Override
        public List<SdkField<?>> sdkFields() {
            return SDK_FIELDS;
        }
    }
}
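A minimal usage sketch (illustrative values, not service defaults; the enum constants shown are assumed to exist on the corresponding Mpeg2* enums in this package):

        // Hypothetical example: MPEG-2 CBR output at 5 Mb/s, progressive scan, scene-change detection on.
        // Adjust every value to your own output requirements.
        Mpeg2Settings mpeg2 = Mpeg2Settings.builder()
                .codecProfile(Mpeg2CodecProfile.MAIN)
                .rateControlMode(Mpeg2RateControlMode.CBR)
                .bitrate(5000000)                              // five megabits per second, as in the Bitrate docs
                .gopSize(30.0)
                .numberBFramesBetweenReferenceFrames(2)        // the documented default
                .interlaceMode(Mpeg2InterlaceMode.PROGRESSIVE)
                .sceneChangeDetect(Mpeg2SceneChangeDetect.ENABLED)
                .build();

Each enum-typed builder method stores enum.toString(), so passing the equivalent raw string (for example "CBR") to the String overload marshals the same value.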



