// software.amazon.awssdk.services.mediaconvert.model.H265Settings — Maven / Gradle / Ivy
// artifact-page header ("Show all versions of mediaconvert / Show documentation"): non-Java
// residue from the documentation page this source was extracted from, preserved as a comment.
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package software.amazon.awssdk.services.mediaconvert.model;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import software.amazon.awssdk.annotations.Generated;
import software.amazon.awssdk.core.SdkField;
import software.amazon.awssdk.core.SdkPojo;
import software.amazon.awssdk.core.protocol.MarshallLocation;
import software.amazon.awssdk.core.protocol.MarshallingType;
import software.amazon.awssdk.core.traits.LocationTrait;
import software.amazon.awssdk.utils.ToString;
import software.amazon.awssdk.utils.builder.CopyableBuilder;
import software.amazon.awssdk.utils.builder.ToCopyableBuilder;
/**
* Settings for H265 codec
*/
@Generated("software.amazon.awssdk:codegen")
public final class H265Settings implements SdkPojo, Serializable, ToCopyableBuilder {
// Marshalling descriptors for the members of this shape, in declaration order. Each SdkField
// binds a wire name ("adaptiveQuantization", ...) to a typed getter/setter pair on this class
// and its Builder. NOTE(review): the generic type arguments (SdkField<String>,
// SdkField.<String> builder(...)) were stripped by an HTML extraction pass and are restored
// here from the MarshallingType of each field, matching the AWS SDK v2 codegen pattern.
private static final SdkField<String> ADAPTIVE_QUANTIZATION_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("AdaptiveQuantization").getter(getter(H265Settings::adaptiveQuantizationAsString))
        .setter(setter(Builder::adaptiveQuantization))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("adaptiveQuantization").build())
        .build();

private static final SdkField<String> ALTERNATE_TRANSFER_FUNCTION_SEI_FIELD = SdkField
        .<String> builder(MarshallingType.STRING)
        .memberName("AlternateTransferFunctionSei")
        .getter(getter(H265Settings::alternateTransferFunctionSeiAsString))
        .setter(setter(Builder::alternateTransferFunctionSei))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("alternateTransferFunctionSei")
                .build()).build();

// Nested POJO member: needs a constructor reference so the unmarshaller can build the value.
private static final SdkField<BandwidthReductionFilter> BANDWIDTH_REDUCTION_FILTER_FIELD = SdkField
        .<BandwidthReductionFilter> builder(MarshallingType.SDK_POJO).memberName("BandwidthReductionFilter")
        .getter(getter(H265Settings::bandwidthReductionFilter)).setter(setter(Builder::bandwidthReductionFilter))
        .constructor(BandwidthReductionFilter::builder)
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("bandwidthReductionFilter").build())
        .build();

private static final SdkField<Integer> BITRATE_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("Bitrate").getter(getter(H265Settings::bitrate)).setter(setter(Builder::bitrate))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("bitrate").build()).build();

private static final SdkField<String> CODEC_LEVEL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("CodecLevel").getter(getter(H265Settings::codecLevelAsString)).setter(setter(Builder::codecLevel))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("codecLevel").build()).build();

private static final SdkField<String> CODEC_PROFILE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("CodecProfile").getter(getter(H265Settings::codecProfileAsString)).setter(setter(Builder::codecProfile))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("codecProfile").build()).build();

private static final SdkField<String> DYNAMIC_SUB_GOP_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("DynamicSubGop").getter(getter(H265Settings::dynamicSubGopAsString))
        .setter(setter(Builder::dynamicSubGop))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("dynamicSubGop").build()).build();

private static final SdkField<String> END_OF_STREAM_MARKERS_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("EndOfStreamMarkers").getter(getter(H265Settings::endOfStreamMarkersAsString))
        .setter(setter(Builder::endOfStreamMarkers))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("endOfStreamMarkers").build())
        .build();
// Marshalling descriptors (continued). NOTE(review): generic type arguments restored — the
// HTML extraction stripped them; each is derived from the field's MarshallingType.
private static final SdkField<String> FLICKER_ADAPTIVE_QUANTIZATION_FIELD = SdkField
        .<String> builder(MarshallingType.STRING)
        .memberName("FlickerAdaptiveQuantization")
        .getter(getter(H265Settings::flickerAdaptiveQuantizationAsString))
        .setter(setter(Builder::flickerAdaptiveQuantization))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("flickerAdaptiveQuantization")
                .build()).build();

private static final SdkField<String> FRAMERATE_CONTROL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("FramerateControl").getter(getter(H265Settings::framerateControlAsString))
        .setter(setter(Builder::framerateControl))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("framerateControl").build()).build();

private static final SdkField<String> FRAMERATE_CONVERSION_ALGORITHM_FIELD = SdkField
        .<String> builder(MarshallingType.STRING)
        .memberName("FramerateConversionAlgorithm")
        .getter(getter(H265Settings::framerateConversionAlgorithmAsString))
        .setter(setter(Builder::framerateConversionAlgorithm))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("framerateConversionAlgorithm")
                .build()).build();

private static final SdkField<Integer> FRAMERATE_DENOMINATOR_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("FramerateDenominator").getter(getter(H265Settings::framerateDenominator))
        .setter(setter(Builder::framerateDenominator))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("framerateDenominator").build())
        .build();

private static final SdkField<Integer> FRAMERATE_NUMERATOR_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("FramerateNumerator").getter(getter(H265Settings::framerateNumerator))
        .setter(setter(Builder::framerateNumerator))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("framerateNumerator").build())
        .build();

private static final SdkField<String> GOP_B_REFERENCE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("GopBReference").getter(getter(H265Settings::gopBReferenceAsString))
        .setter(setter(Builder::gopBReference))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("gopBReference").build()).build();

private static final SdkField<Integer> GOP_CLOSED_CADENCE_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("GopClosedCadence").getter(getter(H265Settings::gopClosedCadence))
        .setter(setter(Builder::gopClosedCadence))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("gopClosedCadence").build()).build();

private static final SdkField<Double> GOP_SIZE_FIELD = SdkField.<Double> builder(MarshallingType.DOUBLE)
        .memberName("GopSize").getter(getter(H265Settings::gopSize)).setter(setter(Builder::gopSize))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("gopSize").build()).build();

private static final SdkField<String> GOP_SIZE_UNITS_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("GopSizeUnits").getter(getter(H265Settings::gopSizeUnitsAsString)).setter(setter(Builder::gopSizeUnits))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("gopSizeUnits").build()).build();

private static final SdkField<Integer> HRD_BUFFER_FINAL_FILL_PERCENTAGE_FIELD = SdkField
        .<Integer> builder(MarshallingType.INTEGER)
        .memberName("HrdBufferFinalFillPercentage")
        .getter(getter(H265Settings::hrdBufferFinalFillPercentage))
        .setter(setter(Builder::hrdBufferFinalFillPercentage))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("hrdBufferFinalFillPercentage")
                .build()).build();

private static final SdkField<Integer> HRD_BUFFER_INITIAL_FILL_PERCENTAGE_FIELD = SdkField
        .<Integer> builder(MarshallingType.INTEGER)
        .memberName("HrdBufferInitialFillPercentage")
        .getter(getter(H265Settings::hrdBufferInitialFillPercentage))
        .setter(setter(Builder::hrdBufferInitialFillPercentage))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("hrdBufferInitialFillPercentage")
                .build()).build();

private static final SdkField<Integer> HRD_BUFFER_SIZE_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("HrdBufferSize").getter(getter(H265Settings::hrdBufferSize)).setter(setter(Builder::hrdBufferSize))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("hrdBufferSize").build()).build();
// Marshalling descriptors (continued). NOTE(review): generic type arguments restored — the
// HTML extraction stripped them; each is derived from the field's MarshallingType.
private static final SdkField<String> INTERLACE_MODE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("InterlaceMode").getter(getter(H265Settings::interlaceModeAsString))
        .setter(setter(Builder::interlaceMode))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("interlaceMode").build()).build();

private static final SdkField<Integer> MAX_BITRATE_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("MaxBitrate").getter(getter(H265Settings::maxBitrate)).setter(setter(Builder::maxBitrate))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("maxBitrate").build()).build();

private static final SdkField<Integer> MIN_I_INTERVAL_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("MinIInterval").getter(getter(H265Settings::minIInterval)).setter(setter(Builder::minIInterval))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("minIInterval").build()).build();

private static final SdkField<Integer> NUMBER_B_FRAMES_BETWEEN_REFERENCE_FRAMES_FIELD = SdkField
        .<Integer> builder(MarshallingType.INTEGER)
        .memberName("NumberBFramesBetweenReferenceFrames")
        .getter(getter(H265Settings::numberBFramesBetweenReferenceFrames))
        .setter(setter(Builder::numberBFramesBetweenReferenceFrames))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD)
                .locationName("numberBFramesBetweenReferenceFrames").build()).build();

private static final SdkField<Integer> NUMBER_REFERENCE_FRAMES_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("NumberReferenceFrames").getter(getter(H265Settings::numberReferenceFrames))
        .setter(setter(Builder::numberReferenceFrames))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("numberReferenceFrames").build())
        .build();

private static final SdkField<String> PAR_CONTROL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("ParControl").getter(getter(H265Settings::parControlAsString)).setter(setter(Builder::parControl))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("parControl").build()).build();

private static final SdkField<Integer> PAR_DENOMINATOR_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("ParDenominator").getter(getter(H265Settings::parDenominator)).setter(setter(Builder::parDenominator))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("parDenominator").build()).build();

private static final SdkField<Integer> PAR_NUMERATOR_FIELD = SdkField.<Integer> builder(MarshallingType.INTEGER)
        .memberName("ParNumerator").getter(getter(H265Settings::parNumerator)).setter(setter(Builder::parNumerator))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("parNumerator").build()).build();

private static final SdkField<String> QUALITY_TUNING_LEVEL_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("QualityTuningLevel").getter(getter(H265Settings::qualityTuningLevelAsString))
        .setter(setter(Builder::qualityTuningLevel))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("qualityTuningLevel").build())
        .build();

// Nested POJO member: needs a constructor reference so the unmarshaller can build the value.
private static final SdkField<H265QvbrSettings> QVBR_SETTINGS_FIELD = SdkField
        .<H265QvbrSettings> builder(MarshallingType.SDK_POJO).memberName("QvbrSettings")
        .getter(getter(H265Settings::qvbrSettings)).setter(setter(Builder::qvbrSettings))
        .constructor(H265QvbrSettings::builder)
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("qvbrSettings").build()).build();

private static final SdkField<String> RATE_CONTROL_MODE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("RateControlMode").getter(getter(H265Settings::rateControlModeAsString))
        .setter(setter(Builder::rateControlMode))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("rateControlMode").build()).build();

private static final SdkField<String> SAMPLE_ADAPTIVE_OFFSET_FILTER_MODE_FIELD = SdkField
        .<String> builder(MarshallingType.STRING)
        .memberName("SampleAdaptiveOffsetFilterMode")
        .getter(getter(H265Settings::sampleAdaptiveOffsetFilterModeAsString))
        .setter(setter(Builder::sampleAdaptiveOffsetFilterMode))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("sampleAdaptiveOffsetFilterMode")
                .build()).build();

private static final SdkField<String> SCAN_TYPE_CONVERSION_MODE_FIELD = SdkField.<String> builder(MarshallingType.STRING)
        .memberName("ScanTypeConversionMode").getter(getter(H265Settings::scanTypeConversionModeAsString))
        .setter(setter(Builder::scanTypeConversionMode))
        .traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("scanTypeConversionMode").build())
        .build();
private static final SdkField SCENE_CHANGE_DETECT_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("SceneChangeDetect").getter(getter(H265Settings::sceneChangeDetectAsString))
.setter(setter(Builder::sceneChangeDetect))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("sceneChangeDetect").build()).build();
private static final SdkField SLICES_FIELD = SdkField. builder(MarshallingType.INTEGER)
.memberName("Slices").getter(getter(H265Settings::slices)).setter(setter(Builder::slices))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("slices").build()).build();
private static final SdkField SLOW_PAL_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("SlowPal").getter(getter(H265Settings::slowPalAsString)).setter(setter(Builder::slowPal))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("slowPal").build()).build();
private static final SdkField SPATIAL_ADAPTIVE_QUANTIZATION_FIELD = SdkField
. builder(MarshallingType.STRING)
.memberName("SpatialAdaptiveQuantization")
.getter(getter(H265Settings::spatialAdaptiveQuantizationAsString))
.setter(setter(Builder::spatialAdaptiveQuantization))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("spatialAdaptiveQuantization")
.build()).build();
private static final SdkField TELECINE_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("Telecine").getter(getter(H265Settings::telecineAsString)).setter(setter(Builder::telecine))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("telecine").build()).build();
private static final SdkField TEMPORAL_ADAPTIVE_QUANTIZATION_FIELD = SdkField
. builder(MarshallingType.STRING)
.memberName("TemporalAdaptiveQuantization")
.getter(getter(H265Settings::temporalAdaptiveQuantizationAsString))
.setter(setter(Builder::temporalAdaptiveQuantization))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("temporalAdaptiveQuantization")
.build()).build();
private static final SdkField TEMPORAL_IDS_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("TemporalIds").getter(getter(H265Settings::temporalIdsAsString)).setter(setter(Builder::temporalIds))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("temporalIds").build()).build();
private static final SdkField TILES_FIELD = SdkField. builder(MarshallingType.STRING).memberName("Tiles")
.getter(getter(H265Settings::tilesAsString)).setter(setter(Builder::tiles))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("tiles").build()).build();
private static final SdkField UNREGISTERED_SEI_TIMECODE_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("UnregisteredSeiTimecode").getter(getter(H265Settings::unregisteredSeiTimecodeAsString))
.setter(setter(Builder::unregisteredSeiTimecode))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("unregisteredSeiTimecode").build())
.build();
private static final SdkField WRITE_MP4_PACKAGING_TYPE_FIELD = SdkField. builder(MarshallingType.STRING)
.memberName("WriteMp4PackagingType").getter(getter(H265Settings::writeMp4PackagingTypeAsString))
.setter(setter(Builder::writeMp4PackagingType))
.traits(LocationTrait.builder().location(MarshallLocation.PAYLOAD).locationName("writeMp4PackagingType").build())
.build();
private static final List> SDK_FIELDS = Collections.unmodifiableList(Arrays.asList(ADAPTIVE_QUANTIZATION_FIELD,
ALTERNATE_TRANSFER_FUNCTION_SEI_FIELD, BANDWIDTH_REDUCTION_FILTER_FIELD, BITRATE_FIELD, CODEC_LEVEL_FIELD,
CODEC_PROFILE_FIELD, DYNAMIC_SUB_GOP_FIELD, END_OF_STREAM_MARKERS_FIELD, FLICKER_ADAPTIVE_QUANTIZATION_FIELD,
FRAMERATE_CONTROL_FIELD, FRAMERATE_CONVERSION_ALGORITHM_FIELD, FRAMERATE_DENOMINATOR_FIELD,
FRAMERATE_NUMERATOR_FIELD, GOP_B_REFERENCE_FIELD, GOP_CLOSED_CADENCE_FIELD, GOP_SIZE_FIELD, GOP_SIZE_UNITS_FIELD,
HRD_BUFFER_FINAL_FILL_PERCENTAGE_FIELD, HRD_BUFFER_INITIAL_FILL_PERCENTAGE_FIELD, HRD_BUFFER_SIZE_FIELD,
INTERLACE_MODE_FIELD, MAX_BITRATE_FIELD, MIN_I_INTERVAL_FIELD, NUMBER_B_FRAMES_BETWEEN_REFERENCE_FRAMES_FIELD,
NUMBER_REFERENCE_FRAMES_FIELD, PAR_CONTROL_FIELD, PAR_DENOMINATOR_FIELD, PAR_NUMERATOR_FIELD,
QUALITY_TUNING_LEVEL_FIELD, QVBR_SETTINGS_FIELD, RATE_CONTROL_MODE_FIELD, SAMPLE_ADAPTIVE_OFFSET_FILTER_MODE_FIELD,
SCAN_TYPE_CONVERSION_MODE_FIELD, SCENE_CHANGE_DETECT_FIELD, SLICES_FIELD, SLOW_PAL_FIELD,
SPATIAL_ADAPTIVE_QUANTIZATION_FIELD, TELECINE_FIELD, TEMPORAL_ADAPTIVE_QUANTIZATION_FIELD, TEMPORAL_IDS_FIELD,
TILES_FIELD, UNREGISTERED_SEI_TIMECODE_FIELD, WRITE_MP4_PACKAGING_TYPE_FIELD));
private static final long serialVersionUID = 1L;
// Immutable member state, assigned once in the private constructor from BuilderImpl.
// Enum-backed settings (adaptiveQuantization, codecLevel, ...) are stored as their raw String
// wire values; the typed accessors convert via fromValue() on read, so values unknown to this
// SDK version still round-trip unchanged. Nested settings are held as SDK POJO references.
private final String adaptiveQuantization;
private final String alternateTransferFunctionSei;
private final BandwidthReductionFilter bandwidthReductionFilter;
private final Integer bitrate;
private final String codecLevel;
private final String codecProfile;
private final String dynamicSubGop;
private final String endOfStreamMarkers;
private final String flickerAdaptiveQuantization;
private final String framerateControl;
private final String framerateConversionAlgorithm;
private final Integer framerateDenominator;
private final Integer framerateNumerator;
private final String gopBReference;
private final Integer gopClosedCadence;
private final Double gopSize;
private final String gopSizeUnits;
private final Integer hrdBufferFinalFillPercentage;
private final Integer hrdBufferInitialFillPercentage;
private final Integer hrdBufferSize;
private final String interlaceMode;
private final Integer maxBitrate;
private final Integer minIInterval;
private final Integer numberBFramesBetweenReferenceFrames;
private final Integer numberReferenceFrames;
private final String parControl;
private final Integer parDenominator;
private final Integer parNumerator;
private final String qualityTuningLevel;
private final H265QvbrSettings qvbrSettings;
private final String rateControlMode;
private final String sampleAdaptiveOffsetFilterMode;
private final String scanTypeConversionMode;
private final String sceneChangeDetect;
private final Integer slices;
private final String slowPal;
private final String spatialAdaptiveQuantization;
private final String telecine;
private final String temporalAdaptiveQuantization;
private final String temporalIds;
private final String tiles;
private final String unregisteredSeiTimecode;
private final String writeMp4PackagingType;
/**
 * Builds an immutable H265Settings from the builder's current state. Every member is copied
 * verbatim from the builder, one assignment per field in declaration order; no validation and
 * no defensive copying happens here. Only reachable through the Builder (private constructor).
 */
private H265Settings(BuilderImpl builder) {
    this.adaptiveQuantization = builder.adaptiveQuantization;
    this.alternateTransferFunctionSei = builder.alternateTransferFunctionSei;
    this.bandwidthReductionFilter = builder.bandwidthReductionFilter;
    this.bitrate = builder.bitrate;
    this.codecLevel = builder.codecLevel;
    this.codecProfile = builder.codecProfile;
    this.dynamicSubGop = builder.dynamicSubGop;
    this.endOfStreamMarkers = builder.endOfStreamMarkers;
    this.flickerAdaptiveQuantization = builder.flickerAdaptiveQuantization;
    this.framerateControl = builder.framerateControl;
    this.framerateConversionAlgorithm = builder.framerateConversionAlgorithm;
    this.framerateDenominator = builder.framerateDenominator;
    this.framerateNumerator = builder.framerateNumerator;
    this.gopBReference = builder.gopBReference;
    this.gopClosedCadence = builder.gopClosedCadence;
    this.gopSize = builder.gopSize;
    this.gopSizeUnits = builder.gopSizeUnits;
    this.hrdBufferFinalFillPercentage = builder.hrdBufferFinalFillPercentage;
    this.hrdBufferInitialFillPercentage = builder.hrdBufferInitialFillPercentage;
    this.hrdBufferSize = builder.hrdBufferSize;
    this.interlaceMode = builder.interlaceMode;
    this.maxBitrate = builder.maxBitrate;
    this.minIInterval = builder.minIInterval;
    this.numberBFramesBetweenReferenceFrames = builder.numberBFramesBetweenReferenceFrames;
    this.numberReferenceFrames = builder.numberReferenceFrames;
    this.parControl = builder.parControl;
    this.parDenominator = builder.parDenominator;
    this.parNumerator = builder.parNumerator;
    this.qualityTuningLevel = builder.qualityTuningLevel;
    this.qvbrSettings = builder.qvbrSettings;
    this.rateControlMode = builder.rateControlMode;
    this.sampleAdaptiveOffsetFilterMode = builder.sampleAdaptiveOffsetFilterMode;
    this.scanTypeConversionMode = builder.scanTypeConversionMode;
    this.sceneChangeDetect = builder.sceneChangeDetect;
    this.slices = builder.slices;
    this.slowPal = builder.slowPal;
    this.spatialAdaptiveQuantization = builder.spatialAdaptiveQuantization;
    this.telecine = builder.telecine;
    this.temporalAdaptiveQuantization = builder.temporalAdaptiveQuantization;
    this.temporalIds = builder.temporalIds;
    this.tiles = builder.tiles;
    this.unregisteredSeiTimecode = builder.unregisteredSeiTimecode;
    this.writeMp4PackagingType = builder.writeMp4PackagingType;
}
/**
 * When you set Adaptive Quantization to Auto, or leave blank, MediaConvert automatically applies
 * quantization to improve the video quality of your output. Set it to Low, Medium, High, Higher,
 * or Max to manually control the strength of the quantization filter; you can then also specify
 * Spatial, Temporal, and Flicker Adaptive Quantization for finer control. Set it to Off to apply
 * no quantization to your output.
 *
 * If the service returns an enum value that this SDK version does not know, this method returns
 * {@link H265AdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}; the raw wire value is still available
 * from {@link #adaptiveQuantizationAsString}.
 *
 * @return the adaptive-quantization setting as a typed enum constant
 * @see H265AdaptiveQuantization
 */
public final H265AdaptiveQuantization adaptiveQuantization() {
    return H265AdaptiveQuantization.fromValue(this.adaptiveQuantization);
}
/**
 * Raw service value of the adaptive-quantization setting (see {@link #adaptiveQuantization} for
 * the meaning of the values). Unlike the typed accessor, this never collapses values unknown to
 * this SDK version to {@code UNKNOWN_TO_SDK_VERSION} — it returns exactly what the service sent.
 *
 * @return the adaptive-quantization setting as its raw String value
 * @see H265AdaptiveQuantization
 */
public final String adaptiveQuantizationAsString() {
    return this.adaptiveQuantization;
}
/**
 * Enables Alternate Transfer Function SEI message for outputs using Hybrid Log Gamma (HLG)
 * Electro-Optical Transfer Function (EOTF).
 *
 * If the service returns an enum value that this SDK version does not know, this method returns
 * {@link H265AlternateTransferFunctionSei#UNKNOWN_TO_SDK_VERSION}; the raw wire value is still
 * available from {@link #alternateTransferFunctionSeiAsString}.
 *
 * @return the alternate-transfer-function-SEI setting as a typed enum constant
 * @see H265AlternateTransferFunctionSei
 */
public final H265AlternateTransferFunctionSei alternateTransferFunctionSei() {
    return H265AlternateTransferFunctionSei.fromValue(this.alternateTransferFunctionSei);
}
/**
 * Raw service value of the alternate-transfer-function-SEI setting (see
 * {@link #alternateTransferFunctionSei}). Unlike the typed accessor, values unknown to this SDK
 * version are returned verbatim rather than collapsed to {@code UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the alternate-transfer-function-SEI setting as its raw String value
 * @see H265AlternateTransferFunctionSei
 */
public final String alternateTransferFunctionSeiAsString() {
    return this.alternateTransferFunctionSei;
}
/**
 * The Bandwidth reduction filter increases the video quality of your output relative to its
 * bitrate: use it to lower the bitrate of a constant-quality QVBR output with little or no
 * perceptual decrease in quality, or to increase the video quality of outputs using other
 * rate-control modes relative to the bitrate you specify. Bandwidth reduction increases further
 * when your input is low quality or noisy. Outputs that use this feature incur pro-tier pricing.
 * When you include Bandwidth reduction filter, you cannot include the Noise reducer preprocessor.
 *
 * @return the bandwidth reduction filter settings, or {@code null} if not configured
 */
public final BandwidthReductionFilter bandwidthReductionFilter() {
    return this.bandwidthReductionFilter;
}
/**
 * Specify the average bitrate in bits per second. Required for VBR and CBR. For MS Smooth
 * outputs, bitrates must be unique when rounded down to the nearest multiple of 1000.
 *
 * @return the average bitrate in bits per second, or {@code null} if unset
 */
public final Integer bitrate() {
    return this.bitrate;
}
/**
 * H.265 Level.
 *
 * If the service returns an enum value that this SDK version does not know, this method returns
 * {@link H265CodecLevel#UNKNOWN_TO_SDK_VERSION}; the raw wire value is still available from
 * {@link #codecLevelAsString}.
 *
 * @return the H.265 level as a typed enum constant
 * @see H265CodecLevel
 */
public final H265CodecLevel codecLevel() {
    return H265CodecLevel.fromValue(this.codecLevel);
}
/**
 * H.265 Level, as the raw String the service returned. Unlike {@link #codecLevel}, values
 * unknown to this SDK version are returned verbatim rather than collapsed to
 * {@code UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the H.265 level as its raw String value
 * @see H265CodecLevel
 */
public final String codecLevelAsString() {
    return this.codecLevel;
}
/**
 * Represents the Profile and Tier, per the HEVC (H.265) specification. Selections are grouped as
 * [Profile] / [Tier], so "Main/High" represents Main Profile with High Tier. 4:2:2 profiles are
 * only available with the HEVC 4:2:2 License.
 *
 * If the service returns an enum value that this SDK version does not know, this method returns
 * {@link H265CodecProfile#UNKNOWN_TO_SDK_VERSION}; the raw wire value is still available from
 * {@link #codecProfileAsString}.
 *
 * @return the H.265 profile/tier as a typed enum constant
 * @see H265CodecProfile
 */
public final H265CodecProfile codecProfile() {
    return H265CodecProfile.fromValue(this.codecProfile);
}
/**
 * The H.265 Profile/Tier selection as the raw String the service returned (see
 * {@link #codecProfile} for the meaning of the values). Unlike the typed accessor, values
 * unknown to this SDK version are returned verbatim rather than collapsed to
 * {@code UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the H.265 profile/tier as its raw String value
 * @see H265CodecProfile
 */
public final String codecProfileAsString() {
    return this.codecProfile;
}
/**
 * Specify whether to allow the number of B-frames in your output GOP structure to vary or not
 * depending on your input video content. To improve the subjective video quality of output with
 * high-motion content, leave blank or keep the default value, Adaptive: MediaConvert will use
 * fewer B-frames for high-motion video content than for low-motion content. The maximum number
 * of B-frames is limited by the value you choose for B-frames between reference frames. To use
 * the same number of B-frames for all types of content, choose Static.
 *
 * If the service returns an enum value that this SDK version does not know, this method returns
 * {@link H265DynamicSubGop#UNKNOWN_TO_SDK_VERSION}; the raw wire value is still available from
 * {@link #dynamicSubGopAsString}.
 *
 * @return the dynamic sub-GOP setting as a typed enum constant
 * @see H265DynamicSubGop
 */
public final H265DynamicSubGop dynamicSubGop() {
    return H265DynamicSubGop.fromValue(this.dynamicSubGop);
}
/**
* Specify whether to allow the number of B-frames in your output GOP structure to vary or not depending on your
* input video content. To improve the subjective video quality of your output that has high-motion content: Leave
* blank or keep the default value Adaptive. MediaConvert will use fewer B-frames for high-motion video content than
* low-motion content. The maximum number of B- frames is limited by the value that you choose for B-frames between
* reference frames. To use the same number B-frames for all types of content: Choose Static.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #dynamicSubGop}
* will return {@link H265DynamicSubGop#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available
* from {@link #dynamicSubGopAsString}.
*
*
* @return Specify whether to allow the number of B-frames in your output GOP structure to vary or not depending on
* your input video content. To improve the subjective video quality of your output that has high-motion
* content: Leave blank or keep the default value Adaptive. MediaConvert will use fewer B-frames for
* high-motion video content than low-motion content. The maximum number of B- frames is limited by the
* value that you choose for B-frames between reference frames. To use the same number B-frames for all
* types of content: Choose Static.
* @see H265DynamicSubGop
*/
public final String dynamicSubGopAsString() {
return dynamicSubGop;
}
/**
* Optionally include or suppress markers at the end of your output that signal the end of the video stream. To
* include end of stream markers: Leave blank or keep the default value, Include. To not include end of stream
* markers: Choose Suppress. This is useful when your output will be inserted into another stream.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #endOfStreamMarkers} will return {@link H265EndOfStreamMarkers#UNKNOWN_TO_SDK_VERSION}. The raw value
* returned by the service is available from {@link #endOfStreamMarkersAsString}.
*
*
* @return Optionally include or suppress markers at the end of your output that signal the end of the video stream.
* To include end of stream markers: Leave blank or keep the default value, Include. To not include end of
* stream markers: Choose Suppress. This is useful when your output will be inserted into another stream.
* @see H265EndOfStreamMarkers
*/
public final H265EndOfStreamMarkers endOfStreamMarkers() {
return H265EndOfStreamMarkers.fromValue(endOfStreamMarkers);
}
/**
* Optionally include or suppress markers at the end of your output that signal the end of the video stream. To
* include end of stream markers: Leave blank or keep the default value, Include. To not include end of stream
* markers: Choose Suppress. This is useful when your output will be inserted into another stream.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #endOfStreamMarkers} will return {@link H265EndOfStreamMarkers#UNKNOWN_TO_SDK_VERSION}. The raw value
* returned by the service is available from {@link #endOfStreamMarkersAsString}.
*
*
* @return Optionally include or suppress markers at the end of your output that signal the end of the video stream.
* To include end of stream markers: Leave blank or keep the default value, Include. To not include end of
* stream markers: Choose Suppress. This is useful when your output will be inserted into another stream.
* @see H265EndOfStreamMarkers
*/
public final String endOfStreamMarkersAsString() {
return endOfStreamMarkers;
}
/**
* Enable this setting to have the encoder reduce I-frame pop. I-frame pop appears as a visual flicker that can
* arise when the encoder saves bits by copying some macroblocks many times from frame to frame, and then refreshes
* them at the I-frame. When you enable this setting, the encoder updates these macroblocks slightly more often to
* smooth out the flicker. This setting is disabled by default. Related setting: In addition to enabling this
* setting, you must also set adaptiveQuantization to a value other than Off.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #flickerAdaptiveQuantization} will return {@link H265FlickerAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}.
* The raw value returned by the service is available from {@link #flickerAdaptiveQuantizationAsString}.
*
*
* @return Enable this setting to have the encoder reduce I-frame pop. I-frame pop appears as a visual flicker that
* can arise when the encoder saves bits by copying some macroblocks many times from frame to frame, and
* then refreshes them at the I-frame. When you enable this setting, the encoder updates these macroblocks
* slightly more often to smooth out the flicker. This setting is disabled by default. Related setting: In
* addition to enabling this setting, you must also set adaptiveQuantization to a value other than Off.
* @see H265FlickerAdaptiveQuantization
*/
public final H265FlickerAdaptiveQuantization flickerAdaptiveQuantization() {
return H265FlickerAdaptiveQuantization.fromValue(flickerAdaptiveQuantization);
}
/**
* Enable this setting to have the encoder reduce I-frame pop. I-frame pop appears as a visual flicker that can
* arise when the encoder saves bits by copying some macroblocks many times from frame to frame, and then refreshes
* them at the I-frame. When you enable this setting, the encoder updates these macroblocks slightly more often to
* smooth out the flicker. This setting is disabled by default. Related setting: In addition to enabling this
* setting, you must also set adaptiveQuantization to a value other than Off.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #flickerAdaptiveQuantization} will return {@link H265FlickerAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}.
* The raw value returned by the service is available from {@link #flickerAdaptiveQuantizationAsString}.
*
*
* @return Enable this setting to have the encoder reduce I-frame pop. I-frame pop appears as a visual flicker that
* can arise when the encoder saves bits by copying some macroblocks many times from frame to frame, and
* then refreshes them at the I-frame. When you enable this setting, the encoder updates these macroblocks
* slightly more often to smooth out the flicker. This setting is disabled by default. Related setting: In
* addition to enabling this setting, you must also set adaptiveQuantization to a value other than Off.
* @see H265FlickerAdaptiveQuantization
*/
public final String flickerAdaptiveQuantizationAsString() {
return flickerAdaptiveQuantization;
}
/**
* Use the Framerate setting to specify the frame rate for this output. If you want to keep the same frame rate as
* the input video, choose Follow source. If you want to do frame rate conversion, choose a frame rate from the
* dropdown list or choose Custom. The framerates shown in the dropdown list are decimal approximations of
* fractions. If you choose Custom, specify your frame rate as a fraction.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #framerateControl}
* will return {@link H265FramerateControl#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #framerateControlAsString}.
*
*
* @return Use the Framerate setting to specify the frame rate for this output. If you want to keep the same frame
* rate as the input video, choose Follow source. If you want to do frame rate conversion, choose a frame
* rate from the dropdown list or choose Custom. The framerates shown in the dropdown list are decimal
* approximations of fractions. If you choose Custom, specify your frame rate as a fraction.
* @see H265FramerateControl
*/
public final H265FramerateControl framerateControl() {
return H265FramerateControl.fromValue(framerateControl);
}
/**
* Use the Framerate setting to specify the frame rate for this output. If you want to keep the same frame rate as
* the input video, choose Follow source. If you want to do frame rate conversion, choose a frame rate from the
* dropdown list or choose Custom. The framerates shown in the dropdown list are decimal approximations of
* fractions. If you choose Custom, specify your frame rate as a fraction.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #framerateControl}
* will return {@link H265FramerateControl#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #framerateControlAsString}.
*
*
* @return Use the Framerate setting to specify the frame rate for this output. If you want to keep the same frame
* rate as the input video, choose Follow source. If you want to do frame rate conversion, choose a frame
* rate from the dropdown list or choose Custom. The framerates shown in the dropdown list are decimal
* approximations of fractions. If you choose Custom, specify your frame rate as a fraction.
* @see H265FramerateControl
*/
public final String framerateControlAsString() {
return framerateControl;
}
/**
* Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For numerically
* simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value, Drop duplicate. For
* numerically complex conversions, to avoid stutter: Choose Interpolate. This results in a smooth picture, but
* might introduce undesirable video artifacts. For complex frame rate conversions, especially if your source video
* has already been converted from its original cadence: Choose FrameFormer to do motion-compensated interpolation.
* FrameFormer uses the best conversion method frame by frame. Note that using FrameFormer increases the transcoding
* time and incurs a significant add-on cost. When you choose FrameFormer, your input video resolution must be at
* least 128x96.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #framerateConversionAlgorithm} will return {@link H265FramerateConversionAlgorithm#UNKNOWN_TO_SDK_VERSION}
* . The raw value returned by the service is available from {@link #framerateConversionAlgorithmAsString}.
*
*
* @return Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For
* numerically simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value,
* Drop duplicate. For numerically complex conversions, to avoid stutter: Choose Interpolate. This results
* in a smooth picture, but might introduce undesirable video artifacts. For complex frame rate conversions,
* especially if your source video has already been converted from its original cadence: Choose FrameFormer
* to do motion-compensated interpolation. FrameFormer uses the best conversion method frame by frame. Note
* that using FrameFormer increases the transcoding time and incurs a significant add-on cost. When you
* choose FrameFormer, your input video resolution must be at least 128x96.
* @see H265FramerateConversionAlgorithm
*/
public final H265FramerateConversionAlgorithm framerateConversionAlgorithm() {
return H265FramerateConversionAlgorithm.fromValue(framerateConversionAlgorithm);
}
/**
* Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For numerically
* simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value, Drop duplicate. For
* numerically complex conversions, to avoid stutter: Choose Interpolate. This results in a smooth picture, but
* might introduce undesirable video artifacts. For complex frame rate conversions, especially if your source video
* has already been converted from its original cadence: Choose FrameFormer to do motion-compensated interpolation.
* FrameFormer uses the best conversion method frame by frame. Note that using FrameFormer increases the transcoding
* time and incurs a significant add-on cost. When you choose FrameFormer, your input video resolution must be at
* least 128x96.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #framerateConversionAlgorithm} will return {@link H265FramerateConversionAlgorithm#UNKNOWN_TO_SDK_VERSION}
* . The raw value returned by the service is available from {@link #framerateConversionAlgorithmAsString}.
*
*
* @return Choose the method that you want MediaConvert to use when increasing or decreasing the frame rate. For
* numerically simple conversions, such as 60 fps to 30 fps: We recommend that you keep the default value,
* Drop duplicate. For numerically complex conversions, to avoid stutter: Choose Interpolate. This results
* in a smooth picture, but might introduce undesirable video artifacts. For complex frame rate conversions,
* especially if your source video has already been converted from its original cadence: Choose FrameFormer
* to do motion-compensated interpolation. FrameFormer uses the best conversion method frame by frame. Note
* that using FrameFormer increases the transcoding time and incurs a significant add-on cost. When you
* choose FrameFormer, your input video resolution must be at least 128x96.
* @see H265FramerateConversionAlgorithm
*/
public final String framerateConversionAlgorithmAsString() {
return framerateConversionAlgorithm;
}
/**
* When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a fraction. For
* example, 24000 / 1001 = 23.976 fps. Use FramerateDenominator to specify the denominator of this fraction. In this
* example, use 1001 for the value of FramerateDenominator. When you use the console for transcode jobs that use
* frame rate conversion, provide the value as a decimal number for Framerate. In this example, specify 23.976.
*
* @return When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a
* fraction. For example, 24000 / 1001 = 23.976 fps. Use FramerateDenominator to specify the denominator of
* this fraction. In this example, use 1001 for the value of FramerateDenominator. When you use the console
* for transcode jobs that use frame rate conversion, provide the value as a decimal number for Framerate.
* In this example, specify 23.976.
*/
public final Integer framerateDenominator() {
return framerateDenominator;
}
/**
* When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a fraction. For
* example, 24000 / 1001 = 23.976 fps. Use FramerateNumerator to specify the numerator of this fraction. In this
* example, use 24000 for the value of FramerateNumerator. When you use the console for transcode jobs that use
* frame rate conversion, provide the value as a decimal number for Framerate. In this example, specify 23.976.
*
* @return When you use the API for transcode jobs that use frame rate conversion, specify the frame rate as a
* fraction. For example, 24000 / 1001 = 23.976 fps. Use FramerateNumerator to specify the numerator of this
* fraction. In this example, use 24000 for the value of FramerateNumerator. When you use the console for
* transcode jobs that use frame rate conversion, provide the value as a decimal number for Framerate. In
* this example, specify 23.976.
*/
public final Integer framerateNumerator() {
return framerateNumerator;
}
/**
* Specify whether to allow B-frames to be referenced by other frame types. To use reference B-frames when your GOP
* structure has 1 or more B-frames: Leave blank or keep the default value Enabled. We recommend that you choose
* Enabled to help improve the video quality of your output relative to its bitrate. To not use reference B-frames:
* Choose Disabled.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #gopBReference}
* will return {@link H265GopBReference#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available
* from {@link #gopBReferenceAsString}.
*
*
* @return Specify whether to allow B-frames to be referenced by other frame types. To use reference B-frames when
* your GOP structure has 1 or more B-frames: Leave blank or keep the default value Enabled. We recommend
* that you choose Enabled to help improve the video quality of your output relative to its bitrate. To not
* use reference B-frames: Choose Disabled.
* @see H265GopBReference
*/
public final H265GopBReference gopBReference() {
return H265GopBReference.fromValue(gopBReference);
}
/**
* Specify whether to allow B-frames to be referenced by other frame types. To use reference B-frames when your GOP
* structure has 1 or more B-frames: Leave blank or keep the default value Enabled. We recommend that you choose
* Enabled to help improve the video quality of your output relative to its bitrate. To not use reference B-frames:
* Choose Disabled.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #gopBReference}
* will return {@link H265GopBReference#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available
* from {@link #gopBReferenceAsString}.
*
*
* @return Specify whether to allow B-frames to be referenced by other frame types. To use reference B-frames when
* your GOP structure has 1 or more B-frames: Leave blank or keep the default value Enabled. We recommend
* that you choose Enabled to help improve the video quality of your output relative to its bitrate. To not
* use reference B-frames: Choose Disabled.
* @see H265GopBReference
*/
public final String gopBReferenceAsString() {
return gopBReference;
}
/**
* Specify the relative frequency of open to closed GOPs in this output. For example, if you want to allow four open
* GOPs and then require a closed GOP, set this value to 5. We recommend that you have the transcoder automatically
* choose this value for you based on characteristics of your input video. To enable this automatic behavior, do
* this by keeping the default empty value. If you do explicitly specify a value, for segmented outputs, don't set
* this value to 0.
*
* @return Specify the relative frequency of open to closed GOPs in this output. For example, if you want to allow
* four open GOPs and then require a closed GOP, set this value to 5. We recommend that you have the
* transcoder automatically choose this value for you based on characteristics of your input video. To
* enable this automatic behavior, do this by keeping the default empty value. If you do explicitly specify
* a value, for segmented outputs, don't set this value to 0.
*/
public final Integer gopClosedCadence() {
return gopClosedCadence;
}
/**
* Use this setting only when you set GOP mode control to Specified, frames or Specified, seconds. Specify the GOP
* length using a whole number of frames or a decimal value of seconds. MediaConvert will interpret this value as
* frames or seconds depending on the value you choose for GOP mode control. If you want to allow MediaConvert to
* automatically determine GOP size, leave GOP size blank and set GOP mode control to Auto. If your output group
* specifies HLS, DASH, or CMAF, leave GOP size blank and set GOP mode control to Auto in each output in your output
* group.
*
* @return Use this setting only when you set GOP mode control to Specified, frames or Specified, seconds. Specify
* the GOP length using a whole number of frames or a decimal value of seconds. MediaConvert will interpret
* this value as frames or seconds depending on the value you choose for GOP mode control. If you want to
* allow MediaConvert to automatically determine GOP size, leave GOP size blank and set GOP mode control to
* Auto. If your output group specifies HLS, DASH, or CMAF, leave GOP size blank and set GOP mode control to
* Auto in each output in your output group.
*/
public final Double gopSize() {
return gopSize;
}
/**
* Specify how the transcoder determines GOP size for this output. We recommend that you have the transcoder
* automatically choose this value for you based on characteristics of your input video. To enable this automatic
* behavior, choose Auto and and leave GOP size blank. By default, if you don't specify GOP mode control,
* MediaConvert will use automatic behavior. If your output group specifies HLS, DASH, or CMAF, set GOP mode control
* to Auto and leave GOP size blank in each output in your output group. To explicitly specify the GOP length,
* choose Specified, frames or Specified, seconds and then provide the GOP length in the related setting GOP size.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #gopSizeUnits} will
* return {@link H265GopSizeUnits#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
* {@link #gopSizeUnitsAsString}.
*
*
* @return Specify how the transcoder determines GOP size for this output. We recommend that you have the transcoder
* automatically choose this value for you based on characteristics of your input video. To enable this
* automatic behavior, choose Auto and and leave GOP size blank. By default, if you don't specify GOP mode
* control, MediaConvert will use automatic behavior. If your output group specifies HLS, DASH, or CMAF, set
* GOP mode control to Auto and leave GOP size blank in each output in your output group. To explicitly
* specify the GOP length, choose Specified, frames or Specified, seconds and then provide the GOP length in
* the related setting GOP size.
* @see H265GopSizeUnits
*/
public final H265GopSizeUnits gopSizeUnits() {
return H265GopSizeUnits.fromValue(gopSizeUnits);
}
/**
* Specify how the transcoder determines GOP size for this output. We recommend that you have the transcoder
* automatically choose this value for you based on characteristics of your input video. To enable this automatic
* behavior, choose Auto and and leave GOP size blank. By default, if you don't specify GOP mode control,
* MediaConvert will use automatic behavior. If your output group specifies HLS, DASH, or CMAF, set GOP mode control
* to Auto and leave GOP size blank in each output in your output group. To explicitly specify the GOP length,
* choose Specified, frames or Specified, seconds and then provide the GOP length in the related setting GOP size.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #gopSizeUnits} will
* return {@link H265GopSizeUnits#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
* {@link #gopSizeUnitsAsString}.
*
*
* @return Specify how the transcoder determines GOP size for this output. We recommend that you have the transcoder
* automatically choose this value for you based on characteristics of your input video. To enable this
* automatic behavior, choose Auto and and leave GOP size blank. By default, if you don't specify GOP mode
* control, MediaConvert will use automatic behavior. If your output group specifies HLS, DASH, or CMAF, set
* GOP mode control to Auto and leave GOP size blank in each output in your output group. To explicitly
* specify the GOP length, choose Specified, frames or Specified, seconds and then provide the GOP length in
* the related setting GOP size.
* @see H265GopSizeUnits
*/
public final String gopSizeUnitsAsString() {
return gopSizeUnits;
}
/**
* If your downstream systems have strict buffer requirements: Specify the minimum percentage of the HRD buffer
* that's available at the end of each encoded video segment. For the best video quality: Set to 0 or leave blank to
* automatically determine the final buffer fill percentage.
*
* @return If your downstream systems have strict buffer requirements: Specify the minimum percentage of the HRD
* buffer that's available at the end of each encoded video segment. For the best video quality: Set to 0 or
* leave blank to automatically determine the final buffer fill percentage.
*/
public final Integer hrdBufferFinalFillPercentage() {
return hrdBufferFinalFillPercentage;
}
/**
* Percentage of the buffer that should initially be filled (HRD buffer model).
*
* @return Percentage of the buffer that should initially be filled (HRD buffer model).
*/
public final Integer hrdBufferInitialFillPercentage() {
return hrdBufferInitialFillPercentage;
}
/**
* Size of buffer (HRD buffer model) in bits. For example, enter five megabits as 5000000.
*
* @return Size of buffer (HRD buffer model) in bits. For example, enter five megabits as 5000000.
*/
public final Integer hrdBufferSize() {
return hrdBufferSize;
}
/**
* Choose the scan line type for the output. Keep the default value, Progressive to create a progressive output,
* regardless of the scan type of your input. Use Top field first or Bottom field first to create an output that's
* interlaced with the same field polarity throughout. Use Follow, default top or Follow, default bottom to produce
* outputs with the same field polarity as the source. For jobs that have multiple inputs, the output field polarity
* might change over the course of the output. Follow behavior depends on the input scan type. If the source is
* interlaced, the output will be interlaced with the same polarity as the source. If the source is progressive, the
* output will be interlaced with top field bottom field first, depending on which of the Follow options you choose.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #interlaceMode}
* will return {@link H265InterlaceMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available
* from {@link #interlaceModeAsString}.
*
*
* @return Choose the scan line type for the output. Keep the default value, Progressive to create a progressive
* output, regardless of the scan type of your input. Use Top field first or Bottom field first to create an
* output that's interlaced with the same field polarity throughout. Use Follow, default top or Follow,
* default bottom to produce outputs with the same field polarity as the source. For jobs that have multiple
* inputs, the output field polarity might change over the course of the output. Follow behavior depends on
* the input scan type. If the source is interlaced, the output will be interlaced with the same polarity as
* the source. If the source is progressive, the output will be interlaced with top field bottom field
* first, depending on which of the Follow options you choose.
* @see H265InterlaceMode
*/
public final H265InterlaceMode interlaceMode() {
return H265InterlaceMode.fromValue(interlaceMode);
}
/**
* Choose the scan line type for the output. Keep the default value, Progressive to create a progressive output,
* regardless of the scan type of your input. Use Top field first or Bottom field first to create an output that's
* interlaced with the same field polarity throughout. Use Follow, default top or Follow, default bottom to produce
* outputs with the same field polarity as the source. For jobs that have multiple inputs, the output field polarity
* might change over the course of the output. Follow behavior depends on the input scan type. If the source is
* interlaced, the output will be interlaced with the same polarity as the source. If the source is progressive, the
* output will be interlaced with top field bottom field first, depending on which of the Follow options you choose.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #interlaceMode}
* will return {@link H265InterlaceMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available
* from {@link #interlaceModeAsString}.
*
*
* @return Choose the scan line type for the output. Keep the default value, Progressive to create a progressive
* output, regardless of the scan type of your input. Use Top field first or Bottom field first to create an
* output that's interlaced with the same field polarity throughout. Use Follow, default top or Follow,
* default bottom to produce outputs with the same field polarity as the source. For jobs that have multiple
* inputs, the output field polarity might change over the course of the output. Follow behavior depends on
* the input scan type. If the source is interlaced, the output will be interlaced with the same polarity as
* the source. If the source is progressive, the output will be interlaced with top field bottom field
* first, depending on which of the Follow options you choose.
* @see H265InterlaceMode
*/
public final String interlaceModeAsString() {
return interlaceMode;
}
/**
* Maximum bitrate in bits/second. For example, enter five megabits per second as 5000000. Required when Rate
* control mode is QVBR.
*
* @return Maximum bitrate in bits/second. For example, enter five megabits per second as 5000000. Required when
* Rate control mode is QVBR.
*/
public final Integer maxBitrate() {
return maxBitrate;
}
/**
* Specify the minimum number of frames allowed between two IDR-frames in your output. This includes frames created
* at the start of a GOP or a scene change. Use Min I-Interval to improve video compression by varying GOP size when
* two IDR-frames would be created near each other. For example, if a regular cadence-driven IDR-frame would fall
* within 5 frames of a scene-change IDR-frame, and you set Min I-interval to 5, then the encoder would only write
* an IDR-frame for the scene-change. In this way, one GOP is shortened or extended. If a cadence-driven IDR-frame
* would be further than 5 frames from a scene-change IDR-frame, then the encoder leaves all IDR-frames in place. To
* use an automatically determined interval: We recommend that you keep this value blank. This allows for
* MediaConvert to use an optimal setting according to the characteristics of your input video, and results in
* better video compression. To manually specify an interval: Enter a value from 1 to 30. Use when your downstream
* systems have specific GOP size requirements. To disable GOP size variance: Enter 0. MediaConvert will only create
* IDR-frames at the start of your output's cadence-driven GOP. Use when your downstream systems require a regular
* GOP size.
*
* @return Specify the minimum number of frames allowed between two IDR-frames in your output. This includes frames
* created at the start of a GOP or a scene change. Use Min I-Interval to improve video compression by
* varying GOP size when two IDR-frames would be created near each other. For example, if a regular
* cadence-driven IDR-frame would fall within 5 frames of a scene-change IDR-frame, and you set Min
* I-interval to 5, then the encoder would only write an IDR-frame for the scene-change. In this way, one
* GOP is shortened or extended. If a cadence-driven IDR-frame would be further than 5 frames from a
* scene-change IDR-frame, then the encoder leaves all IDR-frames in place. To use an automatically
* determined interval: We recommend that you keep this value blank. This allows for MediaConvert to use an
* optimal setting according to the characteristics of your input video, and results in better video
* compression. To manually specify an interval: Enter a value from 1 to 30. Use when your downstream
* systems have specific GOP size requirements. To disable GOP size variance: Enter 0. MediaConvert will
* only create IDR-frames at the start of your output's cadence-driven GOP. Use when your downstream systems
* require a regular GOP size.
*/
public final Integer minIInterval() {
return minIInterval;
}
/**
* Specify the number of B-frames between reference frames in this output. For the best video quality: Leave blank.
* MediaConvert automatically determines the number of B-frames to use based on the characteristics of your input
* video. To manually specify the number of B-frames between reference frames: Enter an integer from 0 to 7.
*
* @return Specify the number of B-frames between reference frames in this output. For the best video quality: Leave
* blank. MediaConvert automatically determines the number of B-frames to use based on the characteristics
* of your input video. To manually specify the number of B-frames between reference frames: Enter an
* integer from 0 to 7.
*/
public final Integer numberBFramesBetweenReferenceFrames() {
return numberBFramesBetweenReferenceFrames;
}
/**
* Number of reference frames to use. The encoder may use more than requested if using B-frames and/or interlaced
* encoding.
*
* @return Number of reference frames to use. The encoder may use more than requested if using B-frames and/or
* interlaced encoding.
*/
public final Integer numberReferenceFrames() {
return numberReferenceFrames;
}
/**
* Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default behavior,
* Follow source, uses the PAR from your input video for your output. To specify a different PAR, choose any value
* other than Follow source. When you choose SPECIFIED for this setting, you must also specify values for the
* parNumerator and parDenominator settings.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #parControl} will
* return {@link H265ParControl#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
* {@link #parControlAsString}.
*
*
* @return Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default
* behavior, Follow source, uses the PAR from your input video for your output. To specify a different PAR,
* choose any value other than Follow source. When you choose SPECIFIED for this setting, you must also
* specify values for the parNumerator and parDenominator settings.
* @see H265ParControl
*/
public final H265ParControl parControl() {
return H265ParControl.fromValue(parControl);
}
/**
* Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default behavior,
* Follow source, uses the PAR from your input video for your output. To specify a different PAR, choose any value
* other than Follow source. When you choose SPECIFIED for this setting, you must also specify values for the
* parNumerator and parDenominator settings.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #parControl} will
* return {@link H265ParControl#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
* {@link #parControlAsString}.
*
*
* @return Optional. Specify how the service determines the pixel aspect ratio (PAR) for this output. The default
* behavior, Follow source, uses the PAR from your input video for your output. To specify a different PAR,
* choose any value other than Follow source. When you choose SPECIFIED for this setting, you must also
* specify values for the parNumerator and parDenominator settings.
* @see H265ParControl
*/
public final String parControlAsString() {
return parControl;
}
/**
* Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value other than
* Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your input video PAR,
* provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would specify the ratio 40:33. In
* this example, the value for parDenominator is 33.
*
* @return Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value
* other than Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your
* input video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would
* specify the ratio 40:33. In this example, the value for parDenominator is 33.
*/
public final Integer parDenominator() {
return parDenominator;
}
/**
* Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value other than
* Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your input video PAR,
* provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would specify the ratio 40:33. In
* this example, the value for parNumerator is 40.
*
* @return Required when you set Pixel aspect ratio to SPECIFIED. On the console, this corresponds to any value
* other than Follow source. When you specify an output pixel aspect ratio (PAR) that is different from your
* input video PAR, provide your output PAR as a ratio. For example, for D1/DV NTSC widescreen, you would
* specify the ratio 40:33. In this example, the value for parNumerator is 40.
*/
public final Integer parNumerator() {
return parNumerator;
}
/**
* Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video quality.
* The default behavior is faster, lower quality, single-pass encoding.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #qualityTuningLevel} will return {@link H265QualityTuningLevel#UNKNOWN_TO_SDK_VERSION}. The raw value
* returned by the service is available from {@link #qualityTuningLevelAsString}.
*
*
* @return Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video
* quality. The default behavior is faster, lower quality, single-pass encoding.
* @see H265QualityTuningLevel
*/
public final H265QualityTuningLevel qualityTuningLevel() {
return H265QualityTuningLevel.fromValue(qualityTuningLevel);
}
/**
* Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video quality.
* The default behavior is faster, lower quality, single-pass encoding.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #qualityTuningLevel} will return {@link H265QualityTuningLevel#UNKNOWN_TO_SDK_VERSION}. The raw value
* returned by the service is available from {@link #qualityTuningLevelAsString}.
*
*
* @return Optional. Use Quality tuning level to choose how you want to trade off encoding speed for output video
* quality. The default behavior is faster, lower quality, single-pass encoding.
* @see H265QualityTuningLevel
*/
public final String qualityTuningLevelAsString() {
return qualityTuningLevel;
}
/**
* Settings for quality-defined variable bitrate encoding with the H.265 codec. Use these settings only when you set
* QVBR for Rate control mode.
*
* @return Settings for quality-defined variable bitrate encoding with the H.265 codec. Use these settings only when
* you set QVBR for Rate control mode.
*/
public final H265QvbrSettings qvbrSettings() {
return qvbrSettings;
}
/**
* Use this setting to specify whether this output has a variable bitrate (VBR), constant bitrate (CBR) or
* quality-defined variable bitrate (QVBR).
*
* If the service returns an enum value that is not available in the current SDK version, {@link #rateControlMode}
* will return {@link H265RateControlMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #rateControlModeAsString}.
*
*
* @return Use this setting to specify whether this output has a variable bitrate (VBR), constant bitrate (CBR) or
* quality-defined variable bitrate (QVBR).
* @see H265RateControlMode
*/
public final H265RateControlMode rateControlMode() {
return H265RateControlMode.fromValue(rateControlMode);
}
/**
* Use this setting to specify whether this output has a variable bitrate (VBR), constant bitrate (CBR) or
* quality-defined variable bitrate (QVBR).
*
* If the service returns an enum value that is not available in the current SDK version, {@link #rateControlMode}
* will return {@link H265RateControlMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #rateControlModeAsString}.
*
*
* @return Use this setting to specify whether this output has a variable bitrate (VBR), constant bitrate (CBR) or
* quality-defined variable bitrate (QVBR).
* @see H265RateControlMode
*/
public final String rateControlModeAsString() {
return rateControlMode;
}
/**
* Specify Sample Adaptive Offset (SAO) filter strength. Adaptive mode dynamically selects best strength based on
* content
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #sampleAdaptiveOffsetFilterMode} will return
* {@link H265SampleAdaptiveOffsetFilterMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #sampleAdaptiveOffsetFilterModeAsString}.
*
*
* @return Specify Sample Adaptive Offset (SAO) filter strength. Adaptive mode dynamically selects best strength
* based on content
* @see H265SampleAdaptiveOffsetFilterMode
*/
public final H265SampleAdaptiveOffsetFilterMode sampleAdaptiveOffsetFilterMode() {
return H265SampleAdaptiveOffsetFilterMode.fromValue(sampleAdaptiveOffsetFilterMode);
}
/**
* Specify Sample Adaptive Offset (SAO) filter strength. Adaptive mode dynamically selects best strength based on
* content
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #sampleAdaptiveOffsetFilterMode} will return
* {@link H265SampleAdaptiveOffsetFilterMode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #sampleAdaptiveOffsetFilterModeAsString}.
*
*
* @return Specify Sample Adaptive Offset (SAO) filter strength. Adaptive mode dynamically selects best strength
* based on content
* @see H265SampleAdaptiveOffsetFilterMode
*/
public final String sampleAdaptiveOffsetFilterModeAsString() {
return sampleAdaptiveOffsetFilterMode;
}
/**
* Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In this
* situation, choose Optimized interlacing to create a better quality interlaced output. In this case, each
* progressive frame from the input corresponds to an interlaced field in the output. Keep the default value, Basic
* interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs any frame rate
* conversion first and then interlaces the frames. When you choose Optimized interlacing and you set your output
* frame rate to a value that isn't suitable for optimized interlacing, MediaConvert automatically falls back to
* basic interlacing. Required settings: To use optimized interlacing, you must set Telecine to None or Soft. You
* can't use optimized interlacing for hard telecine outputs. You must also set Interlace mode to a value other than
* Progressive.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #scanTypeConversionMode} will return {@link H265ScanTypeConversionMode#UNKNOWN_TO_SDK_VERSION}. The raw
* value returned by the service is available from {@link #scanTypeConversionModeAsString}.
*
*
* @return Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In
* this situation, choose Optimized interlacing to create a better quality interlaced output. In this case,
* each progressive frame from the input corresponds to an interlaced field in the output. Keep the default
* value, Basic interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs
* any frame rate conversion first and then interlaces the frames. When you choose Optimized interlacing and
* you set your output frame rate to a value that isn't suitable for optimized interlacing, MediaConvert
* automatically falls back to basic interlacing. Required settings: To use optimized interlacing, you must
* set Telecine to None or Soft. You can't use optimized interlacing for hard telecine outputs. You must
* also set Interlace mode to a value other than Progressive.
* @see H265ScanTypeConversionMode
*/
public final H265ScanTypeConversionMode scanTypeConversionMode() {
return H265ScanTypeConversionMode.fromValue(scanTypeConversionMode);
}
/**
* Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In this
* situation, choose Optimized interlacing to create a better quality interlaced output. In this case, each
* progressive frame from the input corresponds to an interlaced field in the output. Keep the default value, Basic
* interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs any frame rate
* conversion first and then interlaces the frames. When you choose Optimized interlacing and you set your output
* frame rate to a value that isn't suitable for optimized interlacing, MediaConvert automatically falls back to
* basic interlacing. Required settings: To use optimized interlacing, you must set Telecine to None or Soft. You
* can't use optimized interlacing for hard telecine outputs. You must also set Interlace mode to a value other than
* Progressive.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #scanTypeConversionMode} will return {@link H265ScanTypeConversionMode#UNKNOWN_TO_SDK_VERSION}. The raw
* value returned by the service is available from {@link #scanTypeConversionModeAsString}.
*
*
* @return Use this setting for interlaced outputs, when your output frame rate is half of your input frame rate. In
* this situation, choose Optimized interlacing to create a better quality interlaced output. In this case,
* each progressive frame from the input corresponds to an interlaced field in the output. Keep the default
* value, Basic interlacing, for all other output frame rates. With basic interlacing, MediaConvert performs
* any frame rate conversion first and then interlaces the frames. When you choose Optimized interlacing and
* you set your output frame rate to a value that isn't suitable for optimized interlacing, MediaConvert
* automatically falls back to basic interlacing. Required settings: To use optimized interlacing, you must
* set Telecine to None or Soft. You can't use optimized interlacing for hard telecine outputs. You must
* also set Interlace mode to a value other than Progressive.
* @see H265ScanTypeConversionMode
*/
public final String scanTypeConversionModeAsString() {
return scanTypeConversionMode;
}
/**
* Enable this setting to insert I-frames at scene changes that the service automatically detects. This improves
* video quality and is enabled by default. If this output uses QVBR, choose Transition detection for further video
* quality improvement. For more information about QVBR, see
* https://docs.aws.amazon.com/console/mediaconvert/cbr-vbr-qvbr.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #sceneChangeDetect}
* will return {@link H265SceneChangeDetect#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #sceneChangeDetectAsString}.
*
*
* @return Enable this setting to insert I-frames at scene changes that the service automatically detects. This
* improves video quality and is enabled by default. If this output uses QVBR, choose Transition detection
* for further video quality improvement. For more information about QVBR, see
* https://docs.aws.amazon.com/console/mediaconvert/cbr-vbr-qvbr.
* @see H265SceneChangeDetect
*/
public final H265SceneChangeDetect sceneChangeDetect() {
return H265SceneChangeDetect.fromValue(sceneChangeDetect);
}
/**
* Enable this setting to insert I-frames at scene changes that the service automatically detects. This improves
* video quality and is enabled by default. If this output uses QVBR, choose Transition detection for further video
* quality improvement. For more information about QVBR, see
* https://docs.aws.amazon.com/console/mediaconvert/cbr-vbr-qvbr.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #sceneChangeDetect}
* will return {@link H265SceneChangeDetect#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is
* available from {@link #sceneChangeDetectAsString}.
*
*
* @return Enable this setting to insert I-frames at scene changes that the service automatically detects. This
* improves video quality and is enabled by default. If this output uses QVBR, choose Transition detection
* for further video quality improvement. For more information about QVBR, see
* https://docs.aws.amazon.com/console/mediaconvert/cbr-vbr-qvbr.
* @see H265SceneChangeDetect
*/
public final String sceneChangeDetectAsString() {
return sceneChangeDetect;
}
/**
* Number of slices per picture. Must be less than or equal to the number of macroblock rows for progressive
* pictures, and less than or equal to half the number of macroblock rows for interlaced pictures.
*
* @return Number of slices per picture. Must be less than or equal to the number of macroblock rows for progressive
* pictures, and less than or equal to half the number of macroblock rows for interlaced pictures.
*/
public final Integer slices() {
return slices;
}
/**
* Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL to
* create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
* your audio to keep it synchronized with the video. Note that enabling this setting will slightly reduce the
* duration of your video. Required settings: You must also set Framerate to 25.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #slowPal} will
* return {@link H265SlowPal#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
* {@link #slowPalAsString}.
*
*
* @return Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL
* to create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and
* resamples your audio to keep it synchronized with the video. Note that enabling this setting will
* slightly reduce the duration of your video. Required settings: You must also set Framerate to 25.
* @see H265SlowPal
*/
public final H265SlowPal slowPal() {
return H265SlowPal.fromValue(slowPal);
}
/**
* Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL to
* create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and resamples
* your audio to keep it synchronized with the video. Note that enabling this setting will slightly reduce the
* duration of your video. Required settings: You must also set Framerate to 25.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #slowPal} will
* return {@link H265SlowPal#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
* {@link #slowPalAsString}.
*
*
* @return Ignore this setting unless your input frame rate is 23.976 or 24 frames per second (fps). Enable slow PAL
* to create a 25 fps output. When you enable slow PAL, MediaConvert relabels the video frames to 25 fps and
* resamples your audio to keep it synchronized with the video. Note that enabling this setting will
* slightly reduce the duration of your video. Required settings: You must also set Framerate to 25.
* @see H265SlowPal
*/
public final String slowPalAsString() {
return slowPal;
}
/**
* Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of content
* complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain more distortion
* with no noticeable visual degradation and uses more bits on areas where any small distortion will be noticeable.
* For example, complex textured blocks are encoded with fewer bits and smooth textured blocks are encoded with more
* bits. Enabling this feature will almost always improve your video quality. Note, though, that this feature
* doesn't take into account where the viewer's attention is likely to be. If viewers are likely to be focusing
* their attention on a part of the screen with a lot of complex texture, you might choose to disable this feature.
* Related setting: When you enable spatial adaptive quantization, set the value for Adaptive quantization depending
* on your content. For homogeneous content, such as cartoons and video games, set it to Low. For content with a
* wider variety of textures, set it to High or Higher.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #spatialAdaptiveQuantization} will return {@link H265SpatialAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}.
* The raw value returned by the service is available from {@link #spatialAdaptiveQuantizationAsString}.
*
*
* @return Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of
* content complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain
* more distortion with no noticeable visual degradation and uses more bits on areas where any small
* distortion will be noticeable. For example, complex textured blocks are encoded with fewer bits and
* smooth textured blocks are encoded with more bits. Enabling this feature will almost always improve your
* video quality. Note, though, that this feature doesn't take into account where the viewer's attention is
* likely to be. If viewers are likely to be focusing their attention on a part of the screen with a lot of
* complex texture, you might choose to disable this feature. Related setting: When you enable spatial
* adaptive quantization, set the value for Adaptive quantization depending on your content. For homogeneous
* content, such as cartoons and video games, set it to Low. For content with a wider variety of textures,
* set it to High or Higher.
* @see H265SpatialAdaptiveQuantization
*/
public final H265SpatialAdaptiveQuantization spatialAdaptiveQuantization() {
return H265SpatialAdaptiveQuantization.fromValue(spatialAdaptiveQuantization);
}
/**
* Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of content
* complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain more distortion
* with no noticeable visual degradation and uses more bits on areas where any small distortion will be noticeable.
* For example, complex textured blocks are encoded with fewer bits and smooth textured blocks are encoded with more
* bits. Enabling this feature will almost always improve your video quality. Note, though, that this feature
* doesn't take into account where the viewer's attention is likely to be. If viewers are likely to be focusing
* their attention on a part of the screen with a lot of complex texture, you might choose to disable this feature.
* Related setting: When you enable spatial adaptive quantization, set the value for Adaptive quantization depending
* on your content. For homogeneous content, such as cartoons and video games, set it to Low. For content with a
* wider variety of textures, set it to High or Higher.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #spatialAdaptiveQuantization} will return {@link H265SpatialAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}.
* The raw value returned by the service is available from {@link #spatialAdaptiveQuantizationAsString}.
*
*
* @return Keep the default value, Enabled, to adjust quantization within each frame based on spatial variation of
* content complexity. When you enable this feature, the encoder uses fewer bits on areas that can sustain
* more distortion with no noticeable visual degradation and uses more bits on areas where any small
* distortion will be noticeable. For example, complex textured blocks are encoded with fewer bits and
* smooth textured blocks are encoded with more bits. Enabling this feature will almost always improve your
* video quality. Note, though, that this feature doesn't take into account where the viewer's attention is
* likely to be. If viewers are likely to be focusing their attention on a part of the screen with a lot of
* complex texture, you might choose to disable this feature. Related setting: When you enable spatial
* adaptive quantization, set the value for Adaptive quantization depending on your content. For homogeneous
* content, such as cartoons and video games, set it to Low. For content with a wider variety of textures,
* set it to High or Higher.
* @see H265SpatialAdaptiveQuantization
*/
public final String spatialAdaptiveQuantizationAsString() {
return spatialAdaptiveQuantization;
}
/**
* This field applies only if the Streams > Advanced > Framerate field is set to 29.970. This field works with the
* Streams > Advanced > Preprocessors > Deinterlacer field and the Streams > Advanced > Interlaced Mode field to
* identify the scan type for the output: Progressive, Interlaced, Hard Telecine or Soft Telecine. - Hard: produces
* 29.97i output from 23.976 input. - Soft: produces 23.976; the player converts this output to 29.97i.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #telecine} will
* return {@link H265Telecine#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
* {@link #telecineAsString}.
*
*
* @return This field applies only if the Streams > Advanced > Framerate field is set to 29.970. This field works
* with the Streams > Advanced > Preprocessors > Deinterlacer field and the Streams > Advanced > Interlaced
* Mode field to identify the scan type for the output: Progressive, Interlaced, Hard Telecine or Soft
* Telecine. - Hard: produces 29.97i output from 23.976 input. - Soft: produces 23.976; the player converts
* this output to 29.97i.
* @see H265Telecine
*/
public final H265Telecine telecine() {
return H265Telecine.fromValue(telecine);
}
/**
* This field applies only if the Streams > Advanced > Framerate field is set to 29.970. This field works with the
* Streams > Advanced > Preprocessors > Deinterlacer field and the Streams > Advanced > Interlaced Mode field to
* identify the scan type for the output: Progressive, Interlaced, Hard Telecine or Soft Telecine. - Hard: produces
* 29.97i output from 23.976 input. - Soft: produces 23.976; the player converts this output to 29.97i.
*
* If the service returns an enum value that is not available in the current SDK version, {@link #telecine} will
* return {@link H265Telecine#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
* {@link #telecineAsString}.
*
*
* @return This field applies only if the Streams > Advanced > Framerate field is set to 29.970. This field works
* with the Streams > Advanced > Preprocessors > Deinterlacer field and the Streams > Advanced > Interlaced
* Mode field to identify the scan type for the output: Progressive, Interlaced, Hard Telecine or Soft
* Telecine. - Hard: produces 29.97i output from 23.976 input. - Soft: produces 23.976; the player converts
* this output to 29.97i.
* @see H265Telecine
*/
public final String telecineAsString() {
return telecine;
}
/**
* Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of content
* complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that aren't moving
* and uses more bits on complex objects with sharp edges that move a lot. For example, this feature improves the
* readability of text tickers on newscasts and scoreboards on sports matches. Enabling this feature will almost
* always improve your video quality. Note, though, that this feature doesn't take into account where the viewer's
* attention is likely to be. If viewers are likely to be focusing their attention on a part of the screen that
* doesn't have moving objects with sharp edges, such as sports athletes' faces, you might choose to disable this
* feature. Related setting: When you enable temporal quantization, adjust the strength of the filter with the
* setting Adaptive quantization.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #temporalAdaptiveQuantization} will return {@link H265TemporalAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}
* . The raw value returned by the service is available from {@link #temporalAdaptiveQuantizationAsString}.
*
*
* @return Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of
* content complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that
* aren't moving and uses more bits on complex objects with sharp edges that move a lot. For example, this
* feature improves the readability of text tickers on newscasts and scoreboards on sports matches. Enabling
* this feature will almost always improve your video quality. Note, though, that this feature doesn't take
* into account where the viewer's attention is likely to be. If viewers are likely to be focusing their
* attention on a part of the screen that doesn't have moving objects with sharp edges, such as sports
* athletes' faces, you might choose to disable this feature. Related setting: When you enable temporal
* quantization, adjust the strength of the filter with the setting Adaptive quantization.
* @see H265TemporalAdaptiveQuantization
*/
public final H265TemporalAdaptiveQuantization temporalAdaptiveQuantization() {
return H265TemporalAdaptiveQuantization.fromValue(temporalAdaptiveQuantization);
}
/**
* Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of content
* complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that aren't moving
* and uses more bits on complex objects with sharp edges that move a lot. For example, this feature improves the
* readability of text tickers on newscasts and scoreboards on sports matches. Enabling this feature will almost
* always improve your video quality. Note, though, that this feature doesn't take into account where the viewer's
* attention is likely to be. If viewers are likely to be focusing their attention on a part of the screen that
* doesn't have moving objects with sharp edges, such as sports athletes' faces, you might choose to disable this
* feature. Related setting: When you enable temporal quantization, adjust the strength of the filter with the
* setting Adaptive quantization.
*
* If the service returns an enum value that is not available in the current SDK version,
* {@link #temporalAdaptiveQuantization} will return {@link H265TemporalAdaptiveQuantization#UNKNOWN_TO_SDK_VERSION}
* . The raw value returned by the service is available from {@link #temporalAdaptiveQuantizationAsString}.
*
*
* @return Keep the default value, Enabled, to adjust quantization within each frame based on temporal variation of
* content complexity. When you enable this feature, the encoder uses fewer bits on areas of the frame that
* aren't moving and uses more bits on complex objects with sharp edges that move a lot. For example, this
* feature improves the readability of text tickers on newscasts and scoreboards on sports matches. Enabling
* this feature will almost always improve your video quality. Note, though, that this feature doesn't take
* into account where the viewer's attention is likely to be. If viewers are likely to be focusing their
* attention on a part of the screen that doesn't have moving objects with sharp edges, such as sports
* athletes' faces, you might choose to disable this feature. Related setting: When you enable temporal
* quantization, adjust the strength of the filter with the setting Adaptive quantization.
* @see H265TemporalAdaptiveQuantization
*/
public final String temporalAdaptiveQuantizationAsString() {
    // Raw service string accessor: values unknown to this SDK version are preserved as-is.
    return this.temporalAdaptiveQuantization;
}
/**
 * Enables temporal layer identifiers in the encoded bitstream. Up to 3 layers are supported depending on GOP
 * structure: I- and P-frames form one layer, reference B-frames can form a second layer and non-reference b-frames
 * can form a third layer. Decoders can optionally decode only the lower temporal layers to generate a lower frame
 * rate output. For example, given a bitstream with temporal IDs and with b-frames = 1 (i.e. IbPbPb display order),
 * a decoder could decode all the frames for full frame rate output or only the I and P frames (lowest temporal
 * layer) for a half frame rate output.
 *
 * If the service returns an enum value that is not available in the current SDK version, this method returns
 * {@link H265TemporalIds#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
 * {@link #temporalIdsAsString}.
 *
 * @return the temporal-IDs setting as an {@link H265TemporalIds} enum constant
 * @see H265TemporalIds
 */
public final H265TemporalIds temporalIds() {
    String rawValue = this.temporalIds;
    return H265TemporalIds.fromValue(rawValue);
}
/**
 * Enables temporal layer identifiers in the encoded bitstream. Up to 3 layers are supported depending on GOP
 * structure: I- and P-frames form one layer, reference B-frames can form a second layer and non-reference b-frames
 * can form a third layer. Decoders can optionally decode only the lower temporal layers to generate a lower frame
 * rate output.
 *
 * Unlike {@link #temporalIds}, this accessor returns the raw string sent by the service, so enum values unknown
 * to the current SDK version are preserved rather than mapped to
 * {@link H265TemporalIds#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the temporal-IDs setting as its raw service string
 * @see H265TemporalIds
 */
public final String temporalIdsAsString() {
    return this.temporalIds;
}
/**
 * Enable use of tiles, allowing horizontal as well as vertical subdivision of the encoded pictures.
 *
 * If the service returns an enum value that is not available in the current SDK version, this method returns
 * {@link H265Tiles#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available from
 * {@link #tilesAsString}.
 *
 * @return the tiles setting as an {@link H265Tiles} enum constant
 * @see H265Tiles
 */
public final H265Tiles tiles() {
    String rawValue = this.tiles;
    return H265Tiles.fromValue(rawValue);
}
/**
 * Enable use of tiles, allowing horizontal as well as vertical subdivision of the encoded pictures.
 *
 * Unlike {@link #tiles}, this accessor returns the raw string sent by the service, so enum values unknown to the
 * current SDK version are preserved rather than mapped to {@link H265Tiles#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the tiles setting as its raw service string
 * @see H265Tiles
 */
public final String tilesAsString() {
    return this.tiles;
}
/**
 * Inserts timecode for each frame as 4 bytes of an unregistered SEI message.
 *
 * If the service returns an enum value that is not available in the current SDK version, this method returns
 * {@link H265UnregisteredSeiTimecode#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available
 * from {@link #unregisteredSeiTimecodeAsString}.
 *
 * @return the unregistered-SEI-timecode setting as an {@link H265UnregisteredSeiTimecode} enum constant
 * @see H265UnregisteredSeiTimecode
 */
public final H265UnregisteredSeiTimecode unregisteredSeiTimecode() {
    String rawValue = this.unregisteredSeiTimecode;
    return H265UnregisteredSeiTimecode.fromValue(rawValue);
}
/**
 * Inserts timecode for each frame as 4 bytes of an unregistered SEI message.
 *
 * Unlike {@link #unregisteredSeiTimecode}, this accessor returns the raw string sent by the service, so enum
 * values unknown to the current SDK version are preserved rather than mapped to
 * {@link H265UnregisteredSeiTimecode#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the unregistered-SEI-timecode setting as its raw service string
 * @see H265UnregisteredSeiTimecode
 */
public final String unregisteredSeiTimecodeAsString() {
    return this.unregisteredSeiTimecode;
}
/**
 * If the location of parameter set NAL units doesn't matter in your workflow, ignore this setting. Use this
 * setting only with CMAF or DASH outputs, or with standalone file outputs in an MPEG-4 container (MP4 outputs).
 * Choose HVC1 to mark your output as HVC1. This makes your output compliant with the following specification:
 * ISO IECJTC1 SC29 N13798 Text ISO/IEC FDIS 14496-15 3rd Edition. For these outputs, the service stores parameter
 * set NAL units in the sample headers but not in the samples directly. For MP4 outputs, when you choose HVC1,
 * your output video might not work properly with some downstream systems and video players. The service defaults
 * to marking your output as HEV1. For these outputs, the service writes parameter set NAL units directly into the
 * samples.
 *
 * If the service returns an enum value that is not available in the current SDK version, this method returns
 * {@link H265WriteMp4PackagingType#UNKNOWN_TO_SDK_VERSION}. The raw value returned by the service is available
 * from {@link #writeMp4PackagingTypeAsString}.
 *
 * @return the MP4 packaging type as an {@link H265WriteMp4PackagingType} enum constant
 * @see H265WriteMp4PackagingType
 */
public final H265WriteMp4PackagingType writeMp4PackagingType() {
    String rawValue = this.writeMp4PackagingType;
    return H265WriteMp4PackagingType.fromValue(rawValue);
}
/**
 * If the location of parameter set NAL units doesn't matter in your workflow, ignore this setting. Use this
 * setting only with CMAF or DASH outputs, or with standalone file outputs in an MPEG-4 container (MP4 outputs).
 * Choose HVC1 to mark your output as HVC1 (compliant with ISO IECJTC1 SC29 N13798 Text ISO/IEC FDIS 14496-15 3rd
 * Edition); parameter set NAL units are then stored in the sample headers but not in the samples directly. The
 * service defaults to marking your output as HEV1, writing parameter set NAL units directly into the samples.
 *
 * Unlike {@link #writeMp4PackagingType}, this accessor returns the raw string sent by the service, so enum values
 * unknown to the current SDK version are preserved rather than mapped to
 * {@link H265WriteMp4PackagingType#UNKNOWN_TO_SDK_VERSION}.
 *
 * @return the MP4 packaging type as its raw service string
 * @see H265WriteMp4PackagingType
 */
public final String writeMp4PackagingTypeAsString() {
    return this.writeMp4PackagingType;
}
/**
 * Creates a mutable {@link Builder} pre-populated with this object's current field values.
 *
 * @return a builder whose state mirrors this immutable instance
 */
@Override
public Builder toBuilder() {
    return new BuilderImpl(this);
}
/**
 * Creates a new, empty {@link Builder} for constructing an {@code H265Settings} instance.
 *
 * @return a fresh builder with no fields set
 */
public static Builder builder() {
    return new BuilderImpl();
}
public static Class extends Builder> serializableBuilderClass() {
return BuilderImpl.class;
}
/**
 * Computes a hash over every SDK field of this object, in declaration order.
 *
 * Enum-backed members contribute their raw string form (the {@code ...AsString()} accessors), so values unknown
 * to this SDK version hash consistently with {@link #equalsBySdkFields(Object)}, which compares the same raw
 * strings. The accumulation order is significant and must stay in sync with the field list used by equals.
 */
@Override
public final int hashCode() {
    int hashCode = 1;
    hashCode = 31 * hashCode + Objects.hashCode(adaptiveQuantizationAsString());
    hashCode = 31 * hashCode + Objects.hashCode(alternateTransferFunctionSeiAsString());
    hashCode = 31 * hashCode + Objects.hashCode(bandwidthReductionFilter());
    hashCode = 31 * hashCode + Objects.hashCode(bitrate());
    hashCode = 31 * hashCode + Objects.hashCode(codecLevelAsString());
    hashCode = 31 * hashCode + Objects.hashCode(codecProfileAsString());
    hashCode = 31 * hashCode + Objects.hashCode(dynamicSubGopAsString());
    hashCode = 31 * hashCode + Objects.hashCode(endOfStreamMarkersAsString());
    hashCode = 31 * hashCode + Objects.hashCode(flickerAdaptiveQuantizationAsString());
    hashCode = 31 * hashCode + Objects.hashCode(framerateControlAsString());
    hashCode = 31 * hashCode + Objects.hashCode(framerateConversionAlgorithmAsString());
    hashCode = 31 * hashCode + Objects.hashCode(framerateDenominator());
    hashCode = 31 * hashCode + Objects.hashCode(framerateNumerator());
    hashCode = 31 * hashCode + Objects.hashCode(gopBReferenceAsString());
    hashCode = 31 * hashCode + Objects.hashCode(gopClosedCadence());
    hashCode = 31 * hashCode + Objects.hashCode(gopSize());
    hashCode = 31 * hashCode + Objects.hashCode(gopSizeUnitsAsString());
    hashCode = 31 * hashCode + Objects.hashCode(hrdBufferFinalFillPercentage());
    hashCode = 31 * hashCode + Objects.hashCode(hrdBufferInitialFillPercentage());
    hashCode = 31 * hashCode + Objects.hashCode(hrdBufferSize());
    hashCode = 31 * hashCode + Objects.hashCode(interlaceModeAsString());
    hashCode = 31 * hashCode + Objects.hashCode(maxBitrate());
    hashCode = 31 * hashCode + Objects.hashCode(minIInterval());
    hashCode = 31 * hashCode + Objects.hashCode(numberBFramesBetweenReferenceFrames());
    hashCode = 31 * hashCode + Objects.hashCode(numberReferenceFrames());
    hashCode = 31 * hashCode + Objects.hashCode(parControlAsString());
    hashCode = 31 * hashCode + Objects.hashCode(parDenominator());
    hashCode = 31 * hashCode + Objects.hashCode(parNumerator());
    hashCode = 31 * hashCode + Objects.hashCode(qualityTuningLevelAsString());
    hashCode = 31 * hashCode + Objects.hashCode(qvbrSettings());
    hashCode = 31 * hashCode + Objects.hashCode(rateControlModeAsString());
    hashCode = 31 * hashCode + Objects.hashCode(sampleAdaptiveOffsetFilterModeAsString());
    hashCode = 31 * hashCode + Objects.hashCode(scanTypeConversionModeAsString());
    hashCode = 31 * hashCode + Objects.hashCode(sceneChangeDetectAsString());
    hashCode = 31 * hashCode + Objects.hashCode(slices());
    hashCode = 31 * hashCode + Objects.hashCode(slowPalAsString());
    hashCode = 31 * hashCode + Objects.hashCode(spatialAdaptiveQuantizationAsString());
    hashCode = 31 * hashCode + Objects.hashCode(telecineAsString());
    hashCode = 31 * hashCode + Objects.hashCode(temporalAdaptiveQuantizationAsString());
    hashCode = 31 * hashCode + Objects.hashCode(temporalIdsAsString());
    hashCode = 31 * hashCode + Objects.hashCode(tilesAsString());
    hashCode = 31 * hashCode + Objects.hashCode(unregisteredSeiTimecodeAsString());
    hashCode = 31 * hashCode + Objects.hashCode(writeMp4PackagingTypeAsString());
    return hashCode;
}
/**
 * Delegates equality to the SDK-field based comparison in {@link #equalsBySdkFields(Object)}.
 */
@Override
public final boolean equals(Object obj) {
    return equalsBySdkFields(obj);
}
/**
 * Compares every SDK field of this object against {@code obj}. Enum-backed members are compared via their raw
 * string accessors ({@code ...AsString()}), so two objects carrying the same unrecognized service value are
 * still considered equal. The field order mirrors {@link #hashCode()}.
 */
@Override
public final boolean equalsBySdkFields(Object obj) {
    if (this == obj) {
        return true;
    }
    // Explicit null check kept for clarity even though instanceof below would also reject null.
    if (obj == null) {
        return false;
    }
    if (!(obj instanceof H265Settings)) {
        return false;
    }
    H265Settings other = (H265Settings) obj;
    return Objects.equals(adaptiveQuantizationAsString(), other.adaptiveQuantizationAsString())
            && Objects.equals(alternateTransferFunctionSeiAsString(), other.alternateTransferFunctionSeiAsString())
            && Objects.equals(bandwidthReductionFilter(), other.bandwidthReductionFilter())
            && Objects.equals(bitrate(), other.bitrate()) && Objects.equals(codecLevelAsString(), other.codecLevelAsString())
            && Objects.equals(codecProfileAsString(), other.codecProfileAsString())
            && Objects.equals(dynamicSubGopAsString(), other.dynamicSubGopAsString())
            && Objects.equals(endOfStreamMarkersAsString(), other.endOfStreamMarkersAsString())
            && Objects.equals(flickerAdaptiveQuantizationAsString(), other.flickerAdaptiveQuantizationAsString())
            && Objects.equals(framerateControlAsString(), other.framerateControlAsString())
            && Objects.equals(framerateConversionAlgorithmAsString(), other.framerateConversionAlgorithmAsString())
            && Objects.equals(framerateDenominator(), other.framerateDenominator())
            && Objects.equals(framerateNumerator(), other.framerateNumerator())
            && Objects.equals(gopBReferenceAsString(), other.gopBReferenceAsString())
            && Objects.equals(gopClosedCadence(), other.gopClosedCadence()) && Objects.equals(gopSize(), other.gopSize())
            && Objects.equals(gopSizeUnitsAsString(), other.gopSizeUnitsAsString())
            && Objects.equals(hrdBufferFinalFillPercentage(), other.hrdBufferFinalFillPercentage())
            && Objects.equals(hrdBufferInitialFillPercentage(), other.hrdBufferInitialFillPercentage())
            && Objects.equals(hrdBufferSize(), other.hrdBufferSize())
            && Objects.equals(interlaceModeAsString(), other.interlaceModeAsString())
            && Objects.equals(maxBitrate(), other.maxBitrate()) && Objects.equals(minIInterval(), other.minIInterval())
            && Objects.equals(numberBFramesBetweenReferenceFrames(), other.numberBFramesBetweenReferenceFrames())
            && Objects.equals(numberReferenceFrames(), other.numberReferenceFrames())
            && Objects.equals(parControlAsString(), other.parControlAsString())
            && Objects.equals(parDenominator(), other.parDenominator())
            && Objects.equals(parNumerator(), other.parNumerator())
            && Objects.equals(qualityTuningLevelAsString(), other.qualityTuningLevelAsString())
            && Objects.equals(qvbrSettings(), other.qvbrSettings())
            && Objects.equals(rateControlModeAsString(), other.rateControlModeAsString())
            && Objects.equals(sampleAdaptiveOffsetFilterModeAsString(), other.sampleAdaptiveOffsetFilterModeAsString())
            && Objects.equals(scanTypeConversionModeAsString(), other.scanTypeConversionModeAsString())
            && Objects.equals(sceneChangeDetectAsString(), other.sceneChangeDetectAsString())
            && Objects.equals(slices(), other.slices()) && Objects.equals(slowPalAsString(), other.slowPalAsString())
            && Objects.equals(spatialAdaptiveQuantizationAsString(), other.spatialAdaptiveQuantizationAsString())
            && Objects.equals(telecineAsString(), other.telecineAsString())
            && Objects.equals(temporalAdaptiveQuantizationAsString(), other.temporalAdaptiveQuantizationAsString())
            && Objects.equals(temporalIdsAsString(), other.temporalIdsAsString())
            && Objects.equals(tilesAsString(), other.tilesAsString())
            && Objects.equals(unregisteredSeiTimecodeAsString(), other.unregisteredSeiTimecodeAsString())
            && Objects.equals(writeMp4PackagingTypeAsString(), other.writeMp4PackagingTypeAsString());
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 */
@Override
public final String toString() {
    // Field labels use the service's PascalCase member names; enum-backed members render their raw string form.
    return ToString.builder("H265Settings").add("AdaptiveQuantization", adaptiveQuantizationAsString())
            .add("AlternateTransferFunctionSei", alternateTransferFunctionSeiAsString())
            .add("BandwidthReductionFilter", bandwidthReductionFilter()).add("Bitrate", bitrate())
            .add("CodecLevel", codecLevelAsString()).add("CodecProfile", codecProfileAsString())
            .add("DynamicSubGop", dynamicSubGopAsString()).add("EndOfStreamMarkers", endOfStreamMarkersAsString())
            .add("FlickerAdaptiveQuantization", flickerAdaptiveQuantizationAsString())
            .add("FramerateControl", framerateControlAsString())
            .add("FramerateConversionAlgorithm", framerateConversionAlgorithmAsString())
            .add("FramerateDenominator", framerateDenominator()).add("FramerateNumerator", framerateNumerator())
            .add("GopBReference", gopBReferenceAsString()).add("GopClosedCadence", gopClosedCadence())
            .add("GopSize", gopSize()).add("GopSizeUnits", gopSizeUnitsAsString())
            .add("HrdBufferFinalFillPercentage", hrdBufferFinalFillPercentage())
            .add("HrdBufferInitialFillPercentage", hrdBufferInitialFillPercentage()).add("HrdBufferSize", hrdBufferSize())
            .add("InterlaceMode", interlaceModeAsString()).add("MaxBitrate", maxBitrate())
            .add("MinIInterval", minIInterval())
            .add("NumberBFramesBetweenReferenceFrames", numberBFramesBetweenReferenceFrames())
            .add("NumberReferenceFrames", numberReferenceFrames()).add("ParControl", parControlAsString())
            .add("ParDenominator", parDenominator()).add("ParNumerator", parNumerator())
            .add("QualityTuningLevel", qualityTuningLevelAsString()).add("QvbrSettings", qvbrSettings())
            .add("RateControlMode", rateControlModeAsString())
            .add("SampleAdaptiveOffsetFilterMode", sampleAdaptiveOffsetFilterModeAsString())
            .add("ScanTypeConversionMode", scanTypeConversionModeAsString())
            .add("SceneChangeDetect", sceneChangeDetectAsString()).add("Slices", slices()).add("SlowPal", slowPalAsString())
            .add("SpatialAdaptiveQuantization", spatialAdaptiveQuantizationAsString()).add("Telecine", telecineAsString())
            .add("TemporalAdaptiveQuantization", temporalAdaptiveQuantizationAsString())
            .add("TemporalIds", temporalIdsAsString()).add("Tiles", tilesAsString())
            .add("UnregisteredSeiTimecode", unregisteredSeiTimecodeAsString())
            .add("WriteMp4PackagingType", writeMp4PackagingTypeAsString()).build();
}
/**
 * Returns the value of the named field, cast to the requested type. Enum-backed members are returned in their
 * raw {@code String} form (matching the wire representation), so callers should request {@code String.class}
 * for those members.
 *
 * Note: the extraction dropped the generic type parameter from this signature (raw {@code Optional}/{@code Class});
 * the standard generated-SDK form {@code <T> Optional<T> getValueForField(String, Class<T>)} is restored here.
 *
 * @param fieldName the service member name, e.g. "Bitrate" or "Tiles"
 * @param clazz the class object of the expected value type; used for the checked cast
 * @return an {@link Optional} holding the field value, or {@link Optional#empty()} if the name is unknown or the
 *         value is null
 * @throws ClassCastException if the field's value is not assignable to {@code clazz}
 */
public final <T> Optional<T> getValueForField(String fieldName, Class<T> clazz) {
    switch (fieldName) {
    case "AdaptiveQuantization":
        return Optional.ofNullable(clazz.cast(adaptiveQuantizationAsString()));
    case "AlternateTransferFunctionSei":
        return Optional.ofNullable(clazz.cast(alternateTransferFunctionSeiAsString()));
    case "BandwidthReductionFilter":
        return Optional.ofNullable(clazz.cast(bandwidthReductionFilter()));
    case "Bitrate":
        return Optional.ofNullable(clazz.cast(bitrate()));
    case "CodecLevel":
        return Optional.ofNullable(clazz.cast(codecLevelAsString()));
    case "CodecProfile":
        return Optional.ofNullable(clazz.cast(codecProfileAsString()));
    case "DynamicSubGop":
        return Optional.ofNullable(clazz.cast(dynamicSubGopAsString()));
    case "EndOfStreamMarkers":
        return Optional.ofNullable(clazz.cast(endOfStreamMarkersAsString()));
    case "FlickerAdaptiveQuantization":
        return Optional.ofNullable(clazz.cast(flickerAdaptiveQuantizationAsString()));
    case "FramerateControl":
        return Optional.ofNullable(clazz.cast(framerateControlAsString()));
    case "FramerateConversionAlgorithm":
        return Optional.ofNullable(clazz.cast(framerateConversionAlgorithmAsString()));
    case "FramerateDenominator":
        return Optional.ofNullable(clazz.cast(framerateDenominator()));
    case "FramerateNumerator":
        return Optional.ofNullable(clazz.cast(framerateNumerator()));
    case "GopBReference":
        return Optional.ofNullable(clazz.cast(gopBReferenceAsString()));
    case "GopClosedCadence":
        return Optional.ofNullable(clazz.cast(gopClosedCadence()));
    case "GopSize":
        return Optional.ofNullable(clazz.cast(gopSize()));
    case "GopSizeUnits":
        return Optional.ofNullable(clazz.cast(gopSizeUnitsAsString()));
    case "HrdBufferFinalFillPercentage":
        return Optional.ofNullable(clazz.cast(hrdBufferFinalFillPercentage()));
    case "HrdBufferInitialFillPercentage":
        return Optional.ofNullable(clazz.cast(hrdBufferInitialFillPercentage()));
    case "HrdBufferSize":
        return Optional.ofNullable(clazz.cast(hrdBufferSize()));
    case "InterlaceMode":
        return Optional.ofNullable(clazz.cast(interlaceModeAsString()));
    case "MaxBitrate":
        return Optional.ofNullable(clazz.cast(maxBitrate()));
    case "MinIInterval":
        return Optional.ofNullable(clazz.cast(minIInterval()));
    case "NumberBFramesBetweenReferenceFrames":
        return Optional.ofNullable(clazz.cast(numberBFramesBetweenReferenceFrames()));
    case "NumberReferenceFrames":
        return Optional.ofNullable(clazz.cast(numberReferenceFrames()));
    case "ParControl":
        return Optional.ofNullable(clazz.cast(parControlAsString()));
    case "ParDenominator":
        return Optional.ofNullable(clazz.cast(parDenominator()));
    case "ParNumerator":
        return Optional.ofNullable(clazz.cast(parNumerator()));
    case "QualityTuningLevel":
        return Optional.ofNullable(clazz.cast(qualityTuningLevelAsString()));
    case "QvbrSettings":
        return Optional.ofNullable(clazz.cast(qvbrSettings()));
    case "RateControlMode":
        return Optional.ofNullable(clazz.cast(rateControlModeAsString()));
    case "SampleAdaptiveOffsetFilterMode":
        return Optional.ofNullable(clazz.cast(sampleAdaptiveOffsetFilterModeAsString()));
    case "ScanTypeConversionMode":
        return Optional.ofNullable(clazz.cast(scanTypeConversionModeAsString()));
    case "SceneChangeDetect":
        return Optional.ofNullable(clazz.cast(sceneChangeDetectAsString()));
    case "Slices":
        return Optional.ofNullable(clazz.cast(slices()));
    case "SlowPal":
        return Optional.ofNullable(clazz.cast(slowPalAsString()));
    case "SpatialAdaptiveQuantization":
        return Optional.ofNullable(clazz.cast(spatialAdaptiveQuantizationAsString()));
    case "Telecine":
        return Optional.ofNullable(clazz.cast(telecineAsString()));
    case "TemporalAdaptiveQuantization":
        return Optional.ofNullable(clazz.cast(temporalAdaptiveQuantizationAsString()));
    case "TemporalIds":
        return Optional.ofNullable(clazz.cast(temporalIdsAsString()));
    case "Tiles":
        return Optional.ofNullable(clazz.cast(tilesAsString()));
    case "UnregisteredSeiTimecode":
        return Optional.ofNullable(clazz.cast(unregisteredSeiTimecodeAsString()));
    case "WriteMp4PackagingType":
        return Optional.ofNullable(clazz.cast(writeMp4PackagingTypeAsString()));
    default:
        return Optional.empty();
    }
}
@Override
public final List> sdkFields() {
return SDK_FIELDS;
}
private static Function