io.opentelemetry.proto.metrics.v1.ExponentialHistogram.scala
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3

package io.opentelemetry.proto.metrics.v1

/** ExponentialHistogram represents the type of a metric that is calculated by aggregating
  * all reported double measurements over a time interval as an ExponentialHistogram.
  *
  * @param aggregationTemporality
  *   aggregation_temporality describes if the aggregator reports delta changes
  *   since last report time, or cumulative changes since a fixed start time.
  */
@SerialVersionUID(0L)
final case class ExponentialHistogram(
    dataPoints: _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint] = _root_.scala.Seq.empty,
    aggregationTemporality: io.opentelemetry.proto.metrics.v1.AggregationTemporality = io.opentelemetry.proto.metrics.v1.AggregationTemporality.AGGREGATION_TEMPORALITY_UNSPECIFIED,
    unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
    ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[ExponentialHistogram] {
    @transient
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      dataPoints.foreach { __item =>
        val __value = __item
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
      
      {
        val __value = aggregationTemporality.value
        if (__value != 0) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeEnumSize(2, __value)
        }
      };
      __size += unknownFields.serializedSize
      __size
    }
    override def serializedSize: _root_.scala.Int = {
      var __size = __serializedSizeMemoized
      if (__size == 0) {
        __size = __computeSerializedSize() + 1
        __serializedSizeMemoized = __size
      }
      __size - 1
      
    }
    def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
      dataPoints.foreach { __v =>
        val __m = __v
        _output__.writeTag(1, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      {
        val __v = aggregationTemporality.value
        if (__v != 0) {
          _output__.writeEnum(2, __v)
        }
      };
      unknownFields.writeTo(_output__)
    }
    def clearDataPoints = copy(dataPoints = _root_.scala.Seq.empty)
    def addDataPoints(__vs: io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint *): ExponentialHistogram = addAllDataPoints(__vs)
    def addAllDataPoints(__vs: Iterable[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint]): ExponentialHistogram = copy(dataPoints = dataPoints ++ __vs)
    def withDataPoints(__v: _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint]): ExponentialHistogram = copy(dataPoints = __v)
    def withAggregationTemporality(__v: io.opentelemetry.proto.metrics.v1.AggregationTemporality): ExponentialHistogram = copy(aggregationTemporality = __v)
    def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
    def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
    def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
      (__fieldNumber: @_root_.scala.unchecked) match {
        case 1 => dataPoints
        case 2 => {
          val __t = aggregationTemporality.javaValueDescriptor
          if (__t.getNumber() != 0) __t else null
        }
      }
    }
    def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
      _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
      (__field.number: @_root_.scala.unchecked) match {
        case 1 => _root_.scalapb.descriptors.PRepeated(dataPoints.iterator.map(_.toPMessage).toVector)
        case 2 => _root_.scalapb.descriptors.PEnum(aggregationTemporality.scalaValueDescriptor)
      }
    }
    def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
    def companion: io.opentelemetry.proto.metrics.v1.ExponentialHistogram.type = io.opentelemetry.proto.metrics.v1.ExponentialHistogram
    // @@protoc_insertion_point(GeneratedMessage[opentelemetry.proto.metrics.v1.ExponentialHistogram])
}
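
// Example (not generated; a minimal usage sketch): constructing a message with the
// builder-style helpers defined above and serializing it. The empty data point is a
// hypothetical placeholder; real code would populate its fields from measurements.
object ExponentialHistogramBuildExample {
  def buildAndSerialize(): Array[Byte] = {
    val histogram = ExponentialHistogram()
      .withAggregationTemporality(AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA)
      .addDataPoints(ExponentialHistogramDataPoint())
    // toByteArray is provided by scalapb.GeneratedMessage and delegates to the
    // serializedSize and writeTo implementations above.
    histogram.toByteArray
  }
}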

object ExponentialHistogram extends scalapb.GeneratedMessageCompanion[io.opentelemetry.proto.metrics.v1.ExponentialHistogram] {
  implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opentelemetry.proto.metrics.v1.ExponentialHistogram] = this
  def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opentelemetry.proto.metrics.v1.ExponentialHistogram = {
    val __dataPoints: _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint] = new _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint]
    var __aggregationTemporality: io.opentelemetry.proto.metrics.v1.AggregationTemporality = io.opentelemetry.proto.metrics.v1.AggregationTemporality.AGGREGATION_TEMPORALITY_UNSPECIFIED
    var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
    var _done__ = false
    while (!_done__) {
      val _tag__ = _input__.readTag()
      _tag__ match {
        case 0 => _done__ = true
        case 10 =>
          __dataPoints += _root_.scalapb.LiteParser.readMessage[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint](_input__)
        case 16 =>
          __aggregationTemporality = io.opentelemetry.proto.metrics.v1.AggregationTemporality.fromValue(_input__.readEnum())
        case tag =>
          if (_unknownFields__ == null) {
            _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
          }
          _unknownFields__.parseField(tag, _input__)
      }
    }
    io.opentelemetry.proto.metrics.v1.ExponentialHistogram(
        dataPoints = __dataPoints.result(),
        aggregationTemporality = __aggregationTemporality,
        unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
    )
  }
  implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opentelemetry.proto.metrics.v1.ExponentialHistogram] = _root_.scalapb.descriptors.Reads{
    case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
      _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
      io.opentelemetry.proto.metrics.v1.ExponentialHistogram(
        dataPoints = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Seq[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint]]).getOrElse(_root_.scala.Seq.empty),
        aggregationTemporality = io.opentelemetry.proto.metrics.v1.AggregationTemporality.fromValue(__fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).map(_.as[_root_.scalapb.descriptors.EnumValueDescriptor]).getOrElse(io.opentelemetry.proto.metrics.v1.AggregationTemporality.AGGREGATION_TEMPORALITY_UNSPECIFIED.scalaValueDescriptor).number)
      )
    case _ => throw new RuntimeException("Expected PMessage")
  }
  def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = MetricsProto.javaDescriptor.getMessageTypes().get(8)
  def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = MetricsProto.scalaDescriptor.messages(8)
  def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
    var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
    (__number: @_root_.scala.unchecked) match {
      case 1 => __out = io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint
    }
    __out
  }
  lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
  def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = {
    (__fieldNumber: @_root_.scala.unchecked) match {
      case 2 => io.opentelemetry.proto.metrics.v1.AggregationTemporality
    }
  }
  lazy val defaultInstance = io.opentelemetry.proto.metrics.v1.ExponentialHistogram(
    dataPoints = _root_.scala.Seq.empty,
    aggregationTemporality = io.opentelemetry.proto.metrics.v1.AggregationTemporality.AGGREGATION_TEMPORALITY_UNSPECIFIED
  )
  implicit class ExponentialHistogramLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opentelemetry.proto.metrics.v1.ExponentialHistogram]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opentelemetry.proto.metrics.v1.ExponentialHistogram](_l) {
    def dataPoints: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint]] = field(_.dataPoints)((c_, f_) => c_.copy(dataPoints = f_))
    def aggregationTemporality: _root_.scalapb.lenses.Lens[UpperPB, io.opentelemetry.proto.metrics.v1.AggregationTemporality] = field(_.aggregationTemporality)((c_, f_) => c_.copy(aggregationTemporality = f_))
  }
  final val DATA_POINTS_FIELD_NUMBER = 1
  final val AGGREGATION_TEMPORALITY_FIELD_NUMBER = 2
  def of(
    dataPoints: _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint],
    aggregationTemporality: io.opentelemetry.proto.metrics.v1.AggregationTemporality
  ): _root_.io.opentelemetry.proto.metrics.v1.ExponentialHistogram = _root_.io.opentelemetry.proto.metrics.v1.ExponentialHistogram(
    dataPoints,
    aggregationTemporality
  )
  // @@protoc_insertion_point(GeneratedMessageCompanion[opentelemetry.proto.metrics.v1.ExponentialHistogram])
}
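
// Example (not generated; a minimal usage sketch): parsing bytes produced elsewhere and
// updating a field through the companion and lens APIs defined above. Switching the
// temporality to CUMULATIVE here is purely an illustration.
object ExponentialHistogramParseExample {
  def parseAndUpdate(bytes: Array[Byte]): ExponentialHistogram = {
    // parseFrom(Array[Byte]) comes from scalapb.GeneratedMessageCompanion and delegates
    // to the CodedInputStream parser defined above.
    val parsed = ExponentialHistogram.parseFrom(bytes)
    // ExponentialHistogramLens enables functional updates via scalapb.lenses.Updatable.
    parsed.update(
      _.aggregationTemporality := AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE
    )
  }
}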
