// io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.scala
// NOTE(review): download-site boilerplate ("All Downloads are FREE...") removed and
// this header commented out so the file remains valid Scala.

// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3

package io.opentelemetry.proto.metrics.v1

/** ExponentialHistogramDataPoint is a single data point in a timeseries that describes the
  * time-varying values of a ExponentialHistogram of double values. A ExponentialHistogram contains
  * summary statistics for a population of values, it may optionally contain the
  * distribution of those values across a set of buckets.
  *
  * @param attributes
  *   The set of key/value pairs that uniquely identify the timeseries from
  *   where this point belongs. The list may be empty (may contain 0 elements).
  *   Attribute keys MUST be unique (it is not allowed to have more than one
  *   attribute with the same key).
  * @param startTimeUnixNano
  *   StartTimeUnixNano is optional but strongly encouraged, see the
  *   detailed comments above Metric.
  *  
  *   Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
  *   1970.
  * @param timeUnixNano
  *   TimeUnixNano is required, see the detailed comments above Metric.
  *  
  *   Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
  *   1970.
  * @param count
  *   count is the number of values in the population. Must be
  *   non-negative. This value must be equal to the sum of the "bucket_counts"
  *   values in the positive and negative Buckets plus the "zero_count" field.
  * @param sum
  *   sum of the values in the population. If count is zero then this field
  *   must be zero.
  *  
  *   Note: Sum should only be filled out when measuring non-negative discrete
  *   events, and is assumed to be monotonic over the values of these events.
  *   Negative events *can* be recorded, but sum should not be filled out when
  *   doing so.  This is specifically to enforce compatibility w/ OpenMetrics,
  *   see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#histogram
  * @param scale
  *   scale describes the resolution of the histogram.  Boundaries are
  *   located at powers of the base, where:
  *  
  *     base = (2^(2^-scale))
  *  
  *   The histogram bucket identified by `index`, a signed integer,
  *   contains values that are greater than or equal to (base^index) and
  *   less than (base^(index+1)).
  *  
  *   The positive and negative ranges of the histogram are expressed
  *   separately.  Negative values are mapped by their absolute value
  *   into the negative range using the same scale as the positive range.
  *  
  *   scale is not restricted by the protocol, as the permissible
  *   values depend on the range of the data.
  * @param zeroCount
  *   zero_count is the count of values that are either exactly zero or
  *   within the region considered zero by the instrumentation at the
  *   tolerated degree of precision.  This bucket stores values that
  *   cannot be expressed using the standard exponential formula as
  *   well as values that have been rounded to zero.
  *  
  *   Implementations MAY consider the zero bucket to have probability
  *   mass equal to (zero_count / count).
  * @param positive
  *   positive carries the positive range of exponential bucket counts.
  * @param negative
  *   negative carries the negative range of exponential bucket counts.
  * @param flags
  *   Flags that apply to this specific data point.  See DataPointFlags
  *   for the available flags and their meaning.
  * @param exemplars
  *   (Optional) List of exemplars collected from
  *   measurements that were used to form the data point
  * @param min
  *   min is the minimum value over (start_time, end_time].
  * @param max
  *   max is the maximum value over (start_time, end_time].
  */
@SerialVersionUID(0L)
final case class ExponentialHistogramDataPoint(
    attributes: _root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue] = _root_.scala.Seq.empty,
    startTimeUnixNano: _root_.scala.Long = 0L,
    timeUnixNano: _root_.scala.Long = 0L,
    count: _root_.scala.Long = 0L,
    sum: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None,
    scale: _root_.scala.Int = 0,
    zeroCount: _root_.scala.Long = 0L,
    positive: _root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets] = _root_.scala.None,
    negative: _root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets] = _root_.scala.None,
    flags: _root_.scala.Int = 0,
    exemplars: _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar] = _root_.scala.Seq.empty,
    min: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None,
    max: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None,
    unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
    ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[ExponentialHistogramDataPoint] {
    // Memoized wire size of this message. 0 means "not yet computed";
    // serializedSize stores computed-size + 1 so that a real size of 0 is
    // distinguishable from the unset sentinel. @transient: recomputed after
    // Java deserialization rather than persisted.
    @transient
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    // Computes the protobuf wire size of this message in bytes.
    // proto3 semantics: scalar fields equal to their default (0 / 0L) are not
    // written and contribute nothing; Option-typed and message-typed fields
    // contribute only when defined. Must stay in lockstep with writeTo below.
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      // Field 1 (attributes), repeated message: per element, 1 tag byte +
      // length varint + payload bytes.
      attributes.foreach { __item =>
        val __value = __item
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
      
      // Fields 2-4: fixed64 scalars, skipped when 0 (proto3 default).
      {
        val __value = startTimeUnixNano
        if (__value != 0L) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeFixed64Size(2, __value)
        }
      };
      
      {
        val __value = timeUnixNano
        if (__value != 0L) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeFixed64Size(3, __value)
        }
      };
      
      {
        val __value = count
        if (__value != 0L) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeFixed64Size(4, __value)
        }
      };
      // Field 5 (sum): optional double, written whenever present (even if 0.0).
      if (sum.isDefined) {
        val __value = sum.get
        __size += _root_.com.google.protobuf.CodedOutputStream.computeDoubleSize(5, __value)
      };
      
      // Field 6 (scale): sint32 (zig-zag varint), skipped when 0.
      {
        val __value = scale
        if (__value != 0) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeSInt32Size(6, __value)
        }
      };
      
      {
        val __value = zeroCount
        if (__value != 0L) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeFixed64Size(7, __value)
        }
      };
      // Fields 8/9 (positive/negative): optional sub-messages.
      if (positive.isDefined) {
        val __value = positive.get
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      };
      if (negative.isDefined) {
        val __value = negative.get
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      };
      
      {
        val __value = flags
        if (__value != 0) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeUInt32Size(10, __value)
        }
      };
      // Field 11 (exemplars): repeated message, same framing as attributes.
      exemplars.foreach { __item =>
        val __value = __item
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
      if (min.isDefined) {
        val __value = min.get
        __size += _root_.com.google.protobuf.CodedOutputStream.computeDoubleSize(12, __value)
      };
      if (max.isDefined) {
        val __value = max.get
        __size += _root_.com.google.protobuf.CodedOutputStream.computeDoubleSize(13, __value)
      };
      // Unknown fields round-trip: retained bytes count toward the size.
      __size += unknownFields.serializedSize
      __size
    }
    // Memoizing wrapper: stores __computeSerializedSize() + 1 so the sentinel 0
    // ("unset") never collides with a legitimately-zero size, and returns the
    // cached value - 1. Safe because the message is immutable.
    override def serializedSize: _root_.scala.Int = {
      var __size = __serializedSizeMemoized
      if (__size == 0) {
        __size = __computeSerializedSize() + 1
        __serializedSizeMemoized = __size
      }
      __size - 1
      
    }
    // Serializes this message to protobuf wire format, in ascending field-number
    // order. Emission conditions (skip proto3 defaults, write optionals when
    // present) mirror __computeSerializedSize exactly. writeTag(n, 2) is wire
    // type 2 = length-delimited.
    def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
      attributes.foreach { __v =>
        val __m = __v
        _output__.writeTag(1, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      {
        val __v = startTimeUnixNano
        if (__v != 0L) {
          _output__.writeFixed64(2, __v)
        }
      };
      {
        val __v = timeUnixNano
        if (__v != 0L) {
          _output__.writeFixed64(3, __v)
        }
      };
      {
        val __v = count
        if (__v != 0L) {
          _output__.writeFixed64(4, __v)
        }
      };
      sum.foreach { __v =>
        val __m = __v
        _output__.writeDouble(5, __m)
      };
      {
        val __v = scale
        if (__v != 0) {
          _output__.writeSInt32(6, __v)
        }
      };
      {
        val __v = zeroCount
        if (__v != 0L) {
          _output__.writeFixed64(7, __v)
        }
      };
      positive.foreach { __v =>
        val __m = __v
        _output__.writeTag(8, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      negative.foreach { __v =>
        val __m = __v
        _output__.writeTag(9, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      {
        val __v = flags
        if (__v != 0) {
          _output__.writeUInt32(10, __v)
        }
      };
      exemplars.foreach { __v =>
        val __m = __v
        _output__.writeTag(11, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      min.foreach { __v =>
        val __m = __v
        _output__.writeDouble(12, __m)
      };
      max.foreach { __v =>
        val __m = __v
        _output__.writeDouble(13, __m)
      };
      unknownFields.writeTo(_output__)
    }
    // Immutable builder-style accessors: each with*/clear*/add* returns a new
    // copy; get* on Option fields substitutes the proto3 default when absent.
    def clearAttributes = copy(attributes = _root_.scala.Seq.empty)
    def addAttributes(__vs: io.opentelemetry.proto.common.v1.KeyValue *): ExponentialHistogramDataPoint = addAllAttributes(__vs)
    def addAllAttributes(__vs: Iterable[io.opentelemetry.proto.common.v1.KeyValue]): ExponentialHistogramDataPoint = copy(attributes = attributes ++ __vs)
    def withAttributes(__v: _root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue]): ExponentialHistogramDataPoint = copy(attributes = __v)
    def withStartTimeUnixNano(__v: _root_.scala.Long): ExponentialHistogramDataPoint = copy(startTimeUnixNano = __v)
    def withTimeUnixNano(__v: _root_.scala.Long): ExponentialHistogramDataPoint = copy(timeUnixNano = __v)
    def withCount(__v: _root_.scala.Long): ExponentialHistogramDataPoint = copy(count = __v)
    def getSum: _root_.scala.Double = sum.getOrElse(0.0)
    def clearSum: ExponentialHistogramDataPoint = copy(sum = _root_.scala.None)
    def withSum(__v: _root_.scala.Double): ExponentialHistogramDataPoint = copy(sum = Option(__v))
    def withScale(__v: _root_.scala.Int): ExponentialHistogramDataPoint = copy(scale = __v)
    def withZeroCount(__v: _root_.scala.Long): ExponentialHistogramDataPoint = copy(zeroCount = __v)
    def getPositive: io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets = positive.getOrElse(io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets.defaultInstance)
    def clearPositive: ExponentialHistogramDataPoint = copy(positive = _root_.scala.None)
    def withPositive(__v: io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets): ExponentialHistogramDataPoint = copy(positive = Option(__v))
    def getNegative: io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets = negative.getOrElse(io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets.defaultInstance)
    def clearNegative: ExponentialHistogramDataPoint = copy(negative = _root_.scala.None)
    def withNegative(__v: io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets): ExponentialHistogramDataPoint = copy(negative = Option(__v))
    def withFlags(__v: _root_.scala.Int): ExponentialHistogramDataPoint = copy(flags = __v)
    def clearExemplars = copy(exemplars = _root_.scala.Seq.empty)
    def addExemplars(__vs: io.opentelemetry.proto.metrics.v1.Exemplar *): ExponentialHistogramDataPoint = addAllExemplars(__vs)
    def addAllExemplars(__vs: Iterable[io.opentelemetry.proto.metrics.v1.Exemplar]): ExponentialHistogramDataPoint = copy(exemplars = exemplars ++ __vs)
    def withExemplars(__v: _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar]): ExponentialHistogramDataPoint = copy(exemplars = __v)
    def getMin: _root_.scala.Double = min.getOrElse(0.0)
    def clearMin: ExponentialHistogramDataPoint = copy(min = _root_.scala.None)
    def withMin(__v: _root_.scala.Double): ExponentialHistogramDataPoint = copy(min = Option(__v))
    def getMax: _root_.scala.Double = max.getOrElse(0.0)
    def clearMax: ExponentialHistogramDataPoint = copy(max = _root_.scala.None)
    def withMax(__v: _root_.scala.Double): ExponentialHistogramDataPoint = copy(max = Option(__v))
    def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
    def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
    // Reflection accessor used by scalapb (e.g. text format): returns the raw
    // Scala value for a field number. proto3 scalars at their default value
    // yield null, signalling "unset". The @unchecked match throws MatchError
    // for unknown field numbers.
    def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
      (__fieldNumber: @_root_.scala.unchecked) match {
        case 1 => attributes
        case 2 => {
          val __t = startTimeUnixNano
          if (__t != 0L) __t else null
        }
        case 3 => {
          val __t = timeUnixNano
          if (__t != 0L) __t else null
        }
        case 4 => {
          val __t = count
          if (__t != 0L) __t else null
        }
        case 5 => sum.orNull
        case 6 => {
          val __t = scale
          if (__t != 0) __t else null
        }
        case 7 => {
          val __t = zeroCount
          if (__t != 0L) __t else null
        }
        case 8 => positive.orNull
        case 9 => negative.orNull
        case 10 => {
          val __t = flags
          if (__t != 0) __t else null
        }
        case 11 => exemplars
        case 12 => min.orNull
        case 13 => max.orNull
      }
    }
    // Descriptor-based reflection accessor: wraps field values in scalapb's
    // PValue ADT; absent optional fields map to PEmpty.
    def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
      _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
      (__field.number: @_root_.scala.unchecked) match {
        case 1 => _root_.scalapb.descriptors.PRepeated(attributes.iterator.map(_.toPMessage).toVector)
        case 2 => _root_.scalapb.descriptors.PLong(startTimeUnixNano)
        case 3 => _root_.scalapb.descriptors.PLong(timeUnixNano)
        case 4 => _root_.scalapb.descriptors.PLong(count)
        case 5 => sum.map(_root_.scalapb.descriptors.PDouble(_)).getOrElse(_root_.scalapb.descriptors.PEmpty)
        case 6 => _root_.scalapb.descriptors.PInt(scale)
        case 7 => _root_.scalapb.descriptors.PLong(zeroCount)
        case 8 => positive.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
        case 9 => negative.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
        case 10 => _root_.scalapb.descriptors.PInt(flags)
        case 11 => _root_.scalapb.descriptors.PRepeated(exemplars.iterator.map(_.toPMessage).toVector)
        case 12 => min.map(_root_.scalapb.descriptors.PDouble(_)).getOrElse(_root_.scalapb.descriptors.PEmpty)
        case 13 => max.map(_root_.scalapb.descriptors.PDouble(_)).getOrElse(_root_.scalapb.descriptors.PEmpty)
      }
    }
    def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
    def companion: io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.type = io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint
    // @@protoc_insertion_point(GeneratedMessage[opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint])
}

object ExponentialHistogramDataPoint extends scalapb.GeneratedMessageCompanion[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint] {
  // Makes this companion implicitly available wherever a
  // GeneratedMessageCompanion[ExponentialHistogramDataPoint] is required.
  implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint] = this
  // Parses one ExponentialHistogramDataPoint from a protobuf byte stream.
  // Case labels are raw wire tags: tag = (fieldNumber << 3) | wireType
  // (0 = varint, 1 = fixed64, 2 = length-delimited). Tag 0 terminates.
  // Unrecognized tags are preserved in UnknownFieldSet for round-tripping.
  def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint = {
    val __attributes: _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.common.v1.KeyValue] = new _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.common.v1.KeyValue]
    var __startTimeUnixNano: _root_.scala.Long = 0L
    var __timeUnixNano: _root_.scala.Long = 0L
    var __count: _root_.scala.Long = 0L
    var __sum: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None
    var __scale: _root_.scala.Int = 0
    var __zeroCount: _root_.scala.Long = 0L
    var __positive: _root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets] = _root_.scala.None
    var __negative: _root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets] = _root_.scala.None
    var __flags: _root_.scala.Int = 0
    val __exemplars: _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.metrics.v1.Exemplar] = new _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.metrics.v1.Exemplar]
    var __min: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None
    var __max: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None
    var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
    var _done__ = false
    while (!_done__) {
      val _tag__ = _input__.readTag()
      _tag__ match {
        // end of stream / end of enclosing length-delimited region
        case 0 => _done__ = true
        // field 1 (attributes), length-delimited
        case 10 =>
          __attributes += _root_.scalapb.LiteParser.readMessage[io.opentelemetry.proto.common.v1.KeyValue](_input__)
        // field 2 (start_time_unix_nano), fixed64
        case 17 =>
          __startTimeUnixNano = _input__.readFixed64()
        // field 3 (time_unix_nano), fixed64
        case 25 =>
          __timeUnixNano = _input__.readFixed64()
        // field 4 (count), fixed64
        case 33 =>
          __count = _input__.readFixed64()
        // field 5 (sum), double
        case 41 =>
          __sum = Option(_input__.readDouble())
        // field 6 (scale), sint32 zig-zag varint
        case 48 =>
          __scale = _input__.readSInt32()
        // field 7 (zero_count), fixed64
        case 57 =>
          __zeroCount = _input__.readFixed64()
        // field 8 (positive), message; repeated occurrences are merged
        case 66 =>
          __positive = Option(__positive.fold(_root_.scalapb.LiteParser.readMessage[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets](_input__))(_root_.scalapb.LiteParser.readMessage(_input__, _)))
        // field 9 (negative), message; repeated occurrences are merged
        case 74 =>
          __negative = Option(__negative.fold(_root_.scalapb.LiteParser.readMessage[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets](_input__))(_root_.scalapb.LiteParser.readMessage(_input__, _)))
        // field 10 (flags), uint32 varint
        case 80 =>
          __flags = _input__.readUInt32()
        // field 11 (exemplars), length-delimited
        case 90 =>
          __exemplars += _root_.scalapb.LiteParser.readMessage[io.opentelemetry.proto.metrics.v1.Exemplar](_input__)
        // field 12 (min), double
        case 97 =>
          __min = Option(_input__.readDouble())
        // field 13 (max), double
        case 105 =>
          __max = Option(_input__.readDouble())
        // anything else: stash in unknown fields (builder allocated lazily)
        case tag =>
          if (_unknownFields__ == null) {
            _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
          }
          _unknownFields__.parseField(tag, _input__)
      }
    }
    io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint(
        attributes = __attributes.result(),
        startTimeUnixNano = __startTimeUnixNano,
        timeUnixNano = __timeUnixNano,
        count = __count,
        sum = __sum,
        scale = __scale,
        zeroCount = __zeroCount,
        positive = __positive,
        negative = __negative,
        flags = __flags,
        exemplars = __exemplars.result(),
        min = __min,
        max = __max,
        unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
    )
  }
  // Builds an ExponentialHistogramDataPoint from scalapb's descriptor-based
  // PValue representation (the inverse of getField). Missing map entries fall
  // back to proto3 defaults. Note: unknown fields are not reconstructed here.
  implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint] = _root_.scalapb.descriptors.Reads{
    case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
      _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
      io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint(
        attributes = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue]]).getOrElse(_root_.scala.Seq.empty),
        startTimeUnixNano = __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
        timeUnixNano = __fieldsMap.get(scalaDescriptor.findFieldByNumber(3).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
        count = __fieldsMap.get(scalaDescriptor.findFieldByNumber(4).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
        sum = __fieldsMap.get(scalaDescriptor.findFieldByNumber(5).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Double]]),
        scale = __fieldsMap.get(scalaDescriptor.findFieldByNumber(6).get).map(_.as[_root_.scala.Int]).getOrElse(0),
        zeroCount = __fieldsMap.get(scalaDescriptor.findFieldByNumber(7).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
        positive = __fieldsMap.get(scalaDescriptor.findFieldByNumber(8).get).flatMap(_.as[_root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets]]),
        negative = __fieldsMap.get(scalaDescriptor.findFieldByNumber(9).get).flatMap(_.as[_root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets]]),
        flags = __fieldsMap.get(scalaDescriptor.findFieldByNumber(10).get).map(_.as[_root_.scala.Int]).getOrElse(0),
        exemplars = __fieldsMap.get(scalaDescriptor.findFieldByNumber(11).get).map(_.as[_root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar]]).getOrElse(_root_.scala.Seq.empty),
        min = __fieldsMap.get(scalaDescriptor.findFieldByNumber(12).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Double]]),
        max = __fieldsMap.get(scalaDescriptor.findFieldByNumber(13).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Double]])
      )
    case _ => throw new RuntimeException("Expected PMessage")
  }
  // Descriptor lookups by positional index (12) in the generated MetricsProto;
  // these indices are regenerated with the .proto file, so the value is only
  // meaningful for the protobuf definition this file was generated from.
  def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = MetricsProto.javaDescriptor.getMessageTypes().get(12)
  def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = MetricsProto.scalaDescriptor.messages(12)
  // Returns the companion for each message-typed field of this message:
  // 1 = attributes (KeyValue), 8/9 = positive/negative (Buckets),
  // 11 = exemplars (Exemplar). Any other number throws a MatchError,
  // matching the original's behavior for unlisted field numbers.
  def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] =
    (__number: @_root_.scala.unchecked) match {
      case 1  => io.opentelemetry.proto.common.v1.KeyValue
      case 8  => io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets
      case 9  => io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets
      case 11 => io.opentelemetry.proto.metrics.v1.Exemplar
    }
  // Companions of all message types nested inside this message (only Buckets).
  lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
    Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
      _root_.io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets
    )
  // This message declares no enum-typed fields, so any lookup is an error.
  def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
  // Shared singleton with every field at its proto3 default
  // (explicitly spelled out by the generator for clarity).
  lazy val defaultInstance = io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint(
    attributes = _root_.scala.Seq.empty,
    startTimeUnixNano = 0L,
    timeUnixNano = 0L,
    count = 0L,
    sum = _root_.scala.None,
    scale = 0,
    zeroCount = 0L,
    positive = _root_.scala.None,
    negative = _root_.scala.None,
    flags = 0,
    exemplars = _root_.scala.Seq.empty,
    min = _root_.scala.None,
    max = _root_.scala.None
  )
  /** Buckets are a set of bucket counts, encoded in a contiguous array
    * of counts.
    *
    * @param offset
    *   Offset is the bucket index of the first entry in the bucket_counts array.
    *   
    *   Note: This uses a varint encoding as a simple form of compression.
    * @param bucketCounts
    *   Count is an array of counts, where count[i] carries the count
    *   of the bucket at index (offset+i).  count[i] is the count of
    *   values greater than or equal to base^(offset+i) and less than
    *   base^(offset+i+1).
    *  
    *   Note: By contrast, the explicit HistogramDataPoint uses
    *   fixed64.  This field is expected to have many buckets,
    *   especially zeros, so uint64 has been selected to ensure
    *   varint encoding.
    */
  @SerialVersionUID(0L)
  final case class Buckets(
      offset: _root_.scala.Int = 0,
      bucketCounts: _root_.scala.Seq[_root_.scala.Long] = _root_.scala.Seq.empty,
      unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
      ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[Buckets] {
      // Memoized payload size (in bytes) of the packed bucket_counts varints,
      // excluding tag and length prefix. 0 doubles as the "unset" sentinel,
      // which is harmless: an empty list is never sized (see the nonEmpty
      // guards below) and a non-empty list always has a positive size.
      private[this] def bucketCountsSerializedSize = {
        if (__bucketCountsSerializedSizeField == 0) __bucketCountsSerializedSizeField = {
          var __s: _root_.scala.Int = 0
          bucketCounts.foreach(__i => __s += _root_.com.google.protobuf.CodedOutputStream.computeUInt64SizeNoTag(__i))
          __s
        }
        __bucketCountsSerializedSizeField
      }
      @transient private[this] var __bucketCountsSerializedSizeField: _root_.scala.Int = 0
      // Memoized total wire size; 0 = "not yet computed" (see serializedSize).
      @transient
      private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
      // Wire size: sint32 offset (skipped at proto3 default 0) plus the
      // packed bucket_counts field (tag byte + length varint + payload).
      private[this] def __computeSerializedSize(): _root_.scala.Int = {
        var __size = 0
        
        {
          val __value = offset
          if (__value != 0) {
            __size += _root_.com.google.protobuf.CodedOutputStream.computeSInt32Size(1, __value)
          }
        };
        if (bucketCounts.nonEmpty) {
          val __localsize = bucketCountsSerializedSize
          __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__localsize) + __localsize
        }
        __size += unknownFields.serializedSize
        __size
      }
      // Memoizing wrapper: caches __computeSerializedSize() + 1 so a genuine
      // size of 0 is distinguishable from the unset sentinel.
      override def serializedSize: _root_.scala.Int = {
        var __size = __serializedSizeMemoized
        if (__size == 0) {
          __size = __computeSerializedSize() + 1
          __serializedSizeMemoized = __size
        }
        __size - 1
        
      }
      // Serializes in field order: optional sint32 offset, then bucket_counts
      // as a single packed (wire type 2) field of uint64 varints.
      def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
        {
          val __v = offset
          if (__v != 0) {
            _output__.writeSInt32(1, __v)
          }
        };
        if (bucketCounts.nonEmpty) {
          _output__.writeTag(2, 2)
          _output__.writeUInt32NoTag(bucketCountsSerializedSize)
          bucketCounts.foreach(_output__.writeUInt64NoTag)
        };
        unknownFields.writeTo(_output__)
      }
      // Immutable builder-style copies.
      def withOffset(__v: _root_.scala.Int): Buckets = copy(offset = __v)
      def clearBucketCounts = copy(bucketCounts = _root_.scala.Seq.empty)
      def addBucketCounts(__vs: _root_.scala.Long *): Buckets = addAllBucketCounts(__vs)
      def addAllBucketCounts(__vs: Iterable[_root_.scala.Long]): Buckets = copy(bucketCounts = bucketCounts ++ __vs)
      def withBucketCounts(__v: _root_.scala.Seq[_root_.scala.Long]): Buckets = copy(bucketCounts = __v)
      def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
      def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
      // Reflection accessor: raw value by field number; offset at its proto3
      // default (0) yields null ("unset").
      def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
        (__fieldNumber: @_root_.scala.unchecked) match {
          case 1 => {
            val __t = offset
            if (__t != 0) __t else null
          }
          case 2 => bucketCounts
        }
      }
      // Descriptor-based reflection accessor (PValue representation).
      def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
        _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
        (__field.number: @_root_.scala.unchecked) match {
          case 1 => _root_.scalapb.descriptors.PInt(offset)
          case 2 => _root_.scalapb.descriptors.PRepeated(bucketCounts.iterator.map(_root_.scalapb.descriptors.PLong(_)).toVector)
        }
      }
      def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
      def companion: io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets.type = io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets
      // @@protoc_insertion_point(GeneratedMessage[opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets])
  }
  
  object Buckets extends scalapb.GeneratedMessageCompanion[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets] {
    implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets] = this
    def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets = {
      var __offset: _root_.scala.Int = 0
      val __bucketCounts: _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Long] = new _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Long]
      var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
      var _done__ = false
      while (!_done__) {
        val _tag__ = _input__.readTag()
        _tag__ match {
          case 0 => _done__ = true
          case 8 =>
            __offset = _input__.readSInt32()
          case 16 =>
            __bucketCounts += _input__.readUInt64()
          case 18 => {
            val length = _input__.readRawVarint32()
            val oldLimit = _input__.pushLimit(length)
            while (_input__.getBytesUntilLimit > 0) {
              __bucketCounts += _input__.readUInt64()
            }
            _input__.popLimit(oldLimit)
          }
          case tag =>
            if (_unknownFields__ == null) {
              _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
            }
            _unknownFields__.parseField(tag, _input__)
        }
      }
      io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets(
          offset = __offset,
          bucketCounts = __bucketCounts.result(),
          unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
      )
    }
    implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets] = _root_.scalapb.descriptors.Reads{
      case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
        _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
        io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets(
          offset = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Int]).getOrElse(0),
          bucketCounts = __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).map(_.as[_root_.scala.Seq[_root_.scala.Long]]).getOrElse(_root_.scala.Seq.empty)
        )
      case _ => throw new RuntimeException("Expected PMessage")
    }
    def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.javaDescriptor.getNestedTypes().get(0)
    def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.scalaDescriptor.nestedMessages(0)
    def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = throw new MatchError(__number)
    lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
    def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
    // Canonical default instance: zero offset and an empty bucket-counts list
    // (the proto3 defaults for both fields).
    lazy val defaultInstance = io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets(0, _root_.scala.Seq.empty)
    /** Lens support for [[Buckets]]: composable getters/setters for each field,
      * usable from any enclosing type `UpperPB` that can focus on a `Buckets` value.
      */
    implicit class BucketsLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets](_l) {
      /** Lens over the `offset` field. */
      def offset: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Int] = field(_.offset)((msg, value) => msg.copy(offset = value))
      /** Lens over the repeated `bucket_counts` field. */
      def bucketCounts: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[_root_.scala.Long]] = field(_.bucketCounts)((msg, value) => msg.copy(bucketCounts = value))
    }
    // Protobuf wire field number for `offset`.
    final val OFFSET_FIELD_NUMBER = 1
    // Protobuf wire field number for `bucket_counts`.
    final val BUCKET_COUNTS_FIELD_NUMBER = 2
    /** Convenience constructor for [[Buckets]].
      *
      * @param offset bucket index offset (may be negative)
      * @param bucketCounts counts per bucket, starting at `offset`
      */
    def of(
      offset: _root_.scala.Int,
      bucketCounts: _root_.scala.Seq[_root_.scala.Long]
    ): _root_.io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets = _root_.io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets(
      offset = offset,
      bucketCounts = bucketCounts
    )
    // @@protoc_insertion_point(GeneratedMessageCompanion[opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets])
  }
  
  /** Lens support for [[ExponentialHistogramDataPoint]]: composable getters/setters
    * for every field. Optional fields get two lenses each — a plain lens that views
    * the unwrapped value (using the `getX` accessor and wrapping on set) and an
    * `optionalX` lens over the underlying `Option`.
    */
  implicit class ExponentialHistogramDataPointLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint](_l) {
    def attributes: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue]] = field(_.attributes)((msg, value) => msg.copy(attributes = value))
    def startTimeUnixNano: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.startTimeUnixNano)((msg, value) => msg.copy(startTimeUnixNano = value))
    def timeUnixNano: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.timeUnixNano)((msg, value) => msg.copy(timeUnixNano = value))
    def count: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.count)((msg, value) => msg.copy(count = value))
    // Unwrapped view of the optional `sum`; setting always wraps in Some.
    def sum: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Double] = field(_.getSum)((msg, value) => msg.copy(sum = Option(value)))
    def optionalSum: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Double]] = field(_.sum)((msg, value) => msg.copy(sum = value))
    def scale: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Int] = field(_.scale)((msg, value) => msg.copy(scale = value))
    def zeroCount: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.zeroCount)((msg, value) => msg.copy(zeroCount = value))
    // Unwrapped view of the optional `positive` buckets; setting always wraps in Some.
    def positive: _root_.scalapb.lenses.Lens[UpperPB, io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets] = field(_.getPositive)((msg, value) => msg.copy(positive = Option(value)))
    def optionalPositive: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets]] = field(_.positive)((msg, value) => msg.copy(positive = value))
    // Unwrapped view of the optional `negative` buckets; setting always wraps in Some.
    def negative: _root_.scalapb.lenses.Lens[UpperPB, io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets] = field(_.getNegative)((msg, value) => msg.copy(negative = Option(value)))
    def optionalNegative: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets]] = field(_.negative)((msg, value) => msg.copy(negative = value))
    def flags: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Int] = field(_.flags)((msg, value) => msg.copy(flags = value))
    def exemplars: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar]] = field(_.exemplars)((msg, value) => msg.copy(exemplars = value))
    // Unwrapped view of the optional `min`; setting always wraps in Some.
    def min: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Double] = field(_.getMin)((msg, value) => msg.copy(min = Option(value)))
    def optionalMin: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Double]] = field(_.min)((msg, value) => msg.copy(min = value))
    // Unwrapped view of the optional `max`; setting always wraps in Some.
    def max: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Double] = field(_.getMax)((msg, value) => msg.copy(max = Option(value)))
    def optionalMax: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Double]] = field(_.max)((msg, value) => msg.copy(max = value))
  }
  // Protobuf wire field numbers for ExponentialHistogramDataPoint.
  final val ATTRIBUTES_FIELD_NUMBER = 1
  final val START_TIME_UNIX_NANO_FIELD_NUMBER = 2
  final val TIME_UNIX_NANO_FIELD_NUMBER = 3
  final val COUNT_FIELD_NUMBER = 4
  final val SUM_FIELD_NUMBER = 5
  final val SCALE_FIELD_NUMBER = 6
  final val ZERO_COUNT_FIELD_NUMBER = 7
  final val POSITIVE_FIELD_NUMBER = 8
  final val NEGATIVE_FIELD_NUMBER = 9
  final val FLAGS_FIELD_NUMBER = 10
  final val EXEMPLARS_FIELD_NUMBER = 11
  final val MIN_FIELD_NUMBER = 12
  final val MAX_FIELD_NUMBER = 13
  /** Convenience constructor for [[ExponentialHistogramDataPoint]].
    *
    * Forwards every argument to the case-class constructor by name; optional
    * fields (`sum`, `positive`, `negative`, `min`, `max`) are passed through
    * as `Option`s unchanged.
    */
  def of(
    attributes: _root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue],
    startTimeUnixNano: _root_.scala.Long,
    timeUnixNano: _root_.scala.Long,
    count: _root_.scala.Long,
    sum: _root_.scala.Option[_root_.scala.Double],
    scale: _root_.scala.Int,
    zeroCount: _root_.scala.Long,
    positive: _root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets],
    negative: _root_.scala.Option[io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets],
    flags: _root_.scala.Int,
    exemplars: _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar],
    min: _root_.scala.Option[_root_.scala.Double],
    max: _root_.scala.Option[_root_.scala.Double]
  ): _root_.io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint = _root_.io.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint(
    attributes = attributes,
    startTimeUnixNano = startTimeUnixNano,
    timeUnixNano = timeUnixNano,
    count = count,
    sum = sum,
    scale = scale,
    zeroCount = zeroCount,
    positive = positive,
    negative = negative,
    flags = flags,
    exemplars = exemplars,
    min = min,
    max = max
  )
  // @@protoc_insertion_point(GeneratedMessageCompanion[opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint])
}




© 2015 - 2025 Weber Informatics LLC | Privacy Policy