io.opentelemetry.proto.metrics.v1.HistogramDataPoint.scala
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3

package io.opentelemetry.proto.metrics.v1

/** HistogramDataPoint is a single data point in a timeseries that describes the
  * time-varying values of a Histogram. A Histogram contains summary statistics
  * for a population of values; it may optionally contain the distribution of
  * those values across a set of buckets.
  *
  * If the histogram contains the distribution of values, then both
  * "explicit_bounds" and "bucket counts" fields must be defined.
  * If the histogram does not contain the distribution of values, then both
  * "explicit_bounds" and "bucket_counts" must be omitted and only "count" and
  * "sum" are known.
  *
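  *   A minimal construction sketch (illustrative values only; the two variants
  *   mirror the "with distribution" and "count/sum only" cases above):
  *   {{{
  *   val withDistribution = HistogramDataPoint(
  *     count = 10L,
  *     sum = Some(42.5),
  *     bucketCounts = Seq(2L, 7L, 1L),
  *     explicitBounds = Seq(1.0, 5.0)
  *   )
  *   val countAndSumOnly = HistogramDataPoint(count = 10L, sum = Some(42.5))
  *   }}}
  *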
  * @param attributes
  *   The set of key/value pairs that uniquely identify the timeseries to
  *   which this point belongs. The list may be empty (may contain 0 elements).
  *   Attribute keys MUST be unique (it is not allowed to have more than one
  *   attribute with the same key).
  * @param startTimeUnixNano
  *   StartTimeUnixNano is optional but strongly encouraged; see the
  *   detailed comments above Metric.
  *  
  *   Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
  *   1970.
  * @param timeUnixNano
  *   TimeUnixNano is required; see the detailed comments above Metric.
  *  
  *   Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
  *   1970.
  * @param count
  *   count is the number of values in the population. Must be non-negative.
  *   This value must be equal to the sum of the "bucket_counts" values if a
  *   histogram is provided.
  * @param sum
  *   sum of the values in the population. If count is zero then this field
  *   must be zero.
  *  
  *   Note: Sum should only be filled out when measuring non-negative discrete
  *   events, and is assumed to be monotonic over the values of these events.
  *   Negative events *can* be recorded, but sum should not be filled out when
  *   doing so. This is specifically to enforce compatibility with OpenMetrics,
  *   see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#histogram
  * @param bucketCounts
  *   bucket_counts is an optional field that contains the count values of the
  *   histogram for each bucket.
  *  
  *   The sum of the bucket_counts must equal the value in the count field.
  *  
  *   The number of elements in the bucket_counts array must be one greater
  *   than the number of elements in the explicit_bounds array.
  * @param explicitBounds
  *   explicit_bounds specifies buckets with explicitly defined bounds for values.
  *  
  *   The boundaries for the bucket at index i are:
  *  
  *   (-infinity, explicit_bounds[i]] for i == 0
  *   (explicit_bounds[i-1], explicit_bounds[i]] for 0 < i < size(explicit_bounds)
  *   (explicit_bounds[i-1], +infinity) for i == size(explicit_bounds)
  *  
  *   The values in the explicit_bounds array must be strictly increasing.
  *  
  *   Histogram buckets are inclusive of their upper boundary, except the last
  *   bucket where the boundary is at infinity. This format is intentionally
  *   compatible with the OpenMetrics histogram definition.
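  *  
  *   For example (illustrative values): explicit_bounds = [1.0, 5.0] defines
  *   three buckets, (-inf, 1.0], (1.0, 5.0], (5.0, +inf), so bucket_counts
  *   must contain exactly 3 elements and their sum must equal count.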
  * @param exemplars
  *   (Optional) List of exemplars collected from
  *   measurements that were used to form the data point.
  * @param flags
  *   Flags that apply to this specific data point.  See DataPointFlags
  *   for the available flags and their meaning.
  * @param min
  *   min is the minimum value over (start_time, end_time].
  * @param max
  *   max is the maximum value over (start_time, end_time].
  */
@SerialVersionUID(0L)
final case class HistogramDataPoint(
    attributes: _root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue] = _root_.scala.Seq.empty,
    startTimeUnixNano: _root_.scala.Long = 0L,
    timeUnixNano: _root_.scala.Long = 0L,
    count: _root_.scala.Long = 0L,
    sum: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None,
    bucketCounts: _root_.scala.Seq[_root_.scala.Long] = _root_.scala.Seq.empty,
    explicitBounds: _root_.scala.Seq[_root_.scala.Double] = _root_.scala.Seq.empty,
    exemplars: _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar] = _root_.scala.Seq.empty,
    flags: _root_.scala.Int = 0,
    min: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None,
    max: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None,
    unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
    ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[HistogramDataPoint] {
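    // Packed repeated fixed64/double fields: every element is exactly 8 bytes
    // on the wire, so the packed payload size is 8 * number-of-elements.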
    private[this] def bucketCountsSerializedSize = {
      8 * bucketCounts.size
    }
    private[this] def explicitBoundsSerializedSize = {
      8 * explicitBounds.size
    }
    @transient
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      attributes.foreach { __item =>
        val __value = __item
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
      
      {
        val __value = startTimeUnixNano
        if (__value != 0L) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeFixed64Size(2, __value)
        }
      };
      
      {
        val __value = timeUnixNano
        if (__value != 0L) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeFixed64Size(3, __value)
        }
      };
      
      {
        val __value = count
        if (__value != 0L) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeFixed64Size(4, __value)
        }
      };
      if (sum.isDefined) {
        val __value = sum.get
        __size += _root_.com.google.protobuf.CodedOutputStream.computeDoubleSize(5, __value)
      };
      if (bucketCounts.nonEmpty) {
        val __localsize = bucketCountsSerializedSize
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__localsize) + __localsize
      }
      if (explicitBounds.nonEmpty) {
        val __localsize = explicitBoundsSerializedSize
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__localsize) + __localsize
      }
      exemplars.foreach { __item =>
        val __value = __item
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
      
      {
        val __value = flags
        if (__value != 0) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeUInt32Size(10, __value)
        }
      };
      if (min.isDefined) {
        val __value = min.get
        __size += _root_.com.google.protobuf.CodedOutputStream.computeDoubleSize(11, __value)
      };
      if (max.isDefined) {
        val __value = max.get
        __size += _root_.com.google.protobuf.CodedOutputStream.computeDoubleSize(12, __value)
      };
      __size += unknownFields.serializedSize
      __size
    }
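    // The memoized field stores serializedSize + 1 so that 0 can act as the
    // "not yet computed" sentinel (an empty message has serialized size 0).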
    override def serializedSize: _root_.scala.Int = {
      var __size = __serializedSizeMemoized
      if (__size == 0) {
        __size = __computeSerializedSize() + 1
        __serializedSizeMemoized = __size
      }
      __size - 1
      
    }
    def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
      {
        val __v = startTimeUnixNano
        if (__v != 0L) {
          _output__.writeFixed64(2, __v)
        }
      };
      {
        val __v = timeUnixNano
        if (__v != 0L) {
          _output__.writeFixed64(3, __v)
        }
      };
      {
        val __v = count
        if (__v != 0L) {
          _output__.writeFixed64(4, __v)
        }
      };
      sum.foreach { __v =>
        val __m = __v
        _output__.writeDouble(5, __m)
      };
      if (bucketCounts.nonEmpty) {
        _output__.writeTag(6, 2)
        _output__.writeUInt32NoTag(bucketCountsSerializedSize)
        bucketCounts.foreach(_output__.writeFixed64NoTag)
      };
      if (explicitBounds.nonEmpty) {
        _output__.writeTag(7, 2)
        _output__.writeUInt32NoTag(explicitBoundsSerializedSize)
        explicitBounds.foreach(_output__.writeDoubleNoTag)
      };
      exemplars.foreach { __v =>
        val __m = __v
        _output__.writeTag(8, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      attributes.foreach { __v =>
        val __m = __v
        _output__.writeTag(9, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      {
        val __v = flags
        if (__v != 0) {
          _output__.writeUInt32(10, __v)
        }
      };
      min.foreach { __v =>
        val __m = __v
        _output__.writeDouble(11, __m)
      };
      max.foreach { __v =>
        val __m = __v
        _output__.writeDouble(12, __m)
      };
      unknownFields.writeTo(_output__)
    }
    def clearAttributes = copy(attributes = _root_.scala.Seq.empty)
    def addAttributes(__vs: io.opentelemetry.proto.common.v1.KeyValue *): HistogramDataPoint = addAllAttributes(__vs)
    def addAllAttributes(__vs: Iterable[io.opentelemetry.proto.common.v1.KeyValue]): HistogramDataPoint = copy(attributes = attributes ++ __vs)
    def withAttributes(__v: _root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue]): HistogramDataPoint = copy(attributes = __v)
    def withStartTimeUnixNano(__v: _root_.scala.Long): HistogramDataPoint = copy(startTimeUnixNano = __v)
    def withTimeUnixNano(__v: _root_.scala.Long): HistogramDataPoint = copy(timeUnixNano = __v)
    def withCount(__v: _root_.scala.Long): HistogramDataPoint = copy(count = __v)
    def getSum: _root_.scala.Double = sum.getOrElse(0.0)
    def clearSum: HistogramDataPoint = copy(sum = _root_.scala.None)
    def withSum(__v: _root_.scala.Double): HistogramDataPoint = copy(sum = Option(__v))
    def clearBucketCounts = copy(bucketCounts = _root_.scala.Seq.empty)
    def addBucketCounts(__vs: _root_.scala.Long *): HistogramDataPoint = addAllBucketCounts(__vs)
    def addAllBucketCounts(__vs: Iterable[_root_.scala.Long]): HistogramDataPoint = copy(bucketCounts = bucketCounts ++ __vs)
    def withBucketCounts(__v: _root_.scala.Seq[_root_.scala.Long]): HistogramDataPoint = copy(bucketCounts = __v)
    def clearExplicitBounds = copy(explicitBounds = _root_.scala.Seq.empty)
    def addExplicitBounds(__vs: _root_.scala.Double *): HistogramDataPoint = addAllExplicitBounds(__vs)
    def addAllExplicitBounds(__vs: Iterable[_root_.scala.Double]): HistogramDataPoint = copy(explicitBounds = explicitBounds ++ __vs)
    def withExplicitBounds(__v: _root_.scala.Seq[_root_.scala.Double]): HistogramDataPoint = copy(explicitBounds = __v)
    def clearExemplars = copy(exemplars = _root_.scala.Seq.empty)
    def addExemplars(__vs: io.opentelemetry.proto.metrics.v1.Exemplar *): HistogramDataPoint = addAllExemplars(__vs)
    def addAllExemplars(__vs: Iterable[io.opentelemetry.proto.metrics.v1.Exemplar]): HistogramDataPoint = copy(exemplars = exemplars ++ __vs)
    def withExemplars(__v: _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar]): HistogramDataPoint = copy(exemplars = __v)
    def withFlags(__v: _root_.scala.Int): HistogramDataPoint = copy(flags = __v)
    def getMin: _root_.scala.Double = min.getOrElse(0.0)
    def clearMin: HistogramDataPoint = copy(min = _root_.scala.None)
    def withMin(__v: _root_.scala.Double): HistogramDataPoint = copy(min = Option(__v))
    def getMax: _root_.scala.Double = max.getOrElse(0.0)
    def clearMax: HistogramDataPoint = copy(max = _root_.scala.None)
    def withMax(__v: _root_.scala.Double): HistogramDataPoint = copy(max = Option(__v))
    def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
    def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
    def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
      (__fieldNumber: @_root_.scala.unchecked) match {
        case 9 => attributes
        case 2 => {
          val __t = startTimeUnixNano
          if (__t != 0L) __t else null
        }
        case 3 => {
          val __t = timeUnixNano
          if (__t != 0L) __t else null
        }
        case 4 => {
          val __t = count
          if (__t != 0L) __t else null
        }
        case 5 => sum.orNull
        case 6 => bucketCounts
        case 7 => explicitBounds
        case 8 => exemplars
        case 10 => {
          val __t = flags
          if (__t != 0) __t else null
        }
        case 11 => min.orNull
        case 12 => max.orNull
      }
    }
    def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
      _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
      (__field.number: @_root_.scala.unchecked) match {
        case 9 => _root_.scalapb.descriptors.PRepeated(attributes.iterator.map(_.toPMessage).toVector)
        case 2 => _root_.scalapb.descriptors.PLong(startTimeUnixNano)
        case 3 => _root_.scalapb.descriptors.PLong(timeUnixNano)
        case 4 => _root_.scalapb.descriptors.PLong(count)
        case 5 => sum.map(_root_.scalapb.descriptors.PDouble(_)).getOrElse(_root_.scalapb.descriptors.PEmpty)
        case 6 => _root_.scalapb.descriptors.PRepeated(bucketCounts.iterator.map(_root_.scalapb.descriptors.PLong(_)).toVector)
        case 7 => _root_.scalapb.descriptors.PRepeated(explicitBounds.iterator.map(_root_.scalapb.descriptors.PDouble(_)).toVector)
        case 8 => _root_.scalapb.descriptors.PRepeated(exemplars.iterator.map(_.toPMessage).toVector)
        case 10 => _root_.scalapb.descriptors.PInt(flags)
        case 11 => min.map(_root_.scalapb.descriptors.PDouble(_)).getOrElse(_root_.scalapb.descriptors.PEmpty)
        case 12 => max.map(_root_.scalapb.descriptors.PDouble(_)).getOrElse(_root_.scalapb.descriptors.PEmpty)
      }
    }
    def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
    def companion: io.opentelemetry.proto.metrics.v1.HistogramDataPoint.type = io.opentelemetry.proto.metrics.v1.HistogramDataPoint
    // @@protoc_insertion_point(GeneratedMessage[opentelemetry.proto.metrics.v1.HistogramDataPoint])
}

object HistogramDataPoint extends scalapb.GeneratedMessageCompanion[io.opentelemetry.proto.metrics.v1.HistogramDataPoint] {
  implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opentelemetry.proto.metrics.v1.HistogramDataPoint] = this
  def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opentelemetry.proto.metrics.v1.HistogramDataPoint = {
    val __attributes: _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.common.v1.KeyValue] = new _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.common.v1.KeyValue]
    var __startTimeUnixNano: _root_.scala.Long = 0L
    var __timeUnixNano: _root_.scala.Long = 0L
    var __count: _root_.scala.Long = 0L
    var __sum: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None
    val __bucketCounts: _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Long] = new _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Long]
    val __explicitBounds: _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Double] = new _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Double]
    val __exemplars: _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.metrics.v1.Exemplar] = new _root_.scala.collection.immutable.VectorBuilder[io.opentelemetry.proto.metrics.v1.Exemplar]
    var __flags: _root_.scala.Int = 0
    var __min: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None
    var __max: _root_.scala.Option[_root_.scala.Double] = _root_.scala.None
    var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
    var _done__ = false
    while (!_done__) {
      val _tag__ = _input__.readTag()
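      // Protobuf wire tag = (field_number << 3) | wire_type; e.g. 74 = (9 << 3) | 2
      // selects field 9 (attributes) with wire type 2 (length-delimited).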
      _tag__ match {
        case 0 => _done__ = true
        case 74 =>
          __attributes += _root_.scalapb.LiteParser.readMessage[io.opentelemetry.proto.common.v1.KeyValue](_input__)
        case 17 =>
          __startTimeUnixNano = _input__.readFixed64()
        case 25 =>
          __timeUnixNano = _input__.readFixed64()
        case 33 =>
          __count = _input__.readFixed64()
        case 41 =>
          __sum = Option(_input__.readDouble())
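        // Repeated fixed64 field 6 may arrive unpacked (tag 49, one value per
        // tag) or packed (tag 50, a length-delimited run of 8-byte values);
        // field 7 (tags 57/58) is handled the same way for doubles.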
        case 49 =>
          __bucketCounts += _input__.readFixed64()
        case 50 => {
          val length = _input__.readRawVarint32()
          val oldLimit = _input__.pushLimit(length)
          while (_input__.getBytesUntilLimit > 0) {
            __bucketCounts += _input__.readFixed64()
          }
          _input__.popLimit(oldLimit)
        }
        case 57 =>
          __explicitBounds += _input__.readDouble()
        case 58 => {
          val length = _input__.readRawVarint32()
          val oldLimit = _input__.pushLimit(length)
          while (_input__.getBytesUntilLimit > 0) {
            __explicitBounds += _input__.readDouble()
          }
          _input__.popLimit(oldLimit)
        }
        case 66 =>
          __exemplars += _root_.scalapb.LiteParser.readMessage[io.opentelemetry.proto.metrics.v1.Exemplar](_input__)
        case 80 =>
          __flags = _input__.readUInt32()
        case 89 =>
          __min = Option(_input__.readDouble())
        case 97 =>
          __max = Option(_input__.readDouble())
        case tag =>
          if (_unknownFields__ == null) {
            _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
          }
          _unknownFields__.parseField(tag, _input__)
      }
    }
    io.opentelemetry.proto.metrics.v1.HistogramDataPoint(
        attributes = __attributes.result(),
        startTimeUnixNano = __startTimeUnixNano,
        timeUnixNano = __timeUnixNano,
        count = __count,
        sum = __sum,
        bucketCounts = __bucketCounts.result(),
        explicitBounds = __explicitBounds.result(),
        exemplars = __exemplars.result(),
        flags = __flags,
        min = __min,
        max = __max,
        unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
    )
  }
  implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opentelemetry.proto.metrics.v1.HistogramDataPoint] = _root_.scalapb.descriptors.Reads{
    case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
      _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
      io.opentelemetry.proto.metrics.v1.HistogramDataPoint(
        attributes = __fieldsMap.get(scalaDescriptor.findFieldByNumber(9).get).map(_.as[_root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue]]).getOrElse(_root_.scala.Seq.empty),
        startTimeUnixNano = __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
        timeUnixNano = __fieldsMap.get(scalaDescriptor.findFieldByNumber(3).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
        count = __fieldsMap.get(scalaDescriptor.findFieldByNumber(4).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
        sum = __fieldsMap.get(scalaDescriptor.findFieldByNumber(5).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Double]]),
        bucketCounts = __fieldsMap.get(scalaDescriptor.findFieldByNumber(6).get).map(_.as[_root_.scala.Seq[_root_.scala.Long]]).getOrElse(_root_.scala.Seq.empty),
        explicitBounds = __fieldsMap.get(scalaDescriptor.findFieldByNumber(7).get).map(_.as[_root_.scala.Seq[_root_.scala.Double]]).getOrElse(_root_.scala.Seq.empty),
        exemplars = __fieldsMap.get(scalaDescriptor.findFieldByNumber(8).get).map(_.as[_root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar]]).getOrElse(_root_.scala.Seq.empty),
        flags = __fieldsMap.get(scalaDescriptor.findFieldByNumber(10).get).map(_.as[_root_.scala.Int]).getOrElse(0),
        min = __fieldsMap.get(scalaDescriptor.findFieldByNumber(11).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Double]]),
        max = __fieldsMap.get(scalaDescriptor.findFieldByNumber(12).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Double]])
      )
    case _ => throw new RuntimeException("Expected PMessage")
  }
  def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = MetricsProto.javaDescriptor.getMessageTypes().get(11)
  def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = MetricsProto.scalaDescriptor.messages(11)
  def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
    var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
    (__number: @_root_.scala.unchecked) match {
      case 9 => __out = io.opentelemetry.proto.common.v1.KeyValue
      case 8 => __out = io.opentelemetry.proto.metrics.v1.Exemplar
    }
    __out
  }
  lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
  def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
  lazy val defaultInstance = io.opentelemetry.proto.metrics.v1.HistogramDataPoint(
    attributes = _root_.scala.Seq.empty,
    startTimeUnixNano = 0L,
    timeUnixNano = 0L,
    count = 0L,
    sum = _root_.scala.None,
    bucketCounts = _root_.scala.Seq.empty,
    explicitBounds = _root_.scala.Seq.empty,
    exemplars = _root_.scala.Seq.empty,
    flags = 0,
    min = _root_.scala.None,
    max = _root_.scala.None
  )
  implicit class HistogramDataPointLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opentelemetry.proto.metrics.v1.HistogramDataPoint]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opentelemetry.proto.metrics.v1.HistogramDataPoint](_l) {
    def attributes: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue]] = field(_.attributes)((c_, f_) => c_.copy(attributes = f_))
    def startTimeUnixNano: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.startTimeUnixNano)((c_, f_) => c_.copy(startTimeUnixNano = f_))
    def timeUnixNano: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.timeUnixNano)((c_, f_) => c_.copy(timeUnixNano = f_))
    def count: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.count)((c_, f_) => c_.copy(count = f_))
    def sum: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Double] = field(_.getSum)((c_, f_) => c_.copy(sum = Option(f_)))
    def optionalSum: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Double]] = field(_.sum)((c_, f_) => c_.copy(sum = f_))
    def bucketCounts: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[_root_.scala.Long]] = field(_.bucketCounts)((c_, f_) => c_.copy(bucketCounts = f_))
    def explicitBounds: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[_root_.scala.Double]] = field(_.explicitBounds)((c_, f_) => c_.copy(explicitBounds = f_))
    def exemplars: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar]] = field(_.exemplars)((c_, f_) => c_.copy(exemplars = f_))
    def flags: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Int] = field(_.flags)((c_, f_) => c_.copy(flags = f_))
    def min: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Double] = field(_.getMin)((c_, f_) => c_.copy(min = Option(f_)))
    def optionalMin: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Double]] = field(_.min)((c_, f_) => c_.copy(min = f_))
    def max: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Double] = field(_.getMax)((c_, f_) => c_.copy(max = Option(f_)))
    def optionalMax: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Double]] = field(_.max)((c_, f_) => c_.copy(max = f_))
  }
  final val ATTRIBUTES_FIELD_NUMBER = 9
  final val START_TIME_UNIX_NANO_FIELD_NUMBER = 2
  final val TIME_UNIX_NANO_FIELD_NUMBER = 3
  final val COUNT_FIELD_NUMBER = 4
  final val SUM_FIELD_NUMBER = 5
  final val BUCKET_COUNTS_FIELD_NUMBER = 6
  final val EXPLICIT_BOUNDS_FIELD_NUMBER = 7
  final val EXEMPLARS_FIELD_NUMBER = 8
  final val FLAGS_FIELD_NUMBER = 10
  final val MIN_FIELD_NUMBER = 11
  final val MAX_FIELD_NUMBER = 12
  def of(
    attributes: _root_.scala.Seq[io.opentelemetry.proto.common.v1.KeyValue],
    startTimeUnixNano: _root_.scala.Long,
    timeUnixNano: _root_.scala.Long,
    count: _root_.scala.Long,
    sum: _root_.scala.Option[_root_.scala.Double],
    bucketCounts: _root_.scala.Seq[_root_.scala.Long],
    explicitBounds: _root_.scala.Seq[_root_.scala.Double],
    exemplars: _root_.scala.Seq[io.opentelemetry.proto.metrics.v1.Exemplar],
    flags: _root_.scala.Int,
    min: _root_.scala.Option[_root_.scala.Double],
    max: _root_.scala.Option[_root_.scala.Double]
  ): _root_.io.opentelemetry.proto.metrics.v1.HistogramDataPoint = _root_.io.opentelemetry.proto.metrics.v1.HistogramDataPoint(
    attributes,
    startTimeUnixNano,
    timeUnixNano,
    count,
    sum,
    bucketCounts,
    explicitBounds,
    exemplars,
    flags,
    min,
    max
  )
  // @@protoc_insertion_point(GeneratedMessageCompanion[opentelemetry.proto.metrics.v1.HistogramDataPoint])
}
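
// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the generated file; all field values
// below are hypothetical). It exercises only API generated above: the case
// class constructor, the lens-based `update`, and the binary round-trip via
// `toByteArray` / `parseFrom`.
// ---------------------------------------------------------------------------
object HistogramDataPointExample {
  def main(args: Array[String]): Unit = {
    // explicit_bounds = [1.0, 5.0] defines 3 buckets: (-inf, 1.0], (1.0, 5.0],
    // (5.0, +inf), so bucketCounts has 3 elements and count equals their sum.
    val point = HistogramDataPoint(
      startTimeUnixNano = 1700000000000000000L, // hypothetical timestamps
      timeUnixNano = 1700000060000000000L,
      count = 10L,
      sum = Some(42.5),
      bucketCounts = Seq(2L, 7L, 1L),
      explicitBounds = Seq(1.0, 5.0)
    )

    // Lens-based update through the generated HistogramDataPointLens.
    val updated = point.update(_.optionalMin := Some(0.1), _.optionalMax := Some(9.9))

    // Round-trip through the protobuf wire format.
    val bytes = updated.toByteArray
    val decoded = HistogramDataPoint.parseFrom(bytes)
    assert(decoded == updated)
  }
}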



