All Downloads are FREE. Search and download functionalities are using the official Maven repository.

io.opencensus.proto.stats.v1.DistributionAggregation.scala Maven / Gradle / Ivy

There is a newer version: 1.23.0-dev-f04150-1
Show newest version
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3

package io.opencensus.proto.stats.v1

/** @param bucketBounds
  *   A Distribution may optionally contain a histogram of the values in the
  *   population. The bucket boundaries for that histogram are described by
  *   `bucket_bounds`. This defines `size(bucket_bounds) + 1` (= N)
  *   buckets. The boundaries for bucket index i are:
  *  
  *   (-infinity, bucket_bounds[i]) for i == 0
  *   [bucket_bounds[i-1], bucket_bounds[i]) for 0 < i < N-2
  *   [bucket_bounds[i-1], +infinity) for i == N-1
  *  
  *   (NOTE(review): the middle range above mirrors the .proto source text;
  *   it presumably means 0 < i < N-1, i.e. buckets 1 through N-2 — confirm
  *   against the upstream opencensus stats.proto.)
  *  
  *   i.e. an underflow bucket (number 0), zero or more finite buckets (1
  *   through N - 2), and an overflow bucket (N - 1), with inclusive lower
  *   bounds and exclusive upper bounds.
  *  
  *   If `bucket_bounds` has no elements (zero size), then there is no
  *   histogram associated with the Distribution. If `bucket_bounds` has only
  *   one element, there are no finite buckets, and that single element is the
  *   common boundary of the overflow and underflow buckets. The values must
  *   be monotonically increasing.
  */
@SerialVersionUID(0L)
final case class DistributionAggregation(
    bucketBounds: _root_.scala.Seq[_root_.scala.Double] = _root_.scala.Seq.empty,
    unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
    ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[DistributionAggregation] {
    // Payload size of the packed repeated-double field: each double is a
    // fixed 8-byte (fixed64) value on the wire, so no per-element varint math
    // is needed.
    private[this] def bucketBoundsSerializedSize = {
      8 * bucketBounds.size
    }
    // Cached serialized size. 0 means "not computed yet"; the cached value is
    // stored as (real size + 1) so that a genuine size of 0 is
    // distinguishable from the uncached state (see serializedSize below).
    // @transient: the cache is not part of Java serialization state.
    @transient
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      if (bucketBounds.nonEmpty) {
        // Packed encoding: 1 byte for the tag (field 1, wire type 2) plus a
        // varint length prefix plus the raw 8-byte doubles.
        val __localsize = bucketBoundsSerializedSize
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__localsize) + __localsize
      }
      __size += unknownFields.serializedSize
      __size
    }
    override def serializedSize: _root_.scala.Int = {
      // Lazily compute and memoize; the +1/-1 offset keeps 0 available as the
      // "uncached" sentinel. Benign race: concurrent callers may each compute
      // the size, but all arrive at the same value.
      var __size = __serializedSizeMemoized
      if (__size == 0) {
        __size = __computeSerializedSize() + 1
        __serializedSizeMemoized = __size
      }
      __size - 1
      
    }
    def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
      if (bucketBounds.nonEmpty) {
        // writeTag(1, 2): field number 1, wire type 2 (length-delimited) —
        // proto3 packed encoding for the repeated double field.
        _output__.writeTag(1, 2)
        _output__.writeUInt32NoTag(bucketBoundsSerializedSize)
        bucketBounds.foreach(_output__.writeDoubleNoTag)
      };
      unknownFields.writeTo(_output__)
    }
    // Immutable field updaters (each returns a modified copy of this message).
    def clearBucketBounds = copy(bucketBounds = _root_.scala.Seq.empty)
    def addBucketBounds(__vs: _root_.scala.Double *): DistributionAggregation = addAllBucketBounds(__vs)
    def addAllBucketBounds(__vs: Iterable[_root_.scala.Double]): DistributionAggregation = copy(bucketBounds = bucketBounds ++ __vs)
    def withBucketBounds(__v: _root_.scala.Seq[_root_.scala.Double]): DistributionAggregation = copy(bucketBounds = __v)
    def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
    def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
    // Reflection accessor used by the scalapb runtime; field 1 is the only
    // declared field, so the match is intentionally non-exhaustive
    // (@unchecked) for any other number.
    def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
      (__fieldNumber: @_root_.scala.unchecked) match {
        case 1 => bucketBounds
      }
    }
    // Descriptor-based accessor returning the field as a scalapb PValue.
    def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
      _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
      (__field.number: @_root_.scala.unchecked) match {
        case 1 => _root_.scalapb.descriptors.PRepeated(bucketBounds.iterator.map(_root_.scalapb.descriptors.PDouble(_)).toVector)
      }
    }
    def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
    def companion: io.opencensus.proto.stats.v1.DistributionAggregation.type = io.opencensus.proto.stats.v1.DistributionAggregation
    // @@protoc_insertion_point(GeneratedMessage[opencensus.proto.stats.v1.DistributionAggregation])
}

/** Companion/message companion for [[DistributionAggregation]]: binary and
  * descriptor-based parsing, descriptors, the default instance, and lenses.
  */
object DistributionAggregation extends scalapb.GeneratedMessageCompanion[io.opencensus.proto.stats.v1.DistributionAggregation] {
  implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opencensus.proto.stats.v1.DistributionAggregation] = this
  /** Parses one message from the protobuf binary wire format.
    *
    * Accepts the repeated double field in both encodings, as proto3 parsers
    * must: tag 9 (field 1, wire type 1 = fixed64) for unpacked elements and
    * tag 10 (field 1, wire type 2 = length-delimited) for packed runs.
    * Unrecognized tags are preserved in `unknownFields`.
    */
  def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opencensus.proto.stats.v1.DistributionAggregation = {
    val __bucketBounds: _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Double] = new _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Double]
    var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
    var _done__ = false
    while (!_done__) {
      val _tag__ = _input__.readTag()
      _tag__ match {
        case 0 => _done__ = true  // tag 0 signals end of input
        case 9 =>
          // Unpacked: a single 8-byte double.
          __bucketBounds += _input__.readDouble()
        case 10 => {
          // Packed: a length-prefixed run of 8-byte doubles. pushLimit scopes
          // reading to this run; popLimit restores the outer limit.
          val length = _input__.readRawVarint32()
          val oldLimit = _input__.pushLimit(length)
          while (_input__.getBytesUntilLimit > 0) {
            __bucketBounds += _input__.readDouble()
          }
          _input__.popLimit(oldLimit)
        }
        case tag =>
          // Unknown field: retain it so round-tripping is lossless.
          if (_unknownFields__ == null) {
            _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
          }
          _unknownFields__.parseField(tag, _input__)
      }
    }
    io.opencensus.proto.stats.v1.DistributionAggregation(
        bucketBounds = __bucketBounds.result(),
        unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
    )
  }
  // Builds a message from scalapb's descriptor-based PMessage representation
  // (used e.g. by JSON/text conversions).
  implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opencensus.proto.stats.v1.DistributionAggregation] = _root_.scalapb.descriptors.Reads{
    case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
      _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
      io.opencensus.proto.stats.v1.DistributionAggregation(
        bucketBounds = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Seq[_root_.scala.Double]]).getOrElse(_root_.scala.Seq.empty)
      )
    case _ => throw new RuntimeException("Expected PMessage")
  }
  // This message is declared at index 6 of the file's message list in
  // StatsProto (generated; the index must match the .proto declaration order).
  def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = StatsProto.javaDescriptor.getMessageTypes().get(6)
  def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = StatsProto.scalaDescriptor.messages(6)
  // No message- or enum-typed fields exist, so these lookups are always errors.
  def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = throw new MatchError(__number)
  lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
  def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
  lazy val defaultInstance = io.opencensus.proto.stats.v1.DistributionAggregation(
    bucketBounds = _root_.scala.Seq.empty
  )
  /** Lens support for composing updates to nested `DistributionAggregation`
    * fields inside a larger message of type `UpperPB`.
    */
  implicit class DistributionAggregationLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.stats.v1.DistributionAggregation]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opencensus.proto.stats.v1.DistributionAggregation](_l) {
    def bucketBounds: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[_root_.scala.Double]] = field(_.bucketBounds)((c_, f_) => c_.copy(bucketBounds = f_))
  }
  final val BUCKET_BOUNDS_FIELD_NUMBER = 1
  /** Convenience constructor mirroring the case-class `apply`. */
  def of(
    bucketBounds: _root_.scala.Seq[_root_.scala.Double]
  ): _root_.io.opencensus.proto.stats.v1.DistributionAggregation = _root_.io.opencensus.proto.stats.v1.DistributionAggregation(
    bucketBounds
  )
  // @@protoc_insertion_point(GeneratedMessageCompanion[opencensus.proto.stats.v1.DistributionAggregation])
}




© 2015 - 2025 Weber Informatics LLC | Privacy Policy