// NOTE(review): The lines below are residue from the Maven-repository web page this
// file was downloaded from; they are not part of the generated source and, left
// uncommented, they break compilation. Preserved here as comments.
// All downloads are FREE. Search and download functionality uses the official Maven repository.
//
// io.opencensus.proto.metrics.v1.DistributionValue.scala — Maven / Gradle / Ivy
//
// There is a newer version: 1.23.0-dev-f04150-1
// Show newest version
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3

package io.opencensus.proto.metrics.v1

/** Distribution contains summary statistics for a population of values. It
  * optionally contains a histogram representing the distribution of those
  * values across a set of buckets.
  *
  * @param count
  *   The number of values in the population. Must be non-negative. This value
  *   must equal the sum of the values in bucket_counts if a histogram is
  *   provided.
  * @param sum
  *   The sum of the values in the population. If count is zero then this field
  *   must be zero.
  * @param sumOfSquaredDeviation
  *   The sum of squared deviations from the mean of the values in the
  *   population. For values x_i this is:
  *  
  *       Sum[i=1..n]((x_i - mean)^2)
  *  
  *   Knuth, "The Art of Computer Programming", Vol. 2, page 323, 3rd edition
  *   describes Welford's method for accumulating this sum in one pass.
  *  
  *   If count is zero then this field must be zero.
  * @param bucketOptions
  *   Don't change bucket boundaries within a TimeSeries if your backend doesn't
  *   support this.
  *   TODO(issue #152): consider not required to send bucket options for
  *   optimization.
  * @param buckets
  *   If the distribution does not have a histogram, then omit this field.
  *   If there is a histogram, then the sum of the values in the Bucket counts
  *   must equal the value in the count field of the distribution.
  */
// ScalaPB-generated message — regenerate from the .proto rather than editing by hand.
@SerialVersionUID(0L)
final case class DistributionValue(
    count: _root_.scala.Long = 0L,
    sum: _root_.scala.Double = 0.0,
    sumOfSquaredDeviation: _root_.scala.Double = 0.0,
    bucketOptions: _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions] = _root_.scala.None,
    buckets: _root_.scala.Seq[io.opencensus.proto.metrics.v1.DistributionValue.Bucket] = _root_.scala.Seq.empty,
    unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
    ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[DistributionValue] {
    // Memoized wire size. @transient so the cache is dropped on Java serialization;
    // 0 is the "not yet computed" sentinel (see the +1/-1 offset in serializedSize).
    @transient
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    // Computes the protobuf-encoded byte size. Proto3 scalar fields equal to
    // their default (0L / 0.0) are omitted from the wire and contribute nothing.
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
      
      {
        val __value = count
        if (__value != 0L) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeInt64Size(1, __value)
        }
      };
      
      {
        val __value = sum
        if (__value != 0.0) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeDoubleSize(2, __value)
        }
      };
      
      {
        val __value = sumOfSquaredDeviation
        if (__value != 0.0) {
          __size += _root_.com.google.protobuf.CodedOutputStream.computeDoubleSize(3, __value)
        }
      };
      // Embedded messages cost: 1 tag byte + varint length prefix + payload.
      if (bucketOptions.isDefined) {
        val __value = bucketOptions.get
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      };
      buckets.foreach { __item =>
        val __value = __item
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
      __size += unknownFields.serializedSize
      __size
    }
    // The size is cached as (actual + 1) so that a genuine size of 0 remains
    // distinguishable from the uninitialized sentinel 0; callers see the actual size.
    override def serializedSize: _root_.scala.Int = {
      var __size = __serializedSizeMemoized
      if (__size == 0) {
        __size = __computeSerializedSize() + 1
        __serializedSizeMemoized = __size
      }
      __size - 1
      
    }
    // Serializes fields in ascending field-number order; writeTag(n, 2) marks the
    // embedded bucket_options (4) and buckets (5) as length-delimited (wire type 2).
    def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
      {
        val __v = count
        if (__v != 0L) {
          _output__.writeInt64(1, __v)
        }
      };
      {
        val __v = sum
        if (__v != 0.0) {
          _output__.writeDouble(2, __v)
        }
      };
      {
        val __v = sumOfSquaredDeviation
        if (__v != 0.0) {
          _output__.writeDouble(3, __v)
        }
      };
      bucketOptions.foreach { __v =>
        val __m = __v
        _output__.writeTag(4, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      buckets.foreach { __v =>
        val __m = __v
        _output__.writeTag(5, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      unknownFields.writeTo(_output__)
    }
    // Immutable "with"/"clear"/"add" helpers — each returns an updated copy.
    def withCount(__v: _root_.scala.Long): DistributionValue = copy(count = __v)
    def withSum(__v: _root_.scala.Double): DistributionValue = copy(sum = __v)
    def withSumOfSquaredDeviation(__v: _root_.scala.Double): DistributionValue = copy(sumOfSquaredDeviation = __v)
    def getBucketOptions: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions = bucketOptions.getOrElse(io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.defaultInstance)
    def clearBucketOptions: DistributionValue = copy(bucketOptions = _root_.scala.None)
    def withBucketOptions(__v: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions): DistributionValue = copy(bucketOptions = Option(__v))
    def clearBuckets = copy(buckets = _root_.scala.Seq.empty)
    def addBuckets(__vs: io.opencensus.proto.metrics.v1.DistributionValue.Bucket *): DistributionValue = addAllBuckets(__vs)
    def addAllBuckets(__vs: Iterable[io.opencensus.proto.metrics.v1.DistributionValue.Bucket]): DistributionValue = copy(buckets = buckets ++ __vs)
    def withBuckets(__v: _root_.scala.Seq[io.opencensus.proto.metrics.v1.DistributionValue.Bucket]): DistributionValue = copy(buckets = __v)
    def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
    def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
    // Reflection accessor keyed by field number; scalar fields at their proto3
    // default are reported as null, per the generated-code convention.
    def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
      (__fieldNumber: @_root_.scala.unchecked) match {
        case 1 => {
          val __t = count
          if (__t != 0L) __t else null
        }
        case 2 => {
          val __t = sum
          if (__t != 0.0) __t else null
        }
        case 3 => {
          val __t = sumOfSquaredDeviation
          if (__t != 0.0) __t else null
        }
        case 4 => bucketOptions.orNull
        case 5 => buckets
      }
    }
    // Descriptor-based accessor returning scalapb PValue wrappers.
    def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
      _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
      (__field.number: @_root_.scala.unchecked) match {
        case 1 => _root_.scalapb.descriptors.PLong(count)
        case 2 => _root_.scalapb.descriptors.PDouble(sum)
        case 3 => _root_.scalapb.descriptors.PDouble(sumOfSquaredDeviation)
        case 4 => bucketOptions.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
        case 5 => _root_.scalapb.descriptors.PRepeated(buckets.iterator.map(_.toPMessage).toVector)
      }
    }
    def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
    def companion: io.opencensus.proto.metrics.v1.DistributionValue.type = io.opencensus.proto.metrics.v1.DistributionValue
    // @@protoc_insertion_point(GeneratedMessage[opencensus.proto.metrics.v1.DistributionValue])
}

object DistributionValue extends scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue] {
  // Makes this companion implicitly available wherever a
  // GeneratedMessageCompanion[DistributionValue] is required (e.g. LiteParser.readMessage).
  implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue] = this
  // Streaming parser: reads tag/value pairs until tag 0 (end of stream / message).
  // Tag encoding is (field_number << 3) | wire_type, hence:
  //   8  = count (1, varint), 17 = sum (2, fixed64),
  //   25 = sum_of_squared_deviation (3, fixed64),
  //   34 = bucket_options (4, length-delimited), 42 = buckets (5, length-delimited).
  def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opencensus.proto.metrics.v1.DistributionValue = {
    var __count: _root_.scala.Long = 0L
    var __sum: _root_.scala.Double = 0.0
    var __sumOfSquaredDeviation: _root_.scala.Double = 0.0
    var __bucketOptions: _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions] = _root_.scala.None
    val __buckets: _root_.scala.collection.immutable.VectorBuilder[io.opencensus.proto.metrics.v1.DistributionValue.Bucket] = new _root_.scala.collection.immutable.VectorBuilder[io.opencensus.proto.metrics.v1.DistributionValue.Bucket]
    var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
    var _done__ = false
    while (!_done__) {
      val _tag__ = _input__.readTag()
      _tag__ match {
        case 0 => _done__ = true
        case 8 =>
          __count = _input__.readInt64()
        case 17 =>
          __sum = _input__.readDouble()
        case 25 =>
          __sumOfSquaredDeviation = _input__.readDouble()
        case 34 =>
          // Repeated occurrences of a singular message field are merged into the
          // existing value (the fold passes the current message to readMessage).
          __bucketOptions = Option(__bucketOptions.fold(_root_.scalapb.LiteParser.readMessage[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions](_input__))(_root_.scalapb.LiteParser.readMessage(_input__, _)))
        case 42 =>
          __buckets += _root_.scalapb.LiteParser.readMessage[io.opencensus.proto.metrics.v1.DistributionValue.Bucket](_input__)
        case tag =>
          // Unrecognized fields are retained so round-tripping is lossless.
          if (_unknownFields__ == null) {
            _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
          }
          _unknownFields__.parseField(tag, _input__)
      }
    }
    io.opencensus.proto.metrics.v1.DistributionValue(
        count = __count,
        sum = __sum,
        sumOfSquaredDeviation = __sumOfSquaredDeviation,
        bucketOptions = __bucketOptions,
        buckets = __buckets.result(),
        unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
    )
  }
  // Builds a DistributionValue from scalapb's descriptor-level PMessage
  // representation (used by text/JSON formats); missing fields fall back to
  // their proto3 defaults.
  implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opencensus.proto.metrics.v1.DistributionValue] = _root_.scalapb.descriptors.Reads{
    case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
      _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
      io.opencensus.proto.metrics.v1.DistributionValue(
        count = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
        sum = __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).map(_.as[_root_.scala.Double]).getOrElse(0.0),
        sumOfSquaredDeviation = __fieldsMap.get(scalaDescriptor.findFieldByNumber(3).get).map(_.as[_root_.scala.Double]).getOrElse(0.0),
        bucketOptions = __fieldsMap.get(scalaDescriptor.findFieldByNumber(4).get).flatMap(_.as[_root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions]]),
        buckets = __fieldsMap.get(scalaDescriptor.findFieldByNumber(5).get).map(_.as[_root_.scala.Seq[io.opencensus.proto.metrics.v1.DistributionValue.Bucket]]).getOrElse(_root_.scala.Seq.empty)
      )
    case _ => throw new RuntimeException("Expected PMessage")
  }
  // Descriptor lookups: DistributionValue is top-level message index 6 in metrics.proto.
  def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = MetricsProto.javaDescriptor.getMessageTypes().get(6)
  def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = MetricsProto.scalaDescriptor.messages(6)
  // Companion lookup for the two embedded-message fields (4: BucketOptions, 5: Bucket).
  def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
    var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
    (__number: @_root_.scala.unchecked) match {
      case 4 => __out = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions
      case 5 => __out = io.opencensus.proto.metrics.v1.DistributionValue.Bucket
    }
    __out
  }
  lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
    Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
      _root_.io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions,
      _root_.io.opencensus.proto.metrics.v1.DistributionValue.Bucket,
      _root_.io.opencensus.proto.metrics.v1.DistributionValue.Exemplar
    )
  // This message declares no enum fields, so any lookup is a programming error.
  def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
  // Instance with every field at its proto3 default value.
  lazy val defaultInstance = io.opencensus.proto.metrics.v1.DistributionValue(
    count = 0L,
    sum = 0.0,
    sumOfSquaredDeviation = 0.0,
    bucketOptions = _root_.scala.None,
    buckets = _root_.scala.Seq.empty
  )
  /** A Distribution may optionally contain a histogram of the values in the
    * population. The bucket boundaries for that histogram are described by
    * BucketOptions.
    *
    * If bucket_options has no type, then there is no histogram associated with
    * the Distribution.
    */
  @SerialVersionUID(0L)
  final case class BucketOptions(
      `type`: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type.Empty,
      unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
      ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[BucketOptions] {
      // Memoized wire size; 0 is the "not yet computed" sentinel (see serializedSize).
      @transient
      private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
      // The `type` oneof only contributes bytes when its `explicit` case is set.
      private[this] def __computeSerializedSize(): _root_.scala.Int = {
        var __size = 0
        if (`type`.explicit.isDefined) {
          val __value = `type`.explicit.get
          __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        };
        __size += unknownFields.serializedSize
        __size
      }
      // Cached as (actual + 1) so a genuine size of 0 differs from the sentinel 0.
      override def serializedSize: _root_.scala.Int = {
        var __size = __serializedSizeMemoized
        if (__size == 0) {
          __size = __computeSerializedSize() + 1
          __serializedSizeMemoized = __size
        }
        __size - 1
        
      }
      // Writes the `explicit` oneof case (field 1, length-delimited) if present.
      def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
        `type`.explicit.foreach { __v =>
          val __m = __v
          _output__.writeTag(1, 2)
          _output__.writeUInt32NoTag(__m.serializedSize)
          __m.writeTo(_output__)
        };
        unknownFields.writeTo(_output__)
      }
      def getExplicit: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit = `type`.explicit.getOrElse(io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit.defaultInstance)
      def withExplicit(__v: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit): BucketOptions = copy(`type` = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type.Explicit(__v))
      def clearType: BucketOptions = copy(`type` = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type.Empty)
      def withType(__v: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type): BucketOptions = copy(`type` = __v)
      def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
      def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
      // Reflection accessor; returns null when the oneof is unset.
      def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
        (__fieldNumber: @_root_.scala.unchecked) match {
          case 1 => `type`.explicit.orNull
        }
      }
      def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
        _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
        (__field.number: @_root_.scala.unchecked) match {
          case 1 => `type`.explicit.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
        }
      }
      def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
      def companion: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.type = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions
      // @@protoc_insertion_point(GeneratedMessage[opencensus.proto.metrics.v1.DistributionValue.BucketOptions])
  }
  
  // Companion for BucketOptions: parsing, descriptors, the `type` oneof ADT,
  // and the nested Explicit message with its own companion.
  object BucketOptions extends scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions] {
    implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions] = this
    // Streaming parser; tag 10 = field 1 (`explicit`), wire type 2 (length-delimited).
    def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions = {
      var __type: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type.Empty
      var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
      var _done__ = false
      while (!_done__) {
        val _tag__ = _input__.readTag()
        _tag__ match {
          case 0 => _done__ = true
          case 10 =>
            // Repeated occurrences merge into the current `explicit` value per protobuf rules.
            __type = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type.Explicit(__type.explicit.fold(_root_.scalapb.LiteParser.readMessage[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit](_input__))(_root_.scalapb.LiteParser.readMessage(_input__, _)))
          case tag =>
            if (_unknownFields__ == null) {
              _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
            }
            _unknownFields__.parseField(tag, _input__)
        }
      }
      io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions(
          `type` = __type,
          unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
      )
    }
    // Descriptor-level reader; an absent field 1 yields the Empty oneof case.
    implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions] = _root_.scalapb.descriptors.Reads{
      case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
        _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
        io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions(
          `type` = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).flatMap(_.as[_root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit]]).map(io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type.Explicit(_))
              .getOrElse(io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type.Empty)
        )
      case _ => throw new RuntimeException("Expected PMessage")
    }
    // BucketOptions is nested type index 0 within DistributionValue.
    def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.javaDescriptor.getNestedTypes().get(0)
    def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.scalaDescriptor.nestedMessages(0)
    def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
      var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
      (__number: @_root_.scala.unchecked) match {
        case 1 => __out = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit
      }
      __out
    }
    lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
      Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
        _root_.io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit
      )
    // No enum fields in this message, so any lookup is a programming error.
    def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
    lazy val defaultInstance = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions(
      `type` = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type.Empty
    )
    // Sealed ADT representing the `type` oneof: either Empty (unset) or Explicit.
    sealed trait Type extends _root_.scalapb.GeneratedOneof {
      def isEmpty: _root_.scala.Boolean = false
      def isDefined: _root_.scala.Boolean = true
      def isExplicit: _root_.scala.Boolean = false
      def explicit: _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit] = _root_.scala.None
    }
    object Type {
      // The "unset" oneof case; accessing .value throws by design.
      @SerialVersionUID(0L)
      case object Empty extends io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type {
        type ValueType = _root_.scala.Nothing
        override def isEmpty: _root_.scala.Boolean = true
        override def isDefined: _root_.scala.Boolean = false
        override def number: _root_.scala.Int = 0
        override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
      }
    
      // Wraps an Explicit message as oneof field number 1.
      @SerialVersionUID(0L)
      final case class Explicit(value: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit) extends io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type {
        type ValueType = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit
        override def isExplicit: _root_.scala.Boolean = true
        override def explicit: _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit] = Some(value)
        override def number: _root_.scala.Int = 1
      }
    }
    /** Specifies a set of buckets with arbitrary upper-bounds.
      * This defines size(bounds) + 1 (= N) buckets. The boundaries for bucket
      * index i are:
      *
      * [0, bucket_bounds[i]) for i == 0
      * [bucket_bounds[i-1], bucket_bounds[i]) for 0 < i < N-1
      * [bucket_bounds[i], +infinity) for i == N-1
      *
      * @param bounds
      *   The values must be strictly increasing and > 0.
      */
    @SerialVersionUID(0L)
    final case class Explicit(
        bounds: _root_.scala.Seq[_root_.scala.Double] = _root_.scala.Seq.empty,
        unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
        ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[Explicit] {
        // Packed payload size for `bounds`: each double is a fixed 8 bytes.
        private[this] def boundsSerializedSize = {
          8 * bounds.size
        }
        // Memoized wire size; 0 is the "not yet computed" sentinel (see serializedSize).
        @transient
        private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
        private[this] def __computeSerializedSize(): _root_.scala.Int = {
          var __size = 0
          if (bounds.nonEmpty) {
            // Packed repeated field: 1 tag byte + varint length prefix + payload.
            val __localsize = boundsSerializedSize
            __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__localsize) + __localsize
          }
          __size += unknownFields.serializedSize
          __size
        }
        // Cached as (actual + 1) so a genuine size of 0 differs from the sentinel 0.
        override def serializedSize: _root_.scala.Int = {
          var __size = __serializedSizeMemoized
          if (__size == 0) {
            __size = __computeSerializedSize() + 1
            __serializedSizeMemoized = __size
          }
          __size - 1
          
        }
        // Writes `bounds` packed (proto3 default): one length-delimited record
        // containing the raw doubles back-to-back.
        def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
          if (bounds.nonEmpty) {
            _output__.writeTag(1, 2)
            _output__.writeUInt32NoTag(boundsSerializedSize)
            bounds.foreach(_output__.writeDoubleNoTag)
          };
          unknownFields.writeTo(_output__)
        }
        def clearBounds = copy(bounds = _root_.scala.Seq.empty)
        def addBounds(__vs: _root_.scala.Double *): Explicit = addAllBounds(__vs)
        def addAllBounds(__vs: Iterable[_root_.scala.Double]): Explicit = copy(bounds = bounds ++ __vs)
        def withBounds(__v: _root_.scala.Seq[_root_.scala.Double]): Explicit = copy(bounds = __v)
        def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
        def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
        def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
          (__fieldNumber: @_root_.scala.unchecked) match {
            case 1 => bounds
          }
        }
        def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
          _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
          (__field.number: @_root_.scala.unchecked) match {
            case 1 => _root_.scalapb.descriptors.PRepeated(bounds.iterator.map(_root_.scalapb.descriptors.PDouble(_)).toVector)
          }
        }
        def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
        def companion: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit.type = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit
        // @@protoc_insertion_point(GeneratedMessage[opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit])
    }
    
    object Explicit extends scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit] {
      implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit] = this
      // Accepts `bounds` in both encodings: tag 9 = unpacked fixed64 entries,
      // tag 10 = packed length-delimited run (parsers must accept either form).
      def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit = {
        val __bounds: _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Double] = new _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Double]
        var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
        var _done__ = false
        while (!_done__) {
          val _tag__ = _input__.readTag()
          _tag__ match {
            case 0 => _done__ = true
            case 9 =>
              __bounds += _input__.readDouble()
            case 10 => {
              // Packed form: constrain reads to the declared byte length, then restore the limit.
              val length = _input__.readRawVarint32()
              val oldLimit = _input__.pushLimit(length)
              while (_input__.getBytesUntilLimit > 0) {
                __bounds += _input__.readDouble()
              }
              _input__.popLimit(oldLimit)
            }
            case tag =>
              if (_unknownFields__ == null) {
                _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
              }
              _unknownFields__.parseField(tag, _input__)
          }
        }
        io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit(
            bounds = __bounds.result(),
            unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
        )
      }
      implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit] = _root_.scalapb.descriptors.Reads{
        case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
          _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
          io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit(
            bounds = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Seq[_root_.scala.Double]]).getOrElse(_root_.scala.Seq.empty)
          )
        case _ => throw new RuntimeException("Expected PMessage")
      }
      // Explicit is nested type index 0 within BucketOptions.
      def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.javaDescriptor.getNestedTypes().get(0)
      def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.scalaDescriptor.nestedMessages(0)
      // No message or enum fields here, so both lookups are programming errors.
      def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = throw new MatchError(__number)
      lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
      def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
      lazy val defaultInstance = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit(
        bounds = _root_.scala.Seq.empty
      )
      // Lens support for composable, immutable updates of nested fields.
      implicit class ExplicitLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit](_l) {
        def bounds: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[_root_.scala.Double]] = field(_.bounds)((c_, f_) => c_.copy(bounds = f_))
      }
      final val BOUNDS_FIELD_NUMBER = 1
      def of(
        bounds: _root_.scala.Seq[_root_.scala.Double]
      ): _root_.io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit = _root_.io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit(
        bounds
      )
      // @@protoc_insertion_point(GeneratedMessageCompanion[opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit])
    }
    
    // Lens support for BucketOptions; `explicit` reads via getExplicit (default when unset).
    implicit class BucketOptionsLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions](_l) {
      def explicit: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Explicit] = field(_.getExplicit)((c_, f_) => c_.copy(`type` = io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type.Explicit(f_)))
      def `type`: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type] = field(_.`type`)((c_, f_) => c_.copy(`type` = f_))
    }
    final val EXPLICIT_FIELD_NUMBER = 1
    def of(
      `type`: io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions.Type
    ): _root_.io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions = _root_.io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions(
      `type`
    )
    // @@protoc_insertion_point(GeneratedMessageCompanion[opencensus.proto.metrics.v1.DistributionValue.BucketOptions])
  }
  
  /** @param count
    *   The number of values in each bucket of the histogram, as described in
    *   bucket_bounds.
    * @param exemplar
    *   If the distribution does not have a histogram, then omit this field.
    */
  @SerialVersionUID(0L)
  final case class Bucket(
      count: _root_.scala.Long = 0L,
      exemplar: _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar] = _root_.scala.None,
      unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
      ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[Bucket] {
      // Cached wire size; 0 means "not yet computed" (see serializedSize below).
      @transient
      private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
      // Computes the protobuf wire size of this message. Per proto3 rules,
      // a scalar field at its default value (count == 0L) is not serialized
      // and therefore contributes no bytes.
      private[this] def __computeSerializedSize(): _root_.scala.Int = {
        var __size = 0
        
        {
          val __value = count
          if (__value != 0L) {
            __size += _root_.com.google.protobuf.CodedOutputStream.computeInt64Size(1, __value)
          }
        };
        if (exemplar.isDefined) {
          val __value = exemplar.get
          // 1 tag byte + varint length prefix + nested message payload
          __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        };
        __size += unknownFields.serializedSize
        __size
      }
      override def serializedSize: _root_.scala.Int = {
        var __size = __serializedSizeMemoized
        if (__size == 0) {
          // Memoize size + 1 so a legitimately zero-sized message is
          // distinguishable from the "not yet computed" sentinel (0).
          __size = __computeSerializedSize() + 1
          __serializedSizeMemoized = __size
        }
        __size - 1
        
      }
      // Serializes this message to the protobuf wire format. Field order and
      // the default-skipping logic mirror __computeSerializedSize above.
      def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
        {
          val __v = count
          if (__v != 0L) {
            _output__.writeInt64(1, __v)
          }
        };
        exemplar.foreach { __v =>
          val __m = __v
          // field number 2, wire type 2 (length-delimited)
          _output__.writeTag(2, 2)
          _output__.writeUInt32NoTag(__m.serializedSize)
          __m.writeTo(_output__)
        };
        unknownFields.writeTo(_output__)
      }
      def withCount(__v: _root_.scala.Long): Bucket = copy(count = __v)
      // Returns the exemplar, or the default instance when unset.
      def getExemplar: io.opencensus.proto.metrics.v1.DistributionValue.Exemplar = exemplar.getOrElse(io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.defaultInstance)
      def clearExemplar: Bucket = copy(exemplar = _root_.scala.None)
      def withExemplar(__v: io.opencensus.proto.metrics.v1.DistributionValue.Exemplar): Bucket = copy(exemplar = Option(__v))
      def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
      def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
      // Reflective field access by field number; null encodes "default/unset".
      def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
        (__fieldNumber: @_root_.scala.unchecked) match {
          case 1 => {
            val __t = count
            if (__t != 0L) __t else null
          }
          case 2 => exemplar.orNull
        }
      }
      // Descriptor-based field access used by scalapb's generic formats
      // (e.g. text format); only fields of this message's descriptor are valid.
      def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
        _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
        (__field.number: @_root_.scala.unchecked) match {
          case 1 => _root_.scalapb.descriptors.PLong(count)
          case 2 => exemplar.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
        }
      }
      def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
      def companion: io.opencensus.proto.metrics.v1.DistributionValue.Bucket.type = io.opencensus.proto.metrics.v1.DistributionValue.Bucket
      // @@protoc_insertion_point(GeneratedMessage[opencensus.proto.metrics.v1.DistributionValue.Bucket])
  }
  
  object Bucket extends scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.Bucket] {
    implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.Bucket] = this
    /** Parses a single `Bucket` message from the protobuf wire format. */
    def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opencensus.proto.metrics.v1.DistributionValue.Bucket = {
      var __count: _root_.scala.Long = 0L
      var __exemplar: _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar] = _root_.scala.None
      var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
      var _done__ = false
      while (!_done__) {
        val _tag__ = _input__.readTag()
        _tag__ match {
          // tag 0 marks end of input
          case 0 => _done__ = true
          // tag 8 = field 1 (count), wire type 0 (varint)
          case 8 =>
            __count = _input__.readInt64()
          // tag 18 = field 2 (exemplar), wire type 2 (length-delimited);
          // a repeated occurrence is merged into the previously parsed value
          case 18 =>
            __exemplar = Option(__exemplar.fold(_root_.scalapb.LiteParser.readMessage[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar](_input__))(_root_.scalapb.LiteParser.readMessage(_input__, _)))
          // unrecognized fields are preserved so the message round-trips
          case tag =>
            if (_unknownFields__ == null) {
              _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
            }
            _unknownFields__.parseField(tag, _input__)
        }
      }
      io.opencensus.proto.metrics.v1.DistributionValue.Bucket(
          count = __count,
          exemplar = __exemplar,
          unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
      )
    }
    // Builds a Bucket from scalapb's descriptor-based PMessage representation.
    implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opencensus.proto.metrics.v1.DistributionValue.Bucket] = _root_.scalapb.descriptors.Reads{
      case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
        _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
        io.opencensus.proto.metrics.v1.DistributionValue.Bucket(
          count = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
          exemplar = __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).flatMap(_.as[_root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar]])
        )
      case _ => throw new RuntimeException("Expected PMessage")
    }
    // Bucket is nested type index 1 inside DistributionValue's descriptor.
    def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.javaDescriptor.getNestedTypes().get(1)
    def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.scalaDescriptor.nestedMessages(1)
    // Companion lookup for message-typed fields (only field 2 is a message).
    def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
      var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
      (__number: @_root_.scala.unchecked) match {
        case 2 => __out = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar
      }
      __out
    }
    lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
    // Bucket has no enum fields, so any lookup is an error.
    def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
    // Instance with every field at its proto3 default.
    lazy val defaultInstance = io.opencensus.proto.metrics.v1.DistributionValue.Bucket(
      count = 0L,
      exemplar = _root_.scala.None
    )
    // Lenses enabling composable functional updates of Bucket fields.
    implicit class BucketLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.Bucket]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.Bucket](_l) {
      def count: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.count)((c_, f_) => c_.copy(count = f_))
      def exemplar: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.Exemplar] = field(_.getExemplar)((c_, f_) => c_.copy(exemplar = Option(f_)))
      def optionalExemplar: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar]] = field(_.exemplar)((c_, f_) => c_.copy(exemplar = f_))
    }
    final val COUNT_FIELD_NUMBER = 1
    final val EXEMPLAR_FIELD_NUMBER = 2
    /** Convenience constructor mirroring the case-class `apply`. */
    def of(
      count: _root_.scala.Long,
      exemplar: _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar]
    ): _root_.io.opencensus.proto.metrics.v1.DistributionValue.Bucket = _root_.io.opencensus.proto.metrics.v1.DistributionValue.Bucket(
      count,
      exemplar
    )
    // @@protoc_insertion_point(GeneratedMessageCompanion[opencensus.proto.metrics.v1.DistributionValue.Bucket])
  }
  
  /** Exemplars are example points that may be used to annotate aggregated
    * Distribution values. They are metadata that gives information about a
    * particular value added to a Distribution bucket.
    *
    * @param value
    *   Value of the exemplar point. It determines which bucket the exemplar
    *   belongs to.
    * @param timestamp
    *   The observation (sampling) time of the above value.
    * @param attachments
    *   Contextual information about the example value.
    */
  @SerialVersionUID(0L)
  final case class Exemplar(
      value: _root_.scala.Double = 0.0,
      timestamp: _root_.scala.Option[com.google.protobuf.timestamp.Timestamp] = _root_.scala.None,
      attachments: _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, _root_.scala.Predef.String] = _root_.scala.collection.immutable.Map.empty,
      unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
      ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[Exemplar] {
      // Cached wire size; 0 means "not yet computed" (see serializedSize below).
      @transient
      private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
      // Computes the protobuf wire size. Proto3 defaults (value == 0.0) are
      // skipped; each attachments entry is serialized as a nested
      // AttachmentsEntry message via the key/value type mapper.
      private[this] def __computeSerializedSize(): _root_.scala.Int = {
        var __size = 0
        
        {
          val __value = value
          if (__value != 0.0) {
            __size += _root_.com.google.protobuf.CodedOutputStream.computeDoubleSize(1, __value)
          }
        };
        if (timestamp.isDefined) {
          val __value = timestamp.get
          // 1 tag byte + varint length prefix + nested message payload
          __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        };
        attachments.foreach { __item =>
          val __value = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar._typemapper_attachments.toBase(__item)
          __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
        __size += unknownFields.serializedSize
        __size
      }
      override def serializedSize: _root_.scala.Int = {
        var __size = __serializedSizeMemoized
        if (__size == 0) {
          // Memoize size + 1 so a legitimately zero-sized message is
          // distinguishable from the "not yet computed" sentinel (0).
          __size = __computeSerializedSize() + 1
          __serializedSizeMemoized = __size
        }
        __size - 1
        
      }
      // Serializes this message to the protobuf wire format; mirrors the
      // field order and default-skipping of __computeSerializedSize.
      def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
        {
          val __v = value
          if (__v != 0.0) {
            _output__.writeDouble(1, __v)
          }
        };
        timestamp.foreach { __v =>
          val __m = __v
          // field number 2, wire type 2 (length-delimited)
          _output__.writeTag(2, 2)
          _output__.writeUInt32NoTag(__m.serializedSize)
          __m.writeTo(_output__)
        };
        attachments.foreach { __v =>
          // each (key, value) pair is written as an AttachmentsEntry message
          val __m = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar._typemapper_attachments.toBase(__v)
          _output__.writeTag(3, 2)
          _output__.writeUInt32NoTag(__m.serializedSize)
          __m.writeTo(_output__)
        };
        unknownFields.writeTo(_output__)
      }
      def withValue(__v: _root_.scala.Double): Exemplar = copy(value = __v)
      // Returns the timestamp, or the default instance when unset.
      def getTimestamp: com.google.protobuf.timestamp.Timestamp = timestamp.getOrElse(com.google.protobuf.timestamp.Timestamp.defaultInstance)
      def clearTimestamp: Exemplar = copy(timestamp = _root_.scala.None)
      def withTimestamp(__v: com.google.protobuf.timestamp.Timestamp): Exemplar = copy(timestamp = Option(__v))
      def clearAttachments = copy(attachments = _root_.scala.collection.immutable.Map.empty)
      def addAttachments(__vs: (_root_.scala.Predef.String, _root_.scala.Predef.String) *): Exemplar = addAllAttachments(__vs)
      def addAllAttachments(__vs: Iterable[(_root_.scala.Predef.String, _root_.scala.Predef.String)]): Exemplar = copy(attachments = attachments ++ __vs)
      def withAttachments(__v: _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, _root_.scala.Predef.String]): Exemplar = copy(attachments = __v)
      def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
      def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
      // Reflective field access by field number; null encodes "default/unset",
      // and the attachments map is exposed as a Seq of entry messages.
      def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
        (__fieldNumber: @_root_.scala.unchecked) match {
          case 1 => {
            val __t = value
            if (__t != 0.0) __t else null
          }
          case 2 => timestamp.orNull
          case 3 => attachments.iterator.map(io.opencensus.proto.metrics.v1.DistributionValue.Exemplar._typemapper_attachments.toBase(_)).toSeq
        }
      }
      // Descriptor-based field access used by scalapb's generic formats.
      def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
        _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
        (__field.number: @_root_.scala.unchecked) match {
          case 1 => _root_.scalapb.descriptors.PDouble(value)
          case 2 => timestamp.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
          case 3 => _root_.scalapb.descriptors.PRepeated(attachments.iterator.map(io.opencensus.proto.metrics.v1.DistributionValue.Exemplar._typemapper_attachments.toBase(_).toPMessage).toVector)
        }
      }
      def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
      def companion: io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.type = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar
      // @@protoc_insertion_point(GeneratedMessage[opencensus.proto.metrics.v1.DistributionValue.Exemplar])
  }
  
  object Exemplar extends scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar] {
    implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar] = this
    /** Parses a single `Exemplar` message from the protobuf wire format. */
    def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opencensus.proto.metrics.v1.DistributionValue.Exemplar = {
      var __value: _root_.scala.Double = 0.0
      var __timestamp: _root_.scala.Option[com.google.protobuf.timestamp.Timestamp] = _root_.scala.None
      val __attachments: _root_.scala.collection.mutable.Builder[(_root_.scala.Predef.String, _root_.scala.Predef.String), _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, _root_.scala.Predef.String]] = _root_.scala.collection.immutable.Map.newBuilder[_root_.scala.Predef.String, _root_.scala.Predef.String]
      var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
      var _done__ = false
      while (!_done__) {
        val _tag__ = _input__.readTag()
        _tag__ match {
          // tag 0 marks end of input
          case 0 => _done__ = true
          // tag 9 = field 1 (value), wire type 1 (64-bit double)
          case 9 =>
            __value = _input__.readDouble()
          // tag 18 = field 2 (timestamp), wire type 2 (length-delimited);
          // a repeated occurrence is merged into the previously parsed value
          case 18 =>
            __timestamp = Option(__timestamp.fold(_root_.scalapb.LiteParser.readMessage[com.google.protobuf.timestamp.Timestamp](_input__))(_root_.scalapb.LiteParser.readMessage(_input__, _)))
          // tag 26 = field 3 (attachments map entry), converted to a
          // (key, value) tuple by the type mapper
          case 26 =>
            __attachments += io.opencensus.proto.metrics.v1.DistributionValue.Exemplar._typemapper_attachments.toCustom(_root_.scalapb.LiteParser.readMessage[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry](_input__))
          // unrecognized fields are preserved so the message round-trips
          case tag =>
            if (_unknownFields__ == null) {
              _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
            }
            _unknownFields__.parseField(tag, _input__)
        }
      }
      io.opencensus.proto.metrics.v1.DistributionValue.Exemplar(
          value = __value,
          timestamp = __timestamp,
          attachments = __attachments.result(),
          unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
      )
    }
    // Builds an Exemplar from scalapb's descriptor-based PMessage representation.
    implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar] = _root_.scalapb.descriptors.Reads{
      case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
        _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
        io.opencensus.proto.metrics.v1.DistributionValue.Exemplar(
          value = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Double]).getOrElse(0.0),
          timestamp = __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).flatMap(_.as[_root_.scala.Option[com.google.protobuf.timestamp.Timestamp]]),
          attachments = __fieldsMap.get(scalaDescriptor.findFieldByNumber(3).get).map(_.as[_root_.scala.Seq[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry]]).getOrElse(_root_.scala.Seq.empty).iterator.map(io.opencensus.proto.metrics.v1.DistributionValue.Exemplar._typemapper_attachments.toCustom(_)).toMap
        )
      case _ => throw new RuntimeException("Expected PMessage")
    }
    // Exemplar is nested type index 2 inside DistributionValue's descriptor.
    def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.javaDescriptor.getNestedTypes().get(2)
    def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.scalaDescriptor.nestedMessages(2)
    // Companion lookup for message-typed fields (timestamp and map entries).
    def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
      var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
      (__number: @_root_.scala.unchecked) match {
        case 2 => __out = com.google.protobuf.timestamp.Timestamp
        case 3 => __out = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry
      }
      __out
    }
    lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
      Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
        _root_.io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry
      )
    // Exemplar has no enum fields, so any lookup is an error.
    def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
    // Instance with every field at its proto3 default.
    lazy val defaultInstance = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar(
      value = 0.0,
      timestamp = _root_.scala.None,
      attachments = _root_.scala.collection.immutable.Map.empty
    )
    // Synthetic map-entry message backing the `attachments` map field
    // (key = field 1, value = field 2), as mandated by the protobuf map
    // encoding.
    @SerialVersionUID(0L)
    final case class AttachmentsEntry(
        key: _root_.scala.Predef.String = "",
        value: _root_.scala.Predef.String = "",
        unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
        ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[AttachmentsEntry] {
        // Cached wire size; 0 means "not yet computed".
        @transient
        private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
        // Computes the wire size; empty strings (proto3 default) are skipped.
        private[this] def __computeSerializedSize(): _root_.scala.Int = {
          var __size = 0
          
          {
            val __value = key
            if (!__value.isEmpty) {
              __size += _root_.com.google.protobuf.CodedOutputStream.computeStringSize(1, __value)
            }
          };
          
          {
            val __value = value
            if (!__value.isEmpty) {
              __size += _root_.com.google.protobuf.CodedOutputStream.computeStringSize(2, __value)
            }
          };
          __size += unknownFields.serializedSize
          __size
        }
        override def serializedSize: _root_.scala.Int = {
          var __size = __serializedSizeMemoized
          if (__size == 0) {
            // size + 1 memoization: 0 stays the "not yet computed" sentinel
            __size = __computeSerializedSize() + 1
            __serializedSizeMemoized = __size
          }
          __size - 1
          
        }
        // Serializes this entry; mirrors __computeSerializedSize.
        def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
          {
            val __v = key
            if (!__v.isEmpty) {
              _output__.writeString(1, __v)
            }
          };
          {
            val __v = value
            if (!__v.isEmpty) {
              _output__.writeString(2, __v)
            }
          };
          unknownFields.writeTo(_output__)
        }
        def withKey(__v: _root_.scala.Predef.String): AttachmentsEntry = copy(key = __v)
        def withValue(__v: _root_.scala.Predef.String): AttachmentsEntry = copy(value = __v)
        def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
        def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
        // Reflective field access by field number; null encodes "default/unset".
        def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
          (__fieldNumber: @_root_.scala.unchecked) match {
            case 1 => {
              val __t = key
              if (__t != "") __t else null
            }
            case 2 => {
              val __t = value
              if (__t != "") __t else null
            }
          }
        }
        // Descriptor-based field access used by scalapb's generic formats.
        def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
          _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
          (__field.number: @_root_.scala.unchecked) match {
            case 1 => _root_.scalapb.descriptors.PString(key)
            case 2 => _root_.scalapb.descriptors.PString(value)
          }
        }
        def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
        def companion: io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry.type = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry
        // @@protoc_insertion_point(GeneratedMessage[opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry])
    }
    
    object AttachmentsEntry extends scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry] {
      implicit def messageCompanion: scalapb.GeneratedMessageCompanion[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry] = this
      /** Parses a single map entry from the protobuf wire format. */
      def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry = {
        var __key: _root_.scala.Predef.String = ""
        var __value: _root_.scala.Predef.String = ""
        var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
        var _done__ = false
        while (!_done__) {
          val _tag__ = _input__.readTag()
          _tag__ match {
            // tag 0 marks end of input
            case 0 => _done__ = true
            // tag 10 = field 1 (key), tag 18 = field 2 (value); both strings
            // must be valid UTF-8 per proto3
            case 10 =>
              __key = _input__.readStringRequireUtf8()
            case 18 =>
              __value = _input__.readStringRequireUtf8()
            case tag =>
              if (_unknownFields__ == null) {
                _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
              }
              _unknownFields__.parseField(tag, _input__)
          }
        }
        io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry(
            key = __key,
            value = __value,
            unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
        )
      }
      // Builds an entry from the descriptor-based PMessage representation.
      implicit def messageReads: _root_.scalapb.descriptors.Reads[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry] = _root_.scalapb.descriptors.Reads{
        case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
          _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
          io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry(
            key = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Predef.String]).getOrElse(""),
            value = __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).map(_.as[_root_.scala.Predef.String]).getOrElse("")
          )
        case _ => throw new RuntimeException("Expected PMessage")
      }
      // AttachmentsEntry is nested type index 0 inside Exemplar's descriptor.
      def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.javaDescriptor.getNestedTypes().get(0)
      def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.scalaDescriptor.nestedMessages(0)
      // This entry has no message or enum fields.
      def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = throw new MatchError(__number)
      lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
      def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
      // Instance with every field at its proto3 default.
      lazy val defaultInstance = io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry(
        key = "",
        value = ""
      )
      // Lenses enabling composable functional updates of entry fields.
      implicit class AttachmentsEntryLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry](_l) {
        def key: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Predef.String] = field(_.key)((c_, f_) => c_.copy(key = f_))
        def value: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Predef.String] = field(_.value)((c_, f_) => c_.copy(value = f_))
      }
      final val KEY_FIELD_NUMBER = 1
      final val VALUE_FIELD_NUMBER = 2
      // Converts between the wire-level entry message and a Scala tuple, so
      // the field can be exposed as an immutable Map in the Exemplar API.
      @transient
      implicit val keyValueMapper: _root_.scalapb.TypeMapper[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry, (_root_.scala.Predef.String, _root_.scala.Predef.String)] =
        _root_.scalapb.TypeMapper[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry, (_root_.scala.Predef.String, _root_.scala.Predef.String)](__m => (__m.key, __m.value))(__p => io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry(__p._1, __p._2))
      /** Convenience constructor mirroring the case-class `apply`. */
      def of(
        key: _root_.scala.Predef.String,
        value: _root_.scala.Predef.String
      ): _root_.io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry = _root_.io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry(
        key,
        value
      )
      // @@protoc_insertion_point(GeneratedMessageCompanion[opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry])
    }
    
    // Lenses enabling composable functional updates of Exemplar fields.
    implicit class ExemplarLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.Exemplar]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.Exemplar](_l) {
      def value: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Double] = field(_.value)((c_, f_) => c_.copy(value = f_))
      def timestamp: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.timestamp.Timestamp] = field(_.getTimestamp)((c_, f_) => c_.copy(timestamp = Option(f_)))
      def optionalTimestamp: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[com.google.protobuf.timestamp.Timestamp]] = field(_.timestamp)((c_, f_) => c_.copy(timestamp = f_))
      def attachments: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, _root_.scala.Predef.String]] = field(_.attachments)((c_, f_) => c_.copy(attachments = f_))
    }
    final val VALUE_FIELD_NUMBER = 1
    final val TIMESTAMP_FIELD_NUMBER = 2
    final val ATTACHMENTS_FIELD_NUMBER = 3
    // Resolves the implicit keyValueMapper above for use by (de)serialization.
    @transient
    private[v1] val _typemapper_attachments: _root_.scalapb.TypeMapper[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry, (_root_.scala.Predef.String, _root_.scala.Predef.String)] = implicitly[_root_.scalapb.TypeMapper[io.opencensus.proto.metrics.v1.DistributionValue.Exemplar.AttachmentsEntry, (_root_.scala.Predef.String, _root_.scala.Predef.String)]]
    /** Convenience constructor mirroring the case-class `apply`. */
    def of(
      value: _root_.scala.Double,
      timestamp: _root_.scala.Option[com.google.protobuf.timestamp.Timestamp],
      attachments: _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, _root_.scala.Predef.String]
    ): _root_.io.opencensus.proto.metrics.v1.DistributionValue.Exemplar = _root_.io.opencensus.proto.metrics.v1.DistributionValue.Exemplar(
      value,
      timestamp,
      attachments
    )
    // @@protoc_insertion_point(GeneratedMessageCompanion[opencensus.proto.metrics.v1.DistributionValue.Exemplar])
  }
  
  /** Lens support for composable functional updates of [[DistributionValue]]
    * fields; each lens pairs a getter with a `copy`-based setter.
    */
  implicit class DistributionValueLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue](_l) {
    def count: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.count)((msg_, v_) => msg_.copy(count = v_))
    def sum: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Double] = field(_.sum)((msg_, v_) => msg_.copy(sum = v_))
    def sumOfSquaredDeviation: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Double] = field(_.sumOfSquaredDeviation)((msg_, v_) => msg_.copy(sumOfSquaredDeviation = v_))
    // `bucketOptions` reads through the default instance when unset; use
    // `optionalBucketOptions` to observe or set absence explicitly.
    def bucketOptions: _root_.scalapb.lenses.Lens[UpperPB, io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions] = field(_.getBucketOptions)((msg_, v_) => msg_.copy(bucketOptions = Option(v_)))
    def optionalBucketOptions: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions]] = field(_.bucketOptions)((msg_, v_) => msg_.copy(bucketOptions = v_))
    def buckets: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[io.opencensus.proto.metrics.v1.DistributionValue.Bucket]] = field(_.buckets)((msg_, v_) => msg_.copy(buckets = v_))
  }
  // Protobuf field numbers of DistributionValue, as declared in
  // opencensus/proto/metrics/v1/metrics.proto.
  final val COUNT_FIELD_NUMBER = 1
  final val SUM_FIELD_NUMBER = 2
  final val SUM_OF_SQUARED_DEVIATION_FIELD_NUMBER = 3
  final val BUCKET_OPTIONS_FIELD_NUMBER = 4
  final val BUCKETS_FIELD_NUMBER = 5
  /** Convenience constructor mirroring the generated case-class `apply`;
    * forwards every argument unchanged.
    */
  def of(
    count: _root_.scala.Long,
    sum: _root_.scala.Double,
    sumOfSquaredDeviation: _root_.scala.Double,
    bucketOptions: _root_.scala.Option[io.opencensus.proto.metrics.v1.DistributionValue.BucketOptions],
    buckets: _root_.scala.Seq[io.opencensus.proto.metrics.v1.DistributionValue.Bucket]
  ): _root_.io.opencensus.proto.metrics.v1.DistributionValue = _root_.io.opencensus.proto.metrics.v1.DistributionValue(
    count = count,
    sum = sum,
    sumOfSquaredDeviation = sumOfSquaredDeviation,
    bucketOptions = bucketOptions,
    buckets = buckets
  )
  // @@protoc_insertion_point(GeneratedMessageCompanion[opencensus.proto.metrics.v1.DistributionValue])
}




© 2015 - 2025 Weber Informatics LLC | Privacy Policy