lnrpc.NodeInfo.scala
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3
package lnrpc
import org.bitcoins.lnd.rpc.LndUtils._
/** @param node
*
* An individual vertex/node within the channel graph. A node is
* connected to other nodes by one or more channel edges emanating from it. As
* the graph is directed, a node will also have an incoming edge attached to
* it for each outgoing edge.
* @param numChannels
* The total number of channels for the node.
* @param totalCapacity
* The sum of all channel capacities for the node, denominated in satoshis.
* @param channels
* A list of all public channels for the node.
*/
@SerialVersionUID(0L)
final case class NodeInfo(
node: _root_.scala.Option[lnrpc.LightningNode] = _root_.scala.None,
numChannels: org.bitcoins.core.number.UInt32 = lnrpc.NodeInfo._typemapper_numChannels.toCustom(0),
totalCapacity: _root_.scala.Long = 0L,
channels: _root_.scala.Seq[lnrpc.ChannelEdge] = _root_.scala.Seq.empty,
unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[NodeInfo] {
@transient
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
if (node.isDefined) {
val __value = node.get
__size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
};
{
val __value = lnrpc.NodeInfo._typemapper_numChannels.toBase(numChannels)
if (__value != 0) {
__size += _root_.com.google.protobuf.CodedOutputStream.computeUInt32Size(2, __value)
}
};
{
val __value = totalCapacity
if (__value != 0L) {
__size += _root_.com.google.protobuf.CodedOutputStream.computeInt64Size(3, __value)
}
};
channels.foreach { __item =>
val __value = __item
__size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
__size += unknownFields.serializedSize
__size
}
override def serializedSize: _root_.scala.Int = {
var __size = __serializedSizeMemoized
if (__size == 0) {
__size = __computeSerializedSize() + 1
__serializedSizeMemoized = __size
}
__size - 1
}
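// Note (added commentary, not produced by the generator): serializedSize above stores the
// computed size offset by one, so a memoized value of 0 can be told apart from "not yet
// computed". writeTo below emits only fields that differ from their proto3 defaults and
// then re-emits any preserved unknown fields verbatim.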
def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
node.foreach { __v =>
val __m = __v
_output__.writeTag(1, 2)
_output__.writeUInt32NoTag(__m.serializedSize)
__m.writeTo(_output__)
};
{
val __v = lnrpc.NodeInfo._typemapper_numChannels.toBase(numChannels)
if (__v != 0) {
_output__.writeUInt32(2, __v)
}
};
{
val __v = totalCapacity
if (__v != 0L) {
_output__.writeInt64(3, __v)
}
};
channels.foreach { __v =>
val __m = __v
_output__.writeTag(4, 2)
_output__.writeUInt32NoTag(__m.serializedSize)
__m.writeTo(_output__)
};
unknownFields.writeTo(_output__)
}
def getNode: lnrpc.LightningNode = node.getOrElse(lnrpc.LightningNode.defaultInstance)
def clearNode: NodeInfo = copy(node = _root_.scala.None)
def withNode(__v: lnrpc.LightningNode): NodeInfo = copy(node = Option(__v))
def withNumChannels(__v: org.bitcoins.core.number.UInt32): NodeInfo = copy(numChannels = __v)
def withTotalCapacity(__v: _root_.scala.Long): NodeInfo = copy(totalCapacity = __v)
def clearChannels = copy(channels = _root_.scala.Seq.empty)
def addChannels(__vs: lnrpc.ChannelEdge *): NodeInfo = addAllChannels(__vs)
def addAllChannels(__vs: Iterable[lnrpc.ChannelEdge]): NodeInfo = copy(channels = channels ++ __vs)
def withChannels(__v: _root_.scala.Seq[lnrpc.ChannelEdge]): NodeInfo = copy(channels = __v)
def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
(__fieldNumber: @_root_.scala.unchecked) match {
case 1 => node.orNull
case 2 => {
val __t = lnrpc.NodeInfo._typemapper_numChannels.toBase(numChannels)
if (__t != 0) __t else null
}
case 3 => {
val __t = totalCapacity
if (__t != 0L) __t else null
}
case 4 => channels
}
}
def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
_root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
(__field.number: @_root_.scala.unchecked) match {
case 1 => node.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
case 2 => _root_.scalapb.descriptors.PInt(lnrpc.NodeInfo._typemapper_numChannels.toBase(numChannels))
case 3 => _root_.scalapb.descriptors.PLong(totalCapacity)
case 4 => _root_.scalapb.descriptors.PRepeated(channels.iterator.map(_.toPMessage).toVector)
}
}
def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
def companion: lnrpc.NodeInfo.type = lnrpc.NodeInfo
// @@protoc_insertion_point(GeneratedMessage[lnrpc.NodeInfo])
}
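// Usage sketch (illustrative only, not part of the generated file). It assumes a UInt32
// constructor from org.bitcoins.core.number and an existing ChannelEdge value named
// `edge`; neither is defined here.
//
//   val info = lnrpc.NodeInfo(
//     numChannels = org.bitcoins.core.number.UInt32(2),
//     totalCapacity = 500000L
//   )
//   val withEdge = info.addChannels(edge).withTotalCapacity(750000L)
//   val cleared  = withEdge.clearNode.clearChannels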
object NodeInfo extends scalapb.GeneratedMessageCompanion[lnrpc.NodeInfo] {
implicit def messageCompanion: scalapb.GeneratedMessageCompanion[lnrpc.NodeInfo] = this
def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): lnrpc.NodeInfo = {
var __node: _root_.scala.Option[lnrpc.LightningNode] = _root_.scala.None
var __numChannels: _root_.scala.Int = 0
var __totalCapacity: _root_.scala.Long = 0L
val __channels: _root_.scala.collection.immutable.VectorBuilder[lnrpc.ChannelEdge] = new _root_.scala.collection.immutable.VectorBuilder[lnrpc.ChannelEdge]
var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
var _done__ = false
while (!_done__) {
val _tag__ = _input__.readTag()
_tag__ match {
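// Tag values follow the protobuf wire format: (field_number << 3) | wire_type, so
// 10 and 34 are the length-delimited fields 1 and 4, while 16 and 24 are the varint
// fields 2 and 3.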
case 0 => _done__ = true
case 10 =>
__node = Option(__node.fold(_root_.scalapb.LiteParser.readMessage[lnrpc.LightningNode](_input__))(_root_.scalapb.LiteParser.readMessage(_input__, _)))
case 16 =>
__numChannels = _input__.readUInt32()
case 24 =>
__totalCapacity = _input__.readInt64()
case 34 =>
__channels += _root_.scalapb.LiteParser.readMessage[lnrpc.ChannelEdge](_input__)
case tag =>
if (_unknownFields__ == null) {
_unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
}
_unknownFields__.parseField(tag, _input__)
}
}
lnrpc.NodeInfo(
node = __node,
numChannels = lnrpc.NodeInfo._typemapper_numChannels.toCustom(__numChannels),
totalCapacity = __totalCapacity,
channels = __channels.result(),
unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
)
}
implicit def messageReads: _root_.scalapb.descriptors.Reads[lnrpc.NodeInfo] = _root_.scalapb.descriptors.Reads{
case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
_root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
lnrpc.NodeInfo(
node = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).flatMap(_.as[_root_.scala.Option[lnrpc.LightningNode]]),
numChannels = lnrpc.NodeInfo._typemapper_numChannels.toCustom(__fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).map(_.as[_root_.scala.Int]).getOrElse(0)),
totalCapacity = __fieldsMap.get(scalaDescriptor.findFieldByNumber(3).get).map(_.as[_root_.scala.Long]).getOrElse(0L),
channels = __fieldsMap.get(scalaDescriptor.findFieldByNumber(4).get).map(_.as[_root_.scala.Seq[lnrpc.ChannelEdge]]).getOrElse(_root_.scala.Seq.empty)
)
case _ => throw new RuntimeException("Expected PMessage")
}
def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = LightningProto.javaDescriptor.getMessageTypes().get(104)
def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = LightningProto.scalaDescriptor.messages(104)
def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
(__number: @_root_.scala.unchecked) match {
case 1 => __out = lnrpc.LightningNode
case 4 => __out = lnrpc.ChannelEdge
}
__out
}
lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
lazy val defaultInstance = lnrpc.NodeInfo(
node = _root_.scala.None,
numChannels = lnrpc.NodeInfo._typemapper_numChannels.toCustom(0),
totalCapacity = 0L,
channels = _root_.scala.Seq.empty
)
implicit class NodeInfoLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, lnrpc.NodeInfo]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, lnrpc.NodeInfo](_l) {
def node: _root_.scalapb.lenses.Lens[UpperPB, lnrpc.LightningNode] = field(_.getNode)((c_, f_) => c_.copy(node = Option(f_)))
def optionalNode: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[lnrpc.LightningNode]] = field(_.node)((c_, f_) => c_.copy(node = f_))
def numChannels: _root_.scalapb.lenses.Lens[UpperPB, org.bitcoins.core.number.UInt32] = field(_.numChannels)((c_, f_) => c_.copy(numChannels = f_))
def totalCapacity: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Long] = field(_.totalCapacity)((c_, f_) => c_.copy(totalCapacity = f_))
def channels: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[lnrpc.ChannelEdge]] = field(_.channels)((c_, f_) => c_.copy(channels = f_))
}
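// Lens usage sketch (illustrative only): Updatable#update accepts mutations built from the
// lenses above; `edge` is assumed to be an existing ChannelEdge value.
//
//   val updated = lnrpc.NodeInfo.defaultInstance.update(
//     _.totalCapacity := 42000L,
//     _.channels      :+= edge
//   )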
final val NODE_FIELD_NUMBER = 1
final val NUM_CHANNELS_FIELD_NUMBER = 2
final val TOTAL_CAPACITY_FIELD_NUMBER = 3
final val CHANNELS_FIELD_NUMBER = 4
@transient
private[lnrpc] val _typemapper_numChannels: _root_.scalapb.TypeMapper[_root_.scala.Int, org.bitcoins.core.number.UInt32] = implicitly[_root_.scalapb.TypeMapper[_root_.scala.Int, org.bitcoins.core.number.UInt32]]
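// The implicit TypeMapper[Int, UInt32] resolved here is presumably supplied by the
// `import org.bitcoins.lnd.rpc.LndUtils._` at the top of this file, which is what lets the
// proto uint32 field surface as org.bitcoins.core.number.UInt32 in the generated API.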
def of(
node: _root_.scala.Option[lnrpc.LightningNode],
numChannels: org.bitcoins.core.number.UInt32,
totalCapacity: _root_.scala.Long,
channels: _root_.scala.Seq[lnrpc.ChannelEdge]
): _root_.lnrpc.NodeInfo = _root_.lnrpc.NodeInfo(
node,
numChannels,
totalCapacity,
channels
)
// @@protoc_insertion_point(GeneratedMessageCompanion[lnrpc.NodeInfo])
}
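// Round-trip sketch (illustrative only): serialize with the standard ScalaPB helpers and
// parse back through the companion object; `info` is assumed to be a NodeInfo value.
//
//   val bytes   = info.toByteArray
//   val decoded = lnrpc.NodeInfo.parseFrom(bytes)
//   assert(decoded == info)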