
// io.envoyproxy.envoy.service.metrics.v3.MetricsServiceGrpc.scala
package io.envoyproxy.envoy.service.metrics.v3

object MetricsServiceGrpc {
  val METHOD_STREAM_METRICS: _root_.io.grpc.MethodDescriptor[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsMessage, io.envoyproxy.envoy.service.metrics.v3.StreamMetricsResponse] =
    _root_.io.grpc.MethodDescriptor.newBuilder()
      .setType(_root_.io.grpc.MethodDescriptor.MethodType.CLIENT_STREAMING)
      .setFullMethodName(_root_.io.grpc.MethodDescriptor.generateFullMethodName("envoy.service.metrics.v3.MetricsService", "StreamMetrics"))
      .setSampledToLocalTracing(true)
      .setRequestMarshaller(_root_.scalapb.grpc.Marshaller.forMessage[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsMessage])
      .setResponseMarshaller(_root_.scalapb.grpc.Marshaller.forMessage[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsResponse])
      .setSchemaDescriptor(_root_.scalapb.grpc.ConcreteProtoMethodDescriptorSupplier.fromMethodDescriptor(io.envoyproxy.envoy.service.metrics.v3.MetricsServiceProto.javaDescriptor.getServices().get(0).getMethods().get(0)))
      .build()
  val SERVICE: _root_.io.grpc.ServiceDescriptor =
    _root_.io.grpc.ServiceDescriptor.newBuilder("envoy.service.metrics.v3.MetricsService")
      .setSchemaDescriptor(new _root_.scalapb.grpc.ConcreteProtoFileDescriptorSupplier(io.envoyproxy.envoy.service.metrics.v3.MetricsServiceProto.javaDescriptor))
      .addMethod(METHOD_STREAM_METRICS)
      .build()
  /** Service for streaming metrics to a server that consumes the metrics data. It uses the
    * Prometheus metric data model as a standard to represent metrics information.
    */
  trait MetricsService extends _root_.scalapb.grpc.AbstractService {
    override def serviceCompanion = MetricsService

    /** Envoy will connect and send StreamMetricsMessage messages forever. It does not expect any
      * response to be sent, as nothing would be done in the case of failure.
      */
    def streamMetrics(responseObserver: _root_.io.grpc.stub.StreamObserver[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsResponse]): _root_.io.grpc.stub.StreamObserver[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsMessage]
  }
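  // Example (not part of the generated code): a minimal sketch of how an application might
  // implement the MetricsService trait on the server side. The class name LoggingMetricsService
  // and the println-based handling are assumptions for illustration only; the envoyMetrics field
  // is assumed from the envoy/service/metrics/v3/metrics_service.proto definition.
  //
  //   class LoggingMetricsService extends MetricsService {
  //     override def streamMetrics(
  //         responseObserver: _root_.io.grpc.stub.StreamObserver[StreamMetricsResponse]
  //     ): _root_.io.grpc.stub.StreamObserver[StreamMetricsMessage] =
  //       new _root_.io.grpc.stub.StreamObserver[StreamMetricsMessage] {
  //         override def onNext(msg: StreamMetricsMessage): Unit =
  //           println(s"received ${msg.envoyMetrics.size} Prometheus metric families")
  //         override def onError(t: Throwable): Unit =
  //           println(s"metrics stream failed: ${t.getMessage}")
  //         override def onCompleted(): Unit = {
  //           // Envoy finished its stream; close ours with the (empty) response message.
  //           responseObserver.onNext(StreamMetricsResponse())
  //           responseObserver.onCompleted()
  //         }
  //       }
  //   }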
  object MetricsService extends _root_.scalapb.grpc.ServiceCompanion[MetricsService] {
    implicit def serviceCompanion: _root_.scalapb.grpc.ServiceCompanion[MetricsService] = this
    def javaDescriptor: _root_.com.google.protobuf.Descriptors.ServiceDescriptor = io.envoyproxy.envoy.service.metrics.v3.MetricsServiceProto.javaDescriptor.getServices().get(0)
    def scalaDescriptor: _root_.scalapb.descriptors.ServiceDescriptor = io.envoyproxy.envoy.service.metrics.v3.MetricsServiceProto.scalaDescriptor.services(0)
    def bindService(serviceImpl: MetricsService, executionContext: scala.concurrent.ExecutionContext): _root_.io.grpc.ServerServiceDefinition =
      _root_.io.grpc.ServerServiceDefinition.builder(SERVICE)
        .addMethod(
          METHOD_STREAM_METRICS,
          _root_.io.grpc.stub.ServerCalls.asyncClientStreamingCall(new _root_.io.grpc.stub.ServerCalls.ClientStreamingMethod[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsMessage, io.envoyproxy.envoy.service.metrics.v3.StreamMetricsResponse] {
            override def invoke(observer: _root_.io.grpc.stub.StreamObserver[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsResponse]): _root_.io.grpc.stub.StreamObserver[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsMessage] =
              serviceImpl.streamMetrics(observer)
          }))
        .build()
  }
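  // Example (not part of the generated code): a minimal sketch of exposing an implementation
  // through a grpc-java server. The port 9000 and the serviceImpl value are assumptions made
  // for illustration only.
  //
  //   val server: _root_.io.grpc.Server =
  //     _root_.io.grpc.ServerBuilder
  //       .forPort(9000)
  //       .addService(MetricsService.bindService(serviceImpl, scala.concurrent.ExecutionContext.global))
  //       .build()
  //       .start()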
  /** Service for streaming metrics to a server that consumes the metrics data. It uses the
    * Prometheus metric data model as a standard to represent metrics information.
    */
  trait MetricsServiceBlockingClient {
    def serviceCompanion = MetricsService
  }

  class MetricsServiceBlockingStub(channel: _root_.io.grpc.Channel, options: _root_.io.grpc.CallOptions = _root_.io.grpc.CallOptions.DEFAULT) extends _root_.io.grpc.stub.AbstractStub[MetricsServiceBlockingStub](channel, options) with MetricsServiceBlockingClient {
    override def build(channel: _root_.io.grpc.Channel, options: _root_.io.grpc.CallOptions): MetricsServiceBlockingStub = new MetricsServiceBlockingStub(channel, options)
  }
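  // Note: blocking stubs cannot express client-streaming calls, which is why the blocking client
  // and stub above expose no streamMetrics method; use MetricsServiceStub below for this RPC.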
  class MetricsServiceStub(channel: _root_.io.grpc.Channel, options: _root_.io.grpc.CallOptions = _root_.io.grpc.CallOptions.DEFAULT) extends _root_.io.grpc.stub.AbstractStub[MetricsServiceStub](channel, options) with MetricsService {
    /** Envoy will connect and send StreamMetricsMessage messages forever. It does not expect any
      * response to be sent, as nothing would be done in the case of failure.
      */
    override def streamMetrics(responseObserver: _root_.io.grpc.stub.StreamObserver[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsResponse]): _root_.io.grpc.stub.StreamObserver[io.envoyproxy.envoy.service.metrics.v3.StreamMetricsMessage] = {
      _root_.scalapb.grpc.ClientCalls.asyncClientStreamingCall(channel, METHOD_STREAM_METRICS, options, responseObserver)
    }

    override def build(channel: _root_.io.grpc.Channel, options: _root_.io.grpc.CallOptions): MetricsServiceStub = new MetricsServiceStub(channel, options)
  }
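  // Example (not part of the generated code): a minimal sketch of streaming metrics from a client
  // using the async stub. The target address, plaintext transport, and empty placeholder message
  // are assumptions made for illustration only.
  //
  //   val channel = _root_.io.grpc.ManagedChannelBuilder.forAddress("localhost", 9000).usePlaintext().build()
  //   val requestObserver = stub(channel).streamMetrics(
  //     new _root_.io.grpc.stub.StreamObserver[StreamMetricsResponse] {
  //       override def onNext(r: StreamMetricsResponse): Unit = ()              // no payload is expected back
  //       override def onError(t: Throwable): Unit = t.printStackTrace()
  //       override def onCompleted(): Unit = println("server closed the stream")
  //     })
  //   requestObserver.onNext(StreamMetricsMessage())  // a real client would populate identifier and envoy_metrics
  //   requestObserver.onCompleted()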
  def bindService(serviceImpl: MetricsService, executionContext: scala.concurrent.ExecutionContext): _root_.io.grpc.ServerServiceDefinition = MetricsService.bindService(serviceImpl, executionContext)

  def blockingStub(channel: _root_.io.grpc.Channel): MetricsServiceBlockingStub = new MetricsServiceBlockingStub(channel)

  def stub(channel: _root_.io.grpc.Channel): MetricsServiceStub = new MetricsServiceStub(channel)

  def javaDescriptor: _root_.com.google.protobuf.Descriptors.ServiceDescriptor = io.envoyproxy.envoy.service.metrics.v3.MetricsServiceProto.javaDescriptor.getServices().get(0)

}