package io.dyte.media.hive

import io.dyte.media.handlers.sdp.RtcpFb
import io.dyte.media.hive.handlers.*
import io.dyte.media.utils.IMediaClientLogger
import io.dyte.media.utils.UUIDUtils
import io.dyte.media.utils.sdp.SDPUtils
import io.dyte.webrtc.*
import kotlinx.coroutines.*
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.flow.MutableSharedFlow
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.flow.launchIn
import kotlinx.coroutines.flow.onEach
import kotlinx.coroutines.flow.receiveAsFlow
import kotlinx.coroutines.flow.takeWhile
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.buildJsonObject
import kotlinx.serialization.json.put

const val REASON_TRANSPORT_CLOSED = "transport closed"
const val REASON_DISCONNECTION_CLEANUP = "disconnection cleanup"

open class HiveTransportOptions(
  open val iceServers: List<IceServer>? = null,
  open val iceTransportPolicy: IceTransportPolicy? = null,
  open val additionalSettings: MutableMap<String, Any>? = null,
  open val proprietaryConstraints: Any? = null,
  open val appData: Map<String, Any>? = null,
)

class HiveInternalTransportOptions(
  val id: String? = null,
  val direction: RtpTransceiverDirection,
  val handlerFactory: HiveHandlerFactory? = null,
  override val iceServers: List<IceServer>,
  override val iceTransportPolicy: IceTransportPolicy?,
  override val additionalSettings: MutableMap<String, Any>?,
  override val proprietaryConstraints: Any?,
  override val appData: Map<String, Any>?,
) :
  HiveTransportOptions(
    iceServers,
    iceTransportPolicy,
    additionalSettings,
    proprietaryConstraints,
    appData,
  )

enum class HiveConnectionState {
  New,
  Connecting,
  Connected,
  Failed,
  Disconnected,
  Closed
}

open class HiveConsumerStateObject(
  open val consumerId: String,
  open val trackId: String,
  open val streamId: String,
  open val screenShare: Boolean,
  open val paused: Boolean,
  open val kind: MediaStreamTrackKind,
  open val producingTransportId: String,
  open val mimeType: String,
) {
  override fun toString(): String {
    return "ConsumerStateObject(consumerId='$consumerId', trackId='$trackId', streamId='$streamId', screenShare=$screenShare, paused=$paused, kind=$kind, producingTransportId='$producingTransportId')"
  }
}

data class HiveConsumerState(
  val producerId: String,
  val consumerId: String,
  val trackId: String,
  val streamId: String,
)

class HiveConsumerCreationTaskOptions(
  override val consumerId: String,
  override val trackId: String,
  override val streamId: String,
  override val screenShare: Boolean,
  override val paused: Boolean,
  override val kind: MediaStreamTrackKind,
  val producerId: String,
  val producingPeerId: String,
  override val producingTransportId: String,
  val appData: MutableMap<String, Any>,
  override val mimeType: String,
) :
  HiveConsumerStateObject(
    consumerId,
    trackId,
    streamId,
    screenShare,
    paused,
    kind,
    producingTransportId,
    mimeType,
  )

class HiveConsumerCreationTaskException(
  val options: HiveConsumerCreationTaskOptions,
  var isTimedOut: Boolean = false,
  val name: String = "Consumer Creation Task Exception",
  override val message: String = "Consumer Creation Failed",
) : Exception(message)

class ConsumerTrackEvent(
  val mid: String,
  val receiver: RtpReceiver,
  val track: MediaStreamTrack? = null,
  val transceiver: RtpTransceiver? = null,
)

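/**
 * Transport over a single RTCPeerConnection (via [HiveUnifiedPlan]) for the Hive media stack.
 *
 * Depending on [HiveInternalTransportOptions.direction] it either produces local tracks
 * ([produce]) or consumes remote ones ([consume]), and it also multiplexes chunked control
 * messages over WebRTC data channels.
 */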
class HiveTransport(
  val options: HiveInternalTransportOptions,
  val coroutineScope: CoroutineScope,
  private val logger: IMediaClientLogger,
) {
  /** Id */
  private val _id = options.id

  private lateinit var _serverId: String

  /** Closed flag */
  private var _closed = false

  /** Direction */
  private val _direction = options.direction

  /** SCTP max message size if enabled, null otherwise. */
  //  private val _maxSctpMessageSize: Long [Unused]

  /** RTC handler instance */
  //  private val _handler = options.handlerFactory // check
  private var _handler = HiveUnifiedPlan(coroutineScope, logger)

  /** Transport connection state */
  private var _connectionState = HiveConnectionState.New

  /** Producers map */
  private var _producers = mutableMapOf<String, HiveProducer>()

  /** Consumers map */
  private var _consumers = mutableMapOf<String, HiveConsumer>()

  private var _connected = false

  private val _transportConnection = CompletableDeferred<Boolean>()

  @OptIn(ExperimentalCoroutinesApi::class)
  val limitedDispatcher = Dispatchers.Default.limitedParallelism(1)

  val limitedScope = CoroutineScope(limitedDispatcher)

  val observer = MutableSharedFlow<HiveEmitData>()

  val externalObserver = MutableSharedFlow<HiveEmitData>()

  val consumerChannel = Channel<HiveEmitData>(Channel.BUFFERED)

  /** Pending consumer handlers keyed by "streamId:kind", invoked when the matching track arrives */
  private var consumerTrackEvents = mutableMapOf<String, consumerTrackEvent>()

  /** Tracks received via onTrack, keyed by track id, kept for reuse */
  private val consumerTrackPool = mutableMapOf<String, ConsumerTrackEvent>()

  /** Track events that arrived before any consumer asked for them, keyed by "streamId:kind" */
  private var unknownTracksMap = mutableMapOf<String, TrackEvent>()

  /** App custom data */
  private val _appData: Map<String, Any> = options.appData ?: emptyMap()

  /** Partially received data-channel message chunks, keyed by message id */
  private var dataChannelCache = mutableMapOf<String, Array<DCMessageChunked?>>()

  /** Open data channels keyed by label */
  private var _dataChannels = mutableMapOf<String, DataChannel>()

  private var consumerCounter = 0

  /** Transport Id */
  fun getId() = this._id

  fun getServerId() = this._serverId

  fun getConnected() = this._connected

  fun getIsConnected() = this._transportConnection

  /** Whether the Transport is closed. */
  fun getClosed() = this._closed

  /** Transport direction */
  fun getDirection() = this._direction

  /** RTC Handler instance */
  fun getHandler() = this._handler

  /** Connection state */
  fun getConnectionState() = this._connectionState

  /** Custom data */
  fun getAppData() = this._appData

  //  fun setAppData() = throw Error("Cannot override appData object")

  fun setServerId(id: String) {
    this._serverId = id
  }

  fun getDataChannels() = _dataChannels

  fun getDataChannel(label: String) = this._dataChannels[label]

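  /**
   * Initializes the underlying handler and wires up event collection: completes
   * [_transportConnection] on connect/close, forwards connection-state and ICE candidate events,
   * and reassembles chunked data-channel messages before re-emitting them on [observer].
   */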
  suspend fun init() {
    _handler.init(
      HiveHandlerRunOptions(
        direction = options.direction,
        iceServers = options.iceServers,
        iceTransportPolicy = options.iceTransportPolicy,
        additionalSettings = options.additionalSettings,
        proprietaryConstraints = options.proprietaryConstraints,
        onTrackHandler = ::_onTrack,
      )
    )

    coroutineScope.launch {
      observer.collect {
        when (it.eventName) {
          "connected" -> _transportConnection.complete(true)
          "disconnect" -> _transportConnection.complete(false)
          "close" -> _transportConnection.complete(false)
        }
      }
    }

    coroutineScope.launch {
      _handler.observer.collect {
        when (it.eventName) {
          "@connectionstatechange" -> {
            val connectionState = it.data as HiveConnectionState

            if (connectionState != _connectionState) {
              logger.traceLog(
                "DyteMediaClient: Hive: ${getDirection()} Transport: Connection state changed to ${connectionState.name}"
              )

              _connectionState = connectionState

              when (connectionState) {
                HiveConnectionState.Connected -> {
                  _connected = true
                  observer.emit(HiveEmitData("connected"))
                }
                HiveConnectionState.Disconnected -> {
                  _connected = false
                  observer.emit(HiveEmitData("disconnected"))
                }
                HiveConnectionState.Failed -> {
                  _connected = false
                  observer.emit(HiveEmitData("close"))
                }
                HiveConnectionState.Closed -> {
                  _connected = false
                  observer.emit(HiveEmitData("close"))
                }
                else -> {}
              }

              if (!_closed)
                observer.emit(
                  HiveEmitData(eventName = "connectionstatechange", data = connectionState)
                )
            }
          }
          "@icecandidate" -> {
            if (!_closed) {
              observer.emit(
                HiveEmitData(eventName = "icecandidate", data = it.data as IceCandidate)
              )
            }
          }
          "datachannel" -> {
            @Suppress("UNCHECKED_CAST") val data = it.data as Map<String, Any>

            val channel = data["channel"] as DataChannel

            if (!_dataChannels.contains(channel.label)) _dataChannels[channel.label] = channel

            val dcmsgstr = data["message"] as String

            logger.traceLog(
              "DyteMediaClient: HiveTransport: DataChannel message received - $dcmsgstr on ${channel.id} ${channel.label}"
            )

            // Handle erroneous data channel messages to prevent crashes like:
            // kotlinx.serialization.json.internal.JsonDecodingException: Expected start of the
            // object '{', but had 'EOF' instead at path: $
            // JSON input:
            // ,�Z4????????????�????????????????????????????????????????????????????????????????`�?�z????��
            val dcmsg =
              try {
                Json.decodeFromString(DCMessageChunked.serializer(), dcmsgstr)
              } catch (e: Exception) {
                logger.traceLog(
                  "DyteMediaClient: HiveTransport: Error parsing datachannel message chunk - $e"
                )
                return@collect
              }

            // The message is received in chunks, so we need to cache it until we have all of
            // them. First check whether we already have a cache entry for this message id; if we
            // don't, create one.
            if (!dataChannelCache.contains(dcmsg.id)) {
              dataChannelCache[dcmsg.id] = Array(dcmsg.count) { null }
            }

            val messageCache = dataChannelCache[dcmsg.id]!!
            // Add the chunk to the cache
            messageCache[dcmsg.chunkIndex] = dcmsg

            // Check if we have all the chunks
            if (
              dataChannelCache[dcmsg.id]?.size == dcmsg.count &&
                dataChannelCache[dcmsg.id]?.none { c -> c == null } == true
            ) {
              // We have all the chunks, so we can reassemble the message
              val chunks = dataChannelCache[dcmsg.id]
              val message =
                chunks?.fold("") { acc, dcMessageChunked -> acc + dcMessageChunked!!.chunk }

              // Delete the cache entry
              dataChannelCache.remove(dcmsg.id)

              // The message itself is a JSON object, so we need to parse it
              try {
                val parsedMessage = Json.decodeFromString(DCMessage.serializer(), message!!)
                observer.emit(
                  HiveEmitData(
                    eventName = "datachannel",
                    data =
                      mapOf(
                        "channel" to channel, // get label from channel.label
                        "parsedMessage" to parsedMessage,
                      ),
                  )
                )
              } catch (e: Exception) {
                logger.traceWarning(
                  "DyteMediaClient: HiveTransport: Error assembling datachannel message chunks - $e"
                )
              }
            }
          }
        }
      }
    }
  }

  /** Close the transport */
  suspend fun close() {
    if (this._closed) return

    logger.traceLog("DyteMediaClient: HiveTransport: close()")

    this._connected = false
    this._closed = true

    // Close the handler
    this._handler.close()

    // Close all producers
    val producers = ArrayList(_producers.values)
    producers.forEach { producer -> producer.close(REASON_TRANSPORT_CLOSED) }
    this._producers.clear()

    // Close all consumers
    val consumers = ArrayList(_consumers.values)
    consumers.forEach { consumer -> consumer.close(REASON_TRANSPORT_CLOSED) }
    this._consumers.clear()

    this.consumerTrackPool.clear()
    this.consumerTrackEvents.clear()

    observer.emit(HiveEmitData("close"))
  }

  /** Get associated Transport (RTCPeerConnection) stats */
  suspend fun getStats(): RtcStatsReport? {
    if (this._closed) throw IllegalStateException("closed")

    return this._handler.getTransportStats()
  }

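  /**
   * Connects the transport: emits the locally generated offer as a "connect" event, waits for the
   * remote answer via a "returnConnect" event on [externalObserver], applies it through the
   * handler callback, and then awaits ICE connection.
   */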
  suspend fun connect() {
    logger.traceLog("DyteMediaClient: Connecting hive transport: ${this.getId()}")

    try {
      val connectResult = this._handler.connect()

      // TODO: Check how to receive answer
      this.observer.emit(HiveEmitData("connect", connectResult.offerSdp))

      // val externalResult = HiveEmitData("returnConnect")

      val answer =
        externalObserver.takeWhile { it.eventName == "returnConnect" }.first().data
          as SessionDescription

      // call callback on answer
      connectResult.callback.invoke(answer)

      if (!this.getIsConnected().await()) throw Error("Ice Connection Failed")
    } catch (e: Error) {
      logger.traceLog("DyteMediaClient: HiveTransport: Failed to connect - $e")
    }
  }

  /** Restart ICE connection */
  suspend fun restartIce(): HiveGenericHandlerResult {
    logger.traceLog("DyteMediaClient: HiveTransport: restartIce()")

    if (this._closed) throw IllegalStateException("closed")

    return this._handler.restartIce()
  }

  /** Update ICE servers */
  suspend fun updateIceServers(iceServers: List<IceServer>) {
    logger.traceLog("DyteMediaClient: HiveTransport: updateIceServers()")

    if (this._closed) throw IllegalStateException("closed")

    this._handler.updateIceServers(iceServers)
  }

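  /** Removes the producer from [_producers] once it emits a "close" event. */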
  private suspend fun _handleProducer(producer: HiveProducer) {
    producer.observer.collect {
      if (it.eventName == "close") this._producers.remove(producer.getId())
    }
  }

  private val producerMutex = Mutex()
  private val consumerMutex = Mutex()

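  /** Serialized entry point for [produceInternal]; only one produce() runs at a time. */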
  suspend fun produce(options: HiveProducerOptions): HiveProducer {
    return producerMutex.withLock { produceInternal(options) }
  }

  /** Create a producer */
  suspend fun produceInternal(options: HiveProducerOptions): HiveProducer {
    if (options.track == null) throw Error("TypeError: Missing Track")
    else if (this._direction != RtpTransceiverDirection.SendOnly)
      throw UnsupportedOperationException("Not a sending transport")
    else if (options.track.readyState is MediaStreamTrackState.Ended)
      throw IllegalStateException("Track ended")

    if (!this.getIsConnected().await()) throw Error("Transport not connected")

    lateinit var producerId: String
    lateinit var localId: String

    // First we generate offer SDP
    val sendResult =
      _handler.send(
        HiveHandlerSendOptions(
          track = options.track,
          encodings = options.encodings ?: emptyList(),
          codecOptions = options.codecOptions,
          screenShare = options.appData?.get("screenShare") as? Boolean ?: false,
          stream = options.stream,
        )
      )

    // Then we send this offer to the server
    observer.emit(
      HiveEmitData(
        eventName = "produce",
        data =
          mapOf(
            "offer" to sendResult.offerSdp,
            "kind" to options.track.kind,
            "paused" to
              if (options.disableTrackOnPause != null && options.disableTrackOnPause)
                !options.track.enabled
              else false,
            "appData" to options.appData,
          ),
      )
    )

    //      val consumerObject: HiveConsumerStateObject =
    //        externalObserver.takeWhile { it.eventName == "returnConsume" }.first().data as
    // HiveConsumerStateObject

    val data: Map<String, Any> =
      externalObserver.takeWhile { it.eventName == "returnProduce" }.first().data
        as Map<String, Any>

    val answer = data["answer"] as SessionDescription
    producerId = data["producerId"] as String

    // Then we set the answer on remote and get the localId
    localId = sendResult.callback(answer) as String

    val producer =
      HiveProducer(
        HiveInternalProducerOptions(
          id = producerId,
          localId = localId,
          track = options.track,
          stopTracks = options.stopTracks ?: true,
          disableTrackOnPause = options.disableTrackOnPause ?: true,
          zeroRtpOnPause = options.zeroRtpOnPause ?: false,
          appData = options.appData ?: emptyMap(),
          handler = this.getHandler(),
        ),
        coroutineScope = coroutineScope,
        logger = logger,
      )

    logger.traceLog(
      "DyteMediaClient: HiveTransport: ${producer.getKind()} producer created ${producer.getId()}"
    )

    this._producers[producerId] = producer

    coroutineScope.launch { _handleProducer(producer) }

    this.observer.emit(HiveEmitData("newproducer", producer))

    return producer
  }

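  /**
   * Creates consumers for the given producers, either by requesting them over the "events" data
   * channel ([createConsumerOverDC]) or via a "consumePeer" signalling round-trip, and returns one
   * deferred [HiveConsumer] per producer (completed by [_consumerCreationTask]).
   */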
  suspend fun consume(
    producers: List<PeerProducerMeta>,
    producingPeerId: String,
    consumerOverDc: Boolean = true,
  ): List<CompletableDeferred<HiveConsumer>> {
    consumerMutex.withLock {
      logger.traceLog(
        "DyteMediaClient: HiveTransport: consume() with producingPeerId = $producingPeerId"
      )

      if (this._closed) throw IllegalStateException("closed")
      else if (this._direction != RtpTransceiverDirection.RecvOnly)
        throw UnsupportedOperationException("Not a receiving transport")

      if (!this.getIsConnected().await()) throw IllegalStateException("Transport not connected")

      val deferredResults = mutableListOf<CompletableDeferred<HiveConsumer>>()

      val producersMap = mutableMapOf<String, PeerProducerMeta>()
      producers.forEach { producersMap[it.producerId] = it }

      var consumersMap = mutableMapOf<String, HiveConsumerStateObject>()

      if (!consumerOverDc) {
        this.observer.emit(HiveEmitData(eventName = "consumePeer", data = producingPeerId))
        @Suppress("UNCHECKED_CAST")
        consumersMap =
          (externalObserver.takeWhile { it.eventName == "returnConsumePeer" }.first().data
              as Map<String, HiveConsumerStateObject>)
            .toMutableMap()
      } else {
        createConsumerOverDC(producers)
        @Suppress("UNCHECKED_CAST")
        val res = consumerChannel.receiveAsFlow().first().data as Map<String, HiveConsumerState>

        logger.traceLog("DyteMediaClient: HiveTransport: Consumers created over DC $res")

        res.forEach { (consumerId, entry) ->
          producersMap[entry.producerId]?.let { p ->
            consumersMap[entry.producerId] =
              HiveConsumerStateObject(
                consumerId = consumerId,
                trackId = entry.trackId,
                streamId = entry.streamId,
                screenShare = p.screenShare,
                paused = p.paused,
                kind =
                  if (p.kind == "video") MediaStreamTrackKind.Video else MediaStreamTrackKind.Audio,
                producingTransportId = p.producingTransportId,
                mimeType = p.mimeType,
              )
          }
        }
      }

      consumersMap.forEach {
        deferredResults.add(
          this._consumerCreationTask(
            HiveConsumerCreationTaskOptions(
              consumerId = it.value.consumerId,
              trackId = it.value.trackId,
              streamId = it.value.streamId,
              kind = it.value.kind,
              producerId = it.key,
              producingPeerId = producingPeerId,
              paused = it.value.paused,
              screenShare = it.value.screenShare,
              producingTransportId = it.value.producingTransportId,
              appData = mutableMapOf("screenShare" to it.value.screenShare),
              mimeType = it.value.mimeType,
            )
          )
        )
      }

      return deferredResults
    }
  }

  private fun _handleConsumer(consumer: HiveConsumer, scope: CoroutineScope) {
    consumer.observer
      .takeWhile { it.eventName == "close" }
      .onEach {
        this._consumers.remove(consumer.getId())
        this._handler.mapMidTransceiver.remove(consumer.getLocalId()) // transceiver.mid
      }
      .launchIn(scope)
  }

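  /**
   * Builds a [HiveConsumer] once the matching track is available: reuses a pooled track, consumes
   * an already-received "unknown" track, or registers a handler keyed by "streamId:kind" for a
   * future onTrack event, failing the deferred after a 5 second timeout.
   */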
  private suspend fun _consumerCreationTask(
    options: HiveConsumerCreationTaskOptions
  ): CompletableDeferred<HiveConsumer> {
    val key = "${options.streamId}:${options.kind}"
    val exception = HiveConsumerCreationTaskException(options)

    val deferredConsumer = CompletableDeferred<HiveConsumer>()

    val timeoutTimer =
      coroutineScope.launch {
        delay(5000)
        if (isActive) {
          consumerTrackEvents.remove(key)
          exception.isTimedOut = true
          deferredConsumer.completeExceptionally(exception)
        }
      }

    val consumeHandler: suspend (ConsumerTrackEvent) -> Unit = { event ->
      try {
        if (event.track?.readyState is MediaStreamTrackState.Ended) {
          timeoutTimer.cancel()
          deferredConsumer.completeExceptionally(exception)
        } else {
          val consumerLocalId = event.mid

          this._handler.mapMidTransceiver[consumerLocalId] = event.transceiver!!
          this._handler.mapMidReceiver[consumerLocalId] = event.receiver
          event.track!!.enabled = true

          val consumer =
            HiveConsumer(
              HiveInternalConsumerOptions(
                id = options.consumerId,
                localId = consumerLocalId,
                track = event.track,
                kind = event.track.kind,
                paused = options.paused,
                producerId = options.producerId,
                producingPeerId = options.producingPeerId,
                producingTransportId = options.producingTransportId,
                handler = this._handler,
                appData = options.appData,
                // screenShare = options.screenShare
                reuseTrack = true,
                ssrc = getConsumerSsrc(event.track),
                mimeType = options.mimeType,
              ),
              coroutineScope = coroutineScope,
              logger = logger,
            )

          this._consumers[options.consumerId] = consumer
          consumerCounter++

          _handleConsumer(consumer, coroutineScope)

          logger.traceLog(
            "DyteMediaClient: HiveTransport: Consumer created for producerId = ${options.producerId} trackId = ${options.trackId} " +
              "producingPeerId = ${options.producingPeerId} kind = ${options.kind}"
          )

          this.observer.emit(HiveEmitData(eventName = "newconsumer", data = consumer))

          timeoutTimer.cancel()
          deferredConsumer.complete(consumer)
        }
      } catch (e: Exception) {
        logger.traceLog("ConsumerDebug: Error while creating consumer: $e")
        timeoutTimer.cancel()
        deferredConsumer.completeExceptionally(exception)
      }
    }

    val reuseTrack = this.consumerTrackPool[options.consumerId]

    if (reuseTrack != null) {
      if (reuseTrack.track != null && reuseTrack.transceiver != null) {
        consumeHandler(reuseTrack)
      }
      return deferredConsumer
    }

    val existingTrackEvent = this.unknownTracksMap[key]

    if (existingTrackEvent != null) {
      this.unknownTracksMap.remove(key)
      consumeHandler(
        ConsumerTrackEvent(
          mid = existingTrackEvent.mid,
          receiver = existingTrackEvent.receiver,
          track = existingTrackEvent.track,
          transceiver = existingTrackEvent.transceiver,
        )
      )
    } else {
      this.consumerTrackEvents[key] = consumeHandler
    }

    return deferredConsumer
  }

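  /** Looks up the SSRC for the given track by scanning the remote SDP's media sections. */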
  private fun getConsumerSsrc(track: MediaStreamTrack?): Long? {
    var result: Long? = null

    val parsedSdp = SDPUtils.parse(this._handler.getPc().remoteDescription?.sdp!!)

    parsedSdp.media.forEach { media ->
      if (media.type == track?.kind.toString().lowercase()) {
        media.ssrcs?.forEach { ssrc ->
          if (ssrc.value.split(" ").contains(track?.id)) result = ssrc.id
        }
      }
    }

    return result
  }

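  /**
   * Handles an incoming track from the peer connection: pools it by track id, hands it to the
   * pending consumer handler for its "streamId:kind" key if one exists, otherwise stores it in
   * [unknownTracksMap] until a consumer asks for it.
   */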
  private suspend fun _onTrack(event: TrackEvent) {
    val key = "${event.streams[0].id}:${event.track?.kind}"

    val trackId = event.track?.id

    event.track
      ?.onEnded
      ?.onEach {
        this.consumerTrackPool.remove(trackId)
        this.unknownTracksMap.remove(key)
      }
      ?.launchIn(coroutineScope)

    val consumerTrackEvent =
      ConsumerTrackEvent(
        mid = event.mid,
        receiver = event.receiver,
        track = event.track,
        transceiver = event.transceiver,
      )

    consumerTrackPool[trackId!!] = consumerTrackEvent

    val eventHandler = this.consumerTrackEvents[key]

    if (eventHandler != null) {
      eventHandler(consumerTrackEvent)
      this.consumerTrackEvents.remove(key)
    } else {
      logger.traceWarning(
        "DyteMediaClient: HiveTransport: Track event handler not found for key = $key"
      )

      this.unknownTracksMap[key] = event
    }
  }

  private suspend fun setRemoteDescription(sdp: SessionDescription) =
    this._handler.getPc().setRemoteDescription(sdp)

  private suspend fun setLocalDescription(sdp: SessionDescription) {
    this._handler.getPc().setLocalDescription(sdp)
  }

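  /**
   * Applies a remote offer, creates an answer, and patches the answer SDP so audio m-lines
   * advertise NACK rtcp-fb before setting it as the local description.
   */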
  suspend fun setRemoteOffer(offer: SessionDescription): SessionDescription {
    this.setRemoteDescription(offer)

    val ans = this._handler.getPc().createAnswer(OfferAnswerOptions())

    val parsedSDP = SDPUtils.parse(ans.sdp)

    parsedSDP.media =
      parsedSDP.media
        .map {
          if (it.type == "audio") {
            val updatedMediaObject = it

            if (updatedMediaObject.rtcpFb != null) {
              updatedMediaObject.rtcpFb = mutableListOf()
            }

            val hasNack =
              updatedMediaObject.rtcpFb?.any { rtcpFb -> rtcpFb.type == "nack" } ?: false

            val opusRtcpFb =
              RtcpFb(type = "nack", payload = updatedMediaObject.payloads!!.toInt(10))

            if (!hasNack)
              if (updatedMediaObject.rtcpFb == null) {
                updatedMediaObject.rtcpFb = mutableListOf(opusRtcpFb)
              } else {
                updatedMediaObject.rtcpFb!!.add(opusRtcpFb)
              }

            updatedMediaObject
          } else {
            it
          }
        }
        .toMutableList()

    val updatedAnswer = SessionDescription(type = ans.type, sdp = SDPUtils.write(parsedSDP))

    this.setLocalDescription(updatedAnswer)

    return updatedAnswer
  }

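  /**
   * Requests consumer creation over the "events" data channel by sending a "create_consumer"
   * [DCMessage] with the preferred codec and publisher id (and the producer id when exactly one
   * producer is requested).
   */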
  fun createConsumerOverDC(producers: List<PeerProducerMeta>) {
    val channel = _dataChannels["events"]

    if (channel == null) {
      logger.traceLog("createConsumerOverDC: events datachannel not ready")
      return
    }

    if (producers.isEmpty()) {
      logger.traceLog("createConsumerOverDC: one of producer id or publisher id is required")
      return
    }

    var req =
      DCMessage(
        type = "create_consumer",
        payload =
          buildJsonObject {
            val producer = producers.first()

            if (producers.size == 1) {
              put("producerId", producer.producerId)
            }

            put(
              "preferredCodec",
              buildJsonObject {
                if (producer.kind == "video") {
                  put("video", producer.mimeType)
                } else {
                  put("audio", producer.mimeType)
                }
              },
            )

            put("publisherId", producer.producingTransportId)
          },
      )

    req = req.withBolt(Bolt(id = DCMessage.generateId(), type = BoltSendType.REQUEST))

    channel.send(Json.encodeToString(DCMessage.serializer(), req).encodeToByteArray())
  }

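  /**
   * Sends a [DCMessage] response on the given data channel, splitting the JSON payload into
   * [DCMessageChunked] pieces when it exceeds the chunk size limit.
   */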
  fun sendResponseOverDC(label: String, id: String, message: DCMessage) {
    val channel = _dataChannels[label] ?: throw Error("DataChannel not found")

    val extendedDcMessage = message.withBolt(Bolt(id = id, type = BoltSendType.RESPONSE))

    val jsonPayload = Json.encodeToString(DCMessage.serializer(), extendedDcMessage)
    val maxChunkSize = 16348

    if (jsonPayload.length > maxChunkSize) {
      val chunkSize = maxChunkSize - 200 // buffer for other data
      val rawChunks = jsonPayload.chunked(chunkSize)
      val totalChunks = rawChunks.size
      val messageId = UUIDUtils.getRandom()

      rawChunks.forEachIndexed { i, chunk ->
        val chunkedDCMessage =
          DCMessageChunked(id = messageId, count = totalChunks, chunkIndex = i, chunk = chunk)
        channel.send(
          Json.encodeToString(DCMessageChunked.serializer(), chunkedDCMessage).encodeToByteArray()
        )
      }
    } else {
      channel.send(jsonPayload.encodeToByteArray())
    }
  }

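  /** Re-runs [_consumerCreationTask] for each failed (e.g. timed-out) creation attempt. */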
  suspend fun retryFailedConsumerCreationTasks(
    tasks: List<HiveConsumerCreationTaskException>
  ): List<CompletableDeferred<HiveConsumer>> {
    val deferredResults = mutableListOf<CompletableDeferred<HiveConsumer>>()

    tasks.forEach { deferredResults.add(this._consumerCreationTask(it.options)) }

    return deferredResults
  }
}

typealias consumerTrackEvent = suspend (ConsumerTrackEvent) -> Unit

data class PeerProducerMeta(
  var producerId: String,
  var producingTransportId: String,
  var kind: String,
  var paused: Boolean,
  var screenShare: Boolean,
  var peerId: String,
  var mimeType: String,
)
