Swift 音频流添加到哪里?

Swift 音频流添加到哪里?,swift,libjingle,Swift,Libjingle,我找不到音频流添加到“扬声器”的位置。我是否可以修改流,然后自己添加?我觉得 Libjingle 正在处理流并添加它。以下是我代码中的相关部分: import AVFoundation import UIKit let TAG = "ViewController" let AUDIO_TRACK_ID = TAG + "AUDIO" let LOCAL_MEDIA_STREAM_ID = TAG + "STREAM" class ViewController: UIViewControl

我找不到音频流添加到“扬声器”的位置。我是否可以修改流,然后自己添加?我觉得 Libjingle 正在处理流并添加它。

以下是我代码中的相关部分:

import AVFoundation
import UIKit

let TAG = "ViewController"
let AUDIO_TRACK_ID = TAG + "AUDIO"
let LOCAL_MEDIA_STREAM_ID = TAG + "STREAM"

// View controller driving an audio-only WebRTC call using the legacy
// libjingle Objective-C API: it creates the local audio track/stream in
// viewDidLoad and implements the RTCPeerConnectionDelegate callbacks.
// Comments describe only what is visible in this file.
class ViewController: UIViewController, RTCSessionDescriptionDelegate, RTCPeerConnectionDelegate {

    // Stream bundling the locally captured tracks; attached to the peer
    // connection in prepareNewConnection().
    var mediaStream: RTCMediaStream!
    // Microphone audio track created by the peer connection factory.
    var localAudioTrack: RTCAudioTrack!
    // NOTE(review): never assigned in the visible code — presumably meant to
    // hold the remote peer's audio track from the addedStream callback; confirm.
    var remoteAudioTrack: RTCAudioTrack!
    // Video renderers — declared but not referenced in the visible code.
    var renderer: RTCEAGLVideoView!
    var renderer_sub: RTCEAGLVideoView!
    var roomName: String!    

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.

        // Set up SSL, the factory, and constraints, then connect to the
        // signaling server (sigConnect is defined elsewhere in the project).
        initWebRTC();
        sigConnect(wsUrl: "http://192.168.1.59:3000");

        // Create the local audio track and wrap it in a media stream; the
        // stream is added to the peer connection later in prepareNewConnection().
        localAudioTrack = peerConnectionFactory.audioTrack(withID: AUDIO_TRACK_ID)
        mediaStream = peerConnectionFactory.mediaStream(withLabel: LOCAL_MEDIA_STREAM_ID)
        mediaStream.addAudioTrack(localAudioTrack)
    }

    // WebRTC plumbing; all populated in initWebRTC()/prepareNewConnection().
    var peerConnectionFactory: RTCPeerConnectionFactory! = nil
    var peerConnection: RTCPeerConnection! = nil
    var pcConstraints: RTCMediaConstraints! = nil
    var audioConstraints: RTCMediaConstraints! = nil
    var mediaConstraints: RTCMediaConstraints! = nil

    // Signaling server URL and call state — set elsewhere in the project.
    var wsServerUrl: String! = nil
    var peerStarted: Bool = false

    // Initializes global WebRTC state: SSL, the connection factory, and the
    // constraint objects used when creating connections and offers/answers.
    func initWebRTC() {
        RTCPeerConnectionFactory.initializeSSL()
        peerConnectionFactory = RTCPeerConnectionFactory()

        pcConstraints = RTCMediaConstraints()
        audioConstraints = RTCMediaConstraints()
        // Audio-only call: the SDP offer asks to receive audio (no video key).
        mediaConstraints = RTCMediaConstraints(
            mandatoryConstraints: [
                RTCPair(key: "OfferToReceiveAudio", value: "true"),
            ],
            optionalConstraints: nil)
    }

    // Builds a new RTCPeerConnection against Google's public STUN server and
    // attaches the local media stream.
    // NOTE(review): rtcConfig is constructed and populated below but never
    // passed to the factory call, so its tcpCandidatePolicy/bundlePolicy/
    // rtcpMuxPolicy settings have no effect here — confirm which factory
    // initializer was intended.
    func prepareNewConnection() -> RTCPeerConnection {
        var icsServers: [RTCICEServer] = []

        icsServers.append(RTCICEServer(uri: NSURL(string: "stun:stun.l.google.com:19302") as URL!, username: "",
        password: ""))

        let rtcConfig: RTCConfiguration = RTCConfiguration()
        rtcConfig.tcpCandidatePolicy = RTCTcpCandidatePolicy.disabled
        rtcConfig.bundlePolicy = RTCBundlePolicy.maxBundle
        rtcConfig.rtcpMuxPolicy = RTCRtcpMuxPolicy.require

        peerConnection = peerConnectionFactory.peerConnection(withICEServers: icsServers, constraints: pcConstraints, delegate: self)
        peerConnection.add(mediaStream);
        return peerConnection;
    }


    // MARK: - RTCPeerConnectionDelegate

    // State-change callbacks intentionally left as no-ops.
    func peerConnection(_ peerConnection: RTCPeerConnection!, signalingStateChanged stateChanged: RTCSignalingState) {
    }

    func peerConnection(_ peerConnection: RTCPeerConnection!, iceConnectionChanged newState: RTCICEConnectionState) {
    }

    func peerConnection(_ peerConnection: RTCPeerConnection!, iceGatheringChanged newState: RTCICEGatheringState) {
    }

    // Forwards each locally gathered ICE candidate to the signaling server
    // as a JSON dictionary; a nil candidate marks the end of gathering.
    func peerConnection(_ peerConnection: RTCPeerConnection!, gotICECandidate candidate: RTCICECandidate!) {
        if (candidate != nil) {
            print("iceCandidate: " + candidate.description)
            let json:[String: AnyObject] = [
                "type" : "candidate" as AnyObject,
                "sdpMLineIndex" : candidate.sdpMLineIndex as AnyObject,
                "sdpMid" : candidate.sdpMid as AnyObject,
                "candidate" : candidate.sdp as AnyObject
            ]
            sigSend(msg: json as NSDictionary)
        } else {
            print("End of candidates. -------------------")
        }
    }

    // Called when the remote peer's media stream is added to the connection.
    // Nothing is wired to the UI here; audio playback is handled by the
    // WebRTC stack itself.
    // NOTE(review): the nil check tests the delegate's `peerConnection`
    // parameter (which shadows the property of the same name), not the stored
    // property — confirm whether `self.peerConnection` was intended.
    func peerConnection(_ peerConnection: RTCPeerConnection!, addedStream stream: RTCMediaStream!) {
        if (peerConnection == nil) {
            return
        }

        // Sanity check: an audio-only call should carry at most one audio track.
        if (stream.audioTracks.count > 1) {
            print("Weird-looking stream: " + stream.description)
            return
        }
    }

    func peerConnection(_ peerConnection: RTCPeerConnection!, removedStream stream: RTCMediaStream!) {
    }

    func peerConnection(_ peerConnection: RTCPeerConnection!, didOpen dataChannel: RTCDataChannel!) {
    }

    func peerConnection(onRenegotiationNeeded peerConnection: RTCPeerConnection!) {

    }
}
我的想法是,我可以在下面这个回调函数中捕获音频流,对吗?此外,我是否可以手动将流添加到扬声器中?

 // Delegate callback quoted from the class above: fires when the remote
 // peer's media stream is added to the connection.
 func peerConnection(_ peerConnection: RTCPeerConnection!, addedStream stream: RTCMediaStream!) {
            // NOTE(review): this checks the shadowing `peerConnection`
            // parameter, not the class property — confirm intent.
            if (peerConnection == nil) {
                return
            }

            // Sanity check: expects at most one audio track in the stream.
            if (stream.audioTracks.count > 1) {
                print("Weird-looking stream: " + stream.description)
                return
            }
        }

建立 WebRTC 通话后,WebRTC 堆栈会使用平台 API 来播放或录制音频。你只能控制以下这些事情:

  • 静音或取消静音音频流
  • 使用系统API增加或减少音量或更改音频配置
  • 您不能手动将流添加到扬声器,但可以选择将默认音频输出更改为扬声器或耳机,从而将 WebRTC 音频重定向到正确的输出。这可以使用 AVFoundation API(例如 AVAudioSession)完成。