Warning: file_get_contents(/data/phpspider/zhask/data//catemap/8/swift/19.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
扩展可能不包含存储属性,并且swift中没有成员问题_Swift - Fatal编程技术网

扩展可能不包含存储属性,并且swift中没有成员问题

扩展可能不包含存储属性,并且swift中没有成员问题,swift,Swift,我无法理解为什么会出现这些错误。有人请帮帮我。这是源代码 import UIKit import AVFoundation // MARK: - PlaySoundsViewController: AVAudioPlayerDelegate extension PlaySoundsViewController: AVAudioPlayerDelegate { var audioEngine = AVAudioEngine() // MARK: Alerts s

我无法理解为什么会出现这些错误。有人请帮帮我。这是源代码

import UIKit
import AVFoundation

// MARK: - PlaySoundsViewController: AVAudioPlayerDelegate

extension PlaySoundsViewController: AVAudioPlayerDelegate {

    // NOTE(fix): Extensions may not contain stored properties, so the original
    //     var audioEngine = AVAudioEngine()
    // cannot live here — that declaration is what produced the compile errors.
    // Declare it inside the PlaySoundsViewController class itself, e.g.:
    //     var audioEngine: AVAudioEngine!
    // (An optional/IUO declaration also matches the `if let audioEngine`
    // check in stopAudio() below, which cannot compile against a non-optional.)

    // MARK: Alerts

    /// User-facing alert titles and messages used by the playback code.
    /// Some member names are misspelled (RecordginDisabledMessage,
    /// RecodingFailedTitle) but are kept as-is for source compatibility
    /// with callers elsewhere in the project.
    struct Alerts {
        static let DismissAlert = "Dismiss"
        static let RecordingDisabledTitle = "Recording disabled"
        static let RecordginDisabledMessage = "You've disabled this app from recording your microphone. Check settings"
        static let RecodingFailedTitle = "Recording failed"
        static let RecordingFailedMessage = "Something went wrong with the recording"
        static let AudioRecordedError = "Audio Recorder Error"
        static let AudioSessionError = "Audio Session Error"
        static let AudioRecordingError = "Audio Recording Error"
        static let AudioFileError = "Audio File Error"
        static let AudioEngineError = "Audio Engine Error"
    }

    // MARK: PlayingState (raw values correspond to sender tags)

    /// Whether audio is currently playing; drives button enabling in configureUI(_:).
    enum PlayingState { case playing, notPlaying }

    // MARK: Audio Functions

    /// Opens the recorded audio file for reading; shows an alert on failure.
    func setupAudio() {
        // initialize (recording) audio file
        do {
            audioFile = try AVAudioFile(forReading: recordedAudioURL as URL)
        } catch {
            showAlert(Alerts.AudioFileError, message: String(describing: error))
        }
    }

    /// Plays the recorded file through a freshly built AVAudioEngine graph.
    /// - Parameters:
    ///   - rate: optional playback rate applied via AVAudioUnitTimePitch
    ///   - pitch: optional pitch shift applied via AVAudioUnitTimePitch
    ///   - echo: when true, routes audio through a multi-echo distortion node
    ///   - reverb: when true, routes audio through a cathedral reverb node
    func playSound(rate: Float? = nil, pitch: Float? = nil, echo: Bool = false, reverb: Bool = false) {

        // initialize audio engine components
        audioEngine = AVAudioEngine()

        // node for playing audio
        audioPlayerNode = AVAudioPlayerNode()
        audioEngine.attach(audioPlayerNode)

        // node for adjusting rate/pitch
        let changeRatePitchNode = AVAudioUnitTimePitch()
        if let pitch = pitch {
            changeRatePitchNode.pitch = pitch
        }
        if let rate = rate {
            changeRatePitchNode.rate = rate
        }
        audioEngine.attach(changeRatePitchNode)

        // node for echo
        let echoNode = AVAudioUnitDistortion()
        echoNode.loadFactoryPreset(.multiEcho1)
        audioEngine.attach(echoNode)

        // node for reverb
        let reverbNode = AVAudioUnitReverb()
        reverbNode.loadFactoryPreset(.cathedral)
        reverbNode.wetDryMix = 50
        audioEngine.attach(reverbNode)

        // connect nodes: player -> rate/pitch -> [echo] -> [reverb] -> output
        if echo && reverb {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, reverbNode, audioEngine.outputNode)
        } else if echo {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, audioEngine.outputNode)
        } else if reverb {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, reverbNode, audioEngine.outputNode)
        } else {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, audioEngine.outputNode)
        }

        // schedule to play and start the engine!
        audioPlayerNode.stop()
        audioPlayerNode.scheduleFile(audioFile, at: nil) {

            // Estimate how long until playback finishes so the UI can be reset.
            var delayInSeconds: Double = 0

            if let lastRenderTime = self.audioPlayerNode.lastRenderTime, let playerTime = self.audioPlayerNode.playerTime(forNodeTime: lastRenderTime) {

                if let rate = rate {
                    // a custom rate shortens/lengthens the remaining play time
                    delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate) / Double(rate)
                } else {
                    delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate)
                }
            }

            // schedule a stop timer for when audio finishes playing
            self.stopTimer = Timer(timeInterval: delayInSeconds, target: self, selector: #selector(PlaySoundsViewController.stopAudio), userInfo: nil, repeats: false)
            RunLoop.main.add(self.stopTimer!, forMode: RunLoopMode.defaultRunLoopMode)
        }

        do {
            try audioEngine.start()
        } catch {
            showAlert(Alerts.AudioEngineError, message: String(describing: error))
            return
        }

        // play the recording!
        audioPlayerNode.play()
    }

    /// Stops playback, cancels the pending stop timer, resets the UI, and
    /// tears down the engine.
    /// Marked @objc because it is the target of a Timer #selector; methods
    /// declared in extensions are not exposed to the Objective-C runtime
    /// implicitly in Swift 4+.
    @objc func stopAudio() {

        if let audioPlayerNode = audioPlayerNode {
            audioPlayerNode.stop()
        }

        if let stopTimer = stopTimer {
            stopTimer.invalidate()
        }

        configureUI(.notPlaying)

        if let audioEngine = audioEngine {
            audioEngine.stop()
            audioEngine.reset()
        }
    }

    // MARK: Connect List of Audio Nodes

    /// Connects the given nodes into a chain, in argument order, using the
    /// recorded file's processing format.
    func connectAudioNodes(_ nodes: AVAudioNode...) {
        for x in 0..<nodes.count - 1 {
            audioEngine.connect(nodes[x], to: nodes[x + 1], format: audioFile.processingFormat)
        }
    }

    // MARK: UI Functions

    /// Enables/disables the effect and stop buttons for the given play state.
    func configureUI(_ playState: PlayingState) {
        switch playState {
        case .playing:
            setPlayButtonsEnabled(false)
            stopButton.isEnabled = true
        case .notPlaying:
            setPlayButtonsEnabled(true)
            stopButton.isEnabled = false
        }
    }

    /// Toggles every effect button at once.
    func setPlayButtonsEnabled(_ enabled: Bool) {
        snailButton.isEnabled = enabled
        chipmunkButton.isEnabled = enabled
        rabbitButton.isEnabled = enabled
        vaderButton.isEnabled = enabled
        echoButton.isEnabled = enabled
        reverbButton.isEnabled = enabled
    }

    /// Presents a simple alert with a single Dismiss action.
    func showAlert(_ title: String, message: String) {
        let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: Alerts.DismissAlert, style: .default, handler: nil))
        self.present(alert, animated: true, completion: nil)
    }
}
导入UIKit
导入 AVFoundation
//MARK:-播放声音视频控制器:AVAudioPlayerDelegate
扩展播放声音可视控制器:AVAudioPlayerDelegate{
var audioEngine=AVAudioEngine()
//标记:警报
结构警报{
静态let DismissAlert=“dismise”
静态let RecordingDisabledTitle=“已禁用录制”
static let RecordginDisabledMessage=“您已禁用此应用程序录制麦克风。请检查设置”
静态let RecodingFailedTitle=“录制失败”
static let RecordingFailedMessage=“录制出现问题”
静态let AudioRecordedError=“音频记录器错误”
静态let AudioSessionError=“音频会话错误”
静态let AudioRecordingError=“音频录制错误”
静态let AudioFileError=“音频文件错误”
静态let AudioEngineError=“音频引擎错误”
}
//MARK:PlayingState(原始值对应于发送者标记)
枚举播放状态{案例播放,不播放}
//马克:音频功能
函数设置音频(){
//初始化(录制)音频文件
做{
audioFile=尝试AVAudioFile(用于读取:recordedAudioURL作为URL)
}抓住{
showAlert(Alerts.AudioFileError,消息:字符串(描述:错误))
}        
}
func播放声音(速率:浮点?=nil,音高:浮点?=nil,回声:Bool=false,混响:Bool=false){
//初始化音频引擎组件
audioEngine=AVAudioEngine()
//用于播放音频的节点
audioPlayerNode=AVAudioPlayerNode()
audioEngine.attach(audioPlayerNode)
//用于调整速率/节距的节点
让changeRatePitchNode=AVAudioUnitTimePitch()
如果让螺距=螺距{
changeRatePitchNode.pitch=变桨
}
如果出租率=出租率{
changeRatePitchNode.rate=速率
}
audioEngine.attach(changeRatePitchNode)
//回波节点
设echoNode=AvaudioUnit畸变()
echoNode.loadFactoryPreset(.multiEcho1)
音频引擎。连接(echoNode)
//混响节点
设reverbNode=AVAudioUnitReverb()
混响节点.加载工厂预设(.cathedral)
reverbNode.wetDryMix=50
音频引擎。连接(混响节点)
//连接节点
如果echo==真&&混响==真{
连接音频节点(audioPlayerNode、changeRatePitchNode、echoNode、混响节点、audioEngine.outputNode)
}如果echo==true,则为else{
connectAudioNodes(audioPlayerNode、changeRatePitchNode、echoNode、audioEngine.outputNode)
}否则,如果混响==真{
连接音频节点(audioPlayerNode、changeRatePitchNode、混响节点、audioEngine.outputNode)
}否则{
connectAudioNodes(audioPlayerNode、changeRatePitchNode、audioEngine.outputNode)
}
//安排播放并启动发动机!
audioPlayerNode.stop()
audioPlayerNode.scheduleFile(audioFile,at:nil){
var delayInSeconds:Double=0
如果让lastRenderTime=self.audioPlayerNode.lastRenderTime,则让playerTime=self.audioPlayerNode.playerTime(forNodeTime:lastRenderTime){
如果出租率=出租率{
DelayUnseconds=Double(self.audioFile.length-playerTime.sampleTime)/Double(self.audioFile.processingFormat.sampleRate)/Double(rate)
}否则{
delayUnseconds=Double(self.audioFile.length-playerTime.sampleTime)/Double(self.audioFile.processingFormat.sampleRate)
}
}
//为音频播放结束安排停止计时器
self.stopTimer=Timer(时间间隔:delayUnseconds,目标:self,选择器:#选择器(PlaySoundsViewController.stopAudio),用户信息:nil,重复:false)
RunLoop.main.add(self.stopTimer!,forMode:RunLoopMode.defaultRunLoopMode)
}
做{
试试audioEngine.start()
}抓住{
showAlert(Alerts.AudioEngineError,消息:字符串(描述:错误))
返回
}
//播放录音!
audioPlayerNode.play()
}
func stopAudio(){
如果让audioPlayerNode=audioPlayerNode{
audioPlayerNode.stop()
}
如果让stopTimer=stopTimer{
stopTimer.invalidate()
}
配置UI(.notPlaying)
如果让audioEngine=audioEngine{
audioEngine.stop()
audioEngine.reset()
}
}
//标记:连接音频节点列表
func connectAudioNodes(u节点:AVAudioNode…){

对于 for x in 0..<nodes.count-1 循环之前的问题——答案是:您不能在扩展(extension)内声明存储属性
var audioEngine = AVAudioEngine()
。请改为在
PlaySoundsViewController
类内部声明它。

扩展旨在增强行为,而不是从根本上改变类。


@来源:

更多信息:非常感谢您的帮助。我已经在 PlaySoundsViewController 类中声明了它,但我仍然遇到同样的问题。嘿,我设法调试了代码,但现在我又遇到了另一个问题:在运行应用程序时,当我从一个屏幕跳转到另一个屏幕时,模拟器退出并返回到 Xcode,抛出多个错误,错误是"线程 1:信号 SIGABRT"。所有 IBOutlet 和 IBAction 都已连接到故事板。