SwiftUI Firebase「正在播放」音频 AVPlayerItem：无法将类型 'StorageReference' 转换为预期的参数类型 'URL'
我似乎不知道如何通过firebase将我的流式音频制作成一个现在可以在Xcode 12中使用SwiftUI播放的应用程序。 这是我的密码。我向苹果寻求帮助,他们给了我一个样本,使用应用程序内部保存的m4a音频。我正在使用Firebase实时播放音频、封面艺术、接收歌曲标题和艺术家信息。我只是想弄清楚如何使它现在可以用最少的代码播放。该代码适用于应用程序中存储的url和音频,但通过Firebase流式传输则不起作用。所有的信息、音频和艺术都上传到Firebase并实时显示,因此我可以立即上传新音乐,而无需在应用商店中更新应用程序。我只知道如何用SwiftUI编写代码Swiftui Firebase现在可播放音频AVPlayerItem。无法转换类型为';StorageReference';到预期的参数类型';URL';,firebase,audio,swiftui,stream,avplayer,Firebase,Audio,Swiftui,Stream,Avplayer,我似乎不知道如何通过firebase将我的流式音频制作成一个现在可以在Xcode 12中使用SwiftUI播放的应用程序。 这是我的密码。我向苹果寻求帮助,他们给了我一个样本,使用应用程序内部保存的m4a音频。我正在使用Firebase实时播放音频、封面艺术、接收歌曲标题和艺术家信息。我只是想弄清楚如何使它现在可以用最少的代码播放。该代码适用于应用程序中存储的url和音频,但通过Firebase流式传输则不起作用。所有的信息、音频和艺术都上传到Firebase并实时显示,因此我可以立即上传新音乐
import AVFoundation
import MediaPlayer
import Firebase
// Static metadata about each song.
// An album: display metadata plus its list of songs.
struct Album: Identifiable, Codable, Equatable, Hashable {
// Locally generated identity; excluded from CodingKeys below, so it is
// regenerated on every decode rather than read from the payload.
var id = UUID()
// Album title.
var name : String
// Cover-art reference (string path/URL; exact format depends on the backend).
var image: String
// Album artist.
var artist: String
// Songs belonging to this album.
var songs : [Song]
// `id` is intentionally omitted so decoding does not require it.
enum CodingKeys: String, CodingKey {
case name
case image
case artist
case songs
}
}
// A single playable track and its display metadata.
struct Song: Identifiable, Codable, Equatable, Hashable {
// Locally generated identity; excluded from CodingKeys below, so it is
// regenerated on every decode rather than read from the payload.
var id = UUID()
// Song title.
var name : String
// Cover-art reference (string path/URL; exact format depends on the backend).
var image: String
// Performing artist.
var artist: String
// Human-readable duration string (not parsed anywhere in this file).
var time: String
// Firebase Storage location of the audio file; resolved to a playable URL
// via Storage.storage().reference(forURL:) in AudioPlayer.playerItem(atIndex:).
var file: String
// `id` is intentionally omitted so decoding does not require it.
enum CodingKeys: String, CodingKey {
case name
case image
case artist
case time
case file
}
}
// A minimal id/image pair (presumably for list rows of artwork — not used
// elsewhere in this excerpt; verify against callers).
// NOTE(review): Swift types are conventionally UpperCamelCase ("DataType");
// the rename is left to the owner since it would change the public interface.
struct datatype: Identifiable, Equatable, Hashable {
var id : String
var image : String
}
// A snapshot of playback state published for the UI and mirrored into
// MPNowPlayingInfoCenter (see handlePlaybackChange).
struct DynamicMetadata {
// Logical transport state at the time of the snapshot.
var playerState: PlayerState
// AVPlayer.rate at the time of the snapshot (0 = paused, 1 = normal speed).
var playerRate: Float
// Total duration of the current item, in seconds.
var totalTime: Float
// Current playhead position, in seconds.
var elapsedTime: Float
}
// Possible values of the `playerState` property.
enum PlayerState {
// No item loaded; Now Playing updates are suppressed in this state.
case stopped
// Actively playing (or should resume after an interruption ends).
case playing
// Loaded but not advancing.
case paused
}
// Streams songs (resolved from Firebase Storage) through AVPlayer and keeps
// MPNowPlayingInfoCenter and SwiftUI observers in sync.
class AudioPlayer: ObservableObject {
// MARK: Playback Machinery
// The songs being played.
private var songItems: [Song]
// The player used to play individual songs.
// NOTE(review): implicitly unwrapped — it is only assigned inside the async
// downloadURL callback in playerItem(atIndex:), so any access before that
// completes will crash; confirm callers guard against this.
private var player: AVPlayer!
// Index of the currently playing item.
private var currentItemIndex = 0
// The currently playing item.
@Published private(set) var currentStaticMetadata: Song?
// The current logical state of the player.
private(set) var playerState: PlayerState = .stopped
// The current playback state.
@Published private(set) var currentDynamicMetadata: DynamicMetadata?
// `true` if the current session has been interrupted by another app.
private var isInterrupted: Bool = false
// Observers of notifications and property changes.
// NOTE(review): these are declared but never installed in this excerpt —
// confirm the registration code exists elsewhere in the file.
private var itemObserver: NSKeyValueObservation!
private var rateObserver: NSKeyValueObservation!
private var statusObserver: NSObjectProtocol!
private var interruptionObserver: NSObjectProtocol!
private var timeObserver: Any!
// Creates a player over a fixed playlist; no playback starts here.
init(metadata: [Song]) {
songItems = metadata
}
// MARK: State Changes
// Publishes the static metadata (title/artist) of the song now at the
// playhead, both to the system Now Playing center and to SwiftUI observers.
private func handlePlayerItemChange() {
    // Nothing to publish while stopped.
    if playerState == .stopped { return }

    // Static metadata for the song currently selected.
    let song = songItems[currentItemIndex]

    // Build a fresh Now Playing dictionary from that metadata.
    var info = [String: Any]()
    info[MPNowPlayingInfoPropertyMediaType] = MPNowPlayingInfoMediaType.audio.rawValue
    info[MPMediaItemPropertyTitle] = song.name
    info[MPMediaItemPropertyArtist] = song.artist
    info[MPMediaItemPropertyAlbumTitle] = song.name
    MPNowPlayingInfoCenter.default().nowPlayingInfo = info

    // Mirror the change for the UI.
    currentStaticMetadata = song
}
// Publishes the dynamic playback state (rate, duration, elapsed time) to the
// system Now Playing center and to SwiftUI observers.
private func handlePlaybackChange() {
    // Nothing to publish while stopped.
    if playerState == .stopped { return }

    // A missing current item means playback has ended; tear everything down.
    guard let item = player.currentItem else {
        stop()
        return
    }
    // Duration and position are only meaningful once the item is ready.
    if item.status != .readyToPlay { return }

    // Snapshot the dynamic metadata.
    let snapshot = DynamicMetadata(playerState: playerState,
                                   playerRate: player.rate,
                                   totalTime: Float(item.duration.seconds),
                                   elapsedTime: Float(item.currentTime().seconds))

    // Merge into the existing Now Playing dictionary (static fields survive).
    let center = MPNowPlayingInfoCenter.default()
    var info = center.nowPlayingInfo ?? [String: Any]()
    info[MPMediaItemPropertyPlaybackDuration] = snapshot.totalTime
    info[MPNowPlayingInfoPropertyElapsedPlaybackTime] = snapshot.elapsedTime
    info[MPNowPlayingInfoPropertyPlaybackRate] = snapshot.playerRate
    info[MPNowPlayingInfoPropertyDefaultPlaybackRate] = 1.0
    center.nowPlayingInfo = info

    // Mirror the change for the UI.
    currentDynamicMetadata = snapshot
}
// Handle an audio session interruption notification.
// Expects an AVAudioSession.interruptionNotification; ignores anything else.
private func handleAudioSessionInterruption(notification: Notification) {
// Retrieve the interruption type from the notification.
guard let userInfo = notification.userInfo,
let interruptionTypeUInt = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
let interruptionType = AVAudioSession.InterruptionType(rawValue: interruptionTypeUInt) else { return }
// Begin or end an interruption.
switch interruptionType {
case .began:
// NOTE(review): only the flag is recorded here — playerState is not
// updated and playback is not explicitly paused; confirm the system
// pause alone is the intended behavior.
isInterrupted = true
case .ended:
// When an interruption ends, determine whether playback should resume
// automatically, and reactivate the audio session if necessary.
do {
try AVAudioSession.sharedInstance().setActive(true)
isInterrupted = false
// The system advertises .shouldResume when it is appropriate to
// continue playing without user action (e.g. after a phone call).
var shouldResume = false
if let optionsUInt = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt,
AVAudioSession.InterruptionOptions(rawValue: optionsUInt).contains(.shouldResume) {
shouldResume = true
}
switch playerState {
case .stopped:
break
case .playing where shouldResume:
// We were playing and the system says resume: restart the player.
player.play()
case .playing:
// We were playing but must not auto-resume: demote to paused so
// the UI reflects that the user has to press play again.
playerState = .paused
case .paused:
break
}
}
// When the audio session cannot be resumed after an interruption,
// invoke the handler with error information.
catch {
print(error.localizedDescription)
}
// Future, unknown interruption types are ignored.
@unknown default:
break
}
}
// MARK: Utilities
// Resolves the Firebase Storage reference of the song at `itemIndex` to a
// downloadable URL, builds an AVPlayerItem from it, and starts playback.
//
// FIX: the original returned `AVPlayerItem(url: songURL)` where `songURL` was
// a `StorageReference` — the compile error in the question ("Cannot convert
// value of type 'StorageReference' to expected argument type 'URL'"). A
// download URL is only available asynchronously, so the item must be created
// inside the `downloadURL` callback; the synchronous return is replaced by an
// optional completion handler (defaulting to nil, so `playerItem(atIndex: 0)`
// still compiles unchanged).
//
// - Parameters:
//   - itemIndex: Index into `songItems`; must be a valid index.
//   - completion: Called on the Storage callback queue with the ready
//     AVPlayerItem, or nil if the URL could not be resolved.
private func playerItem(atIndex itemIndex: Int, completion: ((AVPlayerItem?) -> Void)? = nil) {
    let songItem = songItems[itemIndex]
    // songItem.file holds a Storage URL (e.g. gs://...), not a playable URL.
    let storageRef = Storage.storage().reference(forURL: songItem.file)
    storageRef.downloadURL { [weak self] url, error in
        guard let self = self else { return }
        if let error = error {
            print("Failed to resolve download URL: \(error.localizedDescription)")
            completion?(nil)
            return
        }
        guard let url = url else {
            // No error but also no URL; treat as failure rather than crash.
            completion?(nil)
            return
        }
        do {
            // Configure the session for playback (background-audio friendly).
            try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: .default)
        } catch {
            print("Audio session error: \(error.localizedDescription)")
        }
        // Build the item from the resolved https URL and start streaming.
        let item = AVPlayerItem(url: url)
        self.player = AVPlayer(playerItem: item)
        self.player.play()
        completion?(item)
    }
}
我猜错误来自这里:
return AVPlayerItem(url: songURL)
您已将 songURL 声明为 `let songURL = Storage.storage().reference(forURL: songItem.file)`，
这意味着它是 StorageReference
,而AVPlayerItem
无法处理
问题是,您无法在那里返回有效的内容,因为下载URL尚未加载。我不确定AVPlayerItem是做什么的,但您可能想看看是否可以稍后构建它-在回调中,您已经在那里做了
self.player=AVPlayer(url:url!)
您已经共享了很多代码,而我并没有阅读所有代码来查看错误发生的确切位置(提示:您可能想了解一下)。但是从错误消息来看,您似乎需要从文件中删除,或者为了播放它。谢谢。我把代码缩短了一点。我真的很感谢你的帮助。希望这有助于理解我的问题。嘿@Vic。你在这方面有什么进展吗?我试着在下面给出一个答案。你有没有机会阅读并尝试一下?如果我的答案有用,请单击向上投票按钮(▲) 如果它回答了您的问题,请单击复选标记(✓) 接受它。这样别人就会知道你已经(足够)了帮助。也请看@Frank我感谢你的回答,但这并没有帮助我成功编译。当我问他们时,苹果公司非常含糊。我没有看到任何Firebase代码将文件下载到内存或磁盘。此外,你的问题标题中的错误是准确的;对存储空间的引用与URL不同。此外这里的代码太多了,我们无法解析;230行大约是200行。请将其缩小到您用于下载文件的代码范围,并确保允许读取所述数据。