iOS:观看视频的同时录制带音频的视频
当用户观看视频时,我试图同时用前置摄像头录制视频。在没有音频输入的情况下,代码运行得非常好;但一旦我启用音频输入,视频就无法开始播放。这可以实现吗,还是我在尝试一件不可能的事?下面是录制视频的源代码。(标签:ios, iphone, swift, swift3)
/// Configures the capture session (front camera + microphone) and the
/// camera preview layer, then starts the session.
///
/// Fix: by default an `AVCaptureSession` that owns an audio input takes
/// over the app's `AVAudioSession` and reconfigures it for record-only
/// use, which silences/stops any concurrent `AVPlayer` playback — the
/// exact symptom described above. Opting out of automatic configuration
/// and selecting `.playAndRecord` with `mixWithOthers` lets recording
/// and playback run side by side.
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    self.session.beginConfiguration()
    self.session.sessionPreset = AVCaptureSessionPresetMedium

    // Manage the app audio session ourselves so adding the microphone
    // input does not kill AVPlayer playback.
    self.session.automaticallyConfiguresApplicationAudioSession = false
    do {
        try AVAudioSession.sharedInstance().setCategory(
            AVAudioSessionCategoryPlayAndRecord,
            with: [.mixWithOthers, .defaultToSpeaker])
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("Could not configure the audio session: \(error)")
    }

    // Add video input (front wide-angle camera).
    do {
        guard let videoDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .front) else {
            print("No front camera available")
            self.session.commitConfiguration()
            return
        }
        let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
        if self.session.canAddInput(videoDeviceInput) {
            self.session.addInput(videoDeviceInput)
        } else {
            print("Could not add video device input to the session")
            self.session.commitConfiguration()
            return
        }
    } catch {
        print("Could not create video device input: \(error)")
        self.session.commitConfiguration()
        return
    }

    // Add audio input. Failure here is non-fatal: we can still record
    // silent video, so we only log and continue.
    do {
        guard let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) else {
            print("No microphone available")
            self.session.commitConfiguration()
            return
        }
        let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
        if self.session.canAddInput(audioDeviceInput) {
            self.session.addInput(audioDeviceInput)
        } else {
            print("Could not add audio device input to the session")
        }
    } catch {
        print("Could not create audio device input: \(error)")
    }

    // Preview layer: bind once instead of repeated force unwraps, and
    // give it a frame (the original never sized it; if the frame is set
    // elsewhere, e.g. viewDidLayoutSubviews, this is a harmless default).
    if let previewLayer = AVCaptureVideoPreviewLayer(session: self.session) {
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspect
        previewLayer.connection?.videoOrientation = .portrait
        previewLayer.frame = self.cameraElement.bounds
        self.cameraElement.layer.addSublayer(previewLayer)
        self.videoPreviewLayer = previewLayer
    }

    self.session.commitConfiguration()
    self.session.startRunning()
}
/// Attaches a movie-file output to the session (once) and starts
/// recording to a temporary file; `self` receives the recording
/// delegate callbacks.
///
/// Fix: the original unconditionally added a brand-new
/// `AVCaptureMovieFileOutput` on every call with no `canAddOutput`
/// check — a second invocation would fail (a session rejects a second
/// movie output). Reuse the existing output and guard the add.
func startRecording() {
    if self.videoFileOutput == nil {
        let output = AVCaptureMovieFileOutput()
        if self.session.canAddOutput(output) {
            self.session.addOutput(output)
            self.videoFileOutput = output
        } else {
            print("Could not add movie file output to the session")
            return
        }
    }
    // Record into tmp; the delegate moves/uploads it when recording ends.
    let filePath = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tmpVideo.mov")
    ContentController.tmpFilePath = filePath
    self.videoFileOutput?.startRecording(toOutputFileURL: filePath, recordingDelegate: self)
}
/// Builds the AVPlayer for the shared content URL, lays its layer over
/// `videoElement`, and observes the item's readiness via KVO.
///
/// Fix: the original force-unwrapped `content!`, `url!` and
/// `currentItem!` — any missing content crashed the app. Bail out
/// gracefully instead.
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    guard let url = ContentController.content?.url else {
        print("No content URL to play")
        return
    }
    let avPlayer = AVPlayer(url: url)
    player = avPlayer
    let playerLayer = AVPlayerLayer(player: avPlayer)
    playerLayer.frame = self.videoElement.bounds
    self.videoElement.layer.addSublayer(playerLayer)
    // Observe "status" so playback can start once the item is ready.
    // NOTE(review): this observer is never removed in the visible code —
    // remove it (e.g. in viewWillDisappear/deinit) to avoid a KVO crash.
    avPlayer.currentItem?.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions(), context: nil)
}
播放视频源代码(注:此处粘贴的代码与上面的录制代码完全相同,疑为原帖重复粘贴):
// Configures the capture session (front camera + microphone) and the
// camera preview layer, then starts the session.
// NOTE(review): duplicate of the block above; the preview layer is never
// given a frame here — presumably it is sized elsewhere (confirm).
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.session.beginConfiguration()
// Medium preset trades quality for smaller recorded files.
self.session.sessionPreset = AVCaptureSessionPresetMedium
// Add video input: front wide-angle camera. fatalError fires if the
// device has no such camera.
do {
guard let videoDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .front) else {fatalError()}
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
if self.session.canAddInput(videoDeviceInput) {
self.session.addInput(videoDeviceInput)
} else {
// Cannot capture video at all: close the configuration and bail.
print("Could not add video device input to the session")
self.session.commitConfiguration()
return
}
} catch {
print("Could not create video device input: \(error)")
self.session.commitConfiguration()
return
}
// Add audio input. Failure is only logged — the session still runs
// with silent video. NOTE(review): attaching a mic input makes the
// capture session reconfigure the app audio session, which is the
// likely cause of AVPlayer playback not starting (see question text).
do {
guard let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) else {fatalError()}
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
if self.session.canAddInput(audioDeviceInput) {
self.session.addInput(audioDeviceInput)
}
else {
print("Could not add audio device input to the session")
}
} catch {
print("Could not create audio device input: \(error)")
}
// Camera preview: portrait-oriented, aspect-fit, layered onto cameraElement.
self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)
self.videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspect
self.videoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
self.cameraElement.layer.addSublayer(self.videoPreviewLayer!)
self.session.commitConfiguration()
// startRunning() blocks until the session is live.
self.session.startRunning()
}
/// Attaches a fresh movie-file output to the capture session and starts
/// recording to a temporary file; `self` handles the delegate callbacks.
func startRecording() {
    let output = AVCaptureMovieFileOutput()
    self.videoFileOutput = output
    self.session.addOutput(output)

    // Record into the temp directory; the path is shared through
    // ContentController so other screens can pick the file up later.
    let tempDir = NSTemporaryDirectory()
    let filePath = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpVideo.mov")
    ContentController.tmpFilePath = filePath

    videoFileOutput?.startRecording(toOutputFileURL: filePath, recordingDelegate: self)
}
/// Creates the AVPlayer for the shared content URL, overlays its layer
/// on `videoElement`, and observes the item's "status" key via KVO so
/// playback can begin once the item is ready.
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)

    let contentURL = ContentController.content!.url!
    player = AVPlayer(url: contentURL)

    let overlay = AVPlayerLayer(player: player)
    overlay.frame = videoElement.bounds
    videoElement.layer.addSublayer(overlay)

    // Watch readiness of the current item (observer removal happens
    // outside this block, if at all).
    player?.currentItem!.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions(), context: nil)
}
问题出在线程处理上。我的解决办法是:
// Called once the player item reports it is ready: starts playback off
// the current thread, then kicks off recording.
// Why the dispatch: starting capture/recording below is blocking, so
// play() is pushed to a background queue to avoid stalling on it.
// NOTE(review): AVPlayer is normally driven from the main thread —
// confirm this off-main play() is intentional and safe here.
func playerReadyToPlay() {
DispatchQueue.global(qos: .userInitiated).async {
self.player?.play()
}
// Begin camera/microphone recording (startRecording from the superclass).
super.startRecording()
}
(评论区)评论:这听起来有点吓人——这是个什么样的应用?回复(@kthorat):这将是一款革命性的应用 ;-) 类似视频分享聊天,有点像 Skype,但没有实时视频流——我想把视频存储下来。评论:请注意用户隐私,并就录制等行为明确告知用户。祝你好运。回复(@kthorat):别担心,一切都是合法的。