Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/ios/114.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Ios 需要在mpmovieplayercontroller swift中显示视频图像_Ios_Swift_Mpmovieplayercontroller_Avasset - Fatal编程技术网

Ios 需要在mpmovieplayercontroller swift中显示视频图像

Ios 需要在mpmovieplayercontroller swift中显示视频图像,ios,swift,mpmovieplayercontroller,avasset,Ios,Swift,Mpmovieplayercontroller,Avasset,我已经在我的同事档案中提出了这个疑问,但还没有找到解决办法。请帮我解决这个问题。我被感动了4个多小时 到目前为止,我做到了: 我使用mpmovieplayercontroller从多媒体资料中选择视频并在屏幕上显示。在这里,我选择了2个视频一个接一个。如果我选择第一个视频并显示,当我移动到加载第二个视频时,我的第一个视频屏幕是黑色的。但我需要在播放器中显示视频图像。怎么做,请帮帮我 我的代码: import UIKit import AVFoundation import MobileCoreS

我已经在我的同事档案中提出了这个疑问,但还没有找到解决办法。请帮我解决这个问题。我已经被这个问题困扰了4个多小时

到目前为止,我做到了:

我使用
mpmovieplayercontroller
从多媒体资料中选择视频并在屏幕上显示。在这里,我选择了2个视频一个接一个。如果我选择第一个视频并显示,当我移动到加载第二个视频时,我的第一个视频屏幕是黑色的。但我需要在播放器中显示视频图像。怎么做,请帮帮我

我的代码:

import UIKit
import AVFoundation
import MobileCoreServices
import AssetsLibrary
import MediaPlayer
import CoreMedia

class ViewController: UIViewController, UIGestureRecognizerDelegate {
    // Source assets picked by the user; consumed (and released) by MergeAll(_:).
    var Asset1: AVAsset?
    var Asset2: AVAsset?
    var Asset3: AVAsset?
    var audioAsset: AVAsset?
    // true while the first video slot is being picked, false for the second.
    var loadingAssetOne = false

    // Placeholder thumbnail shown once an audio track has been selected.
    @IBOutlet weak var musicImg: UIImageView!

    var videoPlayer = MPMoviePlayerController()
    var mediaUI = UIImagePickerController()
    var videoURL = NSURL()

    override func viewDidLoad() {
        super.viewDidLoad()
        musicImg.hidden = true
        // BUG FIX: the action selector must name an existing method. The old
        // value "handleTapGesture" matched nothing and crashed with
        // "unrecognized selector" on the first tap; the handler is
        // handleTap(_:), which takes one argument, hence the trailing colon.
        let gestureRecognizer: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTap:")

        gestureRecognizer.delegate = self
        videoPlayer.view.addGestureRecognizer(gestureRecognizer)
    }

    // Target of the tap recognizer installed in viewDidLoad().
    func handleTap(gestureRecognizer: UIGestureRecognizer) {
        //location = gestureRecognizer .locationInView(videoPlayer.view)
        print("tapped")
    }

    // MARK: - gesture delegate
    // This allows you to dispatch touches.

    func gestureRecognizer(gestureRecognizer: UIGestureRecognizer, shouldReceiveTouch touch: UITouch) -> Bool {
        return true
    }

    func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer,
        shouldRecognizeSimultaneouslyWithGestureRecognizer otherGestureRecognizer: UIGestureRecognizer) -> Bool {
            return true
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // Presents the saved-photos album filtered to movies, with `delegate`
    // receiving the pick callbacks. Returns false when the album source is
    // unavailable (e.g. simulator without media).
    func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate : protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool {

        if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
            return false
        }

        let mediaUI = UIImagePickerController()
        mediaUI.sourceType = .SavedPhotosAlbum
        mediaUI.mediaTypes = [kUTTypeMovie as String]
        mediaUI.allowsEditing = true
        mediaUI.delegate = delegate
        presentViewController(mediaUI, animated: true, completion: nil)
        return true
    }

    // After all video and audio are merged, the final movie is saved to the
    // gallery and shown in an embedded preview player.
    func exportDidFinish(session: AVAssetExportSession) {
        if session.status == AVAssetExportSessionStatus.Completed {
            let outputURL = session.outputURL
            let library = ALAssetsLibrary()
            if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
                library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
                    completionBlock: { (assetURL: NSURL!, error: NSError!) -> Void in

                        if error != nil {

                            print("some files went wrong")

                        } else {

                            // Use the exported file's URL for the on-screen preview.
                            self.videoURL = outputURL!

                            self.mediaUI.dismissViewControllerAnimated(true, completion: nil)
                            self.videoPlayer = MPMoviePlayerController()
                            self.videoPlayer.contentURL = self.videoURL

                            self.videoPlayer.controlStyle = .Embedded
                            self.videoPlayer.scalingMode = .AspectFill
                            self.videoPlayer.shouldAutoplay = true
                            self.videoPlayer.backgroundView.backgroundColor = UIColor.clearColor()
                            self.videoPlayer.fullscreen = true
                            self.videoPlayer.view.frame = CGRectMake(38, 442, 220, 106)

                            self.view.addSubview(self.videoPlayer.view)

                            // BUG FIX: prepare the player before starting
                            // playback (the original called play() first).
                            self.videoPlayer.prepareToPlay()
                            self.videoPlayer.play()
                        }
                })
            }
        }

        // Release the source assets whether or not the export succeeded.
        Asset1 = nil
        Asset2 = nil
        Asset3 = nil
        audioAsset = nil
    }

    // MARK: - IBActions

    // Pick the first video.
    @IBAction func FirstVideo(sender: AnyObject) {
        loadingAssetOne = true
        startMediaBrowserFromViewController(self, usingDelegate: self)
    }

    // Pick the second video.
    @IBAction func SecondVideo(sender: AnyObject) {
        loadingAssetOne = false
        startMediaBrowserFromViewController(self, usingDelegate: self)
    }

    // Pick the background audio track.
    @IBAction func Audio(sender: AnyObject) {
        let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
        mediaPickerController.delegate = self
        mediaPickerController.prompt = "Select Audio"
        presentViewController(mediaPickerController, animated: true, completion: nil)
    }

    @IBAction func playPreview(sender: AnyObject) {
        startMediaBrowserFromViewController(self, usingDelegate: self)
    }

    // Derives the capture orientation of a video track from its preferred
    // transform (the four rotation cases a camera can record).
    func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.Up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .Right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .Left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .Up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .Down
        }
        return (assetOrientation, isPortrait)
    }

    // Builds a layer instruction that scales `asset`'s first video track to
    // fit the screen width, compensating for the recorded orientation.
    func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]

        let transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform)

        var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
        if assetInfo.isPortrait {
            // Portrait clips: width/height are swapped relative to naturalSize.
            scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
            instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
                atTime: kCMTimeZero)
        } else {
            let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
            var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
            if assetInfo.orientation == .Down {
                // Upside-down clips get a 180° rotation plus a re-centering
                // translation before the scale is applied.
                let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
                let windowBounds = UIScreen.mainScreen().bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
                concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
            }
            instruction.setTransform(concat, atTime: kCMTimeZero)
        }

        return instruction
    }

    // Merge both videos (plus the optional audio track) into a single movie
    // and export it asynchronously; exportDidFinish(_:) handles the result.
    @IBAction func MergeAll(sender: AnyObject) {
        if let firstAsset = Asset1, secondAsset = Asset2 {

            let mixComposition = AVMutableComposition()

            // First video occupies [0, firstAsset.duration).
            let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
                preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            do {
                try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
                    ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0],
                    atTime: kCMTimeZero)
            } catch {
                // BUG FIX: the original silently swallowed insertion errors.
                print("could not insert first video track: \(error)")
            }

            // Second video starts exactly where the first one ends.
            let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
                preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            do {
                try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration),
                    ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0],
                    atTime: firstAsset.duration)
            } catch {
                print("could not insert second video track: \(error)")
            }

            // One instruction spans both clips; the first clip's layer is
            // hidden (opacity 0) the moment the second clip begins.
            let mainInstruction = AVMutableVideoCompositionInstruction()
            mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration))

            let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
            firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
            let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)

            mainInstruction.layerInstructions = [firstInstruction, secondInstruction]
            let mainComposition = AVMutableVideoComposition()
            mainComposition.instructions = [mainInstruction]
            mainComposition.frameDuration = CMTimeMake(1, 30)
            mainComposition.renderSize = CGSize(width: UIScreen.mainScreen().bounds.width, height: UIScreen.mainScreen().bounds.height)

            // Optionally lay the chosen audio under the whole movie.
            if let loadedAudioAsset = audioAsset {
                let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)
                do {
                    try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)),
                        ofTrack: loadedAudioAsset.tracksWithMediaType(AVMediaTypeAudio)[0],
                        atTime: kCMTimeZero)
                } catch {
                    print("could not insert audio track: \(error)")
                }
            }

            // Export the merged movie to a date-stamped file in Documents.
            let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
            let dateFormatter = NSDateFormatter()
            dateFormatter.dateStyle = .LongStyle
            dateFormatter.timeStyle = .ShortStyle
            let date = dateFormatter.stringFromDate(NSDate())
            let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("final-\(date).mov")
            let url = NSURL(fileURLWithPath: savePath)

            // BUG FIX: bind the optional export session once instead of
            // force-unwrapping `exporter!` on every use.
            guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {
                print("could not create export session")
                return
            }
            exporter.outputURL = url
            exporter.outputFileType = AVFileTypeQuickTimeMovie
            exporter.shouldOptimizeForNetworkUse = true
            exporter.videoComposition = mainComposition

            exporter.exportAsynchronouslyWithCompletionHandler() {
                // Hop back to the main queue before touching UIKit.
                dispatch_async(dispatch_get_main_queue(), { () -> Void in
                    self.exportDidFinish(exporter)
                })
            }
        }
    }

}

extension ViewController: UIImagePickerControllerDelegate {

    // Called when a movie is picked from the gallery: shows it in an embedded
    // player (top frame for the first slot, lower frame for the second) and
    // stores its AVAsset in Asset1/Asset2 for the later merge.
    func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
        let mediaType = info[UIImagePickerControllerMediaType] as! NSString
        dismissViewControllerAnimated(true, completion: nil)

        guard mediaType == kUTTypeMovie else { return }

        // BUG FIX: the original force-unwrapped the media URL (`as! NSURL`)
        // to build the AVAsset *before* its optional check ran, so the
        // "oops, no url" fallback was unreachable and a missing URL crashed.
        guard let pickedURL = info[UIImagePickerControllerMediaURL] as? NSURL else {
            print("oops, no url")
            return
        }

        let avAsset = AVAsset(URL: pickedURL)
        self.videoURL = pickedURL
        mediaUI.dismissViewControllerAnimated(true, completion: nil)

        // Identical player setup for both slots; only the frame differs.
        if loadingAssetOne {
            _showPickedVideo(CGRectMake(38, 57, 220, 106))
            Asset1 = avAsset
        } else {
            _showPickedVideo(CGRectMake(38, 206, 220, 106))
            Asset2 = avAsset
        }
    }

    // Embeds a fresh MPMoviePlayerController playing self.videoURL at `frame`.
    private func _showPickedVideo(frame: CGRect) {
        self.videoPlayer = MPMoviePlayerController()
        self.videoPlayer.contentURL = videoURL
        self.videoPlayer.view.frame = frame
        self.view.addSubview(self.videoPlayer.view)

        self.videoPlayer.controlStyle = .Embedded
        self.videoPlayer.scalingMode = .AspectFill
        self.videoPlayer.shouldAutoplay = true
        self.videoPlayer.prepareToPlay()
        self.videoPlayer.play()
    }

}






// Empty conformance: required so this controller can be passed as the
// UIImagePickerController's delegate (which must also be a
// UINavigationControllerDelegate); no navigation callbacks are needed.
extension ViewController: UINavigationControllerDelegate {

}

extension ViewController: MPMediaPickerControllerDelegate {

    // Stores the first picked song as the background-audio asset, reveals the
    // audio thumbnail, then tells the user whether the audio was loaded.
    func mediaPicker(mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection) {
        guard let song = mediaItemCollection.items.first else {
            dismissViewControllerAnimated(true, completion: nil)
            return
        }

        if let vURL = song.valueForProperty(MPMediaItemPropertyAssetURL) as? NSURL {
            audioAsset = AVAsset(URL: vURL)
            musicImg.hidden = false
            // BUG FIX: the original presented the alert while the picker's
            // dismissal animation was still running (and called dismiss
            // twice), so the presentation could be dropped. Present only
            // after the dismissal completes.
            dismissViewControllerAnimated(true) {
                self._showAudioAlert("yes", message: "Audio Loaded")
            }
        } else {
            dismissViewControllerAnimated(true) {
                self._showAudioAlert("No audio", message: "Audio Not Loaded")
            }
        }
    }

    func mediaPickerDidCancel(mediaPicker: MPMediaPickerController) {
        dismissViewControllerAnimated(true, completion: nil)
    }

    // Shows a single-button informational alert.
    private func _showAudioAlert(title: String, message: String) {
        let alert = UIAlertController(title: title, message: message, preferredStyle: .Alert)
        alert.addAction(UIAlertAction(title: "OK", style: .Cancel, handler: nil))
        presentViewController(alert, animated: true, completion: nil)
    }
}
导入UIKit
进口AVF基金会
进口流动储备
导入资产库
导入MediaPlayer
导入CoreMedia
类ViewController:UIViewController、UIGestureRecognitizerDelegate{
var资产1:AVAsset?
var资产2:AVAsset?
var资产3:AVAsset?
var音频资产:AVAsset?
var loadingAssetOne=false
//用于音频缩略图的swt复制图像
@IBVAR音乐:UIImageView!
var videoPlayer=MPMoviePlayerController()
var mediaUI=UIImagePickerController()
var videoURL=NSURL()
重写func viewDidLoad(){
super.viewDidLoad()
musicImg.hidden=true
让手势识别器:UITapGestureRecognizer=UITapGestureRecognizer(目标:self,动作:“handletappershive”)
gestureRecognizer.delegate=self;
videoPlayer.view.addGestureRecognizer(gestureRecognizer)
}
func handleTap(手势识别器:UI手势识别器){
//位置=手势识别器.locationInView(videoPlayer.view)
打印(“抽头”)
}
//#pragma标记-手势委托
//这允许您发送触摸
func手势识别器(手势识别器:UIGestureRecognitor,应接收触摸:UITouch)->Bool{
返回真值
}
func gestureRecognizer(\uGestureRecognizer:UIGestureRecognizer,
应使用手势识别器同时识别其他手势识别器:UIGestureRecognitor)->Bool{
返回真值
}
重写函数didReceiveMemoryWarning(){
超级。我收到了记忆警告()
//处置所有可以重新创建的资源。
}
func startMediaBrowserFromViewController(viewController:UIViewController!,使用委托:协议!)->Bool{
如果UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum)=false{
返回错误
}
让mediaUI=UIImagePickerController()
mediaUI.sourceType=.SavedPhotosAlbum
mediaUI.mediaTypes=[kUTTypeMovie作为字符串]
mediaUI.allowsEditing=true
mediaUI.delegate=委托
presentViewController(mediaUI,动画:true,完成:nil)
返回真值
}
//合并所有视频和音频后。最终视频将保存在多媒体资料中,并将像预览一样显示
func exportDidFinish(会话:AVAssetExportSession){
如果session.status==AVAssetExportSessionStatus.Completed{
让outputURL=session.outputURL
let library=ALAssetsLibrary()
如果library.videoatpath与savedphotosalbum(outputURL)兼容{
library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
completionBlock:{(assetURL:NSURL!,错误:NSError!)->中的Void
如果错误!=nil{
打印(“某些文件出错”)
}否则{
//获取输出url以在屏幕中显示最终视频
self.videoURL=outputURL!
self.mediaUI.dismissViewControllerAnimated(true,完成:nil)
self.videoPlayer=MPMoviePlayerController()
self.videoPlayer.contentURL=self.videoURL
self.videoPlayer.controlStyle=.Embedded
self.videoPlayer.scalingMode=.AspectFill
self.videoPlayer.shouldAutoplay=true
self.videoPlayer.backgroundView.backgroundColor=UIColor.clearColor()
self.videoPlayer.fullscreen=true
self.videoPlayer.view.frame=CGRectMake(38442200106)
self.view.addSubview(self.videoPlayer.view)
self.videoPlayer.play()
self.videoPlayer.prepareToPlay()
}
})
}
}
资产1=零
资产2=零
资产3=零
音频资产=零
}
//单击第一个视频
@iAction func FirstVideo(发件人:AnyObject){
loadingAssetOne=true
startMediaBrowserFromViewController(self,使用委托:self)
}
//clcik第二视频
@iAction func SecondVideo(发件人:AnyObject){
loadingAssetOne=false
startMediaBrowserFromViewController(self,使用委托:self)
}
//单击音频
@iAction func音频(发件人:AnyObject){
让mediaPickerController=MPMediaPickerController(媒体类型:。任意)
mediaPickerController.delegate=self
mediaPickerController.prompt=“选择音频”
presentViewController(mediaPickerController,动画:true,完成:nil)
}
@iAction func播放预览(发件人:AnyObject){
startMediaBrowserFromViewController(self,使用委托:self)
}
//视频的定位
函数方向FromTransform(变换:CGAffineTransform)->(方向:UIImageOrientation,isPortrait:Bool){
var assetOrientation=UIImageOrientation.Up
var isPortrait=false
如果transform.a==0&&transform.b==1.0&&transform.c==1.0&&transform.d==0{
资产=。对
isPortrait=true
}如果transform.a==0&&transform.b,则为else
import UIKit
import AVFoundation
import MobileCoreServices
import AssetsLibrary
import MediaPlayer
import CoreMedia

class ViewController: UIViewController,UIGestureRecognizerDelegate {
var optionalInteger: Int?
var Asset1: AVAsset?
var Asset2: AVAsset?
var Asset3: AVAsset?
var audioAsset: AVAsset?
var loadingAssetOne = false


@IBOutlet weak var musicImg: UIImageView!

@IBOutlet var videoView: UIView!
@IBOutlet var videoViewTwo: UIView!

var player : AVPlayer? = nil
var playerLayer : AVPlayerLayer? = nil
var asset : AVAsset? = nil
var playerItem: AVPlayerItem? = nil

override func viewDidLoad() 
{
    super.viewDidLoad()
    musicImg.hidden = true
    let gestureRecognizer: UITapGestureRecognizer = UITapGestureRecognizer(target: self, action: "handleTapGesture")

    gestureRecognizer.delegate = self;
    videoPlayer.view.addGestureRecognizer(gestureRecognizer)
    imageViewVideoOne.hidden = true
    imageViewVideoTwo.hidden = true
}
func startMediaBrowserFromViewController(viewController: UIViewController!, usingDelegate delegate : protocol<UINavigationControllerDelegate, UIImagePickerControllerDelegate>!) -> Bool 
{

    if UIImagePickerController.isSourceTypeAvailable(.SavedPhotosAlbum) == false {
        return false
    }

    let mediaUI = UIImagePickerController()
    mediaUI.sourceType = .SavedPhotosAlbum



    mediaUI.mediaTypes = [kUTTypeMovie as String]
    mediaUI.allowsEditing = true
    mediaUI.delegate = delegate
    presentViewController(mediaUI, animated: true, completion: nil)
    return true

}
 // click first video
@IBAction func FirstVideo(sender: AnyObject) {
        loadingAssetOne = true
         optionalInteger = 0
        startMediaBrowserFromViewController(self, usingDelegate: self)

}

// clcik second video
@IBAction func SecondVideo(sender: AnyObject) {

        loadingAssetOne = false
        optionalInteger = 1
        startMediaBrowserFromViewController(self, usingDelegate: self)

}
// click audio
@IBAction func Audio(sender: AnyObject) {

    let mediaPickerController = MPMediaPickerController(mediaTypes: .Any)
    mediaPickerController.delegate = self
    mediaPickerController.prompt = "Select Audio"
    presentViewController(mediaPickerController, animated: true, completion: nil)
}
 @IBAction func playPreview(sender: AnyObject) 
 {
    startMediaBrowserFromViewController(self, usingDelegate: self)
 }
 extension ViewController: UIImagePickerControllerDelegate 
 {

    // display the first & second video after it picked from gallery
    func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) 
    {
      let mediaType = info[UIImagePickerControllerMediaType] as! NSString
      dismissViewControllerAnimated(true, completion: nil)

      if mediaType == kUTTypeMovie 
      {
        if loadingAssetOne 
        {
          let avAsset = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
          print(avAsset)
            if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
                self.videoURL = vURL

            } else {
                print("oops, no url")
            }
            mediaUI.dismissViewControllerAnimated(true, completion: nil)
  let videoURLWithPath = info[UIImagePickerControllerMediaURL] as? NSURL
            self.videoURL = videoURLWithPath!
            asset = AVAsset(URL: videoURL) as AVAsset

            playerItem = AVPlayerItem(asset: asset!)
            player = AVPlayer (playerItem: self.playerItem!)
            playerLayer = AVPlayerLayer(player: self.player)
            videoView.frame = CGRectMake(38, 57, 220, 106)
            playerLayer?.frame = videoView.frame
            videoView.layer.addSublayer(self.playerLayer!)

            player!.play()
        } 
        else 
        {
           let avAssetTwo = AVAsset(URL: info[UIImagePickerControllerMediaURL] as! NSURL)
            print(avAssetTwo)
           if let vURL = info[UIImagePickerControllerMediaURL] as? NSURL {
                self.videoURL = vURL
            } else {
                print("oops, no url")
            }
            mediaUI.dismissViewControllerAnimated(true, completion: nil)
          let videoURLWithPath = info[UIImagePickerControllerMediaURL] as? NSURL
           self.videoURL = videoURLWithPath!
           asset = AVAsset(URL: videoURL) as AVAsset

            playerItem = AVPlayerItem(asset: asset!)
            player = AVPlayer (playerItem: self.playerItem!)
            playerLayer = AVPlayerLayer(player: self.player)
            videoView.frame =  CGRectMake(38, 206, 220, 106)
            playerLayer?.frame =  videoView.frame
            videoView.layer.addSublayer(self.playerLayer!)
            player!.play()
           }
         }
       }
 }