Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/ios/116.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181

Warning: file_get_contents(/data/phpspider/zhask/data//catemap/1/ssh/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
找不到金属iOS的资源_Ios_Metal - Fatal编程技术网

找不到 iOS Metal 的学习资源

找不到金属iOS的资源,ios,metal,Ios,Metal,我正在努力学习iOS 3D开发的金属。我在网上看了看,有几篇短文只介绍了金属的基本知识。还有一本书叫《金属举例》,但是这本书也没有详细介绍金属。我还检查了一个关于堆栈溢出的类似问题,但这些问题的答案只是将我引向同一篇文章。有谁知道一个好的资源(书籍、视频或网络教程)可以教我从基础知识到金属的专家水平 很难获得一份全面的金属资源指南,因为它还处于初级阶段。有一些东西,但感觉还不足以让你真正擅长这个 我的建议是:咬紧牙关,先学习一点OpenGL ES。网上有成吨的教程和书籍。只要做一点;足以绘制纹理

我正在努力学习iOS 3D开发的金属。我在网上看了看,有几篇短文只介绍了金属的基本知识。还有一本书叫《金属举例》,但是这本书也没有详细介绍金属。我还检查了一个关于堆栈溢出的类似问题,但这些问题的答案只是将我引向同一篇文章。有谁知道一个好的资源(书籍、视频或网络教程)可以教我从基础知识到金属的专家水平

很难获得一份全面的金属资源指南,因为它还处于初级阶段。有一些东西,但感觉还不足以让你真正擅长这个

我的建议是:咬紧牙关,先学习一点OpenGL ES。网上有成吨的教程和书籍。只要做一点;足以绘制纹理对象。但请确保您能够理解这些基础知识的基本概念。之后,拿起一本关于一般计算机图形学概念的好书。一个好的方法是实时渲染(Möller、Haines和Hoffman)。这将建立你作为一门科学的图形知识,而金属只是实现这些概念的工具。为了实现这些目标,苹果的官方文档和WWDC视频就足够了


Metal和OpenGL ES之间有很多通用术语,这就是为什么我建议先学习它。反正都是电脑绘图。所以你的目标应该是学习计算机图形学。金属将是您完成这项工作的工具。

从 MetalKit 开始,通过构建一些有意义的东西来上手,例如使用任意 Metal Performance Shaders 处理视频输出的相机应用程序:

@import UIKit;
@import AVFoundation;
@import CoreMedia;
#import <MetalKit/MetalKit.h>
#import <Metal/Metal.h>
#import <MetalPerformanceShaders/MetalPerformanceShaders.h>

// Forward declaration: SessionContainer is a project type referenced only by
// pointer here, so no #import is needed. Without this line (or an import) the
// header does not compile.
@class SessionContainer;

/// Renders live camera frames into an MTKView after running them through a
/// Metal Performance Shaders filter (see the implementation file).
@interface ViewController : UIViewController <MTKViewDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>  {
    NSString *_displayName;   // NOTE(review): not used by the visible code — confirm before removing
    NSString *serviceType;    // NOTE(review): missing the conventional leading underscore; kept for compatibility
}

// `strong` is the ARC spelling of the legacy `retain` used originally —
// identical semantics, modern attribute.
@property (strong, nonatomic) SessionContainer *session;
@property (strong, nonatomic) AVCaptureSession *avSession;

@end

#import "ViewController.h"

// Class extension: private Metal / capture state kept out of the public header.
@interface ViewController () {
    MTKView *_metalView;                  // Metal-backed view that displays the filtered frames

    id<MTLDevice> _device;                // system default GPU
    id<MTLCommandQueue> _commandQueue;    // queue used to submit draw work
    id<MTLTexture> _texture;              // most recent captured camera frame; consumed by drawInMTKView:

    CVMetalTextureCacheRef _textureCache; // CF object mapping CVPixelBuffers to Metal textures without copying
}

@property (strong, nonatomic) AVCaptureDevice *videoDevice; // camera supplying the frames
@property (nonatomic) dispatch_queue_t sessionQueue;        // serial queue for capture-session work

@end

@implementation ViewController

#pragma mark - Lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];

    // Metal setup: one device and ONE command queue for the lifetime of the
    // controller. (The original re-created the command queue on every captured
    // frame inside captureOutput:..., which is expensive and unnecessary.)
    _device = MTLCreateSystemDefaultDevice();
    _commandQueue = [_device newCommandQueue];

    _metalView = [[MTKView alloc] initWithFrame:self.view.bounds];
    [_metalView setContentMode:UIViewContentModeScaleAspectFit];
    _metalView.device = _device;
    _metalView.delegate = self;
    _metalView.clearColor = MTLClearColorMake(1, 1, 1, 1);
    _metalView.colorPixelFormat = MTLPixelFormatBGRA8Unorm;
    // framebufferOnly must be NO so MPS can write into the drawable's texture.
    _metalView.framebufferOnly = NO;
    // drawableSize is driven manually from the incoming frame dimensions.
    _metalView.autoResizeDrawable = NO;

    CVMetalTextureCacheCreate(NULL, NULL, _device, NULL, &_textureCache);

    [self.view addSubview:_metalView];

    self.sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);

    if ([self setupCamera]) {
        [_avSession startRunning];
    }
}

- (void)dealloc {
    // The texture cache is a CF object created with CVMetalTextureCacheCreate;
    // ARC does not manage it, so it must be released explicitly (the original
    // leaked it).
    if (_textureCache) {
        CFRelease(_textureCache);
        _textureCache = NULL;
    }
}

#pragma mark - Capture setup

/// Configures _avSession: 640x480 preset, default video device, BGRA frame
/// output delivered to self on a private serial queue.
/// @return YES only when every configuration step succeeded.
- (BOOL)setupCamera {
    // NOTE: the original wrapped this body in @try/@catch/@finally with
    // `return TRUE` inside @finally. A return in @finally overrides every
    // other return path, so the method always reported success — even when
    // the camera was missing or an exception was caught. AVFoundation reports
    // recoverable failures via NSError, not exceptions, so plain early
    // returns are the correct shape.
    NSError *error = nil;

    _avSession = [[AVCaptureSession alloc] init];
    [_avSession beginConfiguration];
    [_avSession setSessionPreset:AVCaptureSessionPreset640x480];

    // Connect to the default camera.
    self.videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (self.videoDevice == nil) {
        [_avSession commitConfiguration];
        return NO;
    }

    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.videoDevice error:&error];
    if (input == nil || ![_avSession canAddInput:input]) {
        // The original ignored `error` and added the input unconditionally.
        NSLog(@"%s - cannot add camera input: %@", __PRETTY_FUNCTION__, error);
        [_avSession commitConfiguration];
        return NO;
    }
    [_avSession addInput:input];

    dispatch_queue_t sampleBufferQueue = dispatch_queue_create("CameraMulticaster", DISPATCH_QUEUE_SERIAL);

    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
    // BGRA matches the MTKView's colorPixelFormat, so frames map to drawable
    // textures without a format conversion.
    [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
    [dataOutput setSampleBufferDelegate:self queue:sampleBufferQueue];

    if (![_avSession canAddOutput:dataOutput]) {
        [_avSession commitConfiguration];
        return NO;
    }
    [_avSession addOutput:dataOutput];
    [_avSession commitConfiguration];
    return YES;
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/// Called on the sample-buffer queue for each captured frame; wraps the pixel
/// buffer in a Metal texture (zero-copy via the texture cache) and stashes it
/// for the next drawInMTKView: pass.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        // Sample buffer carried no image data; nothing to display.
        return;
    }

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    CVMetalTextureRef texture = NULL;
    CVReturn status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &texture);
    if (status == kCVReturnSuccess)
    {
        _metalView.drawableSize = CGSizeMake(width, height);
        // NOTE(review): _texture is written here (sample-buffer queue) and read
        // on the main thread in drawInMTKView: without synchronization — same
        // as the original; confirm whether a lock/serial hop is needed.
        _texture = CVMetalTextureGetTexture(texture);
        // NOTE(review): Apple's docs suggest holding the CVMetalTextureRef
        // until the GPU has finished reading the texture; the immediate
        // CFRelease below matches the original behavior — verify.
        CFRelease(texture);
    }
    // (Command-queue creation removed from here; it now happens once in
    // viewDidLoad.)
}

#pragma mark - MTKViewDelegate

/// Blurs the most recent camera frame into the view's drawable and presents it.
- (void)drawInMTKView:(MTKView *)view {
    if (!_texture) {
        return; // no new frame since the last draw
    }
    id<CAMetalDrawable> drawable = view.currentDrawable;
    if (!drawable) {
        // No drawable available this frame (the original would have encoded
        // into a nil texture); keep _texture and retry on the next tick.
        return;
    }

    id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];

    // Encode a Gaussian blur from the captured frame straight into the
    // drawable's texture.
    MPSImageGaussianBlur *filter = [[MPSImageGaussianBlur alloc] initWithDevice:_device sigma:5];
    [filter encodeToCommandBuffer:commandBuffer sourceTexture:_texture destinationTexture:drawable.texture];

    [commandBuffer presentDrawable:drawable];
    [commandBuffer commit];
    _texture = nil; // consume the frame so it is presented exactly once
}

- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
    // Intentionally empty: drawableSize is set explicitly per captured frame.
}

@end
@import-UIKit;
@进口基金会;
@进口CoreMedia;
#进口
#进口
#进口
@界面ViewController:UIViewController{
NSString*\u显示名称;
NSString*服务类型;
}
@属性(保留,非原子)会话容器*会话;
@属性(保留,非原子)AVCaptureSession*avSession;
@结束;
#导入“ViewController.h”
@界面视图控制器(){
MTKView*_metalView;
id_装置;
id_命令队列;
id_纹理;
CVMetalTextureCachef_textureCache;
}
@属性(强,非原子)AVCaptureDevice*视频设备;
@属性(非原子)调度队列会话队列;
@结束
@实现视图控制器
-(无效)viewDidLoad{
NSLog(@“%s”,函数);
[超级视图下载];
_设备=MTLCreateSystemDefaultDevice();
_metalView=[[MTKView alloc]initWithFrame:self.view.bounds];
[\u metalView设置内容模式:UIViewContentModeScaleAspectFit];
_metalView.device=\u设备;
_metalView.delegate=self;
_metalView.clearColor=MTLClearColorMake(1,1,1,1);
_metalView.colorPixelFormat=MTLPixelFormatBGRA8Unorm;
_metalView.framebufferOnly=否;
_metalView.autoResizeDrawable=否;
CVMetalTextureCacheCreate(NULL、NULL、_设备、NULL和_textureCache);
[self.view addSubview:_metalView];
self.sessionQueue=dispatch\u queue\u create(“会话队列”,dispatch\u queue\u SERIAL);
如果([自设置摄像头]){
[_avsessionstartrunning];
}
}
-(BOOL)设置照相机{
NSLog(@“%s”,函数);
@试一试{
n错误*错误;
_avSession=[[AVCaptureSession alloc]init];
[_avsessionbeginconfiguration];
[_AVSESSIONSETSESSIONPRESET:AVCaptureSessionPreset640x480];
//获取设备列表;连接到前置摄像头
self.videoDevice=[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
如果(self.videoDevice==nil)返回FALSE;
AVCaptureDeviceInput*输入=[AVCaptureDeviceInputDeviceInputWithDevice:self.videoDevice错误:&error];
[_avsessionaddinput:input];
dispatch_queue_t sampleBufferQueue=dispatch_queue_create(“CameraMulticaster”,dispatch_queue_SERIAL);
AVCaptureVideoDataOutput*数据输出=[[AVCaptureVideoDataOutput alloc]init];
[dataOutput setAlwaysDiscardsLateVideoFrames:是];
[dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)}];
[dataOutput setSampleBufferDelegate:自队列:sampleBufferQueue];
[_avsessionaddoutput:dataOutput];
[_AvsessionCommitConfiguration];
}@catch(NSException*exception){
NSLog(@“%s-%@”,函数,异常。描述);
返回FALSE;
}@终于{
返回TRUE;
}
}
-(void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)SampleBufferfromConnection:(AVCaptureConnection*)连接
{
CVPixelBufferRef pixelBuffer=CMSampleBufferGetImageBuffer(sampleBuffer);
{
size\u t width=CVPixelBufferGetWidth(pixelBuffer);
大小\u t高度=CVPixelBufferGetHeight(pixelBuffer);
CVMetalTextureRef纹理=空;
CVReturn status=CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault、_textureCache、pixelBuffer、NULL、MTLPixelFormatBGRA8Unorm、宽度、高度、0和纹理);
如果(状态==kCVReturnSuccess)
{
_metalView.drawableSize=CGSizeMake(宽度、高度);
_纹理=CVMetalTextureGetTexture(纹理);
_commandQueue=[[u设备队列];
释放(纹理);
}
}
}
-(无效)drawInMTKView:(MTKView*)视图{
//创建命令编码器
如果(_纹理){
id commandBuffer=[\u commandQueue commandBuffer];
id drawingTexture=view.currentDrawable.texture;
//设置并编码过滤器
MPSImageGaussianBlur*过滤器=[[MPSImageGaussianBlur alloc]initWithDevice:_DeviceSigma:5];
[filter encodeToCommandBuffer:commandBuffer sourceTexture:_TextureDestinationTexture:drawingTexture];
//提交绘图
[commandBuffer presentDrawable:view.currentD