DirectX:使用DXGI_FORMAT_NV12格式从ID3D11Texture2D获取RGB数据的最佳方法?

DirectX:使用DXGI_FORMAT_NV12格式从ID3D11Texture2D获取RGB数据的最佳方法?,rgb,directx-11,yuv,texture2d,Rgb,Directx 11,Yuv,Texture2d,我正在使用DirectX绘制视频。通过Intel Media SDK解码后。然后我用下面的英特尔代码来画它: mfxStatus CD3D11Device::RenderFrame(mfxFrameSurface1 * pSrf, mfxFrameAllocator * pAlloc) { HRESULT hres = S_OK; mfxStatus sts; sts = CreateVideoProcessor(pSrf); MSDK_CHECK_STATUS …

我正在使用DirectX绘制视频。通过Intel Media SDK解码后。然后我用下面的英特尔代码来画它:

// Renders one decoded frame to the swap-chain back buffer and presents it.
// The D3D11 video processor performs the NV12 -> RGB conversion on the GPU
// during VideoProcessorBlt, so no CPU-side color conversion is needed.
//
// pSrf   - decoded frame surface (Intel Media SDK); Info.FrameId.ViewId
//          selects the left (0) / right (1) eye when m_nViews == 2.
// pAlloc - frame allocator used to resolve pSrf->Data.MemId to a D3D11 texture.
// Returns MFX_ERR_NONE on success, MFX_ERR_DEVICE_FAILED on any HRESULT failure
// (MSDK_CHECK_STATUS propagates mfxStatus errors).
mfxStatus CD3D11Device::RenderFrame(mfxFrameSurface1 * pSrf, mfxFrameAllocator * pAlloc)
{
    HRESULT hres = S_OK;
    mfxStatus sts;

    sts = CreateVideoProcessor(pSrf);
    MSDK_CHECK_STATUS(sts, "CreateVideoProcessor failed");

    // Fetch the back buffer that will receive the converted RGB image.
    // NOTE(review): writing through .p bypasses the smart pointer's Release();
    // presumably the previous reference is dropped elsewhere - confirm, or
    // this leaks one back-buffer reference per frame.
    hres = m_pSwapChain->GetBuffer(0, __uuidof( ID3D11Texture2D ), (void**)&m_pDXGIBackBuffer.p);
    if (FAILED(hres))
        return MFX_ERR_DEVICE_FAILED;

    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC OutputViewDesc;
    if (2 == m_nViews)
    {
        // Stereo: feed left/right as separate streams and emit a 2-slice
        // texture array (one slice per eye).
        m_pVideoContext->VideoProcessorSetStreamStereoFormat(m_pVideoProcessor, 0, TRUE,D3D11_VIDEO_PROCESSOR_STEREO_FORMAT_SEPARATE,
            TRUE, TRUE, D3D11_VIDEO_PROCESSOR_STEREO_FLIP_NONE, NULL);
        m_pVideoContext->VideoProcessorSetOutputStereoMode(m_pVideoProcessor,TRUE);

        OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2DARRAY;
        OutputViewDesc.Texture2DArray.ArraySize = 2;
        OutputViewDesc.Texture2DArray.MipSlice = 0;
        OutputViewDesc.Texture2DArray.FirstArraySlice = 0;
    }
    else
    {
        // Mono: a plain 2D output view of the back buffer.
        OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
        OutputViewDesc.Texture2D.MipSlice = 0;
    }

    // Create the output view once per frame (mono) or once per stereo pair
    // (when the left view, ViewId 0, arrives).
    if (1 == m_nViews || 0 == pSrf->Info.FrameId.ViewId)
    {
        hres = m_pDX11VideoDevice->CreateVideoProcessorOutputView(
            m_pDXGIBackBuffer,
            m_VideoProcessorEnum,
            &OutputViewDesc,
            &m_pOutputView.p );
        if (FAILED(hres))
            return MFX_ERR_DEVICE_FAILED;
    }

    D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC InputViewDesc;
    InputViewDesc.FourCC = 0; // 0 => derive the format from the texture itself (NV12 here), per D3D11 docs
    InputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
    InputViewDesc.Texture2D.MipSlice = 0;
    InputViewDesc.Texture2D.ArraySlice = 0;

    // Resolve the MSDK surface handle (MemId) to the underlying D3D11 texture.
    mfxHDLPair pair = {NULL};
    sts = pAlloc->GetHDL(pAlloc->pthis, pSrf->Data.MemId, (mfxHDL*)&pair);
    MSDK_CHECK_STATUS(sts, "pAlloc->GetHDL failed");

    ID3D11Texture2D  *pRTTexture2D = reinterpret_cast<ID3D11Texture2D*>(pair.first);
    D3D11_TEXTURE2D_DESC RTTexture2DDesc;

    // Stereo only: lazily create a scratch texture (same desc as the decoded
    // surface) to hold the left eye until the right eye arrives.
    if(!m_pTempTexture && m_nViews == 2)
    {
        pRTTexture2D->GetDesc(&RTTexture2DDesc);
        hres = m_pD3D11Device->CreateTexture2D(&RTTexture2DDesc,NULL,&m_pTempTexture.p);
        if (FAILED(hres))
            return MFX_ERR_DEVICE_FAILED;
    }

    // Creating input views for left and right eyes
    if (1 == m_nViews)
    {
        // Mono: a single input view straight over the decoded texture.
        hres = m_pDX11VideoDevice->CreateVideoProcessorInputView(
            pRTTexture2D,
            m_VideoProcessorEnum,
            &InputViewDesc,
            &m_pInputViewLeft.p );

    }
    else if (2 == m_nViews && 0 == pSrf->Info.FrameId.ViewId)
    {
        // Left eye: copy into the scratch texture first (presumably so the
        // decoder can recycle pRTTexture2D before the pair is blitted - confirm).
        m_pD3D11Ctx->CopyResource(m_pTempTexture,pRTTexture2D);
        hres = m_pDX11VideoDevice->CreateVideoProcessorInputView(
            m_pTempTexture,
            m_VideoProcessorEnum,
            &InputViewDesc,
            &m_pInputViewLeft.p );
    }
    else
    {
        // Right eye: view the decoded texture directly.
        hres = m_pDX11VideoDevice->CreateVideoProcessorInputView(
            pRTTexture2D,
            m_VideoProcessorEnum,
            &InputViewDesc,
            &m_pInputViewRight.p );
    }
    // Covers whichever CreateVideoProcessorInputView branch ran above.
    if (FAILED(hres))
        return MFX_ERR_DEVICE_FAILED;

    //  NV12 surface to RGB backbuffer
    RECT rect = {0};
    rect.right  = pSrf->Info.CropW;   // restrict to the visible (cropped) area
    rect.bottom = pSrf->Info.CropH;

    D3D11_VIDEO_PROCESSOR_STREAM StreamData;

    // Blit for every mono frame, or in stereo only once the right view
    // (ViewId 1) completes the pair.
    if (1 == m_nViews || pSrf->Info.FrameId.ViewId == 1)
    {
        StreamData.Enable = TRUE;
        StreamData.OutputIndex = 0;
        StreamData.InputFrameOrField = 0;
        StreamData.PastFrames = 0;
        StreamData.FutureFrames = 0;
        StreamData.ppPastSurfaces = NULL;
        StreamData.ppFutureSurfaces = NULL;
        StreamData.pInputSurface = m_pInputViewLeft;
        StreamData.ppPastSurfacesRight = NULL;
        StreamData.ppFutureSurfacesRight = NULL;
        StreamData.pInputSurfaceRight = m_nViews == 2 ? m_pInputViewRight : NULL;

        m_pVideoContext->VideoProcessorSetStreamSourceRect(m_pVideoProcessor, 0, true, &rect);
        m_pVideoContext->VideoProcessorSetStreamFrameFormat( m_pVideoProcessor, 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE);
        hres = m_pVideoContext->VideoProcessorBlt( m_pVideoProcessor, m_pOutputView, 0, 1, &StreamData );
        if (FAILED(hres))
            return MFX_ERR_DEVICE_FAILED;
    }

    // Present after a mono frame, or after the stereo pair is complete.
    if (1 == m_nViews || 1 == pSrf->Info.FrameId.ViewId)
    {
        DXGI_PRESENT_PARAMETERS parameters = {0};
        hres = m_pSwapChain->Present1(0, 0, &parameters);
        if (FAILED(hres))
            return MFX_ERR_DEVICE_FAILED;
    }

    return MFX_ERR_NONE;
}
mfxStatus CD3D11Device::RenderFrame(mfxFrameSurface1 * pSrf, mfxFrameAllocator * pAlloc)
{
    HRESULT hres = S_OK;
    mfxStatus sts;

    sts = CreateVideoProcessor(pSrf);
    MSDK_CHECK_STATUS(sts, "CreateVideoProcessor failed");

    hres = m_pSwapChain->GetBuffer(0, __uuidof( ID3D11Texture2D ), (void**)&m_pDXGIBackBuffer.p);
    if (FAILED(hres))
        return MFX_ERR_DEVICE_FAILED;

    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC OutputViewDesc;
    if (2 == m_nViews)
    {
        m_pVideoContext->VideoProcessorSetStreamStereoFormat(m_pVideoProcessor, 0, TRUE, D3D11_VIDEO_PROCESSOR_STEREO_FORMAT_SEPARATE,
            TRUE, TRUE, D3D11_VIDEO_PROCESSOR_STEREO_FLIP_NONE, NULL);
        m_pVideoContext->VideoProcessorSetOutputStereoMode(m_pVideoProcessor, TRUE);

        OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2DARRAY;
        OutputViewDesc.Texture2DArray.ArraySize = 2;
        OutputViewDesc.Texture2DArray.MipSlice = 0;
        OutputViewDesc.Texture2DArray.FirstArraySlice = 0;
    }
    else
    {
        OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
        OutputViewDesc.Texture2D.MipSlice = 0;
    }

    if (1 == m_nViews || 0 == pSrf->Info.FrameId.ViewId)
    {
        hres = m_pDX11VideoDevice->CreateVideoProcessorOutputView(
            m_pDXGIBackBuffer,
            m_VideoProcessorEnum,
            &OutputViewDesc,
            &m_pOutputView.p);
        if (FAILED(hres))
            return MFX_ERR_DEVICE_FAILED;
    }

    D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC InputViewDesc;
    InputViewDesc.FourCC = 0;
    InputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
    InputViewDesc.Texture2D.MipSlice = 0;
    InputViewDesc.Texture2D.ArraySlice = 0;

    mfxHDLPair pair = {NULL};
    sts = pAlloc->GetHDL(pAlloc->pthis, pSrf->Data.MemId, (mfxHDL*)&pair);
    MSDK_CHECK_STATUS(sts, "pAlloc->GetHDL failed");

    ID3D11Texture2D *pRTTexture2D = reinterpret_cast<ID3D11Texture2D*>(pair.first);
    D3D11_TEXTURE2D_DESC RTTexture2DDesc;

    if (!m_pTempTexture && m_nViews == 2)
    {
        pRTTexture2D->GetDesc(&RTTexture2DDesc);
        hres = m_pD3D11Device->CreateTexture2D(&RTTexture2DDesc, NULL, &m_pTempTexture.p);
        if (FAILED(hres))
            return MFX_ERR_DEVICE_FAILED;
    }

    // 为左眼和右眼创建输入视图
    if (1 == m_nViews)
    {
        hres = m_pDX11VideoDevice->CreateVideoProcessorInputView(
            pRTTexture2D,
            m_VideoProcessorEnum,
            &InputViewDesc,
            &m_pInputViewLeft.p);
    }
    else if (2 == m_nViews && 0 == pSrf->Info.FrameId.ViewId)
    {
        m_pD3D11Ctx->CopyResource(m_pTempTexture, pRTTexture2D);
        hres = m_pDX11VideoDevice->CreateVideoProcessorInputView(
            m_pTempTexture,
            m_VideoProcessorEnum,
            &InputViewDesc,
            &m_pInputViewLeft.p);
    }
    else
    {
        hres = m_pDX11VideoDevice->CreateVideoProcessorInputView(
            pRTTexture2D,
            m_VideoProcessorEnum,
            &InputViewDesc,
            &m_pInputViewRight.p);
    }
    if (FAILED(hres))
        return MFX_ERR_DEVICE_FAILED;

    // NV12 表面转换到 RGB 后备缓冲区
    RECT rect = {0};
    rect.right  = pSrf->Info.CropW;
    rect.bottom = pSrf->Info.CropH;

    D3D11_VIDEO_PROCESSOR_STREAM StreamData;

    if (1 == m_nViews || pSrf->Info.FrameId.ViewId == 1)
    {
        StreamData.Enable = TRUE;
        StreamData.OutputIndex = 0;
        StreamData.InputFrameOrField = 0;
        StreamData.PastFrames = 0;
        StreamData.FutureFrames = 0;
        StreamData.ppPastSurfaces = NULL;
        StreamData.ppFutureSurfaces = NULL;
        StreamData.pInputSurface = m_pInputViewLeft;
        StreamData.ppPastSurfacesRight = NULL;
        StreamData.ppFutureSurfacesRight = NULL;
        StreamData.pInputSurfaceRight = m_nViews == 2 ? m_pInputViewRight : NULL;

        m_pVideoContext->VideoProcessorSetStreamSourceRect(m_pVideoProcessor, 0, true, &rect);
        m_pVideoContext->VideoProcessorSetStreamFrameFormat(m_pVideoProcessor, 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE);
        hres = m_pVideoContext->VideoProcessorBlt(m_pVideoProcessor, m_pOutputView, 0, 1, &StreamData);
        if (FAILED(hres))
            return MFX_ERR_DEVICE_FAILED;
    }

    if (1 == m_nViews || 1 == pSrf->Info.FrameId.ViewId)
    {
        DXGI_PRESENT_PARAMETERS parameters = {0};
        hres = m_pSwapChain->Present1(0, 0, &parameters);
        if (FAILED(hres))
            return MFX_ERR_DEVICE_FAILED;
    }

    return MFX_ERR_NONE;
}
从代码行:

ID3D11Texture2D  *pRTTexture2D = reinterpret_cast<ID3D11Texture2D*>(pair.first);
ID3D11Texture2D *pRTTexture2D = reinterpret_cast<ID3D11Texture2D*>(pair.first);
我的
pRTTexture2D
是一个
ID3D11Texture2D
格式为DXGI_FORMAT_NV12

我想从该纹理获取RGB数据,并尝试使用以下方法:

1) 映射纹理使用
d3dContext->Map(texture, 0, D3D11_MAP_READ, 0, &mapInfo) => 在我的情况下必须先复制到暂存(staging)资源

2) 在系统内存中创建一个
RGB数组
,并计算从
mapInfo
上的NV12转换为
RGB数组

这种方法行得通,但我想用更好的方法。因为我猜在渲染
(RenderFrame()函数)
时,DirectX将纹理转换为BackBuffer中的RGB,如果我可以从BackBuffer中获取数据,那就太好了

有人可以告诉我上面的代码。还是有更好的方法来实施它

多谢各位