C++: How to generate an MP4 video file with "moov before mdat" using Media Foundation

I have found that Microsoft Media Foundation generates MPEG-4 files in which the mdat atom comes before the moov atom. Streaming requires moov before mdat. I assume the solution to my problem is to use the MF_MPEG4SINK_MOOV_BEFORE_MDAT attribute when creating the sink, but I cannot seem to get it to have any effect. My code is essentially the same as this example; in that example, I set the attribute to UINT32 TRUE just before MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS is set.
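For reference, a minimal sketch of what the question is attempting, under the assumption that the attribute in question is MF_MPEG4SINK_MOOV_BEFORE_MDAT and that CHK is the same error-checking macro used in the answers below:

ComPtr<IMFAttributes> spAttr;
CHK(MFCreateAttributes(&spAttr, 2));
// Ask the MPEG-4 sink to emit the moov box before the mdat box.
CHK(spAttr->SetUINT32(MF_MPEG4SINK_MOOV_BEFORE_MDAT, TRUE));
CHK(spAttr->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE));

ComPtr<IMFSinkWriter> spSinkWriter;
CHK(MFCreateSinkWriterFromURL(L"output.mp4", nullptr, spAttr.Get(), &spSinkWriter));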

Did you read the Remarks in the documentation for MF_MPEG4SINK_MOOV_BEFORE_MDAT?

"For the MPEG-4 sink to use this attribute, the byte stream that is passed in must not be slow-seek or remote."

Check the capabilities of your IMFByteStream:

MFBYTESTREAM_IS_REMOTE and MFBYTESTREAM_HAS_SLOW_SEEK should both be clear.

If your IMFByteStream does not qualify, then write the file mdat->moov first and afterwards remux it into a new file moov->mdat.
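A minimal check along these lines, assuming m_SpByteStream is the byte stream handed to the sink writer (as in the code further down); IMFByteStream::GetCapabilities and the two capability flags are standard Media Foundation API, the rest is a sketch:

DWORD caps = 0;
CHK(m_SpByteStream->GetCapabilities(&caps));
// The MPEG-4 sink ignores MF_MPEG4SINK_MOOV_BEFORE_MDAT for remote or slow-seek streams.
if ((caps & MFBYTESTREAM_IS_REMOTE) || (caps & MFBYTESTREAM_HAS_SLOW_SEEK))
{
    // Stream does not qualify: write mdat-first and remux to moov-first afterwards.
}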

If you cannot get the encoder to output the file with the moov atom at the beginning, you can fix the file up afterwards. These two utilities should both run on Windows and accomplish this. (Despite the "qt" in their names, they handle .mp4 files without any problem.)


Although a long time has passed since this post, I also ran into the problem of getting moov before mdat when encoding with Media Foundation. Microsoft provides very little documentation on the subject. The setting needs to be applied to the MFT video sink or the SinkWriter when the object is created.

I managed to enable the feature I wanted, but the video in the recording consisted of empty frames, even though the file size was the same and the mp4box info reported progressive download support, the video length, and so on. The audio was fine. The GOP also did not show up in the info, so there was still a misconfiguration somewhere.

Then I read some more about H.264 and the structure of MP4.

The key for me was that the MPEG container needs to be a fragmented MP4 container, so simply setting the container type to FMPEG4 instead of MPEG4 did the trick. Below is the SinkWriter initialization that works well for this.

This is my complete initialization of the writer:

ComPtr<ID3D11Device> device;
ComPtr<ID3D11Device> dx3Device; // multithread configuration.
ComPtr<ID3D10Multithread> dx3MultiThread;
ComPtr<IMFDXGIDeviceManager> manager;

unsigned videoQuality = 50;
unsigned videoBitrate = FPS * width * height; //  DEFAULT_BITRATE;
videoBitrate = DEFAULT_BITRATE;
// Audio Input
const UINT SamplesPerSecond = BaseSampleRate;
const UINT AverageBytesPerSecond = SamplesPerSecond / sizeof(FLOAT);
const UINT ChannelCount = 2;     // Converted
const UINT BitsPerSample = 16;   // Converted

MFStartup(MF_VERSION, MFSTARTUP_NOSOCKET);
_Clock = new Clock();
// Create a random access stream in memory
// CHK(MFCreateMFByteStreamOnStreamEx((IUnknown*)videoStream, &m_SpByteStream));

// Create a temporary working MP4.
IMFByteStreamEx::CreateInstance((IUnknown*)videoStream, IID_IMFByteStream, &m_SpByteStream);

// Create the Sink Writer
ComPtr<IMFAttributes> spAttr;

ComPtr<IMFMediaType> audioOutputType;
ComPtr<IMFMediaType> spVideoTypeIn;
ComPtr<IMFMediaType> spVideoTypeOut;

CHK(MFCreateAttributes(&spAttr, 10));
CHK(spAttr->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE));
CHK(spAttr->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, FALSE));
CHK(spAttr->SetUINT32(MF_SINK_WRITER_DISABLE_THROTTLING, TRUE));
CHK(spAttr->SetUINT32(MF_LOW_LATENCY, TRUE));
CHK(spAttr->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_FMPEG4));
CHK(spAttr->SetUINT32(MF_MPEG4SINK_MOOV_BEFORE_MDAT, TRUE));

// Setup the output video media type   

HRESULT hr = 0;
D3D_FEATURE_LEVEL levels[] = { D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0 };
CHK(D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, D3D11_CREATE_DEVICE_VIDEO_SUPPORT | D3D11_CREATE_DEVICE_BGRA_SUPPORT,
    levels, ARRAYSIZE(levels), D3D11_SDK_VERSION, &device, nullptr, nullptr));
UINT token;
CHK(MFCreateDXGIDeviceManager(&token, &manager));
HANDLE deviceHandle;

CHK(manager->ResetDevice(reinterpret_cast<IUnknown*>(device.Get()), token));
if (SUCCEEDED(manager->OpenDeviceHandle(&deviceHandle))) {
    // https://docs.microsoft.com/en-au/windows/desktop/medfound/supporting-direct3d-11-video-decoding-in-media-foundation
    // make sure we are using the same device
    hr = manager->GetVideoService(deviceHandle, IID_PPV_ARGS(&dx3Device));
    hr = dx3Device->QueryInterface(IID_PPV_ARGS(&dx3MultiThread));
    dx3MultiThread->SetMultithreadProtected(TRUE);
}
CHK(spAttr->SetUnknown(MF_SINK_WRITER_D3D_MANAGER, manager.Get()));
CHK(MFCreateSinkWriterFromURL(L".mp4v", m_SpByteStream.Get(), spAttr.Get(), &m_SpSinkWriter));

//// Video In Format
CHK(MFCreateMediaType(&spVideoTypeIn));
CHK(spVideoTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
CHK(spVideoTypeIn->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32));

CHK(spVideoTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
CHK(MFSetAttributeSize(spVideoTypeIn.Get(), MF_MT_FRAME_SIZE, m_Width, m_Height));
CHK(MFSetAttributeRatio(spVideoTypeIn.Get(), MF_MT_FRAME_RATE, m_FramesPerSecond, 1));
CHK(MFSetAttributeRatio(spVideoTypeIn.Get(), MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
CHK(spVideoTypeIn->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, FALSE));
CHK(spVideoTypeIn->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
// Video Out format

CHK(MFCreateMediaType(&spVideoTypeOut));
CHK(spVideoTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
CHK(spVideoTypeOut->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
CHK(spVideoTypeOut->SetUINT32(MF_MT_COMPRESSED, FALSE));
CHK(spVideoTypeOut->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, FALSE));
CHK(spVideoTypeOut->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, FALSE));
CHK(spVideoTypeOut->SetUINT32(MF_MT_AVG_BITRATE, videoBitrate ));
CHK(spVideoTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
CHK(spVideoTypeOut->SetUINT32(MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_High));
CHK(MFSetAttributeSize(spVideoTypeOut.Get(), MF_MT_FRAME_SIZE, m_Width, m_Height));
CHK(MFSetAttributeRatio(spVideoTypeOut.Get(), MF_MT_FRAME_RATE, m_FramesPerSecond , 1));
CHK(MFSetAttributeRatio(spVideoTypeOut.Get(), MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
spVideoTypeOut->SetUINT32(MF_MT_SAMPLE_SIZE, 1);
MFSetAttributeSize(spVideoTypeOut.Get(), MF_MT_FRAME_RATE_RANGE_MAX, m_FramesPerSecond, 1);
MFSetAttributeSize(spVideoTypeOut.Get(), MF_MT_FRAME_RATE_RANGE_MIN, m_FramesPerSecond / 2, 1);

// Audio In Format
ComPtr<IMFMediaType> spAudioTypeIn;
CHK(MFCreateMediaType(&spAudioTypeIn));
CHK(spAudioTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
CHK(spAudioTypeIn->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM));
CHK(spAudioTypeIn->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, BitsPerSample));
CHK(spAudioTypeIn->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, BaseSampleRate));
CHK(spAudioTypeIn->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, ChannelCount));
CHK(spAudioTypeIn->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, AverageBytesPerSecond)); // 32-bit converted to 16
CHK(spAudioTypeIn->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, 4));
CHK(spAudioTypeIn->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, FALSE));
CHK(spAudioTypeIn->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));

CHK(MFCreateMediaType(&audioOutputType));
CHK(audioOutputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
CHK(audioOutputType->SetUINT32(MF_MT_AVG_BITRATE, 16000));
CHK(audioOutputType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_AAC));
CHK(audioOutputType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, SamplesPerSecond));
CHK(audioOutputType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, BitsPerSample / ((BitsPerSample > 16) ? 2 : 1)));
CHK(audioOutputType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, ChannelCount));
CHK(audioOutputType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, 12000)); // AverageBytesPerSecond));
CHK(audioOutputType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, 1));
CHK(audioOutputType->SetUINT32(MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION, 0x29));
CHK(audioOutputType->SetUINT32(MF_MT_AUDIO_PREFER_WAVEFORMATEX, 1));
CHK(audioOutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, FALSE));
CHK(audioOutputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, TRUE));
// Add Video out stream

ComPtr<IMFAttributes> encoderAttributes; 
if (TRUE) {                              // Experimental
    CHK(MFCreateAttributes(&encoderAttributes, 12));
    if (TRUE) {
        unsigned force_keyframe_every_nframes = 11;
        unsigned force_bframe_every_nframes = 2;
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncMPVGOPSize, force_keyframe_every_nframes));
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncMPVDefaultBPictureCount, force_bframe_every_nframes));
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncNumWorkerThreads, 6));
    }
    if (TRUE) {
        // constant quality for screen captures
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE));
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncCommonRealTime, 1));
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncAdaptiveMode, eAVEncAdaptiveMode_Resolution));
        CHK(encoderAttributes->SetGUID(CODECAPI_AVEncCodecType, CODECAPI_GUID_AVEncH264Video));
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncCommonMultipassMode, 2));
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncCommonRateControlMode, eAVEncCommonRateControlMode_PeakConstrainedVBR));
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncCommonMeanBitRate, DEFAULT_BITRATE));
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncCommonStreamEndHandling, eAVEncCommonStreamEndHandling_EnsureComplete));
        CHK(encoderAttributes->SetUINT32(CODECAPI_AVEncVideoContentType, eAVEncVideoContentType_FixedCameraAngle));
    }
}
CHK(m_SpSinkWriter->AddStream(spVideoTypeOut.Get(), &m_VideoStreamIndex));
CHK(m_SpSinkWriter->SetInputMediaType(m_VideoStreamIndex, spVideoTypeIn.Get(), encoderAttributes.Get()));
CHK(m_SpSinkWriter->AddStream(audioOutputType.Get(), &m_AudioStreamIndex));
CHK(m_SpSinkWriter->SetInputMediaType(m_AudioStreamIndex, spAudioTypeIn.Get(), nullptr));
_Clock->Start();
m_ClockStart = clock();
CHK(m_SpSinkWriter->BeginWriting());