
Streaming JPEG over Live555


I want to stream JPEG images or a Motion JPEG file over Live555. The problem is that a JPEG implementation is not available in Live555. Can anyone help?

Hopefully you have already done this, but if not, have you seen this? It is the same thing you are asking for: streaming images/JPEGs.
For MJPEG you have to follow the same process.

You can find an implementation that was posted to the devel mailing list. The code and samples are available, but this modification was rejected by the live555 maintainer.

First, we need to implement an MJPEGVideoSource that can feed a JPEGVideoRTPSink:

#include "JPEGVideoSource.hh"

class MJPEGVideoSource : public JPEGVideoSource
{
        public:
                static MJPEGVideoSource* createNew (UsageEnvironment& env, FramedSource* source)
                {
                        return new MJPEGVideoSource(env,source);
                }
                virtual void doGetNextFrame()
                {
                    if (m_inputSource)
                        m_inputSource->getNextFrame(fTo, fMaxSize, afterGettingFrameSub, this, FramedSource::handleClosure, this);                     
                }
                virtual void doStopGettingFrames()
                {
                    FramedSource::doStopGettingFrames();
                    if (m_inputSource)
                        m_inputSource->stopGettingFrames();                    
                }
                static void afterGettingFrameSub(void* clientData, unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime, unsigned durationInMicroseconds)
                {
                    MJPEGVideoSource* source = (MJPEGVideoSource*)clientData;
                    source->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
                }
                void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime, unsigned durationInMicroseconds)
                {
                    int headerSize = 0;
                    bool headerOk = false;
                    fFrameSize = 0;

                    for (unsigned int i = 0; i < frameSize ; ++i)
                    {
                        // SOF
                        if ( (i+8) < frameSize  && fTo[i] == 0xFF && fTo[i+1] == 0xC0 )
                        {
                             m_height = (fTo[i+5]<<5)|(fTo[i+6]>>3);
                             m_width = (fTo[i+7]<<5)|(fTo[i+8]>>3);
                        }
                        // DQT
                        if ( (i+5+64) < frameSize && fTo[i] == 0xFF && fTo[i+1] == 0xDB)
                        {
                            if (fTo[i+4] ==0)
                            {
                                memcpy(m_qTable, fTo + i + 5, 64);
                                m_qTable0Init = true;
                            }
                            else if (fTo[i+4] ==1)
                            {
                                memcpy(m_qTable + 64, fTo + i + 5, 64);
                                m_qTable1Init = true;
                            }
                        }
                        // End of header
                        if ( (i+1) < frameSize && fTo[i] == 0x3F && fTo[i+1] == 0x00 )
                        {
                             headerOk = true;
                             headerSize = i+2;
                             break;
                        }
                    }

                    if (headerOk)
                    {
                        fFrameSize = frameSize - headerSize;
                        memmove( fTo, fTo + headerSize, fFrameSize );
                    }

                    fNumTruncatedBytes = numTruncatedBytes;
                    fPresentationTime = presentationTime;
                    fDurationInMicroseconds = durationInMicroseconds;
                    afterGetting(this);
                }
                virtual u_int8_t type() { return 1; }
                virtual u_int8_t qFactor() { return 128; }
                virtual u_int8_t width() { return m_width; }
                virtual u_int8_t height() { return m_height; }
                u_int8_t const* quantizationTables( u_int8_t& precision, u_int16_t& length )
                {
                    length = 0;
                    precision = 0;
                    if ( m_qTable0Init && m_qTable1Init )
                    {
                        precision = 8;
                        length = sizeof(m_qTable);
                    }
                    return m_qTable;            
                }

        protected:
                MJPEGVideoSource(UsageEnvironment& env, FramedSource* source) : JPEGVideoSource(env),
                m_inputSource(source),
                m_width(0),
                m_height(0),
                m_qTable0Init(false),
                m_qTable1Init(false)
                {
                    memset(&m_qTable,0,sizeof(m_qTable));
                }
                virtual ~MJPEGVideoSource() 
                { 
                    Medium::close(m_inputSource); 
                }

        protected:
                FramedSource* m_inputSource;
                u_int8_t      m_width;
                u_int8_t      m_height;
                u_int8_t      m_qTable[128];
                bool          m_qTable0Init;
                bool          m_qTable1Init;
};
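
A note on the header parsing above, since the bit arithmetic is easy to misread: the JPEG SOF0 segment stores height and width as 16-bit big-endian values at fixed offsets after the 0xFF 0xC0 marker, while RFC 2435 (RTP/JPEG) carries them in units of 8 pixels. The expression (fTo[i+5]<<5)|(fTo[i+6]>>3) is therefore just a big-endian 16-bit read followed by a division by 8; since the result must fit in a u_int8_t, frames larger than 2040 pixels in either dimension are not representable. A minimal standalone sketch (not part of the original answer, using a hypothetical hand-built SOF0 header) to verify the arithmetic:

#include <cstdint>
#include <cassert>

int main()
{
    // Hypothetical SOF0 segment for a 640x480 frame; offsets mirror the
    // parser above: FF C0, length(2), precision(1), height(2), width(2).
    uint8_t sof[] = { 0xFF, 0xC0, 0x00, 0x11, 0x08,
                      0x01, 0xE0,   // height = 0x01E0 = 480
                      0x02, 0x80 }; // width  = 0x0280 = 640

    uint8_t h = (sof[5] << 5) | (sof[6] >> 3); // 480 / 8 = 60
    uint8_t w = (sof[7] << 5) | (sof[8] >> 3); // 640 / 8 = 80
    assert(h == 60 && w == 80);
    return 0;
}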
Next, we can use this class as the video source to build a simple RTSP server:

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include "MJPEGVideoSource.hh"

char const* inputFileName = "test.mjpeg";

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // Create 'groupsocks' for RTP and RTCP:
  struct in_addr destinationAddress;
  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);

  const unsigned short rtpPortNum = 18888;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 255;

  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  rtpGroupsock.multicastSendOnly(); // we're a SSM source
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
  rtcpGroupsock.multicastSendOnly(); // we're a SSM source

  // Create a 'JPEG Video RTP' sink from the RTP 'groupsock':
  RTPSink* videoSink = JPEGVideoRTPSink::createNew(*env, &rtpGroupsock);

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 5000; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  RTCPInstance* rtcp = RTCPInstance::createNew(*env, &rtcpGroupsock,
                estimatedSessionBandwidth, CNAME,
                videoSink, NULL /* we're a server */,
                True /* we're a SSM source */);
  // Note: This starts RTCP running automatically

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  ServerMediaSession* sms = ServerMediaSession::createNew(*env, "testStream", inputFileName,
                       "Session streamed by \"testMJPEGVideoStreamer\"",
                       True /*SSM*/);
  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  // Start the streaming:
  *env << "Beginning streaming...\n";
  // Open the input file as a 'byte-stream file source':
  ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(*env, inputFileName);
  if (fileSource == NULL) {
    *env << "Unable to open file \"" << inputFileName
     << "\" as a byte-stream file source\n";
    exit(1);
  }

  // Create the MJPEG video source:
  MJPEGVideoSource* videoSource = MJPEGVideoSource::createNew(*env, fileSource);

  // Finally, start playing:
  *env << "Beginning to read from file...\n";
  videoSink->startPlaying(*videoSource, NULL, NULL);

  env->taskScheduler().doEventLoop();

  return 0; 
}
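
The server above uses source-specific multicast (SSM): every client that opens the printed rtsp://...:8554/testStream URL receives the same multicast stream through the PassiveServerMediaSubsession. If per-client unicast is wanted instead, the usual live555 approach is to wrap the source in an OnDemandServerMediaSubsession. Below is a rough sketch under that assumption; the subclass is not part of the original answer, but the two overridden methods follow the live555 OnDemandServerMediaSubsession interface:

#include "liveMedia.hh"
#include "MJPEGVideoSource.hh"

// Hypothetical unicast subsession (not from the original answer): each RTSP
// client gets its own MJPEGVideoSource instead of joining a multicast group.
class MJPEGServerMediaSubsession : public OnDemandServerMediaSubsession
{
        public:
                static MJPEGServerMediaSubsession* createNew(UsageEnvironment& env, char const* fileName)
                {
                        return new MJPEGServerMediaSubsession(env, fileName);
                }

        protected:
                MJPEGServerMediaSubsession(UsageEnvironment& env, char const* fileName)
                : OnDemandServerMediaSubsession(env, True /*reuseFirstSource*/),
                  m_fileName(fileName) {}

                // Called by live555 for each new client session:
                virtual FramedSource* createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate)
                {
                        estBitrate = 5000; // kbps, rough estimate as in the multicast example
                        ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), m_fileName);
                        if (fileSource == NULL) return NULL;
                        return MJPEGVideoSource::createNew(envir(), fileSource);
                }

                // JPEG uses static RTP payload type 26, so the dynamic type is unused:
                virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char /*rtpPayloadTypeIfDynamic*/, FramedSource* /*inputSource*/)
                {
                        return JPEGVideoRTPSink::createNew(envir(), rtpGroupsock);
                }

        protected:
                char const* m_fileName;
};

With this in place, the session setup reduces to sms->addSubsession(MJPEGServerMediaSubsession::createNew(*env, inputFileName)); the explicit groupsocks, RTCP instance, and startPlaying() call are then handled by live555 internally.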