
Live-streaming v4l2 camera video with live555



Building on the earlier v4l2 video-capture code, this uses live555 to publish a live stream over RTSP. For capture.h, capture.cpp, vcompress.h and vcompress.cpp, refer to the previous articles; only v4l2_x264_service.cpp is shown here.
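
For reference, the capture and encoder interface that this file relies on looks roughly like the sketch below. This is only a guess reconstructed from how the functions are called in v4l2_x264_service.cpp; the real declarations live in capture.h and vcompress.h from the previous articles, and the field names, array sizes and return types here may differ.

    // Hypothetical sketch of the capture.h / vcompress.h interface, inferred
    // from the calls below -- see the earlier articles for the real headers.
    struct Picture
    {
        unsigned char *data[4];   // YUV420P plane pointers (layout assumed)
        int stride[4];            // bytes per row of each plane (assumed)
    };

    // v4l2 capture: open the device at the requested size/format, grab one picture, close
    void *capture_open (const char *dev, int width, int height, PixelFormat fmt);
    int   capture_get_picture (void *ctx, Picture *pic);          // < 0 on error
    void  capture_close (void *ctx);

    // x264 encoder: open, compress one picture into an H.264 access unit,
    // query key/pts/dts of the last encoded frame, close
    void *vc_open (int width, int height, double fps);
    int   vc_compress (void *ctx, unsigned char **data, int *stride,
                       const void **outbuf, int *outlen);          // < 0 on error
    void  vc_get_last_frame_info (void *ctx, int *key, int64_t *pts, int64_t *dts);
    void  vc_close (void *ctx);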


    #include <stdio.h>
    #include <stdlib.h>
    #include <unistd.h>
    #include <string.h>
    #include <sys/time.h>
    #include <sys/types.h>
    #include <sys/syscall.h>
    #include <liveMedia.hh>
    #include <BasicUsageEnvironment.hh>
    #include "capture.h"
    #include "vcompress.h"

    static UsageEnvironment *_env = 0;

    #define SINK_PORT 3030
    #define VIDEO_WIDTH 320
    #define VIDEO_HEIGHT 240
    #define FRAME_PER_SEC 5.0

    pid_t gettid()
    {
        return syscall(SYS_gettid);
    }

    // uses webcam + x264
    class WebcamFrameSource : public FramedSource
    {
        void *mp_capture, *mp_compress;     // v4l2 + x264 encoder
        int m_started;
        void *mp_token;

    public:
        WebcamFrameSource (UsageEnvironment &env)
            : FramedSource(env)
        {
            fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
            mp_capture = capture_open("/dev/video0", VIDEO_WIDTH, VIDEO_HEIGHT, PIX_FMT_YUV420P);
            if (!mp_capture) {
                fprintf(stderr, "%s: open /dev/video0 err\n", __func__);
                exit(-1);
            }
            mp_compress = vc_open(VIDEO_WIDTH, VIDEO_HEIGHT, FRAME_PER_SEC);
            if (!mp_compress) {
                fprintf(stderr, "%s: open x264 err\n", __func__);
                exit(-1);
            }
            m_started = 0;
            mp_token = 0;
        }

        ~WebcamFrameSource ()
        {
            fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
            if (m_started) {
                envir().taskScheduler().unscheduleDelayedTask(mp_token);
            }
            if (mp_compress)
                vc_close(mp_compress);
            if (mp_capture)
                capture_close(mp_capture);
        }

    protected:
        virtual void doGetNextFrame ()
        {
            if (m_started) return;
            m_started = 1;

            // compute the wait time from the fps
            double delay = 1000.0 / FRAME_PER_SEC;
            int to_delay = delay * 1000;    // us
            mp_token = envir().taskScheduler().scheduleDelayedTask(to_delay,
                    getNextFrame, this);
        }


        // Important: without this override, the fMaxSize seen in getNextFrame() may be
        // smaller than an actual encoded frame, which truncates the picture
        virtual unsigned maxFrameSize() const
        { return 100*1024; }


    private:
        static void getNextFrame (void *ptr)
        {
            ((WebcamFrameSource*)ptr)->getNextFrame1();
        }

        void getNextFrame1 ()
        {
            // capture:
            Picture pic;
            if (capture_get_picture(mp_capture, &pic) < 0) {
                fprintf(stderr, "==== %s: capture_get_picture err\n", __func__);
                m_started = 0;
                return;
            }

            // compress
            const void *outbuf;
            int outlen;
            if (vc_compress(mp_compress, pic.data, pic.stride, &outbuf, &outlen) < 0) {
                fprintf(stderr, "==== %s: vc_compress err\n", __func__);
                m_started = 0;
                return;
            }

            int64_t pts, dts;
            int key;
            vc_get_last_frame_info(mp_compress, &key, &pts, &dts);

            // save outbuf
            gettimeofday(&fPresentationTime, 0);
            fFrameSize = outlen;
            if (fFrameSize > fMaxSize) {
                fNumTruncatedBytes = fFrameSize - fMaxSize;
                fFrameSize = fMaxSize;
            }
            else {
                fNumTruncatedBytes = 0;
            }
            memmove(fTo, outbuf, fFrameSize);

            // notify
            afterGetting(this);

            m_started = 0;
        }
    };
    class WebcamOndemandMediaSubsession : public OnDemandServerMediaSubsession
    {
    public:
        static WebcamOndemandMediaSubsession *createNew (UsageEnvironment &env, FramedSource *source)
        {
            return new WebcamOndemandMediaSubsession(env, source);
        }

    protected:
        WebcamOndemandMediaSubsession (UsageEnvironment &env, FramedSource *source)
            : OnDemandServerMediaSubsession(env, True)      // reuse the first source
        {
            fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
            mp_source = source;
            mp_sdp_line = 0;
        }

        ~WebcamOndemandMediaSubsession ()
        {
            fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
            if (mp_sdp_line) free(mp_sdp_line);
        }

    private:
        static void afterPlayingDummy (void *ptr)
        {
            fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
            // ok
            WebcamOndemandMediaSubsession *This = (WebcamOndemandMediaSubsession*)ptr;
            This->m_done = 0xff;
        }

        static void chkForAuxSDPLine (void *ptr)
        {
            WebcamOndemandMediaSubsession *This = (WebcamOndemandMediaSubsession *)ptr;
            This->chkForAuxSDPLine1();
        }

        void chkForAuxSDPLine1 ()
        {
            fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
            if (mp_dummy_rtpsink->auxSDPLine())
                m_done = 0xff;
            else {
                int delay = 100*1000;   // 100ms
                nextTask() = envir().taskScheduler().scheduleDelayedTask(delay,
                        chkForAuxSDPLine, this);
            }
        }

    protected:
        virtual const char *getAuxSDPLine (RTPSink *sink, FramedSource *source)
        {
            fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
            if (mp_sdp_line) return mp_sdp_line;

            mp_dummy_rtpsink = sink;
            mp_dummy_rtpsink->startPlaying(*source, 0, 0);
            //mp_dummy_rtpsink->startPlaying(*source, afterPlayingDummy, this);
            m_done = 0;     // reset the flag before the first check so a ready SDP line is not lost
            chkForAuxSDPLine(this);
            envir().taskScheduler().doEventLoop(&m_done);
            mp_sdp_line = strdup(mp_dummy_rtpsink->auxSDPLine());
            mp_dummy_rtpsink->stopPlaying();
            return mp_sdp_line;
        }

        virtual RTPSink *createNewRTPSink(Groupsock *rtpsock, unsigned char type, FramedSource *source)
        {
            fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
            return H264VideoRTPSink::createNew(envir(), rtpsock, type);
        }

        virtual FramedSource *createNewStreamSource (unsigned sid, unsigned &bitrate)
        {
            fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);
            bitrate = 500;
            return H264VideoStreamFramer::createNew(envir(), new WebcamFrameSource(envir()));
        }

    private:
        FramedSource *mp_source;    // the WebcamFrameSource
        char *mp_sdp_line;
        RTPSink *mp_dummy_rtpsink;
        char m_done;
    };
    static void test_task (void *ptr)
    {
        fprintf(stderr, "test: task ....\n");
        _env->taskScheduler().scheduleDelayedTask(100000, test_task, 0);
    }

    static void test (UsageEnvironment &env)
    {
        fprintf(stderr, "test: begin...\n");
        char done = 0;
        int delay = 100 * 1000;
        env.taskScheduler().scheduleDelayedTask(delay, test_task, 0);
        env.taskScheduler().doEventLoop(&done);
        fprintf(stderr, "test: end..\n");
    }

    int main (int argc, char **argv)
    {
        // env
        TaskScheduler *scheduler = BasicTaskScheduler::createNew();
        _env = BasicUsageEnvironment::createNew(*scheduler);

        // test
        //test(*_env);

        // rtsp server
        RTSPServer *rtspServer = RTSPServer::createNew(*_env, 8554);
        if (!rtspServer) {
            fprintf(stderr, "ERR: create RTSPServer err\n");
            ::exit(-1);
        }

        // add live stream
        do {
            WebcamFrameSource *webcam_source = 0;
            ServerMediaSession *sms = ServerMediaSession::createNew(*_env, "webcam", 0, "Session from /dev/video0");
            sms->addSubsession(WebcamOndemandMediaSubsession::createNew(*_env, webcam_source));
            rtspServer->addServerMediaSession(sms);

            char *url = rtspServer->rtspURL(sms);
            *_env << "using url \"" << url << "\"\n";
            delete [] url;
        } while (0);

        // run loop
        _env->taskScheduler().doEventLoop();

        return 1;
    }


Requires live555 + libavcodec + libswscale + libx264. As a client, use VLC, MPlayer, QuickTime, etc.
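
Assuming live555 and the ffmpeg/x264 libraries are installed in standard locations, a link line along the lines of `g++ v4l2_x264_service.cpp capture.cpp vcompress.cpp -lliveMedia -lgroupsock -lBasicUsageEnvironment -lUsageEnvironment -lavcodec -lswscale -lavutil -lx264 -lpthread -o v4l2_x264_service` should be close; the exact include and library paths depend on your installation. With the server running, the stream set up in main() is available at rtsp://<server-ip>:8554/webcam (port 8554, stream name "webcam"), which VLC or MPlayer can open directly, e.g. `vlc rtsp://<server-ip>:8554/webcam`.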
