How live555 connects source and sink through a subsession

1. First, modify live555 by defining a class that obtains its source data directly from memory rather than reading it from a file. The class implemented here is named "H264FramedLiveSource".
/*
 * Filename: H264FramedLiveSource.hh
 * Author: chenbin
 * Create date:
 */
#ifndef _H264FRAMEDLIVESOURCE_HH
#define _H264FRAMEDLIVESOURCE_HH

#include "FramedSource.hh"

class H264FramedLiveSource : public FramedSource
{
public:
    static H264FramedLiveSource* createNew(UsageEnvironment& env,
                                           char const* fileName,
                                           unsigned preferredFrameSize = 0,
                                           unsigned playTimePerFrame = 0);

protected:
    H264FramedLiveSource(UsageEnvironment& env,
                         char const* fileName,
                         unsigned preferredFrameSize,
                         unsigned playTimePerFrame);
    // called only by createNew()
    ~H264FramedLiveSource();

    // redefined virtual functions:
    virtual void doGetNextFrame();
    int TransportData(unsigned char* to, unsigned maxSize);

protected:
    FILE* fp; // test input file (stands in for the real in-memory feed)
};

#endif
/*
 * Filename: H264FramedLiveSource.cpp
 * Create date:
 */
#include "H264FramedLiveSource.hh"

H264FramedLiveSource::H264FramedLiveSource(UsageEnvironment& env,
                                           char const* fileName,
                                           unsigned preferredFrameSize,
                                           unsigned playTimePerFrame)
    : FramedSource(env)
{
    fp = fopen(fileName, "rb");
}

H264FramedLiveSource*
H264FramedLiveSource::createNew(UsageEnvironment& env,
                                char const* fileName,
                                unsigned preferredFrameSize /*= 0*/,
                                unsigned playTimePerFrame /*= 0*/)
{
    H264FramedLiveSource* newSource
        = new H264FramedLiveSource(env, fileName, preferredFrameSize, playTimePerFrame);
    return newSource;
}

H264FramedLiveSource::~H264FramedLiveSource()
{
    fclose(fp);
}
long filesize(FILE* stream)
{
    long curpos, length;
    curpos = ftell(stream);
    fseek(stream, 0L, SEEK_END);
    length = ftell(stream);
    fseek(stream, curpos, SEEK_SET);
    return length;
}

void H264FramedLiveSource::doGetNextFrame()
{
    if (filesize(fp) > fMaxSize)
        fFrameSize = fread(fTo, 1, fMaxSize, fp);
    else {
        fFrameSize = fread(fTo, 1, filesize(fp), fp);
        fseek(fp, 0, SEEK_SET); // rewind and loop the test file
    }
    //fFrameSize = fMaxSize;
    nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
        (TaskFunc*)FramedSource::afterGetting, this);
        // i.e. run afterGetting after a delay of 0 seconds
}
In H264FramedLiveSource::doGetNextFrame(), copy the data to be sent into fTo (at most fMaxSize bytes) and set fFrameSize to the number of bytes actually delivered. For now the data is still read from a file, as a test.
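When the data really comes from memory (a live encoder, a ring buffer filled by a capture thread, and so on), the fread() above is simply replaced by a copy out of your own buffer. A minimal sketch of that variant, where getEncodedFrame() is a hypothetical application-side helper (not a live555 API) that copies at most maxSize bytes of the next encoded NAL unit into 'to' and returns the number of bytes copied:

// Hypothetical application-side helper, NOT part of live555: copies at most
// maxSize bytes of the next encoded NAL unit into 'to', returns the count.
unsigned getEncodedFrame(unsigned char* to, unsigned maxSize);

void H264FramedLiveSource::doGetNextFrame()
{
    fFrameSize = getEncodedFrame(fTo, fMaxSize); // must never exceed fMaxSize

    // As in the file-based version, completion is signalled through the
    // scheduler rather than by calling afterGetting() directly:
    nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
        (TaskFunc*)FramedSource::afterGetting, this);
}

In a real application you would also fill in fPresentationTime, and re-schedule a short retry instead of delivering zero bytes when no frame is ready yet.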
2. Next, define your own ServerMediaSubsession class.
/*
 * Filename: H264LiveVideoServerMediaSubssion.hh
 * Author: mlj
 * Create date:
 */
#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH

#include "H264VideoFileServerMediaSubsession.hh"

class H264LiveVideoServerMediaSubssion : public H264VideoFileServerMediaSubsession {
public:
    static H264LiveVideoServerMediaSubssion*
    createNew(UsageEnvironment& env,
              char const* fileName,
              Boolean reuseFirstSource);

protected: // we're a virtual base class
    H264LiveVideoServerMediaSubssion(UsageEnvironment& env, char const* fileName,
                                     Boolean reuseFirstSource);
    ~H264LiveVideoServerMediaSubssion();

protected: // redefined virtual functions
    FramedSource* createNewStreamSource(unsigned clientSessionId,
                                        unsigned& estBitrate);

public:
    char fFileName[100];
};

#endif
/*
 * Filename: H264LiveVideoServerMediaSubssion.cpp
 * Author: chenbin
 * Create date:
 */
#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "H264VideoStreamFramer.hh"

H264LiveVideoServerMediaSubssion*
H264LiveVideoServerMediaSubssion::createNew(UsageEnvironment& env,
                                            char const* fileName,
                                            Boolean reuseFirstSource)
{
    return new H264LiveVideoServerMediaSubssion(env, fileName, reuseFirstSource);
}

H264LiveVideoServerMediaSubssion::H264LiveVideoServerMediaSubssion(UsageEnvironment& env,
    char const* fileName, Boolean reuseFirstSource)
    : H264VideoFileServerMediaSubsession(env, fileName, reuseFirstSource)
{
    strcpy(fFileName, fileName);
}

H264LiveVideoServerMediaSubssion::~H264LiveVideoServerMediaSubssion()
{
}

FramedSource* H264LiveVideoServerMediaSubssion::createNewStreamSource(unsigned clientSessionId,
                                                                      unsigned& estBitrate)
{
    /* Remain to do: assign estBitrate */
    estBitrate = 1000; // kbps, estimate

    // Create the video source:
    H264FramedLiveSource* liveSource = H264FramedLiveSource::createNew(envir(), fFileName);
    if (liveSource == NULL) return NULL;

    // Create a framer for the Video Elementary Stream:
    return H264VideoStreamFramer::createNew(envir(), liveSource);
}
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// Copyright (c) Live Networks, Inc. All rights reserved.
// A test program that demonstrates how to stream - via unicast RTP
// - various kinds of file on demand, using a built-in RTSP server.
// main program
#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

#pragma comment (lib, "Ws2_32.lib")
#pragma comment (lib, "BasicUsageEnvironment.lib")
#pragma comment (lib, "groupsock.lib")
#pragma comment (lib, "liveMedia.lib")
#pragma comment (lib, "UsageEnvironment.lib")
UsageEnvironment* env;

// To make the second and subsequent client for each stream reuse the same
// input stream as the first client (rather than playing the file from the
// start for each client), change the following "False" to "True":
Boolean reuseFirstSource = False;

// To stream *only* MPEG-1 or 2 video "I" frames
// (e.g., to reduce network bandwidth),
// change the following "False" to "True":
Boolean iFramesOnly = False;

static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
                           char const* streamName, char const* inputFileName); // fwd

static char newMatroskaDemuxWatchVariable;
static MatroskaFileServerDemux* demux;
static void onMatroskaDemuxCreation(MatroskaFileServerDemux* newDemux, void* /*clientData*/) {
    demux = newDemux;
    newMatroskaDemuxWatchVariable = 1;
}
int main(int argc, char** argv) {
    // Begin by setting up our usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
    // To implement client access control to the RTSP server, do the following:
    authDB = new UserAuthenticationDatabase;
    authDB->addUserRecord("username1", "password1"); // replace these with real strings
    // Repeat the above with each <username>, <password> that you wish to allow
    // access to the server.
#endif

    // Create the RTSP server:
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
    if (rtspServer == NULL) {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }

    char const* descriptionString
        = "Session streamed by \"testOnDemandRTSPServer\"";

    // Set up each of the possible streams that can be served by the
    // RTSP server.  Each such stream is implemented using a
    // "ServerMediaSession" object, plus one or more
    // "ServerMediaSubsession" objects for each audio/video substream.

    // A H.264 video elementary stream:
    {
        char const* streamName = "h264ESVideoTest";
        char const* inputFileName = "test.264";
        ServerMediaSession* sms
            = ServerMediaSession::createNew(*env, streamName, streamName,
                                            descriptionString);
        sms->addSubsession(H264LiveVideoServerMediaSubssion
                           ::createNew(*env, inputFileName, reuseFirstSource));
                           // changed to use our own ServerMediaSubsession class
        rtspServer->addServerMediaSession(sms);
        announceStream(rtspServer, sms, streamName, inputFileName);
    }

    // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
    // Try first with the default HTTP port (80), and then with the alternative HTTP
    // port numbers (8000 and 8080).
    if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
        *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
    } else {
        *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
    }

    env->taskScheduler().doEventLoop(); // does not return

    return 0; // only to prevent compiler warning
}
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
                           char const* streamName, char const* inputFileName) {
    char* url = rtspServer->rtspURL(sms);
    UsageEnvironment& env = rtspServer->envir();
    env << "\n\"" << streamName << "\" stream, from the file \""
        << inputFileName << "\"\n";
    env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;
}
You can then play test.264 with ffplay.exe rtsp://115.156.164.19:8554/h264ESVideoTest (port 8554 and the stream name h264ESVideoTest come from the code above; replace the IP with your server's address).
Related configuration: the four live555 libraries are placed under the lib folder.
Library directory: G:\workspace\avs\live555test\live555test\lib
Include directories: G:\workspace\avs\live555test\live555test\BasicUsageEnvironment\include; G:\workspace\avs\live555test\live555test\UsageEnvironment\include; G:\workspace\avs\live555test\live555test\liveMedia\include; G:\workspace\avs\live555test\live555test\groupsock\include
svn checkout http://live555-send-test.googlecode.com/svn/trunk/ live555-send-test-read-only
From: /mlj318/archive//2872932.html
Appendix: notes on how live555 connects Source, Filter, and Sink

The following is only a personal summary, kept for my own reference; for a systematic analysis of live555 it is best to read the source code itself.

I. Concepts

live555 has an architecture similar to GStreamer and DirectShow, built around the concepts of Source, Filter, and Sink. For example, in the test program testOnDemandRTSPServer, the pipeline for streaming H264 is the following (constructed automatically by H264VideoFileServerMediaSubsession):

[Source] ByteStreamFileSource -> H264or5VideoStreamParser (MPEGVideoStreamParser) -> H264VideoStreamFramer (H264or5VideoStreamFramer)

The object that deals directly with the Sink is H264VideoStreamFramer (returned by H264VideoFileServerMediaSubsession's createNewStreamSource); the Parser and the FileSource are constructed automatically (as needed) by H264VideoStreamFramer.

[Sink] H264or5Fragmenter -> H264VideoRTPSink (H264or5VideoRTPSink) -> VideoRTPSink -> MultiFramedRTPSink

H264or5Fragmenter is what talks to the H264or5VideoStreamFramer above: after obtaining frame data from the Source, it fragments it into RTP packets for output.

II. Data flow

1. First, the input side.

1) The Sink creates a buffer to hold the data obtained from the Source. The pointer to that buffer is the familiar fTo, and the Sink passes it down through a series of classes, H264or5Fragmenter -> MPEGVideoStreamFramer -> MPEGVideoStreamParser, so it finally travels from the Sink into the Parser. The relevant code fragments:

H264or5Fragmenter::H264or5Fragmenter(...)
{
    fInputBuffer = new unsigned char[fInputBufferSize];
}

void H264or5Fragmenter::doGetNextFrame()
{
    fInputSource->getNextFrame(&fInputBuffer[1], fInputBufferSize - 1,
                               afterGettingFrame, this,
                               FramedSource::handleClosure, this);
}

void MPEGVideoStreamFramer::doGetNextFrame()
{
    fParser->registerReadInterest(fTo, fMaxSize);
    continueReadProcessing();
}

void MPEGVideoStreamParser::registerReadInterest(unsigned char* to,
                                                 unsigned maxSize)
{
    fStartOfFrame = fTo = fSavedTo = to;
    fLimit = to + maxSize;
    fNumTruncatedBytes = fSavedNumTruncatedBytes = 0;
}

2) You may have noticed that fTo never reaches the final Source (ByteStreamFileSource). That is because ByteStreamFileSource is accessed by the Parser, and the Parser keeps its own buffer for the bytes it reads from ByteStreamFileSource; it then writes the parsed NAL units into fTo (which came from the Sink). This is why fTo only reaches the Parser and never gets to ByteStreamFileSource. The relevant code:

StreamParser::StreamParser(...)
{
    fBank[0] = new unsigned char[BANK_SIZE];
    fBank[1] = new unsigned char[BANK_SIZE];
}

void StreamParser::ensureValidBytes1(...)
{
    unsigned maxNumBytesToRead = BANK_SIZE - fTotNumValidBytes;
    fInputSource->getNextFrame(&curBank()[fTotNumValidBytes],
                               maxNumBytesToRead,
                               afterGettingBytes, this,
                               onInputClosure, this);
}

unsigned H264or5VideoStreamParser::parse()
{
    ...
    saveXBytes(Y);
    ...
}

class MPEGVideoStreamParser : public StreamParser {
    // Record "byte" in the current output frame:
    void saveByte(u_int8_t byte) {
        if (fTo >= fLimit) { // there's no space left
            ++fNumTruncatedBytes;
            return;
        }
        *fTo++ = byte;
    }
};

2. Now the output side.

1) The data read in above ends up in H264or5Fragmenter. Note that H264or5Fragmenter is itself still a FramedSource (it is defined inside H264or5VideoRTPSink.cpp); it connects H264VideoRTPSink with H264VideoStreamFramer. H264or5Fragmenter's doGetNextFrame implementation takes the data obtained from the Source, fragments it according to the RTP protocol's requirements, and stores it in the Sink's fOutBuf. The code:

MultiFramedRTPSink::MultiFramedRTPSink(UsageEnvironment& env,
                                       Groupsock* rtpGS,
                                       unsigned char rtpPayloadType,
                                       unsigned rtpTimestampFrequency,
                                       char const* rtpPayloadFormatName,
                                       unsigned numChannels)
    : RTPSink(env, rtpGS, rtpPayloadType, rtpTimestampFrequency,
              rtpPayloadFormatName, numChannels),
      fOutBuf(NULL), fCurFragmentationOffset(0),
      fPreviousFrameEndedFragmentation(False),
      fOnSendErrorFunc(NULL), fOnSendErrorData(NULL)
{
    setPacketSizes(RTP_PAYLOAD_PREFERRED_SIZE, RTP_PAYLOAD_MAX_SIZE);
}

void MultiFramedRTPSink::setPacketSizes(unsigned preferredPacketSize,
                                        unsigned maxPacketSize)
{
    if (preferredPacketSize > maxPacketSize || preferredPacketSize == 0) return;
        // sanity check

    delete fOutBuf;
    fOutBuf = new OutPacketBuffer(preferredPacketSize, maxPacketSize);
    fOurMaxPacketSize = maxPacketSize; // save value, in case subclasses need it
}
Boolean MultiFramedRTPSink::continuePlaying()
{
    // Send the first packet.
    // (This will also schedule any future sends.)
    buildAndSendPacket(True);
    return True;
}

void MultiFramedRTPSink::buildAndSendPacket(Boolean isFirstPacket)
{
    ...
    packFrame();
}

void MultiFramedRTPSink::packFrame()
{
    // Get the next frame...
    ...
    // Normal case: we need to read a new frame from the source
    if (fSource == NULL) return;
    fSource->getNextFrame(fOutBuf->curPtr(), fOutBuf->totalBytesAvailable(),
                          afterGettingFrame, this,
                          ourHandleClosure, this);
}

void H264or5Fragmenter::doGetNextFrame()
{
    if (fNumValidDataBytes == 1) {
        // We have no NAL unit data currently in the buffer. Read a new one:
        fInputSource->getNextFrame(&fInputBuffer[1], fInputBufferSize - 1,
                                   // the Sink calls the Source's getNextFrame to fetch data
                                   afterGettingFrame, this,
                                   FramedSource::handleClosure, this);
    } else {
        ...
        memmove(fTo, &fInputBuffer[1], fNumValidDataBytes - 1);
        ...
        memmove(fTo, fInputBuffer, fMaxSize);
        ...
        memmove(fTo, &fInputBuffer[fCurDataOffset - numExtraHeaderBytes], numBytesToSend);
        ...
    }
}

As you can see, the data input is actually initiated by the Sink (MultiFramedRTPSink): when the Sink needs data, it calls the Source's getNextFrame (realized concretely by the Source's doGetNextFrame), and through the chain of classes (Source -> Filter -> Sink) it obtains the data it wants.

2) At this point we can finally write out the complete pipeline:

ByteStreamFileSource -> H264or5VideoStreamParser (MPEGVideoStreamParser) -> H264VideoStreamFramer (H264or5VideoStreamFramer) -> H264or5Fragmenter -> H264VideoRTPSink (H264or5VideoRTPSink) -> VideoRTPSink -> MultiFramedRTPSink
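The pull model is easiest to see from the consuming end. Here is a minimal sketch of a sink, loosely modeled on the DummySink class in live555's testRTSPClient.cpp; the class name, the 100000-byte buffer, and the "consume" step are placeholders of mine, not anything prescribed by live555:

#include "liveMedia.hh"

class DummySink : public MediaSink {
public:
    static DummySink* createNew(UsageEnvironment& env) { return new DummySink(env); }

private:
    DummySink(UsageEnvironment& env) : MediaSink(env) {
        fBuffer = new unsigned char[100000]; // this buffer's address is what becomes the source's fTo
    }
    virtual ~DummySink() { delete[] fBuffer; }

    static void afterGettingFrame(void* clientData, unsigned frameSize,
                                  unsigned numTruncatedBytes,
                                  struct timeval presentationTime,
                                  unsigned durationInMicroseconds) {
        DummySink* sink = (DummySink*)clientData;
        // ... consume sink->fBuffer[0 .. frameSize) here ...
        sink->continuePlaying(); // re-arm the pull loop
    }

    // redefined virtual function: this *is* the pull loop
    virtual Boolean continuePlaying() {
        if (fSource == NULL) return False;
        fSource->getNextFrame(fBuffer, 100000,  // hand our buffer down; it becomes fTo
                              afterGettingFrame, this,
                              onSourceClosure, this);
        return True;
    }

    unsigned char* fBuffer;
};

Calling startPlaying(*source, afterPlaying, NULL) on such a sink sets fSource and invokes continuePlaying(); from then on every afterGettingFrame() re-arms getNextFrame(), which is exactly the Sink-driven loop described above.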
III. Design ideas

1. Buffers

The pipeline above contains several buffers; for applications with tight latency requirements it is worth being clear about how many buffers there are and how large each one is:

1) StreamParser allocates a buffer of BANK_SIZE (150000) bytes, because what enters the StreamParser is an unformatted byte stream while what must come out the back is a complete frame (NAL unit), which requires parsing;

2) H264or5Fragmenter allocates a buffer to hold the parsed data (NAL units) from the StreamParser, from which it generates the RTP packets needed downstream by the RTPSink;

3) MultiFramedRTPSink allocates a buffer in which the Fragmenter stores the fragmented data (RTP packets) for the RTSP server to use.
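A practical note of my own on these sizes: as the continuePlaying() code below shows, the H264or5Fragmenter's input buffer size is taken from OutPacketBuffer::maxSize (a public static member in live555), so encoded frames larger than that limit get truncated. When streaming high-bitrate video it is common to enlarge it before any sink is created; the exact value is workload-dependent, and 600000 here is only an example:

// Enlarge live555's packet buffer so that large frames (e.g. H.264 I-frames)
// fit; do this early, before any RTPSink / fragmenter is constructed.
OutPacketBuffer::maxSize = 600000; // example value; tune to your stream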
2. fTo

fTo is, as the name suggests, the "to" buffer pointer: the buffer provided by the downstream object. So when is fTo assigned? The answer is here:

void FramedSource::getNextFrame(unsigned char* to, unsigned maxSize,
                                afterGettingFunc* afterGettingFunc,
                                void* afterGettingClientData,
                                onCloseFunc* onCloseFunc,
                                void* onCloseClientData)
{
    // Make sure we're not already being read:
    if (fIsCurrentlyAwaitingData) {
        envir() << "FramedSource[" << this
                << "]::getNextFrame(): attempting to read more than once at the same time!\n";
        envir().internalError();
    }

    fTo = to;            // hand the Sink's buffer to the Source's fTo
    fMaxSize = maxSize;  // set the FramedSource's max size
    fNumTruncatedBytes = 0;       // by default; could be changed by doGetNextFrame()
    fDurationInMicroseconds = 0;  // by default; could be changed by doGetNextFrame()
    fAfterGettingFunc = afterGettingFunc;
    fAfterGettingClientData = afterGettingClientData;
    fOnCloseFunc = onCloseFunc;
    fOnCloseClientData = onCloseClientData;
    fIsCurrentlyAwaitingData = True;

    doGetNextFrame();
}

In addition, MPEGVideoStreamFramer passes fTo on to MPEGVideoStreamParser by another route:

void MPEGVideoStreamFramer::doGetNextFrame()
{
    fParser->registerReadInterest(fTo, fMaxSize);
    continueReadProcessing();
}

void MPEGVideoStreamParser::registerReadInterest(unsigned char* to,
                                                 unsigned maxSize)
{
    fStartOfFrame = fTo = fSavedTo = to;
    fLimit = to + maxSize;
    fNumTruncatedBytes = fSavedNumTruncatedBytes = 0;
}

The reason is that MPEGVideoStreamParser (StreamParser) is not a FramedSource, so a separate API is needed to hand it fTo. Whenever the Sink needs data, it calls the Source's getNextFrame, passing its own buffer in through the `to` parameter, and the Source saves it in fTo.

3. fSource

Boolean MediaSink::startPlaying(MediaSource& source,
                                afterPlayingFunc* afterFunc,
                                void* afterClientData)
{
    // Make sure we're not already being played:
    if (fSource != NULL) {
        envir().setResultMsg("This sink is already being played");
        return False;
    }

    // Make sure our source is compatible:
    if (!sourceIsCompatibleWithUs(source)) {
        envir().setResultMsg("MediaSink::startPlaying(): source is not compatible!");
        return False;
    }

    fSource = (FramedSource*)&source;
    fAfterFunc = afterFunc;
    fAfterClientData = afterClientData;
    return continuePlaying();
}

The Sink's fSource is set in startPlaying to the H264VideoStreamFramer returned by createNewStreamSource. It is then changed, inside continuePlaying (implemented by H264or5VideoRTPSink), to the H264or5Fragmenter, while reassignInputSource records the H264VideoStreamFramer as fInputSource; this is how fSource and fInputSource are distinguished.

Boolean H264or5VideoRTPSink::continuePlaying()
{
    // First, check whether we have a 'fragmenter' class set up yet.
    // If not, create it now:
    if (fOurFragmenter == NULL) {
        fOurFragmenter = new H264or5Fragmenter(fHNumber, envir(), fSource,
                                               OutPacketBuffer::maxSize,
                                               ourMaxPacketSize() - 12/*RTP hdr size*/);
    } else {
        fOurFragmenter->reassignInputSource(fSource);
    }
    fSource = fOurFragmenter;

    // Then call the parent class's implementation:
    return MultiFramedRTPSink::continuePlaying();
}

class FramedFilter : public FramedSource {
public:
    FramedSource* inputSource() const { return fInputSource; }
    void reassignInputSource(FramedSource* newInputSource) { fInputSource = newInputSource; }
    // Call before destruction if you want to prevent the destructor from closing the input source
    void detachInputSource();
protected:
    FramedFilter(UsageEnvironment& env, FramedSource* inputSource);
        // abstract base class
    virtual ~FramedFilter();
protected:
    // Redefined virtual functions (with default 'null' implementations):
    virtual char const* MIMEtype() const;
    virtual void getAttributes() const;
    virtual void doStopGettingFrames();
protected:
    FramedSource* fInputSource; // the Source corresponding to the input
};

Why do it this way? Because the data the RTPSink needs must be packaged into RTP packets, by H264or5Fragmenter, on top of what H264VideoStreamFramer outputs; that is where the extra H264or5Fragmenter comes from (much like the H264or5VideoStreamParser earlier).

4. fInputSource

fInputSource is defined in class FramedFilter and inherited by both H264or5VideoStreamFramer and H264or5Fragmenter. However, fInputSource points to different things in these two classes.

1) In H264or5VideoStreamFramer it points to the ByteStreamFileSource; see the following code:

FramedSource* H264VideoFileServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/,
                                                                        unsigned& estBitrate)
{
    estBitrate = 500; // kbps, estimate

    // Create the video source:
    ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(envir(), fFileName);
    if (fileSource == NULL) return NULL;
    fFileSize = fileSource->fileSize();

    // Create a framer for the Video Elementary Stream:
    return H264VideoStreamFramer::createNew(envir(), fileSource);
        // records the ByteStreamFileSource in the H264VideoStreamFramer's fInputSource
}

H264or5VideoStreamFramer::H264or5VideoStreamFramer(int hNumber, UsageEnvironment& env,
                                                   FramedSource* inputSource,
                                                   Boolean createParser,
                                                   Boolean includeStartCodeInOutput)
    : MPEGVideoStreamFramer(env, inputSource),
      fHNumber(hNumber),
      fLastSeenVPS(NULL), fLastSeenVPSSize(0),
      fLastSeenSPS(NULL), fLastSeenSPSSize(0),
      fLastSeenPPS(NULL), fLastSeenPPSSize(0)
{
    ...
}

MPEGVideoStreamFramer::MPEGVideoStreamFramer(UsageEnvironment& env,
                                             FramedSource* inputSource)
    : FramedFilter(env, inputSource),
      fFrameRate(0.0) /* until we learn otherwise */,
      fParser(NULL)
{
    reset();
}

2) In H264or5Fragmenter it points to the H264VideoStreamFramer; see the following code:

Boolean H264or5VideoRTPSink::continuePlaying()
{
    // First, check whether we have a 'fragmenter' class set up yet.
    // If not, create it now:
    if (fOurFragmenter == NULL) {
        fOurFragmenter = new H264or5Fragmenter(fHNumber, envir(), fSource,
                                               OutPacketBuffer::maxSize,
                                               // OutPacketBuffer::maxSize determines fInputBufferSize
                                               ourMaxPacketSize() - 12/*RTP hdr size*/);
    } else {
        fOurFragmenter->reassignInputSource(fSource);
            // saves fSource (the H264VideoStreamFramer) into the
            // H264or5Fragmenter's fInputSource
    }
    fSource = fOurFragmenter;

    // Then call the parent class's implementation:
    return MultiFramedRTPSink::continuePlaying();
}
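Since every Filter in the chain exposes inputSource() (see the FramedFilter excerpt above), the final wiring can also be inspected at runtime. A small debugging sketch of my own, which assumes your build has RTTI enabled (live555 itself offers no type query for this):

#include "liveMedia.hh"

// Walk from an outermost source (e.g. the H264or5Fragmenter that ends up in
// the sink's fSource) back toward the innermost one (ByteStreamFileSource),
// printing each stage's MIME type along the way.
static void dumpChain(UsageEnvironment& env, FramedSource* s) {
    while (s != NULL) {
        env << s->MIMEtype() << "\n";
        FramedFilter* f = dynamic_cast<FramedFilter*>(s);
        if (f == NULL) break; // not a filter: we have reached the true source
        s = f->inputSource();
    }
}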