文如其名,最近在做的項(xiàng)目要求利用RTSP協(xié)議轉(zhuǎn)發(fā)處理完的H264視頻數(shù)據(jù)給上一層客戶端,環(huán)境是Windows的VS2013,于是就各種百度谷歌找代碼。結(jié)果在得到利用live555去做比較簡單的結(jié)論的同時也悲情地發(fā)現(xiàn),網(wǎng)上別人貼出來的代碼基本都是Linux上面的。在修改了兩份來適用于Windows無效后,又一次陷入了百度谷歌的無盡搜索中。Anyway,最后終于解決了,所以貼出代碼跟大家分享下,希望能給和我需求相似的童鞋一點(diǎn)啟發(fā),也希望有高手指正其中的問題。
用live555進(jìn)行RTSP的播放基本上是通過修改其給出來的播放本地文件的DEMO來實(shí)現(xiàn)的。但由於其DEMO封裝的比較深,所以要直接修改他的fread處的代碼變成內(nèi)存拷貝來實(shí)現(xiàn)實(shí)時傳輸會顯得比較別扭。本文參考了網(wǎng)上的一些代碼,自定義了一個繼承自H264VideoFileServerMediaSubsession的類來進(jìn)行處理,同時定義了一個繼承自FramedSource的類來做內(nèi)存的拷貝操作,該類亦是區(qū)別於讀本地文件和實(shí)時流之緊要處。
代碼如下,如果覺得需要或者懶得自己搭建live555的環(huán)境亦可以在文中最后的鏈接中下載該工程(環(huán)境為VS2013),如果你的VS版本合適即可直接運(yùn)行。
主文件(程序入口)
#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#define BUFSIZE 1024*200
// Print the RTSP URL that clients should use to play this stream.
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms, char const* streamName)
{
	char* url = rtspServer->rtspURL(sms); // heap-allocated by live555; we must delete[] it
	UsageEnvironment& env = rtspServer->envir(); // reference: UsageEnvironment is not copyable
	env << streamName << "\n";
	env << "Play this stream using the URL \"" << url << "\"\n";
	delete[] url;
}
int main(int argc, char** argv)
{
//設(shè)置環(huán)境
UsageEnvironment* env;
Boolean reuseFirstSource = False;//如果為“true”則其他接入的客戶端跟第一個客戶端看到一樣的視頻流,否則其他客戶端接入的時候?qū)⒅匦虏シ?br /> TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);
//創(chuàng)建RTSP服務(wù)器
UserAuthenticationDatabase* authDB = NULL;
RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
if (rtspServer == NULL) {
*env "Failed to create RTSP server: " env->getResultMsg() "\n";
exit(1);
}
char const* descriptionString= "Session streamed by \&;testOnDemandRTSPServer\&;";
//模擬實(shí)時流發(fā)送相關(guān)變量
int datasize;//數(shù)據(jù)區(qū)長度
unsigned char* databuf;//數(shù)據(jù)區(qū)指針
databuf = (unsigned char*)malloc(1024*1024);
bool dosent;//rtsp發(fā)送標(biāo)志位,為true則發(fā)送,否則退出
//從文件中拷貝1M數(shù)據(jù)到內(nèi)存中作為實(shí)時網(wǎng)絡(luò)傳輸內(nèi)存模擬,如果實(shí)時網(wǎng)絡(luò)傳輸應(yīng)該是雙線程結(jié)構(gòu),記得在這里加上線程鎖
//此外實(shí)時傳輸?shù)臄?shù)據(jù)拷貝應(yīng)該是發(fā)生在H264FramedLiveSource文件中,所以這里只是自上往下的傳指針過去給它
FILE *pf;
fopen_s(pf, "test.264", "rb");
fread(databuf, 1, BUFSIZE, pf);
datasize = BUFSIZE;
dosent = true;
fclose(pf);
//上面的部分除了模擬網(wǎng)絡(luò)傳輸?shù)牟糠滞馄渌幕靖鷏ive555提供的demo一樣,而下面則修改為網(wǎng)絡(luò)傳輸?shù)男问?,為此重寫addSubsession的第一個參數(shù)相關(guān)文件
char const* streamName = "h264ESVideoTest";
ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName,descriptionString);
sms->addSubsession(H264LiveVideoServerMediaSubssion::createNew(*env, reuseFirstSource, datasize, databuf,dosent));//修改為自己實(shí)現(xiàn)的H264LiveVideoServerMediaSubssion
rtspServer->addServerMediaSession(sms);
announceStream(rtspServer, sms, streamName);//提示用戶輸入連接信息
env->taskScheduler().doEventLoop(); //循環(huán)等待連接
free(databuf);//釋放掉內(nèi)存
return 0;
}
自定義H264VideoFileServerMediaSubsession類
H264LiveVideoServerMediaSubssion.hh
#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
#include "H264VideoFileServerMediaSubsession.hh"
class H264LiveVideoServerMediaSubssion : public H264VideoFileServerMediaSubsession {
public:
static H264LiveVideoServerMediaSubssion* createNew(UsageEnvironment env, Boolean reuseFirstSource, int *datasize, unsigned char* databuf, bool *dosent);
protected: // we're a virtual base class
H264LiveVideoServerMediaSubssion(UsageEnvironment env, Boolean reuseFirstSource, int *datasize, unsigned char* databuf, bool *dosent);
~H264LiveVideoServerMediaSubssion();
protected: // redefined virtual functions
FramedSource* createNewStreamSource(unsigned clientSessionId,unsigned estBitrate);
public:
char fFileName[100];
int *Server_datasize;//數(shù)據(jù)區(qū)大小指針
unsigned char* Server_databuf;//數(shù)據(jù)區(qū)指針
bool *Server_dosent;//發(fā)送標(biāo)示
};
#endif
H264LiveVideoServerMediaSubssion.cpp
#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "H264VideoStreamFramer.hh"
// Factory method. UsageEnvironment is passed by reference (not copyable).
H264LiveVideoServerMediaSubssion* H264LiveVideoServerMediaSubssion::createNew(UsageEnvironment& env, Boolean reuseFirstSource, int* datasize, unsigned char* databuf, bool* dosent)
{
	return new H264LiveVideoServerMediaSubssion(env, reuseFirstSource, datasize, databuf, dosent);
}
H264LiveVideoServerMediaSubssion::H264LiveVideoServerMediaSubssion(UsageEnvironment& env, Boolean reuseFirstSource, int* datasize, unsigned char* databuf, bool* dosent)
	: H264VideoFileServerMediaSubsession(env, "", reuseFirstSource)
	// We don't stream from a file, but the base class wants a file name.
	// Pass the empty string literal: passing the fFileName member here would
	// hand the base ctor an UNINITIALIZED array (members are constructed
	// after the base class), i.e. garbage.
{
	fFileName[0] = '\0';       // keep the (unused) member in a defined state
	Server_datasize = datasize; // pointer to the size of the data area
	Server_databuf = databuf;   // pointer to the data area
	Server_dosent = dosent;     // pointer to the send flag
}
// Nothing to release: the data buffer and flags are owned by the caller (main).
H264LiveVideoServerMediaSubssion::~H264LiveVideoServerMediaSubssion()
{
}
// Create the per-client stream source chain: our memory-backed source,
// wrapped in an H264VideoStreamFramer that splits it into NAL units.
// estBitrate is an out-parameter (reference, per the live555 base class);
// with a by-value parameter the assignment below would be dead code and this
// method would not override the base virtual at all.
FramedSource* H264LiveVideoServerMediaSubssion::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{
	estBitrate = 1000; // kbps, rough estimate

	// Create the video source that reads from the shared memory buffer.
	H264FramedLiveSource* liveSource = H264FramedLiveSource::createNew(envir(), Server_datasize, Server_databuf, Server_dosent);
	if (liveSource == NULL)
	{
		return NULL;
	}

	// Create a framer for the Video Elementary Stream:
	return H264VideoStreamFramer::createNew(envir(), liveSource);
}
自定義H264FramedLiveSource類
H264FramedLiveSource.hh
#ifndef _H264FRAMEDLIVESOURCE_HH
#define _H264FRAMEDLIVESOURCE_HH
#include "FramedSource.hh"
class H264FramedLiveSource : public FramedSource
{
public:
static H264FramedLiveSource* createNew(UsageEnvironment env, int *datasize, unsigned char* databuf, bool *dosent, unsigned preferredFrameSize = 0, unsigned playTimePerFrame = 0);
protected:
H264FramedLiveSource(UsageEnvironment env, int *datasize, unsigned char* databuf, bool *dosent, unsigned preferredFrameSize, unsigned playTimePerFrame);
~H264FramedLiveSource();
private:
virtual void doGetNextFrame();
int TransportData(unsigned char* to, unsigned maxSize);
protected:
int *Framed_datasize;//數(shù)據(jù)區(qū)大小指針
unsigned char *Framed_databuf;//數(shù)據(jù)區(qū)指針
bool *Framed_dosent;//發(fā)送標(biāo)示
int readbufsize;//記錄已讀取數(shù)據(jù)區(qū)大小
int bufsizel;//記錄數(shù)據(jù)區(qū)大小
};
#endif
H264FramedLiveSource.cpp
#include "H264FramedLiveSource.hh"
H264FramedLiveSource::H264FramedLiveSource(UsageEnvironment& env, int* datasize, unsigned char* databuf, bool* dosent, unsigned preferredFrameSize, unsigned playTimePerFrame)
	: FramedSource(env)
{
	Framed_datasize = datasize; // pointer to the size of the data area
	Framed_databuf = databuf;   // pointer to the data area
	Framed_dosent = dosent;     // pointer to the send flag
	// Initialize the bookkeeping so doGetNextFrame() never reads garbage
	// if it is entered before the first buffer refresh.
	readbufsize = 0;
	bufsizel = 0;
}
// Factory method. UsageEnvironment is passed by reference (not copyable).
H264FramedLiveSource* H264FramedLiveSource::createNew(UsageEnvironment& env, int* datasize, unsigned char* databuf, bool* dosent, unsigned preferredFrameSize, unsigned playTimePerFrame)
{
	return new H264FramedLiveSource(env, datasize, databuf, dosent, preferredFrameSize, playTimePerFrame);
}
// Nothing to release: the data buffer and flags are owned by the caller.
H264FramedLiveSource::~H264FramedLiveSource()
{
}
// Deliver the next chunk of the shared buffer into fTo (at most fMaxSize
// bytes), then schedule afterGetting() so the framer consumes it.
// Fixes two defects of the original version:
//  * the final branch never set fFrameSize, so a stale value was used as
//    the copy/delivery size (possible overread and corrupt frame);
//  * the "restart" branch unconditionally copied fMaxSize bytes even when
//    the buffer held fewer, reading past the valid data.
void H264FramedLiveSource::doGetNextFrame()
{
	if (*Framed_dosent)
	{
		// The producer flagged a (new) buffer: latch its size and restart.
		*Framed_dosent = false;
		bufsizel = *Framed_datasize;
		readbufsize = 0;
	}

	unsigned remaining = (unsigned)(bufsizel - readbufsize);
	if (remaining > fMaxSize)
	{
		fFrameSize = fMaxSize; // more data left than the sink can take now
	}
	else
	{
		fFrameSize = remaining;  // last chunk: deliver exactly what is left
		*Framed_dosent = true;   // tell the producer we are done with this buffer
	}
	memcpy(fTo, Framed_databuf + readbufsize, fFrameSize);
	readbufsize += fFrameSize;

	// Re-enter the event loop with zero delay before notifying the sink.
	nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this);
	return;
}