Problem description
- live555: audio and video stall when played together
I built an RTSP server with live555 and keep two queues: one holds already-encoded H264 data, the other already-encoded AAC data. The video frame rate is 15 fps and the audio sample rate is 8000 Hz. I create two OnDemandServerMediaSubsession subclasses, one for audio and one for video, and add both to a single ServerMediaSession. Each subsession uses a FramedSource subclass that overrides doGetNextFrame() to pull data from its queue; a rough sketch of the setup follows, and then my two sources. Here is the problem: with this arrangement playback is smooth for only about 2 minutes, after which the video stream drops frames badly and the picture freezes. Playing the video stream alone, or the audio stream alone, works fine. I am using VLC as the player. Can anyone offer some advice?
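Roughly, the server side is assembled like this (a minimal sketch; the subsession class names, port, and stream name are placeholders rather than my exact code):

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

int main() {
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
    ServerMediaSession* sms =
        ServerMediaSession::createNew(*env, "live", "live", "IPC A/V stream");

    // Each subsession is an OnDemandServerMediaSubsession subclass whose
    // createNewStreamSource() returns the matching FramedSource shown below.
    sms->addSubsession(IPCH264MediaSubsession::createNew(*env, True));  // video, 15 fps
    sms->addSubsession(IPCAACMediaSubsession::createNew(*env, True));   // audio, AAC @ 8000 Hz

    rtspServer->addServerMediaSession(sms);
    env->taskScheduler().doEventLoop();  // never returns
    return 0;
}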
AACFrameSource():

void IPCAACAudioFramedSource::doGetNextFrame() {
    unsigned int frame_length;
    memset(data, 0, AUDIO_BUFFER_SIZE);
    memset(ptv, 0, sizeof(ptv));
    fGetAACFrameFunc(data, &frame_length, ptv);  // pull one AAC frame from the queue
    //memcpy(fTo, adts_header, 7);

    fFrameSize = frame_length;
    // Next, deliver the raw frame data into the buffer provided:
    if (fFrameSize > fMaxSize) {
        fNumTruncatedBytes = fFrameSize - fMaxSize;
        fFrameSize = fMaxSize;
    } else {
        fNumTruncatedBytes = 0;
    }
    memcpy(fTo, data, fFrameSize);  // copy after truncation so we never write past fMaxSize

    // Set the 'presentation time':
    if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
        // This is the first frame, so use the current time:
        gettimeofday(&fPresentationTime, NULL);
    } else {
        // Increment by the play time of the previous frame:
        // fuSecsPerFrame = (1024 /*samples-per-frame*/ * 1000000) / fSamplingFrequency /*samples-per-second*/;
        unsigned uSeconds = fPresentationTime.tv_usec + fuSecsPerFrame;
        fPresentationTime.tv_sec += uSeconds / 1000000;
        fPresentationTime.tv_usec = uSeconds % 1000000;
    }
    //gettimeofday(&fPresentationTime, NULL);
    fDurationInMicroseconds = fuSecsPerFrame;

    // Switch to another task, and inform the reader that he has data:
    nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
        (TaskFunc*)FramedSource::afterGetting, this);
}
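For the numbers above, the commented formula works out to: one AAC frame is 1024 samples, so at 8000 Hz fuSecsPerFrame = (1024 * 1000000) / 8000 = 128000 µs, i.e. 128 ms per audio frame, while one video frame at 15 fps lasts about 66667 µs. (fuSecsPerFrame itself is a member I set elsewhere; this is just the arithmetic for my parameters.)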
H264FrameSource():

void IPCH264FramedSource::doGetNextFrame() {
    fFrameSize = 0;
    unsigned int len = 0;
    memset(frameData, 0, BUFFER_SIZE);
    if (m_getframefunc) {
        // Pull one encoded frame from the video queue:
        m_getframefunc(frameData, &len, m_nNeedIFrameCount, ptv, findex);
    } else {
        printf("no getframefunc!!!\n");
        return;
    }
    // We feed a H264VideoStreamDiscreteFramer, so drop the leading 4-byte start code:
    fFrameSize = len - 4;
    if (fFrameSize > fMaxSize) {
        fNumTruncatedBytes = fFrameSize - fMaxSize;
        fFrameSize = fMaxSize;
    } else {
        fNumTruncatedBytes = 0;
    }
    memcpy(fTo, &frameData[4], fFrameSize);
    gettimeofday(&fPresentationTime, NULL);
    fDurationInMicroseconds = 0;
    nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
        (TaskFunc*)FramedSource::afterGetting, this);
}
unsigned int AMASSIPCH264FramedSource::maxFrameSize() const
{
return 1024*200;
}
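For completeness, the video subsession wires the source into the framer and sink roughly like this (the subsession class name, the IPCH264FramedSource constructor, and the bitrate estimate are illustrative, not my exact code):

FramedSource* IPCH264MediaSubsession::createNewStreamSource(
        unsigned /*clientSessionId*/, unsigned& estBitrate) {
    estBitrate = 500;  // kbps; rough guess
    // Discrete framer: the source delivers single NAL units without start codes.
    return H264VideoStreamDiscreteFramer::createNew(envir(),
                                                    new IPCH264FramedSource(envir()));
}

RTPSink* IPCH264MediaSubsession::createNewRTPSink(
        Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic,
        FramedSource* /*inputSource*/) {
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}

The audio subsession is analogous, using an MPEG4GenericRTPSink for the AAC stream.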