Hi Ross, - I am still not able to resolve the issue.
- I would like to elaborate on my issue. Basically I want to stream frames I grab from an OpenGL window, and I get these frames in encoded H.264 format. - The following is my code for the thread that starts RTSP streaming. int RTSPStreamer::Main() { TaskScheduler* taskSchedular = BasicTaskScheduler::createNew(); BasicUsageEnvironment* usageEnvironment = BasicUsageEnvironment::createNew(*taskSchedular); RTSPServer* rtspServer = RTSPServer::createNew(*usageEnvironment, 8554, NULL); if(rtspServer == NULL) { *usageEnvironment << "Failed to create rtsp server ::" << usageEnvironment->getResultMsg() <<"\n"; exit(1); } std::string streamName = "usb1.mkv"; ServerMediaSession* sms = ServerMediaSession::createNew(*usageEnvironment, streamName.c_str(), streamName.c_str(), "Live H264 Stream"); H264LiveServerMediaSession *liveSubSession = H264LiveServerMediaSession::createNew(*usageEnvironment, true, rtspStreamQueue); sms->addSubsession(liveSubSession); rtspServer->addServerMediaSession(sms); char* url = rtspServer->rtspURL(sms); *usageEnvironment << "Play the stream using url "<<url << "\n"; delete[] url; taskSchedular->doEventLoop(); return 0; } - I have written a custom class H264LiveServerMediaSession which gets frames from rtspStreamQueue. - In H264LiveServerMediaSession the function createNewStreamSource() creates an instance of LiveSourceWithx264, which is responsible for getting the frame from rtspStreamQueue. - In the first call to deliverFrame() -> fMaxSize will start at 15000. - In the next call to deliverFrame() -> fMaxSize will be 15000 minus the size of the frame in the previous call. - I think this should not happen. - Can you suggest what I did wrong? I have attached the files H264LiveServerMediaSession and LiveSourceWithx264. - I am new to live555, so please help me out. Thanks and Regards, Vikram Singh.
From: live-devel-boun...@ns.live555.com [mailto:live-devel-boun...@ns.live555.com] On Behalf Of Ross Finlayson Sent: Tuesday, April 15, 2014 8:26 PM To: LIVE555 Streaming Media - development & use Subject: Re: [Live-devel] Frames are corrupted But if fFrameSize is greater than fMaxSize then I have to truncate the data. I think this is what that is causing the corrupted frames. Yes, because if a frame has to be truncated, then the truncated data will be lost (i.e., not sent to the client). Is there any way to get around this problem. Yes, there are two possible solutions. 1/ The best solution is to not send such large NAL units. Reconfigure your encoder to break up 'key frames' into multiple (therefore much smaller) 'slice' NAL units. 2/ Alternatively (though not as good), you can increase the size of the server's output buffer. Try adding the following line to your application - at the start of "main()": OutPacketBuffer::maxSize = 100000; and recompile. If that doesn't work, try increasing to 150000, 200000, etc., depending on the size of your frames. It's important to understand, though, that this is a bad solution. See: http://lists.live555.com/pipermail/live-devel/2013-April/016805.html 1/ is a *much* better solution - i.e., decrease the size of the NAL units that you're streaming. Ross Finlayson Live Networks, Inc. http://www.live555.com/
#include "LiveSourceWithx264.h"

// LiveSourceWithx264: a live555 FramedSource that pulls pre-encoded H.264
// frames out of a shared-memory queue (rtspStreamQueue) and hands them to
// the streaming pipeline.

// Factory required by live555 convention: sources are heap-allocated and
// reference-counted by the library.
LiveSourceWithx264* LiveSourceWithx264::createNew(UsageEnvironment& env, common::ShmQueue *rtspStreamQueue)
{
    return new LiveSourceWithx264(env, rtspStreamQueue);
}

// Shared across every instance of this class: one event trigger registered
// with the task scheduler, plus a count of live instances that own it.
EventTriggerId LiveSourceWithx264::eventTriggerId = 0;
unsigned LiveSourceWithx264::referenceCount = 0;

LiveSourceWithx264::LiveSourceWithx264(UsageEnvironment& env, common::ShmQueue *rtspStreamQueue)
    : FramedSource(env), rtspStreamQueue(rtspStreamQueue)
{
    ++referenceCount;
    // Create the (class-wide) event trigger only once; deliverFrame0 is the
    // callback the scheduler will invoke when the trigger fires.
    if (eventTriggerId == 0)
    {
        eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
    }
}

LiveSourceWithx264::~LiveSourceWithx264(void)
{
    --referenceCount;
    // BUG FIX: the trigger is shared by all instances (static member), so it
    // must only be deleted when the LAST instance goes away. The original
    // deleted it unconditionally, leaving any surviving instances holding a
    // dangling trigger id.
    if (referenceCount == 0)
    {
        envir().taskScheduler().deleteEventTrigger(eventTriggerId);
        eventTriggerId = 0;
    }
}

// Static trampoline: the scheduler hands back the void* we registered, which
// is the source instance itself.
void LiveSourceWithx264::deliverFrame0(void* clientData)
{
    ((LiveSourceWithx264*)clientData)->deliverFrame();
}

// Called by live555 whenever the downstream object wants the next frame.
void LiveSourceWithx264::doGetNextFrame()
{
    // Timestamp the request; used below as the frame's presentation time.
    // (Repaired from the mangled "¤tTime" in the original paste — the
    // "&curren" of "&currentTime" had been eaten by HTML-entity decoding.)
    gettimeofday(&currentTime, NULL);
    deliverFrame();
}

// Copies one encoded frame from the shared-memory queue into live555's
// output buffer (fTo) and signals completion via afterGetting().
void LiveSourceWithx264::deliverFrame()
{
    // live555 contract: only deliver while the sink is actually waiting.
    if (!isCurrentlyAwaitingData()) return;

    // getBuf() fills fFrameSize with the size of the dequeued frame.
    // NOTE(review): assumes getBuf() blocks until a frame is available and
    // never returns NULL — confirm against common::ShmQueue's contract.
    unsigned char* storage = (unsigned char*)rtspStreamQueue->getBuf(fFrameSize, false);

    if (fFrameSize > fMaxSize)
    {
        // Frame is larger than the space the sink has left: we must truncate,
        // and the dropped tail is lost (this is what corrupts large frames —
        // see OutPacketBuffer::maxSize / smaller-NAL-unit advice).
        fNumTruncatedBytes = fFrameSize - fMaxSize;
        fFrameSize = fMaxSize;
    }
    else
    {
        // BUG FIX: reset the count; otherwise a truncation reported for one
        // frame would persist for every subsequent (untruncated) frame.
        fNumTruncatedBytes = 0;
    }

    fPresentationTime = currentTime;
    memmove(fTo, storage, fFrameSize);
    rtspStreamQueue->releaseBuf();

    // Tell live555 the frame is ready; it will schedule the next
    // doGetNextFrame() call itself.
    FramedSource::afterGetting(this);
}
#include "H264LiveServerMediaSession.h"

// H264LiveServerMediaSession: an OnDemandServerMediaSubsession that serves a
// live H.264 stream whose frames come from a shared-memory queue
// (rtspStreamQueue). It follows the standard live555 pattern of briefly
// playing the stream into a "dummy" RTP sink so the SPS/PPS-derived aux SDP
// line ("a=fmtp:...") can be extracted for the session description.

// Factory: heap-allocates a session bound to the given frame queue.
H264LiveServerMediaSession* H264LiveServerMediaSession::createNew(UsageEnvironment& env, bool reuseFirstSource, common::ShmQueue *rtspStreamQueue)
{
    return new H264LiveServerMediaSession(env, reuseFirstSource, rtspStreamQueue);
}

H264LiveServerMediaSession::H264LiveServerMediaSession(UsageEnvironment& env, bool reuseFirstSource, common::ShmQueue *rtspStreamQueue)
    : OnDemandServerMediaSubsession(env, reuseFirstSource),
      fAuxSDPLine(NULL),      // filled in lazily by checkForAuxSDPLine1()
      fDoneFlag(0),           // signals getAuxSDPLine()'s nested event loop
      fDummySink(NULL),       // temporary sink used only to harvest the SDP line
      rtspStreamQueue(rtspStreamQueue)
{
}

H264LiveServerMediaSession::~H264LiveServerMediaSession(void)
{
    // fAuxSDPLine was allocated with strDup() (array new), hence delete[].
    delete[] fAuxSDPLine;
}

// Static trampoline for the dummy sink's "after playing" callback.
static void afterPlayingDummy(void* clientData)
{
    H264LiveServerMediaSession *session = (H264LiveServerMediaSession*)clientData;
    session->afterPlayingDummy1();
}

void H264LiveServerMediaSession::afterPlayingDummy1()
{
    // Cancel any pending checkForAuxSDPLine poll and release the nested loop.
    envir().taskScheduler().unscheduleDelayedTask(nextTask());
    setDoneFlag();
}

// Static trampoline for the periodic aux-SDP-line poll.
static void checkForAuxSDPLine(void* clientData)
{
    H264LiveServerMediaSession* session = (H264LiveServerMediaSession*)clientData;
    session->checkForAuxSDPLine1();
}

// Polls until the dummy sink has seen enough of the stream to produce the
// aux SDP line, then caches it and releases the nested event loop.
void H264LiveServerMediaSession::checkForAuxSDPLine1()
{
    char const* dasl;
    if (fAuxSDPLine != NULL)
    {
        // Already have it (e.g. from a previous client) — done immediately.
        setDoneFlag();
    }
    else if (fDummySink != NULL && (dasl = fDummySink->auxSDPLine()) != NULL)
    {
        // The sink has parsed SPS/PPS from the stream; take a private copy.
        fAuxSDPLine = strDup(dasl);
        fDummySink = NULL;  // non-owning: the sink itself is managed elsewhere
        setDoneFlag();
    }
    else
    {
        // Not ready yet — poll again in 100 ms.
        int uSecsDelay = 100000;
        nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsDelay, (TaskFunc*)checkForAuxSDPLine, this);
    }
}

// Returns the stream-specific SDP line, starting a temporary playback into
// rtpSink (the "dummy sink") the first time so the line can be discovered.
// Blocks in a nested doEventLoop() until fDoneFlag is set.
char const* H264LiveServerMediaSession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource)
{
    if (fAuxSDPLine != NULL) return fAuxSDPLine;  // cached from earlier
    if (fDummySink == NULL)
    {
        fDummySink = rtpSink;
        fDummySink->startPlaying(*inputSource, afterPlayingDummy, this);
        checkForAuxSDPLine(this);  // kick off the polling loop
    }
    envir().taskScheduler().doEventLoop(&fDoneFlag);
    return fAuxSDPLine;
}

// Creates the per-client (or shared, if reuseFirstSource) frame source chain:
// queue-backed raw source wrapped in an H264VideoStreamFramer, which splits
// the byte stream into NAL units for the RTP sink.
FramedSource* H264LiveServerMediaSession::createNewStreamSource(unsigned clientSessionID, unsigned& estBitRate)
{
    estBitRate = 90000;  // estimated bitrate hint, in kbps
    LiveSourceWithx264 *source = LiveSourceWithx264::createNew(envir(), rtspStreamQueue);
    return H264VideoStreamFramer::createNew(envir(), source);
}

// Creates the H.264 RTP sink for a client session.
RTPSink* H264LiveServerMediaSession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
{
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}
_______________________________________________ live-devel mailing list live-devel@lists.live555.com http://lists.live555.com/mailman/listinfo/live-devel