Dear Live555 Team,

I am using Live555 library to create simple rtsp server to stream several mpeg 
video feeds. For this purpose I've implemented subclasses of :

1.       Framed source

2.       OnDemandServerMediaSubsession

In my OnDemandServerMediaSubsession subclass I have implemented both required 
function createNewStreamSource() (returns MPEG1or2VideoStreamDiscreteFramer*) 
and createNewRTPSink() (returns MPEG1or2VideoRTPSink*). Frame source threads 
communicate with the library only by calling 'event triggers'( as suggested in 
FAQ).

Therefore, for each video feed I create a "ServerMediaSession", and ideally 
several clients that have joined different sessions ("one client, one session") 
should receive video data concurrently. However, although one client (I used
VLC player)  can join any single media session and receive video data, two 
clients (VLC players) can't receive data from two separate video feeds 
simultaneously. The client that joined its session earlier is repeatedly 
disconnected. However, two or more clients can join the same session and 
receive video data simultaneously. I have attached the code of my media session:

//// Subclass of OnDemandServerMediaSubsession///
// Factory: builds a subsession that serves video produced by the given
// FFMPEG encoder wrapper.
AnalysingServerMediaSubsession* AnalysingServerMediaSubsession::createNew(
    UsageEnvironment& env, FFMPEG* Encoder, unsigned estimatedBitrate,
    Boolean iFramesOnly, double vshPeriod) {
  AnalysingServerMediaSubsession* subsession =
      new AnalysingServerMediaSubsession(env, Encoder, estimatedBitrate,
                                         iFramesOnly, vshPeriod);
  return subsession;
}

// Constructor: stores the encoder handle and streaming parameters.
// Note the base class is constructed with reuseFirstSource == True, so all
// clients of this subsession share a single input source instance.
AnalysingServerMediaSubsession::AnalysingServerMediaSubsession(
    UsageEnvironment& env, FFMPEG* Encoder, unsigned estimatedBitrate,
    Boolean iFramesOnly, double vshPeriod)
  : OnDemandServerMediaSubsession(env, True),
    m_Encoder(Encoder), fIFramesOnly(iFramesOnly), fVSHPeriod(vshPeriod) {
  // Convert bits/s to kbits/s, rounding to the nearest whole kbps.
  unsigned const bitrateRounded = estimatedBitrate + 500;
  fEstimatedKbps = bitrateRounded / 1000;
}

// Destructor: nothing to release directly — the framer/source/sink objects
// are owned and torn down by the OnDemandServerMediaSubsession machinery.
AnalysingServerMediaSubsession
       ::~AnalysingServerMediaSubsession() {
}

// Creates the per-stream input chain: the encoder-fed AnalyserSource wrapped
// in a discrete framer, as required by MPEG1or2VideoRTPSink.
FramedSource* AnalysingServerMediaSubsession::createNewStreamSource(
    unsigned /*clientSessionId*/, unsigned& estBitrate) {
  estBitrate = fEstimatedKbps;

  AnalyserSource* rawSource = AnalyserSource::createNew(envir(), m_Encoder);
  FramedSource* framedSource = MPEG1or2VideoStreamDiscreteFramer::createNew(
      envir(), rawSource, fIFramesOnly);
  return framedSource;
}

// Creates the RTP sink for MPEG-1/2 video.  The dynamic payload-type
// argument is unused because MPEG-1/2 video has a static payload type.
RTPSink* AnalysingServerMediaSubsession::createNewRTPSink(
    Groupsock* rtpGroupsock, unsigned char /*rtpPayloadTypeIfDynamic*/,
    FramedSource* /*inputSource*/) {
  return MPEG1or2VideoRTPSink::createNew(envir(), rtpGroupsock);
}
and video source:

/////  Video source ///
// Factory for the live, encoder-fed frame source.
AnalyserSource* AnalyserSource::createNew(UsageEnvironment& env,
                                          FFMPEG* E_Source) {
  AnalyserSource* src = new AnalyserSource(env, E_Source);
  return src;
}

// Event-trigger id shared by ALL AnalyserSource instances (created lazily in
// the constructor, destroyed when the last instance dies).
// NOTE(review): because this is static, concurrent triggerEvent() calls from
// different sources overwrite each other's clientData — a likely cause of one
// stream stalling/disconnecting when a second feed starts.  Consider making
// this a per-instance (non-static) member instead — TODO confirm.
EventTriggerId AnalyserSource::eventTriggerId = 0;

// Count of live AnalyserSource instances; guards creation/deletion of the
// shared event trigger.
unsigned AnalyserSource::referenceCount = 0;

// Constructor: registers this object with the encoder thread so that new
// frames can be signalled via the event-trigger mechanism, and lazily creates
// the class-wide event trigger on first use.
// (The original contained an empty "if (referenceCount == 0) {}" block —
// dead code, removed here.)
AnalyserSource::AnalyserSource(UsageEnvironment& env, FFMPEG* E_Source)
  : FramedSource(env), Encoding_Source(E_Source) {
  ++referenceCount;

  Last_Sent_Frame_ID = 0;

  // Tell the encoder thread where to deliver "new frame available" events.
  Encoding_Source->RegisterRTSP_Source(&(env.taskScheduler()), this);

  if (eventTriggerId == 0) {
    // NOTE(review): this trigger id is a static member shared by every
    // AnalyserSource.  TaskScheduler stores one clientData per trigger id,
    // so two sources triggering near-simultaneously can clobber each
    // other's clientData, starving one stream.  Each instance should own
    // its own trigger id (requires making eventTriggerId non-static in the
    // header) — TODO confirm against the Live555 event-trigger docs.
    eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
  }
}

// Destructor: detaches from the encoder thread; the last instance to be
// destroyed also tears down the shared event trigger.
AnalyserSource::~AnalyserSource() {
  Encoding_Source->Un_RegisterRTSP_Source(this);

  if (--referenceCount == 0) {
    envir().taskScheduler().deleteEventTrigger(eventTriggerId);
    eventTriggerId = 0;
  }
}

// Returns the number of AnalyserSource instances currently alive.
unsigned AnalyserSource::GetRefCount() {
       return referenceCount;
}

// Called by the downstream framer when it wants the next frame.  If the
// encoder already has an unseen frame, deliver it now; otherwise do nothing
// here — the encoder thread will fire the event trigger, which calls
// deliverFrame() later.
void AnalyserSource::doGetNextFrame() {
  unsigned int const currentFrameID = Encoding_Source->GetFrameID();

  // A frame id of 0 means the encoder has shut down: signal end-of-stream.
  if (currentFrameID == 0) {
    handleClosure(this);
    return;
  }

  // Deliver immediately only if this frame has not been sent yet.
  if (currentFrameID != Last_Sent_Frame_ID) deliverFrame();
}

void AnalyserSource::deliverFrame0(void* clientData) {
       ((AnalyserSource*)clientData)->deliverFrame();
}

void AnalyserSource::deliverFrame() {

       if (!isCurrentlyAwaitingData()){return }

       static u_int8_t* newFrameDataStart=NULL;
       static unsigned newFrameSize = 0;
       /* get the data frame from the Encoding thread.. */
       if (Encoding_Source->GetFrame(&newFrameDataStart, &newFrameSize, 
&Last_Sent_Frame_ID)){

              if (newFrameDataStart!=NULL) {
                     /* This should never happen, but check anyway.. */
                     if (newFrameSize > fMaxSize) {
                           fFrameSize = fMaxSize;
                           fNumTruncatedBytes = newFrameSize - fMaxSize;
                     } else {
                           fFrameSize = newFrameSize;
                     }
                     gettimeofday(&fPresentationTime, NULL);
                     memmove(fTo, newFrameDataStart, fFrameSize);
                     Encoding_Source->ReleaseFrame();
              }
              else {
                     fFrameSize=0;
                     fTo=NULL;
                     handleClosure(this);
              }
       }
       else {
              handleClosure(this);
       }
       FramedSource::afterGetting(this);
}

Would you please help me here: have I missed something in my implementation, 
or am I doing something completely wrong?

--
Best Regards,

Konstantin Vikhorev

Dr Konstantin Vikhorev
The Virtual Engineering Centre<http://www.virtualengineeringcentre.com/>
University of Liverpool<http://www.liv.ac.uk/>
Office: A41
STFC Daresbury Laboratory
Daresbury Science and Innovation Campus
Warrington, WA4 4AD
United Kingdom

_______________________________________________
live-devel mailing list
live-devel@lists.live555.com
http://lists.live555.com/mailman/listinfo/live-devel

Reply via email to