On Tue, Aug 23, 2016 at 05:14:04PM +0200, Ross Finlayson wrote: > Furthermore, once a ‘scheduled task’ has occurred, its ‘TaskToken’ is no > longer valid, and should not be used again. Therefore, you MUST NOT call > “unscheduleDelayedTask()” (or “rescheduleDelayedTask()”) on a ‘TaskToken’ > after its scheduled task has occurred.
I am attaching a patch[1] that makes libliveMedia follow this rule. Its main work is carefully placing NULL assignments to TaskTokens to avoid unscheduling expired tokens. In that process the API of FramedSource is broken, because the semantics of FramedSource::afterGetting are undefined. The static method is replaced with two non-static methods afterGetting and scheduleAfterGetting. Users of the static afterGetting must migrate to either of the non-static replacements. Do you see a way of fixing this without breaking the API? Furthermore, I moved some methods to private that seemed to be public by accident. This also constitutes an API break, but not exposing them seemed saner to me. Feel free to revert this part of the patch. The patch does not explicitly break ABI, but due to changing the behaviour of the static FramedSource::afterGetting (which now clears fNextToken), it may fail to unschedule tasks that previously could be unscheduled. Since the previous behaviour was undefined, I don't consider this an ABI break. The patch makes my double frees go away, but some more review certainly is in order. Helmut [1] Consider it appropriately licensed LGPL-2+.
--- a/AC3AudioStreamFramer.cpp +++ b/AC3AudioStreamFramer.cpp @@ -175,7 +175,7 @@ // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. - afterGetting(this); + this->afterGetting(); } else { // We were unable to parse a complete frame from the input, because: // - we had to read more data from the source stream, or --- a/ADTSAudioFileSource.cpp +++ b/ADTSAudioFileSource.cpp @@ -166,6 +166,5 @@ fDurationInMicroseconds = fuSecsPerFrame; // Switch to another task, and inform the reader that he has data: - nextTask() = envir().taskScheduler().scheduleDelayedTask(0, - (TaskFunc*)FramedSource::afterGetting, this); + this->FramedSource::scheduleAfterGetting(); } --- a/AMRAudioFileSource.cpp +++ b/AMRAudioFileSource.cpp @@ -169,6 +169,5 @@ fDurationInMicroseconds = 20000; // each frame is 20 ms // Switch to another task, and inform the reader that he has data: - nextTask() = envir().taskScheduler().scheduleDelayedTask(0, - (TaskFunc*)FramedSource::afterGetting, this); + this->FramedSource::scheduleAfterGetting(); } --- a/AMRAudioRTPSource.cpp +++ b/AMRAudioRTPSource.cpp @@ -464,7 +464,7 @@ // Call our own 'after getting' function. 
Because we're not a 'leaf' // source, we can call this directly, without risking // infinite recursion - afterGetting(this); + this->afterGetting(); return; } --- a/BasicUDPSink.cpp +++ b/BasicUDPSink.cpp @@ -96,5 +96,6 @@ // The following is called after each delay between packet sends: void BasicUDPSink::sendNext(void* firstArg) { BasicUDPSink* sink = (BasicUDPSink*)firstArg; + sink->nextTask() = NULL; sink->continuePlaying1(); } --- a/BasicUDPSource.cpp +++ b/BasicUDPSource.cpp @@ -69,5 +69,5 @@ if (!fInputGS->handleRead(fTo, fMaxSize, fFrameSize, fromAddress)) return; // Tell our client that we have new data: - afterGetting(this); // we're preceded by a net read; no infinite recursion + this->afterGetting(); // we're preceded by a net read; no infinite recursion } --- a/ByteStreamFileSource.cpp +++ b/ByteStreamFileSource.cpp @@ -174,11 +174,10 @@ // Inform the reader that he has data: #ifdef READ_FROM_FILES_SYNCHRONOUSLY // To avoid possible infinite recursion, we need to return to the event loop to do this: - nextTask() = envir().taskScheduler().scheduleDelayedTask(0, - (TaskFunc*)FramedSource::afterGetting, this); + this->FramedSource::scheduleAfterGetting(); #else // Because the file read was done from the event loop, we can call the // 'after getting' function directly, without risk of infinite recursion: - FramedSource::afterGetting(this); + this->FramedSource::afterGetting(); #endif } --- a/ByteStreamMemoryBufferSource.cpp +++ b/ByteStreamMemoryBufferSource.cpp @@ -114,5 +114,5 @@ } // Inform the downstream object that it has data: - FramedSource::afterGetting(this); + this->FramedSource::afterGetting(); } --- a/ByteStreamMultiFileSource.cpp +++ b/ByteStreamMultiFileSource.cpp @@ -110,7 +110,7 @@ source->fNumTruncatedBytes = numTruncatedBytes; source->fPresentationTime = presentationTime; source->fDurationInMicroseconds = durationInMicroseconds; - FramedSource::afterGetting(source); + source->FramedSource::afterGetting(); } void 
ByteStreamMultiFileSource::onSourceClosure(void* clientData) { --- a/DVVideoStreamFramer.cpp +++ b/DVVideoStreamFramer.cpp @@ -105,7 +105,7 @@ // For simplicity, we require the downstream object's buffer to be >= this data's size: if (fMaxSize < DV_SAVED_INITIAL_BLOCKS_SIZE) { fNumTruncatedBytes = fMaxSize; - afterGetting(this); + this->afterGetting(); return; } @@ -211,7 +211,7 @@ fNextFramePresentationTime.tv_usec %= MILLION; } - afterGetting(this); + this->afterGetting(); } } else { // We read data into our special buffer; signal that it has arrived: --- a/DeviceSource.cpp +++ b/DeviceSource.cpp @@ -137,7 +137,7 @@ memmove(fTo, newFrameDataStart, fFrameSize); // After delivering the data, inform the reader that it is now available: - FramedSource::afterGetting(this); + this->FramedSource::afterGetting(); } --- a/FramedSource.cpp +++ b/FramedSource.cpp @@ -78,17 +78,26 @@ doGetNextFrame(); } -void FramedSource::afterGetting(FramedSource* source) { - source->fIsCurrentlyAwaitingData = False; +void FramedSource::afterGetting(void *source) { + ((FramedSource*)source)->nextTask() = NULL; + ((FramedSource*)source)->afterGetting(); +} + +void FramedSource::scheduleAfterGetting() { + nextTask() = this->envir().taskScheduler().scheduleDelayedTask(0, afterGetting, this); +} + +void FramedSource::afterGetting() { + this->fIsCurrentlyAwaitingData = False; // indicates that we can be read again // Note that this needs to be done here, in case the "fAfterFunc" // called below tries to read another frame (which it usually will) - if (source->fAfterGettingFunc != NULL) { - (*(source->fAfterGettingFunc))(source->fAfterGettingClientData, - source->fFrameSize, source->fNumTruncatedBytes, - source->fPresentationTime, - source->fDurationInMicroseconds); + if (this->fAfterGettingFunc != NULL) { + (*(this->fAfterGettingFunc))(this->fAfterGettingClientData, + this->fFrameSize, this->fNumTruncatedBytes, + this->fPresentationTime, + this->fDurationInMicroseconds); } } --- 
a/GenericMediaServer.cpp +++ b/GenericMediaServer.cpp @@ -299,6 +299,7 @@ } void GenericMediaServer::ClientSession::livenessTimeoutTask(ClientSession* clientSession) { + clientSession->fLivenessCheckTask = NULL; // If this gets called, the client session is assumed to have timed out, so delete it: #ifdef DEBUG char const* streamName --- a/H263plusVideoStreamFramer.cpp +++ b/H263plusVideoStreamFramer.cpp @@ -120,7 +120,7 @@ // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. - afterGetting(this); + this->afterGetting(); } else { // We were unable to parse a complete frame from the input, because: // - we had to read more data from the source stream, or --- a/H264VideoFileServerMediaSubsession.cpp +++ b/H264VideoFileServerMediaSubsession.cpp @@ -53,8 +53,9 @@ setDoneFlag(); } -static void checkForAuxSDPLine(void* clientData) { +void H264VideoFileServerMediaSubsession::checkForAuxSDPLine(void* clientData) { H264VideoFileServerMediaSubsession* subsess = (H264VideoFileServerMediaSubsession*)clientData; + subsess->nextTask() = NULL; subsess->checkForAuxSDPLine1(); } @@ -74,7 +75,7 @@ // try again after a brief delay: int uSecsToDelay = 100000; // 100 ms nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay, - (TaskFunc*)checkForAuxSDPLine, this); + checkForAuxSDPLine, this); } } @@ -91,7 +92,7 @@ fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this); // Check whether the sink's 'auxSDPLine()' is ready: - checkForAuxSDPLine(this); + this->checkForAuxSDPLine1(); } envir().taskScheduler().doEventLoop(&fDoneFlag); --- a/H264or5VideoRTPSink.cpp +++ b/H264or5VideoRTPSink.cpp @@ -260,7 +260,7 @@ } // Complete delivery to the client: - FramedSource::afterGetting(this); + this->FramedSource::afterGetting(); } } --- a/H264or5VideoStreamDiscreteFramer.cpp +++ b/H264or5VideoStreamDiscreteFramer.cpp @@ -86,7 +86,7 @@ fNumTruncatedBytes = numTruncatedBytes; 
fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; - afterGetting(this); + this->afterGetting(); } Boolean H264or5VideoStreamDiscreteFramer::nalUnitEndsAccessUnit(u_int8_t nal_unit_type) { --- a/H265VideoFileServerMediaSubsession.cpp +++ b/H265VideoFileServerMediaSubsession.cpp @@ -53,8 +53,9 @@ setDoneFlag(); } -static void checkForAuxSDPLine(void* clientData) { +void H265VideoFileServerMediaSubsession::checkForAuxSDPLine(void* clientData) { H265VideoFileServerMediaSubsession* subsess = (H265VideoFileServerMediaSubsession*)clientData; + subsess->nextTask() = NULL; subsess->checkForAuxSDPLine1(); } @@ -74,7 +75,7 @@ // try again after a brief delay: int uSecsToDelay = 100000; // 100 ms nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay, - (TaskFunc*)checkForAuxSDPLine, this); + checkForAuxSDPLine, this); } } @@ -92,7 +93,7 @@ fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this); // Check whether the sink's 'auxSDPLine()' is ready: - checkForAuxSDPLine(this); + this->checkForAuxSDPLine1(); } envir().taskScheduler().doEventLoop(&fDoneFlag); --- a/MP3ADU.cpp +++ b/MP3ADU.cpp @@ -259,7 +259,7 @@ if (fFrameCounter++%fScale == 0) { // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. - afterGetting(this); + this->afterGetting(); } else { // Don't use this frame; get another one: doGetNextFrame(); @@ -343,7 +343,7 @@ // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. - afterGetting(this); + this->afterGetting(); } } --- a/MP3ADUTranscoder.cpp +++ b/MP3ADUTranscoder.cpp @@ -88,5 +88,5 @@ // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. 
- afterGetting(this); + this->afterGetting(); } --- a/MP3ADUinterleaving.cpp +++ b/MP3ADUinterleaving.cpp @@ -137,7 +137,7 @@ // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. - afterGetting(this); + this->afterGetting(); } else { fPositionOfNextIncomingFrame = fInterleaving.lookupInverseCycle(fII); unsigned char* dataPtr; @@ -238,7 +238,7 @@ // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. - afterGetting(this); + this->afterGetting(); } else { #ifdef TEST_LOSS NOTE: This code no longer works, because it uses synchronous reads, --- a/MP3FileSource.cpp +++ b/MP3FileSource.cpp @@ -122,10 +122,9 @@ // function directly. This avoids infinite recursion, as long as our sink // is discontinuous, which is the case for the RTP sink that liveCaster/lc // uses. ##### - afterGetting(this); + this->afterGetting(); #else - nextTask() = envir().taskScheduler().scheduleDelayedTask(0, - (TaskFunc*)afterGetting, this); + this->scheduleAfterGetting(); #endif } --- a/MPEG1or2AudioStreamFramer.cpp +++ b/MPEG1or2AudioStreamFramer.cpp @@ -149,7 +149,7 @@ // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. 
- afterGetting(this); + this->afterGetting(); } else { // We were unable to parse a complete frame from the input, because: // - we had to read more data from the source stream, or --- a/MPEG1or2DemuxedElementaryStream.cpp +++ b/MPEG1or2DemuxedElementaryStream.cpp @@ -84,5 +84,5 @@ fLastSeenSCR = fOurSourceDemux.lastSeenSCR(); fMPEGversion = fOurSourceDemux.mpegVersion(); - FramedSource::afterGetting(this); + this->FramedSource::afterGetting(); } --- a/MPEG1or2VideoStreamDiscreteFramer.cpp +++ b/MPEG1or2VideoStreamDiscreteFramer.cpp @@ -199,5 +199,5 @@ fNumTruncatedBytes = numTruncatedBytes; fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; - afterGetting(this); + this->afterGetting(); } --- a/MPEG2IndexFromTransportStream.cpp +++ b/MPEG2IndexFromTransportStream.cpp @@ -416,7 +416,7 @@ delete head; // Complete delivery to the client: - afterGetting(this); + this->afterGetting(); return True; } --- a/MPEG2TransportStreamFramer.cpp +++ b/MPEG2TransportStreamFramer.cpp @@ -189,7 +189,7 @@ = numTSPackets * (unsigned)(fTSPacketDurationEstimate*1000000); // Complete the delivery to our client: - afterGetting(this); + this->afterGetting(); } Boolean MPEG2TransportStreamFramer::updateTSPacketDurationEstimate(unsigned char* pkt, double timeNow) { --- a/MPEG2TransportStreamMultiplexor.cpp +++ b/MPEG2TransportStreamMultiplexor.cpp @@ -83,9 +83,9 @@ if ((fOutgoingPacketCounter%10) == 0) { // To avoid excessive recursion (and stack overflow) caused by excessively large input frames, // occasionally return to the event loop to do this: - envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this); + this->FramedSource::scheduleAfterGetting(); } else { - afterGetting(this); + this->afterGetting(); } } --- a/MPEG2TransportStreamTrickModeFilter.cpp +++ b/MPEG2TransportStreamTrickModeFilter.cpp @@ -74,7 +74,7 @@ // already read, before asking for more data from us: if (fMaxSize < TRANSPORT_PACKET_SIZE) { 
fFrameSize = 0; - afterGetting(this); + this->afterGetting(); return; } @@ -208,7 +208,7 @@ = (unsigned long)((deliveryPCR - fPresentationTime.tv_sec)*1000000.0f); // fprintf(stderr, "#####DGNF9\n"); - afterGetting(this); + this->afterGetting(); } else { // Arrange to read the Transport Packet that we want: readTransportPacket(fDesiredTSPacketNum); --- a/MPEG4VideoFileServerMediaSubsession.cpp +++ b/MPEG4VideoFileServerMediaSubsession.cpp @@ -55,9 +55,10 @@ setDoneFlag(); } -static void checkForAuxSDPLine(void* clientData) { +void MPEG4VideoFileServerMediaSubsession::checkForAuxSDPLine(void* clientData) { MPEG4VideoFileServerMediaSubsession* subsess = (MPEG4VideoFileServerMediaSubsession*)clientData; + subsess->nextTask() = NULL; subsess->checkForAuxSDPLine1(); } @@ -77,7 +78,7 @@ // try again after a brief delay: int uSecsToDelay = 100000; // 100 ms nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay, - (TaskFunc*)checkForAuxSDPLine, this); + checkForAuxSDPLine, this); } } @@ -94,7 +95,7 @@ fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this); // Check whether the sink's 'auxSDPLine()' is ready: - checkForAuxSDPLine(this); + this->checkForAuxSDPLine1(); } envir().taskScheduler().doEventLoop(&fDoneFlag); --- a/MPEG4VideoStreamDiscreteFramer.cpp +++ b/MPEG4VideoStreamDiscreteFramer.cpp @@ -181,7 +181,7 @@ fNumTruncatedBytes = numTruncatedBytes; fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; - afterGetting(this); + this->afterGetting(); } Boolean MPEG4VideoStreamDiscreteFramer::getNextFrameBit(u_int8_t& result) { --- a/MPEGVideoStreamFramer.cpp +++ b/MPEGVideoStreamFramer.cpp @@ -176,7 +176,7 @@ // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking infinite recursion. 
- afterGetting(this); + this->afterGetting(); } else { // We were unable to parse a complete frame from the input, because: // - we had to read more data from the source stream, or --- a/MatroskaFileParser.cpp +++ b/MatroskaFileParser.cpp @@ -1128,7 +1128,7 @@ fprintf(stderr, " @%u.%06u (%.06f from start); duration %u us\n", demuxedTrack->presentationTime().tv_sec, demuxedTrack->presentationTime().tv_usec, demuxedTrack->presentationTime().tv_sec+demuxedTrack->presentationTime().tv_usec/1000000.0-fPresentationTimeOffset, demuxedTrack->durationInMicroseconds()); #endif setParseState(); - FramedSource::afterGetting(demuxedTrack); // completes delivery + demuxedTrack->FramedSource::afterGetting(); // completes delivery } else { // normal case fCurrentParseState = DELIVERING_FRAME_BYTES; setParseState(); @@ -1191,7 +1191,7 @@ } setParseState(); - FramedSource::afterGetting(demuxedTrack); // completes delivery + demuxedTrack->FramedSource::afterGetting(); // completes delivery return; } while (0); --- a/MultiFramedRTPSink.cpp +++ b/MultiFramedRTPSink.cpp @@ -418,6 +418,7 @@ // The following is called after each delay between packet sends: void MultiFramedRTPSink::sendNext(void* firstArg) { MultiFramedRTPSink* sink = (MultiFramedRTPSink*)firstArg; + sink->nextTask() = NULL; sink->buildAndSendPacket(False); } --- a/MultiFramedRTPSource.cpp +++ b/MultiFramedRTPSource.cpp @@ -200,11 +200,10 @@ // Common case optimization: There are no more queued incoming packets, so this code will not get // executed again without having first returned to the event loop. Call our 'after getting' function // directly, because there's no risk of a long chain of recursion (and thus stack overflow): - afterGetting(this); + this->afterGetting(); } else { // Special case: Call our 'after getting' function via the event loop. 
- nextTask() = envir().taskScheduler().scheduleDelayedTask(0, - (TaskFunc*)FramedSource::afterGetting, this); + this->FramedSource::scheduleAfterGetting(); } } else { // This packet contained fragmented data, and does not complete --- a/OggFileParser.cpp +++ b/OggFileParser.cpp @@ -955,7 +955,7 @@ fCurrentParseState = PARSING_AND_DELIVERING_PAGES; } - FramedSource::afterGetting(demuxedTrack); // completes delivery + demuxedTrack->FramedSource::afterGetting(); // completes delivery return True; } --- a/ProxyServerMediaSession.cpp +++ b/ProxyServerMediaSession.cpp @@ -66,6 +66,8 @@ ////////// ProxyServerMediaSession implementation ////////// +static void continueAfterDESCRIBE(RTSPClient *, int, char *); + UsageEnvironment& operator<<(UsageEnvironment& env, const ProxyServerMediaSession& psms) { // used for debugging return env << "ProxyServerMediaSession[" << psms.url() << "]"; } @@ -115,7 +117,8 @@ tunnelOverHTTPPortNum, verbosityLevel > 0 ? verbosityLevel-1 : verbosityLevel, socketNumToServer); - ProxyRTSPClient::sendDESCRIBE(fProxyRTSPClient); + if (fProxyRTSPClient != NULL) + fProxyRTSPClient->sendDescribeCommand(::continueAfterDESCRIBE, fProxyRTSPClient->auth()); } ProxyServerMediaSession::~ProxyServerMediaSession() { @@ -321,7 +324,7 @@ fOurServerMediaSession.resetDESCRIBEState(); setBaseURL(fOurURL); // because we'll be sending an initial "DESCRIBE" all over again - sendDESCRIBE(this); + this->sendDescribeCommand(::continueAfterDESCRIBE, this->auth()); return; } @@ -416,6 +419,7 @@ void ProxyRTSPClient::sendLivenessCommand(void* clientData) { ProxyRTSPClient* rtspClient = (ProxyRTSPClient*)clientData; + rtspClient->fLivenessCommandTask = NULL; // Note. By default, we do not send "GET_PARAMETER" as our 'liveness notification' command, even if the server previously // indicated (in its response to our earlier "OPTIONS" command) that it supported "GET_PARAMETER". 
This is because @@ -451,10 +455,12 @@ void ProxyRTSPClient::sendDESCRIBE(void* clientData) { ProxyRTSPClient* rtspClient = (ProxyRTSPClient*)clientData; - if (rtspClient != NULL) rtspClient->sendDescribeCommand(::continueAfterDESCRIBE, rtspClient->auth()); + rtspClient->fDESCRIBECommandTask = NULL; + rtspClient->sendDescribeCommand(::continueAfterDESCRIBE, rtspClient->auth()); } void ProxyRTSPClient::subsessionTimeout(void* clientData) { + ((ProxyRTSPClient*)clientData)->fSubsessionTimerTask = NULL; ((ProxyRTSPClient*)clientData)->handleSubsessionTimeout(); } @@ -898,7 +904,7 @@ if (fRTPSource->curPacketMarkerBit() && strcmp(fCodecName, "JPEG") == 0) ((SimpleRTPSink*)fRTPSink)->setMBitOnNextPacket(); // Complete delivery: - FramedSource::afterGetting(this); + this->FramedSource::afterGetting(); } void PresentationTimeSubsessionNormalizer::doGetNextFrame() { --- a/QCELPAudioRTPSource.cpp +++ b/QCELPAudioRTPSource.cpp @@ -336,7 +336,7 @@ // Call our own 'after getting' function. Because we're not a 'leaf' // source, we can call this directly, without risking // infinite recursion - afterGetting(this); + this->afterGetting(); return; } --- a/RTCP.cpp +++ b/RTCP.cpp @@ -172,7 +172,7 @@ // Send our first report. 
fTypeOfEvent = EVENT_REPORT; - onExpire(this); + this->onExpire1(); } struct RRHandlerRecord { @@ -925,6 +925,7 @@ } void RTCPInstance::onExpire(RTCPInstance* instance) { + instance->nextTask() = NULL; instance->onExpire1(); } --- a/SIPClient.cpp +++ b/SIPClient.cpp @@ -339,6 +339,7 @@ void SIPClient::timerAHandler(void* clientData) { SIPClient* client = (SIPClient*)clientData; + client->fTimerA = NULL; if (client->fVerbosityLevel >= 1) { client->envir() << "RETRANSMISSION " << ++client->fTimerACount << ", after " << client->fTimerALen/1000000.0 @@ -349,6 +350,7 @@ void SIPClient::timerBHandler(void* clientData) { SIPClient* client = (SIPClient*)clientData; + client->fTimerB = NULL; if (client->fVerbosityLevel >= 1) { client->envir() << "RETRANSMISSION TIMEOUT, after " << 64*client->fT1/1000000.0 << " seconds\n"; @@ -359,6 +361,7 @@ void SIPClient::timerDHandler(void* clientData) { SIPClient* client = (SIPClient*)clientData; + client->fTimerD = NULL; if (client->fVerbosityLevel >= 1) { client->envir() << "TIMER D EXPIRED\n"; } --- a/StreamReplicator.cpp +++ b/StreamReplicator.cpp @@ -265,7 +265,7 @@ if (!(fNumDeliveriesMadeSoFar < fNumActiveReplicas)) fprintf(stderr, "StreamReplicator::deliverReceivedFrame() Internal Error 2(%d,%d)!\n", fNumDeliveriesMadeSoFar, fNumActiveReplicas); // should not happen // Complete delivery to this replica: - FramedSource::afterGetting(replica); + replica->FramedSource::afterGetting(); } if (fNumDeliveriesMadeSoFar == fNumActiveReplicas - 1 && fMasterReplica != NULL) { @@ -294,7 +294,7 @@ fReplicasAwaitingNextFrame = NULL; // Complete delivery to the 'master' replica (thereby completing all deliveries for this frame): - FramedSource::afterGetting(replica); + replica->FramedSource::afterGetting(); } } --- a/T140TextRTPSink.cpp +++ b/T140TextRTPSink.cpp @@ -139,6 +139,7 @@ } void T140IdleFilter::handleIdleTimeout(void* clientData) { + ((T140IdleFilter*)clientData)->fIdleTimerTask = NULL; 
((T140IdleFilter*)clientData)->handleIdleTimeout(); } @@ -163,13 +164,13 @@ fNumBufferedBytes = 0; // reset buffer - FramedSource::afterGetting(this); // complete delivery + this->FramedSource::afterGetting(); // complete delivery } void T140IdleFilter::deliverEmptyFrame() { fFrameSize = fNumTruncatedBytes = 0; gettimeofday(&fPresentationTime, NULL); - FramedSource::afterGetting(this); // complete delivery + this->FramedSource::afterGetting(); // complete delivery } void T140IdleFilter::onSourceClosure(void* clientData) { --- a/WAVAudioFileSource.cpp +++ b/WAVAudioFileSource.cpp @@ -335,12 +335,11 @@ // Inform the reader that he has data: #ifdef READ_FROM_FILES_SYNCHRONOUSLY // To avoid possible infinite recursion, we need to return to the event loop to do this: - nextTask() = envir().taskScheduler().scheduleDelayedTask(0, - (TaskFunc*)FramedSource::afterGetting, this); + this->FramedSource::scheduleAfterGetting(); #else // Because the file read was done from the event loop, we can call the // 'after getting' function directly, without risk of infinite recursion: - FramedSource::afterGetting(this); + this->FramedSource::afterGetting(); #endif } --- a/include/FramedSource.hh +++ b/include/FramedSource.hh @@ -60,8 +60,12 @@ Boolean isCurrentlyAwaitingData() const {return fIsCurrentlyAwaitingData;} - static void afterGetting(FramedSource* source); - // doGetNextFrame() should arrange for this to be called after the +private: + static void afterGetting(void *source); +public: + void scheduleAfterGetting(); + void afterGetting(); + // doGetNextFrame() should arrange for one of these to be called after the // frame has been read (*iff* it is read successfully) protected: --- a/include/H264VideoFileServerMediaSubsession.hh +++ b/include/H264VideoFileServerMediaSubsession.hh @@ -32,7 +32,6 @@ createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); // Used to implement "getAuxSDPLine()": - void checkForAuxSDPLine1(); void afterPlayingDummy1(); 
protected: @@ -53,6 +52,9 @@ FramedSource* inputSource); private: + static void checkForAuxSDPLine(void *); + void checkForAuxSDPLine1(); + char* fAuxSDPLine; char fDoneFlag; // used when setting up "fAuxSDPLine" RTPSink* fDummyRTPSink; // ditto --- a/include/H265VideoFileServerMediaSubsession.hh +++ b/include/H265VideoFileServerMediaSubsession.hh @@ -32,7 +32,6 @@ createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); // Used to implement "getAuxSDPLine()": - void checkForAuxSDPLine1(); void afterPlayingDummy1(); protected: @@ -53,6 +52,9 @@ FramedSource* inputSource); private: + static void checkForAuxSDPLine(void *); + void checkForAuxSDPLine1(); + char* fAuxSDPLine; char fDoneFlag; // used when setting up "fAuxSDPLine" RTPSink* fDummyRTPSink; // ditto --- a/include/MPEG4VideoFileServerMediaSubsession.hh +++ b/include/MPEG4VideoFileServerMediaSubsession.hh @@ -32,7 +32,6 @@ createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource); // Used to implement "getAuxSDPLine()": - void checkForAuxSDPLine1(); void afterPlayingDummy1(); protected: @@ -53,6 +52,9 @@ FramedSource* inputSource); private: + static void checkForAuxSDPLine(void *); + void checkForAuxSDPLine1(); + char* fAuxSDPLine; char fDoneFlag; // used when setting up "fAuxSDPLine" RTPSink* fDummyRTPSink; // ditto --- a/uLawAudioFilter.cpp +++ b/uLawAudioFilter.cpp @@ -138,7 +138,7 @@ fNumTruncatedBytes = numTruncatedBytes; fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; - afterGetting(this); + this->afterGetting(); } @@ -215,7 +215,7 @@ fNumTruncatedBytes = numTruncatedBytes; fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; - afterGetting(this); + this->afterGetting(); } @@ -269,7 +269,7 @@ fNumTruncatedBytes = numTruncatedBytes; fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; - afterGetting(this); + this->afterGetting(); } @@ -323,7 +323,7 
@@ fNumTruncatedBytes = numTruncatedBytes; fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; - afterGetting(this); + this->afterGetting(); } @@ -374,7 +374,7 @@ fNumTruncatedBytes = numTruncatedBytes + (frameSize - fFrameSize); fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; - afterGetting(this); + this->afterGetting(); } @@ -427,5 +427,5 @@ fNumTruncatedBytes = numTruncatedBytes + (frameSize - fFrameSize); fPresentationTime = presentationTime; fDurationInMicroseconds = durationInMicroseconds; - afterGetting(this); + this->afterGetting(); }
_______________________________________________ live-devel mailing list live-devel@lists.live555.com http://lists.live555.com/mailman/listinfo/live-devel