Hello sir, I made a streamer using your libraries which streams live video from a camera. Here is the relevant code:

int initLm555Settings(void)
{
  scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  char const* destinationAddressStr
#ifdef USE_SSM
    = "232.255.42.42";
#else
    = StreamingIp;
#endif

  const unsigned short rtpPortNum = 18888;
  const unsigned short rtcpPortNum = rtpPortNum + 1;
  const unsigned char ttl = 7;

  struct in_addr destinationAddress;
  destinationAddress.s_addr = our_inet_addr(destinationAddressStr);

  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
#ifdef USE_SSM
  rtpGroupsock.multicastSendOnly();
  rtcpGroupsock.multicastSendOnly();
#endif

  g_ExitEventLoop = 0;

  videoSink = SimpleRTPSink::createNew(*env, &rtpGroupsock, 33, 90000, "video", "MP2T",
                                       1, True, False /*no 'M' bit*/);
  setSendBufferTo(*env, rtpGroupsock.socketNum(), 1024*1024);

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 5000; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  RTCPInstance* rtcp = RTCPInstance::createNew(*env, &rtcpGroupsock,
                                               estimatedSessionBandwidth, CNAME,
                                               videoSink, NULL /* we're a server */,
                                               isSSM);

  UserAuthenticationDatabase* authDB = NULL;
  portNumBits rtspServerPortNum = 554;
  unsigned reclamationTestSeconds = 65U;
  rtspServer = RTSPServer::createNew(*env, rtspServerPortNum, authDB, reclamationTestSeconds);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    // Retry on the alternative RTSP port:
    rtspServerPortNum = 8554;
    rtspServer = RTSPServer::createNew(*env, rtspServerPortNum);
    if (rtspServer == NULL) return 0;
  }
  *env << "Created RTSP server.\n";

  Boolean const inputStreamIsRawUDP = False;
  char const* descriptionString = "Session streamed by \"testOnDemandRT\"";
  sms = ServerMediaSession::createNew(*env, streamName, streamName, descriptionString);
  sms->addSubsession(MPEG2TransportUDPServerMediaSubsession::createNew(
      *env, destinationAddressStr, rtpPortNum, inputStreamIsRawUDP));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  if (rtspServer->setUpTunnelingOverHTTP(sport)) {
    cout << "\n\n\n(We use port " << rtspServer->httpServerPortNum()
         << " for optional RTSP-over-HTTP tunneling.)\n";
  } else {
    cout << "\n\n\n(RTSP-over-HTTP tunneling is not available.)";
  }

  play();

  env->taskScheduler().doEventLoop(&g_ExitEventLoop);

  // The event loop has exited; clean up on this thread:
  if (rtspServer) Medium::close(rtspServer);
  if (rtcp) Medium::close(rtcp);
  if (videoSink) Medium::close(videoSink);
  if (fileSource) Medium::close(fileSource);
  rtpGroupsock.removeAllDestinations();
  rtcpGroupsock.removeAllDestinations();
  env->reclaim();
  delete scheduler;
  return 0; // only to prevent compiler warning
}

void afterPlaying(void* /*clientData*/)
{
  *env << "...done reading from file\n";
  videoSink->stopPlaying();
  Medium::close(videoSource);
  // Note that this also closes the input file that this source read from.

  // Start playing once again:
  play();
}
//================================================================
// play(): Play the input source.
//================================================================
void play()
{
  // Open the input device (Foresight card) as a 'byte-stream file source':
  fi_params.nFICardFrameSize = TRANSPORT_PACKETS_PER_NETWORK_PACKET * TRANSPORT_PACKET_SIZE;
  fi_params.pfnGetRTPPayload = GetRTPPayload;
  fi_params.socketNum = videoSink->groupsockBeingUsed().socketNum();
  DeviceParameters temp;
  fileSource = DeviceSourceFICard::createNew(*env, fi_params, temp);
  if (fileSource == NULL) {
    *env << "Unable to open Foresight card as a byte-stream file source\n";
    exit(1);
  }
  FramedSource* videoES = fileSource;

  // Create a framer for the Video Elementary Stream:
  videoSource = MPEG1or2VideoStreamDiscreteFramer::createNew(*env, videoES); // original

  // Finally, start playing:
  *env << "Beginning to read from file...\n";
  videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
  // env->taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)periodicbrMeasurement1, videoSink);
}

void StartRTPProcess(void)
{
  g_hRtpComThread = CreateThread((LPSECURITY_ATTRIBUTES)NULL, 0,
                                 (LPTHREAD_START_ROUTINE)initLm555Settings,
                                 0, 0, &g_dwRtpComThreadID);
  if (g_hRtpComThread)
    SetThreadPriority(g_hRtpComThread, THREAD_PRIORITY_LOWEST /*THREAD_PRIORITY_NORMAL*/);
}

int StopRTProcess(void)
{
  try {
    if (videoSource) videoSource->stopGettingFrames();
    *env << "in StopRTProcess\n";
    Sleep(500);
    Medium::close(rtspServer);
    g_ExitEventLoop = 1;
    g_ExitEventLoop = 0;
    g_hRtpComThread = 0;
    g_dwRtpComThreadID = 0;
    return 0;
  } catch (...) {
    return -1;
  }
}

The stream produced by this streamer is picked up by a proxy server, to which I give the URL printed at the "char* url" line above. The client then views the stream through the proxy server's URL. When the client asks to stop the stream, the streamer calls its method StopRTProcess(), but it gets stuck at the line "Medium::close(rtspServer);". Can you please tell me why? I know I have modified your code, but I still need some of your help. Thanks
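P.S. One thing I suspect: StopRTProcess() runs on a different thread than the one executing doEventLoop(), so perhaps closing the server from there is the problem. Should I instead only set the watch variable from the controlling thread, let doEventLoop() return in the streaming thread, and leave all the Medium::close() calls to the cleanup code that already follows doEventLoop() in initLm555Settings()? A rough sketch of what I mean (the WaitForSingleObject()/CloseHandle() part is my own guess at how to join the thread, not taken from your code):

int StopRTProcess(void)
{
  // Signal the event loop to exit: doEventLoop(&g_ExitEventLoop) in the
  // streaming thread returns once the watch variable becomes non-zero.
  g_ExitEventLoop = 1;

  // Wait for the streaming thread to run the cleanup that follows
  // doEventLoop() in initLm555Settings() and then terminate.
  WaitForSingleObject(g_hRtpComThread, INFINITE);
  CloseHandle(g_hRtpComThread);
  g_hRtpComThread = 0;
  g_dwRtpComThreadID = 0;
  return 0;
}

Would that avoid whatever Medium::close(rtspServer) is blocking on when it is called from a thread other than the event-loop thread?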
_______________________________________________
live-devel mailing list
live-devel@lists.live555.com
http://lists.live555.com/mailman/listinfo/live-devel