Hi,
I was finally able to modify testOnDemandRTSPServer so that
it uses my custom FramedSource...

Tests
--------
Test One:
--------
In order to test it:
1. I get the video stream from an H.264 IP camera using OpenRTSPClient and
   write each frame to a separate file [Boolean oneFilePerFrame = True]
 for 100 frames.

 2. I rename each frame file as video1, ...... video100

 3. I stream those frames[ 1...100] with my modified
OnDemandRTSPServer using my custom FramedSource
 4. I used OpenRTSPClient to connect modified OnDemandRTSPServer.

----------
Test Two:
---------
  Just change step 4 [above]:
  For step 4 i used VLC player as a client

Test Result:

----------------
Test Result One
----------------
 OpenRTSPClient successfully connects and writes the incoming data to a single
file [Boolean oneFilePerFrame = True]; call it Result.h264.
Result.h264 is not a playable file. I investigated its binary data
with a hex editor and the MediaInfo utility.
It does not seem to be a correct H.264 file...

-----------------
Test Result Two
-----------------

VLC connects to the server but does not show any stream

////////////////////////////////////////////////////////////////////

So here are my questions:

1. Is my test logic correct? Should I expect a playable file on the
OpenRTSPClient side?
   Should I expect VLC to play the stream?
2. If my logic is correct, what might I be doing wrong in practice?

Any ideas or suggestions are welcome from anyone who has developed such a
custom FramedSource for testOnDemandRTSPServer...


Best Wishes

PS:-----------------------------------------------------------------------------------------------------------------------------
I attach my modified testOnDemandRTSPServer  code below

Files:

MyCustomFramedSource.h : This is a custom FramedSource for my custom
MyCustomServerMediaSubsession
MyCustomServerMediaSubsession.h : This is a custom
OnDemandServerMediaSubsession.
TestOnDemandRTSPServer.cpp : Simple test for my server


More 
PS:------------------------------------------------------------------------------------------------------------

As you can see from my MyCustomFramedSource.cpp, I stream the files in a
Windows environment, like this in deliverFrame:

// Just for test if my logic true
// not a production ready code
 void MyCustomFramedSource::deliverFrame()
 {
        

         cout << "Now deliverFrame() is called" << endl;
        
         if (!isCurrentlyAwaitingData())
         {
                  cout << " we're not ready for the data yet" << endl;

                  return; // we're not ready for the data yet
         }

         cout << " we're ready for the data" << endl;


         static int frameNumber = 0;
         struct stat results;
         char filename[256];


         if(frameNumber >= 100)
         {

                 cout << "finished " << endl;

                 return;
         }
        

         sprintf_s(filename, 256, "D:\\h264\\frame%d.bin", frameNumber++);

         stat(filename, &results);
         unsigned int newFrameSize = results.st_size;

        
         if (newFrameSize > fMaxSize)
         {
                 cout << "newFrameSize > fMaxSize" << endl;
                 fFrameSize = fMaxSize;
                 fNumTruncatedBytes = newFrameSize - fMaxSize;
         }
         else
         {
                  cout << "fFrameSize = newFrameSize" << endl;
                 fFrameSize = newFrameSize;
         }
        

         unsigned char* newFrameData = (unsigned char*)malloc( fFrameSize);

         fstream fbin(filename, ios::in | ios::binary);
         fbin.read((char*)newFrameData, fFrameSize);
         fbin.close();

         memcpy(fTo,newFrameData, fFrameSize);

         gettimeofday(&fPresentationTime,NULL);

         fDurationInMicroseconds = 1000000 / 15; // 15 fps


         FramedSource::afterGetting(this);

 }
-----------------------------------------------------------------------------------------------------
#include "MyCustomFramedSource.h"
#include <sys/stat.h> 

// Trigger id used with TaskScheduler::triggerEvent() to schedule
// deliverFrame0() from the event loop.  NOTE(review): this is static,
// so every instance shares one id -- safe only while a single source
// object exists at a time.
EventTriggerId MyCustomFramedSource::eventTriggerId = 0;

// Constructs the source and lazily registers the (class-wide) event
// trigger.  Creating the trigger unconditionally, as the original did,
// leaks a trigger slot in the TaskScheduler and orphans the previous id
// every time a new instance is built; live555's DeviceSource template
// guards the creation the same way.
MyCustomFramedSource::MyCustomFramedSource(UsageEnvironment& env):FramedSource(env)
{
	if (eventTriggerId == 0) {
		eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
	}
}

// NOTE(review): the event trigger created in the constructor is never
// released; consider envir().taskScheduler().deleteEventTrigger(eventTriggerId)
// when the last instance is destroyed (as live555's DeviceSource does).
MyCustomFramedSource::~MyCustomFramedSource(void)
{
}

// Factory method, following the live555 convention that media objects
// are heap-allocated through createNew() rather than constructed directly.
MyCustomFramedSource* MyCustomFramedSource::createNew(UsageEnvironment& env)
{
	MyCustomFramedSource* newSource = new MyCustomFramedSource(env);
	return newSource;
}


// FramedSource interface: invoked by the downstream object whenever it
// wants the next frame; we deliver one synchronously.
void MyCustomFramedSource::doGetNextFrame()
{
	cout << "Now doGetNextFrame() is called" << endl;
	deliverFrame();
}


 void  MyCustomFramedSource::deliverFrame0(void* clientData)
{
	((MyCustomFramedSource*)clientData)->deliverFrame();
}


 void MyCustomFramedSource::deliverFrame()
 {
	 

	 cout << "Now deliverFrame() is called" << endl;
	 
	 if (!isCurrentlyAwaitingData()) 
	 {
		  cout << " we're not ready for the data yet" << endl;

		  return; // we're not ready for the data yet
	 }

	 cout << " we're ready for the data" << endl;


	 static int frameNumber = 0;
	 struct stat results;
	 char filename[256];



	 if(frameNumber >= 124)
	 {

		 cout << "finished " << endl;

		 return;
	 }
	

	 sprintf_s(filename, 256, "D:\\h264\\frame%d.bin", frameNumber++);

	 stat(filename, &results);
	 unsigned int newFrameSize = results.st_size;

	 
	 if (newFrameSize > fMaxSize) 
	 {
		 cout << "newFrameSize > fMaxSize" << endl;
		 fFrameSize = fMaxSize;
		 fNumTruncatedBytes = newFrameSize - fMaxSize;
	 } 
	 else 
	 {
		  cout << "fFrameSize = newFrameSize" << endl;
		 fFrameSize = newFrameSize;
	 }
	 

	 unsigned char* newFrameData = (unsigned char*)malloc( fFrameSize);

	 fstream fbin(filename, ios::in | ios::binary);
	 fbin.read((char*)newFrameData, fFrameSize);
	 fbin.close();

	 memcpy(fTo,newFrameData, fFrameSize);

	 gettimeofday(&fPresentationTime,NULL);

	 fDurationInMicroseconds = 1000000 / 15; // 15 fps


	 FramedSource::afterGetting(this);

 }

#pragma once


#include "FramedSource.hh"
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupSockHelper.hh"

#include <fstream>
#include <iostream>
#include <string>

using namespace std;

// A live555 FramedSource that feeds pre-recorded H.264 frame files
// (D:\h264\frame<N>.bin) into a streaming pipeline, one file per frame.
class MyCustomFramedSource : public FramedSource
{
public:
    // Factory (live555 convention: media objects are heap-created).
    static MyCustomFramedSource* createNew(UsageEnvironment& env);

public:
	// Id for TaskScheduler::triggerEvent() to schedule delivery from the
	// event loop.  NOTE(review): static, so shared by every instance --
	// only safe while a single source object exists at a time.
	static EventTriggerId eventTriggerId;
protected:

	MyCustomFramedSource(UsageEnvironment& env);

	virtual ~MyCustomFramedSource(void);

private:
	// FramedSource interface: called when the sink wants the next frame.
	virtual void doGetNextFrame();

private:
	// Static trampoline passed to createEventTrigger(); forwards to
	// deliverFrame() on the instance supplied as clientData.
	static void deliverFrame0(void* clientData);
	// Copies one frame file into fTo and calls afterGetting().
	void deliverFrame();
};
#include "MyCustomServerMediaSubsession.h"



// Forwards construction straight to the OnDemandServerMediaSubsession
// base class; this subclass keeps no extra state to initialize here.
MyCustomServerMediaSubsession::MyCustomServerMediaSubsession(
	UsageEnvironment& env, Boolean reuseFirstSource,
    portNumBits initialPortNum)
	:OnDemandServerMediaSubsession(env,reuseFirstSource,initialPortNum)
{
}


// Factory method; heap-allocates the subsession per live555 convention.
MyCustomServerMediaSubsession* MyCustomServerMediaSubsession::createNew(UsageEnvironment& env, Boolean reuseFirstSource,portNumBits initialPortNum)
{
	MyCustomServerMediaSubsession* subsession =
		new MyCustomServerMediaSubsession(env, reuseFirstSource, initialPortNum);
	return subsession;
}
// Nothing to release: the stream source and RTP sink created by the
// virtual factory methods are owned and destroyed by the base class.
MyCustomServerMediaSubsession::~MyCustomServerMediaSubsession(void)
{
}

// Creates the per-client source chain: our raw-frame source wrapped in an
// H264VideoStreamFramer, which parses NAL units and generates the timing
// the RTP sink needs.  The original never set the "estBitrate" output
// parameter, which the base class expects to be filled in (kbps) for
// bandwidth estimation -- leaving it unset hands back garbage.
FramedSource* MyCustomServerMediaSubsession::createNewStreamSource(unsigned clientSessionId,unsigned& estBitrate)
{
	estBitrate = 500; // kbps, rough estimate for the recorded stream
	myCustomFramedSource = MyCustomFramedSource::createNew(envir());
	return H264VideoStreamFramer::createNew(envir(), myCustomFramedSource);
}
// Creates the per-client H.264 RTP sink.
// NOTE(review): this overload supplies no sprop parameter sets, so the
// SDP description depends on the framer having seen SPS/PPS NAL units in
// the stream; if the recorded frame files lack them, clients such as VLC
// may fail to decode -- consider the createNew() overload that takes
// SPS/PPS, or ensure frame0.bin starts with them.  TODO confirm.
RTPSink* MyCustomServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,unsigned char rtpPayloadTypeIfDynamic,FramedSource* inputSource)
{
   return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);

}
#pragma once

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "MyCustomFramedSource.h"

// An on-demand subsession that streams H.264 video produced by
// MyCustomFramedSource instead of reading from an input file.
class MyCustomServerMediaSubsession : public OnDemandServerMediaSubsession
{
public:

	// Factory (live555 convention: subsessions are heap-created).
	static MyCustomServerMediaSubsession* createNew(UsageEnvironment& env, Boolean reuseFirstSource,portNumBits initialPortNum = 6970);

	// Last source created by createNewStreamSource().  NOTE(review):
	// public and overwritten per client session; only meaningful while a
	// single client is connected.
	MyCustomFramedSource* myCustomFramedSource;

protected:

	MyCustomServerMediaSubsession(UsageEnvironment& env, Boolean reuseFirstSource,portNumBits initialPortNum = 6970);
	virtual ~MyCustomServerMediaSubsession();

	// OnDemandServerMediaSubsession interface: build the source chain and
	// the RTP sink for each client session.
	virtual FramedSource* createNewStreamSource(unsigned clientSessionId,unsigned& estBitrate);
	virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,unsigned char rtpPayloadTypeIfDynamic,FramedSource* inputSource);


private:
};

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "MyCustomServerMediaSubsession.h"
#include "MyCustomFramedSource.h"

// Global usage environment shared by main() and helpers.
UsageEnvironment* env;


// When True, one input source is shared by all concurrent clients.
Boolean reuseFirstSource = False;
// Unused here; retained from the original testOnDemandRTSPServer template.
Boolean iFramesOnly = False;

// Prints the stream's name and its rtsp:// URL once registered.
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms, char const* streamName, char const* inputFileName); 

// Sets up a live555 RTSP server on port 6554 serving one H.264 session
// ("h264ESVideoTest") backed by MyCustomFramedSource, then runs the
// event loop forever.
int main(int argc, char** argv) 
{
  // Event loop machinery every live555 object hangs off of.
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL; // NULL => no client authentication

  // Create the RTSP server:
  RTSPServer* rtspServer = RTSPServer::createNew(*env, 6554, authDB);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  char const* descriptionString  = "Session streamed by \"testOnDemandRTSPServer\"";

  // A H.264 video elementary stream:
  {
    char const* streamName = "h264ESVideoTest";
    // The custom source reads its own frame files, so no input file name
    // is needed; kept only for announceStream()'s message.
    char const* inputFileName = "";// Actually no need
    ServerMediaSession* sms
      = ServerMediaSession::createNew(*env, streamName, streamName,
				      descriptionString);

	
   // sms->addSubsession(H264VideoFileServerMediaSubsession ::createNew(*env, inputFileName, reuseFirstSource));
    

	// Replace the stock file-based subsession with our custom one.
	MyCustomServerMediaSubsession* myCustomServerMediaSubsession =
		MyCustomServerMediaSubsession::createNew(*env,reuseFirstSource);

	 sms->addSubsession(myCustomServerMediaSubsession);


	//scheduler->triggerEvent(MyCustomFramedSource::eventTriggerId,myCustomServerMediaSubsession->myCustomFramedSource);



    rtspServer->addServerMediaSession(sms);

    announceStream(rtspServer, sms, streamName, inputFileName);
  }

  // Optional RTSP-over-HTTP tunneling on the first free well-known port.
  if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
    *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
  } else {
    *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
  }

  env->taskScheduler().doEventLoop(); // does not return

  return 0; 
}

// Announces a registered stream on the server's usage environment:
// its name, backing file, and the rtsp:// URL clients should use.
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
			   char const* streamName, char const* inputFileName) {
  UsageEnvironment& env = rtspServer->envir();
  char* url = rtspServer->rtspURL(sms);
  env << "\n\"" << streamName << "\" stream, from the file \""
      << inputFileName << "\"\n";
  env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url; // rtspURL() returns a heap-allocated string we must free
}
_______________________________________________
live-devel mailing list
live-devel@lists.live555.com
http://lists.live555.com/mailman/listinfo/live-devel

Reply via email to