Hi Ross,

I have read several times, on the live555 mailing list and on your site, that 
you seem to consider MJPEG streaming as something to avoid.
But in some situations (a dedicated network, or when precise frame positioning 
must be guaranteed) it can be interesting.

Reading RFC 2435 and the live555 code, it seems that very little is missing, 
so I tried to improve my understanding of the live555 library.

I have attached a class inheriting from the JPEGVideoSource interface that 
gets the RTP parameters from the JPEG header, as you suggested in 
http://lists.live555.com/pipermail/live-devel/2003-November/000037.html
and a test program.

I did not really understand the debate about the Q factor, which does not seem 
to be extractable from the quantization table data.
Using different precision values for the quantization tables (0, 8, 255) and 
different Q factors (128, 255) does not seem to have a big impact on the 
displayed picture. Using a Q factor below 128 produces an image with very 
little contrast.

But perhaps the viewer is rescaling the values before display?
Do you have a preferred value for the Q factor, or am I missing something in 
the RFC?
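
For reference, my reading of the RFC is that a receiver which sees Q below 128 
ignores any in-band tables and regenerates them from Q (Appendix A), which 
might explain the contrast change I see. Roughly, paraphrasing the RFC's 
sample code (jpeg_luma_quantizer / jpeg_chroma_quantizer being the standard 
ISO 10918-1 Annex K tables, not reproduced here):

// Paraphrase of RFC 2435 Appendix A: rebuild both quantization tables
// from the Q factor alone.
extern const int jpeg_luma_quantizer[64];   // Annex K luma table
extern const int jpeg_chroma_quantizer[64]; // Annex K chroma table

void MakeTables(int q, u_int8_t* lqt, u_int8_t* cqt)
{
        int factor = q;
        if (factor < 1)  factor = 1;
        if (factor > 99) factor = 99;

        // Map Q onto a percentage scale factor:
        int scale = (q < 50) ? (5000 / factor) : (200 - factor*2);

        for (int i = 0; i < 64; i++) {
                int lq = (jpeg_luma_quantizer[i]   * scale + 50) / 100;
                int cq = (jpeg_chroma_quantizer[i] * scale + 50) / 100;
                // Clamp the quantizers to 1..255:
                lqt[i] = (lq < 1) ? 1 : (lq > 255) ? 255 : lq;
                cqt[i] = (cq < 1) ? 1 : (cq > 255) ? 255 : cq;
        }
}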

Thanks for your advice.

Best Regards,

                Michel.


// project header
#include "MJPEGVideoSource.hh"

/*!*****************************************************************************
 * \brief Constructor
*************************************************************************************/
MJPEGVideoSource::MJPEGVideoSource(UsageEnvironment& env, FramedSource* source)
        : JPEGVideoSource(env),
          m_inputSource(source),
          m_width(0),
          m_height(0),
          m_qTable0Init(false),
          m_qTable1Init(false)
{
}

/*!*****************************************************************************
 * \brief Destructor
*************************************************************************************/
MJPEGVideoSource::~MJPEGVideoSource()
{
        if (m_inputSource)
        {
                Medium::close(m_inputSource);
        }
}

/*!*****************************************************************************
 * \brief Ask for a frame from the source
*************************************************************************************/
void MJPEGVideoSource::doGetNextFrame() 
{
        if (m_inputSource)
        {
                // Pass the request through to the input source; our
                // afterGettingFrame() will post-process the JPEG frame.
                m_inputSource->getNextFrame(fTo, fMaxSize,
                                afterGettingFrameSub, this,
                                FramedSource::handleClosure, this);
        }
}

/*!*****************************************************************************
 * \brief source callback
*************************************************************************************/
void MJPEGVideoSource::afterGettingFrame(unsigned frameSize,
                unsigned numTruncatedBytes, struct timeval presentationTime,
                unsigned durationInMicroseconds)
{
        int headerSize = 0;
        bool headerOk = false;

        for (unsigned int i = 0; i < frameSize; ++i) 
        {
                // SOF0 marker: extract the frame dimensions. JPEGVideoSource
                // reports width/height in units of 8 pixels, hence the
                // divide-by-8 of the 16-bit values ((hi<<8 | lo) >> 3).
                if ( (i+8) < frameSize && fTo[i] == 0xFF && fTo[i+1] == 0xC0 ) 
                {
                        m_height = (fTo[i+5]<<5)|(fTo[i+6]>>3);
                        m_width  = (fTo[i+7]<<5)|(fTo[i+8]>>3);
                }
                // DQT marker: copy the quantization tables. This assumes
                // 8-bit table entries (Pq == 0 in the Pq/Tq byte at i+4).
                if ( (i+5+64) < frameSize && fTo[i] == 0xFF && fTo[i+1] == 0xDB )
                {
                        if (fTo[i+4] == 0)        // Tq == 0: luma table
                        {
                                memcpy(m_qTable, fTo + i + 5, 64);
                                m_qTable0Init = true;
                        }
                        else if (fTo[i+4] == 1)   // Tq == 1: chroma table
                        {
                                memcpy(m_qTable + 64, fTo + i + 5, 64);
                                m_qTable1Init = true;
                        }
                }
                // End of header: the SOS header ends with Se == 0x3F and
                // Ah/Al == 0x00; the entropy-coded scan data starts there.
                if ( (i+1) < frameSize && fTo[i] == 0x3F && fTo[i+1] == 0x00 ) 
                {
                        headerOk = true;
                        headerSize = i+2;
                        break;
                }
        }

        if (headerOk)
        {
                // Strip the JPEG header: an RFC 2435 receiver reconstructs
                // it from the RTP/JPEG header fields. The regions overlap,
                // so memmove() is required (memcpy() would be undefined).
                fFrameSize = frameSize - headerSize;
                memmove( fTo, fTo + headerSize, fFrameSize );
        }
        
        fNumTruncatedBytes = numTruncatedBytes;
        fPresentationTime = presentationTime;
        fDurationInMicroseconds = durationInMicroseconds;
        afterGetting(this);
}               

/*!*****************************************************************************
 * \brief Return the quantization tables
*************************************************************************************/
u_int8_t const* MJPEGVideoSource::quantizationTables( u_int8_t& precision,
                                                      u_int16_t& length )
{
        length = 0;
        // "precision" is a bitmask: bit n set means table n has 16-bit
        // entries. Both tables here are 8-bit, so it stays 0 (RFC 2435 3.1.8).
        precision = 0;
        if ( m_qTable0Init && m_qTable1Init )
        {
                length = sizeof(m_qTable);
        }
        return m_qTable;
}
                

Attachment: MJPEGVideoSource.hh
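The archive did not inline the header, so here is a sketch of what it 
presumably declares, reconstructed from the .cc file above. The type() and 
qFactor() return values in particular are assumptions (type 1 = 4:2:0 
sampling; Q = 128 to tell receivers the tables are sent in-band):

// MJPEGVideoSource.hh -- reconstruction, not the original attachment
#ifndef MJPEG_VIDEO_SOURCE_HH
#define MJPEG_VIDEO_SOURCE_HH

#include "JPEGVideoSource.hh"

class MJPEGVideoSource : public JPEGVideoSource
{
public:
        static MJPEGVideoSource* createNew(UsageEnvironment& env, FramedSource* source)
        {
                return new MJPEGVideoSource(env, source);
        }

        // JPEGVideoSource interface; these values go into the RTP/JPEG header:
        virtual u_int8_t type()    { return 1; }        // assumed 4:2:0 sampling
        virtual u_int8_t qFactor() { return 128; }      // 128..255 => in-band tables
        virtual u_int8_t width()   { return m_width; }  // in 8-pixel units
        virtual u_int8_t height()  { return m_height; } // in 8-pixel units
        virtual u_int8_t const* quantizationTables(u_int8_t& precision, u_int16_t& length);

protected:
        MJPEGVideoSource(UsageEnvironment& env, FramedSource* source);
        virtual ~MJPEGVideoSource();

        virtual void doGetNextFrame();

        // Static trampoline used as the getNextFrame() callback:
        static void afterGettingFrameSub(void* clientData, unsigned frameSize,
                        unsigned numTruncatedBytes, struct timeval presentationTime,
                        unsigned durationInMicroseconds)
        {
                ((MJPEGVideoSource*)clientData)->afterGettingFrame(frameSize,
                        numTruncatedBytes, presentationTime, durationInMicroseconds);
        }
        void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                        struct timeval presentationTime, unsigned durationInMicroseconds);

protected:
        FramedSource* m_inputSource;
        u_int8_t      m_width;        // frame width / 8
        u_int8_t      m_height;       // frame height / 8
        u_int8_t      m_qTable[128];  // two 64-byte tables: luma then chroma
        bool          m_qTable0Init;
        bool          m_qTable1Init;
};

#endif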

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include "MJPEGVideoSource.hh"

UsageEnvironment* env;
char const* inputFileName = "test.mjpeg";
MJPEGVideoSource* videoSource;
RTPSink* videoSink;

void play(); // forward

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Create 'groupsocks' for RTP and RTCP:
  struct in_addr destinationAddress;
  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
  // Note: This is a multicast address.  If you wish instead to stream
  // using unicast, then you should use the "testOnDemandRTSPServer"
  // test program - not this test program - as a model.

  const unsigned short rtpPortNum = 18888;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 255;

  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  rtpGroupsock.multicastSendOnly(); // we're a SSM source
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
  rtcpGroupsock.multicastSendOnly(); // we're a SSM source

  // Create a 'JPEG Video RTP' sink from the RTP 'groupsock':
  videoSink = JPEGVideoRTPSink::createNew(*env, &rtpGroupsock);

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 5000; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  RTCPInstance* rtcp = RTCPInstance::createNew(*env, &rtcpGroupsock,
                            estimatedSessionBandwidth, CNAME,
                            videoSink, NULL /* we're a server */,
                            True /* we're a SSM source */);
  // Note: This starts RTCP running automatically

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  ServerMediaSession* sms = ServerMediaSession::createNew(*env, "testStream",
      inputFileName, "Session streamed by \"testMJPEGVideoStreamer\"",
      True /*SSM*/);
  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  // Start the streaming:
  *env << "Beginning streaming...\n";
  play();

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}

void afterPlaying(void* /*clientData*/) {
  *env << "...done reading from file\n";

  videoSink->stopPlaying();
  Medium::close(videoSource);
  // Note that this also closes the input file that this source read from.

  // Start playing once again:
  play();
}

void play() {
  // Open the input file as a 'byte-stream file source':
  ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(*env, 
inputFileName);
  if (fileSource == NULL) {
    *env << "Unable to open file \"" << inputFileName
         << "\" as a byte-stream file source\n";
    exit(1);
  }

  FramedSource* videoES = fileSource;

  // Wrap the byte stream in our MJPEG source (parses and strips JPEG headers):
  videoSource = MJPEGVideoSource::createNew(*env, videoES);

  // Finally, start playing:
  *env << "Beginning to read from file...\n";
  videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}
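
For testing, the stream can be pulled with live555's own openRTSP client 
(replace <server> with the machine running the test program):

  openRTSP rtsp://<server>:8554/testStream

VLC should also accept the same URL.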
