forgot to attach the code! 

On Mon, 2008-06-23 at 14:07 -0400, Mike Gilorma wrote:
> Well, I've gotten this far with my h264 stream framer:
> 
> I have a class H264VideoStreamParser that subclasses
> MPEGVideoStreamParser and have created a H264VideoStreamFramer.  I have
> a test program called H264Streamer that implements these new functions
> (see attached).  I am trying to stream a JVT NAL sequence.  It seems
> that my test program finds the first NAL header but then gets stuck.  
> 
> I'm sure I'm missing more than one thing here, so any help would be
> greatly appreciated.  
> 
> Also for those out there who are trying to do something similar to what
> I am, I will make sure to make my test program and corresponding
> H264VideoStreamFramer code available to all who wish!!
> 
> On Thu, 2008-06-19 at 16:37 +0300, Brian Marete wrote:
> > On Wed, Jun 18, 2008 at 9:37 PM, Mike Gilorma
> > <[EMAIL PROTECTED]> wrote:
> > > That sounds like it might be helpful, could you send that over to me?
> > 
> > I will, when I get to the (non-networked) machine that contains it tomorrow.
> > 
> 
> 
> _______________________________________________
> live-devel mailing list
> live-devel@lists.live555.com
> http://lists.live555.com/mailman/listinfo/live-devel
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
**********/
// Copyright (c) 1996-2007, Live Networks, Inc.  All rights reserved
// A test program that reads an H.264 Video Elementary Stream file,
// and streams it using RTP
// main program

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"

UsageEnvironment* env;
char const* inputFileName = "test.h264";
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;

void play(); // forward

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Create 'groupsocks' for RTP and RTCP:
  struct in_addr destinationAddress;
  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
  // Note: This is a multicast address.  If you wish instead to stream
  // using unicast, then you should use the "testOnDemandRTSPServer"
  // test program - not this test program - as a model.

  const unsigned short rtpPortNum = 18888;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 255;

  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  rtpGroupsock.multicastSendOnly(); // we're a SSM source
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
  rtcpGroupsock.multicastSendOnly(); // we're a SSM source

  // Create a 'MPEG-4 Video RTP' sink from the RTP 'groupsock':
  videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96, 0x42, "h264");

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  RTCPInstance* rtcp
  = RTCPInstance::createNew(*env, &rtcpGroupsock,
			    estimatedSessionBandwidth, CNAME,
			    videoSink, NULL /* we're a server */,
			    True /* we're a SSM source */);
  // Note: This starts RTCP running automatically

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  ServerMediaSession* sms
    = ServerMediaSession::createNew(*env, "testStream", inputFileName,
		   "Session streamed by \"testMPEG4VideoStreamer\"",
					   True /*SSM*/);
  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  // Start the streaming:
  *env << "Beginning streaming...\n";
  play();

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}

// Invoked by the sink once the entire input file has been consumed:
// tear down the framer and loop the stream from the beginning.
void afterPlaying(void* /*clientData*/) {
  *env << "...done reading from file\n";

  // Closing the framer also closes the file source it was reading from.
  Medium::close(videoSource);

  // Re-open the input file and stream it again, so playback loops forever:
  play();
}

// (Re)opens the input file, wraps it in an H.264 framer, and starts
// feeding frames into the global RTP sink.
void play() {
  // Treat the input file as a plain byte stream:
  ByteStreamFileSource* fileSource
    = ByteStreamFileSource::createNew(*env, inputFileName);
  if (fileSource == NULL) {
    *env << "Unable to open file \"" << inputFileName
	 << "\" as a byte-stream file source\n";
    exit(1);
  }

  // Wrap the byte stream in a framer that carves out H.264 NAL units:
  videoSource = H264VideoStreamFramer::createNew(*env, fileSource);

  // Start pumping frames from the framer into the sink:
  *env << "Beginning to read from file...\n";
  videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
**********/
// "liveMedia"
// Copyright (c) 1996-2007 Live Networks, Inc.  All rights reserved.
// Any source that feeds into a "H264VideoRTPSink" must be of this class.
// This is a virtual base class; subclasses must implement the
// "currentNALUnitEndsAccessUnit()" virtual function.
// Implementation

// #include "H264VideoStreamFramer.hh"
// 
// H264VideoStreamFramer::H264VideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource)
//   : FramedFilter(env, inputSource) {
// }
// 
// H264VideoStreamFramer::~H264VideoStreamFramer() {
// }
// 
// Boolean H264VideoStreamFramer::isH264VideoStreamFramer() const {
//   return True;
// }

// ***************************** H264VideoStreamFramer / H264VideoStreamParser implementation

#include <iostream>
#include "H264VideoStreamFramer.hh"
#include "MPEGVideoStreamParser.hh"
#include <string.h>

////////// H264VideoStreamParser definition //////////

// An enum representing the current state of the parser:
enum H264ParseState {
  PARSING_START_SEQUENCE, // scanning for the next 4-byte 0x00000001 start code
  PARSING_NAL_UNIT        // saving NAL-unit bytes until the next start code
}; 

// A two-state parser that splits a raw H.264 byte stream (Annex-B style,
// 0x00000001-delimited) into individual NAL units for the framer.
class H264VideoStreamParser: public MPEGVideoStreamParser {
public:
  H264VideoStreamParser(H264VideoStreamFramer* usingSource,
             FramedSource* inputSource);
  virtual ~H264VideoStreamParser();

private: // redefined virtual functions:
  virtual void flushInput();
  virtual unsigned parse();
      // Returns the number of bytes delivered to the downstream reader,
      // or 0 if no complete frame was acquired on this call.

private:
  // Downcast helper: fUsingSource is known to be the framer that created us.
  H264VideoStreamFramer* usingSource() {
    return (H264VideoStreamFramer*)fUsingSource;
  }
  void setParseState(H264ParseState parseState);

  void parseStartSequence();   // handler for PARSING_START_SEQUENCE
  unsigned parseNALUnit();     // handler for PARSING_NAL_UNIT

private:
  H264ParseState fCurrentParseState;
  // NOTE(review): these counters are never written anywhere in this file --
  // presumably intended for presentation-time computation (see the
  // commented-out computePresentationTime() calls below); confirm before use.
  unsigned fSecondsSinceLastTimeCode, fTotalTicksSinceLastTimeCode, fPrevNewTotalTicks; 

};

////////// H264VideoStreamFramer implementation //////////

// Factory function: constructs a framer reading from 'inputSource'.
H264VideoStreamFramer*
H264VideoStreamFramer::createNew(UsageEnvironment& env,
                  FramedSource* inputSource) {
  std::cout << "H264VideoStreamFramer: in createNew" << std::endl;
  // Need to add source type checking here???  #####
  H264VideoStreamFramer* framer = new H264VideoStreamFramer(env, inputSource);
  return framer;
}

// Returns the current 'configuration' byte buffer; its length is reported
// through the 'numBytes' out-parameter.
unsigned char* H264VideoStreamFramer
::getConfigBytes(unsigned& numBytes) const {
  std::cout << "H264VideoStreamFramer: in getConfigBytes" << std::endl;
  numBytes = fNumConfigBytes;
  return fConfigBytes;
}

// Constructor: initializes the config-byte bookkeeping and (optionally)
// attaches a parser that carves the input byte stream into NAL units.
H264VideoStreamFramer::H264VideoStreamFramer(UsageEnvironment& env,
                           FramedSource* inputSource,
                           Boolean createParser)
  : MPEGVideoStreamFramer(env, inputSource),
    fProfileAndLevelIndication(0),
    fConfigBytes(NULL), fNumConfigBytes(0),
    fNewConfigBytes(NULL), fNumNewConfigBytes(0) {
  std::cout << "H264VideoStreamFramer: in createParser" << std::endl;
  if (createParser) {
    fParser = new H264VideoStreamParser(this, inputSource);
  } else {
    fParser = NULL;
  }
}

// Destructor: releases both the completed and the in-progress config buffers.
H264VideoStreamFramer::~H264VideoStreamFramer() {
  delete[] fConfigBytes;
  delete[] fNewConfigBytes;
}

// Discards any partially-accumulated 'new config' bytes so that a fresh
// configuration sequence can be collected from scratch.
void H264VideoStreamFramer::startNewConfig() {
  std::cout << "H264VideoStreamFramer: in startNewConfig" << std::endl;
  delete[] fNewConfigBytes;
  fNewConfigBytes = NULL;
  fNumNewConfigBytes = 0;
}

// Appends 'numNewBytes' bytes to the in-progress 'new config' buffer,
// reallocating it to the exact combined size.
void H264VideoStreamFramer
::appendToNewConfig(unsigned char* newConfigBytes, unsigned numNewBytes) {
    std::cout << "H264VideoStreamFramer: in appendToNewConfig" << std::endl;
  // Allocate a new block of memory for the combined config bytes:
  unsigned char* configNew
    = new unsigned char[fNumNewConfigBytes + numNewBytes];

  // Copy the old, then the new, config bytes there.
  // BUG FIX: passing a NULL pointer to memmove/memcpy is undefined behavior
  // even when the length is 0, and fNewConfigBytes is NULL on the first call
  // (see startNewConfig()).  Guard both copies; memcpy is safe here because
  // 'configNew' is a fresh allocation and cannot overlap either source.
  if (fNewConfigBytes != NULL && fNumNewConfigBytes > 0) {
    memcpy(configNew, fNewConfigBytes, fNumNewConfigBytes);
  }
  if (newConfigBytes != NULL && numNewBytes > 0) {
    memcpy(&configNew[fNumNewConfigBytes], newConfigBytes, numNewBytes);
  }

  delete[] fNewConfigBytes; fNewConfigBytes = configNew;
  fNumNewConfigBytes += numNewBytes;
    std::cout << "H264VideoStreamFramer: done appendToNewConfig" << std::endl;
}

// Promotes the accumulated 'new config' bytes to be the current config,
// then resets the accumulator for the next configuration sequence.
void H264VideoStreamFramer::completeNewConfig() {
  std::cout << "H264VideoStreamFramer: in completeNewConfig" << std::endl;
  delete[] fConfigBytes;
  fConfigBytes = fNewConfigBytes;
  fNumConfigBytes = fNumNewConfigBytes;
  fNewConfigBytes = NULL;
  fNumNewConfigBytes = 0;
}

// RTTI-style hook: lets downstream objects verify their source's type.
Boolean H264VideoStreamFramer::isH264VideoStreamFramer() const {
  return True;
}

////////// H264VideoStreamParser implementation //////////

// Constructor: parsing begins in the 'looking for a start code' state.
H264VideoStreamParser
::H264VideoStreamParser(H264VideoStreamFramer* usingSource,
             FramedSource* inputSource)
  : MPEGVideoStreamParser(usingSource, inputSource),
    fCurrentParseState(PARSING_START_SEQUENCE) {
}

// Destructor: nothing to release beyond what the base class handles.
H264VideoStreamParser::~H264VideoStreamParser() {
}

// Records the new parse state, then has the base class checkpoint the
// parser position so an interrupted parse can resume from this point.
void H264VideoStreamParser::setParseState(H264ParseState parseState) {
  std::cout << "H264VideoStreamPARSER: setParseState: " << (int)parseState << std::endl;
  fCurrentParseState = parseState;
  MPEGVideoStreamParser::setParseState();
}

// Discards buffered input; no parser-local state needs resetting here,
// so we defer entirely to the base class.
void H264VideoStreamParser::flushInput() {
  std::cout << "H264VideoStreamPARSER: flushInput()" << std::endl;
  StreamParser::flushInput();
}


// Dispatches on the current parse state.  The return value is the size of
// the frame delivered to the downstream reader; 0 means "no complete frame
// yet" (the framer will then wait for more input).
unsigned H264VideoStreamParser::parse() {
    std::cout << "H264VideoStreamPARSER: parse" << std::endl;
  try {
    switch (fCurrentParseState) {
    case PARSING_START_SEQUENCE: {
      parseStartSequence();
      return 0; // only synchronized on a start code; no frame data yet
    }
    case PARSING_NAL_UNIT: {
      // BUG FIX: the NAL unit's size was previously discarded (this case
      // always returned 0), so the framer never delivered a frame and the
      // stream hung after the first start code was found.  Propagate the
      // saved-frame size instead:
      return parseNALUnit();
    }
    default: {
      return 0; // shouldn't happen
    }
    }
  } catch (int /*e*/) {
#ifdef DEBUG
    fprintf(stderr, "H264VideoStreamParser::parse() EXCEPTION (This is normal behavior - *not* an error)\n");
#endif
    return 0;  // the parsing got interrupted - need more input data
  }
}

// Scans forward until the 4-byte Annex-B start code (0x00000001) is at the
// head of the input, consumes it, and switches to PARSING_NAL_UNIT.
void H264VideoStreamParser::parseStartSequence()
{
    std::cout << "H264VideoStreamPARSER: parseStartSequence" << std::endl;
    // Find the start sequence (00 00 00 01), advancing one byte at a time.
    // (Per-byte debug printing removed: it produced one line of output for
    // every input byte scanned.)
    u_int32_t next4 = test4Bytes();
    while (next4 != 0x00000001)
    {
        skipBytes(1);
        next4 = test4Bytes();
    }

    // BUG FIX: consume the start code BEFORE calling setParseState().
    // setParseState() checkpoints the current read position; previously the
    // skipBytes(4) came after it, so an interrupted-and-resumed parse would
    // restart *before* the start code while already in the PARSING_NAL_UNIT
    // state, and the parser could loop without making progress.
    skipBytes(4);
    setParseState(PARSING_NAL_UNIT);

//     // Compute this frame's presentation time:
//     usingSource()->computePresentationTime(fTotalTicksSinceLastTimeCode);
//     // This header forms part of the 'configuration' information:
//     usingSource()->appendToNewConfig(fStartOfFrame, curFrameSize());
}

// Saves NAL-unit bytes into the output frame until the next 4-byte start
// code (or end of stream) is reached, then reports the saved frame's size.
unsigned H264VideoStreamParser::parseNALUnit()
{
    std::cout << "H264VideoStreamPARSER: parseNALUnit" << std::endl;
    // Copy bytes into the current frame until 0x00000001 sits at the head
    // of the input again:
    for (u_int32_t next4 = test4Bytes(); next4 != 0x00000001; next4 = test4Bytes())
    {
        saveByte(get1Byte());
    }

    // Leave the start code unconsumed; the next parse() call handles it:
    setParseState(PARSING_START_SEQUENCE);

//     // Compute this frame's presentation time:
//     usingSource()->computePresentationTime(fTotalTicksSinceLastTimeCode);
//     // This header forms part of the 'configuration' information:
//     usingSource()->appendToNewConfig(fStartOfFrame, curFrameSize());

    // Number of bytes saved into the current frame:
    return curFrameSize();
}
_______________________________________________
live-devel mailing list
live-devel@lists.live555.com
http://lists.live555.com/mailman/listinfo/live-devel

Reply via email to