I am sorry to have to bother everyone, but I have read through so much of
the archives and tried so many things, I would really like to get this
working so I need a little help if anyone has a moment.

 

I am trying to stream video from a platform that has a Sensoray 2250 encoder
installed on it. I have already verified that all of the hardware works and
that data produced from it can be streamed using the live library by doing
the following:

*         Created a process that grabs MPEG2 frames from the encoder and
dumps them to a named pipe

*         Modified testMPEG1or2VideoStreamer to look at this pipe for data,
and stream it to a specified IP address

*         Works fine, except that, as you can imagine, there is some major
delay in the video which I cannot have due to the nature of what this video
stream is being used for.

 

I decided to move on and write a FramedSource subclass that encapsulates my
encoder, and deliver this object directly to a MPEG1or2VideoRTPSink. I have
attached 3 files, my Encoder2250 class (subclass of FramedSource), its
header file, and my version of testMPEG1or2VideoStreamer (I have just been
changing the original test application).

 

The issue that is occurring is that after I start my version of
testMPEG1or2VideoStreamer I get the following output:

 

Beginning streaming...

Opening connection to Video Encoder

Setting video standard to NTSC

Setting video input

Setting Chip Settings

Beginning to read from file...

Right before scheduler...  

 

And that's it, it seems like it is sending data over the network at this
point, but it isn't. VLC doesn't see anything on an rtp connection on port
8888 and there isn't any network traffic. 

 

The thing that is very odd is that it seems like my implementation of
doGetNextFrame() is never executed, since this printf statement in its body
never shows up on the command line:

 

printf("Getting Frame...\n");

 

I don't understand why this is since all printf's from my encoder class's
constructor show up and no error messages show up from the live library. Is
the MPEG1or2VideoRTPSink not executing/seeing my implementation of
doGetNextFrame()? Is it some other problem? Thanks so much everyone.

 

-Jordan

Attachment: Encoder2250.hh
Description: Binary data


#include "Encoder2250.hh"
#include "s2250mid.h"

#include <stdio.h>    // printf
#include <string.h>   // memmove
#include <sys/time.h> // gettimeofday (presentation timestamps)
#include <unistd.h>   // usleep


#define BITRATE 800000
#define MY_ENCODER_BOARD 0
#define NTSC             0
#define VIDEO_BUFF_SIZE 128000

// Factory function: build a new encoder source attached to 'env'.
// (live555 convention: objects are created via createNew(), not directly.)
Encoder2250*
Encoder2250::createNew(UsageEnvironment& env,
                       EncoderParameters params) {
  Encoder2250* newSource = new Encoder2250(env, params);
  return newSource;
}

Encoder2250::Encoder2250(UsageEnvironment& env,
                         EncoderParameters params)
  : FramedSource(env), fParams(params) {
  // Open and configure the Sensoray 2250 encoder board, then start its
  // stream.  Each setup step is checked; on failure we close the board and
  // return immediately.  (The original code closed the board on error but
  // then *kept configuring* the now-closed board — every error path now
  // returns early.)

  printf("Opening connection to Video Encoder\n");
  if (SN_OpenBoard(MY_ENCODER_BOARD) != 0) {
    printf("Error opening connection to Encoder board\n"); // typo fixed
    SN_CloseBoard(MY_ENCODER_BOARD);
    return;
  }

  printf("Setting video standard to NTSC\n");
  if (SN_SetVideoSystem(MY_ENCODER_BOARD, NTSC) < 0) {
    printf("Error Setting Video Standard to NTSC\n");
    SN_CloseBoard(MY_ENCODER_BOARD);
    return;
  }

  printf("Setting video input\n");
  if (SN_SetVideoSource(MY_ENCODER_BOARD, 1) != 0) {
    printf("Error Setting Video Input Type\n");
    SN_CloseBoard(MY_ENCODER_BOARD);
    return;
  }

  printf("Setting Chip Settings \n");
  // Configure the chip for MPEG2, full size, NTSC, BITRATE bits/s.
  // (The original comment claimed MOTIONJPEG; the code actually sets MPEG2.)
  if (SN_SetBasicChipSettings(MY_ENCODER_BOARD, MPEG2, 1,
                              MID2250_VIDSYS_NTSC, BITRATE, 1) != 0) {
    printf("Error Setting up Chip\r");
    SN_CloseBoard(MY_ENCODER_BOARD);
    return;
  }

  // Begin capturing: after this, frames can be pulled with SN_GetOneFrame().
  SN_StartStream(MY_ENCODER_BOARD);
}

// Destructor.  NOTE(review): this does not stop the stream or close the
// board; hardware shutdown is done by the SIGINT handler in the test
// program.  Consider mirroring the constructor's cleanup here.
Encoder2250::~Encoder2250(){}
    
        
        // File-scope scratch state shared by doGetNextFrame()/deliverFrame().
        // NOTE(review): these would be better as private members of
        // Encoder2250 — as globals they prevent two encoder instances from
        // coexisting.  'PresTime' is never used in this file — TODO confirm
        // it is not referenced elsewhere, then remove it.
        TFrameInfo FrameInfo;
    struct timeval PresTime;

 void Encoder2250::doGetNextFrame() {

  // Arrange here for our "deliverFrame" member function to be called
  // when the next frame of data becomes available from the device.
  // This must be done in a non-blocking fashion - i.e., so that we
  // return immediately from this function even if no data is
  // currently available.
  //
  // If the device can be implemented as a readable socket, then one easy
  // way to do this is using a call to
  //     envir().taskScheduler().turnOnBackgroundReadHandling( ... )
  // (See examples of this call in the "liveMedia" directory.)

  // If, for some reason, the source device stops being readable
  // (e.g., it gets closed), then you do the following:
 // if (0 /* the source stops being readable */) {
  //  handleClosure(this);
  //  return;
  //}

        //unsigned char buffer[VIDEO_BUFF_SIZE];
        
        fMaxSize = VIDEO_BUFF_SIZE; //deliverFrame()in param
    printf("Getting Frame...\n");

                int i = 0, Vid = 0; 
         while( i < 10000)
         {
                        Vid = SN_GetOneFrame( 0, buffer, VIDEO_BUFF_SIZE, 
&FrameInfo);
                 if( Vid == 0)
                         break;
        i++;
        usleep(1);
         }
     if( Vid != 0)
       {
             printf("failed to get one frame\n");
             SN_StopStream(MY_ENCODER_BOARD);
             SN_CloseBoard(MY_ENCODER_BOARD);
             
           }
        
        CallBack(buffer,FrameInfo.VideoLength);
        
}

void Encoder2250::deliverFrame(unsigned char bufferIn[], size_t len) {
// This would be called when new frame data is available from the device.
  // This function should deliver the next frame of data from the device,
  // using the following parameters (class members):
  // 'in' parameters (these should *not* be modified by this function):
  //     fTo: The frame data is copied to this address.
  //         (Note that the variable "fTo" is *not* modified.  Instead,
  //          the frame data is copied to the address pointed to by "fTo".)
  //     fMaxSize: This is the maximum number of bytes that can be copied
  //         (If the actual frame is larger than this, then it should
  //          be truncated, and "fNumTruncatedBytes" set accordingly.)
  // 'out' parameters (these are modified by this function):
  //     fFrameSize: Should be set to the delivered frame size (<= fMaxSize).
  //     fNumTruncatedBytes: Should be set iff the delivered frame would have 
been
  //         bigger than "fMaxSize", in which case it's set to the number of 
bytes
  //         that have been omitted.
  //     fPresentationTime: Should be set to the frame's presentation time
  //         (seconds, microseconds).
  //     fDurationInMicroseconds: Should be set to the frame's duration, if 
known.
  
  
  //if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet

  // Deliver the data here:
        //SN_GetOneFrame( 0, buffer, VIDEO_BUFF_SIZE, &FrameInfo);
        
        
        printf("Delivering Bytes...\n");
        memmove(fTo, bufferIn, len); //deliverFrame()in param 
        
        
        fFrameSize = len;
        
        nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
                                (TaskFunc*)FramedSource::afterGetting, this);
                                
        
  // After delivering the data, inform the reader that it is now available:
  //FramedSource::afterGetting(this);

}

// Trampoline used by doGetNextFrame(): forwards a captured frame buffer
// straight through to deliverFrame().
void Encoder2250::CallBack(unsigned char bufferIn[], size_t len) {
  deliverFrame(bufferIn, len);
}
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
**********/
// Copyright (c) 1996-2007, Live Networks, Inc.  All rights reserved
// A test program that reads a MPEG-1 or 2 Video Elementary Stream file,
// and streams it using RTP
// main program

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include "./include/Encoder2250.hh"
#include "s2250mid.h"
#include <signal.h>

void SIGINTHandler(int signal_number);

// Uncomment the following if the input file is a MPEG Program Stream
// rather than a MPEG Video Elementary Stream
//#define SOURCE_IS_PROGRAM_STREAM 1

// To stream using "source-specific multicast" (SSM), uncomment the following:
//#define USE_SSM 1
#ifdef USE_SSM
Boolean const isSSM = True;
#else
Boolean const isSSM = False;
#endif

// To set up an internal RTSP server, uncomment the following:
//#define IMPLEMENT_RTSP_SERVER 1
// (Note that this RTSP server works for multicast only)

// To stream *only* MPEG "I" frames (e.g., to reduce network bandwidth),
// change the following "False" to "True":
Boolean iFramesOnly = False;

UsageEnvironment* env;
char const* inputFileName = "/pipes/test.mpg";
#ifdef SOURCE_IS_PROGRAM_STREAM
MPEG1or2Demux* mpegDemux;
#endif
MediaSource* videoSource;
MPEG1or2VideoRTPSink* videoSink;

void play(); // forward

int main(int argc, char** argv) {
  // Install a SIGINT handler so CTRL-C shuts the encoder hardware down
  // cleanly before exiting.
  struct sigaction sa;
  memset(&sa, 0, sizeof(sa));
  sa.sa_handler = &SIGINTHandler;
  sigaction(SIGINT, &sa, NULL);

  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Create 'groupsocks' for RTP and RTCP.
  // Fix: a string literal must bind to 'char const*', not 'char*'
  // (the non-const conversion is deprecated and rejected by modern C++).
  // NOTE(review): 192.168.1.101 is a *unicast* destination; the stock
  // multicast defaults are preserved in the trailing comments.
  char const* destinationAddressStr
#ifdef USE_SSM
    = "192.168.1.101"; // stock SSM default: "232.255.42.42"
#else
    = "192.168.1.101"; // stock multicast default: "239.255.42.42"
#endif
  const unsigned short rtpPortNum = 8888;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 7; // low, in case routers don't admin scope

  struct in_addr destinationAddress;
  destinationAddress.s_addr = our_inet_addr(destinationAddressStr);
  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
#ifdef USE_SSM
  rtpGroupsock.multicastSendOnly();
  rtcpGroupsock.multicastSendOnly();
#endif

  // Create a 'MPEG Video RTP' sink from the RTP 'groupsock':
  videoSink = MPEG1or2VideoRTPSink::createNew(*env, &rtpGroupsock);

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 4500; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
#ifdef IMPLEMENT_RTSP_SERVER
  RTCPInstance* rtcp =
#endif
    RTCPInstance::createNew(*env, &rtcpGroupsock,
                              estimatedSessionBandwidth, CNAME,
                              videoSink, NULL /* we're a server */, isSSM);
  // Note: This starts RTCP running automatically

#ifdef IMPLEMENT_RTSP_SERVER
  RTSPServer* rtspServer = RTSPServer::createNew(*env);
  // Note that this (attempts to) start a server on the default RTSP server
  // port: 554.  To use a different port number, add it as an extra
  // (optional) parameter to the "RTSPServer::createNew()" call above.
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  ServerMediaSession* sms
    = ServerMediaSession::createNew(*env, "testStream", NULL,
                   "Session streamed by \"testMPEG1or2VideoStreamer\"",
                                           isSSM);
  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;
#endif

  // Finally, start the streaming:
  *env << "Beginning streaming...\n";
  play();

  printf("Right before scheduler...\n");

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}

// Invoked by the sink when the current source signals end-of-stream;
// closes the source and immediately restarts streaming.
void afterPlaying(void* /*clientData*/) {
  *env << "...done reading from file\n";

  // Closing the source also releases whatever input it was reading from.
  Medium::close(videoSource);
#ifdef SOURCE_IS_PROGRAM_STREAM
  Medium::close(mpegDemux);
#endif

  // Loop: start playing again from the top.
  play();
}

void play() {

        EncoderParameters params;

  // Open the input file as a 'byte-stream file source':
  /*ByteStreamFileSource* fileSource
    = ByteStreamFileSource::createNew(*env, inputFileName);
  if (fileSource == NULL) {
    *env << "Unable to open file \"" << inputFileName
         << "\" as a byte-stream file source\n";
    exit(1);
      }*/
  
  Encoder2250* device = Encoder2250::createNew(*env, params);
  if(device == NULL)
  {
        printf("Somethings wrong with device");
         //exit(1);
  }
  
  //FramedSource* videoES;
#ifdef SOURCE_IS_PROGRAM_STREAM
  // We must demultiplex a Video Elementary Stream from the input source:
 // mpegDemux = MPEG1or2Demux::createNew(*env, fileSource);
 // videoES = mpegDemux->newVideoStream();
#else
  // The input source is assumed to already be a Video Elementary Stream:
  //videoES = device;
#endif

  // Create a framer for the Video Elementary Stream:
  //videoSource //= device;
  //  = MPEG1or2VideoStreamFramer::createNew(*env, videoES, iFramesOnly);
  
  // Finally, start playing:
  *env << "Beginning to read from file...\n";
  videoSink->startPlaying(*device, afterPlaying, videoSink);
}

// CTRL-C handler: stop the encoder stream and release the board before
// exiting, so the hardware is left in a usable state for the next run.
// NOTE(review): printf(), exit() and (presumably) the SN_* calls are not
// async-signal-safe, so calling them from a signal handler is formally
// undefined behavior; a safer design sets a volatile sig_atomic_t flag
// that the event loop checks.  TODO confirm whether the SN_* library
// documents signal-context use.
void SIGINTHandler (int signal_number)
{
printf("Closing Hardware. Exiting.\r\n");       
SN_StopStream(0);
SN_CloseBoard(0);

exit(0);  
}

_______________________________________________
live-devel mailing list
live-devel@lists.live555.com
http://lists.live555.com/mailman/listinfo/live-devel

Reply via email to