Hi

I'm trying to stream single images. My application receives IplImages
and converts them to QImages for further manipulation. After that I
simply want to stream these images. I'm not very familiar with video
encoding or streaming.
I tried to implement a DeviceSource like in an example that I found on
the web, but I still do not receive any images when I try to watch the
stream.
It would be really nice if you could help me with that problem.
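
For reference, the IplImage-to-QImage conversion is along these lines (a
minimal sketch assuming 3-channel 8-bit BGR input; the helper name is just
illustrative):

#include <QtGui/QImage>
#include "cv.h"

// Hypothetical helper: deep-copies a 3-channel 8-bit BGR IplImage into a QImage.
QImage iplToQImage(const IplImage* ipl)
{
        // Wrap the IplImage pixel data; widthStep accounts for row padding.
        QImage wrapped(reinterpret_cast<const uchar*>(ipl->imageData),
                       ipl->width, ipl->height, ipl->widthStep,
                       QImage::Format_RGB888);
        // OpenCV stores BGR while Format_RGB888 expects RGB; rgbSwapped()
        // swaps the channels and returns a deep copy, detaching from the
        // IplImage buffer.
        return wrapped.rgbSwapped();
}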

With kind regards
Matthias


/// StreamingServer.cpp
/// author: Traub Matthias

#include "StreamingServer.h"

#include <iostream>

StreamingServer::StreamingServer() :
        menv(NULL),
        minputFileName("test.m4v"),
        mvideoSource(NULL),
        mvideoSink(NULL),
        mfileSource(NULL) {

}

StreamingServer::~StreamingServer() {

}

void StreamingServer::run() {
        std::cout << "SERVER STARTET AS THREAD!" << std::endl;

        // Begin by setting up our usage environment:
        TaskScheduler* scheduler = BasicTaskScheduler::createNew();
        menv = BasicUsageEnvironment::createNew(*scheduler);

        // Create 'groupsocks' for RTP and RTCP:
        struct in_addr destinationAddress;
        destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*menv);

        const unsigned short rtpPortNum = 18888;
        const unsigned short rtcpPortNum = rtpPortNum+1;
        const unsigned char ttl = 255;

        const Port rtpPort(rtpPortNum);
        const Port rtcpPort(rtcpPortNum);

        Groupsock rtpGroupsock(*menv, destinationAddress, rtpPort, ttl);
        rtpGroupsock.multicastSendOnly(); // we're a SSM source
        Groupsock rtcpGroupsock(*menv, destinationAddress, rtcpPort, ttl);
        rtcpGroupsock.multicastSendOnly(); // we're a SSM source

        // Create a 'MPEG-4 Video RTP' sink from the RTP 'groupsock':
        mvideoSink = MPEG4ESVideoRTPSink::createNew(*menv, &rtpGroupsock, 96);

        // Create (and start) a 'RTCP instance' for this RTP sink:
        const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
        const unsigned maxCNAMElen = 100;
        unsigned char CNAME[maxCNAMElen+1];
        gethostname((char*)CNAME, maxCNAMElen);
        CNAME[maxCNAMElen] = '\0'; // just in case
        RTCPInstance* rtcp
        = RTCPInstance::createNew(*menv, &rtcpGroupsock,
                                estimatedSessionBandwidth, CNAME,
                                mvideoSink, NULL /* we're a server */,
                                True /* we're a SSM source */);
        // Note: This starts RTCP running automatically

        RTSPServer* rtspServer = RTSPServer::createNew(*menv, 8554);
        if (rtspServer == NULL)
        {
                *menv << "Failed to create RTSP server: " << 
menv->getResultMsg() <<
"\n";
                exit(1);
        }
        ServerMediaSession* sms = ServerMediaSession::createNew(*menv,
                        "testStream", minputFileName,
                        "Session streamed by \"testMPEG4VideoStreamer\"", True /*SSM*/);
        sms->addSubsession(PassiveServerMediaSubsession::createNew(*mvideoSink, rtcp));
        rtspServer->addServerMediaSession(sms);

        char* url = rtspServer->rtspURL(sms);
        *menv << "Play this stream using the URL \"" << url << "\"\n";
        delete[] url;

        // Start the streaming:
        *menv << "Beginning streaming...\n";

        // Open the input source and save it in the member variable
        DeviceParameters params;
        mfileSource = DeviceSourceImage::createNew(*menv, params);
        if (mfileSource == NULL)
        {
                *menv << "Unable to open the device source\n";
                exit(1);
        }

        play();

        menv->taskScheduler().doEventLoop(); // does not return

}
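
// Note: once running, the stream can be checked with the URL that is
// printed above, e.g. "openRTSP rtsp://<server-ip>:8554/testStream"
// (openRTSP ships with live555) or by opening that URL in VLC.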


void StreamingServer::play() {
        FramedSource* videoES = mfileSource;

        // Create a framer for the Video Elementary Stream:
        mvideoSource = MPEG4VideoStreamFramer::createNew(*menv, videoES);

        // Finally, start playing:
        *menv << "Beginning to read from the device source...\n";
        mvideoSink->startPlaying(*mvideoSource, NULL, mvideoSink);
}

void StreamingServer::receiveFrame(QImage img)
{
        // Forward the frame to the DeviceSourceImage
        if (mfileSource != NULL) // run() may not have created the source yet
                mfileSource->receiveFrame(img);
}
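
For completeness, the server would be driven roughly like this (a
hypothetical sketch for illustration; grabIplImage() is a placeholder for
the real capture source and iplToQImage() is the conversion sketch from
above):

#include <windows.h>
#include "StreamingServer.h"

int main()
{
        StreamingServer server;
        server.start(); // QThread::start() runs StreamingServer::run() in its own thread

        for (;;) {
                IplImage* ipl = grabIplImage(); // placeholder capture call
                QImage img = iplToQImage(ipl);  // convert as sketched above
                server.receiveFrame(img);       // hand the frame to the streamer
                cvReleaseImage(&ipl);
                Sleep(40);                      // crude ~25 fps pacing
        }
        return 0;
}

Note that receiveFrame() then runs on a different thread than the live555
event loop, so access to the image members is not synchronized; a mutex
would probably be safer.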

-----------------------------------------------------------------------------------------------------
/// StreamingServer.h
/// author: Traub Matthias

#ifndef _STREAMING_SERVER_H
#define _STREAMING_SERVER_H

#include <windows.h>
#include <stdio.h>
#include <stdlib.h>

#include "BasicUsageEnvironment.hh"
#include "DeviceSourceImage.h"
#include "GroupsockHelper.hh"
#include "liveMedia.hh"

#include "cv.h"
#include <QtCore/QThread>
#include <QtGui/QImage>

class StreamingServer : public QThread {
public:
        StreamingServer();
        ~StreamingServer();

        void receiveFrame(QImage img);

        virtual void run();

private:
        UsageEnvironment* menv;
        char const* minputFileName;
        MPEG4VideoStreamFramer* mvideoSource;
        RTPSink* mvideoSink;
        DeviceSourceImage* mfileSource;

        void play();

};

#endif

------------------------------------------------------------------------------------------------
/// DeviceSourceImage.cpp
/// author: Traub Matthias

#include "DeviceSourceImage.h"
#include <GroupsockHelper.hh> // for "gettimeofday()"

#include <QtCore/QBuffer>
#include <iostream>
#include <stdio.h>
#include <io.h>


DeviceSourceImage* DeviceSourceImage::createNew(UsageEnvironment& env,
                                                DeviceParameters params)
{
        return new DeviceSourceImage(env, params);
}

DeviceSourceImage::DeviceSourceImage(UsageEnvironment& env,
                                     DeviceParameters params)
        : DeviceSource(env, params)
{
        std::cout << "DeviceSourceImage::DeviceSourceImage() called!" <<
std::endl;
        mJpegBufferUpToDate = false;
}

DeviceSourceImage::~DeviceSourceImage()
{
        std::cout << "DeviceSourceImage::~DeviceSourceImage() called!" <<
std::endl;
}

void DeviceSourceImage::doGetNextFrame()
{
        // This function is called (by our 'downstream' object) when it asks for new data.
        std::cout << "DeviceSourceImage::doGetNextFrame() called!" << std::endl;

        deliverFrame();
}

void DeviceSourceImage::receiveFrame(QImage img)
{
        // Note: this is called from the application thread, not from the
        // live555 event loop, so access to mImg is not synchronized here.
        mImg = img;
        mJpegBufferUpToDate = false;
}

void DeviceSourceImage::deliverFrame() {
        // This function is called when new frame data is available from the device.
        // We deliver this data by copying it to the 'downstream' object, using the
        // following parameters (class members):
        // 'in' parameters (these should *not* be modified by this function):
        //     fTo: The frame data is copied to this address.
        //         (Note that the variable "fTo" is *not* modified.  Instead,
        //          the frame data is copied to the address pointed to by "fTo".)
        //     fMaxSize: This is the maximum number of bytes that can be copied.
        //         (If the actual frame is larger than this, then it should
        //          be truncated, and "fNumTruncatedBytes" set accordingly.)
        // 'out' parameters (these are modified by this function):
        //     fFrameSize: Should be set to the delivered frame size (<= fMaxSize).
        //     fNumTruncatedBytes: Should be set iff the delivered frame would have
        //         been bigger than "fMaxSize", in which case it's set to the number
        //         of bytes that have been omitted.
        //     fPresentationTime: Should be set to the frame's presentation time
        //         (seconds, microseconds).  This time must be aligned with
        //         'wall-clock time' - i.e., the time that you would get by calling
        //         "gettimeofday()".
        //     fDurationInMicroseconds: Should be set to the frame's duration, if
        //         known.  If, however, the device is a 'live source' (e.g., encoded
        //         from a camera or microphone), then we probably don't need to set
        //         this variable, because - in this case - data will never arrive
        //         'early'.
        // Note the code below.

        // Re-encode only if the JPEG buffer is out of date and an image is available
        if (!mJpegBufferUpToDate && !mImg.isNull())
        {
                mJpegBuffer.clear(); // QBuffer does not truncate on open, so drop stale data first
                QBuffer buffer(&mJpegBuffer);
                buffer.open(QIODevice::WriteOnly);
                mImg.save(&buffer, "JPG", 15); // writes the image into the buffer in JPG format
                buffer.close();
                mJpegBufferUpToDate = true;
        }

        fFrameSize = 0;
        static int framesOk = 0;
        static int framesSkipped = 0;
        gettimeofday(&fPresentationTime, 0);

        // Check if the buffer is filled and fits into the output buffer
        if (mJpegBuffer.size() && (unsigned)mJpegBuffer.size() <= fMaxSize)
        {
                // Copy the image data into the output buffer (fTo)
                fFrameSize = mJpegBuffer.size();
                memcpy_s(fTo, fMaxSize, mJpegBuffer.constData(), mJpegBuffer.size());
                ++framesOk;
        }
        // Frame is larger than the output buffer: truncate and report how much was cut
        else if (mJpegBuffer.size())
        {
                fFrameSize = fMaxSize;
                fNumTruncatedBytes = mJpegBuffer.size() - fMaxSize;
                memcpy_s(fTo, fMaxSize, mJpegBuffer.constData(), fMaxSize);
                ++framesSkipped;
        }

        // just for testing of performance changes
        Sleep(1);

        // Finished writing into the output buffer; schedule the delivery notification
        nextTask() = envir().taskScheduler().scheduleDelayedTask(3000,
                        (TaskFunc*)FramedSource::afterGetting, this);

}
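
// Aside: the "DeviceSource.cpp" template that ships with live555 ends its
// deliverFrame() by informing the downstream reader directly, i.e.
//
//         FramedSource::afterGetting(this);
//
// without scheduling a delayed task, so the artificial 3 ms delay above is
// not strictly needed.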

---------------------------------------------------------------------------------------------
/// DeviceSourceImage.h
/// author: Traub Matthias

#ifndef _DEVICE_SOURCE_IMAGE_HH
#define _DEVICE_SOURCE_IMAGE_HH

#include "DeviceSource.hh"

#include <QtGui/QImage>
#include <QtCore/QByteArray>

#include <time.h>

class DeviceSourceImage: public DeviceSource {

public:
  static DeviceSourceImage* createNew(UsageEnvironment& env,
                                      DeviceParameters params);
  void receiveFrame(QImage img);

protected:
  DeviceSourceImage(UsageEnvironment& env, DeviceParameters params);
  // called only by createNew(), or by subclass constructors
  virtual ~DeviceSourceImage();

private:
  // redefined virtual functions:
  virtual void doGetNextFrame();

private:
  void deliverFrame();

private:
  QImage mImg;
  QByteArray mJpegBuffer;
  bool mJpegBufferUpToDate;
};

#endif


