In main.cpp
 
pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;

void *liveMedia(void *arg)
{
    pthread_mutex_lock(&mutex);

    H264VideoCamSource **videoSource = (H264VideoCamSource **) arg;

    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    // Create the RTSP server:
    //RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
    if (rtspServer == NULL) {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }

    char const* descriptionString = "Session streamed by \"testOnDemandRTSPServer\"";
    {
        char const* streamName = "h264ESVideoTest";
        ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName, descriptionString);
        sms->addSubsession(H264VideoCamServerMediaSubsession::createNew(*env, reuseFirstSource, videoSource));
        rtspServer->addServerMediaSession(sms);

        announceStream(rtspServer, sms, streamName);
    }

    if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
        *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
    }
    else {
        *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
    }

    /* Tell main thread that Live555 is ready to stream */
    if (pthread_cond_broadcast(&cond))
        *env << "Broadcast went wrong!\n";
    pthread_mutex_unlock(&mutex);

    env->taskScheduler().doEventLoop(); // does not return

    *env << "Fell off end of live555!\n";

    return NULL;
}
 
/* Called from the encoder (main) thread whenever a new NAL unit is
 * available.  Wakes the LIVE555 thread via triggerEvent(), which is the
 * one LIVE555 call documented as safe from outside the event-loop
 * thread.
 *
 * ourDevice: the streaming source, or NULL if no client has connected
 *            yet (the source is created lazily by the subsession).
 */
void signalNewFrameData(H264VideoCamSource* ourDevice)
{
    if (ourDevice == NULL) return; // source not created yet — nothing to signal

    // envir() and taskScheduler() return references, so the scheduler
    // can never be NULL; the only useful sanity check is that at least
    // one client is actually using the source.
    if (ourDevice->getReferenceCount() != 0) {
        TaskScheduler& ourScheduler = ourDevice->envir().taskScheduler();
        ourScheduler.triggerEvent(H264VideoCamSource::eventTriggerId, ourDevice);
    }
}
 
int main(int argc, char *argv[])
{
    H264VideoCamSource *videoSource = NULL;

    pthread_t thread;
    pthread_create(&thread, NULL, liveMedia, (void *) &videoSource);
    /* Wait for Live555 to be ready to stream */
    pthread_mutex_lock(&mutex);
    pthread_cond_wait(&cond, &mutex);
    pthread_mutex_unlock(&mutex);

    VideoCam videoCam(-1);
    h264encoder *encoder = new h264encoder(videoCam.get(CV_CAP_PROP_FRAME_WIDTH),
                                           videoCam.get(CV_CAP_PROP_FRAME_HEIGHT));

    int i_nals;
    x264_nal_t *nals;
    cv::Mat current_frame;
    while (1) {
        videoCam.read(current_frame);
        printf("reference = %i\n", videoSource->getReferenceCount());
        if (videoSource->getReferenceCount() != 0) {
            encoder->encode_frame(&current_frame, &i_nals, &nals);
            printf("encode frame\n");
            for (int i = 0; i < i_nals; i++) {
                printf("updateNAL[%i]\n", i);
                videoSource->updateNAL(&(nals[i]));
                printf("nals[i].i_type = %i\n", nals[i].i_type);
                signalNewFrameData(videoSource);
            }
        }
    }
    delete encoder;

    return 0;
}
 
In my H264VideoCamSource.cpp (DeviceSource.cpp)
 
/* Called from the encoder thread: remember the most recent NAL unit so
 * deliverFrame() (running in the LIVE555 thread) can send it.
 * NOTE(review): this keeps a single raw pointer, not a queue — if the
 * encoder calls updateNAL() again before the LIVE555 thread runs
 * deliverFrame(), the previous NAL unit is silently dropped.  The
 * pasted output (4 "nalUnit updated" per 1 "deliver data") shows this
 * happening; the units should be queued instead of overwritten. */
void H264VideoCamSource::updateNAL(x264_nal_t *nal)
{
    nalUnit = nal; //nalUnit is x264_nal_t *nalUnit pointer, member of H264VideoCamSource
    printf("nalUnit updated\n");
}
 
/* Runs in the LIVE555 thread (via the trigger fired by
 * signalNewFrameData): copies the most recently stored NAL unit into
 * the sink's buffer, stripping the Annex-B start code. */
void H264VideoCamSource::deliverFrame()
{
    if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet

    printf("deliver data\n");

    // Skip 4 prefix bytes, i.e. a 00 00 00 01 Annex-B start code.
    // NOTE(review): assumes every NAL carries a 4-byte start code —
    // x264 can emit 3-byte start codes for non-first NALs in an access
    // unit; confirm against the encoder's Annex-B settings.
    u_int8_t* newFrameDataStart = (u_int8_t*)(nalUnit->p_payload + 4);
    unsigned newFrameSize = (nalUnit->i_payload - 4);

    // Deliver the data here:
    ...
}
 
Output:
encode frame
updateNAL[0]
nalUnit updated
nals[i].i_type = 7
updateNAL[1]
nalUnit updated
nals[i].i_type = 8
updateNAL[2]
nalUnit updated
nals[i].i_type = 6
updateNAL[3]
nalUnit updated
nals[i].i_type = 5
deliver data
encode frame
updateNAL[0]
...
Getting the data happens in the main thread. The data is then passed to the H264VideoCamSource object, and signalNewFrameData is called.
Apparently, in my case no thread synchronization is necessary.
06.02.2012, 02:18, "Ross Finlayson" <finlay...@live555.com>:
I'm creating an RTSP server in a separate thread. My encoder (libx264) produces arrays of x264 NAL units. When the encoder processes the first frame it produces an array of 4 NAL units. Then I pass the units one at a time to my DeviceSource and call signalNewFrameData each time. But it seems the separate thread does not have time to process each unit before the main thread overwrites it with the next one.
That last sentence makes no sense.

If I need to synchronize threads, then please tell me where I need to do it.
No, you don't need to do any 'thread synchronization'.  Note that only one of your threads - the one that contains the RTSP server - is running LIVE555 code.  The only exception to this is that other thread(s) may call "TaskScheduler::triggerEvent()", to signal an event that the first thread (the LIVE555 thread) will then handle, within the LIVE555 event loop.
If this is what you are doing, then your code should work, provided, of course, that the LIVE555 thread is processing its event loop - i.e., has already called "doEventLoop()".


Ross Finlayson
Live Networks, Inc.
http://www.live555.com/

_______________________________________________
live-devel mailing list
live-devel@lists.live555.com
http://lists.live555.com/mailman/listinfo/live-devel

_______________________________________________
live-devel mailing list
live-devel@lists.live555.com
http://lists.live555.com/mailman/listinfo/live-devel

Reply via email to