Hi everyone, I’m trying to capture a live stream from a USB camera with a Raspberry
Pi 2. I installed FFmpeg and I’m using this code to get the live stream from the
camera. I have two problems: (1) the framerate decreases because every ~22 frames
there is a big delay (~950 ms) — it is as if the buffer becomes full and is then
emptied; another possibility is that every ~22 frames the camera adjusts
some parameters (contrast, brightness, etc.). (2) According to the camera’s
specifications I should be able to capture frames at 30 fps at 640x480 resolution,
but, even without considering the delay, the difference between frames is 44 ms,
i.e. ~23 fps — why? The USB camera is an ELP-USB30W02M-L36; I use 640x480
resolution with the YUY2 format. Thanks a lot for your help.
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libswscale/swscale.h>
#include <libavutil/pixfmt.h>
#include <libavutil/imgutils.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/time.h>
/* Wall-clock timestamp split into seconds and microseconds
 * (mirrors struct timeval). */
typedef struct Timestamp {
	int seconds;
	int useconds;
} Timestamp;

#define ONE_SECOND_IN_USEC 1000000

/*
 * Return the absolute difference between two timestamps, in microseconds.
 *
 * BUG FIX: the original branch logic mishandled the microsecond borrow.
 * For example t1 = 2.000000, t2 = 1.999900 (true difference 100 us)
 * returned 999900, and for equal seconds with t1.useconds > t2.useconds
 * it returned a negative value. Computing the signed difference in one
 * 64-bit expression and taking its absolute value is correct for every
 * ordering of the two inputs.
 */
int difference_timestamp(Timestamp timestamp1, Timestamp timestamp2) {
	/* 64-bit to avoid overflow when the seconds gap exceeds ~35 minutes. */
	long long difference =
		(long long) (timestamp1.seconds - timestamp2.seconds) * ONE_SECOND_IN_USEC
		+ (timestamp1.useconds - timestamp2.useconds);
	if (difference < 0)
		difference = -difference;
	return (int) difference; /* in usec, as before */
}
/* Fill *timestamp with the current wall-clock time from gettimeofday(). */
void get_current_time(Timestamp* timestamp) {
	struct timeval now;

	gettimeofday(&now, NULL);
	timestamp->seconds = (int) now.tv_sec;
	timestamp->useconds = (int) now.tv_usec;
}
/*
 * Capture frames from a V4L2 USB camera (/dev/video0), decode the raw
 * YUYV422 frames, convert each one to BGR24, and print the inter-frame
 * arrival time in milliseconds.
 *
 * Returns 0 on success, or a distinct negative code identifying the
 * failing initialisation step (-12 .. -16).
 */
int main(int argc, char *argv[]) {
	avdevice_register_all();
	avcodec_register_all();
	const char *filenameSrc = "/dev/video0";
	AVCodecContext *pCodecCtx = NULL;
	AVFormatContext *pFormatCtx = avformat_alloc_context();
	AVCodec *pCodec = NULL;
	/* BUG FIX: "dshow" is the Windows DirectShow grabber and does not
	 * exist on Linux (the lookup silently returned NULL); /dev/video0
	 * needs the V4L2 input format. */
	AVInputFormat *iformat = av_find_input_format("video4linux2");
	AVFrame *pFrame = NULL, *pFrameRGB = NULL;
	AVCodecParameters *pCodecPrm = NULL;

	if (avformat_open_input(&pFormatCtx, filenameSrc, iformat, NULL) != 0)
		return -12;
	if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
		return -13;
	av_dump_format(pFormatCtx, 0, filenameSrc, 0);

	/* BUG FIX: videoStream was initialised to 1, so the "no video stream"
	 * check below could never trigger and a bogus index could be used. */
	int videoStream = -1;
	unsigned int i; /* nb_streams is unsigned */
	for (i = 0; i < pFormatCtx->nb_streams; i++) {
		if (pFormatCtx->streams[i]->codecpar->codec_type
				== AVMEDIA_TYPE_VIDEO) {
			videoStream = (int) i;
			break;
		}
	}
	if (videoStream == -1)
		return -14; /* no video stream found */
	/* BUG FIX: codecpar was dereferenced before the index was validated. */
	pCodecPrm = pFormatCtx->streams[videoStream]->codecpar;

	/* Find the decoder for the video stream. */
	pCodec = avcodec_find_decoder(pCodecPrm->codec_id);
	if (pCodec == NULL)
		return -15; /* codec not found */
	pCodecCtx = avcodec_alloc_context3(pCodec);
	pCodecCtx->bit_rate = pCodecPrm->bit_rate;
	pCodecCtx->width = pCodecPrm->width;
	pCodecCtx->height = pCodecPrm->height;
	pCodecCtx->pix_fmt = AV_PIX_FMT_YUYV422; /* camera delivers YUY2 */
	AVDictionary *codec_options = NULL;
	if (avcodec_open2(pCodecCtx, pCodec, &codec_options) < 0)
		return -16;

	pFrame = av_frame_alloc();
	pFrameRGB = av_frame_alloc();
	enum AVPixelFormat pFormat = AV_PIX_FMT_BGR24;
	int numBytes = av_image_get_buffer_size(pFormat, pCodecPrm->width,
			pCodecPrm->height, 1);
	uint8_t *buffer = (uint8_t *) av_malloc(numBytes);
	av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer,
			pFormat, pCodecPrm->width, pCodecPrm->height, 1);

	/* PERF FIX: build the scaler once instead of allocating and freeing
	 * a SwsContext for every single frame inside the capture loop. */
	struct SwsContext *img_convert_ctx = sws_getContext(
			pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
			pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_BGR24,
			SWS_BICUBIC, NULL, NULL, NULL);

	int res = 0, diff;
	AVPacket packet;
	Timestamp timestamp_prec, timestamp_curr;
	get_current_time(&timestamp_prec);
	/* NOTE(review): this sleep lets the driver queue ~10 s of frames
	 * before the first av_read_frame, which is exactly the kind of
	 * "burst then big delay" pattern reported — consider removing it. */
	sleep(10);
	while (res >= 0) {
		res = av_read_frame(pFormatCtx, &packet);
		/* BUG FIX: a failed read used to fall through and be processed
		 * once as if it were a valid packet. */
		if (res < 0)
			break;
		get_current_time(&timestamp_curr);
		diff = difference_timestamp(timestamp_prec, timestamp_curr)
				/ 1000; /* diff in ms */
		printf("T_prec:%d.%d\tT_curr:%d.%d\ndiff:%d\n",
				timestamp_prec.seconds,
				timestamp_prec.useconds, timestamp_curr.seconds,
				timestamp_curr.useconds, diff);
		fflush(stdout);
		if (packet.stream_index == videoStream) {
			/* BUG FIX: decoder return codes were ignored; only convert
			 * a frame the decoder actually produced (receive_frame can
			 * legitimately return EAGAIN). */
			if (avcodec_send_packet(pCodecCtx, &packet) == 0
					&& avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
				sws_scale(img_convert_ctx,
						(uint8_t const * const *) pFrame->data,
						pFrame->linesize, 0, pCodecCtx->height,
						pFrameRGB->data,
						pFrameRGB->linesize);
			}
		}
		/* BUG FIX: packets from non-video streams were never
		 * unreferenced (memory leak); unref every packet exactly once. */
		av_packet_unref(&packet);
		timestamp_prec = timestamp_curr;
	}

	/* BUG FIX: the scaler, the BGR buffer and the codec context were
	 * leaked; frames are released with av_frame_free, not av_free. */
	sws_freeContext(img_convert_ctx);
	av_free(buffer);
	av_frame_free(&pFrame);
	av_frame_free(&pFrameRGB);
	avcodec_free_context(&pCodecCtx);
	avformat_close_input(&pFormatCtx);
	return 0;
}
_______________________________________________
Libav-user mailing list
[email protected]
http://ffmpeg.org/mailman/listinfo/libav-user