Mirror of https://git.freebsd.org/ports.git (synced 2025-07-18 01:39:16 -04:00)
o Update the Brooktree (bktr) capture code to the latest version:
  1) Warn if the tuner device cannot be opened for write.
  2) Add a two-stage sync-slip recovery mechanism: stage 1 runs asynchronously
     to catch up, stage 2 injects a blank frame.
  3) Update to a consistent indent style (will minimize future diffs).
o Bump PORTREVISION.

Submitted by:	"Steve O'Hara-Smith" <steve@sohara.org>
parent  bab9877d31
commit  b868b85dad

Notes (svn2git, 2021-03-31 03:12:20 +00:00):
    svn path=/head/; revision=69495
8 changed files with 636 additions and 564 deletions
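The heart of the change is the sync-slip recovery in the bktr grab code. As a reading aid, here is a minimal, self-contained sketch (not part of the commit) of that mechanism: the signal handler counts frame-ready signals that arrive while no read is waiting; the next read then drains that count without blocking (stage 1, catching up asynchronously), and if the remaining slip count is odd a blank frame is substituted for the converted capture buffer (stage 2). The helper name next_frame and the raise() call that stands in for the driver's frame-ready SIGUSR1 are illustrative only.

/* Sketch of the two-stage sync-slip recovery (illustrative, not from the commit). */
#include <signal.h>
#include <stdio.h>
#include <string.h>

static volatile sig_atomic_t signal_expected = 0;
static volatile sig_atomic_t unexpected_signals = 0;

/* Frame-ready handler: count signals that arrive while nobody is waiting. */
static void catch_frame_signal(int sig)
{
    (void)sig;
    if (!signal_expected)
        unexpected_signals++;
    signal_expected = 0;
}

/* Produce one frame's worth of data into buf. */
static void next_frame(unsigned char *buf, size_t len)
{
    sigset_t msig;

    if (unexpected_signals > 0) {
        unexpected_signals--;          /* stage 1: we are behind, skip the wait */
    } else {
        signal_expected = 1;           /* normal case: block until the next frame */
        sigemptyset(&msig);
        sigsuspend(&msig);
    }

    if (unexpected_signals & 1) {
        memset(buf, 0, len);           /* stage 2: odd slip remaining, emit a blank frame */
    } else {
        /* normal path: convert the capture buffer into buf (omitted here) */
    }
}

int main(void)
{
    struct sigaction act;
    unsigned char frame[64];

    memset(&act, 0, sizeof(act));
    sigemptyset(&act.sa_mask);
    act.sa_handler = catch_frame_signal;
    sigaction(SIGUSR1, &act, NULL);

    raise(SIGUSR1);                    /* stand-in for the driver's frame-ready signal */
    next_frame(frame, sizeof(frame));  /* takes the stage-1 catch-up path */
    printf("pending slips: %d\n", (int)unexpected_signals);
    return 0;
}

In the patch itself the same bookkeeping lives in catchsignal() and grab_read_packet(), with the bktr driver delivering SIGUSR1 for each completed frame via the METEORSSIGNAL ioctl.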
@@ -7,7 +7,7 @@

 PORTNAME=	ffmpeg
 PORTVERSION=	0.4.5
-PORTREVISION=	3
+PORTREVISION=	4
 CATEGORIES=	graphics
 MASTER_SITES=	${MASTER_SITE_LOCAL}
 MASTER_SITE_SUBDIR=	lioux

@@ -33,12 +33,12 @@
 #include <signal.h>

 typedef struct {
     int fd;
     int tuner_fd;
     int frame_format; /* see VIDEO_PALETTE_xxx */
     int width, height;
     int frame_rate;
     int frame_size;
 } VideoData;

 const char *video_device = "/dev/bktr0";

@@ -60,185 +60,203 @@ const char *video_device = "/dev/bktr0";

 static UINT8 *video_buf;

+static int signal_expected = 0;
+static int unexpected_signals = 0;
+
 static void catchsignal(int signal)
 {
-    return;
+    if (!signal_expected) unexpected_signals++;
+    signal_expected = 0;
+    return;
 }

 static int bktr_init(AVFormatContext *s1, AVFormatParameters *ap)
 {
     VideoData *s = s1->priv_data;
     int width, height;
     int video_fd;
     int format = VIDEO_FORMAT;
     struct meteor_geomet geo;
     int c;
     struct sigaction act,old;

     memset(&act,0,sizeof(act));
     sigemptyset(&act.sa_mask);
     act.sa_handler = catchsignal;
     sigaction(SIGUSR1,&act,&old);
     sigaction(SIGALRM,&act,&old);

     width = s->width;
     height = s->height;

     s->tuner_fd = open ("/dev/tuner0", O_RDWR);
+    if (s->tuner_fd < 0) {
+        perror("Warning: Tuner not opened continuing");
+    }

     video_fd = open(video_device, O_RDWR);
     if (video_fd < 0) {
         perror(video_device);
         return -EIO;
     }
     s->fd=video_fd;
     geo.rows = height;
     geo.columns = width;
     geo.frames = 1;
-    geo.oformat = METEOR_GEO_YUV_PACKED; // RGB
-    if ((format == PAL) && (height <= (PAL_HEIGHT/2)))
-        geo.oformat |= METEOR_GEO_ODD_ONLY;
-    if ((format == NTSC) && (height <= (NTSC_HEIGHT/2)))
-        geo.oformat |= METEOR_GEO_ODD_ONLY;
+    geo.oformat = METEOR_GEO_YUV_PACKED;

-    if (ioctl(video_fd, METEORSETGEO, &geo) < 0) {
-        perror ("METEORSETGEO");
-        return -EIO;
-    }
+    if ((format == PAL) && (height <= (PAL_HEIGHT/2)))
+        geo.oformat |= METEOR_GEO_EVEN_ONLY;
+    if ((format == NTSC) && (height <= (NTSC_HEIGHT/2)))
+        geo.oformat |= METEOR_GEO_EVEN_ONLY;

-    switch (format) {
-    case PAL: c = METEOR_FMT_PAL; break;
-    case NTSC: c = METEOR_FMT_NTSC; break;
-    default: c = METEOR_FMT_PAL; break;
-    }
+    if (ioctl(video_fd, METEORSETGEO, &geo) < 0) {
+        perror ("METEORSETGEO");
+        return -EIO;
+    }

-    if (ioctl(video_fd, METEORSFMT, &c) < 0) {
-        perror ("METEORSFMT");
-        return -EIO;
-    }
+    switch (format) {
+    case PAL: c = METEOR_FMT_PAL; break;
+    case NTSC: c = METEOR_FMT_NTSC; break;
+    default: c = METEOR_FMT_PAL; break;
+    }

-    c = VIDEO_INPUT;
-    if (ioctl(video_fd, METEORSINPUT, &c) < 0) {
-        perror ("METEORSINPUT");
-        return -EIO;
-    }
-    video_buf = mmap((caddr_t)0, width*height*2, PROT_READ, MAP_SHARED, // RGB
-                     video_fd, (off_t) 0);
-    if (video_buf == MAP_FAILED) {
-        perror ("mmap");
-        return -EIO;
-    }
-    c = METEOR_CAP_CONTINOUS;
-    ioctl(s->fd, METEORCAPTUR, &c);
-    c = SIGUSR1;
-    ioctl (s->fd, METEORSSIGNAL, &c);
-    return 0;
+    if (ioctl(video_fd, METEORSFMT, &c) < 0) {
+        perror ("METEORSFMT");
+        return -EIO;
+    }
+
+    c = VIDEO_INPUT;
+    if (ioctl(video_fd, METEORSINPUT, &c) < 0) {
+        perror ("METEORSINPUT");
+        return -EIO;
+    }
+    video_buf = mmap((caddr_t)0, width*height*2, PROT_READ, MAP_SHARED,
+                     video_fd, (off_t) 0);
+    if (video_buf == MAP_FAILED) {
+        perror ("mmap");
+        return -EIO;
+    }
+    c = METEOR_CAP_CONTINOUS;
+    ioctl(s->fd, METEORCAPTUR, &c);
+    c = SIGUSR1;
+    signal_expected = 1;
+    ioctl (s->fd, METEORSSIGNAL, &c);
+    return 0;
 }

 static void bf_yuv422_to_yuv420p(UINT8 *lum, UINT8 *cb, UINT8 *cr,
                                  UINT8 *src, int width, int height)
 {
     int x, y;
     UINT8 *p = src;
     for(y=0;y<height;y+=2) {
         for(x=0;x<width;x+=2) {
             lum[0] = p[1];
             cb[0] = p[0];
             lum[1] = p[3];
             cr[0] = p[2];
             p += 4;
             lum += 2;
             cb++;
             cr++;
         }
         for(x=0;x<width;x+=2) {
             lum[0] = p[1];
             lum[1] = p[3];
             p += 4;
             lum += 2;
         }
     }
 }

 /* note: we support only one picture read at a time */
 static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
 {
     VideoData *s = s1->priv_data;
     int size, halfsize;
     sigset_t msig;
     UINT8 *lum, *cb, *cr;

     size = s->width * s->height;
     halfsize = size << 1;
     if (av_new_packet(pkt, size + halfsize) < 0)
         return -EIO;

-    sigemptyset (&msig);
-    sigsuspend (&msig);
+    if (unexpected_signals > 0) {
+        unexpected_signals--;
+    } else {
+        signal_expected = 1;
+        sigemptyset (&msig);
+        sigsuspend (&msig);
+    }

-    lum = pkt->data;
-    cb = lum + size;
-    cr = cb + size/4;
+    if (unexpected_signals & 1) {
+        bzero (pkt->data, size + halfsize);
+    } else {
+        lum = pkt->data;
+        cb = lum + size;
+        cr = cb + size/4;

     bf_yuv422_to_yuv420p (lum, cb, cr, video_buf, s->width, s->height);
+    }
     return size + halfsize;
 }

 static int grab_read_header (AVFormatContext *s1, AVFormatParameters *ap)
 {
     VideoData *s = s1->priv_data;
     AVStream *st;
     int width, height;
     int frame_rate;

     if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
         return -1;

     width = ap->width;
     height = ap->height;
     frame_rate = ap->frame_rate;
     st = av_new_stream(s1, 0);
     if (!st)
         return -ENOMEM;
     s1->priv_data = s;
     s1->nb_streams = 1;
     s1->streams[0] = st;

     s->width = width;
     s->height = height;
     s->frame_rate = frame_rate;
-    s->frame_size = width*height*2; /*RGB*/
+    s->frame_size = width*height*2;
     st->codec.pix_fmt = PIX_FMT_YUV420P;
     st->codec.codec_id = CODEC_ID_RAWVIDEO;
     st->codec.width = width;
     st->codec.height = height;
     st->codec.frame_rate = frame_rate;

     return bktr_init(s1, ap);
 }

 static int grab_read_close (AVFormatContext *s1)
 {
     VideoData *s = s1->priv_data;

     int c = METEOR_CAP_STOP_CONT;
     ioctl(s->fd, METEORCAPTUR, &c);
     close(s->fd);
     close(s->tuner_fd);
     free(s);
     return 0;
 }

 AVInputFormat video_grab_device_format = {
     "video_grab_device",
     "video grab",
     sizeof(VideoData),
     NULL,
     grab_read_header,
     grab_read_packet,
     grab_read_close,
     flags: AVFMT_NOFILE,
 };

 int video_grab_init(void)