1
0
mirror of https://git.FreeBSD.org/ports.git synced 2024-12-26 05:02:18 +00:00

o Update Brooktree (bktr) capture code to latest version:

1) Warning message if the tuner device cannot be opened for
	   write
	2) A two stage sync slip recovery mechanism, stage 1 runs
	   async to catch up, stage 2 injects a blank frame
	3) Update to a consistent indent style (will minimize future
	   diffs)
o Bump PORTREVISION

Submitted by:	"Steve O'Hara-Smith" <steve@sohara.org>
This commit is contained in:
Mario Sergio Fujikawa Ferreira 2002-11-05 02:00:06 +00:00
parent bab9877d31
commit b868b85dad
Notes: svn2git 2021-03-31 03:12:20 +00:00
svn path=/head/; revision=69495
8 changed files with 636 additions and 564 deletions

View File

@ -7,7 +7,7 @@
PORTNAME= ffmpeg
PORTVERSION= 0.4.5
PORTREVISION= 3
PORTREVISION= 4
CATEGORIES= graphics
MASTER_SITES= ${MASTER_SITE_LOCAL}
MASTER_SITE_SUBDIR= lioux

View File

@ -33,12 +33,12 @@
#include <signal.h>
/* Per-instance state for the bktr(4) video grab device. */
typedef struct {
    int fd;           /* capture device (/dev/bktr0) */
    int tuner_fd;     /* tuner device (/dev/tuner0); negative if unavailable */
    int frame_format; /* see VIDEO_PALETTE_xxx */
    int width, height;
    int frame_rate;
    int frame_size;   /* bytes per captured frame */
} VideoData;
const char *video_device = "/dev/bktr0";
@ -60,185 +60,203 @@ const char *video_device = "/dev/bktr0";
static UINT8 *video_buf;
static int signal_expected = 0;
static int unexpected_signals = 0;
/*
 * Frame-completion signal handler (SIGUSR1/SIGALRM).
 *
 * Counts signals that arrive while no reader was waiting; the reader
 * uses that count to resynchronise after a sync slip.  The merged-in
 * stray "return;" at the top of the old version made the handler a
 * no-op; removed so the bookkeeping actually runs.
 */
static void catchsignal(int signal)
{
    if (!signal_expected)
        unexpected_signals++;
    signal_expected = 0;
}
/*
 * Open and configure the bktr(4) capture and tuner devices.
 *
 * Installs a handler for SIGUSR1/SIGALRM, opens /dev/tuner0 (failure is
 * only a warning), opens the capture device, programs the capture
 * geometry and broadcast format, mmaps the frame buffer and starts
 * continuous capture with per-frame SIGUSR1 delivery.
 *
 * Returns 0 on success, -EIO on any fatal device error.
 */
static int bktr_init(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s = s1->priv_data;
    int width, height;
    int video_fd;
    int format = VIDEO_FORMAT;
    struct meteor_geomet geo;
    int c;
    struct sigaction act, old;

    /* Catch the frame-completion signals raised by the driver. */
    memset(&act, 0, sizeof(act));
    sigemptyset(&act.sa_mask);
    act.sa_handler = catchsignal;
    sigaction(SIGUSR1, &act, &old);
    sigaction(SIGALRM, &act, &old);

    width = s->width;
    height = s->height;

    /* The tuner is optional: warn and continue if it cannot be opened. */
    s->tuner_fd = open("/dev/tuner0", O_RDWR);
    if (s->tuner_fd < 0) {
        perror("Warning: Tuner not opened continuing");
    }

    video_fd = open(video_device, O_RDWR);
    if (video_fd < 0) {
        perror(video_device);
        return -EIO;
    }
    s->fd = video_fd;

    geo.rows = height;
    geo.columns = width;
    geo.frames = 1;
    geo.oformat = METEOR_GEO_YUV_PACKED;

    /* Capture a single field when at most half the full frame height is
     * wanted (no de-interlacing needed).  Must be set before
     * METEORSETGEO to take effect. */
    if ((format == PAL) && (height <= (PAL_HEIGHT / 2)))
        geo.oformat |= METEOR_GEO_EVEN_ONLY;
    if ((format == NTSC) && (height <= (NTSC_HEIGHT / 2)))
        geo.oformat |= METEOR_GEO_EVEN_ONLY;

    if (ioctl(video_fd, METEORSETGEO, &geo) < 0) {
        perror("METEORSETGEO");
        return -EIO;
    }

    switch (format) {
    case PAL:  c = METEOR_FMT_PAL;  break;
    case NTSC: c = METEOR_FMT_NTSC; break;
    default:   c = METEOR_FMT_PAL;  break;
    }

    if (ioctl(video_fd, METEORSFMT, &c) < 0) {
        perror("METEORSFMT");
        return -EIO;
    }

    c = VIDEO_INPUT;
    if (ioctl(video_fd, METEORSINPUT, &c) < 0) {
        perror("METEORSINPUT");
        return -EIO;
    }

    /* Packed YUV 4:2:2 - two bytes per pixel. */
    video_buf = mmap((caddr_t)0, width * height * 2, PROT_READ, MAP_SHARED,
                     video_fd, (off_t)0);
    if (video_buf == MAP_FAILED) {
        perror("mmap");
        return -EIO;
    }

    c = METEOR_CAP_CONTINOUS;
    ioctl(s->fd, METEORCAPTUR, &c);

    /* Request SIGUSR1 on every captured frame.  The first signal is
     * expected, so it must not be counted as a sync slip. */
    c = SIGUSR1;
    signal_expected = 1;
    ioctl(s->fd, METEORSSIGNAL, &c);
    return 0;
}
/*
 * Convert one packed YUV 4:2:2 frame (Cb Y0 Cr Y1 per pixel pair) into
 * planar YUV 4:2:0.  Vertical chroma subsampling is done by simply
 * dropping the chroma of every odd scan line.
 *
 * The merged source contained the loop body twice, which would have
 * walked the frame a second time and written past the destination
 * planes; a single pass is kept.
 *
 * Assumes width and height are even -- TODO confirm with callers.
 */
static void bf_yuv422_to_yuv420p(UINT8 *lum, UINT8 *cb, UINT8 *cr,
                                 UINT8 *src, int width, int height)
{
    int x, y;
    UINT8 *p = src;

    for (y = 0; y < height; y += 2) {
        /* even line: keep luma and chroma */
        for (x = 0; x < width; x += 2) {
            lum[0] = p[1];
            cb[0]  = p[0];
            lum[1] = p[3];
            cr[0]  = p[2];
            p += 4;
            lum += 2;
            cb++;
            cr++;
        }
        /* odd line: keep luma only */
        for (x = 0; x < width; x += 2) {
            lum[0] = p[1];
            lum[1] = p[3];
            p += 4;
            lum += 2;
        }
    }
}
/* note: we support only one picture read at a time */
/*
 * Read one video frame, blocking until the driver signals completion.
 *
 * Two stage sync-slip recovery: if signals arrived while we were not
 * waiting, stage 1 consumes the backlog without sleeping (runs async to
 * catch up); if the slip count is still odd, stage 2 injects a blank
 * frame to keep audio/video in step.
 *
 * Returns the packet payload size, or -EIO on allocation failure.
 */
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    VideoData *s = s1->priv_data;
    int size, halfsize;
    sigset_t msig;
    UINT8 *lum, *cb, *cr;

    size = s->width * s->height;
    /* NOTE(review): despite the name this is 2*size, so the packet is
     * width*height*3 bytes while YUV420p only needs 1.5*size.  Kept
     * as-is since the packet size is part of the emitted stream --
     * confirm before changing. */
    halfsize = size << 1;

    if (av_new_packet(pkt, size + halfsize) < 0)
        return -EIO;

    if (unexpected_signals > 0) {
        /* stage 1: a frame already completed, consume it without waiting */
        unexpected_signals--;
    } else {
        signal_expected = 1;
        sigemptyset(&msig);
        sigsuspend(&msig);
    }

    if (unexpected_signals & 1) {
        /* stage 2: still out of step by one frame, emit a blank frame */
        bzero(pkt->data, size + halfsize);
    } else {
        lum = pkt->data;
        cb = lum + size;
        cr = cb + size / 4;
        bf_yuv422_to_yuv420p(lum, cb, cr, video_buf, s->width, s->height);
    }
    return size + halfsize;
}
/*
 * libavformat read_header callback: validate the caller-supplied grab
 * parameters, create the single raw-video stream and hand off to
 * bktr_init() to program the hardware.
 *
 * Returns 0/negative per the bktr_init() result, -1 on missing or
 * invalid parameters, -ENOMEM if the stream cannot be allocated.
 */
static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int frame_rate;

    /* Grabbing needs an explicit geometry and rate from the caller. */
    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
        return -1;

    width = ap->width;
    height = ap->height;
    frame_rate = ap->frame_rate;

    st = av_new_stream(s1, 0);
    if (!st)
        return -ENOMEM;
    s1->priv_data = s;
    s1->nb_streams = 1;
    s1->streams[0] = st;

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;
    s->frame_size = width * height * 2; /* packed YUV 4:2:2, 2 bytes/pixel */

    st->codec.pix_fmt = PIX_FMT_YUV420P;
    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = width;
    st->codec.height = height;
    st->codec.frame_rate = frame_rate;

    return bktr_init(s1, ap);
}
/*
 * libavformat read_close callback: stop continuous capture, close the
 * capture and tuner devices and release the private state.
 *
 * Always returns 0.
 */
static int grab_read_close(AVFormatContext *s1)
{
    VideoData *s = s1->priv_data;
    int c = METEOR_CAP_STOP_CONT;

    ioctl(s->fd, METEORCAPTUR, &c);
    close(s->fd);
    /* The tuner open in bktr_init() is allowed to fail; don't close a
     * negative descriptor. */
    if (s->tuner_fd >= 0)
        close(s->tuner_fd);
    free(s);
    return 0;
}
/* Demuxer registration entry for the bktr video grab device.  The
 * merged source listed several members twice; each appears once here. */
AVInputFormat video_grab_device_format = {
    "video_grab_device",
    "video grab",
    sizeof(VideoData),
    NULL,
    grab_read_header,
    grab_read_packet,
    grab_read_close,
    flags: AVFMT_NOFILE,
};
int video_grab_init(void)

View File

@ -7,7 +7,7 @@
PORTNAME= ffmpeg
PORTVERSION= 0.4.5
PORTREVISION= 3
PORTREVISION= 4
CATEGORIES= graphics
MASTER_SITES= ${MASTER_SITE_LOCAL}
MASTER_SITE_SUBDIR= lioux

View File

@ -33,12 +33,12 @@
#include <signal.h>
typedef struct {
int fd;
int tuner_fd;
int frame_format; /* see VIDEO_PALETTE_xxx */
int width, height;
int frame_rate;
int frame_size;
int fd;
int tuner_fd;
int frame_format; /* see VIDEO_PALETTE_xxx */
int width, height;
int frame_rate;
int frame_size;
} VideoData;
const char *video_device = "/dev/bktr0";
@ -60,185 +60,203 @@ const char *video_device = "/dev/bktr0";
static UINT8 *video_buf;
static int signal_expected = 0;
static int unexpected_signals = 0;
static void catchsignal(int signal)
{
return;
if (!signal_expected) unexpected_signals++;
signal_expected = 0;
return;
}
static int bktr_init(AVFormatContext *s1, AVFormatParameters *ap)
{
VideoData *s = s1->priv_data;
int width, height;
int video_fd;
int format = VIDEO_FORMAT;
struct meteor_geomet geo;
int c;
struct sigaction act,old;
VideoData *s = s1->priv_data;
int width, height;
int video_fd;
int format = VIDEO_FORMAT;
struct meteor_geomet geo;
int c;
struct sigaction act,old;
memset(&act,0,sizeof(act));
sigemptyset(&act.sa_mask);
act.sa_handler = catchsignal;
sigaction(SIGUSR1,&act,&old);
sigaction(SIGALRM,&act,&old);
memset(&act,0,sizeof(act));
sigemptyset(&act.sa_mask);
act.sa_handler = catchsignal;
sigaction(SIGUSR1,&act,&old);
sigaction(SIGALRM,&act,&old);
width = s->width;
height = s->height;
width = s->width;
height = s->height;
s->tuner_fd = open ("/dev/tuner0", O_RDWR);
s->tuner_fd = open ("/dev/tuner0", O_RDWR);
if (s->tuner_fd < 0) {
perror("Warning: Tuner not opened continuing");
}
video_fd = open(video_device, O_RDWR);
if (video_fd < 0) {
perror(video_device);
return -EIO;
}
s->fd=video_fd;
geo.rows = height;
geo.columns = width;
geo.frames = 1;
geo.oformat = METEOR_GEO_YUV_PACKED; // RGB
if ((format == PAL) && (height <= (PAL_HEIGHT/2)))
geo.oformat |= METEOR_GEO_ODD_ONLY;
if ((format == NTSC) && (height <= (NTSC_HEIGHT/2)))
geo.oformat |= METEOR_GEO_ODD_ONLY;
video_fd = open(video_device, O_RDWR);
if (video_fd < 0) {
perror(video_device);
return -EIO;
}
s->fd=video_fd;
geo.rows = height;
geo.columns = width;
geo.frames = 1;
geo.oformat = METEOR_GEO_YUV_PACKED;
if (ioctl(video_fd, METEORSETGEO, &geo) < 0) {
perror ("METEORSETGEO");
return -EIO;
}
if ((format == PAL) && (height <= (PAL_HEIGHT/2)))
geo.oformat |= METEOR_GEO_EVEN_ONLY;
if ((format == NTSC) && (height <= (NTSC_HEIGHT/2)))
geo.oformat |= METEOR_GEO_EVEN_ONLY;
switch (format) {
case PAL: c = METEOR_FMT_PAL; break;
case NTSC: c = METEOR_FMT_NTSC; break;
default: c = METEOR_FMT_PAL; break;
}
if (ioctl(video_fd, METEORSETGEO, &geo) < 0) {
perror ("METEORSETGEO");
return -EIO;
}
if (ioctl(video_fd, METEORSFMT, &c) < 0) {
perror ("METEORSFMT");
return -EIO;
}
switch (format) {
case PAL: c = METEOR_FMT_PAL; break;
case NTSC: c = METEOR_FMT_NTSC; break;
default: c = METEOR_FMT_PAL; break;
}
c = VIDEO_INPUT;
if (ioctl(video_fd, METEORSINPUT, &c) < 0) {
perror ("METEORSINPUT");
return -EIO;
}
video_buf = mmap((caddr_t)0, width*height*2, PROT_READ, MAP_SHARED, // RGB
video_fd, (off_t) 0);
if (video_buf == MAP_FAILED) {
perror ("mmap");
return -EIO;
}
c = METEOR_CAP_CONTINOUS;
ioctl(s->fd, METEORCAPTUR, &c);
c = SIGUSR1;
ioctl (s->fd, METEORSSIGNAL, &c);
return 0;
if (ioctl(video_fd, METEORSFMT, &c) < 0) {
perror ("METEORSFMT");
return -EIO;
}
c = VIDEO_INPUT;
if (ioctl(video_fd, METEORSINPUT, &c) < 0) {
perror ("METEORSINPUT");
return -EIO;
}
video_buf = mmap((caddr_t)0, width*height*2, PROT_READ, MAP_SHARED,
video_fd, (off_t) 0);
if (video_buf == MAP_FAILED) {
perror ("mmap");
return -EIO;
}
c = METEOR_CAP_CONTINOUS;
ioctl(s->fd, METEORCAPTUR, &c);
c = SIGUSR1;
signal_expected = 1;
ioctl (s->fd, METEORSSIGNAL, &c);
return 0;
}
static void bf_yuv422_to_yuv420p(UINT8 *lum, UINT8 *cb, UINT8 *cr,
UINT8 *src, int width, int height)
UINT8 *src, int width, int height)
{
int x, y;
UINT8 *p = src;
for(y=0;y<height;y+=2) {
for(x=0;x<width;x+=2) {
lum[0] = p[1];
cb[0] = p[0];
lum[1] = p[3];
cr[0] = p[2];
p += 4;
lum += 2;
cb++;
cr++;
}
for(x=0;x<width;x+=2) {
lum[0] = p[1];
lum[1] = p[3];
p += 4;
lum += 2;
}
}
int x, y;
UINT8 *p = src;
for(y=0;y<height;y+=2) {
for(x=0;x<width;x+=2) {
lum[0] = p[1];
cb[0] = p[0];
lum[1] = p[3];
cr[0] = p[2];
p += 4;
lum += 2;
cb++;
cr++;
}
for(x=0;x<width;x+=2) {
lum[0] = p[1];
lum[1] = p[3];
p += 4;
lum += 2;
}
}
}
/* note: we support only one picture read at a time */
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
VideoData *s = s1->priv_data;
int size, halfsize;
sigset_t msig;
UINT8 *lum, *cb, *cr;
VideoData *s = s1->priv_data;
int size, halfsize;
sigset_t msig;
UINT8 *lum, *cb, *cr;
size = s->width * s->height;
halfsize = size << 1;
if (av_new_packet(pkt, size + halfsize) < 0)
return -EIO;
size = s->width * s->height;
halfsize = size << 1;
if (av_new_packet(pkt, size + halfsize) < 0)
return -EIO;
sigemptyset (&msig);
sigsuspend (&msig);
if (unexpected_signals > 0) {
unexpected_signals--;
} else {
signal_expected = 1;
sigemptyset (&msig);
sigsuspend (&msig);
}
lum = pkt->data;
cb = lum + size;
cr = cb + size/4;
if (unexpected_signals & 1) {
bzero (pkt->data, size + halfsize);
} else {
lum = pkt->data;
cb = lum + size;
cr = cb + size/4;
bf_yuv422_to_yuv420p (lum, cb, cr, video_buf, s->width, s->height);
return size + halfsize;
bf_yuv422_to_yuv420p (lum, cb, cr, video_buf, s->width, s->height);
}
return size + halfsize;
}
static int grab_read_header (AVFormatContext *s1, AVFormatParameters *ap)
{
VideoData *s = s1->priv_data;
AVStream *st;
int width, height;
int frame_rate;
VideoData *s = s1->priv_data;
AVStream *st;
int width, height;
int frame_rate;
if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
return -1;
if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
return -1;
width = ap->width;
height = ap->height;
frame_rate = ap->frame_rate;
st = av_new_stream(s1, 0);
if (!st)
return -ENOMEM;
s1->priv_data = s;
s1->nb_streams = 1;
s1->streams[0] = st;
width = ap->width;
height = ap->height;
frame_rate = ap->frame_rate;
st = av_new_stream(s1, 0);
if (!st)
return -ENOMEM;
s1->priv_data = s;
s1->nb_streams = 1;
s1->streams[0] = st;
s->width = width;
s->height = height;
s->frame_rate = frame_rate;
s->frame_size = width*height*2; /*RGB*/
st->codec.pix_fmt = PIX_FMT_YUV420P;
st->codec.codec_id = CODEC_ID_RAWVIDEO;
st->codec.width = width;
st->codec.height = height;
st->codec.frame_rate = frame_rate;
s->width = width;
s->height = height;
s->frame_rate = frame_rate;
s->frame_size = width*height*2;
st->codec.pix_fmt = PIX_FMT_YUV420P;
st->codec.codec_id = CODEC_ID_RAWVIDEO;
st->codec.width = width;
st->codec.height = height;
st->codec.frame_rate = frame_rate;
return bktr_init(s1, ap);
return bktr_init(s1, ap);
}
static int grab_read_close (AVFormatContext *s1)
{
VideoData *s = s1->priv_data;
VideoData *s = s1->priv_data;
int c = METEOR_CAP_STOP_CONT;
ioctl(s->fd, METEORCAPTUR, &c);
close(s->fd);
close(s->tuner_fd);
free(s);
return 0;
int c = METEOR_CAP_STOP_CONT;
ioctl(s->fd, METEORCAPTUR, &c);
close(s->fd);
close(s->tuner_fd);
free(s);
return 0;
}
AVInputFormat video_grab_device_format = {
"video_grab_device",
"video grab",
"video_grab_device",
"video grab",
sizeof(VideoData),
NULL,
grab_read_header,
grab_read_packet,
grab_read_close,
flags: AVFMT_NOFILE,
grab_read_header,
grab_read_packet,
grab_read_close,
flags: AVFMT_NOFILE,
};
int video_grab_init(void)

View File

@ -7,7 +7,7 @@
PORTNAME= ffmpeg
PORTVERSION= 0.4.5
PORTREVISION= 3
PORTREVISION= 4
CATEGORIES= graphics
MASTER_SITES= ${MASTER_SITE_LOCAL}
MASTER_SITE_SUBDIR= lioux

View File

@ -33,12 +33,12 @@
#include <signal.h>
typedef struct {
int fd;
int tuner_fd;
int frame_format; /* see VIDEO_PALETTE_xxx */
int width, height;
int frame_rate;
int frame_size;
int fd;
int tuner_fd;
int frame_format; /* see VIDEO_PALETTE_xxx */
int width, height;
int frame_rate;
int frame_size;
} VideoData;
const char *video_device = "/dev/bktr0";
@ -60,185 +60,203 @@ const char *video_device = "/dev/bktr0";
static UINT8 *video_buf;
static int signal_expected = 0;
static int unexpected_signals = 0;
static void catchsignal(int signal)
{
return;
if (!signal_expected) unexpected_signals++;
signal_expected = 0;
return;
}
static int bktr_init(AVFormatContext *s1, AVFormatParameters *ap)
{
VideoData *s = s1->priv_data;
int width, height;
int video_fd;
int format = VIDEO_FORMAT;
struct meteor_geomet geo;
int c;
struct sigaction act,old;
VideoData *s = s1->priv_data;
int width, height;
int video_fd;
int format = VIDEO_FORMAT;
struct meteor_geomet geo;
int c;
struct sigaction act,old;
memset(&act,0,sizeof(act));
sigemptyset(&act.sa_mask);
act.sa_handler = catchsignal;
sigaction(SIGUSR1,&act,&old);
sigaction(SIGALRM,&act,&old);
memset(&act,0,sizeof(act));
sigemptyset(&act.sa_mask);
act.sa_handler = catchsignal;
sigaction(SIGUSR1,&act,&old);
sigaction(SIGALRM,&act,&old);
width = s->width;
height = s->height;
width = s->width;
height = s->height;
s->tuner_fd = open ("/dev/tuner0", O_RDWR);
s->tuner_fd = open ("/dev/tuner0", O_RDWR);
if (s->tuner_fd < 0) {
perror("Warning: Tuner not opened continuing");
}
video_fd = open(video_device, O_RDWR);
if (video_fd < 0) {
perror(video_device);
return -EIO;
}
s->fd=video_fd;
geo.rows = height;
geo.columns = width;
geo.frames = 1;
geo.oformat = METEOR_GEO_YUV_PACKED; // RGB
if ((format == PAL) && (height <= (PAL_HEIGHT/2)))
geo.oformat |= METEOR_GEO_ODD_ONLY;
if ((format == NTSC) && (height <= (NTSC_HEIGHT/2)))
geo.oformat |= METEOR_GEO_ODD_ONLY;
video_fd = open(video_device, O_RDWR);
if (video_fd < 0) {
perror(video_device);
return -EIO;
}
s->fd=video_fd;
geo.rows = height;
geo.columns = width;
geo.frames = 1;
geo.oformat = METEOR_GEO_YUV_PACKED;
if (ioctl(video_fd, METEORSETGEO, &geo) < 0) {
perror ("METEORSETGEO");
return -EIO;
}
if ((format == PAL) && (height <= (PAL_HEIGHT/2)))
geo.oformat |= METEOR_GEO_EVEN_ONLY;
if ((format == NTSC) && (height <= (NTSC_HEIGHT/2)))
geo.oformat |= METEOR_GEO_EVEN_ONLY;
switch (format) {
case PAL: c = METEOR_FMT_PAL; break;
case NTSC: c = METEOR_FMT_NTSC; break;
default: c = METEOR_FMT_PAL; break;
}
if (ioctl(video_fd, METEORSETGEO, &geo) < 0) {
perror ("METEORSETGEO");
return -EIO;
}
if (ioctl(video_fd, METEORSFMT, &c) < 0) {
perror ("METEORSFMT");
return -EIO;
}
switch (format) {
case PAL: c = METEOR_FMT_PAL; break;
case NTSC: c = METEOR_FMT_NTSC; break;
default: c = METEOR_FMT_PAL; break;
}
c = VIDEO_INPUT;
if (ioctl(video_fd, METEORSINPUT, &c) < 0) {
perror ("METEORSINPUT");
return -EIO;
}
video_buf = mmap((caddr_t)0, width*height*2, PROT_READ, MAP_SHARED, // RGB
video_fd, (off_t) 0);
if (video_buf == MAP_FAILED) {
perror ("mmap");
return -EIO;
}
c = METEOR_CAP_CONTINOUS;
ioctl(s->fd, METEORCAPTUR, &c);
c = SIGUSR1;
ioctl (s->fd, METEORSSIGNAL, &c);
return 0;
if (ioctl(video_fd, METEORSFMT, &c) < 0) {
perror ("METEORSFMT");
return -EIO;
}
c = VIDEO_INPUT;
if (ioctl(video_fd, METEORSINPUT, &c) < 0) {
perror ("METEORSINPUT");
return -EIO;
}
video_buf = mmap((caddr_t)0, width*height*2, PROT_READ, MAP_SHARED,
video_fd, (off_t) 0);
if (video_buf == MAP_FAILED) {
perror ("mmap");
return -EIO;
}
c = METEOR_CAP_CONTINOUS;
ioctl(s->fd, METEORCAPTUR, &c);
c = SIGUSR1;
signal_expected = 1;
ioctl (s->fd, METEORSSIGNAL, &c);
return 0;
}
static void bf_yuv422_to_yuv420p(UINT8 *lum, UINT8 *cb, UINT8 *cr,
UINT8 *src, int width, int height)
UINT8 *src, int width, int height)
{
int x, y;
UINT8 *p = src;
for(y=0;y<height;y+=2) {
for(x=0;x<width;x+=2) {
lum[0] = p[1];
cb[0] = p[0];
lum[1] = p[3];
cr[0] = p[2];
p += 4;
lum += 2;
cb++;
cr++;
}
for(x=0;x<width;x+=2) {
lum[0] = p[1];
lum[1] = p[3];
p += 4;
lum += 2;
}
}
int x, y;
UINT8 *p = src;
for(y=0;y<height;y+=2) {
for(x=0;x<width;x+=2) {
lum[0] = p[1];
cb[0] = p[0];
lum[1] = p[3];
cr[0] = p[2];
p += 4;
lum += 2;
cb++;
cr++;
}
for(x=0;x<width;x+=2) {
lum[0] = p[1];
lum[1] = p[3];
p += 4;
lum += 2;
}
}
}
/* note: we support only one picture read at a time */
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
VideoData *s = s1->priv_data;
int size, halfsize;
sigset_t msig;
UINT8 *lum, *cb, *cr;
VideoData *s = s1->priv_data;
int size, halfsize;
sigset_t msig;
UINT8 *lum, *cb, *cr;
size = s->width * s->height;
halfsize = size << 1;
if (av_new_packet(pkt, size + halfsize) < 0)
return -EIO;
size = s->width * s->height;
halfsize = size << 1;
if (av_new_packet(pkt, size + halfsize) < 0)
return -EIO;
sigemptyset (&msig);
sigsuspend (&msig);
if (unexpected_signals > 0) {
unexpected_signals--;
} else {
signal_expected = 1;
sigemptyset (&msig);
sigsuspend (&msig);
}
lum = pkt->data;
cb = lum + size;
cr = cb + size/4;
if (unexpected_signals & 1) {
bzero (pkt->data, size + halfsize);
} else {
lum = pkt->data;
cb = lum + size;
cr = cb + size/4;
bf_yuv422_to_yuv420p (lum, cb, cr, video_buf, s->width, s->height);
return size + halfsize;
bf_yuv422_to_yuv420p (lum, cb, cr, video_buf, s->width, s->height);
}
return size + halfsize;
}
static int grab_read_header (AVFormatContext *s1, AVFormatParameters *ap)
{
VideoData *s = s1->priv_data;
AVStream *st;
int width, height;
int frame_rate;
VideoData *s = s1->priv_data;
AVStream *st;
int width, height;
int frame_rate;
if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
return -1;
if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
return -1;
width = ap->width;
height = ap->height;
frame_rate = ap->frame_rate;
st = av_new_stream(s1, 0);
if (!st)
return -ENOMEM;
s1->priv_data = s;
s1->nb_streams = 1;
s1->streams[0] = st;
width = ap->width;
height = ap->height;
frame_rate = ap->frame_rate;
st = av_new_stream(s1, 0);
if (!st)
return -ENOMEM;
s1->priv_data = s;
s1->nb_streams = 1;
s1->streams[0] = st;
s->width = width;
s->height = height;
s->frame_rate = frame_rate;
s->frame_size = width*height*2; /*RGB*/
st->codec.pix_fmt = PIX_FMT_YUV420P;
st->codec.codec_id = CODEC_ID_RAWVIDEO;
st->codec.width = width;
st->codec.height = height;
st->codec.frame_rate = frame_rate;
s->width = width;
s->height = height;
s->frame_rate = frame_rate;
s->frame_size = width*height*2;
st->codec.pix_fmt = PIX_FMT_YUV420P;
st->codec.codec_id = CODEC_ID_RAWVIDEO;
st->codec.width = width;
st->codec.height = height;
st->codec.frame_rate = frame_rate;
return bktr_init(s1, ap);
return bktr_init(s1, ap);
}
static int grab_read_close (AVFormatContext *s1)
{
VideoData *s = s1->priv_data;
VideoData *s = s1->priv_data;
int c = METEOR_CAP_STOP_CONT;
ioctl(s->fd, METEORCAPTUR, &c);
close(s->fd);
close(s->tuner_fd);
free(s);
return 0;
int c = METEOR_CAP_STOP_CONT;
ioctl(s->fd, METEORCAPTUR, &c);
close(s->fd);
close(s->tuner_fd);
free(s);
return 0;
}
AVInputFormat video_grab_device_format = {
"video_grab_device",
"video grab",
"video_grab_device",
"video grab",
sizeof(VideoData),
NULL,
grab_read_header,
grab_read_packet,
grab_read_close,
flags: AVFMT_NOFILE,
grab_read_header,
grab_read_packet,
grab_read_close,
flags: AVFMT_NOFILE,
};
int video_grab_init(void)

View File

@ -7,7 +7,7 @@
PORTNAME= ffmpeg
PORTVERSION= 0.4.5
PORTREVISION= 3
PORTREVISION= 4
CATEGORIES= graphics
MASTER_SITES= ${MASTER_SITE_LOCAL}
MASTER_SITE_SUBDIR= lioux

View File

@ -33,12 +33,12 @@
#include <signal.h>
typedef struct {
int fd;
int tuner_fd;
int frame_format; /* see VIDEO_PALETTE_xxx */
int width, height;
int frame_rate;
int frame_size;
int fd;
int tuner_fd;
int frame_format; /* see VIDEO_PALETTE_xxx */
int width, height;
int frame_rate;
int frame_size;
} VideoData;
const char *video_device = "/dev/bktr0";
@ -60,185 +60,203 @@ const char *video_device = "/dev/bktr0";
static UINT8 *video_buf;
static int signal_expected = 0;
static int unexpected_signals = 0;
static void catchsignal(int signal)
{
return;
if (!signal_expected) unexpected_signals++;
signal_expected = 0;
return;
}
static int bktr_init(AVFormatContext *s1, AVFormatParameters *ap)
{
VideoData *s = s1->priv_data;
int width, height;
int video_fd;
int format = VIDEO_FORMAT;
struct meteor_geomet geo;
int c;
struct sigaction act,old;
VideoData *s = s1->priv_data;
int width, height;
int video_fd;
int format = VIDEO_FORMAT;
struct meteor_geomet geo;
int c;
struct sigaction act,old;
memset(&act,0,sizeof(act));
sigemptyset(&act.sa_mask);
act.sa_handler = catchsignal;
sigaction(SIGUSR1,&act,&old);
sigaction(SIGALRM,&act,&old);
memset(&act,0,sizeof(act));
sigemptyset(&act.sa_mask);
act.sa_handler = catchsignal;
sigaction(SIGUSR1,&act,&old);
sigaction(SIGALRM,&act,&old);
width = s->width;
height = s->height;
width = s->width;
height = s->height;
s->tuner_fd = open ("/dev/tuner0", O_RDWR);
s->tuner_fd = open ("/dev/tuner0", O_RDWR);
if (s->tuner_fd < 0) {
perror("Warning: Tuner not opened continuing");
}
video_fd = open(video_device, O_RDWR);
if (video_fd < 0) {
perror(video_device);
return -EIO;
}
s->fd=video_fd;
geo.rows = height;
geo.columns = width;
geo.frames = 1;
geo.oformat = METEOR_GEO_YUV_PACKED; // RGB
if ((format == PAL) && (height <= (PAL_HEIGHT/2)))
geo.oformat |= METEOR_GEO_ODD_ONLY;
if ((format == NTSC) && (height <= (NTSC_HEIGHT/2)))
geo.oformat |= METEOR_GEO_ODD_ONLY;
video_fd = open(video_device, O_RDWR);
if (video_fd < 0) {
perror(video_device);
return -EIO;
}
s->fd=video_fd;
geo.rows = height;
geo.columns = width;
geo.frames = 1;
geo.oformat = METEOR_GEO_YUV_PACKED;
if (ioctl(video_fd, METEORSETGEO, &geo) < 0) {
perror ("METEORSETGEO");
return -EIO;
}
if ((format == PAL) && (height <= (PAL_HEIGHT/2)))
geo.oformat |= METEOR_GEO_EVEN_ONLY;
if ((format == NTSC) && (height <= (NTSC_HEIGHT/2)))
geo.oformat |= METEOR_GEO_EVEN_ONLY;
switch (format) {
case PAL: c = METEOR_FMT_PAL; break;
case NTSC: c = METEOR_FMT_NTSC; break;
default: c = METEOR_FMT_PAL; break;
}
if (ioctl(video_fd, METEORSETGEO, &geo) < 0) {
perror ("METEORSETGEO");
return -EIO;
}
if (ioctl(video_fd, METEORSFMT, &c) < 0) {
perror ("METEORSFMT");
return -EIO;
}
switch (format) {
case PAL: c = METEOR_FMT_PAL; break;
case NTSC: c = METEOR_FMT_NTSC; break;
default: c = METEOR_FMT_PAL; break;
}
c = VIDEO_INPUT;
if (ioctl(video_fd, METEORSINPUT, &c) < 0) {
perror ("METEORSINPUT");
return -EIO;
}
video_buf = mmap((caddr_t)0, width*height*2, PROT_READ, MAP_SHARED, // RGB
video_fd, (off_t) 0);
if (video_buf == MAP_FAILED) {
perror ("mmap");
return -EIO;
}
c = METEOR_CAP_CONTINOUS;
ioctl(s->fd, METEORCAPTUR, &c);
c = SIGUSR1;
ioctl (s->fd, METEORSSIGNAL, &c);
return 0;
if (ioctl(video_fd, METEORSFMT, &c) < 0) {
perror ("METEORSFMT");
return -EIO;
}
c = VIDEO_INPUT;
if (ioctl(video_fd, METEORSINPUT, &c) < 0) {
perror ("METEORSINPUT");
return -EIO;
}
video_buf = mmap((caddr_t)0, width*height*2, PROT_READ, MAP_SHARED,
video_fd, (off_t) 0);
if (video_buf == MAP_FAILED) {
perror ("mmap");
return -EIO;
}
c = METEOR_CAP_CONTINOUS;
ioctl(s->fd, METEORCAPTUR, &c);
c = SIGUSR1;
signal_expected = 1;
ioctl (s->fd, METEORSSIGNAL, &c);
return 0;
}
static void bf_yuv422_to_yuv420p(UINT8 *lum, UINT8 *cb, UINT8 *cr,
UINT8 *src, int width, int height)
UINT8 *src, int width, int height)
{
int x, y;
UINT8 *p = src;
for(y=0;y<height;y+=2) {
for(x=0;x<width;x+=2) {
lum[0] = p[1];
cb[0] = p[0];
lum[1] = p[3];
cr[0] = p[2];
p += 4;
lum += 2;
cb++;
cr++;
}
for(x=0;x<width;x+=2) {
lum[0] = p[1];
lum[1] = p[3];
p += 4;
lum += 2;
}
}
int x, y;
UINT8 *p = src;
for(y=0;y<height;y+=2) {
for(x=0;x<width;x+=2) {
lum[0] = p[1];
cb[0] = p[0];
lum[1] = p[3];
cr[0] = p[2];
p += 4;
lum += 2;
cb++;
cr++;
}
for(x=0;x<width;x+=2) {
lum[0] = p[1];
lum[1] = p[3];
p += 4;
lum += 2;
}
}
}
/* note: we support only one picture read at a time */
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
VideoData *s = s1->priv_data;
int size, halfsize;
sigset_t msig;
UINT8 *lum, *cb, *cr;
VideoData *s = s1->priv_data;
int size, halfsize;
sigset_t msig;
UINT8 *lum, *cb, *cr;
size = s->width * s->height;
halfsize = size << 1;
if (av_new_packet(pkt, size + halfsize) < 0)
return -EIO;
size = s->width * s->height;
halfsize = size << 1;
if (av_new_packet(pkt, size + halfsize) < 0)
return -EIO;
sigemptyset (&msig);
sigsuspend (&msig);
if (unexpected_signals > 0) {
unexpected_signals--;
} else {
signal_expected = 1;
sigemptyset (&msig);
sigsuspend (&msig);
}
lum = pkt->data;
cb = lum + size;
cr = cb + size/4;
if (unexpected_signals & 1) {
bzero (pkt->data, size + halfsize);
} else {
lum = pkt->data;
cb = lum + size;
cr = cb + size/4;
bf_yuv422_to_yuv420p (lum, cb, cr, video_buf, s->width, s->height);
return size + halfsize;
bf_yuv422_to_yuv420p (lum, cb, cr, video_buf, s->width, s->height);
}
return size + halfsize;
}
static int grab_read_header (AVFormatContext *s1, AVFormatParameters *ap)
{
VideoData *s = s1->priv_data;
AVStream *st;
int width, height;
int frame_rate;
VideoData *s = s1->priv_data;
AVStream *st;
int width, height;
int frame_rate;
if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
return -1;
if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
return -1;
width = ap->width;
height = ap->height;
frame_rate = ap->frame_rate;
st = av_new_stream(s1, 0);
if (!st)
return -ENOMEM;
s1->priv_data = s;
s1->nb_streams = 1;
s1->streams[0] = st;
width = ap->width;
height = ap->height;
frame_rate = ap->frame_rate;
st = av_new_stream(s1, 0);
if (!st)
return -ENOMEM;
s1->priv_data = s;
s1->nb_streams = 1;
s1->streams[0] = st;
s->width = width;
s->height = height;
s->frame_rate = frame_rate;
s->frame_size = width*height*2; /*RGB*/
st->codec.pix_fmt = PIX_FMT_YUV420P;
st->codec.codec_id = CODEC_ID_RAWVIDEO;
st->codec.width = width;
st->codec.height = height;
st->codec.frame_rate = frame_rate;
s->width = width;
s->height = height;
s->frame_rate = frame_rate;
s->frame_size = width*height*2;
st->codec.pix_fmt = PIX_FMT_YUV420P;
st->codec.codec_id = CODEC_ID_RAWVIDEO;
st->codec.width = width;
st->codec.height = height;
st->codec.frame_rate = frame_rate;
return bktr_init(s1, ap);
return bktr_init(s1, ap);
}
static int grab_read_close (AVFormatContext *s1)
{
VideoData *s = s1->priv_data;
VideoData *s = s1->priv_data;
int c = METEOR_CAP_STOP_CONT;
ioctl(s->fd, METEORCAPTUR, &c);
close(s->fd);
close(s->tuner_fd);
free(s);
return 0;
int c = METEOR_CAP_STOP_CONT;
ioctl(s->fd, METEORCAPTUR, &c);
close(s->fd);
close(s->tuner_fd);
free(s);
return 0;
}
AVInputFormat video_grab_device_format = {
"video_grab_device",
"video grab",
"video_grab_device",
"video grab",
sizeof(VideoData),
NULL,
grab_read_header,
grab_read_packet,
grab_read_close,
flags: AVFMT_NOFILE,
grab_read_header,
grab_read_packet,
grab_read_close,
flags: AVFMT_NOFILE,
};
int video_grab_init(void)