author    Andreas Auras <yak54@inkennet.de>    2011-03-29 11:35:31 +0200
committer Andreas Auras <yak54@inkennet.de>    2011-03-29 11:35:31 +0200
commit    6fa5ad78275f7f94565df3aaf3ef9bcf52c790fa (patch)
tree      c03974a79c219671585942ffda661071793e35dd /src
parent    3af2095a8b756b747db80ca9e0a75f07b3cf5e25 (diff)
Continuous video frame grabbing feature.
In contrast to the 'xine_get_current_frame' based snapshot function, this feature allows continuous grabbing of the last or next displayed video frame. Grabbed video frames are returned in a simple three-byte RGB format. Depending on the capabilities of the video output driver in use, the image data is taken as close as possible to the end of the video processing chain. Thus a returned image may already contain the blended OSD, may be deinterlaced, cropped and scaled, and may have video properties such as hue and saturation applied. This patch implements such a grabbing path for the vdpau video output driver. If a video output driver does not provide its own grabbing implementation, a generic fallback grabs video frames as they are taken from the video display queue (like the 'xine_get_current_frame' function does). In this case, color-correct conversion to an RGB image, including source cropping and scaling to the requested grab size, is also supported. A more detailed description can be found in the file "xine.h".
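
For orientation, a minimal client-side usage sketch of the new grab API follows. It is based on the engine and driver code in this patch; the authoritative declaration of xine_grab_video_frame_t is in xine.h, which is not part of this diff, so the field names and return codes below are inferred from the implementation, and the 320x240 target size is only an example.

#include <xine.h>

static int grab_rgb_frame (xine_stream_t *stream)
{
  /* allocate a grab frame object; this uses the driver's own implementation
     (vdpau in this patch) or the generic fallback in video_out.c */
  xine_grab_video_frame_t *frame = xine_new_grab_video_frame (stream);
  if (!frame)
    return -1;

  frame->width  = 320;  /* <= 0 keeps the actual size of the grabbed frame */
  frame->height = 240;
  frame->flags  = XINE_GRAB_VIDEO_FRAME_FLAGS_WAIT_NEXT; /* grab the next displayed frame */
  /* frame->timeout is preset to XINE_GRAB_VIDEO_FRAME_DEFAULT_TIMEOUT (milliseconds) */

  int r = frame->grab (frame);
  if (r == 0) {
    /* success: frame->img holds width * height * 3 bytes of RGB data,
       frame->vpts the presentation time stamp of the grabbed frame */
  } else if (r > 0) {
    /* timeout, no frame available */
  } else {
    /* error */
  }

  frame->dispose (frame);
  return r;
}
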
Diffstat (limited to 'src')
-rw-r--r--  src/video_out/video_out_vdpau.c  238
-rw-r--r--  src/xine-engine/Makefile.am       11
-rw-r--r--  src/xine-engine/post.c            11
-rw-r--r--  src/xine-engine/video_out.c      341
-rw-r--r--  src/xine-engine/xine.c            11
5 files changed, 589 insertions, 23 deletions
diff --git a/src/video_out/video_out_vdpau.c b/src/video_out/video_out_vdpau.c
index 3bb2bd652..69dcbae9e 100644
--- a/src/video_out/video_out_vdpau.c
+++ b/src/video_out/video_out_vdpau.c
@@ -134,7 +134,9 @@ VdpVideoSurfaceGetParameters *vdp_video_surface_get_parameters;
VdpOutputSurfaceCreate *vdp_output_surface_create;
VdpOutputSurfaceDestroy *vdp_output_surface_destroy;
VdpOutputSurfaceRenderBitmapSurface *vdp_output_surface_render_bitmap_surface;
+VdpOutputSurfaceRenderOutputSurface *vdp_output_surface_render_output_surface;
VdpOutputSurfacePutBitsNative *vdp_output_surface_put_bits;
+VdpOutputSurfaceGetBitsNative *vdp_output_surface_get_bits;
VdpVideoMixerCreate *vdp_video_mixer_create;
VdpVideoMixerDestroy *vdp_video_mixer_destroy;
@@ -291,6 +293,17 @@ static VdpStatus guarded_vdp_decoder_render(VdpDecoder decoder, VdpVideoSurface
typedef struct {
+ xine_grab_video_frame_t grab_frame;
+
+ vo_driver_t *vo_driver;
+ VdpOutputSurface render_surface;
+ int vdp_runtime_nr;
+ int width, height;
+ uint32_t *rgba;
+} vdpau_grab_video_frame_t;
+
+
+typedef struct {
VdpBitmapSurface ovl_bitmap;
uint32_t bitmap_width, bitmap_height;
int ovl_w, ovl_h; /* overlay's width and height */
@@ -375,6 +388,10 @@ typedef struct {
uint8_t init_queue;
uint8_t queue_length;
+ vdpau_grab_video_frame_t *pending_grab_request;
+ pthread_mutex_t grab_lock;
+ pthread_cond_t grab_cond;
+
VdpVideoMixer video_mixer;
VdpChromaType video_mixer_chroma;
uint32_t video_mixer_width;
@@ -1588,6 +1605,117 @@ static void vdpau_check_output_size( vo_driver_t *this_gen )
}
+static void vdpau_grab_current_output_surface (vdpau_driver_t *this, int64_t vpts)
+{
+ pthread_mutex_lock(&this->grab_lock);
+
+ vdpau_grab_video_frame_t *frame = this->pending_grab_request;
+ if (frame) {
+ VdpStatus st;
+
+ this->pending_grab_request = NULL;
+ frame->grab_frame.vpts = -1;
+
+ VdpOutputSurface grab_surface = this->output_surface[this->current_output_surface];
+ int width = this->output_surface_width[this->current_output_surface];
+ int height = this->output_surface_height[this->current_output_surface];
+
+ /* take cropping parameters into account */
+ width = width - frame->grab_frame.crop_left - frame->grab_frame.crop_right;
+ height = height - frame->grab_frame.crop_top - frame->grab_frame.crop_bottom;
+ if (width < 1)
+ width = 1;
+ if (height < 1)
+ height = 1;
+
+ /* if caller does not specify frame size we return the actual size of grabbed frame */
+ if (frame->grab_frame.width <= 0)
+ frame->grab_frame.width = width;
+ if (frame->grab_frame.height <= 0)
+ frame->grab_frame.height = height;
+
+ if (frame->vdp_runtime_nr != this->vdp_runtime_nr)
+ frame->render_surface = VDP_INVALID_HANDLE;
+
+ if (frame->grab_frame.width != frame->width || frame->grab_frame.height != frame->height) {
+ free(frame->rgba);
+ free(frame->grab_frame.img);
+ frame->rgba = NULL;
+ frame->grab_frame.img = NULL;
+
+ if (frame->render_surface != VDP_INVALID_HANDLE) {
+ st = vdp_output_surface_destroy(frame->render_surface);
+ frame->render_surface = VDP_INVALID_HANDLE;
+ if (st != VDP_STATUS_OK) {
+ fprintf(stderr, "vo_vdpau: Can't destroy grab render output surface: %s\n", vdp_get_error_string (st));
+ pthread_cond_broadcast(&this->grab_cond);
+ pthread_mutex_unlock(&this->grab_lock);
+ return;
+ }
+ }
+
+ frame->width = frame->grab_frame.width;
+ frame->height = frame->grab_frame.height;
+ }
+
+ if (frame->rgba == NULL) {
+ frame->rgba = (uint32_t *) calloc(frame->width * frame->height, sizeof(uint32_t));
+ if (frame->rgba == NULL) {
+ pthread_cond_broadcast(&this->grab_cond);
+ pthread_mutex_unlock(&this->grab_lock);
+ return;
+ }
+ }
+ if (frame->grab_frame.img == NULL) {
+ frame->grab_frame.img = (uint8_t *) calloc(frame->width * frame->height, 3);
+ if (frame->grab_frame.img == NULL) {
+ pthread_cond_broadcast(&this->grab_cond);
+ pthread_mutex_unlock(&this->grab_lock);
+ return;
+ }
+ }
+
+ uint32_t pitches = frame->width * sizeof(uint32_t);
+ VdpRect src_rect = { frame->grab_frame.crop_left, frame->grab_frame.crop_top, width+frame->grab_frame.crop_left, height+frame->grab_frame.crop_top };
+
+ if (frame->width != width || frame->height != height) {
+ st = VDP_STATUS_OK;
+ if (frame->render_surface == VDP_INVALID_HANDLE) {
+ frame->vdp_runtime_nr = this->vdp_runtime_nr;
+ st = vdp_output_surface_create(vdp_device, VDP_RGBA_FORMAT_B8G8R8A8, frame->width, frame->height, &frame->render_surface);
+ }
+ if (st == VDP_STATUS_OK) {
+ st = vdp_output_surface_render_output_surface(frame->render_surface, NULL, grab_surface, &src_rect, NULL, NULL, VDP_OUTPUT_SURFACE_RENDER_ROTATE_0);
+ if (st == VDP_STATUS_OK) {
+ st = vdp_output_surface_get_bits(frame->render_surface, NULL, &frame->rgba, &pitches);
+ if (st == VDP_STATUS_OK) {
+ if (!(frame->grab_frame.flags & XINE_GRAB_VIDEO_FRAME_FLAGS_CONTINUOUS)) {
+ st = vdp_output_surface_destroy(frame->render_surface);
+ if (st != VDP_STATUS_OK)
+ fprintf(stderr, "vo_vdpau: Can't destroy grab render output surface: %s\n", vdp_get_error_string (st));
+ frame->render_surface = VDP_INVALID_HANDLE;
+ }
+ } else
+ fprintf(stderr, "vo_vdpau: Can't get output surface bits for raw frame grabbing: %s\n", vdp_get_error_string (st));
+ } else
+ fprintf(stderr, "vo_vdpau: Can't render output surface for raw frame grabbing: %s\n", vdp_get_error_string (st));
+ } else
+ fprintf(stderr, "vo_vdpau: Can't create render output surface for raw frame grabbing: %s\n", vdp_get_error_string (st));
+ } else {
+ st = vdp_output_surface_get_bits(grab_surface, &src_rect, &frame->rgba, &pitches);
+ if (st != VDP_STATUS_OK)
+ fprintf(stderr, "vo_vdpau: Can't get output surface bits for raw frame grabbing: %s\n", vdp_get_error_string (st));
+ }
+
+ if (st == VDP_STATUS_OK)
+ frame->grab_frame.vpts = vpts;
+
+ pthread_cond_broadcast(&this->grab_cond);
+ }
+
+ pthread_mutex_unlock(&this->grab_lock);
+}
+
static void vdpau_display_frame (vo_driver_t *this_gen, vo_frame_t *frame_gen)
{
@@ -1811,6 +1939,7 @@ static void vdpau_display_frame (vo_driver_t *this_gen, vo_frame_t *frame_gen)
if ( st != VDP_STATUS_OK )
fprintf(stderr, "vo_vdpau: vdp_video_mixer_render error : %s\n", vdp_get_error_string( st ) );
+ vdpau_grab_current_output_surface( this, frame->vo_frame.vpts );
vdp_queue_get_time( vdp_queue, &current_time );
vdp_queue_display( vdp_queue, this->output_surface[this->current_output_surface], 0, 0, 0 ); /* display _now_ */
vdpau_shift_queue( this_gen );
@@ -1861,6 +1990,7 @@ static void vdpau_display_frame (vo_driver_t *this_gen, vo_frame_t *frame_gen)
if ( st != VDP_STATUS_OK )
fprintf(stderr, "vo_vdpau: vdp_video_mixer_render error : %s\n", vdp_get_error_string( st ) );
+ vdpau_grab_current_output_surface( this, frame->vo_frame.vpts );
vdp_queue_display( vdp_queue, this->output_surface[this->current_output_surface], 0, 0, 0 );
vdpau_shift_queue( this_gen );
}
@@ -1990,6 +2120,101 @@ static void vdpau_get_property_min_max (vo_driver_t *this_gen, int property, int
}
+/*
+ * functions for grabbing RGB images from displayed frames
+ */
+static void vdpau_dispose_grab_video_frame(xine_grab_video_frame_t *frame_gen)
+{
+ vdpau_grab_video_frame_t *frame = (vdpau_grab_video_frame_t *) frame_gen;
+ vdpau_driver_t *this = (vdpau_driver_t *) frame->vo_driver;
+
+ free(frame->grab_frame.img);
+ free(frame->rgba);
+ if (frame->render_surface != VDP_INVALID_HANDLE && frame->vdp_runtime_nr == this->vdp_runtime_nr) {
+ VdpStatus st;
+ st = vdp_output_surface_destroy(frame->render_surface);
+ if (st != VDP_STATUS_OK)
+ fprintf(stderr, "vo_vdpau: Can't destroy grab render output surface: %s\n", vdp_get_error_string (st) );
+ }
+ free(frame);
+}
+
+/*
+ * grab next displayed output surface.
+ * Note: This feature only supports grabbing of next displayed frame (implicit VO_GRAB_FRAME_FLAGS_WAIT_NEXT)
+ */
+static int vdpau_grab_grab_video_frame (xine_grab_video_frame_t *frame_gen) {
+ vdpau_grab_video_frame_t *frame = (vdpau_grab_video_frame_t *) frame_gen;
+ vdpau_driver_t *this = (vdpau_driver_t *) frame->vo_driver;
+ struct timeval tvnow, tvdiff, tvtimeout;
+ struct timespec ts;
+
+ /* calculate absolute timeout time */
+ tvdiff.tv_sec = frame->grab_frame.timeout / 1000;
+ tvdiff.tv_usec = frame->grab_frame.timeout % 1000;
+ tvdiff.tv_usec *= 1000;
+ gettimeofday(&tvnow, NULL);
+ timeradd(&tvnow, &tvdiff, &tvtimeout);
+ ts.tv_sec = tvtimeout.tv_sec;
+ ts.tv_nsec = tvtimeout.tv_usec;
+ ts.tv_nsec *= 1000;
+
+ pthread_mutex_lock(&this->grab_lock);
+
+ /* wait until other pending grab request is finished */
+ while (this->pending_grab_request) {
+ if (pthread_cond_timedwait(&this->grab_cond, &this->grab_lock, &ts) == ETIMEDOUT) {
+ pthread_mutex_unlock(&this->grab_lock);
+ return 1; /* no frame available */
+ }
+ }
+
+ this->pending_grab_request = frame;
+
+ /* wait until our request is finished */
+ while (this->pending_grab_request) {
+ if (pthread_cond_timedwait(&this->grab_cond, &this->grab_lock, &ts) == ETIMEDOUT) {
+ this->pending_grab_request = NULL;
+ pthread_mutex_unlock(&this->grab_lock);
+ return 1; /* no frame available */
+ }
+ }
+
+ pthread_mutex_unlock(&this->grab_lock);
+
+ if (frame->grab_frame.vpts == -1)
+ return -1; /* error happened */
+
+ /* convert ARGB image to RGB image */
+ uint32_t *src = frame->rgba;
+ uint8_t *dst = frame->grab_frame.img;
+ int n = frame->width * frame->height;
+ while (n--) {
+ uint32_t rgba = *src++;
+ *dst++ = (uint8_t)(rgba >> 16); /*R*/
+ *dst++ = (uint8_t)(rgba >> 8); /*G*/
+ *dst++ = (uint8_t)(rgba); /*B*/
+ }
+
+ return 0;
+}
+
+
+static xine_grab_video_frame_t * vdpau_new_grab_video_frame(vo_driver_t *this)
+{
+ vdpau_grab_video_frame_t *frame = calloc(1, sizeof(vdpau_grab_video_frame_t));
+ if (frame) {
+ frame->grab_frame.dispose = vdpau_dispose_grab_video_frame;
+ frame->grab_frame.grab = vdpau_grab_grab_video_frame;
+ frame->grab_frame.vpts = -1;
+ frame->grab_frame.timeout = XINE_GRAB_VIDEO_FRAME_DEFAULT_TIMEOUT;
+ frame->vo_driver = this;
+ frame->render_surface = VDP_INVALID_HANDLE;
+ }
+
+ return (xine_grab_video_frame_t *) frame;
+}
+
static int vdpau_gui_data_exchange (vo_driver_t *this_gen, int data_type, void *data)
{
@@ -2131,6 +2356,8 @@ static void vdpau_dispose (vo_driver_t *this_gen)
if ( (vdp_device != VDP_INVALID_HANDLE) && vdp_device_destroy )
vdp_device_destroy( vdp_device );
+ pthread_mutex_destroy(&this->grab_lock);
+ pthread_cond_destroy(&this->grab_cond);
pthread_mutex_destroy(&this->drawable_lock);
free (this);
}
@@ -2365,6 +2592,7 @@ static vo_driver_t *vdpau_open_plugin (video_driver_class_t *class_gen, const vo
this->vo_driver.gui_data_exchange = vdpau_gui_data_exchange;
this->vo_driver.dispose = vdpau_dispose;
this->vo_driver.redraw_needed = vdpau_redraw_needed;
+ this->vo_driver.new_grab_video_frame = vdpau_new_grab_video_frame;
this->surface_cleared_nr = 0;
@@ -2481,9 +2709,15 @@ static vo_driver_t *vdpau_open_plugin (video_driver_class_t *class_gen, const vo
st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_OUTPUT_SURFACE_RENDER_BITMAP_SURFACE , (void*)&vdp_output_surface_render_bitmap_surface );
if ( vdpau_init_error( st, "Can't get OUTPUT_SURFACE_RENDER_BITMAP_SURFACE proc address !!", &this->vo_driver, 1 ) )
return NULL;
+ st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_OUTPUT_SURFACE_RENDER_OUTPUT_SURFACE , (void*)&vdp_output_surface_render_output_surface );
+ if ( vdpau_init_error( st, "Can't get OUTPUT_SURFACE_RENDER_OUTPUT_SURFACE proc address !!", &this->vo_driver, 1 ) )
+ return NULL;
st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_OUTPUT_SURFACE_PUT_BITS_NATIVE , (void*)&vdp_output_surface_put_bits );
if ( vdpau_init_error( st, "Can't get VDP_FUNC_ID_OUTPUT_SURFACE_PUT_BITS_NATIVE proc address !!", &this->vo_driver, 1 ) )
return NULL;
+ st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_OUTPUT_SURFACE_GET_BITS_NATIVE , (void*)&vdp_output_surface_get_bits );
+ if ( vdpau_init_error( st, "Can't get VDP_FUNC_ID_OUTPUT_SURFACE_GET_BITS_NATIVE proc address !!", &this->vo_driver, 1 ) )
+ return NULL;
st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_VIDEO_MIXER_CREATE , (void*)&vdp_video_mixer_create );
if ( vdpau_init_error( st, "Can't get VIDEO_MIXER_CREATE proc address !!", &this->vo_driver, 1 ) )
return NULL;
@@ -2866,6 +3100,10 @@ static vo_driver_t *vdpau_open_plugin (video_driver_class_t *class_gen, const vo
this->vdp_runtime_nr = 1;
+ this->pending_grab_request = NULL;
+ pthread_mutex_init(&this->grab_lock, NULL);
+ pthread_cond_init(&this->grab_cond, NULL);
+
return &this->vo_driver;
}
diff --git a/src/xine-engine/Makefile.am b/src/xine-engine/Makefile.am
index 15553380a..b081a4f30 100644
--- a/src/xine-engine/Makefile.am
+++ b/src/xine-engine/Makefile.am
@@ -1,12 +1,14 @@
include $(top_srcdir)/misc/Makefile.common
include $(top_srcdir)/lib/Makefile.common
-AM_CFLAGS = $(DEFAULT_OCFLAGS) $(X_CFLAGS) $(FT2_CFLAGS) $(FONTCONFIG_CFLAGS) \
+AM_CFLAGS = -I$(top_builddir)/src/video_out $(DEFAULT_OCFLAGS) $(X_CFLAGS) $(FT2_CFLAGS) $(FONTCONFIG_CFLAGS) \
$(AVUTIL_CFLAGS) $(VISIBILITY_FLAG)
AM_CPPFLAGS = $(XDG_BASEDIR_CPPFLAGS) $(ZLIB_CPPFLAGS) -DXINE_LIBRARY_COMPILE
XINEUTILS_LIB = $(top_builddir)/src/xine-utils/libxineutils.la
+YUV_LIB = $(top_builddir)/src/video_out/libyuv2rgb.la
+
# FIXME: these are currently unused:
EXTRA_DIST = lrb.c lrb.h accel_vdpau.h accel_xvmc.h
@@ -33,11 +35,11 @@ libxine_la_SOURCES = xine.c metronom.c configfile.c buffer.c \
alphablend.c \
xine_private.h
-libxine_la_DEPENDENCIES = $(XINEUTILS_LIB) $(XDG_BASEDIR_DEPS) \
+libxine_la_DEPENDENCIES = $(XINEUTILS_LIB) $(YUV_LIB) $(XDG_BASEDIR_DEPS) \
$(pthread_dep) $(LIBXINEPOSIX) \
libxine-interface.la
libxine_la_LIBADD = $(PTHREAD_LIBS) $(DYNAMIC_LD_LIBS) $(LTLIBINTL) $(ZLIB_LIBS) \
- -lm $(XINEUTILS_LIB) $(LTLIBICONV) $(FT2_LIBS) $(FONTCONFIG_LIBS) \
+ -lm $(XINEUTILS_LIB) $(YUV_LIB) $(LTLIBICONV) $(FT2_LIBS) $(FONTCONFIG_LIBS) \
$(LIBXINEPOSIX) $(RT_LIBS) $(NET_LIBS) $(XDG_BASEDIR_LIBS) \
$(AVUTIL_LIBS)
@@ -60,6 +62,9 @@ clean-local:
$(XINEUTILS_LIB):
$(MAKE) -C $(top_builddir)/src/xine-utils libxineutils.la
+$(YUV_LIB):
+ $(MAKE) -C $(top_builddir)/src/video_out libyuv2rgb.la
+
if WIN32
install-exec-local:
cp -p $(DEF_FILE) $(DESTDIR)$(libdir)
diff --git a/src/xine-engine/post.c b/src/xine-engine/post.c
index d9b9fb209..30e61acd4 100644
--- a/src/xine-engine/post.c
+++ b/src/xine-engine/post.c
@@ -90,6 +90,16 @@ static vo_frame_t *post_video_get_last_frame(xine_video_port_t *port_gen) {
return frame;
}
+static xine_grab_video_frame_t *post_video_new_grab_video_frame(xine_video_port_t *port_gen) {
+ post_video_port_t *port = (post_video_port_t *)port_gen;
+ xine_grab_video_frame_t *frame;
+
+ if (port->port_lock) pthread_mutex_lock(port->port_lock);
+ frame = port->original_port->new_grab_video_frame(port->original_port);
+ if (port->port_lock) pthread_mutex_unlock(port->port_lock);
+ return frame;
+}
+
static void post_video_enable_ovl(xine_video_port_t *port_gen, int ovl_enable) {
post_video_port_t *port = (post_video_port_t *)port_gen;
@@ -223,6 +233,7 @@ post_video_port_t *_x_post_intercept_video_port(post_plugin_t *post, xine_video_
port->new_port.open = post_video_open;
port->new_port.get_frame = post_video_get_frame;
port->new_port.get_last_frame = post_video_get_last_frame;
+ port->new_port.new_grab_video_frame = post_video_new_grab_video_frame;
port->new_port.enable_ovl = post_video_enable_ovl;
port->new_port.close = post_video_close;
port->new_port.exit = post_video_exit;
diff --git a/src/xine-engine/video_out.c b/src/xine-engine/video_out.c
index bceb38a58..f348da3f5 100644
--- a/src/xine-engine/video_out.c
+++ b/src/xine-engine/video_out.c
@@ -49,6 +49,7 @@
#include <xine/video_out.h>
#include <xine/metronom.h>
#include <xine/xineutils.h>
+#include <yuv2rgb.h>
#define NUM_FRAME_BUFFERS 15
#define MAX_USEC_TO_SLEEP 20000
@@ -66,6 +67,24 @@
static vo_frame_t * crop_frame( xine_video_port_t *this_gen, vo_frame_t *img );
+typedef struct vos_grab_video_frame_s vos_grab_video_frame_t;
+struct vos_grab_video_frame_s {
+ xine_grab_video_frame_t grab_frame;
+
+ vos_grab_video_frame_t *next;
+ int finished;
+ xine_video_port_t *video_port;
+ vo_frame_t *vo_frame;
+ yuv2rgb_factory_t *yuv2rgb_factory;
+ yuv2rgb_t *yuv2rgb;
+ int vo_width, vo_height;
+ int grab_width, grab_height;
+ int y_stride, uv_stride;
+ int img_size;
+ uint8_t *img;
+};
+
+
typedef struct {
vo_frame_t *first;
vo_frame_t *last;
@@ -91,10 +110,13 @@ typedef struct {
img_buf_fifo_t *free_img_buf_queue;
img_buf_fifo_t *display_img_buf_queue;
- pthread_mutex_t last_frame_mutex;
- vo_frame_t *last_frame;
vo_frame_t *img_backup;
+ vo_frame_t *last_frame;
+ vos_grab_video_frame_t *pending_grab_request;
+ pthread_mutex_t grab_lock;
+ pthread_cond_t grab_cond;
+
uint32_t video_loop_running:1;
uint32_t video_opened:1;
@@ -331,6 +353,288 @@ static void vo_frame_dec_lock (vo_frame_t *img) {
pthread_mutex_unlock (&img->mutex);
}
+
+/*
+ * functions for grabbing RGB images from displayed frames
+ */
+static void vo_dispose_grab_video_frame(xine_grab_video_frame_t *frame_gen)
+{
+ vos_grab_video_frame_t *frame = (vos_grab_video_frame_t *) frame_gen;
+
+ if (frame->vo_frame)
+ vo_frame_dec_lock(frame->vo_frame);
+
+ if (frame->yuv2rgb)
+ frame->yuv2rgb->dispose(frame->yuv2rgb);
+
+ if (frame->yuv2rgb_factory)
+ frame->yuv2rgb_factory->dispose(frame->yuv2rgb_factory);
+
+ free(frame->img);
+ free(frame->grab_frame.img);
+ free(frame);
+}
+
+
+static int vo_grab_grab_video_frame (xine_grab_video_frame_t *frame_gen) {
+ vos_grab_video_frame_t *frame = (vos_grab_video_frame_t *) frame_gen;
+ vos_t *this = (vos_t *) frame->video_port;
+ vo_frame_t *vo_frame;
+ int format, y_stride, uv_stride;
+ uint8_t *base[3];
+
+ if (frame->grab_frame.flags & XINE_GRAB_VIDEO_FRAME_FLAGS_WAIT_NEXT) {
+ struct timeval tvnow, tvdiff, tvtimeout;
+ struct timespec ts;
+
+ /* calculate absolute timeout time */
+ tvdiff.tv_sec = frame->grab_frame.timeout / 1000;
+ tvdiff.tv_usec = frame->grab_frame.timeout % 1000;
+ tvdiff.tv_usec *= 1000;
+ gettimeofday(&tvnow, NULL);
+ timeradd(&tvnow, &tvdiff, &tvtimeout);
+ ts.tv_sec = tvtimeout.tv_sec;
+ ts.tv_nsec = tvtimeout.tv_usec;
+ ts.tv_nsec *= 1000;
+
+ pthread_mutex_lock(&this->grab_lock);
+
+ /* insert grab request into grab queue */
+ frame->next = this->pending_grab_request;
+ this->pending_grab_request = frame;
+
+ /* wait until our request is finished */
+ frame->finished = 0;
+ while (!frame->finished) {
+ if (pthread_cond_timedwait(&this->grab_cond, &this->grab_lock, &ts) == ETIMEDOUT) {
+ vos_grab_video_frame_t *prev = this->pending_grab_request;
+ while (prev) {
+ if (prev == frame) {
+ this->pending_grab_request = frame->next;
+ break;
+ } else if (prev->next == frame) {
+ prev->next = frame->next;
+ break;
+ }
+ prev = prev->next;
+ }
+ frame->next = NULL;
+ pthread_mutex_unlock(&this->grab_lock);
+ return 1; /* no frame available */
+ }
+ }
+
+ pthread_mutex_unlock(&this->grab_lock);
+
+ vo_frame = frame->vo_frame;
+ frame->vo_frame = NULL;
+ if (!vo_frame)
+ return -1; /* error happened */
+ } else {
+ pthread_mutex_lock(&this->grab_lock);
+
+ /* use last displayed frame */
+ vo_frame = this->last_frame;
+ if (!vo_frame) {
+ pthread_mutex_unlock(&this->grab_lock);
+ return 1; /* no frame available */
+ }
+ if (vo_frame->format != XINE_IMGFMT_YV12 && vo_frame->format != XINE_IMGFMT_YUY2 && !vo_frame->proc_provide_standard_frame_data) {
+ pthread_mutex_unlock(&this->grab_lock);
+ return -1; /* error happened */
+ }
+ vo_frame_inc_lock(vo_frame);
+ pthread_mutex_unlock(&this->grab_lock);
+ frame->grab_frame.vpts = vo_frame->vpts;
+ }
+
+ int width = vo_frame->width;
+ int height = vo_frame->height;
+
+ if (vo_frame->format == XINE_IMGFMT_YV12 || vo_frame->format == XINE_IMGFMT_YUY2) {
+ format = vo_frame->format;
+ y_stride = vo_frame->pitches[0];
+ uv_stride = vo_frame->pitches[1];
+ base[0] = vo_frame->base[0];
+ base[1] = vo_frame->base[1];
+ base[2] = vo_frame->base[2];
+ } else {
+ /* retrieve standard format image data from output driver */
+ xine_current_frame_data_t data;
+ memset(&data, 0, sizeof(data));
+ vo_frame->proc_provide_standard_frame_data(vo_frame, &data);
+ if (data.img_size > frame->img_size) {
+ free(frame->img);
+ frame->img_size = data.img_size;
+ frame->img = calloc(data.img_size, sizeof(uint8_t));
+ if (!frame->img) {
+ vo_frame_dec_lock(vo_frame);
+ return -1; /* error happened */
+ }
+ }
+ data.img = frame->img;
+ vo_frame->proc_provide_standard_frame_data(vo_frame, &data);
+ format = data.format;
+ if (format == XINE_IMGFMT_YV12) {
+ y_stride = width;
+ uv_stride = width / 2;
+ base[0] = data.img;
+ base[1] = data.img + width * height;
+ base[2] = data.img + width * height + width * height / 4;
+ } else { // XINE_IMGFMT_YUY2
+ y_stride = width * 2;
+ uv_stride = 0;
+ base[0] = data.img;
+ base[1] = NULL;
+ base[2] = NULL;
+ }
+ }
+
+ /* take cropping parameters into account */
+ int crop_left = (vo_frame->crop_left + frame->grab_frame.crop_left) & ~1;
+ int crop_right = (vo_frame->crop_right + frame->grab_frame.crop_right) & ~1;
+ int crop_top = vo_frame->crop_top + frame->grab_frame.crop_top;
+ int crop_bottom = vo_frame->crop_bottom + frame->grab_frame.crop_bottom;
+
+ if (crop_left || crop_right || crop_top || crop_bottom) {
+ if ((width - crop_left - crop_right) >= 8)
+ width = width - crop_left - crop_right;
+ else
+ crop_left = crop_right = 0;
+
+ if ((height - crop_top - crop_bottom) >= 8)
+ height = height - crop_top - crop_bottom;
+ else
+ crop_top = crop_bottom = 0;
+
+ if (format == XINE_IMGFMT_YV12) {
+ base[0] += crop_top * y_stride + crop_left;
+ base[1] += crop_top/2 * uv_stride + crop_left/2;
+ base[2] += crop_top/2 * uv_stride + crop_left/2;
+ } else { // XINE_IMGFMT_YUY2
+ base[0] += crop_top * y_stride + crop_left*2;
+ }
+ }
+
+ /* if caller does not specify frame size we return the actual size of grabbed frame */
+ if (frame->grab_frame.width <= 0)
+ frame->grab_frame.width = width;
+ if (frame->grab_frame.height <= 0)
+ frame->grab_frame.height = height;
+
+ /* allocate grab frame image buffer */
+ if (frame->grab_frame.width != frame->grab_width || frame->grab_frame.height != frame->grab_height) {
+ free(frame->grab_frame.img);
+ frame->grab_frame.img = NULL;
+ }
+ if (frame->grab_frame.img == NULL) {
+ frame->grab_frame.img = (uint8_t *) calloc(frame->grab_frame.width * frame->grab_frame.height, 3);
+ if (frame->grab_frame.img == NULL) {
+ vo_frame_dec_lock(vo_frame);
+ return -1; /* error happened */
+ }
+ }
+
+ /* initialize yuv2rgb factory */
+ if (!frame->yuv2rgb_factory) {
+ frame->yuv2rgb_factory = yuv2rgb_factory_init(MODE_24_RGB, 0, NULL);
+ if (!frame->yuv2rgb_factory) {
+ vo_frame_dec_lock(vo_frame);
+ return -1; /* error happened */
+ }
+ frame->yuv2rgb_factory->matrix_coefficients = 1; /* ITU-R Rec. 709 (1990) */
+ frame->yuv2rgb_factory->set_csc_levels (frame->yuv2rgb_factory, 0, 128, 128);
+ }
+
+ /* retrieve a yuv2rgb converter */
+ if (!frame->yuv2rgb) {
+ frame->yuv2rgb = frame->yuv2rgb_factory->create_converter(frame->yuv2rgb_factory);
+ if (!frame->yuv2rgb) {
+ vo_frame_dec_lock(vo_frame);
+ return -1; /* error happened */
+ }
+ }
+
+ /* configure yuv2rgb converter */
+ if (width != frame->vo_width ||
+ height != frame->vo_height ||
+ frame->grab_frame.width != frame->grab_width ||
+ frame->grab_frame.height != frame->grab_height ||
+ y_stride != frame->y_stride ||
+ uv_stride != frame->uv_stride) {
+ frame->vo_width = width;
+ frame->vo_height = height;
+ frame->grab_width = frame->grab_frame.width;
+ frame->grab_height = frame->grab_frame.height;
+ frame->y_stride = y_stride;
+ frame->uv_stride = uv_stride;
+ frame->yuv2rgb->configure(frame->yuv2rgb, width, height, y_stride, uv_stride, frame->grab_width, frame->grab_height, frame->grab_width * 3);
+ }
+
+ /* convert YUV to RGB image taking possible scaling into account */
+ /* FIXME: have to swap U and V planes to get correct colors for YV12 frames?? */
+ if(format == XINE_IMGFMT_YV12)
+ frame->yuv2rgb->yuv2rgb_fun(frame->yuv2rgb, frame->grab_frame.img, base[0], base[2], base[1]);
+ else
+ frame->yuv2rgb->yuy22rgb_fun(frame->yuv2rgb, frame->grab_frame.img, base[0]);
+
+ vo_frame_dec_lock(vo_frame);
+ return 0;
+}
+
+
+static xine_grab_video_frame_t *vo_new_grab_video_frame(xine_video_port_t *this_gen)
+{
+ vos_grab_video_frame_t *frame = calloc(1, sizeof(vos_grab_video_frame_t));
+ if (frame) {
+ frame->grab_frame.dispose = vo_dispose_grab_video_frame;
+ frame->grab_frame.grab = vo_grab_grab_video_frame;
+ frame->grab_frame.vpts = -1;
+ frame->grab_frame.timeout = XINE_GRAB_VIDEO_FRAME_DEFAULT_TIMEOUT;
+ frame->video_port = this_gen;
+ }
+ return (xine_grab_video_frame_t *)frame;
+}
+
+
+static void vo_grab_current_frame (vos_t *this, vo_frame_t *vo_frame, int64_t vpts)
+{
+ pthread_mutex_lock(&this->grab_lock);
+
+ /* hold current frame for snapshot feature */
+ if (this->last_frame)
+ vo_frame_dec_lock(this->last_frame);
+ vo_frame_inc_lock(vo_frame);
+ this->last_frame = vo_frame;
+
+ /* process grab queue */
+ vos_grab_video_frame_t *frame = this->pending_grab_request;
+ if (frame) {
+ while (frame) {
+ if (frame->vo_frame)
+ vo_frame_dec_lock(frame->vo_frame);
+ frame->vo_frame = NULL;
+
+ if (vo_frame->format == XINE_IMGFMT_YV12 || vo_frame->format == XINE_IMGFMT_YUY2 || vo_frame->proc_provide_standard_frame_data) {
+ vo_frame_inc_lock(vo_frame);
+ frame->vo_frame = vo_frame;
+ frame->grab_frame.vpts = vpts;
+ }
+
+ frame->finished = 1;
+ vos_grab_video_frame_t *next = frame->next;
+ frame->next = NULL;
+ frame = next;
+ }
+
+ this->pending_grab_request = NULL;
+ pthread_cond_broadcast(&this->grab_cond);
+ }
+
+ pthread_mutex_unlock(&this->grab_lock);
+}
+
+
/* call vo_driver->proc methods for the entire frame */
static void vo_frame_driver_proc(vo_frame_t *img)
{
@@ -1038,16 +1342,7 @@ static void overlay_and_display_frame (vos_t *this,
this->video_loop_running && this->overlay_enabled);
}
- /* hold current frame for snapshot feature */
- pthread_mutex_lock(&this->last_frame_mutex);
-
- if( this->last_frame ) {
- vo_frame_dec_lock( this->last_frame );
- }
- vo_frame_inc_lock( img );
- this->last_frame = img;
-
- pthread_mutex_unlock(&this->last_frame_mutex);
+ vo_grab_current_frame (this, img, vpts);
this->driver->display_frame (this->driver, img);
@@ -1329,12 +1624,13 @@ static void *video_out_loop (void *this_gen) {
vo_frame_dec_lock( this->img_backup );
this->img_backup = NULL;
}
+
+ pthread_mutex_lock(&this->grab_lock);
if (this->last_frame) {
- pthread_mutex_lock(&this->last_frame_mutex);
vo_frame_dec_lock( this->last_frame );
this->last_frame = NULL;
- pthread_mutex_unlock(&this->last_frame_mutex);
}
+ pthread_mutex_unlock(&this->grab_lock);
return NULL;
}
@@ -1705,11 +2001,12 @@ static void vo_exit (xine_video_port_t *this_gen) {
free (this->free_img_buf_queue);
free (this->display_img_buf_queue);
- pthread_mutex_destroy(&this->last_frame_mutex);
-
pthread_cond_destroy(&this->trigger_drawing_cond);
pthread_mutex_destroy(&this->trigger_drawing_mutex);
+ pthread_mutex_destroy(&this->grab_lock);
+ pthread_cond_destroy(&this->grab_cond);
+
free (this);
}
@@ -1717,13 +2014,13 @@ static vo_frame_t *vo_get_last_frame (xine_video_port_t *this_gen) {
vos_t *this = (vos_t *) this_gen;
vo_frame_t *last_frame;
- pthread_mutex_lock(&this->last_frame_mutex);
+ pthread_mutex_lock(&this->grab_lock);
last_frame = this->last_frame;
if (last_frame)
vo_frame_inc_lock(last_frame);
- pthread_mutex_unlock(&this->last_frame_mutex);
+ pthread_mutex_unlock(&this->grab_lock);
return last_frame;
}
@@ -1887,6 +2184,7 @@ xine_video_port_t *_x_vo_new_port (xine_t *xine, vo_driver_t *driver, int grabon
this->vo.open = vo_open;
this->vo.get_frame = vo_get_frame;
this->vo.get_last_frame = vo_get_last_frame;
+ this->vo.new_grab_video_frame = vo_new_grab_video_frame;
this->vo.close = vo_close;
this->vo.exit = vo_exit;
this->vo.get_capabilities = vo_get_capabilities;
@@ -1906,10 +2204,13 @@ xine_video_port_t *_x_vo_new_port (xine_t *xine, vo_driver_t *driver, int grabon
this->display_img_buf_queue = vo_new_img_buf_queue ();
this->video_loop_running = 0;
- pthread_mutex_init(&this->last_frame_mutex, NULL);
- this->last_frame = NULL;
this->img_backup = NULL;
+ this->last_frame = NULL;
+ this->pending_grab_request = NULL;
+ pthread_mutex_init(&this->grab_lock, NULL);
+ pthread_cond_init(&this->grab_cond, NULL);
+
this->overlay_source = _x_video_overlay_new_manager(xine);
this->overlay_source->init (this->overlay_source);
this->overlay_enabled = 1;
diff --git a/src/xine-engine/xine.c b/src/xine-engine/xine.c
index 73cd9ae7e..6e5001f35 100644
--- a/src/xine-engine/xine.c
+++ b/src/xine-engine/xine.c
@@ -2219,6 +2219,17 @@ int xine_get_current_frame (xine_stream_t *stream, int *width, int *height,
return result;
}
+xine_grab_video_frame_t* xine_new_grab_video_frame (xine_stream_t *stream) {
+ xine_grab_video_frame_t *frame;
+
+ if (stream->video_out->driver->new_grab_video_frame)
+ frame = stream->video_out->driver->new_grab_video_frame(stream->video_out->driver);
+ else
+ frame = stream->video_out->new_grab_video_frame(stream->video_out);
+
+ return frame;
+}
+
int xine_get_spu_lang (xine_stream_t *stream, int channel, char *lang) {
/* Ask the demuxer first (e.g. TS extracts this information from