Porting the SDL+VLC example (and vaapi/dxva and h264 and...)

Hello all!
I’m trying to port the sdl+vlc example
(http://wiki.videolan.org/LibVLC_SampleCode_SDL) to SDL 1.3 apis
I’m trying to use this code to decode some h264 videos and to do some
compositing operation on it live.

So far i’ve got this (not sure whether SDL_LockMutex() should be removed or not):

#ifdef _WIN32
#include "SDL.h"
#else
#include "SDL/SDL.h"
#endif

#include "vlc/vlc.h"
#include "assert.h"

/* Shared state passed as the opaque pointer to the libVLC video callbacks. */
struct ctx {
SDL_mutex *mutex; /* currently unused: the Lock/UnlockMutex calls below are commented out */
SDL_Texture *tex; /* streaming ARGB8888 texture that libVLC decodes into */
};

/* libVLC "lock" callback: runs on a libVLC decoder thread before each
 * frame is written; must point *p_pixels at writable frame memory.
 * Returns the picture identifier (not needed here, so NULL). */
static void *lock (void *data, void **p_pixels) {
    struct ctx *ctx = data;

    // SDL_LockMutex(ctx->mutex);
    /* Bytes per row for a 1920-wide ARGB8888 frame (1920 * 4 = 7680,
     * matching the pitch seen in the backtrace). SDL_LockTexture
     * overwrites this with the texture's actual pitch anyway. */
    int pitch = 1920 * 4;
    SDL_LockTexture(ctx->tex, NULL, p_pixels, &pitch);
    return NULL; /* picture identifier, not needed here */
}

/* libVLC "unlock" callback: the decoder has finished writing the frame.
 * Runs on a libVLC thread, NOT the SDL main thread. */
static void unlock (void *data, void *id, void *const *p_pixels) {
struct ctx *ctx = data;

/* NOTE(review): with the opengl renderer, SDL_UnlockTexture issues GL
 * calls (GL_UpdateTexture in the backtrace) from this non-main thread;
 * GL contexts are typically bound to one thread, which is the likely
 * cause of the Linux segfault — confirm renderer thread-safety. */
SDL_UnlockTexture(ctx->tex);

// SDL_UnlockMutex(ctx->mutex);
assert(id == NULL); /* picture identifier, not needed here */
}

// VLC wants to display the video
/* Called once per decoded frame, on a libVLC thread. Intentionally empty:
 * the main loop presents the texture itself. A cleaner design (suggested
 * later in this thread) is to push an SDL_USEREVENT from here and only
 * RenderCopy/Present when that event arrives. */
static void display (void *data, void *id) {
struct ctx *ctx = data;
assert(id == NULL);
}

int main(int argc, char** argv) {

    /* Fixed output size; the libVLC format below must match the texture. */
    const int video_w = 1920;
    const int video_h = 1080;

    libvlc_instance_t *libvlc;
    libvlc_media_t *m;
    libvlc_media_player_t *mp;
    char const *vlc_argv[] =
    {
        "--no-audio", /* skip any audio track */
        "--no-xlib", /* tell VLC to not use Xlib */
    };
    int vlc_argc = sizeof(vlc_argv) / sizeof(*vlc_argv);

    int pause = 0;
    int action = 0;
    int done = 0;
    int fullscreen = 0;
    SDL_Event event;
    SDL_DisplayMode mode;
    struct ctx ctx;

    if (argc < 2) {
        fprintf(stderr, "Usage: %s <filename>\n", argv[0]);
        return -1;
    }

    // Initialize SDL.
    if (SDL_Init(SDL_INIT_VIDEO) < 0)
        return -1;

    SDL_GetCurrentDisplayMode(0, &mode);
    SDL_Window *window = SDL_CreateWindow("Window",
            SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, mode.w, mode.h,
            SDL_WINDOW_SHOWN | SDL_WINDOW_BORDERLESS);
    if (!window) {
        fprintf(stderr, "SDL_CreateWindow: %s\n", SDL_GetError());
        SDL_Quit();
        return -1;
    }
    SDL_Renderer *renderer = SDL_CreateRenderer(window, -1,
            SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
    if (!renderer) {
        fprintf(stderr, "SDL_CreateRenderer: %s\n", SDL_GetError());
        SDL_DestroyWindow(window);
        SDL_Quit();
        return -1;
    }

    ctx.mutex = SDL_CreateMutex();
    ctx.tex = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_ARGB8888,
            SDL_TEXTUREACCESS_STREAMING, video_w, video_h);
    if (!ctx.tex) {
        fprintf(stderr, "SDL_CreateTexture: %s\n", SDL_GetError());
        SDL_DestroyMutex(ctx.mutex);
        SDL_DestroyRenderer(renderer);
        SDL_DestroyWindow(window);
        SDL_Quit();
        return -1;
    }

    SDL_SetRenderDrawColor(renderer, 0, 0, 0, 0);
    SDL_RenderClear(renderer);

    // Initialise libVLC
    libvlc = libvlc_new(vlc_argc, vlc_argv);
    m = libvlc_media_new_path(libvlc, argv[1]);
    mp = libvlc_media_player_new_from_media(m);

    /* NOTE(review): lock/unlock/display run on libVLC threads; with the
     * opengl renderer SDL_UnlockTexture then makes GL calls off the main
     * thread, which is unsafe on most drivers — likely the reported Linux
     * crash. A safer design decodes into a plain buffer and uploads on
     * the main thread (e.g. triggered by an SDL_USEREVENT from display). */
    libvlc_video_set_callbacks(mp, lock, unlock, display, &ctx);
    /* "RV32" = 32-bit RGB; pitch is width * 4 bytes per row. */
    libvlc_video_set_format(mp, "RV32", video_w, video_h, video_w * 4);
    libvlc_media_player_play(mp);

    while (!done) {
        action = 0;

        /* Keys: enter (fullscreen), space (pause), escape (quit) */
        while (SDL_PollEvent(&event)) {
            switch (event.type) {
                case SDL_QUIT:
                    done = 1;
                    break;
                case SDL_KEYDOWN:
                    action = event.key.keysym.sym;
                    break;
                default:
                    break;
            }
        }

        switch (action) {
            case SDLK_ESCAPE:
                done = 1;
                break;
            case SDLK_RETURN:
                fullscreen = !fullscreen;
                SDL_SetWindowFullscreen(window, fullscreen);
                break;
            case ' ':
                pause = !pause;
                libvlc_media_player_set_pause(mp, pause);
                break;
            default:
                break;
        }
        SDL_RenderCopy(renderer, ctx.tex, NULL, NULL);
        SDL_RenderPresent(renderer);
        SDL_Delay(10);
    }

    /* Stop playback FIRST so the callbacks can no longer touch the
     * texture, then release VLC objects, then SDL resources. */
    libvlc_media_player_stop(mp);
    libvlc_media_player_release(mp);
    libvlc_media_release(m);
    libvlc_release(libvlc);

    SDL_DestroyTexture(ctx.tex);
    SDL_DestroyMutex(ctx.mutex);
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}

this works fine on Windows, i can see the video and render with the
new apis; it works using the direct3d and software renderer, with the
opengl renderer i see only a black screen.
However on Linux, using the opengl renderer, it gets a segfault and
crashes at the first frame or so (works fine with software renderer).

Program received signal SIGSEGV, Segmentation fault.
[Switching to Thread 0x7fffe2da4700 (LWP 2995)]
0x00007ffff57e3629 in glGetError () from /usr/lib/nvidia-current/libGL.so.1
(gdb) bt
#0 0x00007ffff57e3629 in glGetError () from /usr/lib/nvidia-current/libGL.so.1
#1 0x00007ffff7b50533 in GL_UpdateTexture (renderer=, texture=0x88bda0, rect=0x8abfc4, pixels=0x7ffff2c29010,
pitch=7680) at src/render/opengl/SDL_render_gl.c:546
#2 0x00007ffff7b4c197 in SDL_UnlockTexture (texture=0x88bda0) at
src/render/SDL_render.c:732
#3 0x00000000004010d3 in unlock (data=0x7fffffffe0d0, id=0x0,
p_pixels=0x7fffe2da3bf0) at sdl13+vlc.c:26
#4 0x00007fffe2aa0c66 in Unlock (picture=) at vmem.c:353
#5 0x00007ffff6c48532 in vout_display_Display (vout=0xa94f50,
now=, deadline=) at
…/include/vlc_vout_wrapper.h:58
#6 ThreadDisplayRenderPicture (vout=0xa94f50, now=, deadline=) at
video_output/video_output.c:1040
#7 ThreadDisplayPicture (vout=0xa94f50, now=,
deadline=) at video_output/video_output.c:1113
#8 0x00007ffff6c48ffd in ThreadManage (object=)
at video_output/video_output.c:1125
#9 Thread (object=) at video_output/video_output.c:1559
#10 0x00007ffff6ed4971 in start_thread (arg=) at
pthread_create.c:304
#11 0x00007ffff765792d in clone () at
…/sysdeps/unix/sysv/linux/x86_64/clone.S:112
#12 0x0000000000000000 in ?? ()

Am I using the libraries wrong? Is there some logic flaw (besides
hardcoding video width and height)?
Also if i add “–ffmpeg-hw” to the vlc parameters, in Windows works
fine, in linux i get [0xfce650] avcodec decoder warning: Ignoring VA
API
Is this standard behaviour? Is it normal that (under windows) when i
use vlc directly and hw acceleration i see that the cpu is completely
offloaded, while with this test program and hw acceleration the cpu is
loaded to a 10%?

Thanks for any help :slight_smile:
Vittorio

I’m trying to port the sdl+vlc example

(http://wiki.videolan.org/LibVLC_SampleCode_SDL) to SDL 1.3 apis
I’m trying to use this code to decode some h264 videos and to do some
compositing operation on it live.

I’ve never used the VLC library but I always use avcodec/format directly,
but I don’t think that thread approach “libvlc” seems to use fits very well
with SDL model.

Maybe you are allowed to lock/unlock textures in another thread (I’m not
sure about this), certainly you cannot display or present your rendering in
the “display” VLC callback.

A better idea than display the texture 100 times per sec would be to attach
an SDL_USEREVENT in the “display” VLC callback and to do RenderCopy/Present
only when that event arrives.

This will also remove for sure the need for the SDL_Mutex (libVLC will
probably do: lock, draw, unlock, display).

Also I don’t understand why you are creating a RGB8888 surface, I’m quite
sure libVLC will need the cpu or the SSE unit to convert the YV12 frames to
RGB, better leave that work to the shaders Sam recently added to SDL :)–
Ing. Gabriele Greco, DARTS Engineering
Tel: +39-0105761240 Fax: +39-0105760224
s-mail: Via G.T. Invrea 14 - 16129 GENOVA (ITALY)

A better idea than display the texture 100 times per sec would be to attach
an SDL_USEREVENT in the “display” VLC callback and to do RenderCopy/Present
only when that event arrives.
This will also remove for sure the need for the SDL_Mutex (libVLC will
probably do: lock, draw, unlock, display).

thanks for the tip; i’ll do some test with it

Also I don’t understand why you are creating a RGB8888 surface, I’m quite
sure libVLC will need the cpu or the SSE unit to convert the YV12 frames to
RGB, better leave that work to the shaders Sam recently added to SDL :slight_smile:

I might be mistaken, but i thought that i read on the mailing list
that SDL_PIXELFORMAT_ARGB8888 was the fastest pixel format available
for all renderers! Plus vlc always performs some kind of chroma
conversion before handing out the decoded frame (unfortunately)…

VittorioOn Tue, Feb 15, 2011 at 12:11 PM, Gabriele Greco <gabriele.greco at darts.it> wrote:

I’ve checked my program with the latest snapshot but the crash of the
opengl renderer is still there
Should i open a bug in bugzilla for better tracking?
thanks
Vittorio

Hello all!
I’m trying to port the sdl+vlc example
(http://wiki.videolan.org/LibVLC_SampleCode_SDL) to SDL 1.3 apis
I’m trying to use this code to decode some h264 videos and to do some
compositing operation on it live.

[cut]On Tue, Feb 15, 2011 at 11:55 AM, Vittorio G. <vitto.giova at yahoo.it> wrote:

Program received signal SIGSEGV, Segmentation fault.
[Switching to Thread 0x7fffe2da4700 (LWP 2995)]
0x00007ffff57e3629 in glGetError () from /usr/lib/nvidia-current/libGL.so.1
(gdb) bt
#0 ?0x00007ffff57e3629 in glGetError () from /usr/lib/nvidia-current/libGL.so.1
#1 ?0x00007ffff7b50533 in GL_UpdateTexture (renderer=, texture=0x88bda0, rect=0x8abfc4, pixels=0x7ffff2c29010,
pitch=7680) at src/render/opengl/SDL_render_gl.c:546
#2 ?0x00007ffff7b4c197 in SDL_UnlockTexture (texture=0x88bda0) at
src/render/SDL_render.c:732
#3 ?0x00000000004010d3 in unlock (data=0x7fffffffe0d0, id=0x0,
p_pixels=0x7fffe2da3bf0) at sdl13+vlc.c:26
#4 ?0x00007fffe2aa0c66 in Unlock (picture=) at vmem.c:353
#5 ?0x00007ffff6c48532 in vout_display_Display (vout=0xa94f50,
now=, deadline=) at
…/include/vlc_vout_wrapper.h:58
#6 ?ThreadDisplayRenderPicture (vout=0xa94f50, now=, deadline=) at
video_output/video_output.c:1040
#7 ?ThreadDisplayPicture (vout=0xa94f50, now=,
deadline=) at video_output/video_output.c:1113
#8 ?0x00007ffff6c48ffd in ThreadManage (object=)
at video_output/video_output.c:1125
#9 ?Thread (object=) at video_output/video_output.c:1559
#10 0x00007ffff6ed4971 in start_thread (arg=) at
pthread_create.c:304
#11 0x00007ffff765792d in clone () at
…/sysdeps/unix/sysv/linux/x86_64/clone.S:112
#12 0x0000000000000000 in ?? ()

Yes please! :)On Wed, Feb 16, 2011 at 5:11 AM, Vittorio G. <vitto.giova at yahoo.it> wrote:

I’ve checked my program with the latest snapshot but the crash of the
opengl renderer is still there
Should i open a bug in bugzilla for better tracking?
thanks
Vittorio

On Tue, Feb 15, 2011 at 11:55 AM, Vittorio G. <vitto.giova at yahoo.it> wrote:

Hello all!
I’m trying to port the sdl+vlc example
(http://wiki.videolan.org/LibVLC_SampleCode_SDL) to SDL 1.3 apis
I’m trying to use this code to decode some h264 videos and to do some
compositing operation on it live.

[cut]

Program received signal SIGSEGV, Segmentation fault.
[Switching to Thread 0x7fffe2da4700 (LWP 2995)]
0x00007ffff57e3629 in glGetError () from
/usr/lib/nvidia-current/libGL.so.1
(gdb) bt
#0 0x00007ffff57e3629 in glGetError () from
/usr/lib/nvidia-current/libGL.so.1
#1 0x00007ffff7b50533 in GL_UpdateTexture (renderer=, texture=0x88bda0, rect=0x8abfc4, pixels=0x7ffff2c29010,
pitch=7680) at src/render/opengl/SDL_render_gl.c:546
#2 0x00007ffff7b4c197 in SDL_UnlockTexture (texture=0x88bda0) at
src/render/SDL_render.c:732
#3 0x00000000004010d3 in unlock (data=0x7fffffffe0d0, id=0x0,
p_pixels=0x7fffe2da3bf0) at sdl13+vlc.c:26
#4 0x00007fffe2aa0c66 in Unlock (picture=) at
vmem.c:353
#5 0x00007ffff6c48532 in vout_display_Display (vout=0xa94f50,
now=, deadline=) at
…/include/vlc_vout_wrapper.h:58
#6 ThreadDisplayRenderPicture (vout=0xa94f50, now=, deadline=) at
video_output/video_output.c:1040
#7 ThreadDisplayPicture (vout=0xa94f50, now=,
deadline=) at video_output/video_output.c:1113
#8 0x00007ffff6c48ffd in ThreadManage (object=)
at video_output/video_output.c:1125
#9 Thread (object=) at
video_output/video_output.c:1559
#10 0x00007ffff6ed4971 in start_thread (arg=) at
pthread_create.c:304
#11 0x00007ffff765792d in clone () at
…/sysdeps/unix/sysv/linux/x86_64/clone.S:112
#12 0x0000000000000000 in ?? ()


SDL mailing list
SDL at lists.libsdl.org
http://lists.libsdl.org/listinfo.cgi/sdl-libsdl.org


-Sam Lantinga, Founder and CEO, Galaxy Gameworks