Cleanup xcomposite_drm
parent 2f67083915
commit 29237c7116
@@ -40,3 +40,4 @@ Fix constant framerate not working properly on amd/intel because capture framera
JPEG color range on amd seems to produce too bright video with h264 but not hevc, why?

Support recording screen/monitor on amd/intel.
Better configure vaapi. The file size is too large.
@@ -209,7 +209,7 @@ static int gsr_capture_xcomposite_cuda_start(gsr_capture *cap, AVCodecContext *v
video_codec_context->width = cap_xcomp->texture_size.x;
video_codec_context->height = cap_xcomp->texture_size.y;

-if(cap_xcomp->params.region_size.x > 0 && cap_xcomp->params.region_size.y) {
+if(cap_xcomp->params.region_size.x > 0 && cap_xcomp->params.region_size.y > 0) {
video_codec_context->width = max_int(2, cap_xcomp->params.region_size.x & ~1);
video_codec_context->height = max_int(2, cap_xcomp->params.region_size.y & ~1);
}
@@ -278,22 +278,6 @@ static void gsr_capture_xcomposite_cuda_tick(gsr_capture *cap, AVCodecContext *v

cap_xcomp->egl.glClear(GL_COLOR_BUFFER_BIT);

-if(!cap_xcomp->created_hw_frame) {
-cap_xcomp->created_hw_frame = true;
-CUcontext old_ctx;
-cap_xcomp->cuda.cuCtxPushCurrent_v2(cap_xcomp->cuda.cu_ctx);

-if(av_hwframe_get_buffer(video_codec_context->hw_frames_ctx, *frame, 0) < 0) {
-fprintf(stderr, "gsr error: gsr_capture_xcomposite_cuda_tick: av_hwframe_get_buffer failed\n");
-cap_xcomp->should_stop = true;
-cap_xcomp->stop_is_error = true;
-cap_xcomp->cuda.cuCtxPopCurrent_v2(&old_ctx);
-return;
-}

-cap_xcomp->cuda.cuCtxPopCurrent_v2(&old_ctx);
-}

if(!cap_xcomp->params.follow_focused && XCheckTypedWindowEvent(cap_xcomp->dpy, cap_xcomp->window, DestroyNotify, &cap_xcomp->xev)) {
cap_xcomp->should_stop = true;
cap_xcomp->stop_is_error = false;
@@ -351,7 +335,7 @@ static void gsr_capture_xcomposite_cuda_tick(gsr_capture *cap, AVCodecContext *v
}

const double window_resize_timeout = 1.0; // 1 second
-if(cap_xcomp->window_resized && clock_get_monotonic_seconds() - cap_xcomp->window_resize_timer >= window_resize_timeout) {
+if(!cap_xcomp->created_hw_frame || (cap_xcomp->window_resized && clock_get_monotonic_seconds() - cap_xcomp->window_resize_timer >= window_resize_timeout)) {
cap_xcomp->window_resized = false;
if(window_texture_on_resize(&cap_xcomp->window_texture) != 0) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_cuda_tick: window_texture_on_resize failed\n");
@@ -371,6 +355,8 @@ static void gsr_capture_xcomposite_cuda_tick(gsr_capture *cap, AVCodecContext *v
cap_xcomp->texture_size.x = min_int(video_codec_context->width, max_int(2, cap_xcomp->texture_size.x & ~1));
cap_xcomp->texture_size.y = min_int(video_codec_context->height, max_int(2, cap_xcomp->texture_size.y & ~1));

+if(!cap_xcomp->created_hw_frame) {
+cap_xcomp->created_hw_frame = true;
av_frame_free(frame);
*frame = av_frame_alloc();
if(!frame) {
@@ -394,6 +380,7 @@ static void gsr_capture_xcomposite_cuda_tick(gsr_capture *cap, AVCodecContext *v
cap_xcomp->stop_is_error = true;
return;
}
+}

// Clear texture with black background because the source texture (window_texture_get_opengl_texture_id(&cap_xcomp->window_texture))
// might be smaller than cap_xcomp->target_texture_id
@@ -432,7 +419,7 @@ static int gsr_capture_xcomposite_cuda_capture(gsr_capture *cap, AVFrame *frame)
static bool error_shown = false;
if(!error_shown) {
error_shown = true;
-fprintf(stderr, "Error: glCopyImageSubData failed, gl error: %d\n", err);
+fprintf(stderr, "gsr error: gsr_capture_xcomposite_cuda_capture: glCopyImageSubData failed, gl error: %d\n", err);
}
}
}
@@ -466,11 +453,17 @@ static int gsr_capture_xcomposite_cuda_capture(gsr_capture *cap, AVFrame *frame)
}

static void gsr_capture_xcomposite_cuda_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
+gsr_capture_xcomposite_cuda *cap_xcomp = cap->priv;
if(cap->priv) {
gsr_capture_xcomposite_cuda_stop(cap, video_codec_context);
free(cap->priv);
cap->priv = NULL;
}
+if(cap_xcomp->dpy) {
+// TODO: This causes a crash, why? maybe some other library dlclose xlib and that also happened to unload this???
+//XCloseDisplay(cap_xcomp->dpy);
+cap_xcomp->dpy = NULL;
+}
free(cap);
}

@@ -4,23 +4,26 @@
#include "../../include/time.h"
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <assert.h>
#include <X11/Xlib.h>
#include <X11/extensions/Xcomposite.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_vaapi.h>
#include <libavutil/frame.h>
#include <libavcodec/avcodec.h>
//#include <drm_fourcc.h>
#include <assert.h>
/* TODO: Proper error checks and cleanups */

typedef struct {
gsr_capture_xcomposite_drm_params params;
Display *dpy;
XEvent xev;
bool should_stop;
bool stop_is_error;
bool window_resized;
bool created_hw_frame;
bool follow_focused_initialized;

vec2i window_pos;
Window window;
vec2i window_size;
vec2i texture_size;
double window_resize_timer;
@@ -36,18 +39,40 @@ typedef struct {
int32_t pitch;
int32_t offset;

unsigned int target_textures[2];

VADisplay va_dpy;
VAConfigID config_id;
VAContextID context_id;
VASurfaceID input_surface;
VABufferID buffer_id;
VARectangle output_region;

Atom net_active_window_atom;
} gsr_capture_xcomposite_drm;

static int max_int(int a, int b) {
return a > b ? a : b;
}

static int min_int(int a, int b) {
return a < b ? a : b;
}

static void gsr_capture_xcomposite_drm_stop(gsr_capture *cap, AVCodecContext *video_codec_context);

static Window get_focused_window(Display *display, Atom net_active_window_atom) {
Atom type;
int format = 0;
unsigned long num_items = 0;
unsigned long bytes_after = 0;
unsigned char *properties = NULL;
if(XGetWindowProperty(display, DefaultRootWindow(display), net_active_window_atom, 0, 1024, False, AnyPropertyType, &type, &format, &num_items, &bytes_after, &properties) == Success && properties) {
Window focused_window = *(unsigned long*)properties;
XFree(properties);
return focused_window;
}
return None;
}

static bool drm_create_codec_context(gsr_capture_xcomposite_drm *cap_xcomp, AVCodecContext *video_codec_context) {
AVBufferRef *device_ctx;
if(av_hwdevice_ctx_create(&device_ctx, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", NULL, 0) < 0) {
@@ -94,16 +119,30 @@ static bool drm_create_codec_context(gsr_capture_xcomposite_drm *cap_xcomp, AVCo
static int gsr_capture_xcomposite_drm_start(gsr_capture *cap, AVCodecContext *video_codec_context) {
gsr_capture_xcomposite_drm *cap_xcomp = cap->priv;

if(cap_xcomp->params.follow_focused) {
cap_xcomp->net_active_window_atom = XInternAtom(cap_xcomp->dpy, "_NET_ACTIVE_WINDOW", False);
if(!cap_xcomp->net_active_window_atom) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_start failed: failed to get _NET_ACTIVE_WINDOW atom\n");
return -1;
}
cap_xcomp->window = get_focused_window(cap_xcomp->dpy, cap_xcomp->net_active_window_atom);
} else {
cap_xcomp->window = cap_xcomp->params.window;
}

/* TODO: Do these in tick, and allow error if follow_focused */

XWindowAttributes attr;
-if(!XGetWindowAttributes(cap_xcomp->dpy, cap_xcomp->params.window, &attr)) {
+if(!XGetWindowAttributes(cap_xcomp->dpy, cap_xcomp->params.window, &attr) && !cap_xcomp->params.follow_focused) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_start failed: invalid window id: %lu\n", cap_xcomp->params.window);
return -1;
}

cap_xcomp->window_size.x = max_int(attr.width, 0);
cap_xcomp->window_size.y = max_int(attr.height, 0);
Window c;
XTranslateCoordinates(cap_xcomp->dpy, cap_xcomp->params.window, DefaultRootWindow(cap_xcomp->dpy), 0, 0, &cap_xcomp->window_pos.x, &cap_xcomp->window_pos.y, &c);

if(cap_xcomp->params.follow_focused)
XSelectInput(cap_xcomp->dpy, DefaultRootWindow(cap_xcomp->dpy), PropertyChangeMask);

// TODO: Get select and add these on top of it and then restore at the end. Also do the same in other xcomposite
XSelectInput(cap_xcomp->dpy, cap_xcomp->params.window, StructureNotifyMask | ExposureMask);
@@ -127,44 +166,15 @@ static int gsr_capture_xcomposite_drm_start(gsr_capture *cap, AVCodecContext *vi

/* Disable vsync */
cap_xcomp->egl.eglSwapInterval(cap_xcomp->egl.egl_display, 0);
#if 0
// TODO: Fallback to composite window
if(window_texture_init(&cap_xcomp->window_texture, cap_xcomp->dpy, cap_xcomp->params.window, &cap_xcomp->gl) != 0) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_start: failed get window texture for window %ld\n", cap_xcomp->params.window);
gsr_egl_unload(&cap_xcomp->egl);
return -1;
}

cap_xcomp->egl.glBindTexture(GL_TEXTURE_2D, window_texture_get_opengl_texture_id(&cap_xcomp->window_texture));
cap_xcomp->texture_size.x = 0;
cap_xcomp->texture_size.y = 0;
cap_xcomp->egl.glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &cap_xcomp->texture_size.x);
cap_xcomp->egl.glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &cap_xcomp->texture_size.y);
cap_xcomp->egl.glBindTexture(GL_TEXTURE_2D, 0);

cap_xcomp->texture_size.x = max_int(2, cap_xcomp->texture_size.x & ~1);
cap_xcomp->texture_size.y = max_int(2, cap_xcomp->texture_size.y & ~1);

cap_xcomp->target_texture_id = gl_create_texture(cap_xcomp, cap_xcomp->texture_size.x, cap_xcomp->texture_size.y);
if(cap_xcomp->target_texture_id == 0) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_start: failed to create opengl texture\n");
gsr_capture_xcomposite_stop(cap, video_codec_context);
return -1;
}

video_codec_context->width = cap_xcomp->texture_size.x;
video_codec_context->height = cap_xcomp->texture_size.y;

cap_xcomp->window_resize_timer = clock_get_monotonic_seconds();
return 0;
#else
// TODO: Fallback to composite window
-if(window_texture_init(&cap_xcomp->window_texture, cap_xcomp->dpy, cap_xcomp->params.window, &cap_xcomp->egl) != 0) {
+if(window_texture_init(&cap_xcomp->window_texture, cap_xcomp->dpy, cap_xcomp->params.window, &cap_xcomp->egl) != 0 && !cap_xcomp->params.follow_focused) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_start: failed get window texture for window %ld\n", cap_xcomp->params.window);
gsr_egl_unload(&cap_xcomp->egl);
return -1;
}

cap_xcomp->texture_size.x = 0;
cap_xcomp->texture_size.y = 0;

cap_xcomp->egl.glBindTexture(GL_TEXTURE_2D, window_texture_get_opengl_texture_id(&cap_xcomp->window_texture));
cap_xcomp->texture_size.x = 0;
cap_xcomp->texture_size.y = 0;
@@ -178,76 +188,156 @@ static int gsr_capture_xcomposite_drm_start(gsr_capture *cap, AVCodecContext *vi
video_codec_context->width = cap_xcomp->texture_size.x;
video_codec_context->height = cap_xcomp->texture_size.y;

{
const intptr_t pixmap_attrs[] = {
EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
EGL_NONE,
};

EGLImage img = cap_xcomp->egl.eglCreateImage(cap_xcomp->egl.egl_display, cap_xcomp->egl.egl_context, EGL_GL_TEXTURE_2D, (EGLClientBuffer)(uint64_t)window_texture_get_opengl_texture_id(&cap_xcomp->window_texture), pixmap_attrs);
if(!img) {
fprintf(stderr, "eglCreateImage failed\n");
return -1;
}

if(!cap_xcomp->egl.eglExportDMABUFImageQueryMESA(cap_xcomp->egl.egl_display, img, &cap_xcomp->fourcc, &cap_xcomp->num_planes, &cap_xcomp->modifiers)) {
fprintf(stderr, "eglExportDMABUFImageQueryMESA failed\n");
return -1;
}

if(cap_xcomp->num_planes != 1) {
// TODO: FAIL!
fprintf(stderr, "Blablalba\n");
return -1;
}

if(!cap_xcomp->egl.eglExportDMABUFImageMESA(cap_xcomp->egl.egl_display, img, &cap_xcomp->dmabuf_fd, &cap_xcomp->pitch, &cap_xcomp->offset)) {
fprintf(stderr, "eglExportDMABUFImageMESA failed\n");
return -1;
}

fprintf(stderr, "texture: %u, dmabuf: %d, pitch: %d, offset: %d\n", window_texture_get_opengl_texture_id(&cap_xcomp->window_texture), cap_xcomp->dmabuf_fd, cap_xcomp->pitch, cap_xcomp->offset);
fprintf(stderr, "fourcc: %d, num planes: %d, modifiers: %zu\n", cap_xcomp->fourcc, cap_xcomp->num_planes, cap_xcomp->modifiers);
if(cap_xcomp->params.region_size.x > 0 && cap_xcomp->params.region_size.y > 0) {
video_codec_context->width = max_int(2, cap_xcomp->params.region_size.x & ~1);
video_codec_context->height = max_int(2, cap_xcomp->params.region_size.y & ~1);
}

if(!drm_create_codec_context(cap_xcomp, video_codec_context)) {
fprintf(stderr, "failed to create hw codec context\n");
gsr_egl_unload(&cap_xcomp->egl);
gsr_capture_xcomposite_drm_stop(cap, video_codec_context);
return -1;
}

//fprintf(stderr, "sneed: %u\n", cap_xcomp->FramebufferName);
cap_xcomp->window_resize_timer = clock_get_monotonic_seconds();
return 0;
#endif
}

static void gsr_capture_xcomposite_drm_tick(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame **frame) {
gsr_capture_xcomposite_drm *cap_xcomp = cap->priv;

-cap_xcomp->egl.glClear(GL_COLOR_BUFFER_BIT);
+// TODO:
+//cap_xcomp->egl.glClear(GL_COLOR_BUFFER_BIT);

if(!cap_xcomp->params.follow_focused && XCheckTypedWindowEvent(cap_xcomp->dpy, cap_xcomp->window, DestroyNotify, &cap_xcomp->xev)) {
cap_xcomp->should_stop = true;
cap_xcomp->stop_is_error = false;
}

if(XCheckTypedWindowEvent(cap_xcomp->dpy, cap_xcomp->window, Expose, &cap_xcomp->xev) && cap_xcomp->xev.xexpose.count == 0) {
cap_xcomp->window_resize_timer = clock_get_monotonic_seconds();
cap_xcomp->window_resized = true;
}

if(XCheckTypedWindowEvent(cap_xcomp->dpy, cap_xcomp->window, ConfigureNotify, &cap_xcomp->xev) && cap_xcomp->xev.xconfigure.window == cap_xcomp->window) {
while(XCheckTypedWindowEvent(cap_xcomp->dpy, cap_xcomp->window, ConfigureNotify, &cap_xcomp->xev)) {}

/* Window resize */
if(cap_xcomp->xev.xconfigure.width != cap_xcomp->window_size.x || cap_xcomp->xev.xconfigure.height != cap_xcomp->window_size.y) {
cap_xcomp->window_size.x = max_int(cap_xcomp->xev.xconfigure.width, 0);
cap_xcomp->window_size.y = max_int(cap_xcomp->xev.xconfigure.height, 0);
cap_xcomp->window_resize_timer = clock_get_monotonic_seconds();
cap_xcomp->window_resized = true;
}
}

if(cap_xcomp->params.follow_focused && (!cap_xcomp->follow_focused_initialized || (XCheckTypedWindowEvent(cap_xcomp->dpy, DefaultRootWindow(cap_xcomp->dpy), PropertyNotify, &cap_xcomp->xev) && cap_xcomp->xev.xproperty.atom == cap_xcomp->net_active_window_atom))) {
Window focused_window = get_focused_window(cap_xcomp->dpy, cap_xcomp->net_active_window_atom);
if(focused_window != cap_xcomp->window || !cap_xcomp->follow_focused_initialized) {
cap_xcomp->follow_focused_initialized = true;
XSelectInput(cap_xcomp->dpy, cap_xcomp->window, 0);
cap_xcomp->window = focused_window;
XSelectInput(cap_xcomp->dpy, cap_xcomp->window, StructureNotifyMask | ExposureMask);

XWindowAttributes attr;
attr.width = 0;
attr.height = 0;
if(!XGetWindowAttributes(cap_xcomp->dpy, cap_xcomp->window, &attr))
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick failed: invalid window id: %lu\n", cap_xcomp->window);

cap_xcomp->window_size.x = max_int(attr.width, 0);
cap_xcomp->window_size.y = max_int(attr.height, 0);
cap_xcomp->window_resized = true;

window_texture_deinit(&cap_xcomp->window_texture);
window_texture_init(&cap_xcomp->window_texture, cap_xcomp->dpy, cap_xcomp->window, &cap_xcomp->egl); // TODO: Do not do the below window_texture_on_resize after this

cap_xcomp->texture_size.x = 0;
cap_xcomp->texture_size.y = 0;

cap_xcomp->egl.glBindTexture(GL_TEXTURE_2D, window_texture_get_opengl_texture_id(&cap_xcomp->window_texture));
cap_xcomp->egl.glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &cap_xcomp->texture_size.x);
cap_xcomp->egl.glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &cap_xcomp->texture_size.y);
cap_xcomp->egl.glBindTexture(GL_TEXTURE_2D, 0);

cap_xcomp->texture_size.x = min_int(video_codec_context->width, max_int(2, cap_xcomp->texture_size.x & ~1));
cap_xcomp->texture_size.y = min_int(video_codec_context->height, max_int(2, cap_xcomp->texture_size.y & ~1));
}
}

const double window_resize_timeout = 1.0; // 1 second
if(!cap_xcomp->created_hw_frame || (cap_xcomp->window_resized && clock_get_monotonic_seconds() - cap_xcomp->window_resize_timer >= window_resize_timeout)) {
cap_xcomp->window_resized = false;

if(window_texture_on_resize(&cap_xcomp->window_texture) != 0) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: window_texture_on_resize failed\n");
//cap_xcomp->should_stop = true;
//cap_xcomp->stop_is_error = true;
//return;
}

cap_xcomp->texture_size.x = 0;
cap_xcomp->texture_size.y = 0;

cap_xcomp->egl.glBindTexture(GL_TEXTURE_2D, window_texture_get_opengl_texture_id(&cap_xcomp->window_texture));
cap_xcomp->egl.glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &cap_xcomp->texture_size.x);
cap_xcomp->egl.glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &cap_xcomp->texture_size.y);
cap_xcomp->egl.glBindTexture(GL_TEXTURE_2D, 0);

cap_xcomp->texture_size.x = min_int(video_codec_context->width, max_int(2, cap_xcomp->texture_size.x & ~1));
cap_xcomp->texture_size.y = min_int(video_codec_context->height, max_int(2, cap_xcomp->texture_size.y & ~1));

if(cap_xcomp->buffer_id) {
vaDestroyBuffer(cap_xcomp->va_dpy, cap_xcomp->buffer_id);
cap_xcomp->buffer_id = 0;
}

if(cap_xcomp->context_id) {
vaDestroyContext(cap_xcomp->va_dpy, cap_xcomp->context_id);
cap_xcomp->context_id = 0;
}

if(cap_xcomp->config_id) {
vaDestroyConfig(cap_xcomp->va_dpy, cap_xcomp->config_id);
cap_xcomp->config_id = 0;
}

if(cap_xcomp->input_surface) {
vaDestroySurfaces(cap_xcomp->va_dpy, &cap_xcomp->input_surface, 1);
cap_xcomp->input_surface = 0;
}

if(cap_xcomp->dmabuf_fd) {
close(cap_xcomp->dmabuf_fd);
cap_xcomp->dmabuf_fd = 0;
}

if(!cap_xcomp->created_hw_frame) {
cap_xcomp->created_hw_frame = true;

av_frame_free(frame);
*frame = av_frame_alloc();
if(!frame) {
-fprintf(stderr, "gsr error: gsr_capture_xcomposite_tick: failed to allocate frame\n");
+fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: failed to allocate frame\n");
cap_xcomp->should_stop = true;
cap_xcomp->stop_is_error = true;
return;
}
(*frame)->format = video_codec_context->pix_fmt;
(*frame)->width = video_codec_context->width;
(*frame)->height = video_codec_context->height;
-(*frame)->color_range = AVCOL_RANGE_JPEG;
+(*frame)->color_range = video_codec_context->color_range;
(*frame)->color_primaries = video_codec_context->color_primaries;
(*frame)->color_trc = video_codec_context->color_trc;
(*frame)->colorspace = video_codec_context->colorspace;
(*frame)->chroma_location = video_codec_context->chroma_sample_location;

int res = av_hwframe_get_buffer(video_codec_context->hw_frames_ctx, *frame, 0);
if(res < 0) {
-fprintf(stderr, "gsr error: gsr_capture_xcomposite_tick: av_hwframe_get_buffer failed 1: %d\n", res);
+fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: av_hwframe_get_buffer failed: %d\n", res);
cap_xcomp->should_stop = true;
cap_xcomp->stop_is_error = true;
return;
}

fprintf(stderr, "fourcc: %u\n", cap_xcomp->fourcc);
fprintf(stderr, "va surface id: %u\n", (VASurfaceID)(uintptr_t)(*frame)->data[3]);
}

int xx = 0, yy = 0;
cap_xcomp->egl.glBindTexture(GL_TEXTURE_2D, window_texture_get_opengl_texture_id(&cap_xcomp->window_texture));
@@ -255,10 +345,49 @@ static void gsr_capture_xcomposite_drm_tick(gsr_capture *cap, AVCodecContext *vi
cap_xcomp->egl.glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &yy);
cap_xcomp->egl.glBindTexture(GL_TEXTURE_2D, 0);

const intptr_t pixmap_attrs[] = {
EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
EGL_NONE,
};

EGLImage img = cap_xcomp->egl.eglCreateImage(cap_xcomp->egl.egl_display, cap_xcomp->egl.egl_context, EGL_GL_TEXTURE_2D, (EGLClientBuffer)(uint64_t)window_texture_get_opengl_texture_id(&cap_xcomp->window_texture), pixmap_attrs);
if(!img) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: eglCreateImage failed\n");
cap_xcomp->should_stop = true;
cap_xcomp->stop_is_error = true;
return;
}

if(!cap_xcomp->egl.eglExportDMABUFImageQueryMESA(cap_xcomp->egl.egl_display, img, &cap_xcomp->fourcc, &cap_xcomp->num_planes, &cap_xcomp->modifiers)) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: eglExportDMABUFImageQueryMESA failed\n");
cap_xcomp->should_stop = true;
cap_xcomp->stop_is_error = true;
cap_xcomp->egl.eglDestroyImage(cap_xcomp->egl.egl_display, img);
return;
}

if(cap_xcomp->num_planes != 1) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: expected 1 plane for drm buf, got %d planes\n", cap_xcomp->num_planes);
cap_xcomp->should_stop = true;
cap_xcomp->stop_is_error = true;
cap_xcomp->egl.eglDestroyImage(cap_xcomp->egl.egl_display, img);
return;
}

if(!cap_xcomp->egl.eglExportDMABUFImageMESA(cap_xcomp->egl.egl_display, img, &cap_xcomp->dmabuf_fd, &cap_xcomp->pitch, &cap_xcomp->offset)) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: eglExportDMABUFImageMESA failed\n");
cap_xcomp->should_stop = true;
cap_xcomp->stop_is_error = true;
cap_xcomp->egl.eglDestroyImage(cap_xcomp->egl.egl_display, img);
return;
}

cap_xcomp->egl.eglDestroyImage(cap_xcomp->egl.egl_display, img);

uintptr_t dmabuf = cap_xcomp->dmabuf_fd;

VASurfaceAttribExternalBuffers buf = {0};
-buf.pixel_format = VA_FOURCC_BGRX; // TODO: VA_FOURCC_XRGB?
+buf.pixel_format = VA_FOURCC_BGRX;
buf.width = xx;
buf.height = yy;
buf.data_size = yy * cap_xcomp->pitch;
@@ -284,8 +413,9 @@ static void gsr_capture_xcomposite_drm_tick(gsr_capture *cap, AVCodecContext *vi

VAStatus va_status = vaCreateSurfaces(cap_xcomp->va_dpy, VA_RT_FORMAT_RGB32, xx, yy, &cap_xcomp->input_surface, 1, attribs, 2);
if(va_status != VA_STATUS_SUCCESS) {
-fprintf(stderr, "failed to create surface: %d\n", va_status);
-abort();
+fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: vaCreateSurfaces failed: %d\n", va_status);
+cap_xcomp->should_stop = true;
+cap_xcomp->stop_is_error = true;
return;
}

@@ -293,16 +423,44 @@ static void gsr_capture_xcomposite_drm_tick(gsr_capture *cap, AVCodecContext *vi

va_status = vaCreateConfig(cap_xcomp->va_dpy, VAProfileNone, VAEntrypointVideoProc, NULL, 0, &cap_xcomp->config_id);
if(va_status != VA_STATUS_SUCCESS) {
-fprintf(stderr, "vaCreateConfig failed: %d\n", va_status);
-abort();
+fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: vaCreateConfig failed: %d\n", va_status);
+cap_xcomp->should_stop = true;
+cap_xcomp->stop_is_error = true;
return;
}

VASurfaceID target_surface_id = (uintptr_t)(*frame)->data[3];
va_status = vaCreateContext(cap_xcomp->va_dpy, cap_xcomp->config_id, xx, yy, VA_PROGRESSIVE, &target_surface_id, 1, &cap_xcomp->context_id);
if(va_status != VA_STATUS_SUCCESS) {
-fprintf(stderr, "vaCreateContext failed: %d\n", va_status);
-abort();
+fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: vaCreateContext failed: %d\n", va_status);
+cap_xcomp->should_stop = true;
+cap_xcomp->stop_is_error = true;
return;
}

cap_xcomp->output_region = (VARectangle){
.x = 0,
.y = 0,
.width = xx,
.height = yy
};

// Copying a surface to another surface will automatically perform the color conversion. Thanks vaapi!
VAProcPipelineParameterBuffer params = {0};
params.surface = cap_xcomp->input_surface;
params.surface_region = NULL; // TODO: Use when using kmsgrab to restrict region to captured monitor
params.output_region = &cap_xcomp->output_region;
params.output_background_color = 0;
//params.filter_flags = VA_FRAME_PICTURE;
// TODO: Colors
params.input_color_properties.color_range = (*frame)->color_range == AVCOL_RANGE_JPEG ? VA_SOURCE_RANGE_FULL : VA_SOURCE_RANGE_REDUCED;
params.output_color_properties.color_range = (*frame)->color_range == AVCOL_RANGE_JPEG ? VA_SOURCE_RANGE_FULL : VA_SOURCE_RANGE_REDUCED;

va_status = vaCreateBuffer(cap_xcomp->va_dpy, cap_xcomp->context_id, VAProcPipelineParameterBufferType, sizeof(params), 1, &params, &cap_xcomp->buffer_id);
if(va_status != VA_STATUS_SUCCESS) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: vaCreateBuffer failed: %d\n", va_status);
cap_xcomp->should_stop = true;
cap_xcomp->stop_is_error = true;
return;
}

@@ -314,14 +472,18 @@ static void gsr_capture_xcomposite_drm_tick(gsr_capture *cap, AVCodecContext *vi
}

static bool gsr_capture_xcomposite_drm_should_stop(gsr_capture *cap, bool *err) {
gsr_capture_xcomposite_drm *cap_xcomp = cap->priv;
if(cap_xcomp->should_stop) {
if(err)
*err = cap_xcomp->stop_is_error;
return true;
}

if(err)
*err = false;
return false;
}

#define GL_FLOAT 0x1406
#define GL_FALSE 0
#define GL_TRUE 1
#define GL_TRIANGLES 0x0004

static int gsr_capture_xcomposite_drm_capture(gsr_capture *cap, AVFrame *frame) {
gsr_capture_xcomposite_drm *cap_xcomp = cap->priv;

@@ -329,35 +491,33 @@ static int gsr_capture_xcomposite_drm_capture(gsr_capture *cap, AVFrame *frame)

VAStatus va_status = vaBeginPicture(cap_xcomp->va_dpy, cap_xcomp->context_id, target_surface_id);
if(va_status != VA_STATUS_SUCCESS) {
-fprintf(stderr, "vaBeginPicture failed: %d\n", va_status);
-abort();
-return 1;
+static bool error_printed = false;
+if(!error_printed) {
+error_printed = true;
+fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: vaBeginPicture failed: %d\n", va_status);
+}
+return -1;
}

-VAProcPipelineParameterBuffer params = {0};
-params.surface = cap_xcomp->input_surface;
-params.surface_region = NULL;
-params.output_background_color = 0xFF000000;
-params.filter_flags = VA_FRAME_PICTURE;
-// TODO: Colors

-VABufferID buffer_id = 0;
-va_status = vaCreateBuffer(cap_xcomp->va_dpy, cap_xcomp->context_id, VAProcPipelineParameterBufferType, sizeof(params), 1, &params, &buffer_id);
+va_status = vaRenderPicture(cap_xcomp->va_dpy, cap_xcomp->context_id, &cap_xcomp->buffer_id, 1);
if(va_status != VA_STATUS_SUCCESS) {
-fprintf(stderr, "vaCreateBuffer failed: %d\n", va_status);
-return 1;
+vaEndPicture(cap_xcomp->va_dpy, cap_xcomp->context_id);
+static bool error_printed = false;
+if(!error_printed) {
+error_printed = true;
+fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: vaRenderPicture failed: %d\n", va_status);
+}

-va_status = vaRenderPicture(cap_xcomp->va_dpy, cap_xcomp->context_id, &buffer_id, 1);
-if(va_status != VA_STATUS_SUCCESS) {
-fprintf(stderr, "vaRenderPicture failed: %d\n", va_status);
-return 1;
+return -1;
}

va_status = vaEndPicture(cap_xcomp->va_dpy, cap_xcomp->context_id);
if(va_status != VA_STATUS_SUCCESS) {
-fprintf(stderr, "vaEndPicture failed: %d\n", va_status);
-return 1;
+static bool error_printed = false;
+if(!error_printed) {
+error_printed = true;
+fprintf(stderr, "gsr error: gsr_capture_xcomposite_drm_tick: vaEndPicture failed: %d\n", va_status);
+}
+return -1;
}

// TODO: Needed?
@@ -369,10 +529,54 @@ static int gsr_capture_xcomposite_drm_capture(gsr_capture *cap, AVFrame *frame)
return 0;
}

static void gsr_capture_xcomposite_drm_stop(gsr_capture *cap, AVCodecContext *video_codec_context) {
gsr_capture_xcomposite_drm *cap_xcomp = cap->priv;

if(cap_xcomp->buffer_id) {
vaDestroyBuffer(cap_xcomp->va_dpy, cap_xcomp->buffer_id);
cap_xcomp->buffer_id = 0;
}

if(cap_xcomp->context_id) {
vaDestroyContext(cap_xcomp->va_dpy, cap_xcomp->context_id);
cap_xcomp->context_id = 0;
}

if(cap_xcomp->config_id) {
vaDestroyConfig(cap_xcomp->va_dpy, cap_xcomp->config_id);
cap_xcomp->config_id = 0;
}

if(cap_xcomp->input_surface) {
vaDestroySurfaces(cap_xcomp->va_dpy, &cap_xcomp->input_surface, 1);
cap_xcomp->input_surface = 0;
}

if(cap_xcomp->dmabuf_fd) {
close(cap_xcomp->dmabuf_fd);
cap_xcomp->dmabuf_fd = 0;
}

window_texture_deinit(&cap_xcomp->window_texture);

if(video_codec_context->hw_device_ctx)
av_buffer_unref(&video_codec_context->hw_device_ctx);
if(video_codec_context->hw_frames_ctx)
av_buffer_unref(&video_codec_context->hw_frames_ctx);

gsr_egl_unload(&cap_xcomp->egl);
if(cap_xcomp->dpy) {
// TODO: This causes a crash, why? maybe some other library dlclose xlib and that also happened to unload this???
//XCloseDisplay(cap_xcomp->dpy);
cap_xcomp->dpy = NULL;
}
}

static void gsr_capture_xcomposite_drm_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
(void)video_codec_context;
gsr_capture_xcomposite_drm *cap_xcomp = cap->priv;
if(cap->priv) {
gsr_capture_xcomposite_drm_stop(cap, video_codec_context);
free(cap->priv);
cap->priv = NULL;
}
@@ -375,7 +375,7 @@ static AVCodecContext *create_video_codec_context(AVPixelFormat pix_fmt,
}
codec_context->max_b_frames = 0;
codec_context->pix_fmt = pix_fmt;
-//codec_context->color_range = AVCOL_RANGE_MPEG;
+//codec_context->color_range = AVCOL_RANGE_JPEG;
//codec_context->color_primaries = AVCOL_PRI_BT709;
//codec_context->color_trc = AVCOL_TRC_BT709;
//codec_context->colorspace = AVCOL_SPC_BT709;
@@ -637,7 +637,7 @@ static void open_video(AVCodecContext *codec_context, VideoQuality video_quality
// //av_dict_set(&options, "preset", "llhq", 0);
//}

-// Fuck nvidia and ffmpeg, I want to use a good preset for the gpu but all gpus prefer different
+// I want to use a good preset for the gpu but all gpus prefer different
// presets. Nvidia and ffmpeg used to support "hq" preset that chose the best preset for the gpu
// with pretty good performance but you now have to choose p1-p7, which are gpu agnostic and on
// older gpus p5-p7 slow the gpu down to a crawl...
@@ -686,7 +686,7 @@ static void open_video(AVCodecContext *codec_context, VideoQuality video_quality

if(codec_context->codec_id == AV_CODEC_ID_H264) {
av_dict_set(&options, "profile", "high", 0);
-av_dict_set_int(&options, "quality", 4, 0);
+av_dict_set_int(&options, "quality", 14, 0);
} else {
av_dict_set(&options, "profile", "main", 0);
}
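For context on the amd/intel path this commit cleans up: each captured frame boils down to a single VAAPI video-processing blit, where the pipeline parameter buffer created in gsr_capture_xcomposite_drm_tick is submitted against the encoder's output surface ((*frame)->data[3]) and the driver performs the RGB to NV12 conversion during the copy. Below is a minimal illustrative sketch of that submit sequence, not code from this commit, assuming va_dpy, context_id, buffer_id and target_surface_id are already set up as in the tick function above; vaapi_blit is a hypothetical helper name.

#include <stdio.h>
#include <va/va.h>

// Submit one video-processing job: copy/convert the input surface referenced by
// the pre-built VAProcPipelineParameterBuffer (buffer_id) into target_surface_id.
static int vaapi_blit(VADisplay va_dpy, VAContextID context_id, VABufferID buffer_id, VASurfaceID target_surface_id) {
    // Start rendering into the encoder's surface
    VAStatus va_status = vaBeginPicture(va_dpy, context_id, target_surface_id);
    if(va_status != VA_STATUS_SUCCESS) {
        fprintf(stderr, "vaBeginPicture failed: %d\n", va_status);
        return -1;
    }

    // Queue the pipeline parameter buffer that describes the input surface and output region
    va_status = vaRenderPicture(va_dpy, context_id, &buffer_id, 1);
    if(va_status != VA_STATUS_SUCCESS) {
        vaEndPicture(va_dpy, context_id);
        fprintf(stderr, "vaRenderPicture failed: %d\n", va_status);
        return -1;
    }

    // End the picture; the copy/color conversion is executed on the gpu
    va_status = vaEndPicture(va_dpy, context_id);
    if(va_status != VA_STATUS_SUCCESS) {
        fprintf(stderr, "vaEndPicture failed: %d\n", va_status);
        return -1;
    }
    return 0;
}

The same three calls appear in gsr_capture_xcomposite_drm_capture above; reusing the buffer created once per resize in tick is what lets the capture path avoid a vaCreateBuffer call on every frame.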