Attempt to fix AMD/Intel driver VRAM leak bug, force cfr when live streaming, replace std::filesystem with POSIX

create replay directory automatically, recursively
dec05eba 2023-10-21 22:46:30 +02:00
parent ee5d1d7a27
commit 9710f8a2d4
5 changed files with 70 additions and 51 deletions


@@ -82,7 +82,7 @@ Here is an example of how to record all monitors and the default audio output: `
 ## Streaming
 Streaming works the same as recording, but the `-o` argument should be the path to the live streaming service you want to use (including your live streaming key). Take a look at scripts/twitch-stream.sh to see an example of how to stream to twitch.
 ## Replay mode
-Run `gpu-screen-recorder` with the `-c mp4` and `-r` option, for example: `gpu-screen-recorder -w screen -f 60 -r 30 -c mp4 -o ~/Videos`. Note that in this case, `-o` should point to a directory (that exists).\
+Run `gpu-screen-recorder` with the `-c mp4` and `-r` option, for example: `gpu-screen-recorder -w screen -f 60 -r 30 -c mp4 -o ~/Videos`. Note that in this case, `-o` should point to a directory.\
 If `-mf yes` is set, replays are saved in folders based on the date.
 To save a video in replay mode, you need to send signal SIGUSR1 to gpu screen recorder. You can do this by running `killall -SIGUSR1 gpu-screen-recorder`.\
 To stop recording, send SIGINT to gpu screen recorder. You can do this by running `killall gpu-screen-recorder` or pressing `Ctrl-C` in the terminal that runs gpu screen recorder.\
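The replay workflow above is signal-driven: SIGUSR1 saves the buffered replay, SIGINT stops and finalizes. As a rough sketch of that pattern (not the project's actual code; the flag and handler names below are hypothetical):

```c
// Minimal, illustrative signal-driven save/stop loop in the spirit of the
// README text above. save_requested/stop_requested are hypothetical names.
#include <signal.h>
#include <stdio.h>
#include <unistd.h>

static volatile sig_atomic_t save_requested = 0;
static volatile sig_atomic_t stop_requested = 0;

static void on_sigusr1(int sig) { (void)sig; save_requested = 1; }
static void on_sigint(int sig)  { (void)sig; stop_requested = 1; }

int main(void) {
    signal(SIGUSR1, on_sigusr1); // "killall -SIGUSR1 gpu-screen-recorder" lands here
    signal(SIGINT, on_sigint);   // Ctrl-C or "killall gpu-screen-recorder"

    while(!stop_requested) {
        if(save_requested) {
            save_requested = 0;
            printf("would write the replay buffer to disk here\n");
        }
        usleep(16000); // stand-in for the capture/encode loop
    }
    printf("would flush and finalize the recording here\n");
    return 0;
}
```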


@@ -9,7 +9,7 @@ typedef struct _XDisplay Display;
 typedef struct {
     gsr_egl *egl;
-    const char *display_to_capture; /* if this is "screen", then the entire x11 screen is captured (all displays). A copy is made of this */
+    const char *display_to_capture; /* if this is "screen", then the first monitor is captured. A copy is made of this */
     gsr_gpu_info gpu_inf;
     const char *card_path; /* reference */
 } gsr_capture_kms_cuda_params;


@@ -9,7 +9,7 @@ typedef struct _XDisplay Display;
 typedef struct {
     gsr_egl *egl;
-    const char *display_to_capture; /* if this is "screen", then the entire x11 screen is captured (all displays). A copy is made of this */
+    const char *display_to_capture; /* if this is "screen", then the first monitor is captured. A copy is made of this */
     gsr_gpu_info gpu_inf;
     const char *card_path; /* reference */
     bool wayland;


@@ -13,7 +13,8 @@
 #include <xf86drm.h>
 #include <xf86drmMode.h>
-#include <libdrm/drm_mode.h>
+#include <drm_mode.h>
+#include <drm_fourcc.h>
 #define MAX_CONNECTORS 32
@@ -161,14 +162,6 @@ static uint32_t get_connector_by_crtc_id(const connector_to_crtc_map *c2crtc_map
     return 0;
 }
-static bool drmfb_has_multiple_handles(drmModeFB2 *drmfb) {
-    int num_handles = 0;
-    for(uint32_t handle_index = 0; handle_index < 4 && drmfb->handles[handle_index]; ++handle_index) {
-        ++num_handles;
-    }
-    return num_handles > 1;
-}
 static void map_crtc_to_connector_ids(gsr_drm *drm, connector_to_crtc_map *c2crtc_map) {
     c2crtc_map->num_maps = 0;
     drmModeResPtr resources = drmModeGetResources(drm->drmfd);
@@ -201,7 +194,7 @@ static int kms_get_fb(gsr_drm *drm, gsr_kms_response *response, connector_to_crt
     for(uint32_t i = 0; i < drm->planes->count_planes && response->num_fds < GSR_KMS_MAX_PLANES; ++i) {
         drmModePlanePtr plane = NULL;
-        drmModeFB2 *drmfb = NULL;
+        drmModeFBPtr drmfb = NULL;
         plane = drmModeGetPlane(drm->drmfd, drm->planes->planes[i]);
         if(!plane) {
@@ -214,7 +207,8 @@ static int kms_get_fb(gsr_drm *drm, gsr_kms_response *response, connector_to_crt
         if(!plane->fb_id)
             goto next;
-        drmfb = drmModeGetFB2(drm->drmfd, plane->fb_id);
+        // TODO: drmModeGetFB2 can't be used because it causes a vram leak when the fb_fd is sent amd/intel.. why?
+        drmfb = drmModeGetFB(drm->drmfd, plane->fb_id);
         if(!drmfb) {
             // Commented out for now because we get here if the cursor is moved to another monitor and we dont care about the cursor
             //response->result = KMS_RESULT_FAILED_TO_GET_PLANE;
@@ -223,7 +217,7 @@ static int kms_get_fb(gsr_drm *drm, gsr_kms_response *response, connector_to_crt
             goto next;
         }
-        if(!drmfb->handles[0]) {
+        if(!drmfb->handle) {
             response->result = KMS_RESULT_FAILED_TO_GET_PLANE;
             snprintf(response->err_msg, sizeof(response->err_msg), "drmfb handle is NULL");
             fprintf(stderr, "kms server error: %s\n", response->err_msg);
@@ -234,7 +228,7 @@ static int kms_get_fb(gsr_drm *drm, gsr_kms_response *response, connector_to_crt
         // TODO: Support other plane formats than rgb (with multiple planes, such as direct YUV420 on wayland).
         int fb_fd = -1;
-        const int ret = drmPrimeHandleToFD(drm->drmfd, drmfb->handles[0], O_RDONLY, &fb_fd);
+        const int ret = drmPrimeHandleToFD(drm->drmfd, drmfb->handle, O_RDONLY, &fb_fd);
         if(ret != 0 || fb_fd == -1) {
             response->result = KMS_RESULT_FAILED_TO_GET_PLANE;
             snprintf(response->err_msg, sizeof(response->err_msg), "failed to get fd from drm handle, error: %s", strerror(errno));
@@ -249,16 +243,14 @@ static int kms_get_fb(gsr_drm *drm, gsr_kms_response *response, connector_to_crt
         response->fds[response->num_fds].fd = fb_fd;
         response->fds[response->num_fds].width = drmfb->width;
         response->fds[response->num_fds].height = drmfb->height;
-        response->fds[response->num_fds].pitch = drmfb->pitches[0];
-        response->fds[response->num_fds].offset = drmfb->offsets[0];
-        response->fds[response->num_fds].pixel_format = drmfb->pixel_format;
-        response->fds[response->num_fds].modifier = drmfb->modifier;
+        response->fds[response->num_fds].pitch = drmfb->pitch;
+        response->fds[response->num_fds].offset = 0;//drmfb->offsets[0];
+        // TODO?
+        response->fds[response->num_fds].pixel_format = DRM_FORMAT_ARGB8888;//drmfb->pixel_format;
+        response->fds[response->num_fds].modifier = 0;//drmfb->modifier;
         response->fds[response->num_fds].connector_id = get_connector_by_crtc_id(c2crtc_map, plane->crtc_id);
         response->fds[response->num_fds].is_cursor = is_cursor;
-        // TODO: This is not an accurate way to detect it. First of all, it always fails with multiple monitors
-        // on wayland as the drmfb always has multiple planes.
-        // Check if this can be improved by also checking if the handles are duplicated (multiple ones refer to each other).
-        response->fds[response->num_fds].is_combined_plane = drmfb_has_multiple_handles(drmfb);
+        response->fds[response->num_fds].is_combined_plane = false;
         if(is_cursor) {
             response->fds[response->num_fds].x = x;
             response->fds[response->num_fds].y = y;
@@ -274,12 +266,12 @@ static int kms_get_fb(gsr_drm *drm, gsr_kms_response *response, connector_to_crt
         next:
         if(drmfb)
-            drmModeFreeFB2(drmfb);
+            drmModeFreeFB(drmfb);
         if(plane)
             drmModeFreePlane(plane);
     }
-    if(response->num_fds > 0 || response->result == KMS_RESULT_OK) {
+    if(response->result == KMS_RESULT_OK) {
         result = 0;
     } else {
         for(int i = 0; i < response->num_fds; ++i) {
@@ -419,19 +411,20 @@ int main(int argc, char **argv) {
             switch(request.type) {
                 case KMS_REQUEST_TYPE_GET_KMS: {
                     gsr_kms_response response;
+                    response.num_fds = 0;
                     if(kms_get_fb(&drm, &response, &c2crtc_map) == 0) {
                         if(send_msg_to_client(socket_fd, &response) == -1)
                             fprintf(stderr, "kms server error: failed to respond to client KMS_REQUEST_TYPE_GET_KMS request\n");
-                        for(int i = 0; i < response.num_fds; ++i) {
-                            close(response.fds[i].fd);
-                        }
                     } else {
                         if(send_msg_to_client(socket_fd, &response) == -1)
                             fprintf(stderr, "kms server error: failed to respond to client KMS_REQUEST_TYPE_GET_KMS request\n");
                     }
+                    for(int i = 0; i < response.num_fds; ++i) {
+                        close(response.fds[i].fd);
+                    }
                     break;
                 }
                 default: {
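For context on the drmModeGetFB2 → drmModeGetFB switch above, here is a rough, self-contained sketch of the path the server now uses: look up a plane's framebuffer with drmModeGetFB() (single handle, pitch, no modifiers) and export it as a DMA-BUF fd with drmPrimeHandleToFD(). This is an illustration under the assumption of a /dev/dri/card0 node and sufficient privileges (DRM master or CAP_SYS_ADMIN), not the server's actual code:

```c
// Enumerate planes, fetch each plane's framebuffer with drmModeGetFB() and
// export the GEM handle as a DMA-BUF file descriptor.
#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>
#include <xf86drm.h>
#include <xf86drmMode.h>

int main(void) {
    const int drmfd = open("/dev/dri/card0", O_RDONLY);
    if(drmfd == -1) { perror("open"); return 1; }

    // Without this, cursor and overlay planes are hidden from the plane list
    drmSetClientCap(drmfd, DRM_CLIENT_CAP_UNIVERSAL_PLANES, 1);

    drmModePlaneResPtr planes = drmModeGetPlaneResources(drmfd);
    if(!planes) { close(drmfd); return 1; }

    for(uint32_t i = 0; i < planes->count_planes; ++i) {
        drmModePlanePtr plane = drmModeGetPlane(drmfd, planes->planes[i]);
        if(!plane)
            continue;

        if(plane->fb_id) {
            drmModeFBPtr fb = drmModeGetFB(drmfd, plane->fb_id);
            if(fb && fb->handle) {
                int dmabuf_fd = -1;
                if(drmPrimeHandleToFD(drmfd, fb->handle, O_RDONLY, &dmabuf_fd) == 0 && dmabuf_fd != -1) {
                    printf("fb %u: %ux%u pitch %u -> dma-buf fd %d\n",
                           fb->fb_id, fb->width, fb->height, fb->pitch, dmabuf_fd);
                    close(dmabuf_fd); // the real server sends this fd to the client instead
                }
            }
            if(fb)
                drmModeFreeFB(fb);
        }
        drmModeFreePlane(plane);
    }

    drmModeFreePlaneResources(planes);
    close(drmfd);
    return 0;
}
```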


@@ -9,7 +9,6 @@ extern "C" {
 }
 #include <assert.h>
-#include <filesystem>
 #include <stdio.h>
 #include <stdlib.h>
 #include <string>
@@ -712,7 +711,7 @@ static void usage_full() {
     fprintf(stderr, " is dropped when you record a game. Only needed if you are recording a game that is bottlenecked by GPU.\n");
     fprintf(stderr, " Works only if your have \"Coolbits\" set to \"12\" in NVIDIA X settings, see README for more information. Note! use at your own risk! Optional, disabled by default.\n");
     fprintf(stderr, "\n");
-    fprintf(stderr, " -fm Framerate mode. Should be either 'cfr' or 'vfr'. Defaults to 'cfr' on NVIDIA and 'vfr' on AMD/Intel.\n");
+    fprintf(stderr, " -fm Framerate mode. Should be either 'cfr' or 'vfr'. Defaults to 'cfr' on NVIDIA X11 and 'vfr' on AMD/Intel X11/Wayland or NVIDIA Wayland.\n");
     fprintf(stderr, "\n");
     fprintf(stderr, " -v Prints per second, fps updates. Optional, set to 'yes' by default.\n");
     fprintf(stderr, "\n");
@@ -722,7 +721,7 @@ static void usage_full() {
     fprintf(stderr, "\n");
     //fprintf(stderr, " -pixfmt The pixel format to use for the output video. yuv420 is the most common format and is best supported, but the color is compressed, so colors can look washed out and certain colors of text can look bad. Use yuv444 for no color compression, but the video may not work everywhere and it may not work with hardware video decoding. Optional, defaults to yuv420\n");
     fprintf(stderr, " -o The output file path. If omitted then the encoded data is sent to stdout. Required in replay mode (when using -r).\n");
-    fprintf(stderr, "    In replay mode this has to be an existing directory instead of a file.\n");
+    fprintf(stderr, "    In replay mode this has to be a directory instead of a file.\n");
     fprintf(stderr, "\n");
     fprintf(stderr, "NOTES:\n");
     fprintf(stderr, " Send signal SIGINT to gpu-screen-recorder (Ctrl+C, or killall gpu-screen-recorder) to stop and save the recording (when not using replay mode).\n");
@@ -849,6 +848,38 @@ static std::future<void> save_replay_thread;
 static std::vector<std::shared_ptr<PacketData>> save_replay_packets;
 static std::string save_replay_output_filepath;
+
+static int create_directory_recursive(char *path) {
+    int path_len = strlen(path);
+    char *p = path;
+    char *end = path + path_len;
+    for(;;) {
+        char *slash_p = strchr(p, '/');
+
+        // Skips first '/', we don't want to try and create the root directory
+        if(slash_p == path) {
+            ++p;
+            continue;
+        }
+
+        if(!slash_p)
+            slash_p = end;
+
+        char prev_char = *slash_p;
+        *slash_p = '\0';
+        int err = mkdir(path, S_IRWXU);
+        *slash_p = prev_char;
+
+        if(err == -1 && errno != EEXIST)
+            return err;
+
+        if(slash_p == end)
+            break;
+        else
+            p = slash_p + 1;
+    }
+    return 0;
+}
+
 static void save_replay_async(AVCodecContext *video_codec_context, int video_stream_index, std::vector<AudioTrack> &audio_tracks, std::deque<std::shared_ptr<PacketData>> &frame_data_queue, bool frames_erased, std::string output_dir, const char *container_format, const std::string &file_extension, std::mutex &write_output_mutex, bool make_folders) {
     if(save_replay_thread.valid())
         return;
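A note on the helper added above: it mutates the path string in place (it temporarily writes '\0' at each '/' to mkdir() the prefix), so it needs writable storage, which is why the next hunk passes &output_folder[0] rather than c_str(). A small usage sketch, assuming the create_directory_recursive definition from the hunk above is compiled alongside it:

```c
// Hypothetical standalone caller of create_directory_recursive(); the path
// below is just an example and must live in a writable buffer.
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/stat.h>

static int create_directory_recursive(char *path); // definition as in the diff above

int main(void) {
    char path[] = "/tmp/gsr-example/Videos/2023-10-21";
    if(create_directory_recursive(path) == 0)
        printf("created (or already existed): %s\n", path);
    else
        fprintf(stderr, "failed to create %s: %s\n", path, strerror(errno));
    return 0;
}
```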
@@ -894,11 +925,10 @@ static void save_replay_async(AVCodecContext *video_codec_context, int video_str
     if (make_folders) {
         std::string output_folder = output_dir + '/' + get_date_only_str();
-        if (!std::filesystem::exists(output_folder)) {
-            std::filesystem::create_directory(output_folder);
-        }
+        create_directory_recursive(&output_folder[0]);
         save_replay_output_filepath = output_folder + "/Replay_" + get_time_only_str() + "." + file_extension;
     } else {
+        create_directory_recursive(&output_dir[0]);
         save_replay_output_filepath = output_dir + "/Replay_" + get_date_str() + "." + file_extension;
     }
@@ -1507,14 +1537,9 @@ int main(int argc, char **argv) {
         if(gpu_inf.vendor == GSR_GPU_VENDOR_NVIDIA) {
             if(wayland) {
-                const char *capture_target = window_str;
-                if(strcmp(window_str, "screen-direct") == 0 || strcmp(window_str, "screen-direct-force") == 0) {
-                    capture_target = "screen";
-                }
                 gsr_capture_kms_cuda_params kms_params;
                 kms_params.egl = &egl;
-                kms_params.display_to_capture = capture_target;
+                kms_params.display_to_capture = window_str;
                 kms_params.gpu_inf = gpu_inf;
                 kms_params.card_path = card_path;
                 capture = gsr_capture_kms_cuda_create(&kms_params);
@@ -1550,14 +1575,9 @@ int main(int argc, char **argv) {
                 _exit(1);
             }
         } else {
-            const char *capture_target = window_str;
-            if(strcmp(window_str, "screen-direct") == 0 || strcmp(window_str, "screen-direct-force") == 0) {
-                capture_target = "screen";
-            }
             gsr_capture_kms_vaapi_params kms_params;
             kms_params.egl = &egl;
-            kms_params.display_to_capture = capture_target;
+            kms_params.display_to_capture = window_str;
             kms_params.gpu_inf = gpu_inf;
             kms_params.card_path = card_path;
             kms_params.wayland = wayland;
@@ -1632,8 +1652,8 @@ int main(int argc, char **argv) {
         }
         struct stat buf;
-        if(stat(filename, &buf) == -1 || !S_ISDIR(buf.st_mode)) {
-            fprintf(stderr, "Error: directory \"%s\" does not exist or is not a directory\n", filename);
+        if(stat(filename, &buf) != -1 && !S_ISDIR(buf.st_mode)) {
+            fprintf(stderr, "Error: File \"%s\" exists but it's not a directory\n", filename);
             usage();
         }
     }
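The flipped condition above changes the -o check in replay mode from "must already exist as a directory" to "must not exist as something other than a directory", since a missing directory is now created later. A small illustrative helper (hypothetical name) capturing the new rule:

```c
// Sketch of the new validation semantics: a missing path is acceptable
// (it will be created recursively before writing), only an existing
// non-directory is rejected.
#include <stdio.h>
#include <sys/stat.h>

// Returns 1 if `path` is acceptable as a replay output directory.
static int replay_output_dir_ok(const char *path) {
    struct stat buf;
    if(stat(path, &buf) == -1)
        return 1;                 // does not exist yet: fine, created later
    return S_ISDIR(buf.st_mode);  // exists: must actually be a directory
}

int main(void) {
    printf("/tmp -> %d\n", replay_output_dir_ok("/tmp"));               // 1
    printf("/etc/passwd -> %d\n", replay_output_dir_ok("/etc/passwd")); // 0: exists, not a dir
    printf("/tmp/nope -> %d\n", replay_output_dir_ok("/tmp/nope"));     // 1: does not exist
    return 0;
}
```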
@@ -1787,6 +1807,12 @@ int main(int argc, char **argv) {
         requested_audio_inputs.push_back(std::move(mai));
     }
+
+    if(is_livestream && framerate_mode != FramerateMode::CONSTANT) {
+        fprintf(stderr, "Info: framerate mode was forcefully set to \"cfr\" because live streaming was detected\n");
+        framerate_mode = FramerateMode::CONSTANT;
+        framerate_mode_str = "cfr";
+    }
+
     AVStream *video_stream = nullptr;
     std::vector<AudioTrack> audio_tracks;
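Forcing cfr for live streams above presumably keeps timestamps steady for streaming ingest servers, which generally expect a constant frame rate. This diff does not show how is_livestream itself is computed; a plausible stand-in, purely for illustration and not the project's actual detection code, is to look at the URL scheme of the -o output:

```c
// Hypothetical livestream detection by output URL scheme. The prefix list
// and function name are assumptions made for this sketch only.
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

static bool looks_like_livestream_url(const char *output) {
    static const char *prefixes[] = { "rtmp://", "rtmps://", "srt://", "rtsp://" };
    for(size_t i = 0; i < sizeof(prefixes) / sizeof(prefixes[0]); ++i) {
        if(strncmp(output, prefixes[i], strlen(prefixes[i])) == 0)
            return true;
    }
    return false;
}

int main(void) {
    printf("%d\n", looks_like_livestream_url("rtmp://live.twitch.tv/app/STREAM_KEY")); // 1
    printf("%d\n", looks_like_livestream_url("/home/user/Videos/video.mp4"));          // 0
    return 0;
}
```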