// gpu-screen-recorder/src/main.cpp

/*
Copyright (C) 2020 dec05eba
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
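//
// Overview: this program records a single X11 window on NVIDIA GPUs. The
// window is redirected with XComposite and its pixmap is mapped into an
// OpenGL texture (texture-from-pixmap). Each frame the texture is copied
// into a second texture that is registered with CUDA, copied into a CUDA
// device frame and encoded with FFmpeg's h264_nvenc encoder. Audio can
// optionally be captured from a PulseAudio device and encoded as AAC, and
// an optional replay buffer keeps only the last N seconds of packets.
//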
#include <assert.h>
#include <libavutil/pixfmt.h>
#include <stdio.h>
#include <stdlib.h>
#include <string>
#include <vector>
#include <thread>
#include <mutex>
#include <map>
#include <signal.h>
#include <unistd.h>

#include "../include/sound.hpp"
#define GLX_GLXEXT_PROTOTYPES
#include <GL/glew.h>
#include <GL/glx.h>
#include <GL/glxext.h>
#include <GLFW/glfw3.h>
#include <X11/extensions/Xcomposite.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_cuda.h>
#include <libavutil/opt.h>
#include <libswresample/swresample.h>
}
#include <cudaGL.h>
extern "C" {
#include <libavutil/hwcontext.h>
}
#include <deque>
//#include <CL/cl.h>
static thread_local char av_error_buffer[AV_ERROR_MAX_STRING_SIZE];
static char* av_error_to_string(int err) {
if(av_strerror(err, av_error_buffer, sizeof(av_error_buffer)) < 0)
strcpy(av_error_buffer, "Unknown error");
return av_error_buffer;
}
struct ScopedGLXFBConfig {
~ScopedGLXFBConfig() {
if (configs)
XFree(configs);
}
GLXFBConfig *configs = nullptr;
};
struct WindowPixmap {
WindowPixmap()
: pixmap(None), glx_pixmap(None), texture_id(0), target_texture_id(0),
texture_width(0), texture_height(0) {}

Pixmap pixmap;
GLXPixmap glx_pixmap;
GLuint texture_id;
GLuint target_texture_id;
GLint texture_width;
GLint texture_height;
};
enum class VideoQuality {
HIGH,
ULTRA
};
static bool x11_supports_composite_named_window_pixmap(Display *dpy) {
int extension_major;
int extension_minor;
if (!XCompositeQueryExtension(dpy, &extension_major, &extension_minor))
return false;
int major_version;
int minor_version;
return XCompositeQueryVersion(dpy, &major_version, &minor_version) &&
(major_version > 0 || minor_version >= 2);
}
static int x11_error_handler(Display *dpy, XErrorEvent *ev) {
#if 0
char type_str[128];
XGetErrorText(dpy, ev->type, type_str, sizeof(type_str));
char major_opcode_str[128];
XGetErrorText(dpy, ev->type, major_opcode_str, sizeof(major_opcode_str));
char minor_opcode_str[128];
XGetErrorText(dpy, ev->type, minor_opcode_str, sizeof(minor_opcode_str));
fprintf(stderr,
"X Error of failed request: %s\n"
"Major opcode of failed request: %d (%s)\n"
"Minor opcode of failed request: %d (%s)\n"
"Serial number of failed request: %d\n",
type_str,
ev->request_code, major_opcode_str,
ev->minor_code, minor_opcode_str);
#endif
return 0;
}
static int x11_io_error_handler(Display *dpy) {
return 0;
}
static void cleanup_window_pixmap(Display *dpy, WindowPixmap &pixmap) {
if (pixmap.target_texture_id) {
glDeleteTextures(1, &pixmap.target_texture_id);
pixmap.target_texture_id = 0;
}

if (pixmap.texture_id) {
glDeleteTextures(1, &pixmap.texture_id);
pixmap.texture_id = 0;
pixmap.texture_width = 0;
pixmap.texture_height = 0;
}

if (pixmap.glx_pixmap) {
glXDestroyPixmap(dpy, pixmap.glx_pixmap);
glXReleaseTexImageEXT(dpy, pixmap.glx_pixmap, GLX_FRONT_EXT);
pixmap.glx_pixmap = None;
}

if (pixmap.pixmap) {
XFreePixmap(dpy, pixmap.pixmap);
pixmap.pixmap = None;
}
}
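// Creates (or recreates, e.g. after a window resize) the capture path for
// the target window: name a composite pixmap for the window, wrap it in a
// GLXPixmap and bind it to pixmap.texture_id with glXBindTexImageEXT, then
// allocate pixmap.target_texture_id as a plain GL_RGBA texture of the same
// size. The second texture exists because cuGraphicsGLRegisterImage can't
// be used with the texture that is bound directly to the pixmap (see the
// comment further down), so every frame is copied into it first.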
static bool recreate_window_pixmap(Display *dpy, Window window_id,
WindowPixmap &pixmap) {
cleanup_window_pixmap(dpy, pixmap);
XWindowAttributes attr;
if (!XGetWindowAttributes(dpy, window_id, &attr)) {
fprintf(stderr, "Failed to get window attributes\n");
return false;
}
const int pixmap_config[] = {
GLX_BIND_TO_TEXTURE_RGBA_EXT, True,
GLX_DRAWABLE_TYPE, GLX_PIXMAP_BIT | GLX_WINDOW_BIT,
GLX_BIND_TO_TEXTURE_TARGETS_EXT, GLX_TEXTURE_2D_BIT_EXT,
GLX_BIND_TO_MIPMAP_TEXTURE_EXT, True,
GLX_DOUBLEBUFFER, False,
GLX_BUFFER_SIZE, 32,
GLX_ALPHA_SIZE, 8,
// GLX_Y_INVERTED_EXT, (int)GLX_DONT_CARE,
None};
// Note that a mipmap is generated even though it's not used.
// glCopyImageSubData fails if the texture doesn't have a mipmap.
const int pixmap_attribs[] = {GLX_TEXTURE_TARGET_EXT,
GLX_TEXTURE_2D_EXT,
GLX_TEXTURE_FORMAT_EXT,
GLX_TEXTURE_FORMAT_RGBA_EXT,
GLX_MIPMAP_TEXTURE_EXT,
1,
None};
int c;
GLXFBConfig *configs = glXChooseFBConfig(dpy, 0, pixmap_config, &c);
if (!configs) {
fprintf(stderr, "Failed too choose fb config\n");
return false;
}
ScopedGLXFBConfig scoped_configs;
scoped_configs.configs = configs;
bool found = false;
GLXFBConfig config;
for (int i = 0; i < c; i++) {
config = configs[i];
XVisualInfo *visual = glXGetVisualFromFBConfig(dpy, config);
if (!visual)
continue;
if (attr.depth != visual->depth) {
XFree(visual);
continue;
}
XFree(visual);
found = true;
break;
}
if(!found) {
fprintf(stderr, "No matching fb config found\n");
return false;
}
Pixmap new_window_pixmap = XCompositeNameWindowPixmap(dpy, window_id);
if (!new_window_pixmap) {
fprintf(stderr, "Failed to get pixmap for window %ld\n", window_id);
return false;
}
GLXPixmap glx_pixmap =
glXCreatePixmap(dpy, config, new_window_pixmap, pixmap_attribs);
if (!glx_pixmap) {
fprintf(stderr, "Failed to create glx pixmap\n");
XFreePixmap(dpy, new_window_pixmap);
return false;
}

pixmap.pixmap = new_window_pixmap;
pixmap.glx_pixmap = glx_pixmap;

//glEnable(GL_TEXTURE_2D);
glGenTextures(1, &pixmap.texture_id);
glBindTexture(GL_TEXTURE_2D, pixmap.texture_id);

// glEnable(GL_BLEND);
// glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glXBindTexImageEXT(dpy, pixmap.glx_pixmap, GLX_FRONT_EXT, NULL);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH,
&pixmap.texture_width);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT,
&pixmap.texture_height);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
GL_NEAREST); // GL_LINEAR );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
GL_NEAREST); // GL_LINEAR);//GL_LINEAR_MIPMAP_LINEAR );
//glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
fprintf(stderr, "texture width: %d, height: %d\n", pixmap.texture_width,
pixmap.texture_height);
// Generating this second texture is needed because
// cuGraphicsGLRegisterImage cant be used with the texture that is mapped
// directly to the pixmap.
// TODO: Investigate if it's somehow possible to use the pixmap texture
// directly; this should improve performance since one less image copy
// would then be needed every frame.
glGenTextures(1, &pixmap.target_texture_id);
glBindTexture(GL_TEXTURE_2D, pixmap.target_texture_id);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, pixmap.texture_width,
pixmap.texture_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
int err2 = glGetError();
fprintf(stderr, "error: %d\n", err2);
glCopyImageSubData(pixmap.texture_id, GL_TEXTURE_2D, 0, 0, 0, 0,
pixmap.target_texture_id, GL_TEXTURE_2D, 0, 0, 0, 0,
pixmap.texture_width, pixmap.texture_height, 1);
int err = glGetError();
fprintf(stderr, "error: %d\n", err);
// glXBindTexImageEXT(dpy, pixmap.glx_pixmap, GLX_FRONT_EXT, NULL);
// glGenerateTextureMipmapEXT(glxpixmap, GL_TEXTURE_2D);
// glGenerateMipmap(GL_TEXTURE_2D);
// glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
// glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
GL_NEAREST); // GL_LINEAR );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
GL_NEAREST); // GL_LINEAR);//GL_LINEAR_MIPMAP_LINEAR );
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glBindTexture(GL_TEXTURE_2D, 0);
return pixmap.texture_id != 0 && pixmap.target_texture_id != 0;
}
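// Returns the name of the first CUDA device (if any); the name is later
// passed to av_hwdevice_ctx_create() to select the GPU used for encoding.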
std::vector<std::string> get_hardware_acceleration_device_names() {
int iGpu = 0;
int nGpu = 0;
cuDeviceGetCount(&nGpu);
if (iGpu < 0 || iGpu >= nGpu) {
fprintf(stderr, "Error: No CUDA capable devices found\n");
return {};
}
CUdevice cuDevice = 0;
cuDeviceGet(&cuDevice, iGpu);
char deviceName[80];
cuDeviceGetName(deviceName, sizeof(deviceName), cuDevice);
fprintf(stderr, "device name: %s\n", deviceName);
return {deviceName};
}
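// Drains every packet that is currently available from the encoder. When a
// replay buffer is used (-r) the packets are stored in frame_data_queue and
// old packets are dropped once the buffer is longer than the requested
// number of seconds; otherwise the packets are written to the muxer right
// away. write_output_mutex serializes the muxer with the audio thread.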
static void receive_frames(AVCodecContext *av_codec_context, AVStream *stream,
AVFormatContext *av_format_context,
double replay_start_time,
std::deque<AVPacket*> &frame_data_queue,
int replay_buffer_size_secs,
bool &frames_erased,
std::mutex &write_output_mutex) {
AVPacket av_packet;
av_init_packet(&av_packet);
for (;;) {
av_packet.data = NULL;
av_packet.size = 0;
int res = avcodec_receive_packet(av_codec_context, &av_packet);
if (res == 0) { // we have a packet, send the packet to the muxer
assert(av_packet.stream_index == stream->id);
av_packet_rescale_ts(&av_packet, av_codec_context->time_base,
stream->time_base);
av_packet.stream_index = stream->index;
av_packet.dts = AV_NOPTS_VALUE;
// Write the encoded video frame to disk
// av_write_frame(av_format_context, &av_packet)
// write(STDOUT_FILENO, av_packet.data, av_packet.size)
std::lock_guard<std::mutex> lock(write_output_mutex);
if(replay_buffer_size_secs != -1) {
double time_now = glfwGetTime();
double replay_time_elapsed = time_now - replay_start_time;
AVPacket *new_pack = new AVPacket();
av_packet_move_ref(new_pack, &av_packet);
frame_data_queue.push_back(new_pack);
if(replay_time_elapsed >= replay_buffer_size_secs) {
av_packet_unref(frame_data_queue.front());
delete frame_data_queue.front();
frame_data_queue.pop_front();
frames_erased = true;
}
} else {
int ret = av_write_frame(av_format_context, &av_packet);
if(ret < 0) {
fprintf(stderr, "Error: Failed to write video frame to muxer, reason: %s (%d)\n", av_error_to_string(ret), ret);
}
}
av_packet_unref(&av_packet);
} else if (res == AVERROR(EAGAIN)) { // we have no packet
// fprintf(stderr, "No packet!\n");
break;
} else if (res == AVERROR_EOF) { // this is the end of the stream
fprintf(stderr, "End of stream!\n");
break;
} else {
fprintf(stderr, "Unexpected error: %d\n", res);
break;
}
}

//av_packet_unref(&av_packet);
}
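// Adds the audio stream. The codec_id parameter is currently ignored and
// AAC is always used, with 48 kHz stereo and planar float samples (the
// format the audio thread converts the captured samples into).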
static AVStream *add_audio_stream(AVFormatContext *av_format_context, AVCodec **codec,
enum AVCodecID codec_id) {
*codec = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!*codec) {
fprintf(
stderr,
"Error: Could not find aac encoder\n");
exit(1);
}
AVStream *stream = avformat_new_stream(av_format_context, *codec);
if (!stream) {
fprintf(stderr, "Error: Could not allocate stream\n");
exit(1);
}
stream->id = av_format_context->nb_streams - 1;
fprintf(stderr, "audio stream id: %d\n", stream->id);
AVCodecContext *codec_context = stream->codec;
assert((*codec)->type == AVMEDIA_TYPE_AUDIO);
/*
codec_context->sample_fmt = (*codec)->sample_fmts
? (*codec)->sample_fmts[0]
: AV_SAMPLE_FMT_FLTP;
*/
codec_context->codec_id = AV_CODEC_ID_AAC;
codec_context->sample_fmt = AV_SAMPLE_FMT_FLTP;
//codec_context->bit_rate = 64000;
codec_context->sample_rate = 48000;
codec_context->channel_layout = AV_CH_LAYOUT_STEREO;
codec_context->channels = 2;
// Some formats want stream headers to be separate
//if (av_format_context->oformat->flags & AVFMT_GLOBALHEADER)
// av_format_context->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
return stream;
}
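// Adds the video stream using NVENC (h264_nvenc, or the nvenc_h264 name
// used by some older ffmpeg builds). The resolution is taken from the
// captured pixmap rounded down to even numbers, the bitrate scales with
// the window area, and the quality presets currently only differ in
// bitrate since qmin/qmax are the same for 'high' and 'ultra'.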
static AVStream *add_video_stream(AVFormatContext *av_format_context, AVCodec **codec,
VideoQuality video_quality,
const WindowPixmap &window_pixmap,
int fps) {
//*codec = avcodec_find_encoder(codec_id);
*codec = avcodec_find_encoder_by_name("h264_nvenc");
if (!*codec) {
*codec = avcodec_find_encoder_by_name("nvenc_h264");
}
if (!*codec) {
fprintf(
stderr,
"Error: Could not find h264_nvenc or nvenc_h264 encoder\n");
exit(1);
}
AVStream *stream = avformat_new_stream(av_format_context, *codec);
if (!stream) {
fprintf(stderr, "Error: Could not allocate stream\n");
exit(1);
}
stream->id = av_format_context->nb_streams - 1;
fprintf(stderr, "video stream id: %d\n", stream->id);
AVCodecContext *codec_context = stream->codec;
//double fps_ratio = (double)fps / 30.0;
assert((*codec)->type == AVMEDIA_TYPE_VIDEO);
codec_context->codec_id = (*codec)->id;
fprintf(stderr, "codec id: %d\n", (*codec)->id);
codec_context->width = window_pixmap.texture_width & ~1;
codec_context->height = window_pixmap.texture_height & ~1;
codec_context->bit_rate = 7500000 + (codec_context->width * codec_context->height) / 2;
// Timebase: This is the fundamental unit of time (in seconds) in terms
// of which frame timestamps are represented. For fixed-fps content,
// timebase should be 1/framerate and timestamp increments should be
// identical to 1
codec_context->time_base.num = 1;
codec_context->time_base.den = fps;
// codec_context->framerate.num = 60;
// codec_context->framerate.den = 1;
codec_context->sample_aspect_ratio.num = 0;
codec_context->sample_aspect_ratio.den = 0;
codec_context->gop_size = fps * 2;
codec_context->max_b_frames = 2;
codec_context->pix_fmt = AV_PIX_FMT_CUDA;
codec_context->color_range = AVCOL_RANGE_JPEG;
switch(video_quality) {
case VideoQuality::HIGH:
codec_context->qmin = 10;
codec_context->qmax = 15;
//av_opt_set(codec_context->priv_data, "preset", "slow", 0);
//av_opt_set(codec_context->priv_data, "profile", "high", 0);
//codec_context->profile = FF_PROFILE_H264_HIGH;
break;
case VideoQuality::ULTRA:
codec_context->bit_rate = 10000000 + (codec_context->width * codec_context->height) / 2;
codec_context->qmin = 10;
codec_context->qmax = 15;
//av_opt_set(codec_context->priv_data, "preset", "veryslow", 0);
//av_opt_set(codec_context->priv_data, "profile", "high", 0);
//codec_context->profile = FF_PROFILE_H264_HIGH;
break;
}
stream->time_base = codec_context->time_base;
stream->avg_frame_rate = av_inv_q(codec_context->time_base);
if (codec_context->codec_id == AV_CODEC_ID_MPEG1VIDEO)
codec_context->mb_decision = 2;
// stream->time_base = codec_context->time_base;
// codec_context->ticks_per_frame = 30;
// Some formats want stream headers to be separate
if (av_format_context->oformat->flags & AVFMT_GLOBALHEADER)
av_format_context->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
return stream;
}
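// Opens the audio encoder and allocates a reusable AVFrame matching the
// encoder's frame size, sample format and channel layout.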
static AVFrame* open_audio(AVCodec *codec, AVStream *stream) {
int ret;
AVCodecContext *codec_context = stream->codec;
ret = avcodec_open2(codec_context, codec, nullptr);
if(ret < 0) {
fprintf(stderr, "failed to open codec, reason: %s\n", av_error_to_string(ret));
exit(1);
}
AVFrame *frame = av_frame_alloc();
if(!frame) {
fprintf(stderr, "failed to allocate audio frame\n");
exit(1);
}
frame->nb_samples = codec_context->frame_size;
frame->format = codec_context->sample_fmt;
frame->channels = codec_context->channels;
frame->channel_layout = codec_context->channel_layout;
ret = av_frame_get_buffer(frame, 0);
if(ret < 0) {
fprintf(stderr, "failed to allocate audio data buffers, reason: %s\n", av_error_to_string(ret));
exit(1);
}
return frame;
}
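// Opens the video encoder. A CUDA hardware device context is created for
// the first detected GPU, a hardware frame context (sw_format
// AV_PIX_FMT_0BGR32 on top of AV_PIX_FMT_CUDA) is attached to the encoder,
// and the target GL texture is registered with CUDA so its contents can be
// copied straight into the encoder's device frames with cuMemcpy2D later on.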
static void open_video(AVCodec *codec, AVStream *stream,
WindowPixmap &window_pixmap, AVBufferRef **device_ctx,
CUgraphicsResource *cuda_graphics_resource) {
int ret;
AVCodecContext *codec_context = stream->codec;
std::vector<std::string> hardware_accelerated_devices =
get_hardware_acceleration_device_names();
if (hardware_accelerated_devices.empty()) {
fprintf(
stderr,
"Error: No hardware accelerated device was found on your system\n");
exit(1);
}
if (av_hwdevice_ctx_create(device_ctx, AV_HWDEVICE_TYPE_CUDA,
hardware_accelerated_devices[0].c_str(), NULL,
0) < 0) {
fprintf(stderr,
"Error: Failed to create hardware device context for gpu: %s\n",
hardware_accelerated_devices[0].c_str());
exit(1);
}
AVBufferRef *frame_context = av_hwframe_ctx_alloc(*device_ctx);
if (!frame_context) {
fprintf(stderr, "Error: Failed to create hwframe context\n");
exit(1);
}
AVHWFramesContext *hw_frame_context =
(AVHWFramesContext *)frame_context->data;
hw_frame_context->width = codec_context->width;
hw_frame_context->height = codec_context->height;
hw_frame_context->sw_format = AV_PIX_FMT_0BGR32;
hw_frame_context->format = codec_context->pix_fmt;
hw_frame_context->device_ref = *device_ctx;
hw_frame_context->device_ctx = (AVHWDeviceContext *)(*device_ctx)->data;
if (av_hwframe_ctx_init(frame_context) < 0) {
fprintf(stderr, "Error: Failed to initialize hardware frame context "
"(note: ffmpeg version needs to be > 4.0\n");
exit(1);
}
codec_context->hw_device_ctx = *device_ctx;
codec_context->hw_frames_ctx = frame_context;
ret = avcodec_open2(codec_context, codec, nullptr);
if (ret < 0) {
fprintf(stderr, "Error: Could not open video codec: %s\n",
"blabla"); // av_err2str(ret));
exit(1);
}
AVHWDeviceContext *hw_device_context =
(AVHWDeviceContext *)(*device_ctx)->data;
AVCUDADeviceContext *cuda_device_context =
(AVCUDADeviceContext *)hw_device_context->hwctx;
CUcontext *cuda_context = &(cuda_device_context->cuda_ctx);
if (!cuda_context) {
fprintf(stderr, "Error: No cuda context\n");
exit(1);
}
CUresult res;
CUcontext old_ctx;
res = cuCtxPopCurrent(&old_ctx);
res = cuCtxPushCurrent(*cuda_context);
res = cuGraphicsGLRegisterImage(
cuda_graphics_resource, window_pixmap.target_texture_id, GL_TEXTURE_2D,
CU_GRAPHICS_REGISTER_FLAGS_READ_ONLY);
// cuGraphicsUnregisterResource(*cuda_graphics_resource);
if (res != CUDA_SUCCESS) {
const char *err_str;
cuGetErrorString(res, &err_str);
fprintf(stderr,
"Error: cuGraphicsGLRegisterImage failed, error %s, texture "
"id: %u\n",
err_str, window_pixmap.target_texture_id);
exit(1);
}
res = cuCtxPopCurrent(&old_ctx);
}
static void close_video(AVStream *video_stream, AVFrame *frame) {
// avcodec_close(video_stream->codec);
// av_frame_free(&frame);
}
static void usage() {
fprintf(stderr, "usage: gpu-screen-recorder -w <window_id> -c <container_format> -f <fps> [-a <audio_input>] [-q <quality>] [-r <replay_buffer_size_sec>] [-o <output_file>]\n");
fprintf(stderr, "OPTIONS:\n");
fprintf(stderr, " -w Window to record.\n");
fprintf(stderr, " -c Container format for output file, for example mp4, or flv.\n");
fprintf(stderr, " -f Framerate to record at.\n");
fprintf(stderr, " -a Audio device to record from (pulse audio device). Optional, disabled by default.\n");
fprintf(stderr, " -q Video quality. Should either be 'high' or 'ultra'. Optional, set to 'high' be default.\n");
fprintf(stderr, " -r Replay buffer size in seconds. If this is set, then only the last seconds as set by this option will be stored"
" and the video will only be saved when the gpu-screen-recorder is closed. This feature is similar to Nvidia's instant replay feature."
" This option has be between 5 and 1200. Note that the replay buffer size will not always be precise, because of keyframes. Optional, disabled by default.\n");
2020-08-28 03:04:16 +00:00
fprintf(stderr, " -o The output file path. If omitted, then the encoded data is sent to stdout.\n");
exit(1);
}
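// Example invocation (the window id and audio device below are placeholders;
// get a window id with a tool such as xwininfo and a pulse monitor device
// name with `pactl list sources`):
// gpu-screen-recorder -w 0x1c00001 -c mp4 -f 60 -a alsa_output.pci-0000_00_1b.0.analog-stereo.monitor -q high -o video.mp4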
static volatile sig_atomic_t running = 1;
static void int_handler(int dummy) {
running = 0;
}
struct Arg {
const char *value;
bool optional;
};
int main(int argc, char **argv) {
signal(SIGINT, int_handler);
std::map<std::string, Arg> args = {
{ "-w", Arg { nullptr, false } },
{ "-c", Arg { nullptr, false } },
{ "-f", Arg { nullptr, false } },
{ "-a", Arg { nullptr, true } },
{ "-q", Arg { nullptr, true } },
{ "-o", Arg { nullptr, true } },
{ "-r", Arg { nullptr, true} }
};
for(int i = 1; i < argc - 1; i += 2) {
auto it = args.find(argv[i]);
if(it == args.end()) {
fprintf(stderr, "Invalid argument '%s'\n", argv[i]);
usage();
}
it->second.value = argv[i + 1];
}
for(auto &it : args) {
if(!it.second.optional && !it.second.value) {
fprintf(stderr, "Missing argument '%s'\n", it.first.c_str());
usage();
}
}
Window src_window_id = strtol(args["-w"].value, nullptr, 0);
const char *container_format = args["-c"].value;
int fps = atoi(args["-f"].value);
if(fps <= 0 || fps > 255) {
fprintf(stderr, "invalid fps argument: %s\n", args["-f"].value);
return 1;
}
const char *quality_str = args["-q"].value;
if(!quality_str)
quality_str = "high";
VideoQuality quality;
if(strcmp(quality_str, "high") == 0) {
quality = VideoQuality::HIGH;
} else if(strcmp(quality_str, "ultra") == 0) {
quality = VideoQuality::ULTRA;
} else {
fprintf(stderr, "Error: -q should either be 'high' or 'ultra', got: %s\n", quality_str);
usage();
}
const char *filename = args["-o"].value;
if(!filename)
filename = "/dev/stdout";
const double target_fps = 1.0 / (double)fps;
int replay_buffer_size_secs = -1;
const char *replay_buffer_size_secs_str = args["-r"].value;
if(replay_buffer_size_secs_str) {
replay_buffer_size_secs = atoi(replay_buffer_size_secs_str);
if(replay_buffer_size_secs < 5 || replay_buffer_size_secs > 1200) {
fprintf(stderr, "Error: option -r has to be between 5 and 1200, was: %s\n", replay_buffer_size_secs_str);
return 1;
}
replay_buffer_size_secs += 5; // Add a few seconds to account for packets being lost because non-keyframe packets at the start are skipped
}
Display *dpy = XOpenDisplay(nullptr);
if (!dpy) {
fprintf(stderr, "Error: Failed to open display\n");
return 1;
}
bool has_name_pixmap = x11_supports_composite_named_window_pixmap(dpy);
if (!has_name_pixmap) {
fprintf(stderr, "Error: XCompositeNameWindowPixmap is not supported by "
"your X11 server\n");
2020-01-08 12:34:55 +00:00
return 1;
}
XWindowAttributes attr;
if (!XGetWindowAttributes(dpy, src_window_id, &attr)) {
fprintf(stderr, "Error: Invalid window id: %lu\n", src_window_id);
return 1;
}
XCompositeRedirectWindow(dpy, src_window_id, CompositeRedirectAutomatic);
// glXMakeContextCurrent(Display *dpy, GLXDrawable draw, GLXDrawable read,
// GLXContext ctx)
if (!glfwInit()) {
fprintf(stderr, "Error: Failed to initialize glfw\n");
return 1;
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow *window = glfwCreateWindow(1, 1, "gpu-screen-recorder", nullptr, nullptr);
if (!window) {
fprintf(stderr, "Error: Failed to create glfw window\n");
glfwTerminate();
return 1;
}
glfwMakeContextCurrent(window);
glfwSwapInterval(0);
glfwHideWindow(window);
//#if defined(DEBUG)
XSetErrorHandler(x11_error_handler);
XSetIOErrorHandler(x11_io_error_handler);
//#endif
glewExperimental = GL_TRUE;
GLenum nGlewError = glewInit();
if (nGlewError != GLEW_OK) {
fprintf(stderr, "%s - Error initializing GLEW! %s\n", __FUNCTION__,
glewGetErrorString(nGlewError));
return 1;
}
glGetError(); // to clear the error caused deep in GLEW
WindowPixmap window_pixmap;
if (!recreate_window_pixmap(dpy, src_window_id, window_pixmap)) {
fprintf(stderr, "Error: Failed to create glx pixmap for window: %lu\n",
src_window_id);
return 1;
}
// Video start
AVFormatContext *av_format_context;
// The output format is automatically guessed by the file extension
avformat_alloc_output_context2(&av_format_context, nullptr, container_format,
nullptr);
if (!av_format_context) {
fprintf(
stderr,
"Error: Failed to deduce output format from file extension\n");
return 1;
}
AVOutputFormat *output_format = av_format_context->oformat;
AVCodec *video_codec;
AVStream *video_stream =
add_video_stream(av_format_context, &video_codec, quality,
window_pixmap, fps);
if (!video_stream) {
fprintf(stderr, "Error: Failed to create video stream\n");
return 1;
}
AVCodec *audio_codec;
AVStream *audio_stream =
add_audio_stream(av_format_context, &audio_codec, output_format->audio_codec);
if (!audio_stream) {
fprintf(stderr, "Error: Failed to create audio stream\n");
return 1;
}
if (cuInit(0) < 0) {
fprintf(stderr, "Error: cuInit failed\n");
return 1;
}
AVBufferRef *device_ctx;
CUgraphicsResource cuda_graphics_resource;
open_video(video_codec, video_stream, window_pixmap, &device_ctx,
&cuda_graphics_resource);
AVFrame *audio_frame = open_audio(audio_codec, audio_stream);
//av_dump_format(av_format_context, 0, filename, 1);
if (!(output_format->flags & AVFMT_NOFILE)) {
int ret = avio_open(&av_format_context->pb, filename, AVIO_FLAG_WRITE);
if (ret < 0) {
fprintf(stderr, "Error: Could not open '%s': %s\n", filename,
"blabla"); // av_err2str(ret));
return 1;
}
}
//video_stream->duration = AV_TIME_BASE * 15;
//audio_stream->duration = AV_TIME_BASE * 15;
//av_format_context->duration = AV_TIME_BASE * 15;
int ret = avformat_write_header(av_format_context, nullptr);
if (ret < 0) {
fprintf(stderr, "Error occurred when opening output file: %s\n",
"blabla"); // av_err2str(ret));
return 1;
}
AVHWDeviceContext *hw_device_context =
(AVHWDeviceContext *)device_ctx->data;
AVCUDADeviceContext *cuda_device_context =
(AVCUDADeviceContext *)hw_device_context->hwctx;
CUcontext *cuda_context = &(cuda_device_context->cuda_ctx);
if (!cuda_context) {
fprintf(stderr, "Error: No cuda context\n");
exit(1);
}
// av_frame_free(&rgb_frame);
// avcodec_close(av_codec_context);
XSelectInput(dpy, src_window_id, StructureNotifyMask);
int frame_count = 0;
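// Map the CUDA-registered GL texture once up front: after pushing the
// encoder's CUDA context, the registered resource is mapped and its level-0
// array is fetched so each captured frame can be copied from the texture
// into the encoder's device frame with cuMemcpy2D.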
CUresult res;
CUcontext old_ctx;
res = cuCtxPopCurrent(&old_ctx);
res = cuCtxPushCurrent(*cuda_context);
// Get texture
res = cuGraphicsResourceSetMapFlags(
cuda_graphics_resource, CU_GRAPHICS_MAP_RESOURCE_FLAGS_READ_ONLY);
res = cuGraphicsMapResources(1, &cuda_graphics_resource, 0);
// Map texture to cuda array
CUarray mapped_array;
res = cuGraphicsSubResourceGetMappedArray(&mapped_array,
cuda_graphics_resource, 0, 0);
// Release texture
// res = cuGraphicsUnmapResources(1, &cuda_graphics_resource, 0);
double start_time = glfwGetTime();
double frame_timer_start = start_time;
double window_resize_timer = start_time;
bool window_resized = false;
int fps_counter = 0;
int current_fps = 30;
AVFrame *frame = av_frame_alloc();
if (!frame) {
fprintf(stderr, "Error: Failed to allocate frame\n");
exit(1);
}
frame->format = video_stream->codec->pix_fmt;
frame->width = video_stream->codec->width;
frame->height = video_stream->codec->height;
if (av_hwframe_get_buffer(video_stream->codec->hw_frames_ctx, frame, 0) < 0) {
fprintf(stderr, "Error: av_hwframe_get_buffer failed\n");
exit(1);
}
XWindowAttributes xwa;
XGetWindowAttributes(dpy, src_window_id, &xwa);
int window_width = xwa.width;
int window_height = xwa.height;
std::mutex write_output_mutex;
std::thread audio_thread;
double record_start_time = glfwGetTime();
std::deque<AVPacket*> frame_data_queue;
bool frames_erased = false;
SoundDevice sound_device;
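// If -a was given, start a separate thread that reads chunks from the
// PulseAudio device, converts them from interleaved S16 to planar float
// with libswresample, encodes them as AAC and hands the packets to the
// muxer (or to the replay buffer) under write_output_mutex.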
Arg &audio_input_arg = args["-a"];
if(audio_input_arg.value) {
if(sound_device_get_by_name(&sound_device, audio_input_arg.value, audio_stream->codec->channels, audio_stream->codec->frame_size) != 0) {
fprintf(stderr, "failed to get 'pulse' sound device\n");
exit(1);
}
int audio_buffer_size = av_samples_get_buffer_size(NULL, audio_stream->codec->channels, audio_stream->codec->frame_size, audio_stream->codec->sample_fmt, 1);
uint8_t *audio_frame_buf = (uint8_t *)av_malloc(audio_buffer_size);
avcodec_fill_audio_frame(audio_frame, audio_stream->codec->channels, audio_stream->codec->sample_fmt, (const uint8_t*)audio_frame_buf, audio_buffer_size, 1);
audio_thread = std::thread([audio_buffer_size, record_start_time, replay_buffer_size_secs, &frame_data_queue, &frames_erased](AVFormatContext *av_format_context, AVStream *audio_stream, uint8_t *audio_frame_buf, SoundDevice *sound_device, AVFrame *audio_frame, std::mutex *write_output_mutex) mutable {
AVPacket audio_packet;
if(av_new_packet(&audio_packet, audio_buffer_size) != 0) {
fprintf(stderr, "Failed to create audio packet\n");
exit(1);
}
SwrContext *swr = swr_alloc();
if(!swr) {
fprintf(stderr, "Failed to create SwrContext\n");
exit(1);
}
av_opt_set_int(swr, "in_channel_layout", audio_stream->codec->channel_layout, 0);
av_opt_set_int(swr, "out_channel_layout", audio_stream->codec->channel_layout, 0);
av_opt_set_int(swr, "in_sample_rate", audio_stream->codec->sample_rate, 0);
av_opt_set_int(swr, "out_sample_rate", audio_stream->codec->sample_rate, 0);
av_opt_set_sample_fmt(swr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0);
av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_FLTP, 0);
swr_init(swr);
while(running) {
void *sound_buffer;
int sound_buffer_size = sound_device_read_next_chunk(sound_device, &sound_buffer);
if(sound_buffer_size >= 0) {
// TODO: Instead of converting audio, get float audio from alsa. Or does alsa do conversion internally to get this format?
swr_convert(swr, &audio_frame_buf, audio_frame->nb_samples, (const uint8_t**)&sound_buffer, sound_buffer_size);
audio_frame->extended_data = &audio_frame_buf;
// TODO: Fix this. Warning from ffmpeg:
// Timestamps are unset in a packet for stream 1. This is deprecated and will stop working in the future. Fix your code to set the timestamps properly
//audio_frame->pts=audio_frame_index*100;
//++audio_frame_index;
int got_frame = 0;
int ret = avcodec_encode_audio2(audio_stream->codec, &audio_packet, audio_frame, &got_frame);
if(ret < 0){
printf("Failed to encode!\n");
break;
}
if (got_frame==1){
//printf("Succeed to encode 1 frame! \tsize:%5d\n",pkt.size);
audio_packet.stream_index = audio_stream->index;
std::lock_guard<std::mutex> lock(*write_output_mutex);
if(replay_buffer_size_secs != -1) {
double time_now = glfwGetTime();
double replay_time_elapsed = time_now - record_start_time;
AVPacket *new_pack = new AVPacket();
av_packet_move_ref(new_pack, &audio_packet);
frame_data_queue.push_back(new_pack);
if(replay_time_elapsed >= replay_buffer_size_secs) {
av_packet_unref(frame_data_queue.front());
delete frame_data_queue.front();
frame_data_queue.pop_front();
frames_erased = true;
}
} else {
ret = av_write_frame(av_format_context, &audio_packet);
if(ret < 0) {
fprintf(stderr, "Error: Failed to write audio frame to muxer, reason: %s (%d)\n", av_error_to_string(ret), ret);
}
}
av_packet_unref(&audio_packet);
}
} else {
fprintf(stderr, "failed to read sound from device, error: %d\n", sound_buffer_size);
}
}
swr_free(&swr);
}, av_format_context, audio_stream, audio_frame_buf, &sound_device, audio_frame, &write_output_mutex);
}
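// Main capture loop: poll for ConfigureNotify events to detect window
// resizes (the pixmap and the CUDA registration are recreated once the
// size has been stable for a second), pace the capture to the requested
// framerate, and for every frame copy pixmap texture -> target texture ->
// CUDA array -> hardware frame before sending it to the encoder.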
bool redraw = true;
XEvent e;
while (running) {
double frame_start = glfwGetTime();
/*glClear(GL_COLOR_BUFFER_BIT);*/
glfwPollEvents();
if (XCheckTypedWindowEvent(dpy, src_window_id, ConfigureNotify, &e) && e.xconfigure.window == src_window_id) {
// Window resize
if(e.xconfigure.width != window_width || e.xconfigure.height != window_height) {
window_width = e.xconfigure.width;
window_height = e.xconfigure.height;
window_resize_timer = glfwGetTime();
window_resized = true;
}
}
redraw = true;
const double window_resize_timeout = 1.0; // 1 second
if(window_resized && glfwGetTime() - window_resize_timer >= window_resize_timeout) {
window_resized = false;
fprintf(stderr, "Resize window!\n");
recreate_window_pixmap(dpy, src_window_id, window_pixmap);
// Resolution must be a multiple of two
//video_stream->codec->width = window_pixmap.texture_width & ~1;
//video_stream->codec->height = window_pixmap.texture_height & ~1;
cuGraphicsUnregisterResource(cuda_graphics_resource);
res = cuGraphicsGLRegisterImage(
&cuda_graphics_resource, window_pixmap.target_texture_id, GL_TEXTURE_2D,
CU_GRAPHICS_REGISTER_FLAGS_READ_ONLY);
if (res != CUDA_SUCCESS) {
const char *err_str;
cuGetErrorString(res, &err_str);
fprintf(stderr,
"Error: cuGraphicsGLRegisterImage failed, error %s, texture "
"id: %u\n",
err_str, window_pixmap.target_texture_id);
break;
}
res = cuGraphicsResourceSetMapFlags(
cuda_graphics_resource, CU_GRAPHICS_MAP_RESOURCE_FLAGS_READ_ONLY);
res = cuGraphicsMapResources(1, &cuda_graphics_resource, 0);
res = cuGraphicsSubResourceGetMappedArray(&mapped_array, cuda_graphics_resource, 0, 0);
av_frame_unref(frame);
if (av_hwframe_get_buffer(video_stream->codec->hw_frames_ctx, frame, 0) < 0) {
fprintf(stderr, "Error: av_hwframe_get_buffer failed\n");
break;
}
}
++fps_counter;
double time_now = glfwGetTime();
double frame_timer_elapsed = time_now - frame_timer_start;
double elapsed = time_now - start_time;
if (elapsed >= 1.0) {
fprintf(stderr, "fps: %d\n", fps_counter);
start_time = time_now;
current_fps = fps_counter;
fps_counter = 0;
}
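// Frame pacing: a frame is captured whenever the time since the last
// capture reaches the target frame period (1.0 / fps); the overflow is
// carried over so the average rate stays close to the requested fps.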
double frame_time_overflow = frame_timer_elapsed - target_fps;
if (frame_time_overflow >= 0.0) {
frame_timer_start = time_now - frame_time_overflow;
if(redraw) {
redraw = false;
// TODO: Use a framebuffer instead. glCopyImageSubData requires
// opengl 4.2
glCopyImageSubData(
window_pixmap.texture_id, GL_TEXTURE_2D, 0, 0, 0, 0,
window_pixmap.target_texture_id, GL_TEXTURE_2D, 0, 0, 0, 0,
window_pixmap.texture_width, window_pixmap.texture_height, 1);
// int err = glGetError();
// fprintf(stderr, "error: %d\n", err);
CUDA_MEMCPY2D memcpy_struct;
memcpy_struct.srcXInBytes = 0;
memcpy_struct.srcY = 0;
memcpy_struct.srcMemoryType = CUmemorytype::CU_MEMORYTYPE_ARRAY;
memcpy_struct.dstXInBytes = 0;
memcpy_struct.dstY = 0;
memcpy_struct.dstMemoryType = CUmemorytype::CU_MEMORYTYPE_DEVICE;
memcpy_struct.srcArray = mapped_array;
memcpy_struct.dstDevice = (CUdeviceptr)frame->data[0];
memcpy_struct.dstPitch = frame->linesize[0];
memcpy_struct.WidthInBytes = frame->width * 4;
memcpy_struct.Height = frame->height;
cuMemcpy2D(&memcpy_struct);
// res = cuCtxPopCurrent(&old_ctx);
glfwSwapBuffers(window);
}
frame->pts = frame_count;
frame_count += 1;
if (avcodec_send_frame(video_stream->codec, frame) >= 0) {
receive_frames(video_stream->codec, video_stream, av_format_context,
record_start_time, frame_data_queue, replay_buffer_size_secs, frames_erased, write_output_mutex);
} else {
fprintf(stderr, "Error: avcodec_send_frame failed\n");
}
}
// av_frame_free(&frame);
double frame_end = glfwGetTime();
double frame_sleep_fps = 1.0 / 250.0;
double sleep_time = frame_sleep_fps - (frame_end - frame_start);
if(sleep_time > 0.0)
usleep(sleep_time * 1000.0 * 1000.0);
}
running = 0;
if(audio_input_arg.value) {
audio_thread.join();
sound_device_close(&sound_device);
}
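// Replay mode: nothing has been written to the muxer while recording, so
// dump the buffered packets now. Start at the first video keyframe in the
// queue and, if old packets were dropped, shift pts so the file starts at
// zero.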
if(replay_buffer_size_secs != -1) {
size_t start_index = 0;
for(size_t i = 0; i < frame_data_queue.size(); ++i) {
AVPacket *av_packet = frame_data_queue[i];
if((av_packet->flags & AV_PKT_FLAG_KEY) && av_packet->stream_index == video_stream->index) {
start_index = i;
break;
} else {
//av_packet_unref(av_packet);
//delete av_packet;
}
}
//fprintf(stderr, "Frame start index: %zu\n", start_index);
int64_t pts_offset = 0;
if(frames_erased)
pts_offset = frame_data_queue[start_index]->pts;
for(size_t i = start_index; i < frame_data_queue.size(); ++i) {
AVPacket *av_packet = frame_data_queue[i];
if(av_packet->stream_index == video_stream->index) {
av_packet->pos = -1;
av_packet->pts -= pts_offset;
av_packet->dts = AV_NOPTS_VALUE;
}
av_packet->pos = -1;
int ret = av_write_frame(av_format_context, av_packet);
if(ret < 0) {
fprintf(stderr, "Error: Failed to write video frame to muxer, reason: %s (%d)\n", av_error_to_string(ret), ret);
}
//av_packet_unref(av_packet);
//delete av_packet;
}
}
//Flush Encoder
#if 0
ret = flush_encoder(pFormatCtx,0);
if (ret < 0) {
printf("Flushing encoder failed\n");
return -1;
}
#endif
if (av_write_trailer(av_format_context) != 0) {
fprintf(stderr, "Failed to write trailer\n");
}
/* add sequence end code to have a real MPEG file */
/*
const uint8_t endcode[] = { 0, 0, 1, 0xb7 };
if (video_codec->id == AV_CODEC_ID_MPEG1VIDEO || video_codec->id == AV_CODEC_ID_MPEG2VIDEO)
write(STDOUT_FILENO, endcode, sizeof(endcode));
*/
// close_video(video_stream, NULL);
if(!(output_format->flags & AVFMT_NOFILE))
avio_close(av_format_context->pb);
// avformat_free_context(av_format_context);
// cleanup_window_pixmap(dpy, window_pixmap);
XCompositeUnredirectWindow(dpy, src_window_id, CompositeRedirectAutomatic);
XCloseDisplay(dpy);
}