Add the ability to save replays into folders by date
This adds a new flag, '-mf', which takes a boolean argument. If it is set, replays are organized into folders by date: a replay is then saved like $output_folder/2023-10-15/Replay_15-39-28.mp4 instead of all replays going into the same folder.
commit 3ae89c8d7c (parent cf7b5e0904)
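For a sense of how the new flag is meant to be used, here is a sketch based on the README example further down; the window, framerate and paths are illustrative, only `-mf yes` is the new part:

    # Replay mode with date folders enabled (illustrative values)
    $ gpu-screen-recorder -w screen -f 60 -r 30 -c mp4 -o ~/Videos -mf yes
    # A saved replay then ends up in a per-date subfolder, e.g.
    #   ~/Videos/2023-10-15/Replay_15-39-28.mp4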
@@ -83,6 +83,7 @@ Here is an example of how to record all monitors and the default audio output: `
 Streaming works the same as recording, but the `-o` argument should be path to the live streaming service you want to use (including your live streaming key). Take a look at scripts/twitch-stream.sh to see an example of how to stream to twitch.
 ## Replay mode
 Run `gpu-screen-recorder` with the `-c mp4` and `-r` option, for example: `gpu-screen-recorder -w screen -f 60 -r 30 -c mp4 -o ~/Videos`. Note that in this case, `-o` should point to a directory (that exists).\
+If `-mf yes` is set, replays are saved in folders based on the date.
 To save a video in replay mode, you need to send signal SIGUSR1 to gpu screen recorder. You can do this by running `killall -SIGUSR1 gpu-screen-recorder`.\
 To stop recording, send SIGINT to gpu screen recorder. You can do this by running `killall gpu-screen-recorder` or pressing `Ctrl-C` in the terminal that runs gpu screen recorder.\
 The file path to the saved replay is output to stdout. All other output from GPU Screen Recorder is output to stderr.
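To make the replay-saving flow described in the hunk above concrete, a small sketch (assuming gpu-screen-recorder is already running in replay mode as shown there):

    # Ask the running recorder to flush the replay buffer to disk
    $ killall -SIGUSR1 gpu-screen-recorder
    # The recorder prints the path of the saved file on its stdout; with -mf yes
    # it first creates <output_dir>/<YYYY-MM-DD>/ if needed and saves the file there.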
@@ -12,9 +12,10 @@ Environment=AUDIO_DEVICE=
 Environment=FRAMERATE=60
 Environment=REPLAYDURATION=60
 Environment=OUTPUTDIR=%h/Videos
-ExecStart=/bin/sh -c 'AUDIO="${AUDIO_DEVICE:-$(pactl get-default-sink).monitor}"; gpu-screen-recorder -v no -w $WINDOW -c $CONTAINER -q $QUALITY -k $CODEC -ac $AUDIO_CODEC -a "$AUDIO" -f $FRAMERATE -r $REPLAYDURATION -o "$OUTPUTDIR" $ADDITIONAL_ARGS'
+Environment=MAKEFOLDERS=no
+ExecStart=/bin/sh -c 'AUDIO="${AUDIO_DEVICE:-$(pactl get-default-sink).monitor}"; gpu-screen-recorder -v no -w $WINDOW -c $CONTAINER -q $QUALITY -k $CODEC -ac $AUDIO_CODEC -a "$AUDIO" -f $FRAMERATE -r $REPLAYDURATION -o "$OUTPUTDIR" -mf $MAKEFOLDERS $ADDITIONAL_ARGS'
 Restart=on-failure
 RestartSec=5s
 
 [Install]
 WantedBy=default.target
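To actually enable the feature for this unit, MAKEFOLDERS has to be overridden; a minimal sketch, assuming the file is installed as a user service named gpu-screen-recorder.service (the unit name is an assumption, adjust to whatever name the file is installed under):

    # Add a drop-in that flips the new variable to yes, then restart the service
    $ systemctl --user edit gpu-screen-recorder.service
    #   [Service]
    #   Environment=MAKEFOLDERS=yes
    $ systemctl --user restart gpu-screen-recorder.service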
src/main.cpp (51 changed lines)
@@ -9,6 +9,7 @@ extern "C" {
 }
 
 #include <assert.h>
+#include <filesystem>
 #include <stdio.h>
 #include <stdlib.h>
 #include <string>
@@ -665,7 +666,7 @@ static void open_video(AVCodecContext *codec_context, VideoQuality video_quality
 }
 
 static void usage_header() {
-    fprintf(stderr, "usage: gpu-screen-recorder -w <window_id|monitor|focused> [-c <container_format>] [-s WxH] -f <fps> [-a <audio_input>] [-q <quality>] [-r <replay_buffer_size_sec>] [-k h264|h265] [-ac aac|opus|flac] [-oc yes|no] [-fm cfr|vfr] [-v yes|no] [-h|--help] [-o <output_file>]\n");
+    fprintf(stderr, "usage: gpu-screen-recorder -w <window_id|monitor|focused> [-c <container_format>] [-s WxH] -f <fps> [-a <audio_input>] [-q <quality>] [-r <replay_buffer_size_sec>] [-k h264|h265] [-ac aac|opus|flac] [-oc yes|no] [-fm cfr|vfr] [-v yes|no] [-h|--help] [-o <output_file>] [-mf yes|no]\n");
 }
 
 static void usage_full() {
@@ -717,6 +718,8 @@ static void usage_full() {
     fprintf(stderr, "\n");
     fprintf(stderr, " -h Show this help.\n");
     fprintf(stderr, "\n");
+    fprintf(stderr, " -mf Organise replays in folders based on the current date.\n");
+    fprintf(stderr, "\n");
     //fprintf(stderr, " -pixfmt The pixel format to use for the output video. yuv420 is the most common format and is best supported, but the color is compressed, so colors can look washed out and certain colors of text can look bad. Use yuv444 for no color compression, but the video may not work everywhere and it may not work with hardware video decoding. Optional, defaults to yuv420\n");
     fprintf(stderr, " -o The output file path. If omitted then the encoded data is sent to stdout. Required in replay mode (when using -r).\n");
     fprintf(stderr, " In replay mode this has to be an existing directory instead of a file.\n");
@@ -795,6 +798,22 @@ static std::string get_date_str() {
     return str;
 }
 
+static std::string get_date_only_str() {
+    char str[128];
+    time_t now = time(NULL);
+    struct tm *t = localtime(&now);
+    strftime(str, sizeof(str)-1, "%Y-%m-%d", t);
+    return str;
+}
+
+static std::string get_time_only_str() {
+    char str[128];
+    time_t now = time(NULL);
+    struct tm *t = localtime(&now);
+    strftime(str, sizeof(str)-1, "%H-%M-%S", t);
+    return str;
+}
+
 static AVStream* create_stream(AVFormatContext *av_format_context, AVCodecContext *codec_context) {
     AVStream *stream = avformat_new_stream(av_format_context, nullptr);
     if (!stream) {
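The two helpers above only format the current local time; the same components can be reproduced in the shell with equivalent format strings (shown purely for illustration):

    # Folder name used when -mf yes is set
    $ date +%Y-%m-%d    # e.g. 2023-10-15
    # Time stamp used in the replay file name
    $ date +%H-%M-%S    # e.g. 15-39-28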
@@ -830,7 +849,7 @@ static std::future<void> save_replay_thread;
 static std::vector<std::shared_ptr<PacketData>> save_replay_packets;
 static std::string save_replay_output_filepath;
 
-static void save_replay_async(AVCodecContext *video_codec_context, int video_stream_index, std::vector<AudioTrack> &audio_tracks, std::deque<std::shared_ptr<PacketData>> &frame_data_queue, bool frames_erased, std::string output_dir, const char *container_format, const std::string &file_extension, std::mutex &write_output_mutex) {
+static void save_replay_async(AVCodecContext *video_codec_context, int video_stream_index, std::vector<AudioTrack> &audio_tracks, std::deque<std::shared_ptr<PacketData>> &frame_data_queue, bool frames_erased, std::string output_dir, const char *container_format, const std::string &file_extension, std::mutex &write_output_mutex, bool make_folders) {
     if(save_replay_thread.valid())
         return;
 
@@ -873,7 +892,16 @@ static void save_replay_async(AVCodecContext *video_codec_context, int video_str
         }
     }
 
-    save_replay_output_filepath = output_dir + "/Replay_" + get_date_str() + "." + file_extension;
+    if (make_folders) {
+        std::string output_folder = output_dir + '/' + get_date_only_str();
+        if (!std::filesystem::exists(output_folder)) {
+            std::filesystem::create_directory(output_folder);
+        }
+        save_replay_output_filepath = output_folder + "/Replay_" + get_time_only_str() + "." + file_extension;
+    } else {
+        save_replay_output_filepath = output_dir + "/Replay_" + get_date_str() + "." + file_extension;
+    }
 
     save_replay_thread = std::async(std::launch::async, [video_stream_index, container_format, start_index, video_pts_offset, audio_pts_offset, video_codec_context, &audio_tracks]() mutable {
         AVFormatContext *av_format_context;
         avformat_alloc_output_context2(&av_format_context, nullptr, container_format, nullptr);
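Expressed as a shell sketch (hypothetical variable names, mirroring the branch above rather than reproducing it), the resulting paths look like this:

    $ outdir=~/Videos; ext=mp4   # hypothetical values for -o and the container
    # make_folders == true: per-date folder plus time-only file name
    $ mkdir -p "$outdir/$(date +%Y-%m-%d)"
    $ echo "$outdir/$(date +%Y-%m-%d)/Replay_$(date +%H-%M-%S).$ext"
    # make_folders == false: the old behaviour, one folder with date+time in the file name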
@@ -1150,6 +1178,7 @@ int main(int argc, char **argv) {
         { "-fm", Arg { {}, true, false } },
         { "-pixfmt", Arg { {}, true, false } },
         { "-v", Arg { {}, true, false } },
+        { "-mf", Arg { {}, true, false } },
     };
 
     for(int i = 1; i < argc; i += 2) {
@@ -1243,6 +1272,20 @@ int main(int argc, char **argv) {
        usage();
    }
 
+    bool make_folders = false;
+    const char *make_folders_str = args["-mf"].value();
+    if(!make_folders_str)
+        make_folders_str = "no";
+
+    if(strcmp(make_folders_str, "yes") == 0) {
+        make_folders = true;
+    } else if(strcmp(make_folders_str, "no") == 0) {
+        make_folders = false;
+    } else {
+        fprintf(stderr, "Error: -mf should be either 'yes' or 'no', got: '%s'\n", make_folders_str);
+        usage();
+    }
+
     PixelFormat pixel_format = PixelFormat::YUV420;
     const char *pixfmt = args["-pixfmt"].value();
     if(!pixfmt)
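The value check above means anything other than yes/no aborts with the usage text; sketched from the fprintf in the hunk (the exact output depends on the rest of usage()):

    $ gpu-screen-recorder -w screen -f 60 -r 30 -c mp4 -o ~/Videos -mf maybe
    # -> Error: -mf should be either 'yes' or 'no', got: 'maybe'
    #    (followed by the usage text)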
@@ -2128,7 +2171,7 @@ int main(int argc, char **argv) {
 
         if(save_replay == 1 && !save_replay_thread.valid() && replay_buffer_size_secs != -1) {
             save_replay = 0;
-            save_replay_async(video_codec_context, VIDEO_STREAM_INDEX, audio_tracks, frame_data_queue, frames_erased, filename, container_format, file_extension, write_output_mutex);
+            save_replay_async(video_codec_context, VIDEO_STREAM_INDEX, audio_tracks, frame_data_queue, frames_erased, filename, container_format, file_extension, write_output_mutex, make_folders);
         }
 
         double frame_end = clock_get_monotonic_seconds();