mp4 recording code from @LemonSkin

This commit is contained in:
F. Duncanh
2026-01-22 00:18:31 -05:00
parent 1bd37fd5c8
commit 8d8c0e78c9
5 changed files with 417 additions and 5 deletions

View File

@@ -41,7 +41,8 @@ pkg_check_modules(GST REQUIRED gstreamer-1.0>=1.4
add_library( renderers
STATIC
audio_renderer.c
video_renderer.c )
video_renderer.c
mux_renderer.c )
target_link_libraries ( renderers PUBLIC airplay )

323
renderers/mux_renderer.c Normal file
View File

@@ -0,0 +1,323 @@
/**
* UxPlay - An open-source AirPlay mirroring server
* Copyright (C) 2021-24 F. Duncanh
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include "mux_renderer.h"
/* one second expressed in nanoseconds (the unit of GstClockTime) */
#define SECOND_IN_NSECS 1000000000UL
/* shared logger, set once by mux_renderer_init() */
static logger_t *logger = NULL;
/* base name for output files; final name is "<base>.<n>.<format>.mp4", built in mux_renderer_start() */
static const char *output_filename = NULL;
/* the <n> in the output filename; incremented each time a new recording starts */
static int file_count = 0;
/* which streams are recorded; set from mux_renderer_init() arguments */
static gboolean no_audio = FALSE;
static gboolean no_video = FALSE;
/* codec selections reported by the choose_*_codec callbacks */
static gboolean audio_is_alac = FALSE;
static gboolean video_is_h265 = FALSE;
/* per-recording state: pipeline, app sources, and timestamp bookkeeping */
typedef struct mux_renderer_s {
GstElement *pipeline;
GstElement *video_appsrc;
GstElement *audio_appsrc;
GstElement *filesink;
GstBus *bus;
GstClockTime base_time;        /* ntp time of the first pushed buffer; all PTS are relative to it */
GstClockTime first_video_time; /* ntp time of the first video buffer (used to pad leading silence) */
GstClockTime first_audio_time; /* ntp time of the first audio buffer */
gboolean audio_started;        /* TRUE once the first audio buffer has been handled */
gboolean is_alac;              /* codec snapshot taken when this pipeline was created */
gboolean is_h265;
} mux_renderer_t;
/* single active recorder instance (NULL when not recording) */
static mux_renderer_t *renderer = NULL;
/* appsrc caps: byte-stream, access-unit-aligned elementary video streams */
static const char h264_caps[] = "video/x-h264,stream-format=(string)byte-stream,alignment=(string)au";
static const char h265_caps[] = "video/x-h265,stream-format=(string)byte-stream,alignment=(string)au";
/* AAC-ELD caps with a fixed AudioSpecificConfig for 44100 Hz stereo — assumes the
 * sender always uses this format; TODO confirm against raop audio negotiation */
static const char aac_eld_caps[] = "audio/mpeg,mpegversion=(int)4,channels=(int)2,rate=(int)44100,stream-format=raw,codec_data=(buffer)f8e85000";
/* ALAC caps; codec_data is a hard-coded ALACSpecificConfig ("alac" magic cookie) */
static const char alac_caps[] = "audio/x-alac,mpegversion=(int)4,channels=(int)2,rate=(int)44100,stream-format=raw,codec_data=(buffer)"
"00000024""616c6163""00000000""00000160""0010280a""0e0200ff""00000000""00000000""0000ac44";
/* called once when uxplay first starts */
/* Stores the logger, the output filename base, and which streams (audio/video)
 * will be recorded. If both streams are disabled the recorder stays inert. */
void mux_renderer_init(logger_t *render_logger, const char *filename, bool use_audio, bool use_video) {
    logger = render_logger;
    no_audio = !use_audio;
    no_video = !use_video;
    /* nothing to do when neither stream will be rendered */
    if (no_audio && no_video) {
        logger_log(logger, LOGGER_INFO, "both audio and video rendering are disabled: nothing to record: (not starting mux renderer)");
        return;
    }
    if (no_audio) {
        logger_log(logger, LOGGER_INFO, "audio rendering is disabled: video only will be recorded");
    }
    if (no_video) {
        logger_log(logger, LOGGER_INFO, "video rendering is disabled: audio only will be recorded");
    }
    file_count = 0;
    output_filename = filename;
    logger_log(logger, LOGGER_INFO, "Mux renderer initialized: %s", output_filename);
}
static
void mux_renderer_start(void) {
GError *error = NULL;
GstCaps *video_caps = NULL;
GstCaps *audio_caps = NULL;
if (renderer && renderer->pipeline) {
logger_log(logger, LOGGER_DEBUG, "Mux renderer already running");
return;
}
mux_renderer_destroy();
renderer = g_new0(mux_renderer_t, 1);
renderer->base_time = GST_CLOCK_TIME_NONE;
renderer->first_video_time = GST_CLOCK_TIME_NONE;
renderer->first_audio_time = GST_CLOCK_TIME_NONE;
renderer->audio_started = FALSE;
renderer->pipeline = NULL;
renderer->video_appsrc = NULL;
renderer->audio_appsrc = NULL;
renderer->is_alac = audio_is_alac;
renderer->is_h265 = video_is_h265;
file_count++;
GString *filename = g_string_new("");
g_string_append(filename, g_strdup_printf("%s.%d.", output_filename, file_count));
if (!no_video && !audio_is_alac) {
if (video_is_h265) {
g_string_append(filename,"H265.");
} else {
g_string_append(filename,"H264.");
}
} if (!no_audio) {
if (audio_is_alac) {
g_string_append(filename,"ALAC.");
} else {
g_string_append(filename,"AAC.");
}
}
g_string_append(filename, "mp4");
GString *launch = g_string_new("");
if (!no_video && !audio_is_alac) {
g_string_append(launch, "appsrc name=video_src format=time is-live=true ! queue ! ");
if (video_is_h265) {
g_string_append(launch, "h265parse ! ");
} else {
g_string_append(launch, "h264parse ! ");
}
g_string_append(launch, "mux. ");
}
if (!no_audio) {
g_string_append(launch, "appsrc name=audio_src format=time is-live=true ! queue ! ");
if (!audio_is_alac ) {
g_string_append(launch, "aacparse ! queue ! ");
}
g_string_append(launch, "mux. ");
}
g_string_append(launch, "mp4mux name=mux ! filesink name=filesink location=");
g_string_append(launch, filename->str);
logger_log(logger, LOGGER_DEBUG, "created Mux pipeline: %s", launch->str);
renderer->pipeline = gst_parse_launch(launch->str, &error);
g_string_free(launch, TRUE);
if (error) {
logger_log(logger, LOGGER_ERR, "Mux pipeline error: %s", error->message);
g_clear_error(&error);
g_free(filename);
return;
}
if (!no_video && !audio_is_alac) {
renderer->video_appsrc = gst_bin_get_by_name(GST_BIN(renderer->pipeline), "video_src");
if (renderer->is_h265) {
video_caps = gst_caps_from_string(h265_caps);
} else {
video_caps = gst_caps_from_string(h264_caps);
}
g_object_set(renderer->video_appsrc, "caps", video_caps, NULL);
gst_caps_unref(video_caps);
}
if (!no_audio) {
renderer->audio_appsrc = gst_bin_get_by_name(GST_BIN(renderer->pipeline), "audio_src");
if (audio_is_alac) {
audio_caps = gst_caps_from_string(alac_caps);
} else {
audio_caps = gst_caps_from_string(aac_eld_caps);
}
g_object_set(renderer->audio_appsrc, "caps", audio_caps, NULL);
gst_caps_unref(audio_caps);
}
renderer->filesink = gst_bin_get_by_name(GST_BIN(renderer->pipeline), "filesink");
renderer->bus = gst_element_get_bus(renderer->pipeline);
gst_element_set_state(renderer->pipeline, GST_STATE_PLAYING);
logger_log(logger, LOGGER_INFO, "Started recording to: %s", filename->str);
g_string_free(filename, TRUE);
}
/* called by audio_get_format callback in uxplay.cpp, from raop_handlers.h */
/* Records the negotiated audio compression type (audio_ct == 2 selects ALAC,
 * anything else is treated as AAC) and recreates the pipeline if the codec
 * changed since the current recording started. */
void mux_renderer_choose_audio_codec(unsigned char audio_ct) {
if (no_audio) {
return;
}
audio_is_alac = (audio_ct == 2);
if (renderer && renderer->is_alac != audio_is_alac) {
logger_log(logger, LOGGER_DEBUG, "Audio codec changed, recreating mux renderer");
mux_renderer_destroy();
}
/* NOTE(review): the pipeline is only started here for ALAC (which produces an
 * audio-only recording); for AAC the start is presumably triggered by
 * mux_renderer_choose_video_codec() — confirm the AAC-without-video case */
if (audio_ct == 2) {
mux_renderer_start();
}
}
/* called by video_set_codec callback in uxplay.cpp, from raop_rtp_mirror */
/* Records the negotiated video codec (h264 or h265), tears down a running
 * pipeline whose codec no longer matches, then (re)starts recording. */
void mux_renderer_choose_video_codec(bool is_h265) {
    video_is_h265 = is_h265;
    gboolean codec_changed = renderer && renderer->pipeline && renderer->is_h265 != video_is_h265;
    if (codec_changed) {
        logger_log(logger, LOGGER_DEBUG, "Video codec changed, recreating mux renderer");
        mux_renderer_destroy();
    }
    logger_log(logger, LOGGER_DEBUG, "Mux renderer video codec: h265=%s", is_h265 ? "true" : "false");
    mux_renderer_start();
}
/* called by video_process callback in uxplay.cpp */
/* Pushes one encoded video access unit into the recording pipeline.
 * The first buffer establishes base_time; PTS/DTS are ntp_time relative to it. */
void mux_renderer_push_video(unsigned char *data, int data_len, uint64_t ntp_time) {
    if (no_video || !renderer || !renderer->pipeline || !renderer->video_appsrc) {
        return;
    }
    GstBuffer *buf = gst_buffer_new_allocate(NULL, data_len, NULL);
    if (buf == NULL) {
        return;
    }
    gst_buffer_fill(buf, 0, data, data_len);
    GstClockTime now = (GstClockTime) ntp_time;
    if (renderer->base_time == GST_CLOCK_TIME_NONE) {
        /* first buffer of the recording: anchor the timeline here */
        renderer->base_time = now;
        renderer->first_video_time = now;
    }
    GstClockTime timestamp = now - renderer->base_time;
    GST_BUFFER_PTS(buf) = timestamp;
    GST_BUFFER_DTS(buf) = timestamp;
    gst_app_src_push_buffer(GST_APP_SRC(renderer->video_appsrc), buf);
}
/* called by audio_process callback in uxplay.cpp*/
/* Pushes one compressed audio frame into the recording pipeline. When video
 * started before audio, a leading block of silence is inserted so that the
 * audio track lines up with the video in the muxed file. */
void mux_renderer_push_audio(unsigned char *data, int data_len, uint64_t ntp_time) {
if (no_audio) {
return;
}
if (!renderer || !renderer->pipeline || !renderer->audio_appsrc) return;
/* one-time alignment step, only taken once video has established its start time */
if (!renderer->audio_started && renderer->first_video_time != GST_CLOCK_TIME_NONE) {
renderer->audio_started = TRUE;
renderer->first_audio_time = (GstClockTime)ntp_time;
if (renderer->first_audio_time > renderer->first_video_time) {
GstClockTime silence_duration = renderer->first_audio_time - renderer->first_video_time;
/* 44100 Hz assumed (matches the hard-coded rate in the appsrc caps) */
guint64 num_samples = (silence_duration * 44100) / GST_SECOND;
/* 2 channels x 2 bytes per sample */
gsize silence_size = num_samples * 2 * 2;
GstBuffer *silence_buffer = gst_buffer_new_allocate(NULL, silence_size, NULL);
if (silence_buffer) {
GstMapInfo map;
if (gst_buffer_map(silence_buffer, &map, GST_MAP_WRITE)) {
memset(map.data, 0, map.size);
gst_buffer_unmap(silence_buffer, &map);
}
/* NOTE(review): this pushes raw zero bytes through an appsrc whose caps
 * declare compressed AAC/ALAC — presumably the parser/muxer tolerates or
 * skips it; verify the resulting file actually gains leading silence */
GST_BUFFER_PTS(silence_buffer) = 0;
GST_BUFFER_DTS(silence_buffer) = 0;
GST_BUFFER_DURATION(silence_buffer) = silence_duration;
gst_app_src_push_buffer(GST_APP_SRC(renderer->audio_appsrc), silence_buffer);
logger_log(logger, LOGGER_DEBUG, "Inserted %.2f seconds of silence before audio",
(double)silence_duration / GST_SECOND);
}
}
}
GstBuffer *buffer = gst_buffer_new_allocate(NULL, data_len, NULL);
if (!buffer) return;
gst_buffer_fill(buffer, 0, data, data_len);
/* audio-only recordings (e.g. ALAC) anchor the timeline on the first audio buffer */
if (renderer->base_time == GST_CLOCK_TIME_NONE) {
renderer->base_time = (GstClockTime)ntp_time;
}
GstClockTime pts = (GstClockTime)ntp_time - renderer->base_time;
GST_BUFFER_PTS(buffer) = pts;
GST_BUFFER_DTS(buffer) = pts;
gst_app_src_push_buffer(GST_APP_SRC(renderer->audio_appsrc), buffer);
}
/* called by conn_destroy callback in uxplay.cpp, and when video resets */
void mux_renderer_stop(void) {
if (!renderer || !renderer->pipeline) return;
if (renderer->video_appsrc) {
gst_app_src_end_of_stream(GST_APP_SRC(renderer->video_appsrc));
}
if (renderer->audio_appsrc) {
gst_app_src_end_of_stream(GST_APP_SRC(renderer->audio_appsrc));
}
GstMessage *msg = gst_bus_timed_pop_filtered(renderer->bus, 5 * GST_SECOND,
GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
if (msg) {
gst_message_unref(msg);
}
gst_element_set_state(renderer->pipeline, GST_STATE_NULL);
if (renderer->video_appsrc) {
gst_object_unref(renderer->video_appsrc);
renderer->video_appsrc = NULL;
}
if (renderer->audio_appsrc) {
gst_object_unref(renderer->audio_appsrc);
renderer->audio_appsrc = NULL;
}
gst_object_unref(renderer->filesink);
renderer->filesink = NULL;
gst_object_unref(renderer->bus);
renderer->bus = NULL;
gst_object_unref(renderer->pipeline);
renderer->pipeline = NULL;
renderer->base_time = GST_CLOCK_TIME_NONE;
logger_log(logger, LOGGER_INFO, "Stopped recording");
audio_is_alac = FALSE;
video_is_h265 = FALSE;
}
/* Stops any active recording, then releases the renderer instance itself. */
void mux_renderer_destroy(void) {
    mux_renderer_stop();
    if (renderer == NULL) {
        return;
    }
    g_free(renderer);
    renderer = NULL;
}

44
renderers/mux_renderer.h Normal file
View File

@@ -0,0 +1,44 @@
/**
* UxPlay - An open-source AirPlay mirroring server
* Copyright (C) 2021-24 F. Duncanh
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef MUX_RENDERER_H
#define MUX_RENDERER_H
#ifdef __cplusplus
extern "C" {
#endif
#include <stdlib.h>
#include <stdint.h>
#include <stdbool.h>
#include "../lib/logger.h"
/* One-time setup: store logger, output filename base, and which streams to record. */
void mux_renderer_init(logger_t *logger, const char *filename, bool use_audio, bool use_video);
/* Report negotiated audio compression type (audio_ct == 2 means ALAC). */
void mux_renderer_choose_audio_codec(unsigned char audio_ct);
/* Report negotiated video codec; (re)starts the recording pipeline. */
void mux_renderer_choose_video_codec(bool is_h265);
/* Push one encoded video access unit; ntp_time is the remote NTP timestamp. */
void mux_renderer_push_video(unsigned char *data, int data_len, uint64_t ntp_time);
/* Push one compressed audio frame; ntp_time is the remote NTP timestamp. */
void mux_renderer_push_audio(unsigned char *data, int data_len, uint64_t ntp_time);
/* Finalize the current mp4 file (EOS + moov atom) and tear down the pipeline. */
void mux_renderer_stop(void);
/* Stop (if recording) and free the renderer instance. */
void mux_renderer_destroy(void);
#ifdef __cplusplus
}
#endif
#endif //MUX_RENDERER_H

View File

@@ -15,6 +15,10 @@ UxPlay 1.73: An open\-source AirPlay mirroring (+ audio streaming) server:
.TP
\fB\-h265\fR Support h265 (4K) video (with h265 versions of h264 plugins)
.TP
\fB\-mp4\fI [fn]\fR Record (non-HLS) audio/video to mp4 file "fn.[n].[format].mp4"
.IP
n=1,2,.. format = H264/5, ALAC/AAC. Default fn="recording"
.TP
\fB\-hls\fR [v] Support HTTP Live Streaming (HLS), YouTube app video only:
.IP
v = 2 or 3 (default 3) optionally selects video player version

View File

@@ -67,6 +67,7 @@
#include "lib/crypto.h"
#include "renderers/video_renderer.h"
#include "renderers/audio_renderer.h"
#include "renderers/mux_renderer.h"
#ifdef DBUS
#include <dbus/dbus.h>
#endif
@@ -197,6 +198,8 @@ static std::string ble_filename = "";
static std::string rtp_pipeline = "";
static std::string audio_rtp_pipeline = "";
static GMainLoop *gmainloop = NULL;
static bool mux_to_file = false;
static std::string mux_filename = "recording";
//Support for D-Bus-based screensaver inhibition (org.freedesktop.ScreenSaver)
static unsigned int scrsv;
@@ -906,6 +909,8 @@ static void print_info (char *name) {
printf("-n name Specify network name of the AirPlay server (UTF-8/ascii)\n");
printf("-nh Do not add \"@hostname\" at the end of AirPlay server name\n");
printf("-h265 Support h265 (4K) video (with h265 versions of h264 plugins)\n");
printf("-mp4 [fn] Record (non-HLS)audio/video to mp4 file \"fn.[n].[format].mp4\"\n");
printf(" n=1,2,.. format = H264/5, ALAC/AAC. Default fn=\"recording\"\n");
printf("-hls [v] Support HTTP Live Streaming (HLS), Youtube app video only: \n");
printf(" v = 2 or 3 (default 3) optionally selects video player version\n");
printf("-lang xx HLS language preferences (\"fr:es:..\", overrides $LANGUAGE)\n");
@@ -951,6 +956,7 @@ static void print_info (char *name) {
printf("-vrtp pl Use rtph26[4,5]pay to send decoded video elsewhere: \"pl\"\n");
printf(" is the remaining pipeline, starting with rtph26*pay options:\n");
printf(" e.g. \"config-interval=1 ! udpsink host=127.0.0.1 port=5000\"\n");
printf(" Writes output to \"fn.N.mp4\"\n");
printf("-v4l2 Use Video4Linux2 for GPU hardware h264 decoding\n");
printf("-bt709 Sometimes needed for Raspberry Pi models using Video4Linux2 \n");
printf("-srgb Display \"Full range\" [0-255] color, not \"Limited Range\"[16-235]\n");
@@ -985,6 +991,9 @@ static void print_info (char *name) {
printf("-key [fn] Store private key in $HOME/.uxplay.pem (or in file \"fn\")\n");
printf("-dacp [fn]Export client DACP information to file $HOME/.uxplay.dacp\n");
printf(" (option to use file \"fn\" instead); used for client remote\n");
printf("-ble [fn] For BluetoothLE beacon: write data to file ~/.uxplay.ble\n");
printf(" optional: write to file \"fn\" (\"fn\" = \"off\" to cancel)\n");
printf("-d [n] Enable debug logging; optional: n=1 to skip normal packet data\n");
printf("-vdmp [n] Dump h264 video output to \"fn.h264\"; fn=\"videodump\",change\n");
printf(" with \"-vdmp [n] filename\". If [n] is given, file fn.x.h264\n");
printf(" x=1,2,.. opens whenever a new SPS/PPS NAL arrives, and <=n\n");
@@ -993,9 +1002,6 @@ static void print_info (char *name) {
printf(" =1,2,..; fn=\"audiodump\"; change with \"-admp [n] filename\".\n");
printf(" x increases when audio format changes. If n is given, <= n\n");
printf(" audio packets are dumped. \"aud\"= unknown format.\n");
printf("-ble [fn] For BluetoothLE beacon: write data to file ~/.uxplay.ble\n");
printf(" optional: write to file \"fn\" (\"fn\" = \"off\" to cancel)\n");
printf("-d [n] Enable debug logging; optional: n=1 to skip normal packet data\n");
printf("-v Displays version information\n");
printf("-h Displays this help\n");
printf("-rc fn Read startup options from file \"fn\" instead of ~/.uxplayrc, etc\n");
@@ -1480,6 +1486,17 @@ static void parse_arguments (int argc, char *argv[]) {
exit(1);
}
}
} else if (arg == "-mp4"){
mux_to_file = true;
if (i < argc - 1 && *argv[i+1] != '-') {
mux_filename.erase();
mux_filename.append(argv[++i]);
const char *fn = mux_filename.c_str();
if (!file_has_write_access(fn)) {
fprintf(stderr, "%s cannot be written to:\noption \"-mp4 <fn>\" must be to a file with write access\n", fn);
exit(1);
}
}
} else if (arg == "-admp") {
dump_audio = true;
if (i < argc - 1 && *argv[i+1] != '-') {
@@ -2117,6 +2134,9 @@ extern "C" void video_reset(void *cls, reset_type_t type) {
extern "C" int video_set_codec(void *cls, video_codec_t codec) {
bool video_is_h265 = (codec == VIDEO_CODEC_H265);
if (mux_to_file) {
mux_renderer_choose_video_codec(video_is_h265);
}
if (!use_video) {
return 0;
}
@@ -2177,6 +2197,9 @@ extern "C" void conn_destroy (void *cls) {
if (dacpfile.length()) {
remove (dacpfile.c_str());
}
if (mux_to_file) {
mux_renderer_stop();
}
}
}
@@ -2233,6 +2256,9 @@ extern "C" void audio_process (void *cls, raop_ntp_t *ntp, audio_decode_struct *
if (dump_audio) {
dump_audio_to_file(data->data, data->data_len, (data->data)[0] & 0xf0);
}
if (mux_to_file) {
mux_renderer_push_audio(data->data, data->data_len, data->ntp_time_remote);
}
if (use_audio) {
if (!remote_clock_offset) {
uint64_t local_time = (data->ntp_time_local ? data->ntp_time_local : get_local_time());
@@ -2265,6 +2291,9 @@ extern "C" void video_process (void *cls, raop_ntp_t *ntp, video_decode_struct *
if (dump_video) {
dump_video_to_file(data->data, data->data_len);
}
if (mux_to_file) {
mux_renderer_push_video(data->data, data->data_len, data->ntp_time_remote);
}
if (use_video) {
if (!remote_clock_offset) {
uint64_t local_time = (data->ntp_time_local ? data->ntp_time_local : get_local_time());
@@ -2403,6 +2432,10 @@ extern "C" void audio_get_format (void *cls, unsigned char *ct, unsigned short *
audio_renderer_start(ct);
}
if (mux_to_file) {
mux_renderer_choose_audio_codec(*ct);
}
if (coverart_filename.length()) {
write_coverart(coverart_filename.c_str(), (const void *) empty_image, sizeof(empty_image));
}
@@ -3069,6 +3102,10 @@ int main (int argc, char *argv[]) {
#endif
}
if (mux_to_file) {
mux_renderer_init(render_logger, mux_filename.c_str(), use_audio, use_video);
}
if (udp[0]) {
LOGI("using network ports UDP %d %d %d TCP %d %d %d", udp[0], udp[1], udp[2], tcp[0], tcp[1], tcp[2]);
}
@@ -3169,6 +3206,9 @@ int main (int argc, char *argv[]) {
raop_start_httpd(raop, &port);
raop_set_port(raop, port);
}
if (mux_to_file) {
mux_renderer_stop();
}
goto reconnect;
} else {
LOGI("Stopping RAOP Server...");