Merge pull request #492 from JacobSyndeo/feature/audio-rtp-output

Add -artp option for audio RTP output
This commit is contained in:
fduncanh
2026-01-18 16:42:06 -05:00
committed by GitHub
5 changed files with 67 additions and 25 deletions

View File

@@ -20,7 +20,9 @@
- option `-vrtp <rest-of-pipeline>` bypasses rendering by UxPlay, and instead
transmits rtp packets of decrypted h264 or h265 video to
an external renderer (e.g. OBS Studio) at an address specified in `rest-of-pipeline`.
(Note: this is video only, an option "-rtp" which muxes audio and video into a mpeg4 container still needs to be created:
Similarly, `-artp <rest-of-pipeline>` forwards decoded audio as L16 RTP packets.
Both options can be used together to forward video and audio (as separate concurrent streams) to external applications.
(Note: an option "-rtp" which muxes audio and video into a mpeg4 container still needs to be created:
Pull Requests welcomed).
- (for Linux/*BSD Desktop Environments using D-Bus). New option `-scrsv <n>` provides screensaver inhibition (e.g., to
@@ -1313,6 +1315,11 @@ Uses rtph264pay or rtph265pay as appropriate: *pipeline* should start with any
rtph26xpay options (such as config-interval= or aggregate-mode=), followed by
a sending method: *e.g.*, `"config-interval=1 ! udpsink host=127.0.0.1 port=5000"`.
**-artp *pipeline***: forward decoded audio as L16 RTP packets to somewhere else, without local playback.
Uses rtpL16pay (16-bit signed big-endian PCM, 44100 Hz stereo): *pipeline* should start with any
rtpL16pay options (such as pt=), followed by a sending method:
*e.g.*, `"pt=96 ! udpsink host=127.0.0.1 port=5002"`. iOS volume control still works over RTP.
**-v4l2** Video settings for hardware h264 video decoding in the GPU by
Video4Linux2. Equivalent to `-vd v4l2h264dec -vc v4l2convert`.

View File

@@ -40,6 +40,7 @@ static gboolean render_audio = FALSE;
static gboolean async = FALSE;
static gboolean vsync = FALSE;
static gboolean sync = FALSE;
static gboolean audio_rtp = FALSE;
typedef struct audio_renderer_s {
GstElement *appsrc;
@@ -124,12 +125,17 @@ bool gstreamer_init(){
return (bool) check_plugins ();
}
void audio_renderer_init(logger_t *render_logger, const char* audiosink, const bool* audio_sync, const bool* video_sync) {
void audio_renderer_init(logger_t *render_logger, const char* audiosink, const bool* audio_sync, const bool* video_sync, const char *artp_pipeline) {
GError *error = NULL;
GstCaps *caps = NULL;
GstClock *clock = gst_system_clock_obtain();
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
audio_rtp = (bool) strlen(artp_pipeline);
if (audio_rtp) {
g_print("*** Audio RTP mode enabled: sending to %s\n", artp_pipeline);
}
logger = render_logger;
aac = check_plugin_feature (avdec_aac);
@@ -155,27 +161,38 @@ void audio_renderer_init(logger_t *render_logger, const char* audiosink, const b
}
g_string_append (launch, "audioconvert ! ");
g_string_append (launch, "audioresample ! "); /* wasapisink must resample from 44.1 kHz to 48 kHz */
g_string_append (launch, "volume name=volume ! level ! ");
g_string_append (launch, audiosink);
switch(i) {
case 1: /*ALAC*/
if (*audio_sync) {
g_string_append (launch, " sync=true");
async = TRUE;
} else {
g_string_append (launch, " sync=false");
async = FALSE;
g_string_append (launch, "volume name=volume ! ");
if (!audio_rtp) {
/* Normal path: local audio output */
g_string_append (launch, "level ! ");
g_string_append (launch, audiosink);
switch(i) {
case 1: /*ALAC*/
if (*audio_sync) {
g_string_append (launch, " sync=true");
async = TRUE;
} else {
g_string_append (launch, " sync=false");
async = FALSE;
}
break;
default:
if (*video_sync) {
g_string_append (launch, " sync=true");
vsync = TRUE;
} else {
g_string_append (launch, " sync=false");
vsync = FALSE;
}
break;
}
break;
default:
if (*video_sync) {
g_string_append (launch, " sync=true");
vsync = TRUE;
} else {
g_string_append (launch, " sync=false");
vsync = FALSE;
}
break;
} else {
/* RTP path: send decoded PCM over RTP */
/* rtpL16pay requires S16BE (big-endian) format */
g_string_append (launch, "audioconvert ! audio/x-raw,format=S16BE,rate=44100,channels=2 ! ");
g_string_append (launch, "rtpL16pay ");
g_string_append (launch, artp_pipeline);
}
renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
if (error) {

View File

@@ -33,7 +33,7 @@ extern "C" {
#include "../lib/logger.h"
bool gstreamer_init();
void audio_renderer_init(logger_t *logger, const char* audiosink, const bool *audio_sync, const bool *video_sync);
void audio_renderer_init(logger_t *logger, const char* audiosink, const bool *audio_sync, const bool *video_sync, const char *artp_pipeline);
void audio_renderer_start(unsigned char* compression_type);
void audio_renderer_stop();
void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned short *seqnum, uint64_t *ntp_time);

View File

@@ -107,6 +107,12 @@ UxPlay 1.73: An open\-source AirPlay mirroring (+ audio streaming) server:
is the remaining pipeline, starting with rtph26*pay options:
.IP
e.g. "config-interval=1 ! udpsink host=127.0.0.1 port=5000"
.TP
\fB\-artp\fI pl\fR Use rtpL16pay to send decoded audio elsewhere: "pl"
.IP
is the remaining pipeline, starting with rtpL16pay options:
.IP
e.g. "pt=96 ! udpsink host=127.0.0.1 port=5002"
.PP
.TP
\fB\-v4l2\fR Use Video4Linux2 for GPU hardware h264 video decoding.

View File

@@ -195,6 +195,7 @@ static std::string artist;
static std::string coverart_artist;
static std::string ble_filename = "";
static std::string rtp_pipeline = "";
static std::string audio_rtp_pipeline = "";
static GMainLoop *gmainloop = NULL;
//Support for D-Bus-based screensaver inhibition (org.freedesktop.ScreenSaver)
@@ -959,6 +960,9 @@ static void print_info (char *name) {
printf(" some choices:pulsesink,alsasink,pipewiresink,jackaudiosink,\n");
printf(" osssink,oss4sink,osxaudiosink,wasapisink,directsoundsink.\n");
printf("-as 0 (or -a) Turn audio off, streamed video only\n");
printf("-artp pl Use rtpL16pay to send decoded audio elsewhere: \"pl\"\n");
printf(" is the remaining pipeline, starting with rtpL16pay options:\n");
printf(" e.g. \"pt=96 ! udpsink host=127.0.0.1 port=5002\"\n");
printf("-al x Audio latency in seconds (default 0.25) reported to client.\n");
printf("-ca [<fn>]In Audio (ALAC) mode, render cover-art [or write to file <fn>]\n");
printf("-md <fn> In Airplay Audio (ALAC) mode, write metadata text to file <fn>\n");
@@ -1444,7 +1448,15 @@ static void parse_arguments (int argc, char *argv[]) {
}
rtp_pipeline.erase();
rtp_pipeline.append(argv[++i]);
} else if (arg == "-vdmp") {
} else if (arg == "-artp") {
if (!option_has_value(i, argc, arg, argv[i+1])) {
fprintf(stderr,"option \"-artp\" must be followed by a pipeline for sending the audio stream:\n"
"e.g., \"<rtpL16pay options> ! udpsink host=127.0.0.1 port=5002\"\n");
exit(1);
}
audio_rtp_pipeline.erase();
audio_rtp_pipeline.append(argv[++i]);
} else if (arg == "-vdmp") {
dump_video = true;
if (i < argc - 1 && *argv[i+1] != '-') {
unsigned int n = 0;
@@ -3036,7 +3048,7 @@ int main (int argc, char *argv[]) {
logger_set_level(render_logger, log_level);
if (use_audio) {
audio_renderer_init(render_logger, audiosink.c_str(), &audio_sync, &video_sync);
audio_renderer_init(render_logger, audiosink.c_str(), &audio_sync, &video_sync, audio_rtp_pipeline.c_str());
} else {
LOGI("audio_disabled");
}