Add -artp option for audio RTP output

Add a new -artp command-line option that routes decoded audio to an RTP
stream instead of the local audio sink, following the existing -vrtp
pattern for video.

Usage: uxplay -artp "pt=96 ! udpsink host=127.0.0.1 port=5002"

The implementation:
- Decodes audio (AAC-ELD/ALAC) to PCM
- Converts to S16BE format required by rtpL16pay
- Preserves the pipeline's volume element so iOS-initiated volume adjustments still take effect in RTP mode
- Sends L16 RTP packets (16-bit signed big-endian PCM, 44100 Hz, stereo).
This commit is contained in:
Jacob Pritchett
2026-01-18 00:08:14 -07:00
parent 0133170ff7
commit 6039f53976
5 changed files with 67 additions and 25 deletions

View File

@@ -40,6 +40,7 @@ static gboolean render_audio = FALSE;
static gboolean async = FALSE;
static gboolean vsync = FALSE;
static gboolean sync = FALSE;
static gboolean audio_rtp = FALSE;
typedef struct audio_renderer_s {
GstElement *appsrc;
@@ -124,12 +125,17 @@ bool gstreamer_init(){
return (bool) check_plugins ();
}
void audio_renderer_init(logger_t *render_logger, const char* audiosink, const bool* audio_sync, const bool* video_sync) {
void audio_renderer_init(logger_t *render_logger, const char* audiosink, const bool* audio_sync, const bool* video_sync, const char *artp_pipeline) {
GError *error = NULL;
GstCaps *caps = NULL;
GstClock *clock = gst_system_clock_obtain();
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
audio_rtp = (bool) strlen(artp_pipeline);
if (audio_rtp) {
g_print("*** Audio RTP mode enabled: sending to %s\n", artp_pipeline);
}
logger = render_logger;
aac = check_plugin_feature (avdec_aac);
@@ -155,27 +161,38 @@ void audio_renderer_init(logger_t *render_logger, const char* audiosink, const b
}
g_string_append (launch, "audioconvert ! ");
g_string_append (launch, "audioresample ! "); /* wasapisink must resample from 44.1 kHz to 48 kHz */
g_string_append (launch, "volume name=volume ! level ! ");
g_string_append (launch, audiosink);
switch(i) {
case 1: /*ALAC*/
if (*audio_sync) {
g_string_append (launch, " sync=true");
async = TRUE;
} else {
g_string_append (launch, " sync=false");
async = FALSE;
g_string_append (launch, "volume name=volume ! ");
if (!audio_rtp) {
/* Normal path: local audio output */
g_string_append (launch, "level ! ");
g_string_append (launch, audiosink);
switch(i) {
case 1: /*ALAC*/
if (*audio_sync) {
g_string_append (launch, " sync=true");
async = TRUE;
} else {
g_string_append (launch, " sync=false");
async = FALSE;
}
break;
default:
if (*video_sync) {
g_string_append (launch, " sync=true");
vsync = TRUE;
} else {
g_string_append (launch, " sync=false");
vsync = FALSE;
}
break;
}
break;
default:
if (*video_sync) {
g_string_append (launch, " sync=true");
vsync = TRUE;
} else {
g_string_append (launch, " sync=false");
vsync = FALSE;
}
break;
} else {
/* RTP path: send decoded PCM over RTP */
/* rtpL16pay requires S16BE (big-endian) format */
g_string_append (launch, "audioconvert ! audio/x-raw,format=S16BE,rate=44100,channels=2 ! ");
g_string_append (launch, "rtpL16pay ");
g_string_append (launch, artp_pipeline);
}
renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
if (error) {

View File

@@ -33,7 +33,7 @@ extern "C" {
#include "../lib/logger.h"
bool gstreamer_init();
void audio_renderer_init(logger_t *logger, const char* audiosink, const bool *audio_sync, const bool *video_sync);
void audio_renderer_init(logger_t *logger, const char* audiosink, const bool *audio_sync, const bool *video_sync, const char *artp_pipeline);
void audio_renderer_start(unsigned char* compression_type);
void audio_renderer_stop();
void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned short *seqnum, uint64_t *ntp_time);