UxPlay 1.41: separate RAOP and GStreamer startup.

This commit is contained in:
fduncanh
2021-11-11 06:27:15 -05:00
parent d48cc2b271
commit 161d14511a
10 changed files with 342 additions and 266 deletions

View File

@@ -25,19 +25,17 @@ extern "C" {
#endif
#include <stdint.h>
#include <stdbool.h>
#include "../lib/logger.h"
#include "../lib/raop_ntp.h"
#include "video_renderer.h"
typedef struct audio_renderer_s audio_renderer_t;
audio_renderer_t *audio_renderer_init(logger_t *logger, unsigned char *compression_type, const char* audiosink);
void audio_renderer_start(audio_renderer_t *renderer);
void audio_renderer_render_buffer(audio_renderer_t *renderer, raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts);
void audio_renderer_set_volume(audio_renderer_t *renderer, float volume);
void audio_renderer_flush(audio_renderer_t *renderer);
void audio_renderer_destroy(audio_renderer_t *renderer);
void audio_renderer_init(const char* audiosink);
void audio_renderer_start(unsigned char* compression_type);
void audio_renderer_stop();
void audio_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts);
void audio_renderer_set_volume(float volume);
void audio_renderer_flush();
void audio_renderer_destroy();
#ifdef __cplusplus
}

View File

@@ -17,12 +17,14 @@
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "audio_renderer.h"
#include <assert.h>
#include <math.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include "audio_renderer.h"
/* GStreamer Caps strings for Airplay-defined connection types (ct) */
/* GStreamer Caps strings for Airplay-defined audio compression types (ct) */
/* ct = 1; linear PCM (uncompressed): 44100/16/2, S16LE */
static const char lpcm[]="audio/x-raw,rate=(int)44100,channels=(int)2,format=S16LE,layout=interleaved";
@@ -37,141 +39,175 @@ static const char aac_lc[] ="audio/mpeg,mpegversion=(int)4,channnels=(int)2,rate
/* ct = 8; codec_data from MPEG v4 ISO 14996-3 Section 1.6.2.1: AAC_ELD 44100/2 spf = 480 */
static const char aac_eld[] ="audio/mpeg,mpegversion=(int)4,channnels=(int)2,rate=(int)44100,stream-format=raw,codec_data=(buffer)f8e85000";
struct audio_renderer_s {
logger_t *logger;
typedef struct audio_renderer_s {
GstElement *appsrc;
GstElement *pipeline;
GstElement *volume;
};
unsigned char ct;
} audio_renderer_t ;
static gboolean check_plugins (void)
{
int i;
gboolean ret;
GstRegistry *registry;
const gchar *needed[] = {"app", "libav", "playback", "autodetect", NULL};
int i;
gboolean ret;
GstRegistry *registry;
const gchar *needed[] = { "app", "libav", "playback", "autodetect", NULL};
registry = gst_registry_get ();
ret = TRUE;
for (i = 0; i < g_strv_length ((gchar **) needed); i++) {
GstPlugin *plugin;
plugin = gst_registry_find_plugin (registry, needed[i]);
if (!plugin) {
g_print ("Required gstreamer plugin '%s' not found\n", needed[i]);
ret = FALSE;
continue;
registry = gst_registry_get ();
ret = TRUE;
for (i = 0; i < g_strv_length ((gchar **) needed); i++) {
GstPlugin *plugin;
plugin = gst_registry_find_plugin (registry, needed[i]);
if (!plugin) {
g_print ("Required gstreamer plugin '%s' not found\n", needed[i]);
ret = FALSE;
continue;
}
gst_object_unref (plugin);
plugin = NULL;
}
gst_object_unref (plugin);
}
return ret;
return ret;
}
audio_renderer_t *audio_renderer_init(logger_t *logger, unsigned char *compression_type, const char* audiosink) {
audio_renderer_t *renderer;
#define NFORMATS 4
static audio_renderer_t *renderer_type[NFORMATS];
static audio_renderer_t *renderer = NULL;
const char * format[NFORMATS];
void audio_renderer_init(const char* audiosink) {
GError *error = NULL;
GstCaps *caps = NULL;
switch (*compression_type) {
case 1: /* uncompressed PCM */
case 2: /* Apple lossless ALAC */
case 4: /* AAC-LC */
case 8: /* AAC-ELD */
logger_log(logger, LOGGER_INFO , "audio_renderer_init: compression_type ct = %d", *compression_type);
break;
default:
logger_log(logger, LOGGER_ERR, "audio_renderer_init: unsupported compression_type ct = %d", *compression_type);
return NULL;
}
gst_init(NULL,NULL);
assert(check_plugins ());
for (int i = 0; i < NFORMATS ; i++) {
renderer_type[i] = (audio_renderer_t *) calloc(1,sizeof(audio_renderer_t));
GString *launch = g_string_new("appsrc name=audio_source stream-type=0 format=GST_FORMAT_TIME is-live=true ! queue ! ");
switch (i) {
case 0: /* AAC-ELD */
case 2: /* AAC-LC */
g_string_append(launch, "avdec_aac ! ");
break;
case 1: /* ALAC */
g_string_append(launch, "avdec_alac ! ");
break;
case 3: /*PCM*/
break;
}
g_string_append (launch, "audioconvert ! volume name=volume ! level ! ");
g_string_append (launch, audiosink);
g_string_append (launch, " sync=false");
renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
if (error) {
g_error ("get_parse_launch error:\n %s\n",error->message);
g_clear_error (&error);
}
g_assert (renderer_type[i]->pipeline);
g_string_free(launch, TRUE);
renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "audio_source");
renderer_type[i]->volume = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "volume");
switch (i) {
case 0:
caps = gst_caps_from_string(aac_eld);
renderer_type[i]->ct = 8;
format[i] = "AAC-ELD 44100/2";
break;
case 1:
caps = gst_caps_from_string(alac);
renderer_type[i]->ct = 2;
format[i] = "ALAC 44100/16/2";
break;
case 2:
caps = gst_caps_from_string(aac_lc);
renderer_type[i]->ct = 4;
format[i] = "AAC-LC 44100/2";
break;
case 3:
caps = gst_caps_from_string(lpcm);
renderer_type[i]->ct = 1;
format[i] = "PCM 44100/16/2 S16LE";
break;
}
g_message ("supported audio format %d: %s",i+1,format[i]);
g_object_set(renderer_type[i]->appsrc, "caps", caps, NULL);
gst_caps_unref(caps);
}
}
/* Stop audio playback: signal end-of-stream to the active appsrc and drop
 * the pipeline to the NULL state, then mark no renderer as active.
 * Safe to call when no renderer is currently active (renderer == NULL).
 * NOTE: does not free any pipelines — the per-format pipelines stay
 * allocated so audio_renderer_start() can reuse them; freeing happens
 * in audio_renderer_destroy(). */
void audio_renderer_stop() {
if (renderer) {
gst_app_src_end_of_stream(GST_APP_SRC(renderer->appsrc));
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
renderer = NULL;
}
}
void audio_renderer_start(unsigned char *ct) {
unsigned char compression_type = 0, id;
for (int i = 0; i < NFORMATS; i++) {
if(renderer_type[i]->ct == *ct) {
compression_type = *ct;
id = i;
break;
}
}
if (compression_type && renderer) {
if(compression_type != renderer->ct) {
gst_app_src_end_of_stream(GST_APP_SRC(renderer->appsrc));
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
g_message ("changed audio connection, format %s\n", format[id]);
renderer = renderer_type[id];
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
}
} else if (compression_type) {
g_message ("start audio connection, format %s", format[id]);
renderer = renderer_type[id];
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
} else {
g_error( "unknown audio compression type ct = %d\n", *ct);
}
renderer = calloc(1, sizeof(audio_renderer_t));
if (!renderer) {
return NULL;
}
renderer->logger = logger;
assert(check_plugins ());
GString *launch = g_string_new("appsrc name=audio_source stream-type=0 format=GST_FORMAT_TIME is-live=true ! queue ! ");
if (*compression_type == 8 || *compression_type == 4) {
g_string_append(launch, "avdec_aac ! ");
} else if (*compression_type == 2) {
g_string_append(launch, "avdec_alac ! ");
}
g_string_append(launch, "audioconvert ! volume name=volume ! level ! ");
g_string_append(launch, audiosink);
g_string_append(launch, " sync=false");
renderer->pipeline = gst_parse_launch(launch->str, &error);
g_assert (renderer->pipeline);
g_string_free(launch, TRUE);
renderer->appsrc = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "audio_source");
renderer->volume = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "volume");
if (*compression_type == 8) {
logger_log(logger, LOGGER_INFO, "AAC-ELD 44100/2");
caps = gst_caps_from_string(aac_eld);
} else if (*compression_type == 2) {
logger_log(logger, LOGGER_INFO, "ALAC 44100/16/2");
caps = gst_caps_from_string(alac);;
} else if (*compression_type == 4) {
logger_log(logger, LOGGER_INFO, "AAC-LC 44100/2");
caps = gst_caps_from_string(aac_lc);
logger_log(logger, LOGGER_INFO, "uncompressed PCM 44100/16/2");
} else if (*compression_type == 1) {
caps = gst_caps_from_string(lpcm);
}
g_object_set(renderer->appsrc, "caps", caps, NULL);
gst_caps_unref(caps);
return renderer;
}
/* Start audio playback by setting the renderer's GStreamer pipeline to the
 * PLAYING state. The commented-out deep-notify hookup is a debugging aid
 * that would log all property changes on the pipeline. */
void audio_renderer_start(audio_renderer_t *renderer) {
//g_signal_connect( renderer->pipeline, "deep-notify", G_CALLBACK(gst_object_default_deep_notify ), NULL );
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
}
void audio_renderer_render_buffer(audio_renderer_t *renderer, raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts) {
void audio_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts) {
GstBuffer *buffer;
if (data_len == 0) return;
if (data_len == 0 || renderer == NULL) return;
/* all audio received seems to be either ct = 8 (AAC_ELD 44100/2 spf 460 ) AirPlay Mirror protocol */
/* or ct = 2 (ALAC 44100/16/2 spf 352) AirPlay protocol */
/* first byte data[0] of ALAC frame is 0x20, first byte of AAC_ELD is 0x8d or 0x8e, AAC_LC is 0xff (ADTS) */
/* GStreamer caps_filter could be used here to switch the appsrc caps between aac_eld and alac */
/* depending on the initial byte of the buffer, with a pipeline using decodebin */
buffer = gst_buffer_new_and_alloc(data_len);
assert(buffer != NULL);
GST_BUFFER_DTS(buffer) = (GstClockTime)pts;
gst_buffer_fill(buffer, 0, data, data_len);
gst_app_src_push_buffer(GST_APP_SRC(renderer->appsrc), buffer);
}
void audio_renderer_set_volume(audio_renderer_t *renderer, float volume) {
void audio_renderer_set_volume(float volume) {
float avol;
if (fabs(volume) < 28) {
avol=floorf(((28-fabs(volume))/28)*10)/10;
avol=floorf(((28-fabs(volume))/28)*10)/10;
g_object_set(renderer->volume, "volume", avol, NULL);
}
}
void audio_renderer_flush(audio_renderer_t *renderer) {
void audio_renderer_flush() {
}
void audio_renderer_destroy(audio_renderer_t *renderer) {
if(renderer) {
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
gst_object_unref (renderer->appsrc);
gst_object_unref (renderer->pipeline);
gst_object_unref (renderer->volume);
free (renderer);
renderer = NULL;
void audio_renderer_destroy() {
audio_renderer_stop();
for (int i = 0; i < NFORMATS ; i++ ) {
gst_object_unref (renderer_type[i]->volume);
renderer_type[i]->volume = NULL;
gst_object_unref (renderer_type[i]->appsrc);
renderer_type[i]->appsrc = NULL;
gst_object_unref (renderer_type[i]->pipeline);
renderer_type[i]->pipeline = NULL;
free(renderer_type[i]);
}
}

View File

@@ -44,15 +44,16 @@ typedef enum videoflip_e {
typedef struct video_renderer_s video_renderer_t;
video_renderer_t *video_renderer_init (logger_t *logger, const char *server_name, videoflip_t videoflip[2], const char *videosink);
void video_renderer_start (video_renderer_t *renderer);
void video_renderer_render_buffer (video_renderer_t *renderer, raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts, int type);
void video_renderer_flush (video_renderer_t *renderer);
unsigned int video_renderer_listen(void *loop, video_renderer_t *renderer);
void video_renderer_destroy (video_renderer_t *renderer);
void video_renderer_init (const char *server_name, videoflip_t videoflip[2], const char *videosink);
void video_renderer_start ();
void video_renderer_stop ();
void video_renderer_render_buffer (raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts, int type);
void video_renderer_flush ();
unsigned int video_renderer_listen(void *loop);
void video_renderer_destroy ();
/* not implemented for gstreamer */
void video_renderer_update_background (video_renderer_t *renderer, int type);
void video_renderer_update_background (int type);
#ifdef __cplusplus
}

View File

@@ -27,7 +27,6 @@
#endif
struct video_renderer_s {
logger_t *logger;
GstElement *appsrc, *pipeline, *sink;
GstBus *bus;
#ifdef X_DISPLAY_FIX
@@ -36,27 +35,6 @@ struct video_renderer_s {
#endif
};
static gboolean check_plugins (void)
{
int i;
gboolean ret;
GstRegistry *registry;
const gchar *needed[] = { "app", "libav", "playback", "autodetect", NULL};
registry = gst_registry_get ();
ret = TRUE;
for (i = 0; i < g_strv_length ((gchar **) needed); i++) {
GstPlugin *plugin;
plugin = gst_registry_find_plugin (registry, needed[i]);
if (!plugin) {
g_print ("Required gstreamer plugin '%s' not found\n", needed[i]);
ret = FALSE;
continue;
}
gst_object_unref (plugin);
}
return ret;
}
static void append_videoflip (GString *launch, const videoflip_t *flip, const videoflip_t *rot) {
/* videoflip image transform */
@@ -115,10 +93,12 @@ static void append_videoflip (GString *launch, const videoflip_t *flip, const vi
}
}
video_renderer_t *video_renderer_init(logger_t *logger, const char *server_name, videoflip_t videoflip[2], const char *videosink) {
video_renderer_t *renderer;
GError *error = NULL;
static video_renderer_t *renderer = NULL;
void video_renderer_init(const char *server_name, videoflip_t videoflip[2], const char *videosink) {
GError *error = NULL;
/* this call to g_set_application_name makes server_name appear in the X11 display window title bar, */
/* (instead of the program name uxplay taken from (argv[0]). It is only set one time. */
@@ -127,12 +107,8 @@ video_renderer_t *video_renderer_init(logger_t *logger, const char *server_name,
renderer = calloc(1, sizeof(video_renderer_t));
assert(renderer);
gst_init(NULL, NULL);
renderer->logger = logger;
assert(check_plugins ());
gst_init(NULL,NULL);
GString *launch = g_string_new("appsrc name=video_source stream-type=0 format=GST_FORMAT_TIME is-live=true !"
"queue ! decodebin ! videoconvert ! ");
append_videoflip(launch, &videoflip[0], &videoflip[1]);
@@ -143,30 +119,33 @@ video_renderer_t *video_renderer_init(logger_t *logger, const char *server_name,
g_string_free(launch, TRUE);
renderer->appsrc = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "video_source");
assert(renderer->appsrc);
renderer->sink = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "video_sink");
assert(renderer->sink);
#ifdef X_DISPLAY_FIX
renderer->server_name = server_name;
renderer->gst_window = NULL;
bool x_display_fix = false;
if (strcmp(videosink,"autovideosink") == 0) x_display_fix = true;
if (strcmp(videosink,"ximagesink") == 0) x_display_fix = true;
if (strcmp(videosink,"xvimagesink") == 0) x_display_fix = true;
if (strcmp(videosink,"autovideosink") == 0 ||
strcmp(videosink,"ximagesink") == 0 ||
strcmp(videosink,"xvimagesink") == 0) {
x_display_fix = true;
}
if (x_display_fix) {
renderer->gst_window = calloc(1, sizeof(X11_Window_t));
assert(renderer->gst_window);
get_X11_Display(renderer->gst_window);
}
#endif
return renderer;
}
void video_renderer_start(video_renderer_t *renderer) {
//g_signal_connect( renderer->pipeline, "deep-notify", G_CALLBACK(gst_object_default_deep_notify ), NULL );
void video_renderer_start() {
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
renderer->bus = gst_element_get_bus(renderer->pipeline);
}
void video_renderer_render_buffer(video_renderer_t *renderer, raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts, int type) {
void video_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts, int type) {
GstBuffer *buffer;
assert(data_len != 0);
@@ -187,16 +166,30 @@ void video_renderer_render_buffer(video_renderer_t *renderer, raop_ntp_t *ntp, u
/* Flush the video renderer; intentionally a no-op for the GStreamer backend. */
void video_renderer_flush(video_renderer_t *renderer) {
}
void video_renderer_destroy(video_renderer_t *renderer) {
/* Stop video playback: send end-of-stream to the appsrc and set the
 * pipeline to the NULL state. Does nothing if no renderer exists.
 * NOTE: unlike video_renderer_destroy(), this does not unref or free
 * anything, so playback resources remain available. */
void video_renderer_stop() {
if (renderer) {
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
}
}
void video_renderer_destroy() {
if (renderer) {
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
GstState state;
gst_element_get_state(renderer->pipeline, &state, NULL, 0);
if (state != GST_STATE_NULL) {
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
}
gst_object_unref(renderer->bus);
gst_object_unref(renderer->sink);
gst_object_unref (renderer->appsrc);
gst_object_unref (renderer->bus);
gst_object_unref (renderer->pipeline);
gst_object_unref (renderer->sink);
#ifdef X_DISPLAY_FIX
if(renderer->gst_window) free(renderer->gst_window);
if (renderer->gst_window) {
free(renderer->gst_window);
renderer->gst_window = NULL;
}
#endif
free (renderer);
renderer = NULL;
@@ -204,7 +197,7 @@ void video_renderer_destroy(video_renderer_t *renderer) {
}
/* not implemented for gstreamer */
void video_renderer_update_background(video_renderer_t *renderer, int type) {
void video_renderer_update_background(int type) {
}
gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpointer loop) {
@@ -212,17 +205,22 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gboolean flushing;
gst_message_parse_error (message, &err, &debug);
g_print ("GStreamer error: %s\n", err->message);
g_error_free (err);
g_free (debug);
g_main_loop_quit( (GMainLoop *) loop);
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
flushing = TRUE;
gst_bus_set_flushing(bus, flushing);
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
g_main_loop_quit( (GMainLoop *) loop);
break;
}
case GST_MESSAGE_EOS:
/* end-of-stream */
g_print("GStreamer: End-Of-Stream\n");
g_main_loop_quit( (GMainLoop *) loop);
// g_main_loop_quit( (GMainLoop *) loop);
break;
default:
/* unhandled message */
@@ -231,8 +229,7 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
return TRUE;
}
unsigned int video_renderer_listen(void *loop, video_renderer_t *renderer) {
unsigned int video_renderer_listen(void *loop) {
return (unsigned int) gst_bus_add_watch(renderer->bus, (GstBusFunc)
gstreamer_pipeline_bus_callback, (gpointer) loop);
}