Cleanup of the GStreamer audio and video PTS system

This commit is contained in:
F. Duncanh
2023-01-18 16:07:53 -05:00
parent f0c12d363e
commit 1de43f914f
5 changed files with 70 additions and 24 deletions

View File

@@ -27,14 +27,13 @@ extern "C" {
#include <stdlib.h>
#include <stdint.h>
#include <stdbool.h>
#include "../lib/raop_ntp.h"
#include "../lib/logger.h"
bool gstreamer_init();
bool gstreamer_init(uint64_t * unix_start_time, uint64_t *monotonic_start_time);
void audio_renderer_init(logger_t *logger, const char* audiosink, const char* audiodelay);
void audio_renderer_start(unsigned char* compression_type);
void audio_renderer_stop();
void audio_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data_len,
uint64_t ntp_time, uint64_t rtp_time, unsigned short seqnum);
void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned short *seqnum, uint64_t *pts);
void audio_renderer_set_volume(float volume);
void audio_renderer_flush();
void audio_renderer_destroy();

View File

@@ -69,20 +69,43 @@ static gboolean check_plugins (void)
return ret;
}
bool gstreamer_init(){
gst_init(NULL,NULL);
bool gstreamer_init(uint64_t *unix_start_time, uint64_t *monotonic_start_time){
struct timespec tp;
GstClock *clock = NULL;
GstClockTime time;
gst_init(NULL,NULL);
clock = gst_system_clock_obtain();
if (!clock) {
g_print("gstreamer_init: error: failed to obtain gst_system_clock\n");
return false;
}
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_MONOTONIC, NULL);
time = GST_TIME_AS_NSECONDS(gst_clock_get_time(clock));
if (clock_gettime(CLOCK_REALTIME, &tp)){
g_print("gstreamer_init: error failed to get unix time\n");
return false;
}
time += GST_TIME_AS_NSECONDS(gst_clock_get_time(clock));
*monotonic_start_time = time/2;
*unix_start_time = (1000000000 * tp.tv_sec) + tp.tv_nsec;
g_object_unref (clock);
return (bool) check_plugins ();
}
#define NFORMATS 2 /* set to 4 to enable AAC_LD and PCM: allowed, but never seen in real-world use */
static audio_renderer_t *renderer_type[NFORMATS];
static audio_renderer_t *renderer = NULL;
static GstClockTime gst_audio_pipeline_base_time = GST_CLOCK_TIME_NONE;
static logger_t *logger = NULL;
const char * format[NFORMATS];
void audio_renderer_init(logger_t *render_logger, const char* audiosink, const char* audio_delay) {
GError *error = NULL;
GstCaps *caps = NULL;
GstClock *clock = gst_system_clock_obtain();
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_MONOTONIC, NULL);
logger = render_logger;
for (int i = 0; i < NFORMATS ; i++) {
@@ -120,7 +143,8 @@ void audio_renderer_init(logger_t *render_logger, const char* audiosink, const c
}
g_assert (renderer_type[i]->pipeline);
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer_type[i]->pipeline), clock);
renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "audio_source");
renderer_type[i]->volume = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "volume");
switch (i) {
@@ -152,6 +176,7 @@ void audio_renderer_init(logger_t *render_logger, const char* audiosink, const c
g_string_free(launch, TRUE);
g_object_set(renderer_type[i]->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
gst_caps_unref(caps);
g_object_unref(clock);
}
}
@@ -179,21 +204,29 @@ void audio_renderer_start(unsigned char *ct) {
logger_log(logger, LOGGER_INFO, "changed audio connection, format %s", format[id]);
renderer = renderer_type[id];
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
gst_audio_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
}
} else if (compression_type) {
logger_log(logger, LOGGER_INFO, "start audio connection, format %s", format[id]);
renderer = renderer_type[id];
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
gst_audio_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
} else {
logger_log(logger, LOGGER_ERR, "unknown audio compression type ct = %d", *ct);
}
}
void audio_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t ntp_time,
uint64_t rtp_time, unsigned short seqnum) {
void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned short *seqnum, uint64_t *pts_raw) {
GstBuffer *buffer;
bool valid;
GstClockTime pts = (GstClockTime) *pts_raw;
if (pts >= gst_audio_pipeline_base_time) {
pts -= gst_audio_pipeline_base_time;
} else {
logger_log(logger, LOGGER_ERR, "*** invalid *pts_raw < gst_audio_pipeline_base_time");
return;
}
if (data_len == 0 || renderer == NULL) return;
/* all audio received seems to be either ct = 8 (AAC_ELD 44100/2 spf 460 ) AirPlay Mirror protocol *
@@ -203,11 +236,10 @@ void audio_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data
* but is 0x80, 0x81 or 0x82: 0x100000(00,01,10) in ios9, ios10 devices *
* first byte of AAC_LC should be 0xff (ADTS) (but has never been seen). */
buffer = gst_buffer_new_allocate(NULL, data_len, NULL);
buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
g_assert(buffer != NULL);
/* ntp_time is PTS given as UTC in usec */
GST_BUFFER_PTS(buffer) = (GstClockTime) (ntp_time * 1000);
gst_buffer_fill(buffer, 0, data, data_len);
GST_BUFFER_PTS(buffer) = pts;
gst_buffer_fill(buffer, 0, data, *data_len);
switch (renderer->ct){
case 8: /*AAC-ELD*/
switch (data[0]){

View File

@@ -32,7 +32,6 @@ extern "C" {
#include <stdint.h>
#include <stdbool.h>
#include "../lib/logger.h"
#include "../lib/raop_ntp.h"
typedef enum videoflip_e {
NONE,
@@ -49,7 +48,7 @@ void video_renderer_init (logger_t *logger, const char *server_name, videoflip_t
const char *decoder, const char *converter, const char *videosink, const bool *fullscreen);
void video_renderer_start ();
void video_renderer_stop ();
void video_renderer_render_buffer (raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t ntp_time, int nal_count);
void video_renderer_render_buffer (unsigned char* data, int *data_len, int *nal_count, uint64_t *pts);
void video_renderer_flush ();
unsigned int video_renderer_listen(void *loop);
void video_renderer_destroy ();

View File

@@ -97,6 +97,7 @@ static void append_videoflip (GString *launch, const videoflip_t *flip, const vi
}
static video_renderer_t *renderer = NULL;
static GstClockTime gst_video_pipeline_base_time = GST_CLOCK_TIME_NONE;
static logger_t *logger = NULL;
static unsigned short width, height, width_source, height_source; /* not currently used */
static bool first_packet = false;
@@ -125,6 +126,9 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
const char *decoder, const char *converter, const char *videosink, const bool *initial_fullscreen) {
GError *error = NULL;
GstCaps *caps = NULL;
GstClock *clock = gst_system_clock_obtain();
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_MONOTONIC, NULL);
logger = render_logger;
/* this call to g_set_application_name makes server_name appear in the X11 display window title bar, */
@@ -155,13 +159,15 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
g_clear_error (&error);
}
g_assert (renderer->pipeline);
g_string_free(launch, TRUE);
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer->pipeline), clock);
renderer->appsrc = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "video_source");
g_assert(renderer->appsrc);
caps = gst_caps_from_string(h264_caps);
g_object_set(renderer->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
g_string_free(launch, TRUE);
gst_caps_unref(caps);
gst_object_unref(clock);
renderer->sink = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "video_sink");
g_assert(renderer->sink);
@@ -204,6 +210,7 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
void video_renderer_start() {
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
renderer->bus = gst_element_get_bus(renderer->pipeline);
first_packet = true;
#ifdef X_DISPLAY_FIX
@@ -211,8 +218,15 @@ void video_renderer_start() {
#endif
}
void video_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t ntp_time, int nal_count) {
void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_count, uint64_t *pts_raw) {
GstBuffer *buffer;
GstClockTime pts = (GstClockTime) *pts_raw;
if (pts >= gst_video_pipeline_base_time) {
pts -= gst_video_pipeline_base_time;
} else {
logger_log(logger, LOGGER_ERR, "*** invalid *pts_raw < gst_video_pipeline_base_time") ;
return;
}
g_assert(data_len != 0);
/* first four bytes of valid h264 video data are 0x00, 0x00, 0x00, 0x01. *
* nal_count is the number of NAL units in the data: short SPS, PPS, SEI NALs *
@@ -225,11 +239,10 @@ void video_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data
logger_log(logger, LOGGER_INFO, "Begin streaming to GStreamer video pipeline");
first_packet = false;
}
buffer = gst_buffer_new_allocate(NULL, data_len, NULL);
buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
g_assert(buffer != NULL);
/* ntp_time is PTS given as UTC in usec */
GST_BUFFER_PTS(buffer) = (GstClockTime) (ntp_time * 1000);
gst_buffer_fill(buffer, 0, data, data_len);
GST_BUFFER_PTS(buffer) = pts;
gst_buffer_fill(buffer, 0, data, *data_len);
gst_app_src_push_buffer (GST_APP_SRC(renderer->appsrc), buffer);
#ifdef X_DISPLAY_FIX
if (renderer->gst_window && !(renderer->gst_window->window) && X11_search_attempts < MAX_X11_SEARCH_ATTEMPTS) {