Introduce -vsync and -async options

This commit is contained in:
F. Duncanh
2023-02-12 18:03:03 -05:00
parent 4580c44ed5
commit 6d9f2a2b88
8 changed files with 74 additions and 15 deletions

View File

@@ -677,6 +677,7 @@ raop_rtp_thread_udp(void *arg)
audio_data.seqnum = seqnum;
audio_data.data_len = payload_size;
audio_data.data = payload;
audio_data.ct = raop_rtp->ct;
if (have_synced) {
if (ntp_timestamp == 0) {
ntp_timestamp = (uint64_t) (raop_rtp->rtp_sync_offset + (int64_t) (raop_rtp->rtp_clock_rate * rtp64_timestamp));

View File

@@ -31,6 +31,7 @@ typedef struct {
typedef struct {
unsigned char *data;
unsigned char ct;
int data_len;
int sync_status;
uint64_t ntp_time_local;

View File

@@ -33,7 +33,7 @@ extern "C" {
#include "../lib/logger.h"
bool gstreamer_init();
void audio_renderer_init(logger_t *logger, const char* audiosink, const bool *audio_sync);
void audio_renderer_init(logger_t *logger, const char* audiosink, const bool *audio_sync, const bool *video_sync);
void audio_renderer_start(unsigned char* compression_type);
void audio_renderer_stop();
void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned short *seqnum, uint64_t *ntp_time);

View File

@@ -85,7 +85,7 @@ bool gstreamer_init(){
return (bool) check_plugins ();
}
void audio_renderer_init(logger_t *render_logger, const char* audiosink, const bool* audio_sync) {
void audio_renderer_init(logger_t *render_logger, const char* audiosink, const bool* audio_sync, const bool* video_sync) {
GError *error = NULL;
GstCaps *caps = NULL;
GstClock *clock = gst_system_clock_obtain();
@@ -124,7 +124,11 @@ void audio_renderer_init(logger_t *render_logger, const char* audiosink, const b
}
break;
default:
g_string_append (launch, " sync=false");
if (*video_sync) {
g_string_append (launch, " sync=true");
} else {
g_string_append (launch, " sync=false");
}
break;
}
renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
@@ -211,6 +215,7 @@ void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned s
GstBuffer *buffer;
bool valid;
GstClockTime pts = (GstClockTime) *ntp_time ; /* now in nsecs */
//GstClockTimeDiff latency = GST_CLOCK_DIFF(gst_element_get_current_clock_time (renderer->appsrc), pts);
if (pts >= gst_audio_pipeline_base_time) {
pts -= gst_audio_pipeline_base_time;
} else {
@@ -229,6 +234,7 @@ void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned s
buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
g_assert(buffer != NULL);
//g_print("audio latency %8.6f\n", (double) latency / SECOND_IN_NSECS);
GST_BUFFER_PTS(buffer) = pts;
gst_buffer_fill(buffer, 0, data, *data_len);
switch (renderer->ct){
@@ -288,4 +294,3 @@ void audio_renderer_destroy() {
free(renderer_type[i]);
}
}

View File

@@ -48,7 +48,8 @@ typedef enum videoflip_e {
typedef struct video_renderer_s video_renderer_t;
void video_renderer_init (logger_t *logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
const char *decoder, const char *converter, const char *videosink, const bool *fullscreen);
const char *decoder, const char *converter, const char *videosink, const bool *fullscreen,
const bool *video_sync);
void video_renderer_start ();
void video_renderer_stop ();
void video_renderer_render_buffer (unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time);

View File

@@ -128,7 +128,8 @@ void video_renderer_size(float *f_width_source, float *f_height_source, float *f
}
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
const char *decoder, const char *converter, const char *videosink, const bool *initial_fullscreen) {
const char *decoder, const char *converter, const char *videosink, const bool *initial_fullscreen,
const bool *video_sync) {
GError *error = NULL;
GstCaps *caps = NULL;
GstClock *clock = gst_system_clock_obtain();
@@ -156,7 +157,12 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
g_string_append(launch, " ! ");
append_videoflip(launch, &videoflip[0], &videoflip[1]);
g_string_append(launch, videosink);
g_string_append(launch, " name=video_sink sync=false");
g_string_append(launch, " name=video_sink");
if (*video_sync) {
g_string_append(launch, " sync=true");
} else {
g_string_append(launch, " sync=false");
}
logger_log(logger, LOGGER_DEBUG, "GStreamer video pipeline will be:\n\"%s\"", launch->str);
renderer->pipeline = gst_parse_launch(launch->str, &error);
if (error) {
@@ -226,6 +232,7 @@ void video_renderer_start() {
void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time) {
GstBuffer *buffer;
GstClockTime pts = (GstClockTime) *ntp_time; /*now in nsecs */
//GstClockTimeDiff latency = GST_CLOCK_DIFF(gst_element_get_current_clock_time (renderer->appsrc), pts);
if (pts >= gst_video_pipeline_base_time) {
pts -= gst_video_pipeline_base_time;
} else {
@@ -247,6 +254,7 @@ void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_c
}
buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
g_assert(buffer != NULL);
//g_print("video latency %8.6f\n", (double) latency / SECOND_IN_NSECS);
GST_BUFFER_PTS(buffer) = pts;
gst_buffer_fill(buffer, 0, data, *data_len);
gst_app_src_push_buffer (GST_APP_SRC(renderer->appsrc), buffer);

View File

@@ -13,7 +13,11 @@ UxPlay 1.63: An open\-source AirPlay mirroring (+ audio streaming) server.
.TP
\fB\-nh\fR Do \fBNOT\fR append "@\fIhostname\fR" at end of the AirPlay server name
.TP
\fB\-sync\fR (In Audio-Only mode) sync audio on server with video on client.
\fB\-vsync\fR Mirror mode: sync audio to video (default: stream w/o sync)
.TP
\fB\-vsync\fR[\fIx\fR] \fIx\fR is an optional audio delay in millisecs; it may be negative or decimal.
.TP
\fB\-async\fR[\fIx\fR] Audio-Only mode: sync audio to client video (default: no sync).
.TP
\fB\-s\fR wxh[@r]Set display resolution [refresh_rate] default 1920x1080[@60]
.TP

View File

@@ -70,6 +70,9 @@ static dnssd_t *dnssd = NULL;
static raop_t *raop = NULL;
static logger_t *render_logger = NULL;
static bool audio_sync = false;
static bool video_sync = false;
static int64_t audio_delay_alac = 0;
static int64_t audio_delay_aac = 0;
static bool relaunch_video = false;
static bool reset_loop = false;
static unsigned int open_connections= 0;
@@ -364,7 +367,9 @@ static void print_info (char *name) {
printf("Options:\n");
printf("-n name Specify the network name of the AirPlay server\n");
printf("-nh Do not add \"@hostname\" at the end of the AirPlay server name\n");
printf("-sync (In Audio-Only mode) sync audio on server with video on client\n");
printf("-vsync [x]Mirror mode: sync audio to video (default: stream w/o sync)\n");
printf(" x is optional audio delay in millisecs, can be neg., decimal\n");
printf("-async [x]Audio-Only mode: sync audio to client video (default: no sync)\n");
printf("-s wxh[@r]Set display resolution [refresh_rate] default 1920x1080[@60]\n");
printf("-o Set display \"overscanned\" mode on (not usually needed)\n");
printf("-fs Full-screen (only works with X11, Wayland and VAAPI)\n");
@@ -558,8 +563,36 @@ static void parse_arguments (int argc, char *argv[]) {
server_name = std::string(argv[++i]);
} else if (arg == "-nh") {
do_append_hostname = false;
} else if (arg == "-sync") {
} else if (arg == "-async") {
audio_sync = true;
if (i < argc - 1) {
char *end;
int n = (int) (strtof(argv[i + 1], &end) * 1000);
if (*end == '\0') {
i++;
if (n > -SECOND_IN_USECS && n < SECOND_IN_USECS) {
audio_delay_alac = n * 1000; /* units are nsecs */
} else {
fprintf(stderr, "invalid -async %s: requested delays must be smaller than +/- 1000 millisecs\n", argv[i] );
exit (1);
}
}
}
} else if (arg == "-vsync") {
video_sync = true;
if (i < argc - 1) {
char *end;
int n = (int) (strtof(argv[i + 1], &end) * 1000);
if (*end == '\0') {
i++;
if (n > -SECOND_IN_USECS && n < SECOND_IN_USECS) {
audio_delay_aac = n * 1000; /* units are nsecs */
} else {
fprintf(stderr, "invalid -vsync %s: requested delays must be smaller than +/- 1000 millisecs\n", argv[i]);
exit (1);
}
}
}
} else if (arg == "-s") {
if (!option_has_value(i, argc, argv[i], argv[i+1])) exit(1);
std::string value(argv[++i]);
@@ -998,10 +1031,15 @@ extern "C" void audio_process (void *cls, raop_ntp_t *ntp, audio_decode_struct *
}
if (use_audio) {
if (!remote_clock_offset) {
remote_clock_offset = data->ntp_time_local - data->ntp_time_remote;
remote_clock_offset = data->ntp_time_local - data->ntp_time_remote;
}
data->ntp_time_remote = data->ntp_time_remote + remote_clock_offset;
audio_renderer_render_buffer(data->data, &(data->data_len), &(data->seqnum), &(data->ntp_time_remote));
if (data->ct == 2 && audio_delay_alac) {
data->ntp_time_remote = (uint64_t) ((int64_t) data->ntp_time_remote + audio_delay_alac);
} else if (audio_delay_aac) {
data->ntp_time_remote = (uint64_t) ((int64_t) data->ntp_time_remote + audio_delay_aac);
}
audio_renderer_render_buffer(data->data, &(data->data_len), &(data->seqnum), &(data->ntp_time_remote));
}
}
@@ -1300,14 +1338,14 @@ int main (int argc, char *argv[]) {
logger_set_level(render_logger, debug_log ? LOGGER_DEBUG : LOGGER_INFO);
if (use_audio) {
audio_renderer_init(render_logger, audiosink.c_str(), &audio_sync);
audio_renderer_init(render_logger, audiosink.c_str(), &audio_sync, &video_sync);
} else {
LOGI("audio_disabled");
}
if (use_video) {
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen);
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen, &video_sync);
video_renderer_start();
}
@@ -1360,7 +1398,8 @@ int main (int argc, char *argv[]) {
if (use_video && close_window) {
video_renderer_destroy();
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen);
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen,
&video_sync);
video_renderer_start();
}
if (relaunch_video) {