add h265 support

This commit is contained in:
F. Duncanh
2024-09-17 18:12:40 -04:00
parent 0a2dbaa9e2
commit 57bd7555fa
9 changed files with 519 additions and 260 deletions

View File

@@ -36,14 +36,21 @@ typedef struct raop_s raop_t;
/* Callback type used to receive log messages from the raop server.
 * cls is the opaque user pointer registered with the callback,
 * level is the logger severity (e.g. LOGGER_DEBUG, LOGGER_ERR — see logger levels
 * used elsewhere in this code), and msg is the formatted message text. */
typedef void (*raop_log_callback_t)(void *cls, int level, const char *msg);
/* Identifies the compressed-video format carried by the mirror stream. */
typedef enum video_codec_e {
    VIDEO_CODEC_UNKNOWN,   /* codec not (yet) determined */
    VIDEO_CODEC_H264,      /* AVC / H.264 */
    VIDEO_CODEC_H265       /* HEVC / H.265 */
} video_codec_t;
struct raop_callbacks_s {
void* cls;
void (*audio_process)(void *cls, raop_ntp_t *ntp, audio_decode_struct *data);
void (*video_process)(void *cls, raop_ntp_t *ntp, h264_decode_struct *data);
void (*video_process)(void *cls, raop_ntp_t *ntp, video_decode_struct *data);
void (*video_pause)(void *cls);
void (*video_resume)(void *cls);
void (*video_codec) (void *cls, video_codec_t video_codec);
/* Optional but recommended callback functions */
void (*conn_init)(void *cls);
void (*conn_destroy)(void *cls);
@@ -64,6 +71,7 @@ struct raop_callbacks_s {
bool (*check_register) (void *cls, const char *pk_str);
void (*export_dacp) (void *cls, const char *active_remote, const char *dacp_id);
void (*video_reset) (void *cls);
void (*video_set_codec)(void *cls, video_codec_t codec);
};
typedef struct raop_callbacks_s raop_callbacks_t;
raop_ntp_t *raop_ntp_init(logger_t *logger, raop_callbacks_t *callbacks, const char *remote,

View File

@@ -33,7 +33,7 @@
#include "utils.h"
#include "byteutils.h"
/* Number of slots in the RAOP packet buffer.
 * Raised from 32 to 960 by the h265 support change; the stale duplicate
 * definition (32) is removed — two #defines with different values are a
 * redefinition error. */
#define RAOP_BUFFER_LENGTH 960
typedef struct {
/* Data available */

View File

@@ -146,7 +146,7 @@ raop_handler_info(raop_conn_t *conn,
plist_t displays_0_width_pixels_node = plist_new_uint(conn->raop->width);
plist_t displays_0_height_pixels_node = plist_new_uint(conn->raop->height);
plist_t displays_0_rotation_node = plist_new_bool(0); /* set to true in AppleTV gen 3 (which has features bit 8 set */
plist_t displays_0_refresh_rate_node = plist_new_real((double) 1.0 / conn->raop->refreshRate); /* set as real 0.166666 = 60hz in AppleTV gen 3 */
plist_t displays_0_refresh_rate_node = plist_new_uint(conn->raop->refreshRate); /* set as real 0.166666 = 60hz in AppleTV gen 3 */
plist_t displays_0_max_fps_node = plist_new_uint(conn->raop->maxFPS);
plist_t displays_0_overscanned_node = plist_new_bool(conn->raop->overscanned);
plist_t displays_0_features = plist_new_uint(14);

View File

@@ -195,7 +195,10 @@ raop_rtp_mirror_thread(void *arg)
uint64_t ntp_timestamp_local = 0;
unsigned char nal_start_code[4] = { 0x00, 0x00, 0x00, 0x01 };
bool logger_debug = (logger_get_level(raop_rtp_mirror->logger) >= LOGGER_DEBUG);
bool h265_video_detected = false;
bool h265_video = false;
video_codec_t codec;
const char h264[] = "h264";
const char h265[] = "h265";
while (1) {
fd_set rfds;
@@ -377,9 +380,9 @@ raop_rtp_mirror_thread(void *arg)
uint64_t ntp_now = raop_ntp_get_local_time(raop_rtp_mirror->ntp);
int64_t latency = ((int64_t) ntp_now) - ((int64_t) ntp_timestamp_local);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
"raop_rtp video: now = %8.6f, ntp = %8.6f, latency = %8.6f, ts = %8.6f, %s",
"raop_rtp video: now = %8.6f, ntp = %8.6f, latency = %8.6f, ts = %8.6f, %s %s",
(double) ntp_now / SEC, (double) ntp_timestamp_local / SEC, (double) latency / SEC,
(double) ntp_timestamp_remote / SEC, packet_description);
(double) ntp_timestamp_remote / SEC, packet_description, h265_video ? h265 : h264);
}
unsigned char* payload_out;
@@ -442,98 +445,93 @@ raop_rtp_mirror_thread(void *arg)
valid_data = false;
break;
}
int nalu_type = payload_decrypted[nalu_size] & 0x1f;
int ref_idc = (payload_decrypted[nalu_size] >> 5);
/* check for unsupported h265 video (sometimes sent by macOS in high-def screen mirroring) */
if (payload_decrypted[nalu_size + 1] == 0x01) {
switch (payload_decrypted[nalu_size]) {
case 0x28: // h265 IDR type 20 NAL
case 0x02: // h265 non-IDR type 1 NAL
ref_idc = 0;
h265_video_detected = true;
int nalu_type;
if (h265_video) {
nalu_type = payload_decrypted[nalu_size] & 0x7e >> 1;;
//logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG," h265 video, NALU type %d, size %d", nalu_type, nc_len);
} else {
nalu_type = payload_decrypted[nalu_size] & 0x1f;
int ref_idc = (payload_decrypted[nalu_size] >> 5);
switch (nalu_type) {
case 14: /* Prefix NALu , seen before all VCL Nalu's in AirMyPc */
case 5: /*IDR, slice_layer_without_partitioning */
case 1: /*non-IDR, slice_layer_without_partitioning */
break;
case 2: /* slice data partition A */
case 3: /* slice data partition B */
case 4: /* slice data partition C */
logger_log(raop_rtp_mirror->logger, LOGGER_INFO,
"unexpected partitioned VCL NAL unit: nalu_type = %d, ref_idc = %d, nalu_size = %d,"
"processed bytes %d, payloadsize = %d nalus_count = %d",
nalu_type, ref_idc, nc_len, nalu_size, payload_size, nalus_count);
break;
case 6:
if (logger_debug) {
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SEI NAL size = %d", nc_len);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
"raop_rtp_mirror h264 Supplemental Enhancement Information:\n%s", str);
free(str);
}
break;
case 7:
if (logger_debug) {
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SPS NAL size = %d", nc_len);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
"raop_rtp_mirror h264 Sequence Parameter Set:\n%s", str);
free(str);
}
break;
case 8:
if (logger_debug) {
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror PPS NAL size = %d", nc_len);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
"raop_rtp_mirror h264 Picture Parameter Set :\n%s", str);
free(str);
}
break;
default:
break;
}
if (h265_video_detected) {
break;
}
}
switch (nalu_type) {
case 14: /* Prefix NALu , seen before all VCL Nalu's in AirMyPc */
case 5: /*IDR, slice_layer_without_partitioning */
case 1: /*non-IDR, slice_layer_without_partitioning */
break;
case 2: /* slice data partition A */
case 3: /* slice data partition B */
case 4: /* slice data partition C */
logger_log(raop_rtp_mirror->logger, LOGGER_INFO,
"unexpected partitioned VCL NAL unit: nalu_type = %d, ref_idc = %d, nalu_size = %d,"
"processed bytes %d, payloadsize = %d nalus_count = %d",
nalu_type, ref_idc, nc_len, nalu_size, payload_size, nalus_count);
break;
case 6:
if (logger_debug) {
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SEI NAL size = %d", nc_len);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
"raop_rtp_mirror h264 Supplemental Enhancement Information:\n%s", str);
free(str);
}
break;
case 7:
if (logger_debug) {
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SPS NAL size = %d", nc_len);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
"raop_rtp_mirror h264 Sequence Parameter Set:\n%s", str);
free(str);
}
break;
case 8:
if (logger_debug) {
char *str = utils_data_to_string(payload_decrypted + nalu_size, nc_len, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror PPS NAL size = %d", nc_len);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG,
"raop_rtp_mirror h264 Picture Parameter Set :\n%s", str);
free(str);
}
break;
default:
logger_log(raop_rtp_mirror->logger, LOGGER_INFO,
"unexpected non-VCL NAL unit: nalu_type = %d, ref_idc = %d, nalu_size = %d,"
"processed bytes %d, payloadsize = %d nalus_count = %d",
nalu_type, ref_idc, nc_len, nalu_size, payload_size, nalus_count);
break;
logger_log(raop_rtp_mirror->logger, LOGGER_INFO,
"unexpected non-VCL NAL unit: nalu_type = %d, ref_idc = %d, nalu_size = %d,"
"processed bytes %d, payloadsize = %d nalus_count = %d",
nalu_type, ref_idc, nc_len, nalu_size, payload_size, nalus_count);
break;
}
}
nalu_size += nc_len;
}
if (h265_video_detected) {
logger_log(raop_rtp_mirror->logger, LOGGER_ERR,
"unsupported h265 video detected");
free (payload_out);
break;
}
if (nalu_size != payload_size) valid_data = false;
if(!valid_data) {
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "nalu marked as invalid");
payload_out[0] = 1; /* mark video data as invalid h264 (failed decryption) */
}
payload_decrypted = NULL;
h264_decode_struct h264_data;
h264_data.ntp_time_local = ntp_timestamp_local;
h264_data.ntp_time_remote = ntp_timestamp_remote;
h264_data.nal_count = nalus_count; /*nal_count will be the number of nal units in the packet */
h264_data.data_len = payload_size;
h264_data.data = payload_out;
video_decode_struct video_data;
video_data.is_h265 = h265_video;
video_data.ntp_time_local = ntp_timestamp_local;
video_data.ntp_time_remote = ntp_timestamp_remote;
video_data.nal_count = nalus_count; /*nal_count will be the number of nal units in the packet */
video_data.data_len = payload_size;
video_data.data = payload_out;
if (prepend_sps_pps) {
h264_data.data_len += sps_pps_len;
h264_data.nal_count += 2;
prepend_sps_pps = false;
video_data.data_len += sps_pps_len;
video_data.nal_count += 2;
if (h265_video) {
video_data.nal_count++;
}
prepend_sps_pps = false;
}
//char *str3 = utils_data_to_string(payload_out, video_data.data_len, 16);
//printf("%s\n", str3);
//free (str3);
raop_rtp_mirror->callbacks.video_resume(raop_rtp_mirror->callbacks.cls);
raop_rtp_mirror->callbacks.video_process(raop_rtp_mirror->callbacks.cls, raop_rtp_mirror->ntp, &h264_data);
raop_rtp_mirror->callbacks.video_process(raop_rtp_mirror->callbacks.cls, raop_rtp_mirror->ntp, &video_data);
free(payload_out);
break;
case 0x01:
@@ -542,10 +540,13 @@ raop_rtp_mirror_thread(void *arg)
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "\nReceived unencrypted codec packet from client:"
" payload_size %d header %s ts_client = %8.6f",
payload_size, packet_description, (double) ntp_timestamp_remote / SEC);
if (payload_size == 0) {
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror, discard type 0x01 packet with no payload");
break;
}
//char *str1 = utils_data_to_string(payload, payload_size, 16);
//printf("unencrypted payload, size %d\n", payload_size);
//printf("%s\n", str1);
//free (str1);
codec = VIDEO_CODEC_UNKNOWN;
assert (raop_rtp_mirror->callbacks.video_set_codec);
ntp_timestamp_nal = ntp_timestamp_raw;
float width = byteutils_get_float(packet, 16);
float height = byteutils_get_float(packet, 20);
@@ -565,55 +566,140 @@ raop_rtp_mirror_thread(void *arg)
}
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror width_source = %f height_source = %f width = %f height = %f",
width_source, height_source, width, height);
short sps_size = byteutils_get_short_be(payload,6);
unsigned char *sequence_parameter_set = payload + 8;
short pps_size = byteutils_get_short_be(payload, sps_size + 9);
unsigned char *picture_parameter_set = payload + sps_size + 11;
int data_size = 6;
if (logger_debug) {
char *str = utils_data_to_string(payload, data_size, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror: SPS+PPS header size = %d", data_size);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 SPS+PPS header:\n%s", str);
free(str);
str = utils_data_to_string(sequence_parameter_set, sps_size,16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SPS NAL size = %d", sps_size);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 Sequence Parameter Set:\n%s", str);
free(str);
str = utils_data_to_string(picture_parameter_set, pps_size, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror PPS NAL size = %d", pps_size);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 Picture Parameter Set:\n%s", str);
free(str);
if (payload_size == 0) {
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror, received 0x01 packet with no payload (h265 video?)");
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
break;
}
data_size = payload_size - sps_size - pps_size - 11;
if (data_size > 0 && logger_debug) {
char *str = utils_data_to_string (picture_parameter_set + pps_size, data_size, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "remainder size = %d", data_size);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "remainder of SPS+PPS packet:\n%s", str);
free(str);
} else if (data_size < 0) {
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, " pps_sps error: packet remainder size = %d < 0", data_size);
}
// Copy the sps and pps into a buffer to prepend to the next NAL unit.
if (sps_pps) {
free(sps_pps);
sps_pps = NULL;
}
sps_pps_len = sps_size + pps_size + 8;
sps_pps = (unsigned char*) malloc(sps_pps_len);
assert(sps_pps);
memcpy(sps_pps, nal_start_code, 4);
memcpy(sps_pps + 4, sequence_parameter_set, sps_size);
memcpy(sps_pps + sps_size + 4, nal_start_code, 4);
memcpy(sps_pps + sps_size + 8, payload + sps_size + 11, pps_size);
prepend_sps_pps = true;
/* test for a H265 VPS/SPs/PPS */
unsigned char hvc1[] = { 0x68, 0x76, 0x63, 0x31 };
uint64_t ntp_offset = 0;
ntp_offset = raop_ntp_convert_remote_time(raop_rtp_mirror->ntp, ntp_offset);
if (!ntp_offset) {
logger_log(raop_rtp_mirror->logger, LOGGER_WARNING, "ntp synchronization has not yet started: synchronized video may fail");
if (!memcmp(payload + 4, hvc1, 4)) {
/* hvc1 HECV detected */
codec = VIDEO_CODEC_H265;
printf("h265 detected\n");
h265_video = true;
raop_rtp_mirror->callbacks.video_set_codec(raop_rtp_mirror->callbacks.cls, codec);
unsigned char vps_code[] = { 0xa0, 0x00, 0x01, 0x00 };
unsigned char sps_code[] = { 0xa1, 0x00, 0x01, 0x00 };
unsigned char pps_code[] = { 0xa2, 0x00, 0x01, 0x00 };
unsigned char *vps;
short vps_size;
unsigned char *sps;
short sps_size;
unsigned char *pps;
short pps_size;
unsigned char * ptr = payload + 0x75;
if (memcmp(ptr, vps_code, 4)) {
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, "non-conforming HEVC VPS/SPS/PPS payload (VPS)");
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
break;
}
vps_size = byteutils_get_short_be(ptr, 3);
ptr += 5;
vps = ptr;
if (logger_debug) {
char *str = utils_data_to_string(vps, vps_size, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "h265 vps size %d\n%s",vps_size, str);
free(str);
}
ptr += vps_size;
if (memcmp(ptr, sps_code, 4)) {
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, "non-conforming HEVC VPS/SPS/PPS payload (SPS)");
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
break;
}
sps_size = byteutils_get_short_be(ptr, 3);
ptr += 5;
sps = ptr;
if (logger_debug) {
char *str = utils_data_to_string(sps, sps_size, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "h265 sps size %d\n%s",vps_size, str);
free(str);
}
ptr += sps_size;
if (memcmp(ptr, pps_code, 4)) {
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, "non-conforming HEVC VPS/SPS/PPS payload (PPS)");
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
break;
}
pps_size = byteutils_get_short_be(ptr, 3);
ptr += 5;
pps = ptr;
if (logger_debug) {
char *str = utils_data_to_string(pps, pps_size, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "h265 pps size %d\n%s",pps_size, str);
free(str);
}
sps_pps_len = vps_size + sps_size + pps_size + 12;
sps_pps = (unsigned char*) malloc(sps_pps_len);
assert(sps_pps);
ptr = sps_pps;
memcpy(ptr, nal_start_code, 4);
ptr += 4;
memcpy(ptr, vps, vps_size);
ptr += vps_size;
memcpy(ptr, nal_start_code, 4);
ptr += 4;
memcpy(ptr, sps, sps_size);
ptr += sps_size;
memcpy(ptr, nal_start_code, 4);
ptr += 4;
memcpy(ptr, pps, pps_size);
// printf (" HEVC (hvc1) vps + sps + pps NALU\n");
//char *str = utils_data_to_string(sps_pps, sps_pps_len, 16);
//printf("%s\n", str);
//free (str);
} else {
codec = VIDEO_CODEC_H264;
h265_video = false;
raop_rtp_mirror->callbacks.video_set_codec(raop_rtp_mirror->callbacks.cls, codec);
short sps_size = byteutils_get_short_be(payload,6);
unsigned char *sequence_parameter_set = payload + 8;
short pps_size = byteutils_get_short_be(payload, sps_size + 9);
unsigned char *picture_parameter_set = payload + sps_size + 11;
int data_size = 6;
if (logger_debug) {
char *str = utils_data_to_string(payload, data_size, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror: SPS+PPS header size = %d", data_size);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 SPS+PPS header:\n%s", str);
free(str);
str = utils_data_to_string(sequence_parameter_set, sps_size,16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror SPS NAL size = %d", sps_size);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 Sequence Parameter Set:\n%s", str);
free(str);
str = utils_data_to_string(picture_parameter_set, pps_size, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror PPS NAL size = %d", pps_size);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror h264 Picture Parameter Set:\n%s", str);
free(str);
}
data_size = payload_size - sps_size - pps_size - 11;
if (data_size > 0 && logger_debug) {
char *str = utils_data_to_string (picture_parameter_set + pps_size, data_size, 16);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "remainder size = %d", data_size);
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "remainder of SPS+PPS packet:\n%s", str);
free(str);
} else if (data_size < 0) {
logger_log(raop_rtp_mirror->logger, LOGGER_ERR, " pps_sps error: packet remainder size = %d < 0", data_size);
}
// Copy the sps and pps into a buffer to prepend to the next NAL unit.
sps_pps_len = sps_size + pps_size + 8;
sps_pps = (unsigned char*) malloc(sps_pps_len);
assert(sps_pps);
memcpy(sps_pps, nal_start_code, 4);
memcpy(sps_pps + 4, sequence_parameter_set, sps_size);
memcpy(sps_pps + sps_size + 4, nal_start_code, 4);
memcpy(sps_pps + sps_size + 8, payload + sps_size + 11, pps_size);
}
prepend_sps_pps = true;
// h264codec_t h264;
// h264.version = payload[0];
// h264.profile_high = payload[1];
@@ -628,7 +714,6 @@ raop_rtp_mirror_thread(void *arg)
// h264.pps_size = pps_size;
// h264.picture_parameter_set = malloc(h264.pps_size);
// memcpy(h264.picture_parameter_set, picture_parameter_set, pps_size);
raop_rtp_mirror->callbacks.video_pause(raop_rtp_mirror->callbacks.cls);
break;
case 0x02:
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "\nReceived old-protocol once-per-second packet from client:"

View File

@@ -22,12 +22,13 @@
#include <stdbool.h>
/* A decoded video payload handed to the video_process callback.
 * Renamed from h264_decode_struct now that the mirror stream may carry
 * either h264 or h265; the diff residue left two typedef terminators here,
 * which is malformed C. The old name is kept as an alias so existing
 * h264-era callers still compile. */
typedef struct {
    bool is_h265;              /* true when the NAL units are HEVC (h265) */
    int nal_count;             /* number of NAL units in data */
    unsigned char *data;       /* annex-B byte stream (start-code delimited) */
    int data_len;              /* length of data in bytes */
    uint64_t ntp_time_local;   /* local NTP timestamp of the frame */
    uint64_t ntp_time_remote;  /* sender's NTP timestamp of the frame */
} video_decode_struct;

/* Deprecated alias retained for backward compatibility. */
typedef video_decode_struct h264_decode_struct;
typedef struct {
unsigned char *data;

View File

@@ -47,9 +47,14 @@ typedef enum videoflip_e {
typedef struct video_renderer_s video_renderer_t;
void video_renderer_init (logger_t *logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
const char *decoder, const char *converter, const char *videosink, const bool fullscreen,
const bool video_sync);
/* Context handed to GLib/GStreamer bus-watch callbacks.
 * NOTE(review): 'type' presumably selects which renderer/bus the callback
 * concerns (cf. the renderer_type[] lookup in gstreamer_pipeline_bus_callback)
 * — confirm against callers; 'loop' is the GMainLoop that fatal bus errors
 * quit via g_main_loop_quit. */
typedef struct user_data_s {
int type;
GMainLoop *loop;
} user_data_t;
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
const char *decoder, const char *converter, const char *videosink, const char *videosin_options,
bool initial_fullscreen, bool video_sync, bool h265_support);
void video_renderer_start ();
void video_renderer_stop ();
void video_renderer_pause ();
@@ -57,13 +62,13 @@ void video_renderer_resume ();
bool video_renderer_is_paused();
void video_renderer_render_buffer (unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time);
void video_renderer_flush ();
unsigned int video_renderer_listen(void *loop);
void video_renderer_destroy ();
void video_renderer_size(float *width_source, float *height_source, float *width, float *height);
/* not implemented for gstreamer */
void video_renderer_update_background (int type);
void video_renderer_h265(bool is_h265);
unsigned int video_renderer_listen(void *loop, int id);
unsigned int video_reset_callback(void *loop);
#ifdef __cplusplus
}
#endif

View File

@@ -3,7 +3,7 @@
* Copyright (C) 2019 Florian Draschbacher
* Modified for:
* UxPlay - An open-source AirPlay mirroring server
* Copyright (C) 2021-23 F. Duncanh
* Copyright (C) 2021-24 F. Duncanh
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -20,9 +20,10 @@
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "video_renderer.h"
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include "video_renderer.h"
#define SECOND_IN_NSECS 1000000000UL
#ifdef X_DISPLAY_FIX
@@ -33,24 +34,37 @@ static bool alt_keypress = false;
static unsigned char X11_search_attempts;
#endif
static video_renderer_t *renderer = NULL;
static GstClockTime gst_video_pipeline_base_time = GST_CLOCK_TIME_NONE;
static logger_t *logger = NULL;
static unsigned short width, height, width_source, height_source; /* not currently used */
static bool first_packet = false;
static bool sync = false;
static bool auto_videosink;
static bool auto_videosink = true;
static bool logger_debug = false;
static bool video_terminate = false;
static user_data_t user_data;
#define NCODECS 2 /* renderers for h264 and h265 */
/* Per-codec video renderer: one GStreamer pipeline plus its app source and
 * bus.  Two instances exist when h265 support is enabled (see NCODECS /
 * renderer_type[]).  The diff residue had declared 'use_x11' twice inside
 * the X_DISPLAY_FIX section, which is invalid C; the duplicate is removed. */
struct video_renderer_s {
    GstElement *appsrc, *pipeline;  /* appsrc feeds NAL units into pipeline */
    GstBus *bus;                    /* message bus of this pipeline */
    const char *codec;              /* "h264" or "h265" */
    bool autovideo;                 /* videosink was auto-selected */
    int id;                         /* index of this renderer (0 = h264, 1 = h265) */
#ifdef X_DISPLAY_FIX
    const char *server_name;        /* window title to search for */
    X11_Window_t *gst_window;       /* X11 window handle, NULL if unavailable */
    bool use_x11;                   /* X11 display fix active for this renderer */
#endif
};
/* File-scope renderer state.
 * NOTE(review): 'renderer' also appears declared near the top of this file
 * (diff residue?) — confirm only one definition survives in the real source.
 * renderer_type[] holds one renderer per codec (index 0 = h264, 1 = h265);
 * n_renderers is 1 or 2 depending on h265_support. */
static video_renderer_t *renderer = NULL;
static video_renderer_t *renderer_type[NCODECS] = {0};
static int n_renderers = NCODECS;
static char h264[] = "h264";
static char h265[] = "h265";
static void append_videoflip (GString *launch, const videoflip_t *flip, const videoflip_t *rot) {
/* videoflip image transform */
switch (*flip) {
@@ -119,6 +133,7 @@ static void append_videoflip (GString *launch, const videoflip_t *flip, const vi
* range = 2 -> GST_VIDEO_COLOR_RANGE_16_235 ("limited RGB") */
/* GStreamer caps strings for the appsrc of each codec pipeline:
 * annex-B byte-stream input, access-unit aligned. */
static const char h264_caps[]="video/x-h264,stream-format=(string)byte-stream,alignment=(string)au";
static const char h265_caps[]="video/x-h265,stream-format=(string)byte-stream,alignment=(string)au";
void video_renderer_size(float *f_width_source, float *f_height_source, float *f_width, float *f_height) {
width_source = (unsigned short) *f_width_source;
@@ -128,90 +143,131 @@ void video_renderer_size(float *f_width_source, float *f_height_source, float *f
logger_log(logger, LOGGER_DEBUG, "begin video stream wxh = %dx%d; source %dx%d", width, height, width_source, height_source);
}
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
const char *decoder, const char *converter, const char *videosink, const bool initial_fullscreen,
const bool video_sync) {
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
const char *decoder, const char *converter, const char *videosink, const char *videosink_options,
bool initial_fullscreen, bool video_sync, bool h265_support) {
GError *error = NULL;
GstCaps *caps = NULL;
GstClock *clock = gst_system_clock_obtain();
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
/* videosink choices that are auto */
auto_videosink = (strstr(videosink, "autovideosink") || strstr(videosink, "fpsdisplaysink"));
logger = render_logger;
logger_debug = (logger_get_level(logger) >= LOGGER_DEBUG);
video_terminate = false;
/* this call to g_set_application_name makes server_name appear in the X11 display window title bar, */
/* (instead of the program name uxplay taken from (argv[0]). It is only set one time. */
const gchar *appname = g_get_application_name();
if (!appname || strcmp(appname,server_name)) g_set_application_name(server_name);
appname = NULL;
renderer = calloc(1, sizeof(video_renderer_t));
g_assert(renderer);
GString *launch = g_string_new("appsrc name=video_source ! ");
g_string_append(launch, "queue ! ");
g_string_append(launch, parser);
g_string_append(launch, " ! ");
g_string_append(launch, decoder);
g_string_append(launch, " ! ");
append_videoflip(launch, &videoflip[0], &videoflip[1]);
g_string_append(launch, converter);
g_string_append(launch, " ! ");
g_string_append(launch, "videoscale ! ");
g_string_append(launch, videosink);
if (video_sync) {
g_string_append(launch, " sync=true");
sync = true;
} else {
g_string_append(launch, " sync=false");
sync = false;
}
logger_log(logger, LOGGER_DEBUG, "GStreamer video pipeline will be:\n\"%s\"", launch->str);
renderer->pipeline = gst_parse_launch(launch->str, &error);
if (error) {
g_error ("get_parse_launch error (video) :\n %s\n",error->message);
g_clear_error (&error);
}
g_assert (renderer->pipeline);
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer->pipeline), clock);
renderer->appsrc = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "video_source");
g_assert(renderer->appsrc);
caps = gst_caps_from_string(h264_caps);
g_object_set(renderer->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
g_string_free(launch, TRUE);
gst_caps_unref(caps);
gst_object_unref(clock);
#ifdef X_DISPLAY_FIX
renderer->use_x11 = (strstr(videosink, "xvimagesink") || strstr(videosink, "ximagesink") || auto_videosink);
fullscreen = initial_fullscreen;
renderer->server_name = server_name;
renderer->gst_window = NULL;
X11_search_attempts = 0;
if (renderer->use_x11) {
renderer->gst_window = calloc(1, sizeof(X11_Window_t));
g_assert(renderer->gst_window);
get_X11_Display(renderer->gst_window);
if (!renderer->gst_window->display) {
free(renderer->gst_window);
renderer->gst_window = NULL;
}
}
#endif
gst_element_set_state (renderer->pipeline, GST_STATE_READY);
GstState state;
if (gst_element_get_state (renderer->pipeline, &state, NULL, 0)) {
if (state == GST_STATE_READY) {
logger_log(logger, LOGGER_DEBUG, "Initialized GStreamer video renderer");
n_renderers = h265_support ? 2 : 1;
g_assert (n_renderers <= NCODECS);
for (int i = 0; i < n_renderers; i++) {
g_assert (i < 2);
renderer_type[i] = (video_renderer_t *) calloc(1, sizeof(video_renderer_t));
g_assert(renderer_type[i]);
renderer_type[i]->autovideo = auto_videosink;
renderer_type[i]->id = i;
renderer_type[i]->bus = NULL;
switch (i) {
case 0:
renderer_type[i]->codec = h264;
caps = gst_caps_from_string(h264_caps);
break;
case 1:
renderer_type[i]->codec = h265;
caps = gst_caps_from_string(h265_caps);
break;
default:
g_assert(0);
}
GString *launch = g_string_new("appsrc name=video_source ! ");
g_string_append(launch, "queue ! ");
g_string_append(launch, parser);
g_string_append(launch, " ! ");
g_string_append(launch, decoder);
g_string_append(launch, " ! ");
append_videoflip(launch, &videoflip[0], &videoflip[1]);
g_string_append(launch, converter);
g_string_append(launch, " ! ");
g_string_append(launch, "videoscale ! ");
g_string_append(launch, videosink);
g_string_append(launch, " name=");
g_string_append(launch, videosink);
g_string_append(launch, "_");
g_string_append(launch, renderer_type[i]->codec);
g_string_append(launch, videosink_options);
if (video_sync) {
g_string_append(launch, " sync=true");
sync = true;
} else {
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer");
g_string_append(launch, " sync=false");
sync = false;
}
} else {
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer");
if (!strcmp(renderer_type[i]->codec, h265)) {
g_string_replace (launch, (const gchar *) h264, (const gchar *) h265, 0);
} else {
g_string_replace (launch, (const gchar *) h265, (const gchar *) h264, 0);
}
logger_log(logger, LOGGER_DEBUG, "GStreamer video pipeline %d:\n\"%s\"", i + 1, launch->str);
renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
if (error) {
g_error ("get_parse_launch error (video) :\n %s\n",error->message);
g_clear_error (&error);
}
g_assert (renderer_type[i]->pipeline);
GstClock *clock = gst_system_clock_obtain();
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer_type[i]->pipeline), clock);
renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "video_source");
g_assert(renderer_type[i]->appsrc);
g_object_set(renderer_type[i]->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
g_string_free(launch, TRUE);
gst_caps_unref(caps);
gst_object_unref(clock);
#ifdef X_DISPLAY_FIX
bool use_x11 = (strstr(videosink, "xvimagesink") || strstr(videosink, "ximagesink") || auto_videosink);
fullscreen = initial_fullscreen;
renderer_type[i]->server_name = server_name;
renderer_type[i]->gst_window = NULL;
renderer_type[i]->use_x11 = false;
X11_search_attempts = 0;
if (use_x11) {
if (i == 0) {
renderer_type[0]->gst_window = (X11_Window_t *) calloc(1, sizeof(X11_Window_t));
g_assert(renderer_type[0]->gst_window);
get_X11_Display(renderer_type[0]->gst_window);
if (renderer_type[0]->gst_window->display) {
renderer_type[i]->use_x11 = true;
} else {
free(renderer_type[0]->gst_window);
renderer_type[0]->gst_window = NULL;
} } else if (renderer_type[0]->use_x11) {
renderer_type[i]->gst_window = (X11_Window_t *) calloc(1, sizeof(X11_Window_t));
g_assert(renderer_type[i]->gst_window);
memcpy(renderer_type[i]->gst_window, renderer_type[0]->gst_window, sizeof(X11_Window_t));
renderer_type[i]->use_x11 = true;
}
}
#endif
gst_element_set_state (renderer_type[i]->pipeline, GST_STATE_READY);
GstState state;
if (gst_element_get_state (renderer_type[i]->pipeline, &state, NULL, 0)) {
if (state == GST_STATE_READY) {
logger_log(logger, LOGGER_DEBUG, "Initialized GStreamer video renderer %d", i + 1);
} else {
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
}
} else {
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
}
}
}
@@ -235,9 +291,13 @@ bool video_renderer_is_paused() {
}
void video_renderer_start() {
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
renderer->bus = gst_element_get_bus(renderer->pipeline);
/* start both h264 and h265 pipelines; will shut down the "wrong" one when we know the codec */
for (int i = 0; i < n_renderers; i++) {
gst_element_set_state (renderer_type[i]->pipeline, GST_STATE_PLAYING);
gst_video_pipeline_base_time = gst_element_get_base_time(renderer_type[i]->appsrc);
renderer_type[i]->bus = gst_element_get_bus(renderer_type[i]->pipeline);
}
renderer = NULL;
first_packet = true;
#ifdef X_DISPLAY_FIX
X11_search_attempts = 0;
@@ -282,7 +342,7 @@ void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_c
X11_search_attempts++;
logger_log(logger, LOGGER_DEBUG, "Looking for X11 UxPlay Window, attempt %d", (int) X11_search_attempts);
get_x_window(renderer->gst_window, renderer->server_name);
if (renderer->gst_window->window) {
if (renderer->gst_window->window) {
logger_log(logger, LOGGER_INFO, "\n*** X11 Windows: Use key F11 or (left Alt)+Enter to toggle full-screen mode\n");
if (fullscreen) {
set_fullscreen(renderer->gst_window, &fullscreen);
@@ -303,13 +363,13 @@ void video_renderer_stop() {
}
}
void video_renderer_destroy() {
static void video_renderer_destroy_h26x(video_renderer_t *renderer) {
if (renderer) {
GstState state;
gst_element_get_state(renderer->pipeline, &state, NULL, 0);
if (state != GST_STATE_NULL) {
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
}
gst_object_unref(renderer->bus);
gst_object_unref (renderer->appsrc);
@@ -325,13 +385,33 @@ void video_renderer_destroy() {
}
}
void video_renderer_destroy() {
for (int i = 0; i < n_renderers; i++) {
if (renderer_type[i]) {
video_renderer_destroy_h26x(renderer_type[i]);
}
}
}
/* Update the background of the video window: intentionally a no-op for the
 * GStreamer renderer (declared in the public header for other backends). */
void video_renderer_update_background(int type) {
    (void) type;  /* unused: silences -Wunused-parameter */
}
gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpointer loop) {
if (logger_get_level(logger) >= LOGGER_DEBUG) {
g_print("GStreamer bus message: %s %s\n", GST_MESSAGE_SRC_NAME(message), GST_MESSAGE_TYPE_NAME(message));
gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, void * loop) {
/* identify which pipeline sent the message */
int type = -1;
for (int i = 0 ; i < n_renderers ; i ++ ) {
if (renderer_type[i]->bus == bus) {
type = i;
break;
}
}
g_assert(type != -1);
if (logger_debug) {
g_print("GStreamer %s bus message: %s %s\n", renderer_type[type]->codec, GST_MESSAGE_SRC_NAME(message), GST_MESSAGE_TYPE_NAME(message));
}
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR: {
@@ -352,10 +432,10 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
}
g_error_free (err);
g_free (debug);
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
gst_app_src_end_of_stream (GST_APP_SRC(renderer_type[type]->appsrc));
flushing = TRUE;
gst_bus_set_flushing(bus, flushing);
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_NULL);
g_main_loop_quit( (GMainLoop *) loop);
break;
}
@@ -365,21 +445,26 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
// g_main_loop_quit( (GMainLoop *) loop);
break;
case GST_MESSAGE_STATE_CHANGED:
if (auto_videosink) {
if (renderer_type[type]->autovideo) {
char *sink = strstr(GST_MESSAGE_SRC_NAME(message), "-actual-sink-");
if (sink) {
sink += strlen("-actual-sink-");
logger_log(logger, LOGGER_DEBUG, "GStreamer: automatically-selected videosink is \"%ssink\"", sink);
auto_videosink = false;
if (strstr(GST_MESSAGE_SRC_NAME(message), renderer_type[type]->codec)) {
logger_log(logger, LOGGER_DEBUG, "GStreamer: automatically-selected videosink"
" (renderer %d: %s) is \"%ssink\"", renderer_type[type]->id + 1,
renderer_type[type]->codec, sink);
#ifdef X_DISPLAY_FIX
renderer->use_x11 = (strstr(sink, "ximage") || strstr(sink, "xvimage"));
renderer_type[type]->use_x11 = (strstr(sink, "ximage") || strstr(sink, "xvimage"));
#endif
renderer_type[type]->autovideo = false;
}
}
}
break;
#ifdef X_DISPLAY_FIX
case GST_MESSAGE_ELEMENT:
if (renderer->gst_window && renderer->gst_window->window) {
if (renderer_type[type]->gst_window && renderer_type[type]->gst_window->window) {
printf("hello element with gst_window \n");
GstNavigationMessageType message_type = gst_navigation_message_get_type (message);
if (message_type == GST_NAVIGATION_MESSAGE_EVENT) {
GstEvent *event = NULL;
@@ -391,7 +476,7 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
if (gst_navigation_event_parse_key_event (event, &key)) {
if ((strcmp (key, "F11") == 0) || (alt_keypress && strcmp (key, "Return") == 0)) {
fullscreen = !(fullscreen);
set_fullscreen(renderer->gst_window, &fullscreen);
set_fullscreen(renderer_type[type]->gst_window, &fullscreen);
} else if (strcmp (key, "Alt_L") == 0) {
alt_keypress = true;
}
@@ -421,7 +506,36 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, gpoin
return TRUE;
}
unsigned int video_renderer_listen(void *loop) {
return (unsigned int) gst_bus_add_watch(renderer->bus, (GstBusFunc)
void video_renderer_h265 (bool video_is_h265) {
/* set renderer to h264 or h265, depending on pps/sps received by raop_rtp_mirror */
video_renderer_t *renderer_prev = renderer;
if (renderer) {
video_renderer_pause();
}
renderer = video_is_h265 ? renderer_type[1] : renderer_type[0];
if (renderer_prev && renderer_prev != renderer) {
gst_app_src_end_of_stream (GST_APP_SRC(renderer_prev->appsrc));
gst_bus_set_flushing(renderer_prev->bus, TRUE);
/* set state of previous renderer to GST_STATE_NULL to (hopefully?) close video window */
gst_element_set_state (renderer_prev->pipeline, GST_STATE_NULL);
gst_element_set_state (renderer_prev->pipeline, GST_STATE_READY);
}
}
unsigned int video_reset_callback(void * loop) {
if (video_terminate) {
video_terminate = false;
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
gboolean flushing = TRUE;
gst_bus_set_flushing(renderer->bus, flushing);
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
g_main_loop_quit( (GMainLoop *) loop);
}
return (unsigned int) TRUE;
}
unsigned int video_renderer_listen(void *loop, int id) {
g_assert(id >= 0 && id < n_renderers);
return (unsigned int) gst_bus_add_watch(renderer_type[id]->bus,(GstBusFunc)
gstreamer_pipeline_bus_callback, (gpointer) loop);
}

View File

@@ -1,11 +1,11 @@
.TH UXPLAY "1" "August 2024" "1.69" "User Commands"
.TH UXPLAY "1" "September 2024" "1.70" "User Commands"
.SH NAME
uxplay \- start AirPlay server
.SH SYNOPSIS
.B uxplay
[\fI\,-n name\/\fR] [\fI\,-s wxh\/\fR] [\fI\,-p \/\fR[\fI\,n\/\fR]] [more \fI OPTIONS \/\fR ...]
.SH DESCRIPTION
UxPlay 1.69: An open\-source AirPlay mirroring (+ audio streaming) server:
UxPlay 1.70: An open\-source AirPlay mirroring (+ audio streaming) server:
.SH OPTIONS
.TP
.B
@@ -13,6 +13,8 @@ UxPlay 1.69: An open\-source AirPlay mirroring (+ audio streaming) server:
.TP
\fB\-nh\fR Do \fBNOT\fR append "@\fIhostname\fR" at end of AirPlay server name
.TP
\fB\-h265\fR Support h265 (4K) video (with h265 versions of h264 plugins)
.TP
\fB\-pin\fI[xxxx]\fRUse a 4-digit pin code to control client access (default: no)
.IP
without option, pin is random: optionally use fixed pin xxxx.

View File

@@ -62,7 +62,7 @@
#include "renderers/video_renderer.h"
#include "renderers/audio_renderer.h"
#define VERSION "1.69"
#define VERSION "1.70"
#define SECOND_IN_USECS 1000000
#define SECOND_IN_NSECS 1000000000UL
@@ -85,6 +85,7 @@ static bool relaunch_video = false;
static bool reset_loop = false;
static unsigned int open_connections= 0;
static std::string videosink = "autovideosink";
static std::string videosink_options = "";
static videoflip_t videoflip[2] = { NONE , NONE };
static bool use_video = true;
static unsigned char compression_type = 0;
@@ -141,6 +142,8 @@ static std::vector <std::string> registered_keys;
static double db_low = -30.0;
static double db_high = 0.0;
static bool taper_volume = false;
static bool h265_support = false;
static int n_renderers = 0;
/* logging */
@@ -357,13 +360,13 @@ static gboolean reset_callback(gpointer loop) {
return TRUE;
}
static gboolean sigint_callback(gpointer loop) {
static gboolean sigint_callback(gpointer loop) {
relaunch_video = false;
g_main_loop_quit((GMainLoop *) loop);
return TRUE;
}
static gboolean sigterm_callback(gpointer loop) {
static gboolean sigterm_callback(gpointer loop) {
relaunch_video = false;
g_main_loop_quit((GMainLoop *) loop);
return TRUE;
@@ -391,22 +394,30 @@ static guint g_unix_signal_add(gint signum, GSourceFunc handler, gpointer user_d
#endif
static void main_loop() {
guint gst_bus_watch_id = 0;
guint gst_bus_watch_id[2] = { 0 };
g_assert(n_renderers <= 2);
GMainLoop *loop = g_main_loop_new(NULL,FALSE);
relaunch_video = false;
if (use_video) {
relaunch_video = true;
gst_bus_watch_id = (guint) video_renderer_listen((void *)loop);
for (int i = 0; i < n_renderers; i++) {
gst_bus_watch_id[i] = (guint) video_renderer_listen((void *)loop, i);
}
}
guint reset_watch_id = g_timeout_add(100, (GSourceFunc) reset_callback, (gpointer) loop);
guint video_reset_watch_id = g_timeout_add(100, (GSourceFunc) video_reset_callback, (gpointer) loop);
guint sigterm_watch_id = g_unix_signal_add(SIGTERM, (GSourceFunc) sigterm_callback, (gpointer) loop);
guint sigint_watch_id = g_unix_signal_add(SIGINT, (GSourceFunc) sigint_callback, (gpointer) loop);
printf("********** main_loop_run *******************\n");
g_main_loop_run(loop);
if (gst_bus_watch_id > 0) g_source_remove(gst_bus_watch_id);
printf("********** main_loop_exit *******************\n");
for (int i = 0; i < n_renderers; i++) {
if (gst_bus_watch_id[i] > 0) g_source_remove(gst_bus_watch_id[i]);
}
if (sigint_watch_id > 0) g_source_remove(sigint_watch_id);
if (sigterm_watch_id > 0) g_source_remove(sigterm_watch_id);
if (reset_watch_id > 0) g_source_remove(reset_watch_id);
if (video_reset_watch_id > 0) g_source_remove(video_reset_watch_id);
g_main_loop_unref(loop);
}
@@ -570,6 +581,7 @@ static void print_info (char *name) {
printf("Options:\n");
printf("-n name Specify the network name of the AirPlay server\n");
printf("-nh Do not add \"@hostname\" at the end of AirPlay server name\n");
printf("-h265 Support h265 (4K) video (with h265 versions of h264 plugins)\n");
printf("-pin[xxxx]Use a 4-digit pin code to control client access (default: no)\n");
printf(" default pin is random: optionally use fixed pin xxxx\n");
printf("-reg [fn] Keep a register in $HOME/.uxplay.register to verify returning\n");
@@ -594,7 +606,6 @@ static void print_info (char *name) {
printf("-vd ... Choose the GStreamer h264 decoder; default \"decodebin\"\n");
printf(" choices: (software) avdec_h264; (hardware) v4l2h264dec,\n");
printf(" nvdec, nvh264dec, vaapih64dec, vtdec,etc.\n");
printf(" choices: avdec_h264,vaapih264dec,nvdec,nvh264dec,v4l2h264dec\n");
printf("-vc ... Choose the GStreamer videoconverter; default \"videoconvert\"\n");
printf(" another choice when using v4l2h264dec: v4l2convert\n");
printf("-vs ... Choose the GStreamer videosink; default \"autovideosink\"\n");
@@ -930,6 +941,12 @@ static void parse_arguments (int argc, char *argv[]) {
if (!option_has_value(i, argc, arg, argv[i+1])) exit(1);
videosink.erase();
videosink.append(argv[++i]);
std::size_t pos = videosink.find(" ");
if (pos != std::string::npos) {
videosink_options.erase();
videosink_options = videosink.substr(pos);
videosink.erase(pos);
}
} else if (arg == "-as") {
if (!option_has_value(i, argc, arg, argv[i+1])) exit(1);
audiosink.erase();
@@ -1127,6 +1144,8 @@ static void parse_arguments (int argc, char *argv[]) {
db_low = db1;
db_high = db2;
printf("db range %f:%f\n", db_low, db_high);
} else if (arg == "-h265") {
h265_support = true;
} else if (arg == "-nofreeze") {
nofreeze = true;
} else {
@@ -1379,11 +1398,7 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
dnssd_set_airplay_features(dnssd, 30, 1); // RAOP support: with this bit set, the AirTunes service is not required.
dnssd_set_airplay_features(dnssd, 31, 0); //
for (int i = 32; i < 64; i++) {
dnssd_set_airplay_features(dnssd, i, 0);
}
/* bits 32-63 are not used here: see https://emanualcozzi.net/docs/airplay2/features
/* bits 32-63 see https://emanualcozzi.net/docs/airplay2/features
dnssd_set_airplay_features(dnssd, 32, 0); // isCarPlay when ON,; Supports InitialVolume when OFF
dnssd_set_airplay_features(dnssd, 33, 0); // Supports Air Play Video Play Queue
dnssd_set_airplay_features(dnssd, 34, 0); // Supports Air Play from cloud (requires that bit 6 is ON)
@@ -1396,7 +1411,8 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
dnssd_set_airplay_features(dnssd, 40, 0); // Supports Buffered Audio
dnssd_set_airplay_features(dnssd, 41, 0); // Supports PTP
dnssd_set_airplay_features(dnssd, 42, 0); // Supports Screen Multi Codec
dnssd_set_airplay_features(dnssd, 42, 0); // Supports Screen Multi Codec (allows h265 video)
dnssd_set_airplay_features(dnssd, 43, 0); // Supports System Pairing
dnssd_set_airplay_features(dnssd, 44, 0); // is AP Valeria Screen Sender
@@ -1423,6 +1439,9 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
dnssd_set_airplay_features(dnssd, 61, 0); // Supports RFC2198 redundancy
*/
/* needed for h265 video support */
dnssd_set_airplay_features(dnssd, 42, (int) h265_support);
/* bit 27 of Features determines whether the AirPlay2 client-pairing protocol will be used (1) or not (0) */
dnssd_set_airplay_features(dnssd, 27, (int) setup_legacy_pairing);
return 0;
@@ -1460,7 +1479,15 @@ extern "C" void video_reset(void *cls) {
relaunch_video = true;
}
extern "C" void video_set_codec(void *cls, video_codec_t codec) {
if (use_video) {
if (codec == VIDEO_CODEC_H265) {
video_renderer_h265(true);
} else {
video_renderer_h265(false);
}
}
}
extern "C" void display_pin(void *cls, char *pin) {
int margin = 10;
@@ -1572,7 +1599,7 @@ extern "C" void audio_process (void *cls, raop_ntp_t *ntp, audio_decode_struct *
}
}
extern "C" void video_process (void *cls, raop_ntp_t *ntp, h264_decode_struct *data) {
extern "C" void video_process (void *cls, raop_ntp_t *ntp, video_decode_struct *data) {
if (dump_video) {
dump_video_to_file(data->data, data->data_len);
}
@@ -1831,6 +1858,7 @@ static int start_raop_server (unsigned short display[5], unsigned short tcp[3],
raop_cbs.check_register = check_register;
raop_cbs.export_dacp = export_dacp;
raop_cbs.video_reset = video_reset;
raop_cbs.video_set_codec = video_set_codec;
raop = raop_init(&raop_cbs);
if (raop == NULL) {
@@ -2037,21 +2065,22 @@ int main (int argc, char *argv[]) {
use_video = false;
videosink.erase();
videosink.append("fakesink");
videosink_options.erase();
LOGI("video_disabled");
display[3] = 1; /* set fps to 1 frame per sec when no video will be shown */
}
if (fullscreen && use_video) {
if (videosink == "waylandsink" || videosink == "vaapisink") {
videosink.append(" fullscreen=true");
videosink_options.append(" fullscreen=true");
}
}
if (videosink == "d3d11videosink" && use_video) {
if (videosink == "d3d11videosink" && videosink_options.empty() && use_video) {
if (fullscreen) {
videosink.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_PROPERTY fullscreen=true ");
videosink_options.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_PROPERTY fullscreen=true ");
} else {
videosink.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_ALT_ENTER ");
videosink_options.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_ALT_ENTER ");
}
LOGI("d3d11videosink is being used with option fullscreen-toggle-mode=alt-enter\n"
"Use Alt-Enter key combination to toggle into/out of full-screen mode");
@@ -2129,8 +2158,10 @@ int main (int argc, char *argv[]) {
}
if (use_video) {
n_renderers = h265_support ? 2 : 1;
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), fullscreen, video_sync);
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(),
videosink_options.c_str(),fullscreen, video_sync, h265_support);
video_renderer_start();
}
@@ -2158,10 +2189,22 @@ int main (int argc, char *argv[]) {
write_coverart(coverart_filename.c_str(), (const void *) empty_image, sizeof(empty_image));
}
restart:
/* set default resolutions for h264 or h265*/
if (!display[0] && !display[1]) {
if (h265_support) {
display[0] = 3840;
display[1] = 2160;
} else {
display[0] = 1920;
display[1] = 1080;
}
}
restart:
if (start_dnssd(server_hw_addr, server_name)) {
goto cleanup;
}
if (start_raop_server(display, tcp, udp, debug_log)) {
stop_dnssd();
goto cleanup;
@@ -2173,7 +2216,8 @@ int main (int argc, char *argv[]) {
}
reconnect:
compression_type = 0;
close_window = new_window_closing_behavior;
close_window = new_window_closing_behavior;
main_loop();
if (relaunch_video || reset_loop) {
if(reset_loop) {
@@ -2185,8 +2229,8 @@ int main (int argc, char *argv[]) {
if (use_video && close_window) {
video_renderer_destroy();
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), fullscreen,
video_sync);
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(),
videosink_options.c_str(), fullscreen, video_sync, h265_support);
video_renderer_start();
}
if (relaunch_video) {