mirror of
https://github.com/morgan9e/UxPlay
synced 2026-04-14 00:04:13 +09:00
@@ -12,18 +12,19 @@ if (ZOOMFIX )
|
||||
message (STATUS "cmake option ZOOMFIX is no longer used (if needed, ZOOMFIX is automatically applied if X11 libraries are present)" )
|
||||
endif()
|
||||
|
||||
|
||||
if ( NOT NO_X11_DEPS )
|
||||
find_package( X11 )
|
||||
if ( X11_FOUND )
|
||||
message (STATUS "Will compile using X11 Libraries (use cmake option -DNO_X11_DEPS=ON if X11 dependence is not wanted)" )
|
||||
link_libraries( ${X11_LIBRARIES} )
|
||||
include_directories( ${X11_INCLUDE_DIR} )
|
||||
else ()
|
||||
message (STATUS "X11 libraries not found, will compile without X11 dependence" )
|
||||
endif ()
|
||||
else()
|
||||
message (STATUS "will compile without X11 dependence" )
|
||||
if ( ( UNIX AND NOT APPLE ) OR USE_X11 )
|
||||
if ( NOT NO_X11_DEPS )
|
||||
find_package( X11 )
|
||||
if ( X11_FOUND )
|
||||
message (STATUS "Will compile using X11 Libraries (use cmake option -DNO_X11_DEPS=ON if X11 dependence is not wanted)" )
|
||||
link_libraries( ${X11_LIBRARIES} )
|
||||
include_directories( ${X11_INCLUDE_DIR} )
|
||||
else ()
|
||||
message (STATUS "X11 libraries not found, will compile without X11 dependence" )
|
||||
endif ()
|
||||
else()
|
||||
message (STATUS "will compile without X11 dependence" )
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if( UNIX AND NOT APPLE )
|
||||
|
||||
375
README.html
375
README.html
@@ -1,6 +1,6 @@
|
||||
<h1
|
||||
id="uxplay-1.61-airplay-mirror-and-airplay-audio-server-for-linux-macos-and-unix-now-also-runs-on-windows.">UxPlay
|
||||
1.61: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix
|
||||
id="uxplay-1.63-airplay-mirror-and-airplay-audio-server-for-linux-macos-and-unix-now-also-runs-on-windows.">UxPlay
|
||||
1.63: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix
|
||||
(now also runs on Windows).</h1>
|
||||
<h3
|
||||
id="now-developed-at-the-github-site-httpsgithub.comfdh2uxplay-where-all-user-issues-should-be-posted.">Now
|
||||
@@ -33,7 +33,10 @@ the GStreamer Video4Linux2 (v4l2) plugin, which supports both 32- and
|
||||
(omx). See <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki/UxPlay-on-Raspberry-Pi:-success-reports:">success
|
||||
reports</a>, so far limited to distributions available through
|
||||
Raspberry-Pi Imager.</p></li>
|
||||
Raspberry-Pi Imager. <strong>NEW!</strong> <em>The new-in-UxPlay-1.63
|
||||
option <code>-vsync</code> now makes UxPlay viable on other
|
||||
distributions for Raspberry Pi that do not include kernel support for
|
||||
hardware decoding!</em></p></li>
|
||||
<li><p><strong>New</strong>: Support for running on Microsoft Windows
|
||||
(builds with the MinGW-64 compiler in the unix-like MSYS2
|
||||
environment).</p></li>
|
||||
@@ -46,7 +49,8 @@ alt="Current Packaging status" /></a>.</p>
|
||||
<ul>
|
||||
<li><p>Install uxplay on Debian-based Linux systems with
|
||||
“<code>sudo apt install uxplay</code>”; on FreeBSD with
|
||||
“<code>sudo pkg install uxplay</code>”.</p></li>
|
||||
“<code>sudo pkg install uxplay</code>”. Also available on Arch-based
|
||||
systems through AUR.</p></li>
|
||||
<li><p>On Linux and *BSD the mDNS/DNS-SD (Bonjour/ZeroConf) local
|
||||
network services needed by UxPlay are usually provided by Avahi:
|
||||
<strong>if there is a firewall on the server that will host UxPlay, make
|
||||
@@ -54,24 +58,28 @@ sure the default network port for mDNS queries (UDP 5353) is
|
||||
open</strong>. (Uxplay can work without this port by using only the
|
||||
host’s loopback interface, but its visibility to clients will be
|
||||
degraded.) See the <a href="#troubleshooting">Troubleshooting</a>
|
||||
section below for more details.</p></li>
|
||||
section below for more details. (With a firewall, you also need to open
|
||||
ports for UxPlay, and use the <code>-p <n></code> option; see
|
||||
<code>man uxplay</code> or <code>uxplay -h</code>.)</p></li>
|
||||
<li><p>Even if you install your distribution’s pre-compiled uxplay
|
||||
binary package, you may need to read the instructions below for <a
|
||||
href="#running-uxplay">running UxPlay</a> to see which of your
|
||||
distribution’s <strong>GStreamer plugin packages</strong> you should
|
||||
also install.</p></li>
|
||||
<li><p>For Raspbery Pi (tested on RPi 4 model B, reported to work on RPi
|
||||
3 model B+), only Raspberry Pi OS, plus the Debian and Manjaro ARM-RPi4
|
||||
Images made available through the Raspberry Pi Imager, are known to
|
||||
provide the (out-of-mainline-kernel) kernel-module
|
||||
<strong>bcm2835-codec.ko</strong> maintained by Rasperry Pi, and needed
|
||||
for hardware-accelerated video decoding by the Broadcom GPU on the Pi,
|
||||
accessed using the GStreamer Video4Linux (v4l2) plugin. In addition, for
|
||||
Ubuntu and Manjaro, the v4l2 plugin needs a <a
|
||||
<li><p>For Raspberry Pi (tested on RPi 4 model B, reported to work on
|
||||
RPi 3 model B+), only Raspberry Pi OS, plus the Debian and Manjaro
|
||||
ARM-RPi4 images made available through the Raspberry Pi Imager, are
|
||||
known to provide the (out-of-mainline-kernel) kernel-module
|
||||
<strong>bcm2835-codec.ko</strong> <a
|
||||
href="https://github.com/raspberrypi/linux/tree/rpi-5.15.y/drivers/staging/vc04_services">maintained
|
||||
by Raspberry Pi</a>, and needed for hardware-accelerated video decoding
|
||||
by the Broadcom GPU on the Pi, accessed using the GStreamer Video4Linux
|
||||
(v4l2) plugin. In addition, for Ubuntu and Manjaro, the v4l2 plugin
|
||||
needs a <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches">patch</a>
|
||||
for GStreamer &lt; 1.22.</p></li>
|
||||
<li><p>To (easily) compile UxPlay from source, see the section <a
|
||||
href="#building-uxplay">building UxPlay</a>.</p></li>
|
||||
<li><p>To (easily) compile the latest UxPlay from source, see the
|
||||
section <a href="#getting-uxplay">Getting UxPlay</a>.</p></li>
|
||||
</ul>
|
||||
<h1 id="detailed-description-of-uxplay">Detailed description of
|
||||
UxPlay</h1>
|
||||
@@ -174,24 +182,21 @@ GStreamer-1.16.3 or earlier, replace <code>nvh264dec</code> by the older
|
||||
plugin <code>nvdec</code>, which must be built by the user: See <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki/NVIDIA-nvdec-and-nvenc-plugins">these
|
||||
instructions</a>.</p></li>
|
||||
<li><p><strong>Video4Linux2 support for the Raspberry Pi Broadcom
|
||||
<li><p><strong>Video4Linux2 support for the Raspberry Pi Broadcom 2835
|
||||
GPU</strong></p>
|
||||
<p>Raspberry Pi (RPi) computers can run UxPlay with software decoding of
|
||||
h264 video but this usually has unacceptable latency, and
|
||||
hardware-accelerated GPU decoding should be used. UxPlay accesses the
|
||||
GPU using the GStreamer plugin for Video4Linux2 (v4l2), which replaces
|
||||
unmaintained 32-bit-only OpenMax used by RPiPlay. Fixes to the v4l2
|
||||
plugin that allow it to work with UxPlay on RPi are now in the GStreamer
|
||||
development branch, and will appear in the upcoming GStreamer-1.22
|
||||
release. A backport (package
|
||||
<code>gstreamer1.0-plugins-good-1.18.4-2+deb11u1+rpt1</code>) has
|
||||
already appeared in RPi OS (Bullseye); for it to work with uxplay 1.56
|
||||
or later, you may need to use the <code>-bt709</code> option. For other
|
||||
distributions without the backport, you can find <a
|
||||
<p>Raspberry Pi (RPi) computers (tested on Pi 4 Model B) can now run
|
||||
UxPlay using software decoding of h264 video, but hardware-accelerated
|
||||
decoding by firmware in the Pi’s GPU is preferred. UxPlay accesses the
|
||||
GPU using the GStreamer-1.22 Video4Linux2 (v4l2) plugin; the plugin from
|
||||
older GStreamer needs a patch to backport fixes from v1.22: this has
|
||||
been done in the v1.18.4 version supplied by Raspberry Pi OS (Bullseye),
|
||||
and patches for this and later 1.20 versions are available in the UxPlay
|
||||
Wiki (see <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches">patching
|
||||
instructions for GStreamer</a> in the <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki">UxPlay Wiki</a> for GStreamer
|
||||
1.18.4 and later.</p></li>
|
||||
instructions for GStreamer</a>). Also required is the out-of-mainline
|
||||
Linux kernel module bcm2835-v4l2-codec maintained by Raspberry Pi, so
|
||||
far only included in Raspberry Pi OS, and two other distributions
|
||||
(Ubuntu, Manjaro) available with Raspberry Pi Imager.</p></li>
|
||||
</ul>
|
||||
<h3 id="note-to-packagers">Note to packagers:</h3>
|
||||
<p>UxPlay’s GPLv3 license does not have an added “exception” explicitly
|
||||
@@ -232,11 +237,12 @@ that cmake>=3.4.1 is installed:
|
||||
<code>build-essential</code> and <code>pkg-config</code> (or
|
||||
<code>pkgconf</code>) to this if needed).</p>
|
||||
<p>Make sure that your distribution provides OpenSSL 1.1.1 or later, and
|
||||
libplist 2.0 or later. (This means Debian 10 “Buster”, Ubuntu 18.04 or
|
||||
later.) If it does not, you may need to build and install these from
|
||||
source (see instructions at the end of this README). If you have a
|
||||
non-standard OpenSSL installation, you may need to set the environment
|
||||
variable OPENSSL_ROOT_DIR (<em>e.g.</em> ,
|
||||
libplist 2.0 or later. (This means Debian 10 “Buster” based systems
|
||||
(e.g., Ubuntu 18.04) or newer; on Debian 10 systems “libplist” is an
|
||||
older version, you need “libplist3”.) If it does not, you may need to
|
||||
build and install these from source (see instructions at the end of this
|
||||
README). If you have a non-standard OpenSSL installation, you may need
|
||||
to set the environment variable OPENSSL_ROOT_DIR (<em>e.g.</em> ,
|
||||
“<code>export OPENSSL_ROOT_DIR=/usr/local/lib64</code>” if that is where
|
||||
it is installed).</p>
|
||||
<p>In a terminal window, change directories to the source directory of
|
||||
@@ -302,7 +308,8 @@ the dns_sd library. OpenSSL is already installed as a System
|
||||
Library.</p></li>
|
||||
</ul>
|
||||
<h2 id="running-uxplay">Running UxPlay</h2>
|
||||
<h3 id="debian-based-systems-1">Debian-based systems</h3>
|
||||
<h3 id="installing-plugins-debian-based-linux-systems">Installing
|
||||
plugins (Debian-based Linux systems)</h3>
|
||||
<p>Next install the GStreamer plugins that are needed with
|
||||
<code>sudo apt-get install gstreamer1.0-<plugin></code>. Values of
|
||||
<code><plugin></code> required are:</p>
|
||||
@@ -324,44 +331,8 @@ examining the GStreamer installation. If sound is not working,
|
||||
“<strong>alsa</strong>”“,”<strong>pulseaudio</strong>”, or
|
||||
“<strong>pipewire</strong>” plugins may need to be installed, depending
|
||||
on how your audio is set up.</p>
|
||||
<p><strong>Finally, run uxplay in a terminal window</strong>. On some
|
||||
systems, you can toggle into and out of fullscreen mode with F11 or
|
||||
(held-down left Alt)+Enter keys. Use Ctrl-C (or close the window) to
|
||||
terminate it when done. If the UxPlay server is not seen by the iOS
|
||||
client’s drop-down “Screen Mirroring” panel, check that your DNS-SD
|
||||
server (usually avahi-daemon) is running: do this in a terminal window
|
||||
with <code>systemctl status avahi-daemon</code>. If this shows the
|
||||
avahi-daemon is not running, control it with
|
||||
<code>sudo systemctl [start,stop,enable,disable] avahi-daemon</code> (or
|
||||
avahi-daemon.service). If UxPlay is seen, but the client fails to
|
||||
connect when it is selected, there may be a firewall on the server that
|
||||
prevents UxPlay from receiving client connection requests unless some
|
||||
network ports are opened: if a firewall is active, also open UDP port
|
||||
5353 (for mDNS queries) needed by Avahi. See <a
|
||||
href="#troubleshooting">Troubleshooting</a> below for help with this or
|
||||
other problems.</p>
|
||||
<ul>
|
||||
<li>By default, UxPlay is locked to its current client until that client
|
||||
drops the connection; the option <code>-nohold</code> modifies this
|
||||
behavior so that when a new client requests a connection, it removes the
|
||||
current client and takes over.</li>
|
||||
</ul>
|
||||
<p>To display the accompanying “Cover Art” from sources like Apple Music
|
||||
in Audio-Only (ALAC) mode, run
|
||||
“<code>uxplay -ca <name> &</code>” in the background, then run
|
||||
an image viewer with an autoreload feature: an example is “feh”: run
|
||||
“<code>feh -R 1 <name></code>” in the foreground; terminate feh
|
||||
and then Uxplay with “<code>ctrl-C fg ctrl-C</code>”.</p>
|
||||
<p><strong>One common problem involves GStreamer attempting to use
|
||||
incorrectly-configured or absent accelerated hardware h264 video
|
||||
decoding (e.g., VAAPI). Try “<code>uxplay -avdec</code>” to force
|
||||
software video decoding; if this works you can then try to fix
|
||||
accelerated hardware video decoding if you need it, or just uninstall
|
||||
the GStreamer VAAPI plugin. If your system uses the Wayland compositor
|
||||
for graphics, use “<code>uxplay -vs waylandsink</code>”.</strong> See <a
|
||||
href="#usage">Usage</a> for more run-time options.</p>
|
||||
<h3 id="running-uxplay-non-debian-based-linux-or-bsd">Running uxplay
|
||||
Non-Debian-based Linux or *BSD</h3>
|
||||
<h3 id="installing-plugins-non-debian-based-linux-or-bsd">Installing
|
||||
plugins (Non-Debian-based Linux or *BSD)</h3>
|
||||
<ul>
|
||||
<li><p><strong>Red Hat, or clones like CentOS (now continued as Rocky
|
||||
Linux or Alma Linux):</strong> (sudo dnf install, or sudo yum install)
|
||||
@@ -381,8 +352,8 @@ start, with error: <strong>no element “avdec_aac”</strong>
|
||||
<li><p><strong>OpenSUSE:</strong> (sudo zypper install) The required
|
||||
GStreamer packages are: gstreamer-devel gstreamer-plugins-base-devel
|
||||
gstreamer-plugins-libav gstreamer-plugins-bad (+ gstreamer-plugins-vaapi
|
||||
for Intel graphics); in some cases, you may need to use gstreamer
|
||||
packages for OpenSUSE from <a
|
||||
for Intel graphics); in some cases, you may need to use gstreamer or
|
||||
libav* packages for OpenSUSE from <a
|
||||
href="https://ftp.gwdg.de/pub/linux/misc/packman/suse/">Packman</a>
|
||||
“Essentials” (which provides packages including plugins that OpenSUSE
|
||||
does not ship for license reasons).</p></li>
|
||||
@@ -394,16 +365,80 @@ gstreamer1-plugins, gstreamer1-plugins-* (* = core, good, bad, x, gtk,
|
||||
gl, vulkan, pulse, v4l2, …), (+ gstreamer1-vaapi for Intel
|
||||
graphics).</p></li>
|
||||
</ul>
|
||||
<h3 id="starting-uxplay">Starting UxPlay</h3>
|
||||
<p><strong>Finally, run uxplay in a terminal window</strong>. On some
|
||||
systems, you can toggle into and out of fullscreen mode with F11 or
|
||||
(held-down left Alt)+Enter keys. Use Ctrl-C (or close the window) to
|
||||
terminate it when done. If the UxPlay server is not seen by the iOS
|
||||
client’s drop-down “Screen Mirroring” panel, check that your DNS-SD
|
||||
server (usually avahi-daemon) is running: do this in a terminal window
|
||||
with <code>systemctl status avahi-daemon</code>. If this shows the
|
||||
avahi-daemon is not running, control it with
|
||||
<code>sudo systemctl [start,stop,enable,disable] avahi-daemon</code> (on
|
||||
non-systemd systems, such as *BSD, use
|
||||
<code>sudo service avahi-daemon [status, start, stop, restart, ...]</code>).
|
||||
If UxPlay is seen, but the client fails to connect when it is selected,
|
||||
there may be a firewall on the server that prevents UxPlay from
|
||||
receiving client connection requests unless some network ports are
|
||||
opened: if a firewall is active, also open UDP port 5353 (for mDNS
|
||||
queries) needed by Avahi. See <a
|
||||
href="#troubleshooting">Troubleshooting</a> below for help with this or
|
||||
other problems.</p>
|
||||
<ul>
|
||||
<li><p>you may find video is improved by the setting -fps 60 that allows
|
||||
some video to be played at 60 frames per second. (You can see what
|
||||
framerate is actually streaming by using -vs fpsdisplaysink, and/or
|
||||
-FPSdata.)</p></li>
|
||||
<li><p>By default, UxPlay is locked to its current client until that
|
||||
client drops the connection; since UxPlay-1.58, the option
|
||||
<code>-nohold</code> modifies this behavior so that when a new client
|
||||
requests a connection, it removes the current client and takes
|
||||
over.</p></li>
|
||||
<li><p>In its default mode, Uxplay uses a simple GStreamer mode
|
||||
(“sync=false”) that streams without using audio- and video-timestamps
|
||||
for synchronization. UxPlay 1.63 also introduces <code>-vsync</code> and
|
||||
<code>-async</code> as alternatives that use timestamps in Mirror and
|
||||
Audio-Only modes respectively (GStreamer’s “sync=true” mode). (These
|
||||
options also allow an optional positive (or negative) audio-delay in
|
||||
milliseconds for fine-tuning : <code>-vsync 20.5</code> delays audio
|
||||
relative to video by 0.0205 secs; a negative value advances it.) Use
|
||||
<code>-async</code> to synchronise video on the iOS client with ALAC
|
||||
Audio-Only mode audio streamer to the server, for example when watching
|
||||
Apple Music song lyrics on the client. Use <code>-vsync</code> in Mirror
|
||||
mode on low-powered systems such as Raspberry Pi when using
|
||||
<code>-avdec</code> software h264 video decoding. Simple streaming seems
|
||||
to maintain synchronisation of audio with video on desktop systems, but
|
||||
you may wish to experiment with <code>-vsync</code> there too.</p></li>
|
||||
<li><p>Since UxPlay-1.54, you can display the accompanying “Cover Art”
|
||||
from sources like Apple Music in Audio-Only (ALAC) mode: run
|
||||
“<code>uxplay -ca <name> &</code>” in the background, then run
|
||||
an image viewer with an autoreload feature: an example is “feh”: run
|
||||
“<code>feh -R 1 <name></code>” in the foreground; terminate feh
|
||||
and then Uxplay with “<code>ctrl-C fg ctrl-C</code>”.</p></li>
|
||||
</ul>
|
||||
<p><strong>One common problem involves GStreamer attempting to use
|
||||
incorrectly-configured or absent accelerated hardware h264 video
|
||||
decoding (e.g., VAAPI). Try “<code>uxplay -avdec</code>” to force
|
||||
software video decoding; if this works you can then try to fix
|
||||
accelerated hardware video decoding if you need it, or just uninstall
|
||||
the GStreamer VAAPI plugin. If your system uses the Wayland compositor
|
||||
for graphics, use “<code>uxplay -vs waylandsink</code>”.</strong> See <a
|
||||
href="#usage">Usage</a> for more run-time options.</p>
|
||||
<h3
|
||||
id="special-instructions-for-raspberry-pi-only-tested-on-model-4b"><strong>Special
|
||||
instructions for Raspberry Pi (only tested on model 4B)</strong>:</h3>
|
||||
<ul>
|
||||
<li><p>For good performance, the Raspberry Pi needs the GStreamer
|
||||
<li><p>If you use the software-only (h264) video-decoding UxPlay option
|
||||
<code>-avdec</code>, you also need option <code>-vsync</code> to keep
|
||||
audio and video synchronized (<code>-vsync</code> is a new feature;
|
||||
before it was introduced, software decoding on the Pi was not
|
||||
viable.)</p></li>
|
||||
<li><p>For best performance, the Raspberry Pi needs the GStreamer
|
||||
Video4linux2 plugin to use its Broadcom GPU hardware for decoding h264
|
||||
video. The plugin accesses the GPU using the bcm2835_codec kernel module
|
||||
which is maintained by Raspberry Pi in the drivers/staging/VC04_services
|
||||
part of the <a href="https://github.com/raspberrypi/linux">Raspberry Pi
|
||||
kernel tree</a>, but is not yet included in the mainline Linux kernel.
|
||||
video. This needs the bcm2835_codec kernel module which is maintained by
|
||||
Raspberry Pi in the drivers/staging/VC04_services part of the <a
|
||||
href="https://github.com/raspberrypi/linux">Raspberry Pi kernel
|
||||
tree</a>, but is not yet included in the mainline Linux kernel.
|
||||
Distributions for R Pi that supply it include Raspberry Pi OS, Ubuntu,
|
||||
and Manjaro. Some others may not. <strong>Without this kernel module,
|
||||
UxPlay cannot use the GPU.</strong></p></li>
|
||||
@@ -473,29 +508,37 @@ href="http://www.macports.org">MacPorts</a>, <a
|
||||
href="http://finkproject.org">Fink</a> or <a
|
||||
href="http://brew.sh">Homebrew</a>, or by a download from <a
|
||||
href="https://cmake.org/download/">https://cmake.org/download/</a>.</p>
|
||||
<p>First install OpenSSL and libplist: static versions of these libaries
|
||||
will be used, so they can be uninstalled after UxPlay is built. These
|
||||
are available in MacPorts and Homebrew, or they can easily be built from
|
||||
source (see instructions at the end of this README; this requires
|
||||
development tools autoconf, automake, libtool, which can be installed
|
||||
using MacPorts, HomeBrew, or Fink).</p>
|
||||
<p>First install OpenSSL and libplist: static versions of these
|
||||
libraries will be used, so they can be uninstalled after UxPlay is
|
||||
built. These are available in MacPorts and Homebrew, or they can easily
|
||||
be built from source (see instructions at the end of this README; this
|
||||
requires development tools autoconf, automake, libtool, which can be
|
||||
installed using MacPorts, HomeBrew, or Fink).</p>
|
||||
<p>Next get the latest macOS release of GStreamer-1.0.</p>
|
||||
<ul>
|
||||
<li>recommended: install the “official” GStreamer release for macOS from
|
||||
<a
|
||||
<li><p>recommended: install the “official” GStreamer release for macOS
|
||||
from <a
|
||||
href="https://gstreamer.freedesktop.org/download/">https://gstreamer.freedesktop.org/download/</a>.
|
||||
The alternative is to install it from Homebrew (MacPorts also supplies
|
||||
it, but compiled to use X11).</li>
|
||||
The alternative is to install it from Homebrew. MacPorts packages of
|
||||
GStreamer are compiled to use X11 and are <strong>NOT</strong>
|
||||
recommended.</p></li>
|
||||
<li><p>You could instead compile the “official” GStreamer release from
|
||||
source: GStreamer-1.22.0 has been successfully built this way on a
|
||||
system using MacPorts: see <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki/Building-GStreamer-from-Source-on-macOS-with-MacPorts">the
|
||||
UxPlay Wiki</a></p></li>
|
||||
</ul>
|
||||
<p><strong>For the “official” release</strong>: install both the macOS
|
||||
runtime and development installer packages. Assuming that the latest
|
||||
release is 1.20.4. install
|
||||
release is 1.20.5, install
|
||||
<code>gstreamer-1.0-1.20.5-universal.pkg</code> and
|
||||
<code>gstreamer-1.0-devel-1.20.5-universal.pkg</code>. (If you have an
|
||||
Intel-architecture Mac, and have problems with the “universal” packages,
|
||||
you can also use <code>gstreamer-1.0-1.18.6-x86_64.pkg</code> and
|
||||
<code>gstreamer-1.0-devel-1.18.6-x86_64.pkg</code>.) Click on them to
|
||||
<code>gstreamer-1.0-devel-1.20.5-universal.pkg</code>. Click on them to
|
||||
install (they install to /Library/FrameWorks/GStreamer.framework).</p>
|
||||
<ul>
|
||||
<li><strong>ADDED 2023-01-25: v1.22.0 has just been released, but these
|
||||
binaries seem to have problems, perhaps only on older macOS releases;
|
||||
use v1.20.5 if they don’t work for you.</strong></li>
|
||||
</ul>
|
||||
<p><strong>For Homebrew</strong>: pkgconfig is needed (“brew install
|
||||
pkgconfig”). Then “brew install gst-plugins-base gst-plugins-good
|
||||
gst-plugins-bad gst-libav”. This appears to be functionally equivalent
|
||||
@@ -532,17 +575,18 @@ recommended):</em></strong></p>
|
||||
<p>To install: “sudo port install pkgconfig”; “sudo port install
|
||||
gstreamer1-gst-plugins-base gstreamer1-gst-plugins-good
|
||||
gstreamer1-gst-plugins-bad gstreamer1-gst-libav”. <strong>The MacPorts
|
||||
GStreamer is built to use X11</strong>, so uxplay must be run from an
|
||||
XQuartz terminal, can use ZOOMFIX, and needs option “-vs ximagesink”. On
|
||||
an unibody (non-retina) MacBook Pro, the default resolution wxh =
|
||||
1920x1080 was too large, but using option “-s 800x600” worked. The
|
||||
MacPorts GStreamer pipeline seems fragile against attempts to change the
|
||||
X11 window size, or to rotations that switch a connected client between
|
||||
portrait and landscape mode while uxplay is running. Using the MacPorts
|
||||
X11 GStreamer seems only possible if the image size is left unchanged
|
||||
from the initial “-s wxh” setting (also use the iPad/iPhone setting that
|
||||
locks the screen orientation against switching between portrait and
|
||||
landscape mode as the device is rotated).</p>
|
||||
GStreamer is built to use X11</strong>: use the special CMake option
|
||||
<code>-DUSE_X11=ON</code> when building UxPlay. Then uxplay must be run
|
||||
from an XQuartz terminal, can use ZOOMFIX, and needs option “-vs
|
||||
ximagesink”. On an unibody (non-retina) MacBook Pro, the default
|
||||
resolution wxh = 1920x1080 was too large, but using option “-s 800x600”
|
||||
worked. The MacPorts GStreamer pipeline seems fragile against attempts
|
||||
to change the X11 window size, or to rotations that switch a connected
|
||||
client between portrait and landscape mode while uxplay is running.
|
||||
Using the MacPorts X11 GStreamer seems only possible if the image size
|
||||
is left unchanged from the initial “-s wxh” setting (also use the
|
||||
iPad/iPhone setting that locks the screen orientation against switching
|
||||
between portrait and landscape mode as the device is rotated).</p>
|
||||
<h2
|
||||
id="building-uxplay-on-microsoft-windows-using-msys2-with-the-mingw-64-compiler.">Building
|
||||
UxPlay on Microsoft Windows, using MSYS2 with the MinGW-64
|
||||
@@ -656,6 +700,12 @@ the mirror display (X11) window.</p>
|
||||
<p><strong>-nh</strong> Do not append “<span class="citation"
|
||||
data-cites="_hostname_">@_hostname_</span>” at the end of the AirPlay
|
||||
server name.</p>
|
||||
<p><strong>-sync</strong> (In Audio-Only (ALAC)) mode: this option
|
||||
synchronizes audio on the server with video on the client, but causes
|
||||
the client to add a delay to account for latency, so pausing the stream
|
||||
will not take effect immediately. This can be mitigated by using the
|
||||
<code>-al</code> audio latency setting to change the latency (default
|
||||
0.25 secs) that the server reports to the client.</p>
|
||||
<p><strong>-s wxh</strong> (e.g. -s 1920x1080 , which is the default )
|
||||
sets the display resolution (width and height, in pixels). (This may be
|
||||
a request made to the AirPlay client, and perhaps will not be the final
|
||||
@@ -751,6 +801,12 @@ parameters to be included with the audiosink name. (Some choices of
|
||||
audiosink might not work on your system.)</p>
|
||||
<p><strong>-as 0</strong> (or just <strong>-a</strong>) suppresses
|
||||
playing of streamed audio, but displays streamed video.</p>
|
||||
<p><strong>-al <em>x</em></strong> specifies an audio latency <em>x</em>
|
||||
in (decimal) seconds in Audio-only (ALAC), that is reported to the
|
||||
client. Values in the range [0.0, 10.0] seconds are allowed, and will be
|
||||
converted to a whole number of microseconds. Default is 0.25 sec (250000
|
||||
usec). (This replaces the <code>-ao</code> option introduced in v1.62,
|
||||
as a workaround for a problem that is now fixed).</p>
|
||||
<p><strong>-ca <em>filename</em></strong> provides a file (where
|
||||
<em>filename</em> can include a full path) used for output of “cover
|
||||
art” (from Apple Music, <em>etc.</em>,) in audio-only ALAC mode. This
|
||||
@@ -788,14 +844,15 @@ data is updated by the client at 1 second intervals.</p>
|
||||
<p><strong>-fps n</strong> sets a maximum frame rate (in frames per
|
||||
second) for the AirPlay client to stream video; n must be a whole number
|
||||
less than 256. (The client may choose to serve video at any frame rate
|
||||
lower than this; default is 30 fps.) A setting below 30 fps might be
|
||||
useful to reduce latency if you are running more than one instance of
|
||||
uxplay at the same time. <em>This setting is only an advisory to the
|
||||
client device, so setting a high value will not force a high
|
||||
framerate.</em> (You can test using “-vs fpsdisplaysink” to see what
|
||||
framerate is being received, or use the option -FPSdata which displays
|
||||
video-stream performance data continuously sent by the client during
|
||||
video-streaming.)</p>
|
||||
lower than this; default is 30 fps.) A setting of 60 fps may give you
|
||||
improved video but is not recommended on Raspberry Pi. A setting below
|
||||
30 fps might be useful to reduce latency if you are running more than
|
||||
one instance of uxplay at the same time. <em>This setting is only an
|
||||
advisory to the client device, so setting a high value will not force a
|
||||
high framerate.</em> (You can test using “-vs fpsdisplaysink” to see
|
||||
what framerate is being received, or use the option -FPSdata which
|
||||
displays video-stream performance data continuously sent by the client
|
||||
during video-streaming.)</p>
|
||||
<p><strong>-f {H|V|I}</strong> implements “videoflip” image transforms:
|
||||
H = horizontal flip (right-left flip, or mirror image); V = vertical
|
||||
flip ; I = 180 degree rotation or inversion (which is the combination of
|
||||
@@ -848,11 +905,13 @@ present, set the environment variable OPEN_SSL_ROOT_DIR to point to the
|
||||
correct one; on 64-bit Ubuntu, this is done by running
|
||||
<code>export OPENSSL_ROOT_DIR=/usr/lib/X86_64-linux-gnu/</code> before
|
||||
running cmake.</p>
|
||||
<h3
|
||||
id="uxplay-starts-but-either-stalls-or-stops-after-initialized-server-sockets-appears-without-the-server-name-showing-on-the-client.">1.
|
||||
uxplay starts, but either stalls or stops after “Initialized server
|
||||
socket(s)” appears (<em>without the server name showing on the
|
||||
client</em>).</h3>
|
||||
<h3 id="avahidns_sd-bonjourzeroconf-issues">1. <strong>Avahi/DNS_SD
|
||||
Bonjour/Zeroconf issues</strong></h3>
|
||||
<ul>
|
||||
<li><strong>uxplay starts, but either stalls or stops after “Initialized
|
||||
server socket(s)” appears (<em>without the server name showing on the
|
||||
client</em>)</strong>.</li>
|
||||
</ul>
|
||||
<p>If UxPlay stops with the “No DNS-SD Server found” message, this means
|
||||
that your network <strong>does not have a running Bonjour/zeroconf
|
||||
DNS-SD server.</strong></p>
|
||||
@@ -883,11 +942,21 @@ for airplay support.</em>)</p>
|
||||
<p>If UxPlay stalls <em>without an error message</em> and <em>without
|
||||
the server name showing on the client</em>, this is either
|
||||
pre-UxPlay-1.60 behavior when no DNS-SD server was found, or a network
|
||||
problem. After starting uxplay, use the utility
|
||||
problem.</p>
|
||||
<ul>
|
||||
<li><strong>Avahi works at first, but new clients do not see UxPlay, or
|
||||
clients that initially saw it stop doing so after they
|
||||
disconnect</strong>.</li>
|
||||
</ul>
|
||||
<p>This is because Avahi is only using the “loopback” network interface,
|
||||
and is not receiving mDNS queries from new clients that were not
|
||||
listening when UxPlay started.</p>
|
||||
<p>To check this, after starting uxplay, use the utility
|
||||
<code>avahi-browse -a -t</code> in a different terminal window on the
|
||||
server to verify that the UxPlay AirTunes and AirPlay services are
|
||||
correctly registered (only the AirTunes service is used in the “Legacy”
|
||||
AirPlay Mirror mode used by UxPlay).</p>
|
||||
AirPlay Mirror mode used by UxPlay, but the AirPlay service is used for
|
||||
the initial contact).</p>
|
||||
<p>The results returned by avahi-browse should show entries for uxplay
|
||||
like</p>
|
||||
<pre><code>+ eno1 IPv6 UxPlay AirPlay Remote Video local
|
||||
@@ -901,9 +970,9 @@ like</p>
|
||||
UxPlay host is probably blocking full DNS-SD service, and you need to
|
||||
open the default UDP port 5353 for mDNS requests, as loopback-based
|
||||
DNS-SD service is unreliable.</p>
|
||||
<p>If the UxPlay service is listed by avahi-browse, but is not seen by
|
||||
the client, the problem is likely to be a problem with the local
|
||||
network.</p>
|
||||
<p>If the UxPlay services are listed by avahi-browse as above, but are
|
||||
not seen by the client, the problem is likely to be a problem with the
|
||||
local network.</p>
|
||||
<h3
|
||||
id="uxplay-starts-but-stalls-after-initialized-server-sockets-appears-with-the-server-name-showing-on-the-client-but-the-client-fails-to-connect-when-the-uxplay-server-is-selected.">2.
|
||||
uxplay starts, but stalls after “Initialized server socket(s)” appears,
|
||||
@@ -936,13 +1005,18 @@ doesn’t work on your system</strong> (by default, GStreamer uses the
|
||||
occurred when a user with a firewall only opened two udp network ports:
|
||||
<strong>three</strong> are required (the third one receives the audio
|
||||
data).</p>
|
||||
<p><strong>Raspberry Pi</strong> devices (-rpi option) only work with
|
||||
hardware GPU decoding if the Video4Linux2 plugin in GStreamer v1.20.x or
|
||||
<p><strong>Raspberry Pi</strong> devices only work with hardware GPU
|
||||
h264 video decoding if the Video4Linux2 plugin in GStreamer v1.20.x or
|
||||
earlier has been patched (see the UxPlay <a
|
||||
href="https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches">Wiki</a>
|
||||
for patches). This may be fixed in the future when GStreamer-1.22 is
|
||||
released, or by backport patches in distributions such as Raspberry Pi
|
||||
OS (Bullseye).</p>
|
||||
for patches). This is fixed in GStreamer-1.22, and by backport patches
|
||||
from this in distributions such as Raspberry Pi OS (Bullseye):
|
||||
<strong>use option <code>-bt709</code> with the GStreamer-1.18.4 from
|
||||
Raspberry Pi OS</strong>. This also needs the bcm2835-codec kernel
|
||||
module that is not in the standard Linux kernel (it is available in
|
||||
Raspberry Pi OS, Ubuntu and Manjaro). <strong>If you do not have this
|
||||
kernel module, or GStreamer < 1.22 is not patched, use options
|
||||
<code>-avdec -vsync</code> for software h264-decoding.</strong></p>
|
||||
<p>Sometimes “autovideosink” may select the OpenGL renderer
|
||||
“glimagesink” which may not work correctly on your system. Try the
|
||||
options “-vs ximagesink” or “-vs xvimagesink” to see if using one of
|
||||
@@ -981,6 +1055,34 @@ when the client sends the “Stop Mirroring” signal, try the no-close
|
||||
option “-nc” that leaves the video window open.</p>
|
||||
<h3 id="gstreamer-issues-missing-plugins-etc.">4. GStreamer issues
|
||||
(missing plugins, etc.):</h3>
|
||||
<p>If UxPlay fails to start, with a message that a required GStreamer
|
||||
plugin (such as “libav”) was not found, first check with the GStreamer
|
||||
tool gst-inspect-1.0 to see what GStreamer knows is available. (You may
|
||||
need to install some additional GStreamer “tools” package to get
|
||||
gst-inspect-1.0). For, <em>e.g.</em> a libav problem, check with
|
||||
“<code>gst-inspect-1.0 libav</code>”. If it is not shown as available to
|
||||
GStreamer, but your package manager shows the relevant package as
|
||||
installed (as one user found), try entirely removing and reinstalling
|
||||
the package. That user found that a solution to a “<strong>Required
|
||||
gstreamer plugin ‘libav’ not found</strong>” message that kept recurring
|
||||
was to clear the user’s gstreamer cache with
|
||||
<code>rm -rf ~/.cache/gstreamer-1.0</code>.</p>
|
||||
<p>If it fails to start with an error like
|
||||
‘<code>no element "avdec_aac"</code>’ this is because even though
|
||||
gstreamer-libav is installed, it is incomplete because some plugins are
|
||||
missing: “<code>gst-inspect-1.0 | grep avdec_aac</code>” will show if
|
||||
avdec_aac is available. Some distributions (RedHat, SUSE, etc) provide
|
||||
incomplete versions of libav because of patent issues with codecs used
|
||||
by certain plugins. In those cases there will be some “extra package”
|
||||
provider like <a href="https://rpmfusion.org">RPM fusion</a> (RedHat) or
|
||||
<a href="http://packman.links2linux.org/">packman</a> (SUSE) where you
|
||||
can get complete packages (your distribution will usually provide
|
||||
instructions for this). The packages needed may be “libav*” or “ffmpeg*”
|
||||
packages: the GStreamer libav plugin package does not contain any codecs
|
||||
itself, it just provides a way for GStreamer to use ffmpeg/libav codec
|
||||
libraries which must be installed separately. For similar reasons,
|
||||
distributions may ship incomplete packages of GStreamer “plugins-bad”,
|
||||
which is where “license-problematical” plugins go.</p>
|
||||
<p>To troubleshoot GStreamer execute “export GST_DEBUG=2” to set the
|
||||
GStreamer debug-level environment-variable in the terminal where you
|
||||
will run uxplay, so that you see warning and error messages; see <a
|
||||
@@ -1053,6 +1155,17 @@ as “SupportsLegacyPairing”) of the “features” plist code (reported to
|
||||
the client by the AirPlay server) to be set. The “features” code and
|
||||
other settings are set in <code>UxPlay/lib/dnssdint.h</code>.</p>
|
||||
<h1 id="changelog">Changelog</h1>
|
||||
<p>1.63 2023-02-12 Reworked audio-video synchronization, with new
|
||||
options -vsync (for Mirror mode) and -async (for Audio-Only mode, to
|
||||
sync with client video). Option -vsync makes software h264 decoding of
|
||||
streamed videos with option -avdec viable on some recent Raspberry Pi
|
||||
models. Internal change: all times are now processed in nanoseconds
|
||||
units. Removed -ao option introduced in 1.62.</p>
|
||||
<p>1.62 2023-01-18 Added Audio-only mode time offset -ao x to allow user
|
||||
synchronization of ALAC audio playing on the server with video, song
|
||||
lyrics, etc. playing on the client. x = 5.0 appears to be optimal in
|
||||
many cases. Quality fixes: cleanup in volume changes, timestamps, some
|
||||
bugfixes.</p>
|
||||
<p>1.61 2022-12-30 Removed -t option (workaround for an Avahi issue,
|
||||
correctly solved by opening network port UDP 5353 in firewall). Remove
|
||||
-g debug flag from CMAKE_CFLAGS. Postpend (instead of prepend) build
|
||||
|
||||
235
README.md
235
README.md
@@ -1,4 +1,4 @@
|
||||
# UxPlay 1.61: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
|
||||
# UxPlay 1.63: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
|
||||
|
||||
### Now developed at the GitHub site [https://github.com/FDH2/UxPlay](https://github.com/FDH2/UxPlay) (where all user issues should be posted).
|
||||
|
||||
@@ -22,7 +22,8 @@
|
||||
* Support for Raspberry Pi, with hardware video acceleration using the GStreamer
|
||||
Video4Linux2 (v4l2) plugin, which supports both 32- and 64-bit systems, as the replacement for unmaintained 32-bit OpenMAX (omx).
|
||||
See [success reports](https://github.com/FDH2/UxPlay/wiki/UxPlay-on-Raspberry-Pi:-success-reports:), so far limited to
|
||||
distributions available through Raspberry-Pi Imager.
|
||||
distributions available through Raspberry-Pi Imager. **NEW!** _The new-in-UxPlay-1.63 option `-vsync` now makes UxPlay viable
|
||||
on other distributions for Raspberry Pi that do not include kernel support for hardware decoding!_
|
||||
|
||||
* **New**: Support for running on Microsoft Windows (builds with the MinGW-64 compiler in the
|
||||
unix-like MSYS2 environment).
|
||||
@@ -31,20 +32,26 @@
|
||||
|
||||
[](https://repology.org/project/uxplay/versions).
|
||||
|
||||
* Install uxplay on Debian-based Linux systems with "`sudo apt install uxplay`"; on FreeBSD with "``sudo pkg install uxplay``".
|
||||
* Install uxplay on Debian-based Linux systems with "`sudo apt install uxplay`"; on FreeBSD with "``sudo pkg install uxplay``". Also
|
||||
available on Arch-based systems through AUR.
|
||||
|
||||
* On Linux and \*BSD the mDNS/DNS-SD (Bonjour/ZeroConf) local network services needed by UxPlay are usually provided by Avahi: **if
|
||||
there is a firewall on the server that will host UxPlay, make sure the default network port for mDNS queries (UDP 5353) is open**. (Uxplay can work without this port by using
|
||||
only the host's loopback interface, but its visibility to clients will be degraded.) See the [Troubleshooting](#troubleshooting) section below for more details.
|
||||
there is a firewall on the server that will host UxPlay, make sure the default network port for mDNS queries (UDP 5353) is open**. (Uxplay
|
||||
can work without this port by using only the host's loopback interface, but its visibility to clients will be
|
||||
degraded.) See the [Troubleshooting](#troubleshooting) section below for more details. (With a firewall, you also need to open
|
||||
ports for UxPlay, and use the `-p <n>` option; see `man uxplay` or ``uxplay -h``.)
|
||||
|
||||
* Even if you install your distribution's pre-compiled uxplay binary package, you may need to read the instructions below
|
||||
for [running UxPlay](#running-uxplay) to see which of your distribution's **GStreamer plugin packages** you should also install.
|
||||
|
||||
* For Raspbery Pi (tested on RPi 4 model B, reported to work on RPi 3 model B+), only Raspberry Pi OS, plus the Debian and Manjaro ARM-RPi4 Images made available through the Raspberry Pi Imager, are known to provide the (out-of-mainline-kernel)
|
||||
kernel-module **bcm2835-codec.ko** maintained by Rasperry Pi, and needed for hardware-accelerated video decoding by the Broadcom GPU on the Pi, accessed using the GStreamer Video4Linux (v4l2) plugin. In addition,
|
||||
* For Raspberry Pi (tested on RPi 4 model B, reported to work on RPi 3 model B+), only Raspberry Pi OS, plus the Debian
|
||||
and Manjaro ARM-RPi4 images made available through the Raspberry Pi Imager, are known to provide the (out-of-mainline-kernel)
|
||||
kernel-module **bcm2835-codec.ko** [maintained by Raspberry Pi](https://github.com/raspberrypi/linux/tree/rpi-5.15.y/drivers/staging/vc04_services),
|
||||
and needed for hardware-accelerated video decoding by
|
||||
the Broadcom GPU on the Pi, accessed using the GStreamer Video4Linux (v4l2) plugin. In addition,
|
||||
for Ubuntu and Manjaro, the v4l2 plugin needs a [patch](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches) for GStreamer < 1.22.
|
||||
|
||||
* To (easily) compile UxPlay from source, see the section [building UxPlay](#building-uxplay).
|
||||
* To (easily) compile the latest UxPlay from source, see the section [Getting UxPlay](#getting-uxplay).
|
||||
|
||||
# Detailed description of UxPlay
|
||||
|
||||
@@ -137,20 +144,19 @@ if not, software decoding is used.
|
||||
must be built by the user:
|
||||
See [these instructions](https://github.com/FDH2/UxPlay/wiki/NVIDIA-nvdec-and-nvenc-plugins).
|
||||
|
||||
* **Video4Linux2 support for the Raspberry Pi Broadcom GPU**
|
||||
* **Video4Linux2 support for the Raspberry Pi Broadcom 2835 GPU**
|
||||
|
||||
Raspberry Pi (RPi) computers (tested on Pi 4 Model B) can now run UxPlay using software decoding
|
||||
of h264 video, but hardware-accelerated decoding by firmware in the Pi's
|
||||
GPU is preferred. UxPlay accesses the GPU using the GStreamer-1.22 Video4Linux2 (v4l2) plugin;
|
||||
the plugin from older GStreamer needs a patch to backport fixes from v1.22: this has been done in
|
||||
the v1.18.4 version supplied by Raspberry Pi OS (Bullseye), and patches for this and later 1.20 versions
|
||||
are available in the UxPlay Wiki
|
||||
(see [patching instructions for GStreamer](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches)). Also
|
||||
required is the out-of-mainline Linux kernel module bcm2835-v4l2-codec maintained by Raspberry Pi,
|
||||
so far only included in Raspberry Pi OS, and two other distributions (Ubuntu, Manjaro) available
|
||||
with Raspberry Pi Imager.
|
||||
|
||||
Raspberry Pi (RPi) computers can run UxPlay with software decoding
|
||||
of h264 video but this usually has unacceptable latency, and hardware-accelerated
|
||||
GPU decoding should be used. UxPlay accesses the GPU using the GStreamer
|
||||
plugin for Video4Linux2 (v4l2), which replaces unmaintained 32-bit-only OpenMax used by
|
||||
RPiPlay. Fixes to the v4l2 plugin that allow it to
|
||||
work with UxPlay on RPi are now in the GStreamer development branch, and will appear
|
||||
in the upcoming GStreamer-1.22 release.
|
||||
A backport (package `gstreamer1.0-plugins-good-1.18.4-2+deb11u1+rpt1`)
|
||||
has already appeared in RPi OS (Bullseye); for it to work with uxplay 1.56 or later, you may need to use the
|
||||
`-bt709` option. For other distributions without the backport, you can find
|
||||
[patching instructions for GStreamer](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches)
|
||||
in the [UxPlay Wiki](https://github.com/FDH2/UxPlay/wiki) for GStreamer 1.18.4 and later.
|
||||
|
||||
### Note to packagers:
|
||||
|
||||
@@ -189,8 +195,9 @@ pkgconf. Also make sure that cmake>=3.4.1 is installed:
|
||||
(or ``pkgconf``) to this if needed).
|
||||
|
||||
Make sure that your distribution provides OpenSSL 1.1.1 or later, and
|
||||
libplist 2.0 or later. (This means Debian 10 "Buster", Ubuntu 18.04 or
|
||||
later.) If it does not, you may need to build and install these from
|
||||
libplist 2.0 or later. (This means Debian 10 "Buster" based systems (e.g, Ubuntu 18.04) or newer;
|
||||
on Debian 10 systems "libplist" is an older version, you need "libplist3".) If it does
|
||||
not, you may need to build and install these from
|
||||
source (see instructions at the end of this README). If you have a non-standard OpenSSL
|
||||
installation, you may need to set the environment variable OPENSSL_ROOT_DIR
|
||||
(_e.g._ , "`export OPENSSL_ROOT_DIR=/usr/local/lib64`" if that is where it is installed).
|
||||
@@ -251,7 +258,7 @@ OpenSSL is already installed as a System Library.
|
||||
|
||||
## Running UxPlay
|
||||
|
||||
### Debian-based systems
|
||||
### Installing plugins (Debian-based Linux systems)
|
||||
|
||||
Next install the GStreamer plugins that are needed with `sudo apt-get install gstreamer1.0-<plugin>`.
|
||||
Values of `<plugin>` required are:
|
||||
@@ -270,41 +277,8 @@ Also install "**tools**" to get the utility gst-inspect-1.0 for
|
||||
examining the GStreamer installation. If sound is not working, "**alsa**"", "**pulseaudio**",
|
||||
or "**pipewire**" plugins may need to be installed, depending on how your audio is set up.
|
||||
|
||||
**Finally, run uxplay in a terminal window**. On some systems, you can toggle into and out of fullscreen mode
|
||||
with F11 or (held-down left Alt)+Enter keys. Use Ctrl-C (or close the window)
|
||||
to terminate it when done. If the UxPlay server is not seen by the
|
||||
iOS client's drop-down "Screen Mirroring" panel, check that your DNS-SD
|
||||
server (usually avahi-daemon) is running: do this in a terminal window
|
||||
with ```systemctl status avahi-daemon```.
|
||||
If this shows the avahi-daemon is not running, control it
|
||||
with ```sudo systemctl [start,stop,enable,disable] avahi-daemon``` (or
|
||||
avahi-daemon.service). If UxPlay is seen, but the client fails to connect
|
||||
when it is selected, there may be a firewall on the server that prevents
|
||||
UxPlay from receiving client connection requests unless some network ports
|
||||
are opened: if a firewall is active, also open UDP port 5353 (for mDNS queries)
|
||||
needed by Avahi. See [Troubleshooting](#troubleshooting) below for
|
||||
help with this or other problems.
|
||||
|
||||
* By default, UxPlay is locked to
|
||||
its current client until that client drops the connection; the option `-nohold` modifies this
|
||||
behavior so that when a new client requests a connection, it removes the current client and takes over.
|
||||
|
||||
To display the accompanying "Cover Art" from sources like Apple Music in Audio-Only (ALAC) mode,
|
||||
run "`uxplay -ca <name> &`" in the background, then run an image viewer with an autoreload feature: an example
|
||||
is "feh": run "``feh -R 1 <name>``"
|
||||
in the foreground; terminate feh and then Uxplay with "`ctrl-C fg ctrl-C`".
|
||||
|
||||
|
||||
**One common problem involves GStreamer
|
||||
attempting to use incorrectly-configured or absent accelerated hardware h264
|
||||
video decoding (e.g., VAAPI).
|
||||
Try "`uxplay -avdec`" to force software video decoding; if this works you can
|
||||
then try to fix accelerated hardware video decoding if you need it, or just uninstall the GStreamer VAAPI plugin. If
|
||||
your system uses the Wayland compositor for graphics, use "`uxplay -vs waylandsink`".**
|
||||
See [Usage](#usage) for more run-time options.
|
||||
|
||||
|
||||
### Running uxplay Non-Debian-based Linux or \*BSD
|
||||
### Installing plugins (Non-Debian-based Linux or \*BSD)
|
||||
|
||||
* **Red Hat, or clones like CentOS (now continued as Rocky Linux or Alma Linux):**
|
||||
(sudo dnf install, or sudo yum install) The required GStreamer packages are:
|
||||
@@ -322,7 +296,7 @@ error: **no element "avdec_aac"** ]_.
|
||||
(sudo zypper install)
|
||||
The required GStreamer packages are: gstreamer-devel
|
||||
gstreamer-plugins-base-devel gstreamer-plugins-libav gstreamer-plugins-bad (+ gstreamer-plugins-vaapi
|
||||
for Intel graphics); in some cases, you may need to use gstreamer packages for OpenSUSE
|
||||
for Intel graphics); in some cases, you may need to use gstreamer or libav* packages for OpenSUSE
|
||||
from [Packman](https://ftp.gwdg.de/pub/linux/misc/packman/suse/) "Essentials"
|
||||
(which provides packages including plugins that OpenSUSE does not ship for license reasons).
|
||||
|
||||
@@ -334,11 +308,63 @@ for Intel graphics).
|
||||
(\* = core, good, bad, x, gtk, gl, vulkan, pulse, v4l2, ...), (+ gstreamer1-vaapi for Intel graphics).
|
||||
|
||||
|
||||
### Starting UxPlay
|
||||
|
||||
**Finally, run uxplay in a terminal window**. On some systems, you can toggle into and out of fullscreen mode
|
||||
with F11 or (held-down left Alt)+Enter keys. Use Ctrl-C (or close the window)
|
||||
to terminate it when done. If the UxPlay server is not seen by the
|
||||
iOS client's drop-down "Screen Mirroring" panel, check that your DNS-SD
|
||||
server (usually avahi-daemon) is running: do this in a terminal window
|
||||
with ```systemctl status avahi-daemon```.
|
||||
If this shows the avahi-daemon is not running, control it
|
||||
with ```sudo systemctl [start,stop,enable,disable] avahi-daemon``` (on non-systemd systems, such as \*BSD,
|
||||
use ``sudo service avahi-daemon [status, start, stop, restart, ...]``). If UxPlay is
|
||||
seen, but the client fails to connect
|
||||
when it is selected, there may be a firewall on the server that prevents
|
||||
UxPlay from receiving client connection requests unless some network ports
|
||||
are opened: if a firewall is active, also open UDP port 5353 (for mDNS queries)
|
||||
needed by Avahi. See [Troubleshooting](#troubleshooting) below for
|
||||
help with this or other problems.
|
||||
|
||||
* you may find video is improved by the setting -fps 60 that allows some video to be played at 60 frames
|
||||
per second. (You can see what framerate is actually streaming by using -vs fpsdisplaysink, and/or -FPSdata.)
|
||||
|
||||
* By default, UxPlay is locked to
|
||||
its current client until that client drops the connection; since UxPlay-1.58, the option `-nohold` modifies this
|
||||
behavior so that when a new client requests a connection, it removes the current client and takes over.
|
||||
|
||||
* In its default mode, Uxplay uses a simple GStreamer mode ("sync=false") that streams without using audio- and
|
||||
video-timestamps for synchronization. UxPlay 1.63 also introduces `-vsync` and `-async` as alternatives that use timestamps
|
||||
in Mirror and Audio-Only modes respectively (GStreamer's "sync=true" mode). (These options also allow an optional
|
||||
positive (or negative) audio-delay in
|
||||
milliseconds for fine-tuning : `-vsync 20.5` delays audio relative to video by 0.0205 secs; a negative value advances it.)
|
||||
Use `-async` to synchronise video on the iOS client with ALAC Audio-Only mode audio streamed to the server, for example
|
||||
when watching Apple Music song lyrics on the client. Use `-vsync` in Mirror mode
|
||||
on low-powered systems such as Raspberry Pi when using `-avdec` software h264 video decoding. Simple streaming seems to maintain
|
||||
synchronisation of audio with video on desktop systems, but you may wish to experiment with `-vsync` there too.
|
||||
|
||||
* Since UxPlay-1.54, you can display the accompanying "Cover Art" from sources like Apple Music in Audio-Only (ALAC) mode:
|
||||
run "`uxplay -ca <name> &`" in the background, then run an image viewer with an autoreload feature: an example
|
||||
is "feh": run "``feh -R 1 <name>``"
|
||||
in the foreground; terminate feh and then Uxplay with "`ctrl-C fg ctrl-C`".
|
||||
|
||||
**One common problem involves GStreamer
|
||||
attempting to use incorrectly-configured or absent accelerated hardware h264
|
||||
video decoding (e.g., VAAPI).
|
||||
Try "`uxplay -avdec`" to force software video decoding; if this works you can
|
||||
then try to fix accelerated hardware video decoding if you need it, or just uninstall the GStreamer VAAPI plugin. If
|
||||
your system uses the Wayland compositor for graphics, use "`uxplay -vs waylandsink`".**
|
||||
See [Usage](#usage) for more run-time options.
|
||||
|
||||
|
||||
### **Special instructions for Raspberry Pi (only tested on model 4B)**:
|
||||
|
||||
* For good performance, the Raspberry Pi needs the GStreamer Video4linux2 plugin to use its Broadcom GPU hardware
|
||||
for decoding h264 video. The plugin accesses the GPU using the bcm2835_codec kernel module
|
||||
* If you use the software-only (h264) video-decoding UxPlay option `-avdec`, you also need option `-vsync` to keep
|
||||
audio and video synchronized (`-vsync` is a new feature; before it was introduced,
|
||||
software decoding on the Pi was not viable.)
|
||||
|
||||
* For best performance, the Raspberry Pi needs the GStreamer Video4linux2 plugin to use its Broadcom GPU hardware
|
||||
for decoding h264 video. This needs the bcm2835_codec kernel module
|
||||
which is maintained by Raspberry Pi in the drivers/staging/VC04_services part of
|
||||
the [Raspberry Pi kernel tree](https://github.com/raspberrypi/linux), but
|
||||
is not yet included in the mainline Linux kernel. Distributions for R Pi that supply it include Raspberry Pi OS, Ubuntu,
|
||||
@@ -396,7 +422,7 @@ this can be done with package managers [MacPorts](http://www.macports.org),
|
||||
[Fink](http://finkproject.org) or [Homebrew](http://brew.sh), or by a download from
|
||||
[https://cmake.org/download/](https://cmake.org/download/).
|
||||
|
||||
First install OpenSSL and libplist: static versions of these libaries will be used, so they can be uninstalled after UxPlay is built.
|
||||
First install OpenSSL and libplist: static versions of these libraries will be used, so they can be uninstalled after UxPlay is built.
|
||||
These are available in MacPorts and Homebrew, or they can easily be built from source (see instructions at the end of this README; this
|
||||
requires development tools autoconf, automake, libtool, which can be installed using MacPorts, HomeBrew, or Fink).
|
||||
|
||||
@@ -404,16 +430,18 @@ requires development tools autoconf, automake, libtool, which can be installed u
|
||||
Next get the latest macOS release of GStreamer-1.0.
|
||||
|
||||
* recommended: install the "official" GStreamer release for macOS
|
||||
from [https://gstreamer.freedesktop.org/download/](https://gstreamer.freedesktop.org/download/). The alternative is to install it from Homebrew
|
||||
(MacPorts also supplies it, but compiled to use X11).
|
||||
from [https://gstreamer.freedesktop.org/download/](https://gstreamer.freedesktop.org/download/). The alternative is to install it from Homebrew. MacPorts
|
||||
packages of GStreamer are compiled to use X11 and are **NOT** recommended.
|
||||
|
||||
**For the "official" release**: install both the macOS runtime and development installer packages. Assuming that the latest release is 1.20.4.
|
||||
install `gstreamer-1.0-1.20.5-universal.pkg` and ``gstreamer-1.0-devel-1.20.5-universal.pkg``. (If
|
||||
you have an Intel-architecture Mac, and have problems with the "universal" packages, you can also
|
||||
use `gstreamer-1.0-1.18.6-x86_64.pkg` and ``gstreamer-1.0-devel-1.18.6-x86_64.pkg``.) Click on them to
|
||||
* You could instead compile the "official" GStreamer release from source: GStreamer-1.22.0 has been successfully
|
||||
built this way on a system using MacPorts: see [the UxPlay Wiki](https://github.com/FDH2/UxPlay/wiki/Building-GStreamer-from-Source-on-macOS-with-MacPorts)
|
||||
|
||||
**For the "official" release**: install both the macOS runtime and development installer packages. Assuming that the latest release is 1.20.5
|
||||
install `gstreamer-1.0-1.20.5-universal.pkg` and ``gstreamer-1.0-devel-1.20.5-universal.pkg``. Click on them to
|
||||
install (they install to /Library/FrameWorks/GStreamer.framework).
|
||||
|
||||
|
||||
* **ADDED 2023-01-25: v1.22.0 has just been released, but these binaries
|
||||
seem to have problems, perhaps only on older macOS releases; use v1.20.5 if they dont work for you.**
|
||||
|
||||
**For Homebrew**: pkgconfig is needed ("brew install pkgconfig").
|
||||
Then
|
||||
@@ -445,7 +473,8 @@ Finally, build and install uxplay: open a terminal and change into the UxPlay so
|
||||
***Using GStreamer installed from MacPorts (not recommended):***
|
||||
|
||||
To install: "sudo port install pkgconfig"; "sudo port install gstreamer1-gst-plugins-base gstreamer1-gst-plugins-good gstreamer1-gst-plugins-bad gstreamer1-gst-libav".
|
||||
**The MacPorts GStreamer is built to use X11**, so uxplay must be run from an XQuartz terminal, can use ZOOMFIX, and needs
|
||||
**The MacPorts GStreamer is built to use X11**: use the special CMake option `-DUSE_X11=ON` when building UxPlay.
|
||||
Then uxplay must be run from an XQuartz terminal, can use ZOOMFIX, and needs
|
||||
option "-vs ximagesink". On an unibody (non-retina) MacBook Pro, the default resolution wxh = 1920x1080 was too large,
|
||||
but using option "-s 800x600" worked. The MacPorts GStreamer pipeline seems fragile against attempts to change
|
||||
the X11 window size, or to rotations that switch a connected client between portrait and landscape mode while uxplay is running.
|
||||
@@ -551,6 +580,12 @@ Options:
|
||||
|
||||
**-nh** Do not append "@_hostname_" at the end of the AirPlay server name.
|
||||
|
||||
**-sync** (In Audio-Only (ALAC)) mode: this option synchronizes audio on the server with video on the client,
|
||||
but causes the client to add a delay to account for latency, so pausing the stream will not take effect
|
||||
immediately. This can be mitigated by using the `-al` audio latency setting to change the latency (default 0.25 secs)
|
||||
that the server reports to the client.
|
||||
|
||||
|
||||
**-s wxh** (e.g. -s 1920x1080 , which is the default ) sets the display resolution (width and height,
|
||||
in pixels). (This may be a
|
||||
request made to the AirPlay client, and perhaps will not
|
||||
@@ -642,6 +677,11 @@ which will not work if a firewall is running.
|
||||
|
||||
**-as 0** (or just **-a**) suppresses playing of streamed audio, but displays streamed video.
|
||||
|
||||
**-al _x_** specifies an audio latency _x_ in (decimal) seconds in Audio-only (ALAC), that is reported to the client. Values
|
||||
in the range [0.0, 10.0] seconds are allowed, and will be converted to a whole number of microseconds. Default
|
||||
is 0.25 sec (250000 usec). (This replaces the `-ao` option introduced in v1.62, as a workaround for a problem that
|
||||
is now fixed).
|
||||
|
||||
**-ca _filename_** provides a file (where _filename_ can include a full path) used for output of "cover art"
|
||||
(from Apple Music, _etc._,) in audio-only ALAC mode. This file is overwritten with the latest cover art as
|
||||
it arrives. Cover art (jpeg format) is discarded if this option is not used. Use with an image viewer that reloads the image
|
||||
@@ -670,7 +710,8 @@ which will not work if a firewall is running.
|
||||
**-fps n** sets a maximum frame rate (in frames per second) for the AirPlay
|
||||
client to stream video; n must be a whole number less than 256.
|
||||
(The client may choose to serve video at any frame rate lower
|
||||
than this; default is 30 fps.) A setting below 30 fps might be useful to
|
||||
than this; default is 30 fps.) A setting of 60 fps may give you improved video
|
||||
but is not recommended on Raspberry Pi. A setting below 30 fps might be useful to
|
||||
reduce latency if you are running more than one instance of uxplay at the same time.
|
||||
_This setting is only an advisory to
|
||||
the client device, so setting a high value will not force a high framerate._
|
||||
@@ -724,7 +765,9 @@ Solution: when more than one installation of OpenSSL is present, set the environ
|
||||
on 64-bit Ubuntu, this is done by
|
||||
running `export OPENSSL_ROOT_DIR=/usr/lib/X86_64-linux-gnu/` before running cmake.
|
||||
|
||||
### 1. uxplay starts, but either stalls or stops after "Initialized server socket(s)" appears (_without the server name showing on the client_).
|
||||
### 1. **Avahi/DNS_SD Bonjour/Zeroconf issues**
|
||||
|
||||
* **uxplay starts, but either stalls or stops after "Initialized server socket(s)" appears (_without the server name showing on the client_)**.
|
||||
|
||||
If UxPlay stops with the "No DNS-SD Server found" message, this means that your network **does not have a running Bonjour/zeroconf DNS-SD server.**
|
||||
|
||||
@@ -747,9 +790,15 @@ uncomment a line for airplay support._)
|
||||
|
||||
If UxPlay stalls _without an error message_ and _without the server name showing on the client_, this is either pre-UxPlay-1.60
|
||||
behavior when no DNS-SD server was found, or a network problem.
|
||||
After starting uxplay, use the utility ``avahi-browse -a -t`` in a different terminal window on the server to
|
||||
|
||||
* **Avahi works at first, but new clients do not see UxPlay, or clients that initially saw it stop doing so after they disconnect**.
|
||||
|
||||
This is because Avahi is only using the "loopback" network interface, and is not receiving mDNS queries from new clients that were not
|
||||
listening when UxPlay started.
|
||||
|
||||
To check this, after starting uxplay, use the utility ``avahi-browse -a -t`` in a different terminal window on the server to
|
||||
verify that the UxPlay AirTunes and AirPlay services are correctly registered (only the AirTunes service is
|
||||
used in the "Legacy" AirPlay Mirror mode used by UxPlay).
|
||||
used in the "Legacy" AirPlay Mirror mode used by UxPlay, but the AirPlay service is used for the initial contact).
|
||||
|
||||
The results returned by avahi-browse should show entries for
|
||||
uxplay like
|
||||
@@ -768,7 +817,7 @@ If only the loopback ("lo") entries are shown, a firewall on the UxPlay host
|
||||
is probably blocking full DNS-SD service, and you need to open the default UDP port 5353 for mDNS requests,
|
||||
as loopback-based DNS-SD service is unreliable.
|
||||
|
||||
If the UxPlay service is listed by avahi-browse, but is not seen by the client,
|
||||
If the UxPlay services are listed by avahi-browse as above, but are not seen by the client,
|
||||
the problem is likely to be a problem with the local network.
|
||||
|
||||
|
||||
@@ -795,9 +844,13 @@ to guess what are the "best" plugins to use on your system).
|
||||
A different reason for no audio occurred when a user with a firewall only opened two udp network
|
||||
ports: **three** are required (the third one receives the audio data).
|
||||
|
||||
**Raspberry Pi** devices (-rpi option) only work with hardware GPU decoding if the Video4Linux2 plugin in GStreamer v1.20.x or earlier has been patched
|
||||
**Raspberry Pi** devices only work with hardware GPU h264 video decoding if the Video4Linux2 plugin in GStreamer v1.20.x or earlier has been patched
|
||||
(see the UxPlay [Wiki](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches) for patches).
|
||||
This may be fixed in the future when GStreamer-1.22 is released, or by backport patches in distributions such as Raspberry Pi OS (Bullseye).
|
||||
This is fixed in GStreamer-1.22, and by backport patches from this in distributions such as Raspberry Pi OS (Bullseye): **use option `-bt709` with the
|
||||
GStreamer-1.18.4 from Raspberry Pi OS**.
|
||||
This also needs the bcm2835-codec kernel module that is not in the standard Linux kernel (it is available in Raspberry Pi OS, Ubuntu and Manjaro).
|
||||
**If you do not have this kernel module, or GStreamer < 1.22 is not patched, use options `-avdec -vsync` for software h264-decoding.**
|
||||
|
||||
|
||||
Sometimes "autovideosink" may select the OpenGL renderer "glimagesink" which
|
||||
may not work correctly on your system. Try the options "-vs ximagesink" or
|
||||
@@ -828,6 +881,23 @@ the client sends the "Stop Mirroring" signal, try the no-close option "-nc" that
|
||||
|
||||
### 4. GStreamer issues (missing plugins, etc.):
|
||||
|
||||
If UxPlay fails to start, with a message that a required GStreamer plugin (such as "libav") was not found, first check with the GStreamer tool
|
||||
gst-inspect-1.0 to see what GStreamer knows is available. (You may need to install some additional GStreamer "tools" package to get gst-inspect-1.0).
|
||||
For, _e.g._ a libav problem, check with "`gst-inspect-1.0 libav`". If it is not shown as available to GStreamer, but your package manager
|
||||
shows the relevant package as installed (as one user found), try entirely removing and reinstalling the package.
|
||||
That user found that a solution to a "**Required gstreamer plugin 'libav' not found**" message that kept recurring was to clear the user's gstreamer
|
||||
cache with `rm -rf ~/.cache/gstreamer-1.0`.
|
||||
|
||||
If it fails to start with an error like '`no element "avdec_aac"`' this is
|
||||
because even though gstreamer-libav is installed, it is incomplete because some plugins are missing: "`gst-inspect-1.0 | grep avdec_aac`" will
|
||||
show if avdec_aac is available. Some distributions (RedHat, SUSE, etc) provide incomplete versions of libav because of patent issues with codecs used by
|
||||
certain plugins. In those cases there will be some "extra package" provider
|
||||
like [RPM fusion](https://rpmfusion.org) (RedHat) or [packman](http://packman.links2linux.org/) (SUSE) where you can get complete packages (your
|
||||
distribution will usually provide instructions for this). The packages
|
||||
needed may be "libav\*" or "ffmpeg\*" packages: the GStreamer libav plugin package does not contain any codecs itself, it just provides a way
|
||||
for GStreamer to use ffmpeg/libav codec libraries which must be installed separately. For similar reasons, distributions may ship incomplete packages
|
||||
of GStreamer "plugins-bad", which is where "license-problematical" plugins go.
|
||||
|
||||
To troubleshoot GStreamer execute "export GST_DEBUG=2"
|
||||
to set the GStreamer debug-level environment-variable in the terminal
|
||||
where you will run uxplay, so that you see warning and error messages;
|
||||
@@ -890,6 +960,17 @@ tvOS 12.2.1); it seems that the use of "legacy" protocol just requires bit 27 (l
|
||||
The "features" code and other settings are set in `UxPlay/lib/dnssdint.h`.
|
||||
|
||||
# Changelog
|
||||
1.63 2023-02-12 Reworked audio-video synchronization, with new options -vsync (for Mirror mode) and
|
||||
-async (for Audio-Only mode, to sync with client video). Option -vsync makes software
|
||||
h264 decoding of streamed videos with option -avdec viable on some recent Raspberry Pi models.
|
||||
Internal change: all times are now processed in nanoseconds units. Removed -ao option
|
||||
introduced in 1.62.
|
||||
|
||||
1.62 2023-01-18 Added Audio-only mode time offset -ao x to allow user synchronization of ALAC
|
||||
audio playing on the server with video, song lyrics, etc. playing on the client.
|
||||
x = 5.0 appears to be optimal in many cases. Quality fixes: cleanup in volume
|
||||
changes, timestamps, some bugfixes.
|
||||
|
||||
1.61 2022-12-30 Removed -t option (workaround for an Avahi issue, correctly solved by opening network
|
||||
port UDP 5353 in firewall). Remove -g debug flag from CMAKE_CFLAGS. Postpend (instead
|
||||
of prepend) build environment CFLAGS to CMAKE_CFLAGS. Refactor parts of uxplay.cpp
|
||||
|
||||
344
README.txt
344
README.txt
@@ -1,4 +1,4 @@
|
||||
# UxPlay 1.61: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
|
||||
# UxPlay 1.63: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
|
||||
|
||||
### Now developed at the GitHub site <https://github.com/FDH2/UxPlay> (where all user issues should be posted).
|
||||
|
||||
@@ -33,7 +33,9 @@
|
||||
(omx). See [success
|
||||
reports](https://github.com/FDH2/UxPlay/wiki/UxPlay-on-Raspberry-Pi:-success-reports:),
|
||||
so far limited to distributions available through Raspberry-Pi
|
||||
Imager.
|
||||
Imager. **NEW!** *The new-in-UxPlay-1.63 option `-vsync` now makes
|
||||
UxPlay viable on other distributions for Raspberry Pi that do not
|
||||
include kernel support for hardware decoding!*
|
||||
|
||||
- **New**: Support for running on Microsoft Windows (builds with the
|
||||
MinGW-64 compiler in the unix-like MSYS2 environment).
|
||||
@@ -45,7 +47,8 @@ status](https://repology.org/badge/vertical-allrepos/uxplay.svg)](https://repolo
|
||||
|
||||
- Install uxplay on Debian-based Linux systems with
|
||||
"`sudo apt install uxplay`"; on FreeBSD with
|
||||
"`sudo pkg install uxplay`".
|
||||
"`sudo pkg install uxplay`". Also available on Arch-based systems
|
||||
through AUR.
|
||||
|
||||
- On Linux and \*BSD the mDNS/DNS-SD (Bonjour/ZeroConf) local network
|
||||
services needed by UxPlay are usually provided by Avahi: **if there
|
||||
@@ -54,25 +57,28 @@ status](https://repology.org/badge/vertical-allrepos/uxplay.svg)](https://repolo
|
||||
can work without this port by using only the host's loopback
|
||||
interface, but its visibility to clients will be degraded.) See the
|
||||
[Troubleshooting](#troubleshooting) section below for more details.
|
||||
(With a firewall, you also need to open ports for UxPlay, and use
|
||||
the `-p <n>` option; see `man uxplay` or `uxplay -h`.)
|
||||
|
||||
- Even if you install your distribution's pre-compiled uxplay binary
|
||||
package, you may need to read the instructions below for [running
|
||||
UxPlay](#running-uxplay) to see which of your distribution's
|
||||
**GStreamer plugin packages** you should also install.
|
||||
|
||||
- For Raspbery Pi (tested on RPi 4 model B, reported to work on RPi 3
|
||||
- For Raspberry Pi (tested on RPi 4 model B, reported to work on RPi 3
|
||||
model B+), only Raspberry Pi OS, plus the Debian and Manjaro
|
||||
ARM-RPi4 Images made available through the Raspberry Pi Imager, are
|
||||
ARM-RPi4 images made available through the Raspberry Pi Imager, are
|
||||
known to provide the (out-of-mainline-kernel) kernel-module
|
||||
**bcm2835-codec.ko** maintained by Raspberry Pi, and needed for
|
||||
hardware-accelerated video decoding by the Broadcom GPU on the Pi,
|
||||
accessed using the GStreamer Video4Linux (v4l2) plugin. In addition,
|
||||
for Ubuntu and Manjaro, the v4l2 plugin needs a
|
||||
**bcm2835-codec.ko** [maintained by Raspberry
|
||||
Pi](https://github.com/raspberrypi/linux/tree/rpi-5.15.y/drivers/staging/vc04_services),
|
||||
and needed for hardware-accelerated video decoding by the Broadcom
|
||||
GPU on the Pi, accessed using the GStreamer Video4Linux (v4l2)
|
||||
plugin. In addition, for Ubuntu and Manjaro, the v4l2 plugin needs a
|
||||
[patch](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches)
|
||||
for GStreamer \< 1.22.
|
||||
|
||||
- To (easily) compile UxPlay from source, see the section [building
|
||||
UxPlay](#building-uxplay).
|
||||
- To (easily) compile the latest UxPlay from source, see the section
|
||||
[Getting UxPlay](#getting-uxplay).
|
||||
|
||||
# Detailed description of UxPlay
|
||||
|
||||
@@ -175,24 +181,22 @@ used.
|
||||
be built by the user: See [these
|
||||
instructions](https://github.com/FDH2/UxPlay/wiki/NVIDIA-nvdec-and-nvenc-plugins).
|
||||
|
||||
- **Video4Linux2 support for the Raspberry Pi Broadcom GPU**
|
||||
- **Video4Linux2 support for the Raspberry Pi Broadcom 2835 GPU**
|
||||
|
||||
Raspberry Pi (RPi) computers can run UxPlay with software decoding
|
||||
of h264 video but this usually has unacceptable latency, and
|
||||
hardware-accelerated GPU decoding should be used. UxPlay accesses
|
||||
the GPU using the GStreamer plugin for Video4Linux2 (v4l2), which
|
||||
replaces unmaintained 32-bit-only OpenMax used by RPiPlay. Fixes to
|
||||
the v4l2 plugin that allow it to work with UxPlay on RPi are now in
|
||||
the GStreamer development branch, and will appear in the upcoming
|
||||
GStreamer-1.22 release. A backport (package
|
||||
`gstreamer1.0-plugins-good-1.18.4-2+deb11u1+rpt1`) has already
|
||||
appeared in RPi OS (Bullseye); for it to work with uxplay 1.56 or
|
||||
later, you may need to use the `-bt709` option. For other
|
||||
distributions without the backport, you can find [patching
|
||||
instructions for
|
||||
GStreamer](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches)
|
||||
in the [UxPlay Wiki](https://github.com/FDH2/UxPlay/wiki) for
|
||||
GStreamer 1.18.4 and later.
|
||||
Raspberry Pi (RPi) computers (tested on Pi 4 Model B) can now run
|
||||
UxPlay using software decoding of h264 video, but
|
||||
hardware-accelerated decoding by firmware in the Pi's GPU is
|
||||
preferred. UxPlay accesses the GPU using the GStreamer-1.22
|
||||
Video4Linux2 (v4l2) plugin; the plugin from older GStreamer needs a
|
||||
patch to backport fixes from v1.22: this has been done in the
|
||||
v1.18.4 version supplied by Raspberry Pi OS (Bullseye), and patches
|
||||
for this and later 1.20 versions are available in the UxPlay Wiki
|
||||
(see [patching instructions for
|
||||
GStreamer](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches)).
|
||||
Also required is the out-of-mainline Linux kernel module
|
||||
bcm2835-v4l2-codec maintained by Raspberry Pi, so far only included
|
||||
in Raspberry Pi OS, and two other distributions (Ubuntu, Manjaro)
|
||||
available with Raspberry Pi Imager.
|
||||
|
||||
### Note to packagers:
|
||||
|
||||
@@ -236,11 +240,12 @@ cmake\>=3.4.1 is installed: "`sudo apt-get install cmake`" (add
|
||||
`build-essential` and `pkg-config` (or `pkgconf`) to this if needed).
|
||||
|
||||
Make sure that your distribution provides OpenSSL 1.1.1 or later, and
|
||||
libplist 2.0 or later. (This means Debian 10 "Buster", Ubuntu 18.04 or
|
||||
later.) If it does not, you may need to build and install these from
|
||||
source (see instructions at the end of this README). If you have a
|
||||
non-standard OpenSSL installation, you may need to set the environment
|
||||
variable OPENSSL_ROOT_DIR (*e.g.* ,
|
||||
libplist 2.0 or later. (This means Debian 10 "Buster" based systems
|
||||
(e.g., Ubuntu 18.04) or newer; on Debian 10 systems "libplist" is an
|
||||
older version, you need "libplist3".) If it does not, you may need to
|
||||
build and install these from source (see instructions at the end of this
|
||||
README). If you have a non-standard OpenSSL installation, you may need
|
||||
to set the environment variable OPENSSL_ROOT_DIR (*e.g.* ,
|
||||
"`export OPENSSL_ROOT_DIR=/usr/local/lib64`" if that is where it is
|
||||
installed).
|
||||
|
||||
@@ -307,7 +312,7 @@ installed)
|
||||
|
||||
## Running UxPlay
|
||||
|
||||
### Debian-based systems
|
||||
### Installing plugins (Debian-based Linux systems)
|
||||
|
||||
Next install the GStreamer plugins that are needed with
|
||||
`sudo apt-get install gstreamer1.0-<plugin>`. Values of `<plugin>`
|
||||
@@ -328,44 +333,7 @@ gst-inspect-1.0 for examining the GStreamer installation. If sound is
|
||||
not working, "**alsa**", "**pulseaudio**", or "**pipewire**" plugins may
|
||||
need to be installed, depending on how your audio is set up.
|
||||
|
||||
**Finally, run uxplay in a terminal window**. On some systems, you can
|
||||
toggle into and out of fullscreen mode with F11 or (held-down left
|
||||
Alt)+Enter keys. Use Ctrl-C (or close the window) to terminate it when
|
||||
done. If the UxPlay server is not seen by the iOS client's drop-down
|
||||
"Screen Mirroring" panel, check that your DNS-SD server (usually
|
||||
avahi-daemon) is running: do this in a terminal window with
|
||||
`systemctl status avahi-daemon`. If this shows the avahi-daemon is not
|
||||
running, control it with
|
||||
`sudo systemctl [start,stop,enable,disable] avahi-daemon` (or
|
||||
avahi-daemon.service). If UxPlay is seen, but the client fails to
|
||||
connect when it is selected, there may be a firewall on the server that
|
||||
prevents UxPlay from receiving client connection requests unless some
|
||||
network ports are opened: if a firewall is active, also open UDP port
|
||||
5353 (for mDNS queries) needed by Avahi. See
|
||||
[Troubleshooting](#troubleshooting) below for help with this or other
|
||||
problems.
|
||||
|
||||
- By default, UxPlay is locked to its current client until that client
|
||||
drops the connection; the option `-nohold` modifies this behavior so
|
||||
that when a new client requests a connection, it removes the current
|
||||
client and takes over.
|
||||
|
||||
To display the accompanying "Cover Art" from sources like Apple Music in
|
||||
Audio-Only (ALAC) mode, run "`uxplay -ca <name> &`" in the background,
|
||||
then run an image viewer with an autoreload feature: an example is "feh":
|
||||
run "`feh -R 1 <name>`" in the foreground; terminate feh and then Uxplay
|
||||
with "`ctrl-C fg ctrl-C`".
|
||||
|
||||
**One common problem involves GStreamer attempting to use
|
||||
incorrectly-configured or absent accelerated hardware h264 video
|
||||
decoding (e.g., VAAPI). Try "`uxplay -avdec`" to force software video
|
||||
decoding; if this works you can then try to fix accelerated hardware
|
||||
video decoding if you need it, or just uninstall the GStreamer VAAPI
|
||||
plugin. If your system uses the Wayland compositor for graphics, use
|
||||
"`uxplay -vs waylandsink`".** See [Usage](#usage) for more run-time
|
||||
options.
|
||||
|
||||
### Running uxplay Non-Debian-based Linux or \*BSD
|
||||
### Installing plugins (Non-Debian-based Linux or \*BSD)
|
||||
|
||||
- **Red Hat, or clones like CentOS (now continued as Rocky Linux or
|
||||
Alma Linux):** (sudo dnf install, or sudo yum install) The required
|
||||
@@ -386,7 +354,7 @@ options.
|
||||
are: gstreamer-devel gstreamer-plugins-base-devel
|
||||
gstreamer-plugins-libav gstreamer-plugins-bad (+
|
||||
gstreamer-plugins-vaapi for Intel graphics); in some cases, you may
|
||||
need to use gstreamer packages for OpenSUSE from
|
||||
need to use gstreamer or libav\* packages for OpenSUSE from
|
||||
[Packman](https://ftp.gwdg.de/pub/linux/misc/packman/suse/)
|
||||
"Essentials" (which provides packages including plugins that
|
||||
OpenSUSE does not ship for license reasons).
|
||||
@@ -399,13 +367,80 @@ options.
|
||||
gtk, gl, vulkan, pulse, v4l2, ...), (+ gstreamer1-vaapi for Intel
|
||||
graphics).
|
||||
|
||||
### Starting UxPlay
|
||||
|
||||
**Finally, run uxplay in a terminal window**. On some systems, you can
|
||||
toggle into and out of fullscreen mode with F11 or (held-down left
|
||||
Alt)+Enter keys. Use Ctrl-C (or close the window) to terminate it when
|
||||
done. If the UxPlay server is not seen by the iOS client's drop-down
|
||||
"Screen Mirroring" panel, check that your DNS-SD server (usually
|
||||
avahi-daemon) is running: do this in a terminal window with
|
||||
`systemctl status avahi-daemon`. If this shows the avahi-daemon is not
|
||||
running, control it with
|
||||
`sudo systemctl [start,stop,enable,disable] avahi-daemon` (on
|
||||
non-systemd systems, such as \*BSD, use
|
||||
`sudo service avahi-daemon [status, start, stop, restart, ...]`). If
|
||||
UxPlay is seen, but the client fails to connect when it is selected,
|
||||
there may be a firewall on the server that prevents UxPlay from
|
||||
receiving client connection requests unless some network ports are
|
||||
opened: if a firewall is active, also open UDP port 5353 (for mDNS
|
||||
queries) needed by Avahi. See [Troubleshooting](#troubleshooting) below
|
||||
for help with this or other problems.
|
||||
|
||||
- you may find video is improved by the setting -fps 60 that allows
|
||||
some video to be played at 60 frames per second. (You can see what
|
||||
framerate is actually streaming by using -vs fpsdisplaysink, and/or
|
||||
-FPSdata.)
|
||||
|
||||
- By default, UxPlay is locked to its current client until that client
|
||||
drops the connection; since UxPlay-1.58, the option `-nohold`
|
||||
modifies this behavior so that when a new client requests a
|
||||
connection, it removes the current client and takes over.
|
||||
|
||||
- In its default mode, Uxplay uses a simple GStreamer mode
|
||||
("sync=false") that streams without using audio- and
|
||||
video-timestamps for synchronization. UxPlay 1.63 also introduces
|
||||
`-vsync` and `-async` as alternatives that use timestamps in Mirror
|
||||
and Audio-Only modes respectively (GStreamer's "sync=true" mode).
|
||||
(These options also allow an optional positive (or negative)
|
||||
audio-delay in milliseconds for fine-tuning : `-vsync 20.5` delays
|
||||
audio relative to video by 0.0205 secs; a negative value advances
|
||||
it.) Use `-async` to synchronise video on the iOS client with ALAC
|
||||
Audio-Only mode audio streamed to the server, for example when
|
||||
watching Apple Music song lyrics on the client. Use `-vsync` in
|
||||
Mirror mode on low-powered systems such as Raspberry Pi when using
|
||||
`-avdec` software h264 video decoding. Simple streaming seems to
|
||||
maintain synchronisation of audio with video on desktop systems, but
|
||||
you may wish to experiment with `-vsync` there too.
|
||||
|
||||
- Since UxPlay-1.54, you can display the accompanying "Cover Art" from
|
||||
sources like Apple Music in Audio-Only (ALAC) mode: run
|
||||
"`uxplay -ca <name> &`" in the background, then run an image viewer
|
||||
with an autoreload feature: an example is "feh": run
|
||||
"`feh -R 1 <name>`" in the foreground; terminate feh and then Uxplay
|
||||
with "`ctrl-C fg ctrl-C`".
|
||||
|
||||
**One common problem involves GStreamer attempting to use
|
||||
incorrectly-configured or absent accelerated hardware h264 video
|
||||
decoding (e.g., VAAPI). Try "`uxplay -avdec`" to force software video
|
||||
decoding; if this works you can then try to fix accelerated hardware
|
||||
video decoding if you need it, or just uninstall the GStreamer VAAPI
|
||||
plugin. If your system uses the Wayland compositor for graphics, use
|
||||
"`uxplay -vs waylandsink`".** See [Usage](#usage) for more run-time
|
||||
options.
|
||||
|
||||
### **Special instructions for Raspberry Pi (only tested on model 4B)**:
|
||||
|
||||
- For good performance, the Raspberry Pi needs the GStreamer
|
||||
- If you use the software-only (h264) video-decoding UxPlay option
|
||||
`-avdec`, you also need option `-vsync` to keep audio and video
|
||||
synchronized (`-vsync` is a new feature; before it was introduced,
|
||||
software decoding on the Pi was not viable.)
|
||||
|
||||
- For best performance, the Raspberry Pi needs the GStreamer
|
||||
Video4linux2 plugin to use its Broadcom GPU hardware for decoding
|
||||
h264 video. The plugin accesses the GPU using the bcm2835_codec
|
||||
kernel module which is maintained by Raspberry Pi in the
|
||||
drivers/staging/VC04_services part of the [Raspberry Pi kernel
|
||||
h264 video. This needs the bcm2835_codec kernel module which is
|
||||
maintained by Raspberry Pi in the drivers/staging/VC04_services part
|
||||
of the [Raspberry Pi kernel
|
||||
tree](https://github.com/raspberrypi/linux), but is not yet included
|
||||
in the mainline Linux kernel. Distributions for R Pi that supply it
|
||||
include Raspberry Pi OS, Ubuntu, and Manjaro. Some others may not.
|
||||
@@ -478,7 +513,7 @@ with package managers [MacPorts](http://www.macports.org),
|
||||
[Fink](http://finkproject.org) or [Homebrew](http://brew.sh), or by a
|
||||
download from <https://cmake.org/download/>.
|
||||
|
||||
First install OpenSSL and libplist: static versions of these libaries
|
||||
First install OpenSSL and libplist: static versions of these libraries
|
||||
will be used, so they can be uninstalled after UxPlay is built. These
|
||||
are available in MacPorts and Homebrew, or they can easily be built from
|
||||
source (see instructions at the end of this README; this requires
|
||||
@@ -489,17 +524,23 @@ Next get the latest macOS release of GStreamer-1.0.
|
||||
|
||||
- recommended: install the "official" GStreamer release for macOS from
|
||||
<https://gstreamer.freedesktop.org/download/>. The alternative is to
|
||||
install it from Homebrew (MacPorts also supplies it, but compiled to
|
||||
use X11).
|
||||
install it from Homebrew. MacPorts packages of GStreamer are
|
||||
compiled to use X11 and are **NOT** recommended.
|
||||
|
||||
- You could instead compile the "official" GStreamer release from
|
||||
source: GStreamer-1.22.0 has been successfully built this way on a
|
||||
system using MacPorts: see [the UxPlay
|
||||
Wiki](https://github.com/FDH2/UxPlay/wiki/Building-GStreamer-from-Source-on-macOS-with-MacPorts)
|
||||
|
||||
**For the "official" release**: install both the macOS runtime and
|
||||
development installer packages. Assuming that the latest release is
|
||||
1.20.4. install `gstreamer-1.0-1.20.5-universal.pkg` and
|
||||
`gstreamer-1.0-devel-1.20.5-universal.pkg`. (If you have an
|
||||
Intel-architecture Mac, and have problems with the "universal" packages,
|
||||
you can also use `gstreamer-1.0-1.18.6-x86_64.pkg` and
|
||||
`gstreamer-1.0-devel-1.18.6-x86_64.pkg`.) Click on them to install (they
|
||||
install to /Library/FrameWorks/GStreamer.framework).
|
||||
1.20.5 install `gstreamer-1.0-1.20.5-universal.pkg` and
|
||||
`gstreamer-1.0-devel-1.20.5-universal.pkg`. Click on them to install
|
||||
(they install to /Library/FrameWorks/GStreamer.framework).
|
||||
|
||||
- **ADDED 2023-01-25: v1.22.0 has just been released, but these
|
||||
binaries seem to have problems, perhaps only on older macOS
|
||||
releases; use v1.20.5 if they dont work for you.**
|
||||
|
||||
**For Homebrew**: pkgconfig is needed ("brew install pkgconfig"). Then
|
||||
"brew install gst-plugins-base gst-plugins-good gst-plugins-bad
|
||||
@@ -540,12 +581,13 @@ make install" (same as for Linux).
|
||||
To install: "sudo port install pkgconfig"; "sudo port install
|
||||
gstreamer1-gst-plugins-base gstreamer1-gst-plugins-good
|
||||
gstreamer1-gst-plugins-bad gstreamer1-gst-libav". **The MacPorts
|
||||
GStreamer is built to use X11**, so uxplay must be run from an XQuartz
|
||||
terminal, can use ZOOMFIX, and needs option "-vs ximagesink". On an
|
||||
unibody (non-retina) MacBook Pro, the default resolution wxh = 1920x1080
|
||||
was too large, but using option "-s 800x600" worked. The MacPorts
|
||||
GStreamer pipeline seems fragile against attempts to change the X11
|
||||
window size, or to rotations that switch a connected client between
|
||||
GStreamer is built to use X11**: use the special CMake option
|
||||
`-DUSE_X11=ON` when building UxPlay. Then uxplay must be run from an
|
||||
XQuartz terminal, can use ZOOMFIX, and needs option "-vs ximagesink". On
|
||||
a unibody (non-retina) MacBook Pro, the default resolution wxh =
|
||||
1920x1080 was too large, but using option "-s 800x600" worked. The
|
||||
MacPorts GStreamer pipeline seems fragile against attempts to change the
|
||||
X11 window size, or to rotations that switch a connected client between
|
||||
portrait and landscape mode while uxplay is running. Using the MacPorts
|
||||
X11 GStreamer seems only possible if the image size is left unchanged
|
||||
from the initial "-s wxh" setting (also use the iPad/iPhone setting that
|
||||
@@ -670,6 +712,13 @@ will also now be the name shown above the mirror display (X11) window.
|
||||
**-nh** Do not append "@_hostname_" at the end of the AirPlay server
|
||||
name.
|
||||
|
||||
**-async** (In Audio-Only (ALAC) mode): this option synchronizes audio on
|
||||
the server with video on the client, but causes the client to add a
|
||||
delay to account for latency, so pausing the stream will not take effect
|
||||
immediately. This can be mitigated by using the `-al` audio latency
|
||||
setting to change the latency (default 0.25 secs) that the server
|
||||
reports to the client.
|
||||
|
||||
**-s wxh** (e.g. -s 1920x1080 , which is the default ) sets the display
|
||||
resolution (width and height, in pixels). (This may be a request made to
|
||||
the AirPlay client, and perhaps will not be the final resolution you
|
||||
@@ -775,6 +824,13 @@ name. (Some choices of audiosink might not work on your system.)
|
||||
**-as 0** (or just **-a**) suppresses playing of streamed audio, but
|
||||
displays streamed video.
|
||||
|
||||
**-al *x*** specifies an audio latency *x* in (decimal) seconds in
|
||||
Audio-only (ALAC), that is reported to the client. Values in the range
|
||||
\[0.0, 10.0\] seconds are allowed, and will be converted to a whole
|
||||
number of microseconds. Default is 0.25 sec (250000 usec). (This
|
||||
replaces the `-ao` option introduced in v1.62, as a workaround for a
|
||||
problem that is now fixed).
|
||||
|
||||
**-ca *filename*** provides a file (where *filename* can include a full
|
||||
path) used for output of "cover art" (from Apple Music, *etc.*,) in
|
||||
audio-only ALAC mode. This file is overwritten with the latest cover art
|
||||
@@ -814,13 +870,15 @@ updated by the client at 1 second intervals.
|
||||
**-fps n** sets a maximum frame rate (in frames per second) for the
|
||||
AirPlay client to stream video; n must be a whole number less than 256.
|
||||
(The client may choose to serve video at any frame rate lower than this;
|
||||
default is 30 fps.) A setting below 30 fps might be useful to reduce
|
||||
latency if you are running more than one instance of uxplay at the same
|
||||
time. *This setting is only an advisory to the client device, so setting
|
||||
a high value will not force a high framerate.* (You can test using "-vs
|
||||
fpsdisplaysink" to see what framerate is being received, or use the
|
||||
option -FPSdata which displays video-stream performance data
|
||||
continuously sent by the client during video-streaming.)
|
||||
default is 30 fps.) A setting of 60 fps may give you improved video but
|
||||
is not recommended on Raspberry Pi. A setting below 30 fps might be
|
||||
useful to reduce latency if you are running more than one instance of
|
||||
uxplay at the same time. *This setting is only an advisory to the client
|
||||
device, so setting a high value will not force a high framerate.* (You
|
||||
can test using "-vs fpsdisplaysink" to see what framerate is being
|
||||
received, or use the option -FPSdata which displays video-stream
|
||||
performance data continuously sent by the client during
|
||||
video-streaming.)
|
||||
|
||||
**-f {H\|V\|I}** implements "videoflip" image transforms: H = horizontal
|
||||
flip (right-left flip, or mirror image); V = vertical flip ; I = 180
|
||||
@@ -882,7 +940,11 @@ correct one; on 64-bit Ubuntu, this is done by running
|
||||
`export OPENSSL_ROOT_DIR=/usr/lib/X86_64-linux-gnu/` before running
|
||||
cmake.
|
||||
|
||||
### 1. uxplay starts, but either stalls or stops after "Initialized server socket(s)" appears (*without the server name showing on the client*).
|
||||
### 1. **Avahi/DNS_SD Bonjour/Zeroconf issues**
|
||||
|
||||
- **uxplay starts, but either stalls or stops after "Initialized
|
||||
server socket(s)" appears (*without the server name showing on the
|
||||
client*)**.
|
||||
|
||||
If UxPlay stops with the "No DNS-SD Server found" message, this means
|
||||
that your network **does not have a running Bonjour/zeroconf DNS-SD
|
||||
@@ -914,11 +976,21 @@ a line for airplay support.*)
|
||||
|
||||
If UxPlay stalls *without an error message* and *without the server name
|
||||
showing on the client*, this is either pre-UxPlay-1.60 behavior when no
|
||||
DNS-SD server was found, or a network problem. After starting uxplay,
|
||||
use the utility `avahi-browse -a -t` in a different terminal window on
|
||||
the server to verify that the UxPlay AirTunes and AirPlay services are
|
||||
correctly registered (only the AirTunes service is used in the "Legacy"
|
||||
AirPlay Mirror mode used by UxPlay).
|
||||
DNS-SD server was found, or a network problem.
|
||||
|
||||
- **Avahi works at first, but new clients do not see UxPlay, or
|
||||
clients that initially saw it stop doing so after they disconnect**.
|
||||
|
||||
This is because Avahi is only using the "loopback" network interface,
|
||||
and is not receiving mDNS queries from new clients that were not
|
||||
listening when UxPlay started.
|
||||
|
||||
To check this, after starting uxplay, use the utility
|
||||
`avahi-browse -a -t` in a different terminal window on the server to
|
||||
verify that the UxPlay AirTunes and AirPlay services are correctly
|
||||
registered (only the AirTunes service is used in the "Legacy" AirPlay
|
||||
Mirror mode used by UxPlay, but the AirPlay service is used for the
|
||||
initial contact).
|
||||
|
||||
The results returned by avahi-browse should show entries for uxplay like
|
||||
|
||||
@@ -934,8 +1006,9 @@ host is probably blocking full DNS-SD service, and you need to open the
|
||||
default UDP port 5353 for mDNS requests, as loopback-based DNS-SD
|
||||
service is unreliable.
|
||||
|
||||
If the UxPlay service is listed by avahi-browse, but is not seen by the
|
||||
client, the problem is likely to be a problem with the local network.
|
||||
If the UxPlay services are listed by avahi-browse as above, but are not
|
||||
seen by the client, the problem is likely to be a problem with the local
|
||||
network.
|
||||
|
||||
### 2. uxplay starts, but stalls after "Initialized server socket(s)" appears, *with the server name showing on the client* (but the client fails to connect when the UxPlay server is selected).
|
||||
|
||||
@@ -967,13 +1040,17 @@ on your system). A different reason for no audio occurred when a user
|
||||
with a firewall only opened two udp network ports: **three** are
|
||||
required (the third one receives the audio data).
|
||||
|
||||
**Raspberry Pi** devices (-rpi option) only work with hardware GPU
|
||||
decoding if the Video4Linux2 plugin in GStreamer v1.20.x or earlier has
|
||||
been patched (see the UxPlay
|
||||
**Raspberry Pi** devices only work with hardware GPU h264 video decoding
|
||||
if the Video4Linux2 plugin in GStreamer v1.20.x or earlier has been
|
||||
patched (see the UxPlay
|
||||
[Wiki](https://github.com/FDH2/UxPlay/wiki/Gstreamer-Video4Linux2-plugin-patches)
|
||||
for patches). This may be fixed in the future when GStreamer-1.22 is
|
||||
released, or by backport patches in distributions such as Raspberry Pi
|
||||
OS (Bullseye).
|
||||
for patches). This is fixed in GStreamer-1.22, and by backport patches
|
||||
from this in distributions such as Raspberry Pi OS (Bullseye): **use
|
||||
option `-bt709` with the GStreamer-1.18.4 from Raspberry Pi OS**. This
|
||||
also needs the bcm2835-codec kernel module that is not in the standard
|
||||
Linux kernel (it is available in Raspberry Pi OS, Ubuntu and Manjaro).
|
||||
**If you do not have this kernel module, or GStreamer \< 1.22 is not
|
||||
patched, use options `-avdec -vsync` for software h264-decoding.**
|
||||
|
||||
Sometimes "autovideosink" may select the OpenGL renderer "glimagesink"
|
||||
which may not work correctly on your system. Try the options "-vs
|
||||
@@ -1016,6 +1093,34 @@ option "-nc" that leaves the video window open.
|
||||
|
||||
### 4. GStreamer issues (missing plugins, etc.):
|
||||
|
||||
If UxPlay fails to start, with a message that a required GStreamer
|
||||
plugin (such as "libav") was not found, first check with the GStreamer
|
||||
tool gst-inspect-1.0 to see what GStreamer knows is available. (You may
|
||||
need to install some additional GStreamer "tools" package to get
|
||||
gst-inspect-1.0). For, *e.g.* a libav problem, check with
|
||||
"`gst-inspect-1.0 libav`". If it is not shown as available to GStreamer,
|
||||
but your package manager shows the relevant package as installed (as one
|
||||
user found), try entirely removing and reinstalling the package. That
|
||||
user found that a solution to a "**Required gstreamer plugin 'libav' not
|
||||
found**" message that kept recurring was to clear the user's gstreamer
|
||||
cache with `rm -rf ~/.cache/gstreamer-1.0`.
|
||||
|
||||
If it fails to start with an error like '`no element "avdec_aac"`' this
|
||||
is because even though gstreamer-libav is installed, it is incomplete
|
||||
because some plugins are missing: "`gst-inspect-1.0 | grep avdec_aac`"
|
||||
will show if avdec_aac is available. Some distributions (RedHat, SUSE,
|
||||
etc) provide incomplete versions of libav because of patent issues with
|
||||
codecs used by certain plugins. In those cases there will be some "extra
|
||||
package" provider like [RPM fusion](https://rpmfusion.org) (RedHat) or
|
||||
[packman](http://packman.links2linux.org/) (SUSE) where you can get
|
||||
complete packages (your distribution will usually provide instructions
|
||||
for this). The packages needed may be "libav\*" or "ffmpeg\*" packages:
|
||||
the GStreamer libav plugin package does not contain any codecs itself,
|
||||
it just provides a way for GStreamer to use ffmpeg/libav codec libraries
|
||||
which must be installed separately. For similar reasons, distributions
|
||||
may ship incomplete packages of GStreamer "plugins-bad", which is where
|
||||
"license-problematical" plugins go.
|
||||
|
||||
To troubleshoot GStreamer execute "export GST_DEBUG=2" to set the
|
||||
GStreamer debug-level environment-variable in the terminal where you
|
||||
will run uxplay, so that you see warning and error messages; see
|
||||
@@ -1094,6 +1199,19 @@ other settings are set in `UxPlay/lib/dnssdint.h`.
|
||||
|
||||
# Changelog
|
||||
|
||||
1.63 2023-02-12 Reworked audio-video synchronization, with new options
|
||||
-vsync (for Mirror mode) and -async (for Audio-Only mode, to sync with
|
||||
client video). Option -vsync makes software h264 decoding of streamed
|
||||
videos with option -avdec viable on some recent Raspberry Pi models.
|
||||
Internal change: all times are now processed in nanoseconds units.
|
||||
Removed -ao option introduced in 1.62.
|
||||
|
||||
1.62 2023-01-18 Added Audio-only mode time offset -ao x to allow user
|
||||
synchronization of ALAC audio playing on the server with video, song
|
||||
lyrics, etc. playing on the client. x = 5.0 appears to be optimal in
|
||||
many cases. Quality fixes: cleanup in volume changes, timestamps, some
|
||||
bugfixes.
|
||||
|
||||
1.61 2022-12-30 Removed -t option (workaround for an Avahi issue,
|
||||
correctly solved by opening network port UDP 5353 in firewall). Remove
|
||||
-g debug flag from CMAKE_CFLAGS. Postpend (instead of prepend) build
|
||||
|
||||
@@ -10,7 +10,13 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2021-23
|
||||
*/
|
||||
|
||||
|
||||
#define SECOND_IN_NSECS 1000000000UL
|
||||
|
||||
#include <time.h>
|
||||
#ifdef _WIN32
|
||||
@@ -96,23 +102,23 @@ void byteutils_put_int(unsigned char* b, int offset, uint32_t value) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads an ntp timestamp and returns it as micro seconds since the Unix epoch
|
||||
* Reads an ntp timestamp and returns it as nano seconds since the Unix epoch
|
||||
*/
|
||||
uint64_t byteutils_get_ntp_timestamp(unsigned char *b, int offset) {
|
||||
uint64_t seconds = ntohl(((unsigned int) byteutils_get_int(b, offset))) - SECONDS_FROM_1900_TO_1970;
|
||||
uint64_t fraction = ntohl((unsigned int) byteutils_get_int(b, offset + 4));
|
||||
return (seconds * 1000000L) + ((fraction * 1000000L) >> 32);
|
||||
return (seconds * SECOND_IN_NSECS) + ((fraction * SECOND_IN_NSECS) >> 32);
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes a time given as micro seconds since the Unix time epoch as an ntp timestamp
|
||||
* Writes a time given as nano seconds since the Unix time epoch as an ntp timestamp
|
||||
* into the buffer at position offset
|
||||
*/
|
||||
void byteutils_put_ntp_timestamp(unsigned char *b, int offset, uint64_t us_since_1970) {
|
||||
uint64_t seconds = us_since_1970 / 1000000L;
|
||||
uint64_t microseconds = us_since_1970 % 1000000L;
|
||||
void byteutils_put_ntp_timestamp(unsigned char *b, int offset, uint64_t ns_since_1970) {
|
||||
uint64_t seconds = ns_since_1970 / SECOND_IN_NSECS;
|
||||
uint64_t nanoseconds = ns_since_1970 % SECOND_IN_NSECS;
|
||||
seconds += SECONDS_FROM_1900_TO_1970;
|
||||
uint64_t fraction = (microseconds << 32) / 1000000L;
|
||||
uint64_t fraction = (nanoseconds << 32) / SECOND_IN_NSECS;
|
||||
|
||||
// Write in big endian!
|
||||
byteutils_put_int(b, offset, htonl(seconds));
|
||||
|
||||
@@ -34,7 +34,6 @@
|
||||
#include <pthread.h>
|
||||
#endif
|
||||
|
||||
#include "memalign.h"
|
||||
#include "sockets.h"
|
||||
#include "threads.h"
|
||||
|
||||
|
||||
@@ -16,6 +16,9 @@
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*
|
||||
*===================================================================
|
||||
* modified by fduncanh 2021-2022
|
||||
*/
|
||||
|
||||
#include "crypto.h"
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2022
|
||||
*/
|
||||
|
||||
/* These defines allow us to compile on iOS */
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*==================================================================
|
||||
* modified by fduncanh 2022
|
||||
*/
|
||||
|
||||
#ifndef DNSSDINT_H
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*==================================================================
|
||||
* modified by fduncanh 2021-2022
|
||||
*/
|
||||
|
||||
#ifndef GLOBAL_H
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*==================================================================
|
||||
* modified by fduncanh 2021
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*===================================================================
|
||||
* modified by fduncanh 2022
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
/**
|
||||
* Copyright (C) 2011-2012 Juho Vähä-Herttua
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* This library is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*/
|
||||
|
||||
#ifndef MEMALIGN_H
|
||||
#define MEMALIGN_H
|
||||
|
||||
#if defined(WIN32)
|
||||
|
||||
#define SYSTEM_GET_PAGESIZE(ret) do {\
|
||||
SYSTEM_INFO si;\
|
||||
GetSystemInfo(&si);\
|
||||
ret = si.dwPageSize;\
|
||||
} while(0)
|
||||
#define SYSTEM_GET_TIME(ret) ret = timeGetTime()
|
||||
|
||||
#define ALIGNED_MALLOC(memptr, alignment, size) do {\
|
||||
char *ptr = malloc(sizeof(void*) + (size) + (alignment)-1);\
|
||||
memptr = NULL;\
|
||||
if (ptr) {\
|
||||
size_t ptrval = (size_t)ptr + sizeof(void*) + (alignment)-1;\
|
||||
ptrval = ptrval / (alignment) * (alignment);\
|
||||
memptr = (void *)ptrval;\
|
||||
*(((void **)memptr)-1) = ptr;\
|
||||
}\
|
||||
} while(0)
|
||||
#define ALIGNED_FREE(memptr) free(*(((void **)memptr)-1))
|
||||
|
||||
#else
|
||||
|
||||
#define SYSTEM_GET_PAGESIZE(ret) ret = sysconf(_SC_PAGESIZE)
|
||||
#define SYSTEM_GET_TIME(ret) do {\
|
||||
struct timeval tv;\
|
||||
gettimeofday(&tv, NULL);\
|
||||
ret = (unsigned int)(tv.tv_sec*1000 + tv.tv_usec/1000);\
|
||||
} while(0)
|
||||
|
||||
#define ALIGNED_MALLOC(memptr, alignment, size) if (posix_memalign((void **)&memptr, alignment, size)) memptr = NULL
|
||||
#define ALIGNED_FREE(memptr) free(memptr)
|
||||
|
||||
#endif
|
||||
|
||||
#endif
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*================================================================
|
||||
* modified by fduncanh 2022
|
||||
*/
|
||||
|
||||
#include "mirror_buffer.h"
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2022
|
||||
*/
|
||||
|
||||
#ifndef MIRROR_BUFFER_H
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*==================================================================
|
||||
* modified by fduncanh 2022
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
|
||||
@@ -11,6 +11,8 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*==================================================================
|
||||
* modified by fduncanh 2021
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
|
||||
10
lib/raop.c
10
lib/raop.c
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*===================================================================
|
||||
* modified by fduncanh 2021-23
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
@@ -60,6 +63,7 @@ struct raop_s {
|
||||
uint8_t overscanned;
|
||||
uint8_t clientFPSdata;
|
||||
|
||||
int audio_delay_micros;
|
||||
int max_ntp_timeouts;
|
||||
};
|
||||
|
||||
@@ -461,6 +465,7 @@ raop_init(int max_clients, raop_callbacks_t *callbacks) {
|
||||
raop->clientFPSdata = 0;
|
||||
|
||||
raop->max_ntp_timeouts = 0;
|
||||
raop->audio_delay_micros = 250000;
|
||||
|
||||
return raop;
|
||||
}
|
||||
@@ -519,6 +524,11 @@ int raop_set_plist(raop_t *raop, const char *plist_item, const int value) {
|
||||
} else if (strcmp(plist_item, "max_ntp_timeouts") == 0) {
|
||||
raop->max_ntp_timeouts = (value > 0 ? value : 0);
|
||||
if (raop->max_ntp_timeouts != value) retval = 1;
|
||||
} else if (strcmp(plist_item, "audio_delay_micros") == 0) {
|
||||
if (value >= 0 && value <= 10 * SECOND_IN_USECS) {
|
||||
raop->audio_delay_micros = value;
|
||||
}
|
||||
if (raop->audio_delay_micros != value) retval = 1;
|
||||
} else {
|
||||
retval = -1;
|
||||
}
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*===================================================================
|
||||
* modified by fduncanh 2021-23
|
||||
*/
|
||||
|
||||
#ifndef RAOP_H
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*==================================================================
|
||||
* modified by fduncanh 2021-2023
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
@@ -38,7 +41,8 @@ typedef struct {
|
||||
|
||||
/* RTP header */
|
||||
unsigned short seqnum;
|
||||
uint64_t timestamp;
|
||||
uint64_t rtp_timestamp;
|
||||
uint64_t ntp_timestamp;
|
||||
|
||||
/* Payload data */
|
||||
unsigned int payload_size;
|
||||
@@ -154,11 +158,11 @@ raop_buffer_decrypt(raop_buffer_t *raop_buffer, unsigned char *data, unsigned ch
|
||||
|
||||
if (DECRYPTION_TEST) {
|
||||
char *str = utils_data_to_string(data,12,12);
|
||||
printf("encrypted 12 byte header %s", str);
|
||||
logger_log(raop_buffer->logger, LOGGER_INFO, "encrypted 12 byte header %s", str);
|
||||
free(str);
|
||||
if (payload_size) {
|
||||
str = utils_data_to_string(&data[12],16,16);
|
||||
printf("len %d before decryption:\n%s", payload_size, str);
|
||||
logger_log(raop_buffer->logger, LOGGER_INFO, "len %d before decryption:\n%s", payload_size, str);
|
||||
free(str);
|
||||
}
|
||||
}
|
||||
@@ -181,18 +185,17 @@ raop_buffer_decrypt(raop_buffer_t *raop_buffer, unsigned char *data, unsigned ch
|
||||
case 0x20:
|
||||
break;
|
||||
default:
|
||||
printf("***ERROR AUDIO FRAME IS NOT AAC_ELD OR ALAC\n");
|
||||
logger_log(raop_buffer->logger, LOGGER_INFO, "***ERROR AUDIO FRAME IS NOT AAC_ELD OR ALAC");
|
||||
break;
|
||||
}
|
||||
if (DECRYPTION_TEST == 2) {
|
||||
printf("decrypted audio frame, len = %d\n", *outputlen);
|
||||
logger_log(raop_buffer->logger, LOGGER_INFO, "decrypted audio frame, len = %d", *outputlen);
|
||||
char *str = utils_data_to_string(output,payload_size,16);
|
||||
printf("%s",str);
|
||||
printf("\n");
|
||||
logger_log(raop_buffer->logger, LOGGER_INFO,"%s",str);
|
||||
free(str);
|
||||
} else {
|
||||
char *str = utils_data_to_string(output,16,16);
|
||||
printf("%d after \n%s\n", payload_size, str);
|
||||
logger_log(raop_buffer->logger, LOGGER_INFO, "%d after \n%s", payload_size, str);
|
||||
free(str);
|
||||
}
|
||||
}
|
||||
@@ -207,7 +210,7 @@ raop_buffer_decrypt(raop_buffer_t *raop_buffer, unsigned char *data, unsigned ch
|
||||
}
|
||||
|
||||
int
|
||||
raop_buffer_enqueue(raop_buffer_t *raop_buffer, unsigned char *data, unsigned short datalen, uint64_t timestamp, int use_seqnum) {
|
||||
raop_buffer_enqueue(raop_buffer_t *raop_buffer, unsigned char *data, unsigned short datalen, uint64_t *ntp_timestamp, uint64_t *rtp_timestamp, int use_seqnum) {
|
||||
unsigned char empty_packet_marker[] = { 0x00, 0x68, 0x34, 0x00 };
|
||||
assert(raop_buffer);
|
||||
|
||||
@@ -248,7 +251,8 @@ raop_buffer_enqueue(raop_buffer_t *raop_buffer, unsigned char *data, unsigned sh
|
||||
|
||||
/* Update the raop_buffer entry header */
|
||||
entry->seqnum = seqnum;
|
||||
entry->timestamp = timestamp;
|
||||
entry->rtp_timestamp = *rtp_timestamp;
|
||||
entry->ntp_timestamp = *ntp_timestamp;
|
||||
entry->filled = 1;
|
||||
|
||||
entry->payload_data = malloc(payload_size);
|
||||
@@ -269,7 +273,7 @@ raop_buffer_enqueue(raop_buffer_t *raop_buffer, unsigned char *data, unsigned sh
|
||||
}
|
||||
|
||||
void *
|
||||
raop_buffer_dequeue(raop_buffer_t *raop_buffer, unsigned int *length, uint64_t *timestamp, unsigned short *seqnum, int no_resend) {
|
||||
raop_buffer_dequeue(raop_buffer_t *raop_buffer, unsigned int *length, uint64_t *ntp_timestamp, uint64_t *rtp_timestamp, unsigned short *seqnum, int no_resend) {
|
||||
assert(raop_buffer);
|
||||
|
||||
/* Calculate number of entries in the current buffer */
|
||||
@@ -301,7 +305,8 @@ raop_buffer_dequeue(raop_buffer_t *raop_buffer, unsigned int *length, uint64_t *
|
||||
entry->filled = 0;
|
||||
|
||||
/* Return entry payload buffer */
|
||||
*timestamp = entry->timestamp;
|
||||
*rtp_timestamp = entry->rtp_timestamp;
|
||||
*ntp_timestamp = entry->ntp_timestamp;
|
||||
*seqnum = entry->seqnum;
|
||||
*length = entry->payload_size;
|
||||
entry->payload_size = 0;
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*==================================================================
|
||||
* modified by fduncanh 2021-23
|
||||
*/
|
||||
|
||||
#ifndef RAOP_BUFFER_H
|
||||
@@ -25,8 +28,8 @@ typedef int (*raop_resend_cb_t)(void *opaque, unsigned short seqno, unsigned sho
|
||||
raop_buffer_t *raop_buffer_init(logger_t *logger,
|
||||
const unsigned char *aeskey,
|
||||
const unsigned char *aesiv);
|
||||
int raop_buffer_enqueue(raop_buffer_t *raop_buffer, unsigned char *data, unsigned short datalen, uint64_t timestamp, int use_seqnum);
|
||||
void *raop_buffer_dequeue(raop_buffer_t *raop_buffer, unsigned int *length, uint64_t *timestamp, unsigned short *seqnum, int no_resend);
|
||||
int raop_buffer_enqueue(raop_buffer_t *raop_buffer, unsigned char *data, unsigned short datalen, uint64_t *ntp_timestamp, uint64_t *rtp_timestamp, int use_seqnum);
|
||||
void *raop_buffer_dequeue(raop_buffer_t *raop_buffer, unsigned int *length, uint64_t *ntp_timestamp, uint64_t *rtp_timestamp, unsigned short *seqnum, int no_resend);
|
||||
void raop_buffer_handle_resends(raop_buffer_t *raop_buffer, raop_resend_cb_t resend_cb, void *opaque);
|
||||
void raop_buffer_flush(raop_buffer_t *raop_buffer, int next_seq);
|
||||
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*===================================================================
|
||||
* modified by fduncanh 2021-2023
|
||||
*/
|
||||
|
||||
/* This file should be only included from raop.c as it defines static handler
|
||||
@@ -20,6 +23,8 @@
|
||||
#include <ctype.h>
|
||||
#include <stdlib.h>
|
||||
#include <plist/plist.h>
|
||||
#define AUDIO_SAMPLE_RATE 44100 /* all supported AirPlay audio format use this sample rate */
|
||||
#define SECOND_IN_USECS 1000000
|
||||
|
||||
typedef void (*raop_handler_t)(raop_conn_t *, http_request_t *,
|
||||
http_response_t *, char **, int *);
|
||||
@@ -500,6 +505,8 @@ raop_handler_setup(raop_conn_t *conn,
|
||||
unsigned short cport = conn->raop->control_lport, dport = conn->raop->data_lport;
|
||||
unsigned short remote_cport = 0;
|
||||
unsigned char ct;
|
||||
unsigned int sr = AUDIO_SAMPLE_RATE; /* all AirPlay audio formats supported so far have sample rate 44.1kHz */
|
||||
|
||||
uint64_t uint_val = 0;
|
||||
plist_t req_stream_control_port_node = plist_dict_get_item(req_stream_node, "controlPort");
|
||||
plist_get_uint_val(req_stream_control_port_node, &uint_val);
|
||||
@@ -544,7 +551,7 @@ raop_handler_setup(raop_conn_t *conn,
|
||||
}
|
||||
|
||||
if (conn->raop_rtp) {
|
||||
raop_rtp_start_audio(conn->raop_rtp, use_udp, remote_cport, &cport, &dport, ct);
|
||||
raop_rtp_start_audio(conn->raop_rtp, use_udp, &remote_cport, &cport, &dport, &ct, &sr);
|
||||
logger_log(conn->raop->logger, LOGGER_DEBUG, "RAOP initialized success");
|
||||
} else {
|
||||
logger_log(conn->raop->logger, LOGGER_ERR, "RAOP not initialized at SETUP, playing will fail!");
|
||||
@@ -685,7 +692,10 @@ raop_handler_record(raop_conn_t *conn,
|
||||
http_request_t *request, http_response_t *response,
|
||||
char **response_data, int *response_datalen)
|
||||
{
|
||||
char audio_latency[12];
|
||||
unsigned int ad = (unsigned int) (((uint64_t) conn->raop->audio_delay_micros) * AUDIO_SAMPLE_RATE / SECOND_IN_USECS);
|
||||
sprintf(audio_latency, "%u", ad);
|
||||
logger_log(conn->raop->logger, LOGGER_DEBUG, "raop_handler_record");
|
||||
http_response_add_header(response, "Audio-Latency", "11025");
|
||||
http_response_add_header(response, "Audio-Latency", audio_latency);
|
||||
http_response_add_header(response, "Audio-Jack-Status", "connected; type=analog");
|
||||
}
|
||||
|
||||
@@ -11,6 +11,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2021-23
|
||||
*/
|
||||
|
||||
// Some of the code in here comes from https://github.com/juhovh/shairplay/pull/25/files
|
||||
@@ -22,7 +25,6 @@
|
||||
#include <stdbool.h>
|
||||
#ifdef _WIN32
|
||||
#define CAST (char *)
|
||||
#include <sys/time.h>
|
||||
#else
|
||||
#define CAST
|
||||
#endif
|
||||
@@ -34,6 +36,7 @@
|
||||
#include "byteutils.h"
|
||||
#include "utils.h"
|
||||
|
||||
#define SECOND_IN_NSECS 1000000000UL
|
||||
#define RAOP_NTP_DATA_COUNT 8
|
||||
#define RAOP_NTP_PHI_PPM 15ull // PPM
|
||||
#define RAOP_NTP_R_RHO ((1ull << 32) / 1000u) // packet precision
|
||||
@@ -285,7 +288,10 @@ raop_ntp_thread(void *arg)
|
||||
byteutils_put_ntp_timestamp(request, 24, send_time);
|
||||
int send_len = sendto(raop_ntp->tsock, (char *)request, sizeof(request), 0,
|
||||
(struct sockaddr *) &raop_ntp->remote_saddr, raop_ntp->remote_saddr_len);
|
||||
logger_log(raop_ntp->logger, LOGGER_DEBUG, "\nraop_ntp send_len = %d, now = %llu", send_len, send_time);
|
||||
char *str = utils_data_to_string(request, send_len, 16);
|
||||
logger_log(raop_ntp->logger, LOGGER_DEBUG, "\nraop_ntp send time type_t=%d send_len = %d, now = %8.6f\n%s",
|
||||
request[1] &~0x80, send_len, (double) send_time / SECOND_IN_NSECS, str);
|
||||
free(str);
|
||||
if (send_len < 0) {
|
||||
logger_log(raop_ntp->logger, LOGGER_ERR, "raop_ntp error sending request");
|
||||
} else {
|
||||
@@ -294,7 +300,7 @@ raop_ntp_thread(void *arg)
|
||||
(struct sockaddr *) &raop_ntp->remote_saddr, &raop_ntp->remote_saddr_len);
|
||||
if (response_len < 0) {
|
||||
timeout_counter++;
|
||||
char time[28];
|
||||
char time[30];
|
||||
int level = (timeout_counter == 1 ? LOGGER_DEBUG : LOGGER_ERR);
|
||||
ntp_timestamp_to_time(send_time, time, sizeof(time));
|
||||
logger_log(raop_ntp->logger, level, "raop_ntp receive timeout %d (limit %d) (request sent %s)",
|
||||
@@ -306,12 +312,7 @@ raop_ntp_thread(void *arg)
|
||||
} else {
|
||||
//local time of the server when the NTP response packet returns
|
||||
int64_t t3 = (int64_t) raop_ntp_get_local_time(raop_ntp);
|
||||
|
||||
timeout_counter = 0;
|
||||
char *str = utils_data_to_string(response, response_len, 16);
|
||||
logger_log(raop_ntp->logger, LOGGER_DEBUG, "raop_ntp receive time type_t=%d packetlen = %d\n%s",
|
||||
response[1] &~0x80, response_len, str);
|
||||
free(str);
|
||||
|
||||
// Local time of the server when the NTP request packet leaves the server
|
||||
int64_t t0 = (int64_t) byteutils_get_ntp_timestamp(response, 8);
|
||||
@@ -322,15 +323,20 @@ raop_ntp_thread(void *arg)
|
||||
// Local time of the client when the response message leaves the client
|
||||
int64_t t2 = (int64_t) byteutils_get_ntp_timestamp(response, 24);
|
||||
|
||||
// The iOS client device sends its time in micro seconds relative to an arbitrary Epoch (the last boot).
|
||||
// For a little bonus confusion, they add SECONDS_FROM_1900_TO_1970 * 1000000 us.
|
||||
char *str = utils_data_to_string(response, response_len, 16);
|
||||
logger_log(raop_ntp->logger, LOGGER_DEBUG, "raop_ntp receive time type_t=%d packetlen = %d, now = %8.6f t1 = %8.6f, t2 = %8.6f\n%s",
|
||||
response[1] &~0x80, response_len, (double) t3 / SECOND_IN_NSECS, (double) t1 / SECOND_IN_NSECS, (double) t2 / SECOND_IN_NSECS, str);
|
||||
free(str);
|
||||
|
||||
// The iOS client device sends its time in seconds relative to an arbitrary Epoch (the last boot).
|
||||
// For a little bonus confusion, they add SECONDS_FROM_1900_TO_1970.
|
||||
// This means we have to expect some rather huge offset, but its growth or shrink over time should be small.
|
||||
|
||||
raop_ntp->data_index = (raop_ntp->data_index + 1) % RAOP_NTP_DATA_COUNT;
|
||||
raop_ntp->data[raop_ntp->data_index].time = t3;
|
||||
raop_ntp->data[raop_ntp->data_index].offset = ((t1 - t0) + (t2 - t3)) / 2;
|
||||
raop_ntp->data[raop_ntp->data_index].delay = ((t3 - t0) - (t2 - t1));
|
||||
raop_ntp->data[raop_ntp->data_index].dispersion = RAOP_NTP_R_RHO + RAOP_NTP_S_RHO + (t3 - t0) * RAOP_NTP_PHI_PPM / 1000000u;
|
||||
raop_ntp->data[raop_ntp->data_index].dispersion = RAOP_NTP_R_RHO + RAOP_NTP_S_RHO + (t3 - t0) * RAOP_NTP_PHI_PPM / SECOND_IN_NSECS;
|
||||
|
||||
// Sort by delay
|
||||
memcpy(data_sorted, raop_ntp->data, sizeof(data_sorted));
|
||||
@@ -342,7 +348,7 @@ raop_ntp_thread(void *arg)
|
||||
|
||||
// Calculate dispersion
|
||||
for(int i = 0; i < RAOP_NTP_DATA_COUNT; ++i) {
|
||||
unsigned long long disp = raop_ntp->data[i].dispersion + (t3 - raop_ntp->data[i].time) * RAOP_NTP_PHI_PPM / 1000000u;
|
||||
unsigned long long disp = raop_ntp->data[i].dispersion + (t3 - raop_ntp->data[i].time) * RAOP_NTP_PHI_PPM / SECOND_IN_NSECS;
|
||||
dispersion += disp / two_pow_n[i];
|
||||
}
|
||||
|
||||
@@ -359,12 +365,10 @@ raop_ntp_thread(void *arg)
|
||||
}
|
||||
|
||||
// Sleep for 3 seconds
|
||||
struct timeval now;
|
||||
struct timespec wait_time;
|
||||
MUTEX_LOCK(raop_ntp->wait_mutex);
|
||||
gettimeofday(&now, NULL);
|
||||
wait_time.tv_sec = now.tv_sec + 3;
|
||||
wait_time.tv_nsec = now.tv_usec * 1000;
|
||||
clock_gettime(CLOCK_REALTIME, &wait_time);
|
||||
wait_time.tv_sec += 3;
|
||||
pthread_cond_timedwait(&raop_ntp->wait_cond, &raop_ntp->wait_mutex, &wait_time);
|
||||
MUTEX_UNLOCK(raop_ntp->wait_mutex);
|
||||
}
|
||||
@@ -457,54 +461,54 @@ raop_ntp_stop(raop_ntp_t *raop_ntp)
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts from a little endian ntp timestamp to micro seconds since the Unix epoch.
|
||||
* Converts from a little endian ntp timestamp to nano seconds since the Unix epoch.
|
||||
* Does the same thing as byteutils_get_ntp_timestamp, except its input is an uint64_t
|
||||
* and expected to already be in little endian.
|
||||
* Please note this just converts to a different representation, the clock remains the
|
||||
* same.
|
||||
*/
|
||||
uint64_t raop_ntp_timestamp_to_micro_seconds(uint64_t ntp_timestamp, bool account_for_epoch_diff) {
|
||||
uint64_t raop_ntp_timestamp_to_nano_seconds(uint64_t ntp_timestamp, bool account_for_epoch_diff) {
|
||||
uint64_t seconds = ((ntp_timestamp >> 32) & 0xffffffff) - (account_for_epoch_diff ? SECONDS_FROM_1900_TO_1970 : 0);
|
||||
uint64_t fraction = (ntp_timestamp & 0xffffffff);
|
||||
return (seconds * 1000000) + ((fraction * 1000000) >> 32);
|
||||
return (seconds * SECOND_IN_NSECS) + ((fraction * SECOND_IN_NSECS) >> 32);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current time in micro seconds according to the local wall clock.
|
||||
* Returns the current time in nano seconds according to the local wall clock.
|
||||
* The system Unix time is used as the local wall clock.
|
||||
*/
|
||||
uint64_t raop_ntp_get_local_time(raop_ntp_t *raop_ntp) {
|
||||
struct timespec time;
|
||||
clock_gettime(CLOCK_REALTIME, &time);
|
||||
return (uint64_t)time.tv_sec * 1000000L + (uint64_t)(time.tv_nsec / 1000);
|
||||
return ((uint64_t) time.tv_nsec) + (uint64_t) time.tv_sec * SECOND_IN_NSECS;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current time in micro seconds according to the remote wall clock.
|
||||
* Returns the current time in nano seconds according to the remote wall clock.
|
||||
*/
|
||||
uint64_t raop_ntp_get_remote_time(raop_ntp_t *raop_ntp) {
|
||||
MUTEX_LOCK(raop_ntp->sync_params_mutex);
|
||||
int64_t offset = raop_ntp->sync_offset;
|
||||
MUTEX_UNLOCK(raop_ntp->sync_params_mutex);
|
||||
return (uint64_t) ((int64_t) raop_ntp_get_local_time(raop_ntp)) + ((int64_t) offset);
|
||||
return (uint64_t) ((int64_t) raop_ntp_get_local_time(raop_ntp) + offset);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the local wall clock time in micro seconds for the given point in remote clock time
|
||||
* Returns the local wall clock time in nano seconds for the given point in remote clock time
|
||||
*/
|
||||
uint64_t raop_ntp_convert_remote_time(raop_ntp_t *raop_ntp, uint64_t remote_time) {
|
||||
MUTEX_LOCK(raop_ntp->sync_params_mutex);
|
||||
uint64_t offset = raop_ntp->sync_offset;
|
||||
int64_t offset = raop_ntp->sync_offset;
|
||||
MUTEX_UNLOCK(raop_ntp->sync_params_mutex);
|
||||
return (uint64_t) ((int64_t) remote_time) - ((int64_t) offset);
|
||||
return (uint64_t) ((int64_t) remote_time - offset);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the remote wall clock time in micro seconds for the given point in local clock time
|
||||
* Returns the remote wall clock time in nano seconds for the given point in local clock time
|
||||
*/
|
||||
uint64_t raop_ntp_convert_local_time(raop_ntp_t *raop_ntp, uint64_t local_time) {
|
||||
MUTEX_LOCK(raop_ntp->sync_params_mutex);
|
||||
uint64_t offset = raop_ntp->sync_offset;
|
||||
int64_t offset = raop_ntp->sync_offset;
|
||||
MUTEX_UNLOCK(raop_ntp->sync_params_mutex);
|
||||
return (uint64_t) ((int64_t) local_time) + ((int64_t) offset);
|
||||
return (uint64_t) ((int64_t) local_time + offset);
|
||||
}
|
||||
|
||||
@@ -11,6 +11,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2021-2023
|
||||
*/
|
||||
|
||||
#ifndef RAOP_NTP_H
|
||||
@@ -31,7 +34,7 @@ unsigned short raop_ntp_get_port(raop_ntp_t *raop_ntp);
|
||||
|
||||
void raop_ntp_destroy(raop_ntp_t *raop_rtp);
|
||||
|
||||
uint64_t raop_ntp_timestamp_to_micro_seconds(uint64_t ntp_timestamp, bool account_for_epoch_diff);
|
||||
uint64_t raop_ntp_timestamp_to_nano_seconds(uint64_t ntp_timestamp, bool account_for_epoch_diff);
|
||||
|
||||
uint64_t raop_ntp_get_local_time(raop_ntp_t *raop_ntp);
|
||||
uint64_t raop_ntp_get_remote_time(raop_ntp_t *raop_ntp);
|
||||
|
||||
227
lib/raop_rtp.c
227
lib/raop_rtp.c
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2021-2023
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
@@ -32,12 +35,11 @@
|
||||
|
||||
#define NO_FLUSH (-42)
|
||||
|
||||
#define RAOP_RTP_SAMPLE_RATE (44100.0 / 1000000.0)
|
||||
#define SECOND_IN_NSECS 1000000000
|
||||
#define RAOP_RTP_SYNC_DATA_COUNT 8
|
||||
#define SEC 1000000
|
||||
#define SEC SECOND_IN_NSECS
|
||||
|
||||
#define DELAY_AAC 500000 //empirical, matches audio latency of about -0.5 sec after first clock sync event
|
||||
#define DELAY_ALAC 2000000 //empirical, matches audio latency of about -2.0 sec after first clock sync event
|
||||
#define DELAY_AAC 0.275 //empirical, matches audio latency of about -0.25 sec after first clock sync event
|
||||
|
||||
/* note: it is unclear what will happen in the unlikely event that this code is running at the time of the unix-time
|
||||
* epoch event on 2038-01-19 at 3:14:08 UTC ! (but Apple will surely have removed AirPlay "legacy pairing" by then!) */
|
||||
@@ -53,7 +55,7 @@ struct raop_rtp_s {
|
||||
|
||||
// Time and sync
|
||||
raop_ntp_t *ntp;
|
||||
double rtp_sync_scale;
|
||||
double rtp_clock_rate;
|
||||
int64_t rtp_sync_offset;
|
||||
raop_rtp_sync_data_t sync_data[RAOP_RTP_SYNC_DATA_COUNT];
|
||||
int sync_data_index;
|
||||
@@ -163,7 +165,6 @@ raop_rtp_init(logger_t *logger, raop_callbacks_t *callbacks, raop_ntp_t *ntp, co
|
||||
raop_rtp->ntp = ntp;
|
||||
|
||||
raop_rtp->rtp_sync_offset = 0;
|
||||
raop_rtp->rtp_sync_scale = RAOP_RTP_SAMPLE_RATE;
|
||||
raop_rtp->sync_data_index = 0;
|
||||
for (int i = 0; i < RAOP_RTP_SYNC_DATA_COUNT; ++i) {
|
||||
raop_rtp->sync_data[i].ntp_time = 0;
|
||||
@@ -342,7 +343,7 @@ raop_rtp_process_events(raop_rtp_t *raop_rtp, void *cb_data)
|
||||
|
||||
/* Call set_volume callback if changed */
|
||||
if (volume_changed) {
|
||||
raop_buffer_flush(raop_rtp->buffer, flush);
|
||||
//raop_buffer_flush(raop_rtp->buffer, flush); /* seems to be unnecessary, may cause audio artefacts */
|
||||
if (raop_rtp->callbacks.audio_set_volume) {
|
||||
raop_rtp->callbacks.audio_set_volume(raop_rtp->callbacks.cls, volume);
|
||||
}
|
||||
@@ -388,34 +389,35 @@ raop_rtp_process_events(raop_rtp_t *raop_rtp, void *cb_data)
|
||||
return 0;
|
||||
}
|
||||
|
||||
void raop_rtp_sync_clock(raop_rtp_t *raop_rtp, uint64_t ntp_time, uint64_t rtp_time, int shift) {
|
||||
int latest;
|
||||
uint32_t valid_data_count = 0;
|
||||
valid_data_count = 0;
|
||||
double total_offsets = 0;
|
||||
int64_t rtp_offset, avg_offset, correction;
|
||||
void raop_rtp_sync_clock(raop_rtp_t *raop_rtp, uint64_t *ntp_time, uint64_t *rtp_time) {
|
||||
/* ntp_time = (uint64_t)(((int64_t)(raop_rtp->rtp_clock_rate * rtp_time)) + raop_rtp->rtp_sync_offset) */
|
||||
int latest, valid_data_count = 0;
|
||||
uint64_t ntp_sum = 0, rtp_sum = 0;
|
||||
double offset = ((double) *ntp_time) - raop_rtp->rtp_clock_rate * *rtp_time;
|
||||
int64_t correction = 0;
|
||||
|
||||
raop_rtp->sync_data_index = (raop_rtp->sync_data_index + 1) % RAOP_RTP_SYNC_DATA_COUNT;
|
||||
latest = raop_rtp->sync_data_index;
|
||||
raop_rtp->sync_data[latest].rtp_time = rtp_time;
|
||||
raop_rtp->sync_data[latest].ntp_time = ntp_time;
|
||||
raop_rtp->sync_data[latest].rtp_time = *rtp_time;
|
||||
raop_rtp->sync_data[latest].ntp_time = *ntp_time;
|
||||
|
||||
for (int i = 0; i < RAOP_RTP_SYNC_DATA_COUNT; i++) {
|
||||
if (raop_rtp->sync_data[i].ntp_time == 0) continue;
|
||||
rtp_offset = ((int64_t) raop_rtp->sync_data[i].rtp_time) - ((int64_t) raop_rtp->sync_data[latest].rtp_time);
|
||||
total_offsets += ((double) rtp_offset) / raop_rtp-> rtp_sync_scale;
|
||||
total_offsets -= (double) (((int64_t) raop_rtp->sync_data[i].ntp_time) - ((int64_t) raop_rtp->sync_data[latest].ntp_time));
|
||||
valid_data_count++;
|
||||
if (i == latest) continue;
|
||||
ntp_sum += *ntp_time - raop_rtp->sync_data[i].ntp_time;
|
||||
rtp_sum += *rtp_time - raop_rtp->sync_data[i].rtp_time;
|
||||
}
|
||||
total_offsets = (total_offsets / valid_data_count);
|
||||
rtp_offset = ((int64_t) raop_rtp->sync_data[latest].rtp_time) - ((int64_t) raop_rtp->rtp_start_time) + ((int64_t) shift);
|
||||
total_offsets += ((double) rtp_offset) / raop_rtp->rtp_sync_scale;
|
||||
avg_offset = (int64_t) total_offsets;
|
||||
avg_offset -= ((int64_t) raop_rtp->sync_data[latest].ntp_time) - ((int64_t) raop_rtp->ntp_start_time);
|
||||
correction = avg_offset - raop_rtp->rtp_sync_offset;
|
||||
raop_rtp->rtp_sync_offset = avg_offset;
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "raop_rtp sync correction=%lld, rtp_sync_offset = %8.6f ",
|
||||
correction, ((double) raop_rtp->rtp_sync_offset) / SEC);
|
||||
|
||||
if (valid_data_count > 1) {
|
||||
correction -= raop_rtp->rtp_sync_offset;
|
||||
offset += (((double) ntp_sum) - raop_rtp->rtp_clock_rate * rtp_sum) / valid_data_count;
|
||||
}
|
||||
raop_rtp->rtp_sync_offset = (int64_t) offset;
|
||||
correction += raop_rtp->rtp_sync_offset;
|
||||
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "dataset %d raop_rtp sync correction=%lld, rtp_sync_offset = %8.6f ",
|
||||
valid_data_count, correction, offset);
|
||||
}
|
||||
|
||||
uint64_t rtp64_time (raop_rtp_t *raop_rtp, const uint32_t *rtp32) {
|
||||
@@ -453,12 +455,12 @@ raop_rtp_thread_udp(void *arg)
|
||||
|
||||
/* for initial rtp to ntp conversions */
|
||||
bool have_synced = false;
|
||||
bool no_data_yet = true;
|
||||
unsigned char no_data_marker[] = {0x00, 0x68, 0x34, 0x00 };
|
||||
int rtp_count = 0;
|
||||
int64_t initial_offset = 0;
|
||||
double sync_adjustment = 0;
|
||||
int64_t delay = 0;
|
||||
uint64_t delay = 0;
|
||||
unsigned short seqnum1 = 0, seqnum2 = 0;
|
||||
bool offset_estimate_initialized = false;
|
||||
|
||||
assert(raop_rtp);
|
||||
raop_rtp->ntp_start_time = raop_ntp_get_local_time(raop_rtp->ntp);
|
||||
@@ -467,6 +469,8 @@ raop_rtp_thread_udp(void *arg)
|
||||
raop_rtp->sync_data[i].ntp_time = 0;
|
||||
}
|
||||
|
||||
int no_resend = (raop_rtp->control_rport == 0); /* true when control_rport is not set */
|
||||
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "raop_rtp start_time = %8.6f (raop_rtp audio)",
|
||||
((double) raop_rtp->ntp_start_time) / SEC);
|
||||
|
||||
@@ -520,8 +524,12 @@ raop_rtp_thread_udp(void *arg)
|
||||
if (resent_packetlen >= 12) {
|
||||
uint32_t timestamp = byteutils_get_int_be(resent_packet, 4);
|
||||
uint64_t rtp_time = rtp64_time(raop_rtp, ×tamp);
|
||||
uint64_t ntp_time = 0;
|
||||
if (have_synced) {
|
||||
ntp_time = (uint64_t) (raop_rtp->rtp_sync_offset + (int64_t) (raop_rtp->rtp_clock_rate * rtp_time));
|
||||
}
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "raop_rtp resent audio packet: seqnum=%u", seqnum);
|
||||
int result = raop_buffer_enqueue(raop_rtp->buffer, resent_packet, resent_packetlen, rtp_time, 1);
|
||||
int result = raop_buffer_enqueue(raop_rtp->buffer, resent_packet, resent_packetlen, &ntp_time, &rtp_time, 1);
|
||||
assert(result >= 0);
|
||||
} else {
|
||||
/* type_c = 0x56 packets with length 8 have been reported */
|
||||
@@ -543,32 +551,20 @@ raop_rtp_thread_udp(void *arg)
|
||||
// The unit for the rtp clock is 1 / sample rate = 1 / 44100
|
||||
uint32_t sync_rtp = byteutils_get_int_be(packet, 4);
|
||||
uint64_t sync_rtp64 = rtp64_time(raop_rtp, &sync_rtp);
|
||||
|
||||
|
||||
if (have_synced == false) {
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "first audio rtp sync");
|
||||
have_synced = true;
|
||||
}
|
||||
uint64_t sync_ntp_raw = byteutils_get_long_be(packet, 8);
|
||||
uint64_t sync_ntp_remote = raop_ntp_timestamp_to_micro_seconds(sync_ntp_raw, true);
|
||||
uint64_t sync_ntp_remote = raop_ntp_timestamp_to_nano_seconds(sync_ntp_raw, true);
|
||||
uint64_t sync_ntp_local = raop_ntp_convert_remote_time(raop_rtp->ntp, sync_ntp_remote);
|
||||
int64_t shift;
|
||||
switch (raop_rtp->ct) {
|
||||
case 0x08: /*AAC-ELD */
|
||||
shift = -11025; /* 44100/4 */
|
||||
break;
|
||||
case 0x02:
|
||||
default:
|
||||
shift = 0; /* not needed for ALAC (audio only) */
|
||||
break;
|
||||
}
|
||||
char *str = utils_data_to_string(packet, packetlen, 20);
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG,
|
||||
"raop_rtp sync: client ntp=%8.6f, ntp = %8.6f, ntp_start_time %8.6f, sync_rtp=%u\n%s",
|
||||
((double) sync_ntp_remote) / SEC, ((double)sync_ntp_local) / SEC,
|
||||
((double) raop_rtp->ntp_start_time) / SEC, sync_rtp, str);
|
||||
"raop_rtp sync: client ntp=%8.6f, ntp = %8.6f, ntp_start_time %8.6f\nts_client = %8.6f sync_rtp=%u\n%s",
|
||||
(double) sync_ntp_remote / SEC, (double) sync_ntp_local / SEC,
|
||||
(double) raop_rtp->ntp_start_time / SEC, (double) sync_ntp_remote / SEC, sync_rtp, str);
|
||||
free(str);
|
||||
raop_rtp_sync_clock(raop_rtp, sync_ntp_local, sync_rtp64, shift);
|
||||
raop_rtp_sync_clock(raop_rtp, &sync_ntp_remote, &sync_rtp64);
|
||||
} else {
|
||||
char *str = utils_data_to_string(packet, packetlen, 16);
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "raop_rtp unknown udp control packet\n%s", str);
|
||||
@@ -606,9 +602,13 @@ raop_rtp_thread_udp(void *arg)
|
||||
* the same 32-byte encrypted payload which after decryption is the beginning of a
|
||||
* 32-byte ALAC packet, presumably with format information, but not actual audio data.
|
||||
* The secnum and rtp_timestamp in the packet header increment according to the same
|
||||
* pattern as ALAC packets with audio content */
|
||||
* pattern as ALAC packets with audio content */
|
||||
|
||||
if (FD_ISSET(raop_rtp->dsock, &rfds)) {
|
||||
/* The first ALAC packet with data seems to be decoded just before the first sync event
|
||||
* so its dequeuing should be delayed until the first rtp sync has occurred */
|
||||
|
||||
|
||||
if (FD_ISSET(raop_rtp->dsock, &rfds)) {
|
||||
//logger_log(raop_rtp->logger, LOGGER_INFO, "Would have data packet in queue");
|
||||
// Receiving audio data here
|
||||
saddrlen = sizeof(saddr);
|
||||
@@ -617,73 +617,87 @@ raop_rtp_thread_udp(void *arg)
|
||||
// rtp payload type
|
||||
//int type_d = packet[1] & ~0x80;
|
||||
//logger_log(raop_rtp->logger, LOGGER_DEBUG, "raop_rtp_thread_udp type_d 0x%02x, packetlen = %d", type_d, packetlen);
|
||||
if (packetlen >= 12) {
|
||||
int no_resend = (raop_rtp->control_rport == 0); /* true when control_rport is not set */
|
||||
uint32_t rtp_timestamp = byteutils_get_int_be(packet, 4);
|
||||
uint64_t rtp_time = rtp64_time(raop_rtp, &rtp_timestamp);
|
||||
if (have_synced == false) {
|
||||
/* until the first rtp sync occurs, we don't know the exact client ntp timestamp that matches the client rtp timestamp */
|
||||
int64_t sync_ntp = ((int64_t) raop_ntp_get_local_time(raop_rtp->ntp)) - ((int64_t) raop_rtp->ntp_start_time) ;
|
||||
|
||||
if (packetlen < 12) {
|
||||
char *str = utils_data_to_string(packet, packetlen, 16);
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "Received short type_d = 0x%2x packet with length %d:\n%s", packet[1] & ~0x80, packetlen, str);
|
||||
free (str);
|
||||
continue;
|
||||
}
|
||||
|
||||
uint32_t rtp_timestamp = byteutils_get_int_be(packet, 4);
|
||||
uint64_t rtp_time = rtp64_time(raop_rtp, &rtp_timestamp);
|
||||
uint64_t ntp_time = 0;
|
||||
|
||||
if (raop_rtp->ct == 2 && packetlen == 44) continue; /* ignore the ALAC packets with format information only. */
|
||||
|
||||
if (have_synced) {
|
||||
ntp_time = (uint64_t) (raop_rtp->rtp_sync_offset + (int64_t) (raop_rtp->rtp_clock_rate * rtp_time));
|
||||
} else if (packetlen == 16 && memcmp(packet + 12, no_data_marker, 4) == 0) {
|
||||
/* use the special "no_data" packet to help determine an initial offset before the first rtp sync.
|
||||
* until the first rtp sync occurs, we don't know the exact client ntp timestamp that matches the client rtp timestamp */
|
||||
if (no_data_yet) {
|
||||
int64_t sync_ntp = ((int64_t) raop_ntp_get_local_time(raop_rtp->ntp)) - ((int64_t) raop_rtp->ntp_start_time) ;
|
||||
int64_t sync_rtp = ((int64_t) rtp_time) - ((int64_t) raop_rtp->rtp_start_time);
|
||||
int64_t offset;
|
||||
unsigned short seqnum = byteutils_get_short_be(packet,2);
|
||||
if (!offset_estimate_initialized) {
|
||||
offset_estimate_initialized = true;
|
||||
switch (raop_rtp->ct) {
|
||||
case 0x02:
|
||||
delay = DELAY_ALAC; /* DELAY = 2000000 (2.0 sec) is empirical choice for ALAC */
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "Audio is ALAC: using initial latency estimate -%8.6f sec",
|
||||
((double) delay) / SEC);
|
||||
break;
|
||||
case 0x08:
|
||||
delay = DELAY_AAC; /* DELAY = 500000 (0.5 sec) is empirical choice for AAC-ELD */
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "Audio is AAC: using initial latency estimate -%8.6f sec",
|
||||
((double) delay ) / SEC);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
initial_offset = -(sync_ntp + delay);
|
||||
raop_rtp->rtp_sync_offset = initial_offset;
|
||||
sync_adjustment = 0;
|
||||
unsigned short seqnum = byteutils_get_short_be(packet, 2);
|
||||
if (rtp_count == 0) {
|
||||
sync_adjustment = ((double) sync_ntp);
|
||||
rtp_count = 1;
|
||||
seqnum1 = seqnum;
|
||||
seqnum2 = seqnum;
|
||||
}
|
||||
sync_ntp += delay;
|
||||
offset = -sync_ntp;
|
||||
if (seqnum2 != seqnum) { /* for AAC-ELD only use copy 3 of the 3 copies of each frame */
|
||||
if (seqnum2 != seqnum) { /* for AAC-ELD only use copy 1 of the 3 copies of each frame */
|
||||
rtp_count++;
|
||||
offset -= initial_offset;
|
||||
sync_adjustment += ((double) offset) + (((double) sync_rtp) / raop_rtp->rtp_sync_scale);
|
||||
raop_rtp->rtp_sync_offset = initial_offset + (int64_t) (sync_adjustment / rtp_count);
|
||||
//logger_log(raop_rtp->logger, LOGGER_DEBUG, "initial estimate of rtp_sync_offset %d secnum = %u: %8.6f",
|
||||
// rtp_count, seqnum, ((double) raop_rtp->rtp_sync_offset) / SEC);
|
||||
}
|
||||
sync_adjustment += (((double) sync_ntp) - raop_rtp->rtp_clock_rate * sync_rtp - sync_adjustment) / rtp_count;
|
||||
}
|
||||
seqnum2 = seqnum1;
|
||||
seqnum1 = seqnum;
|
||||
}
|
||||
int result = raop_buffer_enqueue(raop_rtp->buffer, packet, packetlen, rtp_time, 1);
|
||||
assert(result >= 0);
|
||||
// Render continuous buffer entries
|
||||
continue;
|
||||
} else {
|
||||
no_data_yet = false;
|
||||
}
|
||||
int result = raop_buffer_enqueue(raop_rtp->buffer, packet, packetlen, &ntp_time, &rtp_time, 1);
|
||||
assert(result >= 0);
|
||||
|
||||
if (raop_rtp->ct == 2 && !have_synced) {
|
||||
/* in ALAC Audio-only mode wait until the first sync before dequeing */
|
||||
continue;
|
||||
} else {
|
||||
// Render continuous buffer entries
|
||||
void *payload = NULL;
|
||||
unsigned int payload_size;
|
||||
unsigned short seqnum;
|
||||
uint64_t rtp64_timestamp;
|
||||
while ((payload = raop_buffer_dequeue(raop_rtp->buffer, &payload_size, &rtp64_timestamp, &seqnum, no_resend))) {
|
||||
double elapsed_time = (((double) (rtp64_timestamp - (uint64_t) raop_rtp->rtp_start_time)) / raop_rtp->rtp_sync_scale);
|
||||
uint64_t ntp_timestamp;
|
||||
|
||||
while ((payload = raop_buffer_dequeue(raop_rtp->buffer, &payload_size, &ntp_timestamp, &rtp64_timestamp, &seqnum, no_resend))) {
|
||||
audio_decode_struct audio_data;
|
||||
audio_data.data_len = payload_size;
|
||||
audio_data.data = payload;
|
||||
audio_data.ntp_time = raop_rtp->ntp_start_time + (uint64_t) elapsed_time;
|
||||
audio_data.ntp_time -= raop_rtp->rtp_sync_offset;
|
||||
audio_data.rtp_time = rtp64_timestamp;
|
||||
audio_data.seqnum = seqnum;
|
||||
audio_data.data_len = payload_size;
|
||||
audio_data.data = payload;
|
||||
audio_data.ct = raop_rtp->ct;
|
||||
if (have_synced) {
|
||||
if (ntp_timestamp == 0) {
|
||||
ntp_timestamp = (uint64_t) (raop_rtp->rtp_sync_offset + (int64_t) (raop_rtp->rtp_clock_rate * rtp64_timestamp));
|
||||
}
|
||||
audio_data.ntp_time_remote = ntp_timestamp;
|
||||
audio_data.ntp_time_local = raop_ntp_convert_remote_time(raop_rtp->ntp, audio_data.ntp_time_remote);
|
||||
audio_data.sync_status = 1;
|
||||
} else {
|
||||
double elapsed_time = raop_rtp->rtp_clock_rate * (rtp64_timestamp - raop_rtp->rtp_start_time) + sync_adjustment
|
||||
+ DELAY_AAC * SECOND_IN_NSECS;
|
||||
audio_data.ntp_time_local = raop_rtp->ntp_start_time + delay + (uint64_t) elapsed_time;
|
||||
audio_data.ntp_time_remote = raop_ntp_convert_local_time(raop_rtp->ntp, audio_data.ntp_time_local);
|
||||
audio_data.sync_status = 0;
|
||||
}
|
||||
raop_rtp->callbacks.audio_process(raop_rtp->callbacks.cls, raop_rtp->ntp, &audio_data);
|
||||
free(payload);
|
||||
uint64_t ntp_now = raop_ntp_get_local_time(raop_rtp->ntp);
|
||||
int64_t latency = ((int64_t) ntp_now) - ((int64_t) audio_data.ntp_time);
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "raop_rtp audio: now = %8.6f, npt = %8.6f, latency = %8.6f, rtp_time=%u seqnum = %u",
|
||||
((double) ntp_now ) / SEC, ((double) audio_data.ntp_time) / SEC, ((double) latency) / SEC, (uint32_t) rtp64_timestamp,
|
||||
int64_t latency = ((int64_t) ntp_now) - ((int64_t) audio_data.ntp_time_local);
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "raop_rtp audio: now = %8.6f, ntp = %8.6f, latency = %8.6f, rtp_time=%u seqnum = %u",
|
||||
(double) ntp_now / SEC, (double) audio_data.ntp_time_local / SEC, (double) latency / SEC, (uint32_t) rtp64_timestamp,
|
||||
seqnum);
|
||||
}
|
||||
|
||||
@@ -691,10 +705,6 @@ raop_rtp_thread_udp(void *arg)
|
||||
if (!no_resend) {
|
||||
raop_buffer_handle_resends(raop_rtp->buffer, raop_rtp_resend_callback, raop_rtp);
|
||||
}
|
||||
} else {
|
||||
char *str = utils_data_to_string(packet, packetlen, 16);
|
||||
logger_log(raop_rtp->logger, LOGGER_DEBUG, "Received short type_d = 0x%2x packet with length %d:\n%s", packet[1] & ~0x80, packetlen, str);
|
||||
free (str);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -711,15 +721,13 @@ raop_rtp_thread_udp(void *arg)
|
||||
|
||||
// Start rtp service, three udp ports
|
||||
void
|
||||
raop_rtp_start_audio(raop_rtp_t *raop_rtp, int use_udp, unsigned short control_rport,
|
||||
unsigned short *control_lport, unsigned short *data_lport, unsigned char ct)
|
||||
raop_rtp_start_audio(raop_rtp_t *raop_rtp, int use_udp, unsigned short *control_rport, unsigned short *control_lport,
|
||||
unsigned short *data_lport, unsigned char *ct, unsigned int *sr)
|
||||
{
|
||||
logger_log(raop_rtp->logger, LOGGER_INFO, "raop_rtp starting audio");
|
||||
int use_ipv6 = 0;
|
||||
|
||||
assert(raop_rtp);
|
||||
assert(control_lport);
|
||||
assert(data_lport);
|
||||
|
||||
MUTEX_LOCK(raop_rtp->run_mutex);
|
||||
if (raop_rtp->running || !raop_rtp->joined) {
|
||||
@@ -727,12 +735,13 @@ raop_rtp_start_audio(raop_rtp_t *raop_rtp, int use_udp, unsigned short control_r
|
||||
return;
|
||||
}
|
||||
|
||||
raop_rtp->ct = ct;
|
||||
raop_rtp->ct = *ct;
|
||||
raop_rtp->rtp_clock_rate = SECOND_IN_NSECS / *sr;
|
||||
|
||||
/* Initialize ports and sockets */
|
||||
raop_rtp->control_lport = *control_lport;
|
||||
raop_rtp->data_lport = *data_lport;
|
||||
raop_rtp->control_rport = control_rport;
|
||||
raop_rtp->control_rport = *control_rport;
|
||||
if (raop_rtp->remote_saddr.ss_family == AF_INET6) {
|
||||
use_ipv6 = 1;
|
||||
}
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*==================================================================
|
||||
* modified by fduncanh 2021-2023
|
||||
*/
|
||||
|
||||
#ifndef RAOP_RTP_H
|
||||
@@ -29,8 +32,8 @@ typedef struct raop_rtp_s raop_rtp_t;
|
||||
raop_rtp_t *raop_rtp_init(logger_t *logger, raop_callbacks_t *callbacks, raop_ntp_t *ntp, const unsigned char *remote,
|
||||
int remotelen, const unsigned char *aeskey, const unsigned char *aesiv);
|
||||
|
||||
void raop_rtp_start_audio(raop_rtp_t *raop_rtp, int use_udp, unsigned short control_rport,
|
||||
unsigned short *control_lport, unsigned short *data_lport, unsigned char ct);
|
||||
void raop_rtp_start_audio(raop_rtp_t *raop_rtp, int use_udp, unsigned short *control_rport, unsigned short *control_lport,
|
||||
unsigned short *data_lport, unsigned char *ct, unsigned int *sr);
|
||||
|
||||
void raop_rtp_set_volume(raop_rtp_t *raop_rtp, float volume);
|
||||
void raop_rtp_set_metadata(raop_rtp_t *raop_rtp, const char *data, int datalen);
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*==================================================================
|
||||
* modified by fduncanh 2021-2023
|
||||
*/
|
||||
|
||||
#include "raop_rtp_mirror.h"
|
||||
@@ -48,7 +51,8 @@
|
||||
#define CAST
|
||||
#endif
|
||||
|
||||
#define SEC 1000000
|
||||
#define SECOND_IN_NSECS 1000000000UL
|
||||
#define SEC SECOND_IN_NSECS
|
||||
|
||||
/* for MacOS, where SOL_TCP and TCP_KEEPIDLE are not defined */
|
||||
#if !defined(SOL_TCP) && defined(IPPROTO_TCP)
|
||||
@@ -198,6 +202,8 @@ raop_rtp_mirror_thread(void *arg)
|
||||
bool conn_reset = false;
|
||||
uint64_t ntp_timestamp_nal = 0;
|
||||
uint64_t ntp_timestamp_raw = 0;
|
||||
uint64_t ntp_timestamp_remote = 0;
|
||||
uint64_t ntp_timestamp_local = 0;
|
||||
unsigned char nal_start_code[4] = { 0x00, 0x00, 0x00, 0x01 };
|
||||
|
||||
#ifdef DUMP_H264
|
||||
@@ -308,7 +314,15 @@ raop_rtp_mirror_thread(void *arg)
|
||||
|
||||
/*packet[0:3] contains the payload size */
|
||||
int payload_size = byteutils_get_int(packet, 0);
|
||||
|
||||
char packet_description[13] = {0};
|
||||
char *p = packet_description;
|
||||
for (int i = 4; i < 8; i++) {
|
||||
sprintf(p, "%2.2x ", (unsigned int) packet[i]);
|
||||
p += 3;
|
||||
}
|
||||
ntp_timestamp_raw = byteutils_get_long(packet, 8);
|
||||
ntp_timestamp_remote = raop_ntp_timestamp_to_nano_seconds(ntp_timestamp_raw, false);
|
||||
|
||||
/* packet[4] appears to have one of three possible values: *
|
||||
* 0x00 : encrypted packet *
|
||||
* 0x01 : unencrypted packet with a SPS and a PPS NAL, sent initially, and also when *
|
||||
@@ -359,15 +373,13 @@ raop_rtp_mirror_thread(void *arg)
|
||||
// Conveniently, the video data is already stamped with the remote wall clock time,
|
||||
// so no additional clock syncing needed. The only thing odd here is that the video
|
||||
// ntp time stamps don't include the SECONDS_FROM_1900_TO_1970, so it's really just
|
||||
// counting micro seconds since last boot.
|
||||
ntp_timestamp_raw = byteutils_get_long(packet, 8);
|
||||
uint64_t ntp_timestamp_remote = raop_ntp_timestamp_to_micro_seconds(ntp_timestamp_raw, false);
|
||||
uint64_t ntp_timestamp = raop_ntp_convert_remote_time(raop_rtp_mirror->ntp, ntp_timestamp_remote);
|
||||
// counting nano seconds since last boot.
|
||||
|
||||
ntp_timestamp_local = raop_ntp_convert_remote_time(raop_rtp_mirror->ntp, ntp_timestamp_remote);
|
||||
uint64_t ntp_now = raop_ntp_get_local_time(raop_rtp_mirror->ntp);
|
||||
int64_t latency = ((int64_t) ntp_now) - ((int64_t) ntp_timestamp);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp video: now = %8.6f, ntp = %8.6f, latency = %8.6f",
|
||||
((double) ntp_now) / SEC, ((double) ntp_timestamp) / SEC, ((double) latency) / SEC);
|
||||
int64_t latency = ((int64_t) ntp_now) - ((int64_t) ntp_timestamp_local);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp video: now = %8.6f, ntp = %8.6f, latency = %8.6f, ts = %8.6f, %s",
|
||||
(double) ntp_now / SEC, (double) ntp_timestamp_local / SEC, (double) latency / SEC, (double) ntp_timestamp_remote / SEC, packet_description);
|
||||
|
||||
#ifdef DUMP_H264
|
||||
fwrite(payload, payload_size, 1, file_source);
|
||||
@@ -436,7 +448,8 @@ raop_rtp_mirror_thread(void *arg)
|
||||
#endif
|
||||
payload_decrypted = NULL;
|
||||
h264_decode_struct h264_data;
|
||||
h264_data.pts = ntp_timestamp;
|
||||
h264_data.ntp_time_local = ntp_timestamp_local;
|
||||
h264_data.ntp_time_remote = ntp_timestamp_remote;
|
||||
h264_data.nal_count = nalus_count; /*nal_count will be the number of nal units in the packet */
|
||||
h264_data.data_len = payload_size;
|
||||
h264_data.data = payload_out;
|
||||
@@ -453,11 +466,13 @@ raop_rtp_mirror_thread(void *arg)
|
||||
case 0x01:
|
||||
// The information in the payload contains an SPS and a PPS NAL
|
||||
// The sps_pps is not encrypted
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "\nReceived unencryted codec packet from client: payload_size %d header %s ts_client = %8.6f",
|
||||
payload_size, packet_description, (double) ntp_timestamp_remote / SEC);
|
||||
if (payload_size == 0) {
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "raop_rtp_mirror, discard type 0x01 packet with no payload");
|
||||
break;
|
||||
}
|
||||
ntp_timestamp_nal = byteutils_get_long(packet, 8);
|
||||
ntp_timestamp_nal = ntp_timestamp_raw;
|
||||
float width = byteutils_get_float(packet, 16);
|
||||
float height = byteutils_get_float(packet, 20);
|
||||
float width_source = byteutils_get_float(packet, 40);
|
||||
@@ -537,7 +552,8 @@ raop_rtp_mirror_thread(void *arg)
|
||||
|
||||
break;
|
||||
case 0x05:
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "\nReceived video streaming performance info packet from client");
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_DEBUG, "\nReceived video streaming performance info packet from client: payload_size %d header %s ts_raw = %llu",
|
||||
payload_size, packet_description, ntp_timestamp_raw);
|
||||
/* payloads with packet[4] = 0x05 have no timestamp, and carry video info from the client as a binary plist *
|
||||
* Sometimes (e.g, when the client has a locked screen), there is a 25kB trailer attached to the packet. *
|
||||
* This 25000 Byte trailer with unidentified content seems to be the same data each time it is sent. */
|
||||
@@ -566,7 +582,8 @@ raop_rtp_mirror_thread(void *arg)
|
||||
}
|
||||
break;
|
||||
default:
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_WARNING, "\nReceived unexpected TCP packet from client, packet[4] = 0x%2.2x", packet[4]);
|
||||
logger_log(raop_rtp_mirror->logger, LOGGER_WARNING, "\nReceived unexpected TCP packet from client, size %d, %s ts_raw = raw%llu",
|
||||
payload_size, packet_description, ntp_timestamp_raw);
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2021-2023
|
||||
*/
|
||||
|
||||
#ifndef RAOP_RTP_MIRROR_H
|
||||
|
||||
11
lib/stream.h
11
lib/stream.h
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2022-2023
|
||||
*/
|
||||
|
||||
#ifndef AIRPLAYSERVER_STREAM_H
|
||||
@@ -22,13 +25,17 @@ typedef struct {
|
||||
int nal_count;
|
||||
unsigned char *data;
|
||||
int data_len;
|
||||
uint64_t pts;
|
||||
uint64_t ntp_time_local;
|
||||
uint64_t ntp_time_remote;
|
||||
} h264_decode_struct;
|
||||
|
||||
typedef struct {
|
||||
unsigned char *data;
|
||||
unsigned char ct;
|
||||
int data_len;
|
||||
uint64_t ntp_time;
|
||||
int sync_status;
|
||||
uint64_t ntp_time_local;
|
||||
uint64_t ntp_time_remote;
|
||||
uint64_t rtp_time;
|
||||
unsigned short seqnum;
|
||||
} audio_decode_struct;
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2022
|
||||
*/
|
||||
|
||||
#ifndef THREADS_H
|
||||
|
||||
16
lib/utils.c
16
lib/utils.c
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*=================================================================
|
||||
* modified by fduncanh 2021-2022
|
||||
*/
|
||||
|
||||
#include <stdlib.h>
|
||||
@@ -18,6 +21,7 @@
|
||||
#include <assert.h>
|
||||
#include <time.h>
|
||||
#include <stdint.h>
|
||||
#define SECOND_IN_NSECS 1000000000UL
|
||||
|
||||
char *
|
||||
utils_strsep(char **stringp, const char *delim)
|
||||
@@ -215,21 +219,21 @@ char *utils_data_to_text(const char *data, int datalen) {
|
||||
}
|
||||
|
||||
void ntp_timestamp_to_time(uint64_t ntp_timestamp, char *timestamp, size_t maxsize) {
|
||||
time_t rawtime = (time_t) (ntp_timestamp / 1000000);
|
||||
time_t rawtime = (time_t) (ntp_timestamp / SECOND_IN_NSECS);
|
||||
struct tm ts = *localtime(&rawtime);
|
||||
assert(maxsize > 26);
|
||||
assert(maxsize > 29);
|
||||
#ifdef _WIN32 /*modification for compiling for Windows */
|
||||
strftime(timestamp, 20, "%Y-%m-%d %H:%M:%S", &ts);
|
||||
#else
|
||||
strftime(timestamp, 20, "%F %T", &ts);
|
||||
#endif
|
||||
snprintf(timestamp + 19, 8,".%6.6u", (unsigned int) ntp_timestamp % 1000000);
|
||||
snprintf(timestamp + 19, 11,".%9.9lu", (unsigned long) ntp_timestamp % SECOND_IN_NSECS);
|
||||
}
|
||||
|
||||
void ntp_timestamp_to_seconds(uint64_t ntp_timestamp, char *timestamp, size_t maxsize) {
|
||||
time_t rawtime = (time_t) (ntp_timestamp / 1000000);
|
||||
time_t rawtime = (time_t) (ntp_timestamp / SECOND_IN_NSECS);
|
||||
struct tm ts = *localtime(&rawtime);
|
||||
assert(maxsize > 9);
|
||||
assert(maxsize > 12);
|
||||
strftime(timestamp, 3, "%S", &ts);
|
||||
snprintf(timestamp + 2, 8,".%6.6u", (unsigned int) ntp_timestamp % 1000000);
|
||||
snprintf(timestamp + 2, 11,".%9.9lu", (unsigned long) ntp_timestamp % SECOND_IN_NSECS);
|
||||
}
|
||||
|
||||
@@ -10,6 +10,9 @@
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
*==================================================================
|
||||
* modified by fduncanh 2021-2022
|
||||
*/
|
||||
|
||||
#ifndef UTILS_H
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
cmake_minimum_required(VERSION 3.4.1)
|
||||
|
||||
set( ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/usr/local/lib/pkgconfig" ) # standard location for self-installed gstreamer
|
||||
if (APPLE )
|
||||
set( ENV{PKG_CONFIG_PATH} "/Library/FrameWorks/GStreamer.framework/Libraries/pkgconfig" ) # GStreamer.framework, preferred
|
||||
set( ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/usr/local/lib/pkgconfig" ) # Brew or self-installed gstreamer
|
||||
set( ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/opt/local/lib/pkgconfig/" ) # MacPorts
|
||||
message( "PKG_CONFIG_PATH (Apple, renderers) = " $ENV{PKG_CONFIG_PATH} )
|
||||
find_program( PKG_CONFIG_EXECUTABLE pkg-config PATHS /Library/FrameWorks/GStreamer.framework/Commands )
|
||||
set(PKG_CONFIG_EXECUTABLE ${PKG_CONFIG_EXECUTABLE} --define-prefix )
|
||||
else()
|
||||
set( ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/usr/local/lib/pkgconfig" ) # standard location for self-installed gstreamer
|
||||
endif()
|
||||
|
||||
find_package( PkgConfig REQUIRED )
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-23 F. Duncanh
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
@@ -27,14 +30,13 @@ extern "C" {
|
||||
#include <stdlib.h>
|
||||
#include <stdint.h>
|
||||
#include <stdbool.h>
|
||||
#include "../lib/raop_ntp.h"
|
||||
#include "../lib/logger.h"
|
||||
|
||||
bool gstreamer_init();
|
||||
void audio_renderer_init(logger_t *logger, const char* audiosink);
|
||||
void audio_renderer_init(logger_t *logger, const char* audiosink, const bool *audio_sync, const bool *video_sync);
|
||||
void audio_renderer_start(unsigned char* compression_type);
|
||||
void audio_renderer_stop();
|
||||
void audio_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data_len,
|
||||
uint64_t ntp_time, uint64_t rtp_time, unsigned short seqnum);
|
||||
void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned short *seqnum, uint64_t *ntp_time);
|
||||
void audio_renderer_set_volume(float volume);
|
||||
void audio_renderer_flush();
|
||||
void audio_renderer_destroy();
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-23 F. Duncanh
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
@@ -21,6 +24,22 @@
|
||||
#include <gst/gst.h>
|
||||
#include <gst/app/gstappsrc.h>
|
||||
#include "audio_renderer.h"
|
||||
#define SECOND_IN_NSECS 1000000000UL
|
||||
|
||||
#define NFORMATS 2 /* set to 4 to enable AAC_LD and PCM: allowed, but never seen in real-world use */
|
||||
|
||||
static GstClockTime gst_audio_pipeline_base_time = GST_CLOCK_TIME_NONE;
|
||||
static logger_t *logger = NULL;
|
||||
const char * format[NFORMATS];
|
||||
|
||||
typedef struct audio_renderer_s {
|
||||
GstElement *appsrc;
|
||||
GstElement *pipeline;
|
||||
GstElement *volume;
|
||||
unsigned char ct;
|
||||
} audio_renderer_t ;
|
||||
static audio_renderer_t *renderer_type[NFORMATS];
|
||||
static audio_renderer_t *renderer = NULL;
|
||||
|
||||
/* GStreamer Caps strings for Airplay-defined audio compression types (ct) */
|
||||
|
||||
@@ -37,14 +56,6 @@ static const char aac_lc_caps[] ="audio/mpeg,mpegversion=(int)4,channnels=(int)2
|
||||
/* ct = 8; codec_data from MPEG v4 ISO 14996-3 Section 1.6.2.1: AAC_ELD 44100/2 spf = 480 */
|
||||
static const char aac_eld_caps[] ="audio/mpeg,mpegversion=(int)4,channnels=(int)2,rate=(int)44100,stream-format=raw,codec_data=(buffer)f8e85000";
|
||||
|
||||
typedef struct audio_renderer_s {
|
||||
GstElement *appsrc;
|
||||
GstElement *pipeline;
|
||||
GstElement *volume;
|
||||
unsigned char ct;
|
||||
} audio_renderer_t ;
|
||||
|
||||
|
||||
static gboolean check_plugins (void)
|
||||
{
|
||||
int i;
|
||||
@@ -70,33 +81,30 @@ static gboolean check_plugins (void)
|
||||
}
|
||||
|
||||
bool gstreamer_init(){
|
||||
gst_init(NULL,NULL);
|
||||
gst_init(NULL,NULL);
|
||||
return (bool) check_plugins ();
|
||||
}
|
||||
|
||||
#define NFORMATS 2 /* set to 4 to enable AAC_LD and PCM: allowed, but never seen in real-world use */
|
||||
static audio_renderer_t *renderer_type[NFORMATS];
|
||||
static audio_renderer_t *renderer = NULL;
|
||||
static logger_t *logger = NULL;
|
||||
const char * format[NFORMATS];
|
||||
|
||||
void audio_renderer_init(logger_t *render_logger, const char* audiosink) {
|
||||
void audio_renderer_init(logger_t *render_logger, const char* audiosink, const bool* audio_sync, const bool* video_sync) {
|
||||
GError *error = NULL;
|
||||
GstCaps *caps = NULL;
|
||||
GstClock *clock = gst_system_clock_obtain();
|
||||
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
|
||||
|
||||
logger = render_logger;
|
||||
|
||||
for (int i = 0; i < NFORMATS ; i++) {
|
||||
renderer_type[i] = (audio_renderer_t *) calloc(1,sizeof(audio_renderer_t));
|
||||
g_assert(renderer_type[i]);
|
||||
GString *launch = g_string_new("appsrc name=audio_source ! ");
|
||||
g_string_append(launch, "queue ! ");
|
||||
g_string_append(launch, "queue ");
|
||||
switch (i) {
|
||||
case 0: /* AAC-ELD */
|
||||
case 2: /* AAC-LC */
|
||||
g_string_append(launch, "avdec_aac ! ");
|
||||
g_string_append(launch, "! avdec_aac ! ");
|
||||
break;
|
||||
case 1: /* ALAC */
|
||||
g_string_append(launch, "avdec_alac ! ");
|
||||
g_string_append(launch, "! avdec_alac ! ");
|
||||
break;
|
||||
case 3: /*PCM*/
|
||||
break;
|
||||
@@ -107,15 +115,31 @@ void audio_renderer_init(logger_t *render_logger, const char* audiosink) {
|
||||
g_string_append (launch, "audioresample ! "); /* wasapisink must resample from 44.1 kHz to 48 kHz */
|
||||
g_string_append (launch, "volume name=volume ! level ! ");
|
||||
g_string_append (launch, audiosink);
|
||||
g_string_append (launch, " sync=false");
|
||||
switch(i) {
|
||||
case 1: /*ALAC*/
|
||||
if (*audio_sync) {
|
||||
g_string_append (launch, " sync=true");
|
||||
} else {
|
||||
g_string_append (launch, " sync=false");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (*video_sync) {
|
||||
g_string_append (launch, " sync=true");
|
||||
} else {
|
||||
g_string_append (launch, " sync=false");
|
||||
}
|
||||
break;
|
||||
}
|
||||
renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
|
||||
if (error) {
|
||||
g_error ("gst_parse_launch error (audio %d):\n %s\n", i+1, error->message);
|
||||
g_clear_error (&error);
|
||||
}
|
||||
g_string_free(launch, TRUE);
|
||||
|
||||
g_assert (renderer_type[i]->pipeline);
|
||||
|
||||
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer_type[i]->pipeline), clock);
|
||||
|
||||
renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "audio_source");
|
||||
renderer_type[i]->volume = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "volume");
|
||||
switch (i) {
|
||||
@@ -142,9 +166,12 @@ void audio_renderer_init(logger_t *render_logger, const char* audiosink) {
|
||||
default:
|
||||
break;
|
||||
}
|
||||
logger_log(logger, LOGGER_DEBUG, "supported audio format %d: %s",i+1,format[i]);
|
||||
logger_log(logger, LOGGER_DEBUG, "Audio format %d: %s",i+1,format[i]);
|
||||
logger_log(logger, LOGGER_DEBUG, "GStreamer audio pipeline %d: \"%s\"", i+1, launch->str);
|
||||
g_string_free(launch, TRUE);
|
||||
g_object_set(renderer_type[i]->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
|
||||
gst_caps_unref(caps);
|
||||
g_object_unref(clock);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -172,21 +199,30 @@ void audio_renderer_start(unsigned char *ct) {
|
||||
logger_log(logger, LOGGER_INFO, "changed audio connection, format %s", format[id]);
|
||||
renderer = renderer_type[id];
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
gst_audio_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
}
|
||||
} else if (compression_type) {
|
||||
logger_log(logger, LOGGER_INFO, "start audio connection, format %s", format[id]);
|
||||
renderer = renderer_type[id];
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
gst_audio_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "unknown audio compression type ct = %d", *ct);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void audio_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t ntp_time,
|
||||
uint64_t rtp_time, unsigned short seqnum) {
|
||||
void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned short *seqnum, uint64_t *ntp_time) {
|
||||
GstBuffer *buffer;
|
||||
bool valid;
|
||||
GstClockTime pts = (GstClockTime) *ntp_time ; /* now in nsecs */
|
||||
//GstClockTimeDiff latency = GST_CLOCK_DIFF(gst_element_get_current_clock_time (renderer->appsrc), pts);
|
||||
if (pts >= gst_audio_pipeline_base_time) {
|
||||
pts -= gst_audio_pipeline_base_time;
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "*** invalid ntp_time < gst_audio_pipeline_base_time\n%8.6f ntp_time\n%8.6f base_time",
|
||||
((double) *ntp_time) / SECOND_IN_NSECS, ((double) gst_audio_pipeline_base_time) / SECOND_IN_NSECS);
|
||||
return;
|
||||
}
|
||||
if (data_len == 0 || renderer == NULL) return;
|
||||
|
||||
/* all audio received seems to be either ct = 8 (AAC_ELD 44100/2 spf 460 ) AirPlay Mirror protocol *
|
||||
@@ -196,10 +232,11 @@ void audio_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data
|
||||
* but is 0x80, 0x81 or 0x82: 0x100000(00,01,10) in ios9, ios10 devices *
|
||||
* first byte of AAC_LC should be 0xff (ADTS) (but has never been seen). */
|
||||
|
||||
buffer = gst_buffer_new_and_alloc(data_len);
|
||||
buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
|
||||
g_assert(buffer != NULL);
|
||||
GST_BUFFER_PTS(buffer) = (GstClockTime) ntp_time;
|
||||
gst_buffer_fill(buffer, 0, data, data_len);
|
||||
//g_print("audio latency %8.6f\n", (double) latency / SECOND_IN_NSECS);
|
||||
GST_BUFFER_PTS(buffer) = pts;
|
||||
gst_buffer_fill(buffer, 0, data, *data_len);
|
||||
switch (renderer->ct){
|
||||
case 8: /*AAC-ELD*/
|
||||
switch (data[0]){
|
||||
@@ -257,4 +294,3 @@ void audio_renderer_destroy() {
|
||||
free(renderer_type[i]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-23 F. Duncanh
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
@@ -32,7 +35,6 @@ extern "C" {
|
||||
#include <stdint.h>
|
||||
#include <stdbool.h>
|
||||
#include "../lib/logger.h"
|
||||
#include "../lib/raop_ntp.h"
|
||||
|
||||
typedef enum videoflip_e {
|
||||
NONE,
|
||||
@@ -46,10 +48,11 @@ typedef enum videoflip_e {
|
||||
typedef struct video_renderer_s video_renderer_t;
|
||||
|
||||
void video_renderer_init (logger_t *logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
|
||||
const char *decoder, const char *converter, const char *videosink, const bool *fullscreen);
|
||||
const char *decoder, const char *converter, const char *videosink, const bool *fullscreen,
|
||||
const bool *video_sync);
|
||||
void video_renderer_start ();
|
||||
void video_renderer_stop ();
|
||||
void video_renderer_render_buffer (raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts, int nal_count);
|
||||
void video_renderer_render_buffer (unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time);
|
||||
void video_renderer_flush ();
|
||||
unsigned int video_renderer_listen(void *loop);
|
||||
void video_renderer_destroy ();
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-23 F. Duncanh
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
@@ -21,13 +24,23 @@
|
||||
#include <gst/gst.h>
|
||||
#include <gst/app/gstappsrc.h>
|
||||
|
||||
#define SECOND_IN_NSECS 1000000000UL
|
||||
#ifdef X_DISPLAY_FIX
|
||||
#include <gst/video/navigation.h>
|
||||
#include "x_display_fix.h"
|
||||
static bool fullscreen = false;
|
||||
static bool alt_keypress = false;
|
||||
#define MAX_X11_SEARCH_ATTEMPTS 5 /*should be less than 256 */
|
||||
static unsigned char X11_search_attempts;
|
||||
#endif
|
||||
|
||||
static video_renderer_t *renderer = NULL;
|
||||
static GstClockTime gst_video_pipeline_base_time = GST_CLOCK_TIME_NONE;
|
||||
static logger_t *logger = NULL;
|
||||
static unsigned short width, height, width_source, height_source; /* not currently used */
|
||||
static bool first_packet = false;
|
||||
|
||||
|
||||
struct video_renderer_s {
|
||||
GstElement *appsrc, *pipeline, *sink;
|
||||
GstBus *bus;
|
||||
@@ -94,18 +107,14 @@ static void append_videoflip (GString *launch, const videoflip_t *flip, const vi
|
||||
}
|
||||
}
|
||||
|
||||
static video_renderer_t *renderer = NULL;
|
||||
static logger_t *logger = NULL;
|
||||
static unsigned short width, height, width_source, height_source; /* not currently used */
|
||||
static bool first_packet = false;
|
||||
|
||||
/* apple uses colorimetry=1:3:5:1 (not recognized by gstreamer v4l2) *
|
||||
/* apple uses colorimetry=1:3:5:1 *
|
||||
* (not recognized by v4l2 plugin in Gstreamer < 1.20.4) *
|
||||
* See .../gst-libs/gst/video/video-color.h in gst-plugins-base *
|
||||
* range = 1 -> GST_VIDEO_COLOR_RANGE_0_255 ("full RGB") *
|
||||
* matrix = 3 -> GST_VIDEO_COLOR_MATRIX_BT709 *
|
||||
* transfer = 5 -> GST_VIDEO_TRANSFER_BT709 *
|
||||
* primaries = 1 -> GST_VIDEO_COLOR_PRIMARIES_BT709 *
|
||||
* closest is BT709, 2:3:5:1 with *
|
||||
* closest used by GStreamer < 1.20.4 is BT709, 2:3:5:1 with * *
|
||||
* range = 2 -> GST_VIDEO_COLOR_RANGE_16_235 ("limited RGB") */
|
||||
|
||||
static const char h264_caps[]="video/x-h264,stream-format=(string)byte-stream,alignment=(string)au";
|
||||
@@ -119,9 +128,13 @@ void video_renderer_size(float *f_width_source, float *f_height_source, float *f
|
||||
}
|
||||
|
||||
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
|
||||
const char *decoder, const char *converter, const char *videosink, const bool *initial_fullscreen) {
|
||||
const char *decoder, const char *converter, const char *videosink, const bool *initial_fullscreen,
|
||||
const bool *video_sync) {
|
||||
GError *error = NULL;
|
||||
GstCaps *caps = NULL;
|
||||
GstClock *clock = gst_system_clock_obtain();
|
||||
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
|
||||
|
||||
logger = render_logger;
|
||||
|
||||
/* this call to g_set_application_name makes server_name appear in the X11 display window title bar, */
|
||||
@@ -144,7 +157,12 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
|
||||
g_string_append(launch, " ! ");
|
||||
append_videoflip(launch, &videoflip[0], &videoflip[1]);
|
||||
g_string_append(launch, videosink);
|
||||
g_string_append(launch, " name=video_sink sync=false");
|
||||
g_string_append(launch, " name=video_sink");
|
||||
if (*video_sync) {
|
||||
g_string_append(launch, " sync=true");
|
||||
} else {
|
||||
g_string_append(launch, " sync=false");
|
||||
}
|
||||
logger_log(logger, LOGGER_DEBUG, "GStreamer video pipeline will be:\n\"%s\"", launch->str);
|
||||
renderer->pipeline = gst_parse_launch(launch->str, &error);
|
||||
if (error) {
|
||||
@@ -152,13 +170,15 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
|
||||
g_clear_error (&error);
|
||||
}
|
||||
g_assert (renderer->pipeline);
|
||||
g_string_free(launch, TRUE);
|
||||
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer->pipeline), clock);
|
||||
|
||||
renderer->appsrc = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "video_source");
|
||||
g_assert(renderer->appsrc);
|
||||
caps = gst_caps_from_string(h264_caps);
|
||||
g_object_set(renderer->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
|
||||
g_string_free(launch, TRUE);
|
||||
gst_caps_unref(caps);
|
||||
gst_object_unref(clock);
|
||||
|
||||
renderer->sink = gst_bin_get_by_name (GST_BIN (renderer->pipeline), "video_sink");
|
||||
g_assert(renderer->sink);
|
||||
@@ -168,15 +188,22 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
|
||||
renderer->server_name = server_name;
|
||||
renderer->gst_window = NULL;
|
||||
bool x_display_fix = false;
|
||||
if (strcmp(videosink,"autovideosink") == 0 ||
|
||||
strcmp(videosink,"ximagesink") == 0 ||
|
||||
strcmp(videosink,"xvimagesink") == 0) {
|
||||
/* only include X11 videosinks that provide fullscreen mode, or need ZOOMFIX */
|
||||
/* limit searching for X11 Windows in case autovideosink selects an incompatible videosink */
|
||||
if (strncmp(videosink,"autovideosink", strlen("autovideosink")) == 0 ||
|
||||
strncmp(videosink,"ximagesink", strlen("ximagesink")) == 0 ||
|
||||
strncmp(videosink,"xvimagesink", strlen("xvimagesink")) == 0 ||
|
||||
strncmp(videosink,"fpsdisplaysink", strlen("fpsdisplaysink")) == 0 ) {
|
||||
x_display_fix = true;
|
||||
}
|
||||
if (x_display_fix) {
|
||||
renderer->gst_window = calloc(1, sizeof(X11_Window_t));
|
||||
g_assert(renderer->gst_window);
|
||||
get_X11_Display(renderer->gst_window);
|
||||
if (!renderer->gst_window->display) {
|
||||
free(renderer->gst_window);
|
||||
renderer->gst_window = NULL;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_READY);
|
||||
@@ -194,12 +221,25 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, vide
|
||||
|
||||
void video_renderer_start() {
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
renderer->bus = gst_element_get_bus(renderer->pipeline);
|
||||
first_packet = true;
|
||||
#ifdef X_DISPLAY_FIX
|
||||
X11_search_attempts = 0;
|
||||
#endif
|
||||
}
|
||||
|
||||
void video_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data_len, uint64_t pts, int nal_count) {
|
||||
void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time) {
|
||||
GstBuffer *buffer;
|
||||
GstClockTime pts = (GstClockTime) *ntp_time; /*now in nsecs */
|
||||
//GstClockTimeDiff latency = GST_CLOCK_DIFF(gst_element_get_current_clock_time (renderer->appsrc), pts);
|
||||
if (pts >= gst_video_pipeline_base_time) {
|
||||
pts -= gst_video_pipeline_base_time;
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "*** invalid ntp_time < gst_video_pipeline_base_time\n%8.6f ntp_time\n%8.6f base_time",
|
||||
((double) *ntp_time) / SECOND_IN_NSECS, ((double) gst_video_pipeline_base_time) / SECOND_IN_NSECS);
|
||||
return;
|
||||
}
|
||||
g_assert(data_len != 0);
|
||||
/* first four bytes of valid h264 video data are 0x00, 0x00, 0x00, 0x01. *
|
||||
* nal_count is the number of NAL units in the data: short SPS, PPS, SEI NALs *
|
||||
@@ -212,19 +252,24 @@ void video_renderer_render_buffer(raop_ntp_t *ntp, unsigned char* data, int data
|
||||
logger_log(logger, LOGGER_INFO, "Begin streaming to GStreamer video pipeline");
|
||||
first_packet = false;
|
||||
}
|
||||
buffer = gst_buffer_new_and_alloc(data_len);
|
||||
buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
|
||||
g_assert(buffer != NULL);
|
||||
GST_BUFFER_PTS(buffer) = (GstClockTime) pts;
|
||||
gst_buffer_fill(buffer, 0, data, data_len);
|
||||
//g_print("video latency %8.6f\n", (double) latency / SECOND_IN_NSECS);
|
||||
GST_BUFFER_PTS(buffer) = pts;
|
||||
gst_buffer_fill(buffer, 0, data, *data_len);
|
||||
gst_app_src_push_buffer (GST_APP_SRC(renderer->appsrc), buffer);
|
||||
#ifdef X_DISPLAY_FIX
|
||||
if (renderer->gst_window && !(renderer->gst_window->window)) {
|
||||
if (renderer->gst_window && !(renderer->gst_window->window) && X11_search_attempts < MAX_X11_SEARCH_ATTEMPTS) {
|
||||
X11_search_attempts++;
|
||||
logger_log(logger, LOGGER_DEBUG, "Looking for X11 UxPlay Window, attempt %d", (int) X11_search_attempts);
|
||||
get_x_window(renderer->gst_window, renderer->server_name);
|
||||
if (renderer->gst_window->window) {
|
||||
logger_log(logger, LOGGER_INFO, "\n*** X11 Windows: Use key F11 or (left Alt)+Enter to toggle full-screen mode\n");
|
||||
if (fullscreen) {
|
||||
set_fullscreen(renderer->gst_window, &fullscreen);
|
||||
}
|
||||
} else if (X11_search_attempts == MAX_X11_SEARCH_ATTEMPTS) {
|
||||
logger_log(logger, LOGGER_DEBUG, "X11 UxPlay Window not found in %d search attempts", MAX_X11_SEARCH_ATTEMPTS);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-23 F. Duncanh
|
||||
*
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
|
||||
13
uxplay.1
13
uxplay.1
@@ -1,11 +1,11 @@
|
||||
.TH UXPLAY "1" "December 2022" "1.61" "User Commands"
|
||||
.TH UXPLAY "1" "February 2023" "1.63" "User Commands"
|
||||
.SH NAME
|
||||
uxplay \- start AirPlay server
|
||||
.SH SYNOPSIS
|
||||
.B uxplay
|
||||
[\fI\,-n name\/\fR] [\fI\,-s wxh\/\fR] [\fI\,-p \/\fR[\fI\,n\/\fR]] [more \fI OPTIONS \/\fR ...]
|
||||
.SH DESCRIPTION
|
||||
UxPlay 1.61: An open\-source AirPlay mirroring (+ audio streaming) server.
|
||||
UxPlay 1.63: An open\-source AirPlay mirroring (+ audio streaming) server.
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B
|
||||
@@ -13,7 +13,12 @@ UxPlay 1.61: An open\-source AirPlay mirroring (+ audio streaming) server.
|
||||
.TP
|
||||
\fB\-nh\fR Do \fBNOT\fR append "@\fIhostname\fR" at end of the AirPlay server name
|
||||
.TP
|
||||
.B
|
||||
\fB\-vsync\fR Mirror mode: sync audio to video (default: stream w/o sync)
|
||||
.TP
|
||||
\fB\-vsync\fI[x]\fR \fIx\fR is optional audio delay in millisecs, can be neg., decimal.
|
||||
.TP
|
||||
\fB\-async\fR[\fIx\fR] Audio-Only mode: sync audio to client video (default: no sync).
|
||||
.TP
|
||||
\fB\-s\fR wxh[@r]Set display resolution [refresh_rate] default 1920x1080[@60]
|
||||
.TP
|
||||
\fB\-o\fR Set display "overscanned" mode on (not usually needed)
|
||||
@@ -73,6 +78,8 @@ UxPlay 1.61: An open\-source AirPlay mirroring (+ audio streaming) server.
|
||||
.TP
|
||||
\fB\-as\fR 0 (or \fB\-a\fR) Turn audio off, streamed video only.
|
||||
.TP
|
||||
\fB\-al\fR x Audio latency in seconds (default 0.25) reported to client.
|
||||
.TP
|
||||
\fB\-ca\fI fn \fR In Airplay Audio (ALAC) mode, write cover-art to file fn.
|
||||
.TP
|
||||
\fB\-reset\fR n Reset after 3n seconds client silence (default 5, 0=never).
|
||||
|
||||
114
uxplay.cpp
114
uxplay.cpp
@@ -1,6 +1,9 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified extensively to become
|
||||
* UxPlay - An open-souce AirPlay mirroring server.
|
||||
* Modifications Copyright (C) 2021-23 F. Duncanh
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
@@ -51,8 +54,10 @@
|
||||
#include "renderers/video_renderer.h"
|
||||
#include "renderers/audio_renderer.h"
|
||||
|
||||
#define VERSION "1.61"
|
||||
#define VERSION "1.63"
|
||||
|
||||
#define SECOND_IN_USECS 1000000
|
||||
#define SECOND_IN_NSECS 1000000000UL
|
||||
#define DEFAULT_NAME "UxPlay"
|
||||
#define DEFAULT_DEBUG_LOG false
|
||||
#define LOWEST_ALLOWED_PORT 1024
|
||||
@@ -64,7 +69,10 @@ static std::string server_name = DEFAULT_NAME;
|
||||
static dnssd_t *dnssd = NULL;
|
||||
static raop_t *raop = NULL;
|
||||
static logger_t *render_logger = NULL;
|
||||
|
||||
static bool audio_sync = false;
|
||||
static bool video_sync = false;
|
||||
static int64_t audio_delay_alac = 0;
|
||||
static int64_t audio_delay_aac = 0;
|
||||
static bool relaunch_video = false;
|
||||
static bool reset_loop = false;
|
||||
static unsigned int open_connections= 0;
|
||||
@@ -73,6 +81,7 @@ static videoflip_t videoflip[2] = { NONE , NONE };
|
||||
static bool use_video = true;
|
||||
static unsigned char compression_type = 0;
|
||||
static std::string audiosink = "autoaudiosink";
|
||||
static int audiodelay = -1;
|
||||
static bool use_audio = true;
|
||||
static bool new_window_closing_behavior = true;
|
||||
static bool close_window;
|
||||
@@ -106,6 +115,7 @@ static bool bt709_fix = false;
|
||||
static int max_connections = 2;
|
||||
static unsigned short raop_port;
|
||||
static unsigned short airplay_port;
|
||||
static uint64_t remote_clock_offset = 0;
|
||||
|
||||
/* 95 byte png file with a 1x1 white square (single pixel): placeholder for coverart*/
|
||||
static const unsigned char empty_image[] = {
|
||||
@@ -357,6 +367,9 @@ static void print_info (char *name) {
|
||||
printf("Options:\n");
|
||||
printf("-n name Specify the network name of the AirPlay server\n");
|
||||
printf("-nh Do not add \"@hostname\" at the end of the AirPlay server name\n");
|
||||
printf("-vsync [x]Mirror mode: sync audio to video (default: stream w/o sync)\n");
|
||||
printf(" x is optional audio delay in millisecs, can be neg., decimal\n");
|
||||
printf("-async [x]Audio-Only mode: sync audio to client video (default: no sync)\n");
|
||||
printf("-s wxh[@r]Set display resolution [refresh_rate] default 1920x1080[@60]\n");
|
||||
printf("-o Set display \"overscanned\" mode on (not usually needed)\n");
|
||||
printf("-fs Full-screen (only works with X11, Wayland and VAAPI)\n");
|
||||
@@ -386,6 +399,7 @@ static void print_info (char *name) {
|
||||
printf(" some choices:pulsesink,alsasink,pipewiresink,jackaudiosink,\n");
|
||||
printf(" osssink,oss4sink,osxaudiosink,wasapisink,directsoundsink.\n");
|
||||
printf("-as 0 (or -a) Turn audio off, streamed video only\n");
|
||||
printf("-al x Audio latency in seconds (default 0.25) reported to client.\n");
|
||||
printf("-ca <fn> In Airplay Audio (ALAC) mode, write cover-art to file <fn>\n");
|
||||
printf("-reset n Reset after 3n seconds client silence (default %d, 0=never)\n", NTP_TIMEOUT_LIMIT);
|
||||
printf("-nc do Not Close video window when client stops mirroring\n");
|
||||
@@ -540,7 +554,7 @@ static void append_hostname(std::string &server_name) {
|
||||
#endif
|
||||
}
|
||||
|
||||
static void parse_arguments (int argc, char *argv[]) {
|
||||
static void parse_arguments (int argc, char *argv[]) {
|
||||
// Parse arguments
|
||||
for (int i = 1; i < argc; i++) {
|
||||
std::string arg(argv[i]);
|
||||
@@ -549,6 +563,36 @@ static void parse_arguments (int argc, char *argv[]) {
|
||||
server_name = std::string(argv[++i]);
|
||||
} else if (arg == "-nh") {
|
||||
do_append_hostname = false;
|
||||
} else if (arg == "-async") {
|
||||
audio_sync = true;
|
||||
if (i < argc - 1) {
|
||||
char *end;
|
||||
int n = (int) (strtof(argv[i + 1], &end) * 1000);
|
||||
if (*end == '\0') {
|
||||
i++;
|
||||
if (n > -SECOND_IN_USECS && n < SECOND_IN_USECS) {
|
||||
audio_delay_alac = n * 1000; /* units are nsecs */
|
||||
} else {
|
||||
fprintf(stderr, "invalid -async %s: requested delays must be smaller than +/- 1000 millisecs\n", argv[i] );
|
||||
exit (1);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (arg == "-vsync") {
|
||||
video_sync = true;
|
||||
if (i < argc - 1) {
|
||||
char *end;
|
||||
int n = (int) (strtof(argv[i + 1], &end) * 1000);
|
||||
if (*end == '\0') {
|
||||
i++;
|
||||
if (n > -SECOND_IN_USECS && n < SECOND_IN_USECS) {
|
||||
audio_delay_aac = n * 1000; /* units are nsecs */
|
||||
} else {
|
||||
fprintf(stderr, "invalid -vsync %s: requested delays must be smaller than +/- 1000 millisecs\n", argv[i]);
|
||||
exit (1);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (arg == "-s") {
|
||||
if (!option_has_value(i, argc, argv[i], argv[i+1])) exit(1);
|
||||
std::string value(argv[++i]);
|
||||
@@ -645,14 +689,14 @@ static void parse_arguments (int argc, char *argv[]) {
|
||||
video_converter = "videoconvert";
|
||||
} else if (arg == "-v4l2" || arg == "-rpi") {
|
||||
if (arg == "-rpi") {
|
||||
printf("*** -rpi no longer includes -bt709: add it if needed\n");
|
||||
LOGI("*** -rpi no longer includes -bt709: add it if needed");
|
||||
}
|
||||
video_decoder.erase();
|
||||
video_decoder = "v4l2h264dec";
|
||||
video_converter.erase();
|
||||
video_converter = "v4l2convert";
|
||||
} else if (arg == "-rpifb") {
|
||||
printf("*** -rpifb no longer includes -bt709: add it if needed\n");
|
||||
LOGI("*** -rpifb no longer includes -bt709: add it if needed");
|
||||
video_decoder.erase();
|
||||
video_decoder = "v4l2h264dec";
|
||||
video_converter.erase();
|
||||
@@ -660,7 +704,7 @@ static void parse_arguments (int argc, char *argv[]) {
|
||||
videosink.erase();
|
||||
videosink = "kmssink";
|
||||
} else if (arg == "-rpigl") {
|
||||
printf("*** -rpigl does not include -bt709: add it if needed\n");
|
||||
LOGI("*** -rpigl does not include -bt709: add it if needed");
|
||||
video_decoder.erase();
|
||||
video_decoder = "v4l2h264dec";
|
||||
video_converter.erase();
|
||||
@@ -668,7 +712,7 @@ static void parse_arguments (int argc, char *argv[]) {
|
||||
videosink.erase();
|
||||
videosink = "glimagesink";
|
||||
} else if (arg == "-rpiwl" ) {
|
||||
printf("*** -rpiwl no longer includes -bt709: add it if needed\n");
|
||||
LOGI("*** -rpiwl no longer includes -bt709: add it if needed");
|
||||
video_decoder.erase();
|
||||
video_decoder = "v4l2h264dec";
|
||||
video_converter.erase();
|
||||
@@ -728,15 +772,28 @@ static void parse_arguments (int argc, char *argv[]) {
|
||||
coverart_filename.erase();
|
||||
coverart_filename.append(argv[++i]);
|
||||
} else {
|
||||
LOGE("option -ca must be followed by a filename for cover-art output");
|
||||
fprintf(stderr,"option -ca must be followed by a filename for cover-art output\n");
|
||||
exit(1);
|
||||
}
|
||||
} else if (arg == "-bt709") {
|
||||
bt709_fix = true;
|
||||
} else if (arg == "-nohold") {
|
||||
max_connections = 3;
|
||||
} else {
|
||||
LOGE("unknown option %s, stopping\n",argv[i]);
|
||||
} else if (arg == "-al") {
|
||||
int n;
|
||||
char *end;
|
||||
if (i < argc - 1 && *argv[i+1] != '-') {
|
||||
n = (int) (strtof(argv[++i], &end) * SECOND_IN_USECS);
|
||||
if (*end == '\0' && n >=0 && n <= 10 * SECOND_IN_USECS) {
|
||||
audiodelay = n;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
fprintf(stderr, "invalid argument -al %s: must be a decimal time offset in seconds, range [0,10]\n"
|
||||
"(like 5 or 4.8, which will be converted to a whole number of microseconds)\n", argv[i]);
|
||||
exit(1);
|
||||
} else {
|
||||
fprintf(stderr, "unknown option %s, stopping\n",argv[i]);
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
@@ -751,7 +808,7 @@ static void process_metadata(int count, const char *dmap_tag, const unsigned cha
|
||||
printf("%d: dmap_tag [%s], %d\n", count, dmap_tag, datalen);
|
||||
}
|
||||
|
||||
/* String-type DMAP tags seen in Apple Music Radio are processed here. *
|
||||
/* UTF-8 String-type DMAP tags seen in Apple Music Radio are processed here. *
|
||||
* (DMAP tags "asal", "asar", "ascp", "asgn", "minm" ). TODO expand this */
|
||||
|
||||
if (datalen == 0) {
|
||||
@@ -828,6 +885,10 @@ static void process_metadata(int count, const char *dmap_tag, const unsigned cha
|
||||
printf("Format: ");
|
||||
} else if (strcmp (dmap_tag, "asgn") == 0) {
|
||||
printf("Genre: ");
|
||||
} else if (strcmp (dmap_tag, "asky") == 0) {
|
||||
printf("Keywords: ");
|
||||
} else if (strcmp (dmap_tag, "aslc") == 0) {
|
||||
printf("Long Content Description: ");
|
||||
} else {
|
||||
dmap_type = 0;
|
||||
}
|
||||
@@ -928,6 +989,7 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
// Server callbacks
|
||||
extern "C" void conn_init (void *cls) {
|
||||
open_connections++;
|
||||
@@ -939,6 +1001,9 @@ extern "C" void conn_destroy (void *cls) {
|
||||
//video_renderer_update_background(-1);
|
||||
open_connections--;
|
||||
//LOGD("Open connections: %i", open_connections);
|
||||
if (open_connections == 0) {
|
||||
remote_clock_offset = 0;
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" void conn_reset (void *cls, int timeouts, bool reset_video) {
|
||||
@@ -965,7 +1030,16 @@ extern "C" void audio_process (void *cls, raop_ntp_t *ntp, audio_decode_struct *
|
||||
dump_audio_to_file(data->data, data->data_len, (data->data)[0] & 0xf0);
|
||||
}
|
||||
if (use_audio) {
|
||||
audio_renderer_render_buffer(ntp, data->data, data->data_len, data->ntp_time, data->rtp_time, data->seqnum);
|
||||
if (!remote_clock_offset) {
|
||||
remote_clock_offset = data->ntp_time_local - data->ntp_time_remote;
|
||||
}
|
||||
data->ntp_time_remote = data->ntp_time_remote + remote_clock_offset;
|
||||
if (data->ct == 2 && audio_delay_alac) {
|
||||
data->ntp_time_remote = (uint64_t) ((int64_t) data->ntp_time_remote + audio_delay_alac);
|
||||
} else if (audio_delay_aac) {
|
||||
data->ntp_time_remote = (uint64_t) ((int64_t) data->ntp_time_remote + audio_delay_aac);
|
||||
}
|
||||
audio_renderer_render_buffer(data->data, &(data->data_len), &(data->seqnum), &(data->ntp_time_remote));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -974,7 +1048,11 @@ extern "C" void video_process (void *cls, raop_ntp_t *ntp, h264_decode_struct *d
|
||||
dump_video_to_file(data->data, data->data_len);
|
||||
}
|
||||
if (use_video) {
|
||||
video_renderer_render_buffer(ntp, data->data, data->data_len, data->pts, data->nal_count);
|
||||
if (!remote_clock_offset) {
|
||||
remote_clock_offset = data->ntp_time_local - data->ntp_time_remote;
|
||||
}
|
||||
data->ntp_time_remote = data->ntp_time_remote + remote_clock_offset;
|
||||
video_renderer_render_buffer(data->data, &(data->data_len), &(data->nal_count), &(data->ntp_time_remote));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1017,7 +1095,7 @@ extern "C" void audio_get_format (void *cls, unsigned char *ct, unsigned short *
|
||||
audio_type = type;
|
||||
|
||||
if (use_audio) {
|
||||
audio_renderer_start(ct);
|
||||
audio_renderer_start(ct);
|
||||
}
|
||||
|
||||
if (coverart_filename.length()) {
|
||||
@@ -1137,6 +1215,7 @@ int start_raop_server (unsigned short display[5], unsigned short tcp[3], unsigne
|
||||
|
||||
if (show_client_FPS_data) raop_set_plist(raop, "clientFPSdata", 1);
|
||||
raop_set_plist(raop, "max_ntp_timeouts", max_ntp_timeouts);
|
||||
if (audiodelay >= 0) raop_set_plist(raop, "audio_delay_micros", audiodelay);
|
||||
|
||||
/* network port selection (ports listed as "0" will be dynamically assigned) */
|
||||
raop_set_tcp_ports(raop, tcp);
|
||||
@@ -1259,14 +1338,14 @@ int main (int argc, char *argv[]) {
|
||||
logger_set_level(render_logger, debug_log ? LOGGER_DEBUG : LOGGER_INFO);
|
||||
|
||||
if (use_audio) {
|
||||
audio_renderer_init(render_logger, audiosink.c_str());
|
||||
audio_renderer_init(render_logger, audiosink.c_str(), &audio_sync, &video_sync);
|
||||
} else {
|
||||
LOGI("audio_disabled");
|
||||
}
|
||||
|
||||
if (use_video) {
|
||||
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
|
||||
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen);
|
||||
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen, &video_sync);
|
||||
video_renderer_start();
|
||||
}
|
||||
|
||||
@@ -1319,7 +1398,8 @@ int main (int argc, char *argv[]) {
|
||||
if (use_video && close_window) {
|
||||
video_renderer_destroy();
|
||||
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
|
||||
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen);
|
||||
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(), &fullscreen,
|
||||
&video_sync);
|
||||
video_renderer_start();
|
||||
}
|
||||
if (relaunch_video) {
|
||||
|
||||
Reference in New Issue
Block a user