From 08c6f0cdb06649df7fe8f8512f12dc5693d9caeb Mon Sep 17 00:00:00 2001
From: "F. Duncanh"
Date: Tue, 10 Dec 2024 01:36:32 -0500
Subject: [PATCH] UxPlay 1.71: add support for HLS streaming video
---
README.html | 79 ++-
README.md | 54 +-
README.txt | 77 ++-
lib/airplay_video.c | 305 +++++++++++
lib/airplay_video.h | 74 +++
lib/fcup_request.h | 112 ++++
lib/http_handlers.h | 1001 ++++++++++++++++++++++++++++++++++++
lib/http_request.c | 37 +-
lib/http_request.h | 5 +-
lib/http_response.c | 15 +
lib/http_response.h | 2 +
lib/httpd.c | 167 +++++-
lib/httpd.h | 9 +-
lib/raop.c | 332 +++++++++---
lib/raop.h | 40 +-
lib/raop_handlers.h | 8 +-
lib/utils.c | 11 +
lib/utils.h | 1 +
renderers/video_renderer.c | 433 ++++++++++++----
renderers/video_renderer.h | 15 +-
uxplay.1 | 6 +-
uxplay.cpp | 133 ++++-
uxplay.spec | 4 +-
23 files changed, 2559 insertions(+), 361 deletions(-)
create mode 100644 lib/airplay_video.c
create mode 100644 lib/airplay_video.h
create mode 100644 lib/fcup_request.h
create mode 100644 lib/http_handlers.h
diff --git a/README.html b/README.html
index 0315545..20814da 100644
--- a/README.html
+++ b/README.html
@@ -1,6 +1,6 @@
UxPlay
-1.70: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix
+id="uxplay-1.71-airplay-mirror-and-airplay-audio-server-for-linux-macos-and-unix-now-also-runs-on-windows.">UxPlay
+1.71: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix
(now also runs on Windows).
Now
@@ -9,23 +9,17 @@ href="https://github.com/FDH2/UxPlay">https://github.com/FDH2/UxPlay
(where ALL user issues should be posted, and latest versions can be
found).
-- NEW in v1.70: Support for 4k (h265) video with
-the new “-h265” option. (Recent Apple devices will send HEVC (h265)
-video in AirPlay mirror mode if larger resolutions (h >
-1080) are requested with UxPlay’s “-s wxh” option; wired ethernet
-connection is prefered to wireless in this mode, and may also be
-required by the client; the “-h265” option changes the default
-resolution from 1920x1080 to 3840x2160, but leaves default maximum
-framerate (“-fps” option) at 30fps.)
+- NEW in v1.71: Support for (YouTube) HLS (HTTP
+Live Streaming) video with the new “-hls” option. Click on the
+airplay icon in the YouTube app to stream video.
Highlights:
- GPLv3, open source.
- Originally supported only AirPlay Mirror protocol, now has added
support for AirPlay Audio-only (Apple Lossless ALAC) streaming from
-current iOS/iPadOS clients. There is no current support for
-Airplay HLS video-streaming (e.g., YouTube video) but this is in
-development.
+current iOS/iPadOS clients. Now with support for Airplay HLS
+video-streaming (currently only YouTube video).
- macOS computers (2011 or later, both Intel and “Apple Silicon” M1/M2
systems) can act either as AirPlay clients, or as the server running
UxPlay. Using AirPlay, UxPlay can emulate a second display for macOS
@@ -169,16 +163,15 @@ stops/restarts as you leave/re-enter Audio
Note that Apple video-DRM (as found in “Apple TV app”
content on the client) cannot be decrypted by UxPlay, and the Apple TV
app cannot be watched using UxPlay’s AirPlay Mirror mode (only the
-unprotected audio will be streamed, in AAC format), but both video and
-audio content from DRM-free apps like “YouTube app” will be streamed by
-UxPlay in Mirror mode.
-As UxPlay does not currently support non-Mirror AirPlay
-video streaming (where the client controls a web server on the AirPlay
-server that directly receives HLS content to avoid it being decoded and
-re-encoded by the client), using the icon for AirPlay video in apps such
-as the YouTube app will only send audio (in lossless ALAC format)
-without the accompanying video (there are plans to support HLS video in
-future releases of UxPlay)
+unprotected audio will be streamed, in AAC format).
+With the new “-hls” option, UxPlay now also supports
+non-Mirror AirPlay video streaming (where the client controls a web
+server on the AirPlay server that directly receives HLS content to avoid
+it being decoded and re-encoded by the client). This currently only
+supports streaming of YouTube videos. Without the -hls option, using the
+icon for AirPlay video in apps such as the YouTube app will only send
+audio (in lossless ALAC format) without the accompanying
+video.
Possibility
@@ -644,7 +637,7 @@ is present in Buster.
H265 (4K) video is potentially supported by
hardware decoding on Raspberry Pi 5 models, as well as on Raspberry Pi 4
model B, using a dedicated HEVC decoding block, but the “rpivid” kernel
-driver for this it not yet supported by GStreamer (this driver decodes
+driver for this is not yet supported by GStreamer (this driver decodes
video into a non-standard format that cannot be supported by GStreamer
until the driver is in the mainline Linux kernel). Raspberry Pi provides
a version of ffmpeg that can use that format, but at present UxPlay
@@ -746,31 +739,23 @@ not supply a complete GStreamer, but seems to have everything needed for
UxPlay). New: the UxPlay build script will now also detect
Homebrew installations in non-standard locations indicated by the
environment variable $HOMEBREW_PREFIX.
-Using GStreamer installed from MacPorts : (MacPorts
-is now again supplying current or recent Gstreamer). Before building
-UxPlay, install the MacPorts GStreamer with
-“sudo port install gstreamer1 gstreamer1-gst-plugins-base”.
-Plugins are installed by
-“sudo port install gstreamer1-gst-plugins-*” where
-“*” is “good”, “bad”, (and optionally “ugly”). For the
-libav plugin,
-“sudo port install ffmpeg6 [+nonfree] gstreamer1-gst-libav”
-(where “+nonfree” is optional, and makes linked GPL binaries
-non-distributable). Unfortunately, the current MacPorts GStreamer build
-(bug or feature?) does not provide the opengl plugin, so the only
-working videosink it provides is osxvideosink. (Hopefully this will be
-corrected). It is also possible to install an X11-based GStreamer
-with MacPorts, (add ” +x11” after “base”, “good” “bad” and “ugly” in the
-plugin names): for X11 support on macOS, compile UxPlay using a special
-cmake option -DUSE_X11=ON, and run it from an XQuartz
-terminal with -vs ximagesink; older non-retina macs require a lower
-resolution when using X11: uxplay -s 800x600.
+Using GStreamer installed from MacPorts: this is
+not recommended, as currently the MacPorts GStreamer is
+old (v1.16.2), unmaintained, and built to use X11:
- Instead build
gstreamer yourself if you use MacPorts and do not want to use the
-“Official” Gstreamer binaries or Macports packages.
+“Official” Gstreamer binaries.
+(If you really wish to use the MacPorts GStreamer-1.16.2, install
+pkgconf (“sudo port install pkgconf”), then “sudo port install
+gstreamer1-gst-plugins-base gstreamer1-gst-plugins-good
+gstreamer1-gst-plugins-bad gstreamer1-gst-libav”. For X11 support on
+macOS, compile UxPlay using a special cmake option
+-DUSE_X11=ON, and run it from an XQuartz terminal with -vs
+ximagesink; older non-retina macs require a lower resolution when using
+X11: uxplay -s 800x600.)
After installing GStreamer, build and install uxplay: open a terminal
and change into the UxPlay source directory (“UxPlay-master” for zipfile
downloads, “UxPlay” for “git clone” downloads) and build/install with
@@ -954,6 +939,10 @@ and some iPhones) can send h265 video if a resolution “-s wxh” with h
> 1080 is requested. The “-h265” option changes the default
resolution (“-s” option) from 1920x1080 to 3840x2160, and leaves default
maximum framerate (“-fps” option) at 30fps.
+-hls Activate HTTP Live Streaming support. With this
+option YouTube videos can be streamed directly from YouTube servers to
+UxPlay (without passing through the client) by clicking on the AirPlay
+icon in the YouTube app.
-pin [nnnn]: (since v1.67) use Apple-style
(one-time) “pin” authentication when a new client connects for the first
time: a four-digit pin code is displayed on the terminal, and the client
@@ -1590,6 +1579,8 @@ an AppleTV6,2 with sourceVersion 380.20.1 (an AppleTV 4K 1st gen,
introduced 2017, running tvOS 12.2.1), so it does not seem to matter
what version UxPlay claims to be.
Changelog
+1.71 2024-12-10 Add support for HTTP Live Streaming (HLS), initially
+only for YouTube movies
1.70 2024-10-04 Add support for 4K (h265) video (resolution 3840 x
2160). Fix issue with GStreamer >= 1.24 when client sleeps, then
wakes.
diff --git a/README.md b/README.md
index 40563bd..3bc3adc 100644
--- a/README.md
+++ b/README.md
@@ -1,19 +1,16 @@
-# UxPlay 1.70: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
+# UxPlay 1.71: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
### **Now developed at the GitHub site [https://github.com/FDH2/UxPlay](https://github.com/FDH2/UxPlay) (where ALL user issues should be posted, and latest versions can be found).**
- * _**NEW in v1.70**: Support for 4k (h265) video with the new "-h265" option._ (Recent Apple devices will send HEVC (h265) video in AirPlay mirror mode
- if larger resolutions (_h_ > 1080) are requested with UxPlay's "-s wxh" option; wired ethernet connection is prefered to
- wireless in this mode, and may also be required by the client;
- the "-h265" option changes the default resolution from 1920x1080 to 3840x2160, but leaves default maximum framerate ("-fps" option) at 30fps.)
-
+ * _**NEW in v1.71**: Support for (YouTube) HLS (HTTP Live Streaming) video with the new "-hls" option._ Click on the airplay icon in the YouTube app to stream video.
+
## Highlights:
* GPLv3, open source.
* Originally supported only AirPlay Mirror protocol, now has added support
for AirPlay Audio-only (Apple Lossless ALAC) streaming
- from current iOS/iPadOS clients. **There is no current support for Airplay HLS
- video-streaming (e.g., YouTube video) but this is in development.**
+ from current iOS/iPadOS clients. **Now with support for Airplay HLS
+ video-streaming (currently only YouTube video).**
* macOS computers (2011 or later, both Intel and "Apple Silicon" M1/M2
systems) can act either as AirPlay clients, or
as the server running UxPlay. Using AirPlay, UxPlay can
@@ -124,15 +121,14 @@ switch back by initiating a_ **Mirror** _mode connection; cover-art display stop
* **Note that Apple video-DRM
(as found in "Apple TV app" content on the client) cannot be decrypted by UxPlay, and
-the Apple TV app cannot be watched using UxPlay's AirPlay Mirror mode (only the unprotected audio will be streamed, in AAC format),
-but both video and audio content from DRM-free apps like "YouTube app" will be streamed by UxPlay in Mirror mode.**
+the Apple TV app cannot be watched using UxPlay's AirPlay Mirror mode (only the unprotected audio will be streamed, in AAC format).**
-* **As UxPlay does not currently support non-Mirror AirPlay video streaming (where the
+* **With the new "-hls" option, UxPlay now also supports non-Mirror AirPlay video streaming (where the
client controls a web server on the AirPlay server that directly receives
-HLS content to avoid it being decoded and re-encoded by the client),
-using the icon for AirPlay video in apps such as the YouTube app
+HLS content to avoid it being decoded and re-encoded by the client). This currently only supports streaming of YouTube videos.
+Without the -hls option, using the icon for AirPlay video in apps such as the YouTube app
will only send audio (in lossless ALAC format) without the accompanying
-video (there are plans to support HLS video in future releases of UxPlay)**
+video.**
### Possibility for using hardware-accelerated h264/h265 video-decoding, if available.
@@ -512,7 +508,7 @@ See [Usage](#usage) for more run-time options.
* **H265 (4K)** video is potentially supported by hardware decoding on Raspberry Pi 5 models, as well as
on Raspberry Pi 4 model B, using a dedicated HEVC decoding block, but the "rpivid" kernel driver for this
- it not yet supported by GStreamer (this driver decodes video into a non-standard format that cannot be supported
+ is not yet supported by GStreamer (this driver decodes video into a non-standard format that cannot be supported
by GStreamer until the driver is in the mainline Linux kernel). Raspberry Pi provides a version of ffmpeg that
can use that format, but at present UxPlay cannot use this. The best solution would be for the driver to be
"upstreamed" to the kernel, allowing GStreamer support. (Software HEVC decoding works, but does not seem to
@@ -591,18 +587,20 @@ their location (Homebrew does not supply a complete GStreamer, but seems to have
the environment variable `$HOMEBREW_PREFIX`.**
-**Using GStreamer installed from MacPorts** : (MacPorts is now again supplying current or recent Gstreamer).
-Before building UxPlay, install the MacPorts GStreamer with "`sudo port install gstreamer1 gstreamer1-gst-plugins-base`". Plugins are
-installed by "`sudo port install gstreamer1-gst-plugins-*`" where "`*`" is "good", "bad", (and optionally "ugly"). For the libav plugin,
-"`sudo port install ffmpeg6 [+nonfree] gstreamer1-gst-libav`" (where "+nonfree" is optional, and makes linked GPL binaries non-distributable).
-Unfortunately, the current MacPorts GStreamer build (bug or feature?) does not provide the opengl plugin, so the only working videosink it provides is
-osxvideosink. (Hopefully this will be corrected). _It is also possible to install an X11-based GStreamer with MacPorts, (add " +x11" after "base", "good"
-"bad" and "ugly" in the plugin names):
-for X11 support on macOS, compile UxPlay using a special cmake option `-DUSE_X11=ON`, and run it from an XQuartz terminal with -vs ximagesink;
-older non-retina macs require a lower resolution when using X11: `uxplay -s 800x600`._
+**Using GStreamer installed from MacPorts**: this is **not** recommended, as currently the MacPorts GStreamer
+is old (v1.16.2), unmaintained, and built to use X11:
* Instead [build gstreamer yourself](https://github.com/FDH2/UxPlay/wiki/Building-GStreamer-from-Source-on-macOS-with-MacPorts)
-if you use MacPorts and do not want to use the "Official" Gstreamer binaries or Macports packages.
+if you use MacPorts and do not want to use the "Official" Gstreamer binaries.
+
+_(If you really wish to use the MacPorts GStreamer-1.16.2,
+install pkgconf ("sudo port install pkgconf"), then
+"sudo port install gstreamer1-gst-plugins-base gstreamer1-gst-plugins-good gstreamer1-gst-plugins-bad gstreamer1-gst-libav".
+For X11 support on macOS, compile UxPlay using a special cmake option `-DUSE_X11=ON`, and run
+it from an XQuartz terminal with -vs ximagesink; older non-retina macs require a lower resolution
+when using X11: `uxplay -s 800x600`.)_
+
+
After installing GStreamer, build and install uxplay: open a terminal and change into the UxPlay source directory
("UxPlay-master" for zipfile downloads, "UxPlay" for "git clone" downloads) and build/install with
@@ -748,6 +746,10 @@ with "`#`" are treated as comments, and ignored. Command line options supersede
The "-h265" option changes the default resolution ("-s" option) from 1920x1080 to 3840x2160, and leaves default maximum
framerate ("-fps" option) at 30fps.
+**-hls** Activate HTTP Live Streaming support. With this option YouTube videos can be streamed directly from
+ YouTube servers to UxPlay (without passing through the client)
+ by clicking on the AirPlay icon in the YouTube app.
+
**-pin [nnnn]**: (since v1.67) use Apple-style (one-time) "pin" authentication when a new client connects for the first time: a four-digit pin code is
displayed on the terminal, and the client screen shows a login prompt for this to be entered. When "-pin" is used by itself, a new random
pin code is chosen for each authentication; if "-pin nnnn" (e.g., "-pin 3939") is used, this will set an unchanging fixed code. Authentication adds the server to the client's list of
@@ -1236,6 +1238,8 @@ tvOS 12.2.1), so it does not seem to matter what version UxPlay claims to be.
# Changelog
+1.71 2024-12-10 Add support for HTTP Live Streaming (HLS), initially only for YouTube movies
+
1.70 2024-10-04 Add support for 4K (h265) video (resolution 3840 x 2160). Fix issue
with GStreamer >= 1.24 when client sleeps, then wakes.
diff --git a/README.txt b/README.txt
index 2c3b56e..c6a563f 100644
--- a/README.txt
+++ b/README.txt
@@ -1,24 +1,18 @@
-# UxPlay 1.70: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
+# UxPlay 1.71: AirPlay-Mirror and AirPlay-Audio server for Linux, macOS, and Unix (now also runs on Windows).
### **Now developed at the GitHub site (where ALL user issues should be posted, and latest versions can be found).**
-- ***NEW in v1.70**: Support for 4k (h265) video with the new "-h265"
- option.* (Recent Apple devices will send HEVC (h265) video in
- AirPlay mirror mode if larger resolutions (*h* \> 1080) are
- requested with UxPlay's "-s wxh" option; wired ethernet connection
- is prefered to wireless in this mode, and may also be required by
- the client; the "-h265" option changes the default resolution from
- 1920x1080 to 3840x2160, but leaves default maximum framerate ("-fps"
- option) at 30fps.)
+- ***NEW in v1.71**: Support for (YouTube) HLS (HTTP Live Streaming)
+ video with the new "-hls" option.* Click on the airplay icon in the
+ YouTube app to stream video.
## Highlights:
- GPLv3, open source.
- Originally supported only AirPlay Mirror protocol, now has added
support for AirPlay Audio-only (Apple Lossless ALAC) streaming from
- current iOS/iPadOS clients. **There is no current support for
- Airplay HLS video-streaming (e.g., YouTube video) but this is in
- development.**
+ current iOS/iPadOS clients. **Now with support for Airplay HLS
+ video-streaming (currently only YouTube video).**
- macOS computers (2011 or later, both Intel and "Apple Silicon" M1/M2
systems) can act either as AirPlay clients, or as the server running
UxPlay. Using AirPlay, UxPlay can emulate a second display for macOS
@@ -159,17 +153,16 @@ stops/restarts as you leave/re-enter* **Audio** *mode.*
- **Note that Apple video-DRM (as found in "Apple TV app" content on
the client) cannot be decrypted by UxPlay, and the Apple TV app
cannot be watched using UxPlay's AirPlay Mirror mode (only the
- unprotected audio will be streamed, in AAC format), but both video
- and audio content from DRM-free apps like "YouTube app" will be
- streamed by UxPlay in Mirror mode.**
+ unprotected audio will be streamed, in AAC format).**
-- **As UxPlay does not currently support non-Mirror AirPlay video
- streaming (where the client controls a web server on the AirPlay
- server that directly receives HLS content to avoid it being decoded
- and re-encoded by the client), using the icon for AirPlay video in
- apps such as the YouTube app will only send audio (in lossless ALAC
- format) without the accompanying video (there are plans to support
- HLS video in future releases of UxPlay)**
+- **With the new "-hls" option, UxPlay now also supports non-Mirror
+ AirPlay video streaming (where the client controls a web server on
+ the AirPlay server that directly receives HLS content to avoid it
+ being decoded and re-encoded by the client). This currently only
+ supports streaming of YouTube videos. Without the -hls option, using
+ the icon for AirPlay video in apps such as the YouTube app will only
+ send audio (in lossless ALAC format) without the accompanying
+ video.**
### Possibility for using hardware-accelerated h264/h265 video-decoding, if available.
@@ -640,7 +633,7 @@ See [Usage](#usage) for more run-time options.
- **H265 (4K)** video is potentially supported by hardware decoding on
Raspberry Pi 5 models, as well as on Raspberry Pi 4 model B, using a
dedicated HEVC decoding block, but the "rpivid" kernel driver for
- this it not yet supported by GStreamer (this driver decodes video
+ this is not yet supported by GStreamer (this driver decodes video
into a non-standard format that cannot be supported by GStreamer
until the driver is in the mainline Linux kernel). Raspberry Pi
provides a version of ffmpeg that can use that format, but at
@@ -744,28 +737,22 @@ complete GStreamer, but seems to have everything needed for UxPlay).
installations in non-standard locations indicated by the environment
variable `$HOMEBREW_PREFIX`.**
-**Using GStreamer installed from MacPorts** : (MacPorts is now again
-supplying current or recent Gstreamer). Before building UxPlay, install
-the MacPorts GStreamer with
-"`sudo port install gstreamer1 gstreamer1-gst-plugins-base`". Plugins
-are installed by "`sudo port install gstreamer1-gst-plugins-*`" where
-"`*`" is "good", "bad", (and optionally "ugly"). For the libav plugin,
-"`sudo port install ffmpeg6 [+nonfree] gstreamer1-gst-libav`" (where
-"+nonfree" is optional, and makes linked GPL binaries
-non-distributable). Unfortunately, the current MacPorts GStreamer build
-(bug or feature?) does not provide the opengl plugin, so the only
-working videosink it provides is osxvideosink. (Hopefully this will be
-corrected). *It is also possible to install an X11-based GStreamer with
-MacPorts, (add " +x11" after "base", "good" "bad" and "ugly" in the
-plugin names): for X11 support on macOS, compile UxPlay using a special
-cmake option `-DUSE_X11=ON`, and run it from an XQuartz terminal with
--vs ximagesink; older non-retina macs require a lower resolution when
-using X11: `uxplay -s 800x600`.*
+**Using GStreamer installed from MacPorts**: this is **not**
+recommended, as currently the MacPorts GStreamer is old (v1.16.2),
+unmaintained, and built to use X11:
- Instead [build gstreamer
yourself](https://github.com/FDH2/UxPlay/wiki/Building-GStreamer-from-Source-on-macOS-with-MacPorts)
if you use MacPorts and do not want to use the "Official" Gstreamer
- binaries or Macports packages.
+ binaries.
+
+*(If you really wish to use the MacPorts GStreamer-1.16.2, install
+pkgconf ("sudo port install pkgconf"), then "sudo port install
+gstreamer1-gst-plugins-base gstreamer1-gst-plugins-good
+gstreamer1-gst-plugins-bad gstreamer1-gst-libav". For X11 support on
+macOS, compile UxPlay using a special cmake option `-DUSE_X11=ON`, and
+run it from an XQuartz terminal with -vs ximagesink; older non-retina
+macs require a lower resolution when using X11: `uxplay -s 800x600`.)*
After installing GStreamer, build and install uxplay: open a terminal
and change into the UxPlay source directory ("UxPlay-master" for zipfile
@@ -956,6 +943,11 @@ The "-h265" option changes the default resolution ("-s" option) from
1920x1080 to 3840x2160, and leaves default maximum framerate ("-fps"
option) at 30fps.
+**-hls** Activate HTTP Live Streaming support. With this option YouTube
+videos can be streamed directly from YouTube servers to UxPlay (without
+passing through the client) by clicking on the AirPlay icon in the
+YouTube app.
+
**-pin \[nnnn\]**: (since v1.67) use Apple-style (one-time) "pin"
authentication when a new client connects for the first time: a
four-digit pin code is displayed on the terminal, and the client screen
@@ -1628,6 +1620,9 @@ what version UxPlay claims to be.
# Changelog
+1.71 2024-12-10 Add support for HTTP Live Streaming (HLS), initially
+only for YouTube movies
+
1.70 2024-10-04 Add support for 4K (h265) video (resolution 3840 x
2160). Fix issue with GStreamer \>= 1.24 when client sleeps, then wakes.
diff --git a/lib/airplay_video.c b/lib/airplay_video.c
new file mode 100644
index 0000000..7a739bb
--- /dev/null
+++ b/lib/airplay_video.c
@@ -0,0 +1,305 @@
+/**
+ * Copyright (c) 2024 fduncanh
+ * All Rights Reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ */
+
+// it should only start and stop the media_data_store that handles all HLS transactions, without
+// otherwise participating in them.
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+#include <stdint.h>
+
+#include "raop.h"
+#include "airplay_video.h"
+
+struct media_item_s {
+ char *uri;
+ char *playlist;
+ int access;
+};
+
+struct airplay_video_s {
+ raop_t *raop;
+ char apple_session_id[37];
+ char playback_uuid[37];
+ char *uri_prefix;
+ char local_uri_prefix[23];
+ int next_uri;
+ int FCUP_RequestID;
+ float start_position_seconds;
+ playback_info_t *playback_info;
+ // The local port of the airplay server on the AirPlay server
+ unsigned short airplay_port;
+ char *master_uri;
+ char *master_playlist;
+ media_item_t *media_data_store;
+ int num_uri;
+};
+
+// initialize airplay_video service.
+int airplay_video_service_init(raop_t *raop, unsigned short http_port,
+ const char *session_id) {
+ char uri[] = "http://localhost:xxxxx";
+ assert(raop);
+
+ airplay_video_t *airplay_video = deregister_airplay_video(raop);
+ if (airplay_video) {
+ airplay_video_service_destroy(airplay_video);
+ }
+
+ airplay_video = (airplay_video_t *) calloc(1, sizeof(airplay_video_t));
+ if (!airplay_video) {
+ return -1;
+ }
+
+ /* create local_uri_prefix string */
+ strncpy(airplay_video->local_uri_prefix, uri, sizeof(airplay_video->local_uri_prefix));
+ char *ptr = strstr(airplay_video->local_uri_prefix, "xxxxx");
+ snprintf(ptr, 6, "%-5u", http_port);
+ ptr = strstr(airplay_video->local_uri_prefix, " ");
+ if (ptr) {
+ *ptr = '\0';
+ }
+
+ if (!register_airplay_video(raop, airplay_video)) {
+ return -2;
+ }
+
+ printf(" %p %p\n", airplay_video, get_airplay_video(raop));
+
+ airplay_video->raop = raop;
+
+
+ airplay_video->FCUP_RequestID = 0;
+
+
+ size_t len = strlen(session_id);
+ assert(len == 36);
+ strncpy(airplay_video->apple_session_id, session_id, len);
+ (airplay_video->apple_session_id)[len] = '\0';
+
+ airplay_video->start_position_seconds = 0.0f;
+
+ airplay_video->master_uri = NULL;
+ airplay_video->media_data_store = NULL;
+ airplay_video->master_playlist = NULL;
+ airplay_video->num_uri = 0;
+ airplay_video->next_uri = 0;
+ return 0;
+}
+
+// destroy the airplay_video service
+void
+airplay_video_service_destroy(airplay_video_t *airplay_video)
+{
+
+ if (airplay_video->uri_prefix) {
+ free(airplay_video->uri_prefix);
+ }
+ if (airplay_video->master_uri) {
+ free (airplay_video->master_uri);
+ }
+ if (airplay_video->media_data_store) {
+ destroy_media_data_store(airplay_video);
+ }
+ if (airplay_video->master_playlist) {
+ free (airplay_video->master_playlist);
+ }
+
+
+ free (airplay_video);
+}
+
+const char *get_apple_session_id(airplay_video_t *airplay_video) {
+ return airplay_video->apple_session_id;
+}
+
+float get_start_position_seconds(airplay_video_t *airplay_video) {
+ return airplay_video->start_position_seconds;
+}
+
+void set_start_position_seconds(airplay_video_t *airplay_video, float start_position_seconds) {
+ airplay_video->start_position_seconds = start_position_seconds;
+}
+
+void set_playback_uuid(airplay_video_t *airplay_video, const char *playback_uuid) {
+ size_t len = strlen(playback_uuid);
+ assert(len == 36);
+ memcpy(airplay_video->playback_uuid, playback_uuid, len);
+ (airplay_video->playback_uuid)[len] = '\0';
+}
+
+void set_uri_prefix(airplay_video_t *airplay_video, char *uri_prefix, int uri_prefix_len) {
+ if (airplay_video->uri_prefix) {
+ free (airplay_video->uri_prefix);
+ }
+ airplay_video->uri_prefix = (char *) calloc(uri_prefix_len + 1, sizeof(char));
+ memcpy(airplay_video->uri_prefix, uri_prefix, uri_prefix_len);
+}
+
+char *get_uri_prefix(airplay_video_t *airplay_video) {
+ return airplay_video->uri_prefix;
+}
+
+char *get_uri_local_prefix(airplay_video_t *airplay_video) {
+ return airplay_video->local_uri_prefix;
+}
+
+
+char *get_master_uri(airplay_video_t *airplay_video) {
+ return airplay_video->master_uri;
+}
+
+
+int get_next_FCUP_RequestID(airplay_video_t *airplay_video) {
+ return ++(airplay_video->FCUP_RequestID);
+}
+
+void set_next_media_uri_id(airplay_video_t *airplay_video, int num) {
+ airplay_video->next_uri = num;
+}
+
+int get_next_media_uri_id(airplay_video_t *airplay_video) {
+ return airplay_video->next_uri;
+}
+
+
+/* master playlist */
+
+void store_master_playlist(airplay_video_t *airplay_video, char *master_playlist) {
+ if (airplay_video->master_playlist) {
+ free (airplay_video->master_playlist);
+ }
+ airplay_video->master_playlist = master_playlist;
+}
+
+char *get_master_playlist(airplay_video_t *airplay_video) {
+ return airplay_video->master_playlist;
+}
+
+/* media_data_store */
+
+int get_num_media_uri(airplay_video_t *airplay_video) {
+ return airplay_video->num_uri;
+}
+
+void destroy_media_data_store(airplay_video_t *airplay_video) {
+ media_item_t *media_data_store = airplay_video->media_data_store;
+ if (media_data_store) {
+ for (int i = 0; i < airplay_video->num_uri ; i ++ ) {
+ if (media_data_store[i].uri) {
+ free (media_data_store[i].uri);
+ }
+ if (media_data_store[i].playlist) {
+ free (media_data_store[i].playlist);
+ }
+ }
+ }
+ free (media_data_store);
+ airplay_video->num_uri = 0;
+}
+
+void create_media_data_store(airplay_video_t * airplay_video, char ** uri_list, int num_uri) {
+ destroy_media_data_store(airplay_video);
+ media_item_t *media_data_store = calloc(num_uri, sizeof(media_item_t));
+ for (int i = 0; i < num_uri; i++) {
+ media_data_store[i].uri = uri_list[i];
+ media_data_store[i].playlist = NULL;
+ media_data_store[i].access = 0;
+ }
+ airplay_video->media_data_store = media_data_store;
+ airplay_video->num_uri = num_uri;
+}
+
+int store_media_data_playlist_by_num(airplay_video_t *airplay_video, char * media_playlist, int num) {
+ media_item_t *media_data_store = airplay_video->media_data_store;
+ if ( num < 0 || num >= airplay_video->num_uri) {
+ return -1;
+ } else if (media_data_store[num].playlist) {
+ return -2;
+ }
+ media_data_store[num].playlist = media_playlist;
+ return 0;
+}
+
+char * get_media_playlist_by_num(airplay_video_t *airplay_video, int num) {
+ media_item_t *media_data_store = airplay_video->media_data_store;
+ if (media_data_store == NULL) {
+ return NULL;
+ }
+ if (num >= 0 && num < airplay_video->num_uri) {
+ return media_data_store[num].playlist;
+ }
+ return NULL;
+}
+
+char * get_media_playlist_by_uri(airplay_video_t *airplay_video, const char *uri) {
+ /* Problem: there can be more than one StreamInf playlist with the same uri:
+ * they differ by choice of partner Media (audio, subtitles) playlists
+ * If the same uri is requested again, one of the other ones will be returned
+ * (the least-previously-requested one will be served up)
+ */
+ media_item_t *media_data_store = airplay_video->media_data_store;
+ if (media_data_store == NULL) {
+ return NULL;
+ }
+ int found = 0;
+ int num = -1;
+ int access = -1;
+ for (int i = 0; i < airplay_video->num_uri; i++) {
+ if (strstr(media_data_store[i].uri, uri)) {
+ if (!found) {
+ found = 1;
+ num = i;
+ access = media_data_store[i].access;
+ } else {
+ /* change > below to >= to reverse the order of choice */
+ if (access > media_data_store[i].access) {
+ access = media_data_store[i].access;
+ num = i;
+ }
+ }
+ }
+ }
+ if (found) {
+ printf("found %s\n", media_data_store[num].uri);
+ ++media_data_store[num].access;
+ return media_data_store[num].playlist;
+ }
+ return NULL;
+}
+
+char * get_media_uri_by_num(airplay_video_t *airplay_video, int num) {
+ media_item_t * media_data_store = airplay_video->media_data_store;
+ if (media_data_store == NULL) {
+ return NULL;
+ }
+ if (num >= 0 && num < airplay_video->num_uri) {
+ return media_data_store[num].uri;
+ }
+ return NULL;
+}
+
+int get_media_uri_num(airplay_video_t *airplay_video, char * uri) {
+ media_item_t *media_data_store = airplay_video->media_data_store;
+ for (int i = 0; i < airplay_video->num_uri ; i++) {
+ if (strstr(media_data_store[i].uri, uri)) {
+ return i;
+ }
+ }
+ return -1;
+}
diff --git a/lib/airplay_video.h b/lib/airplay_video.h
new file mode 100644
index 0000000..3a62ba0
--- /dev/null
+++ b/lib/airplay_video.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2024 fduncanh, All Rights Reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ *=================================================================
+ */
+
+#ifndef AIRPLAY_VIDEO_H
+#define AIRPLAY_VIDEO_H
+
+
+#include
+#include
+#include "raop.h"
+#include "logger.h"
+
+typedef struct airplay_video_s airplay_video_t;
+typedef struct media_item_s media_item_t;
+
+const char *get_apple_session_id(airplay_video_t *airplay_video);
+void set_start_position_seconds(airplay_video_t *airplay_video, float start_position_seconds);
+float get_start_position_seconds(airplay_video_t *airplay_video);
+void set_playback_uuid(airplay_video_t *airplay_video, const char *playback_uuid);
+void set_uri_prefix(airplay_video_t *airplay_video, char *uri_prefix, int uri_prefix_len);
+char *get_uri_prefix(airplay_video_t *airplay_video);
+char *get_uri_local_prefix(airplay_video_t *airplay_video);
+int get_next_FCUP_RequestID(airplay_video_t *airplay_video);
+void set_next_media_uri_id(airplay_video_t *airplay_video, int id);
+int get_next_media_uri_id(airplay_video_t *airplay_video);
+char * get_media_playlist_by_uri(airplay_video_t *airplay_video, const char *uri);
+void store_master_playlist(airplay_video_t *airplay_video, char *master_playlist);
+char *get_master_playlist(airplay_video_t *airplay_video);
+int get_num_media_uri(airplay_video_t *airplay_video);
+void destroy_media_data_store(airplay_video_t *airplay_video);
+void create_media_data_store(airplay_video_t * airplay_video, char ** media_data_store, int num_uri);
+int store_media_data_playlist_by_num(airplay_video_t *airplay_video, char * media_playlist, int num);
+char *get_media_playlist_by_num(airplay_video_t *airplay_video, int num);
+char *get_media_uri_by_num(airplay_video_t *airplay_video, int num);
+int get_media_uri_num(airplay_video_t *airplay_video, char * uri);
+
+
+void airplay_video_service_destroy(airplay_video_t *airplay_video);
+
+// C wrappers for c++ class MediaDataStore
+//create the media_data_store, return a pointer to it.
+void* media_data_store_create(void *conn_opaque, uint16_t port);
+
+//delete the media_data_store
+void media_data_store_destroy(void *media_data_store);
+
+// called by the POST /action handler:
+char *process_media_data(void *media_data_store, const char *url, const char *data, int datalen);
+
+//called by the POST /play handler
+bool request_media_data(void *media_data_store, const char *primary_url, const char * session_id);
+
+//called by airplay_video_media_http_connection::get_handler: &path = req.uri)
+char *query_media_data(void *media_data_store, const char *url, int *len);
+
+//called by the post_stop_handler:
+void media_data_store_reset(void *media_data_store);
+
+const char *adjust_primary_uri(void *media_data_store, const char *url);
+
+#endif //AIRPLAY_VIDEO_H
diff --git a/lib/fcup_request.h b/lib/fcup_request.h
new file mode 100644
index 0000000..166ffa9
--- /dev/null
+++ b/lib/fcup_request.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (c) 2022 fduncanh
+ * All Rights Reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ */
+
+/* this file is part of raop.c via http_handlers.h and should not be included in any other file */
+
+
/* Produce the "FCUP" (unhandledURLRequest) plist in xml format, as a
 * null-terminated string which must be freed by the caller.
 *
 * url:               the playlist uri the Server asks the Client to fetch
 * request_id:        sequence number identifying this FCUP request
 * client_session_id: the Client's X-Playback-Session-Id
 * datalen:           (out) length of the returned xml string (excluding '\0')
 */
char *create_fcup_request(const char *url, int request_id, const char *client_session_id, int *datalen) {
    char *plist_xml = NULL;
    /* values taken from apsdk-public; */
    /* these seem to be arbitrary choices */
    const int sessionID = 1;
    const int FCUP_Response_ClientInfo = 1;
    const int FCUP_Response_ClientRef = 40030004;

    /* taken from a working AppleTV? */
    /* NOTE(review): this string appears to be missing a closing ')' after
     * "en_us" -- confirm against a captured AppleTV request */
    const char User_Agent[] = "AppleCoreMedia/1.0.0.11B554a (Apple TV; U; CPU OS 7_0_4 like Mac OS X; en_us";

    /* top-level dict: { sessionID, type = "unhandledURLRequest", request = {...} } */
    plist_t req_root_node = plist_new_dict();

    plist_t session_id_node = plist_new_uint((int64_t) sessionID);
    plist_dict_set_item(req_root_node, "sessionID", session_id_node);
    plist_t type_node = plist_new_string("unhandledURLRequest");
    plist_dict_set_item(req_root_node, "type", type_node);

    /* the "request" sub-dict describing what the Client should fetch */
    plist_t fcup_request_node = plist_new_dict();

    plist_t client_info_node = plist_new_uint(FCUP_Response_ClientInfo);
    plist_dict_set_item(fcup_request_node, "FCUP_Response_ClientInfo", client_info_node);
    plist_t client_ref_node = plist_new_uint((int64_t) FCUP_Response_ClientRef);
    plist_dict_set_item(fcup_request_node, "FCUP_Response_ClientRef", client_ref_node);
    plist_t request_id_node = plist_new_uint((int64_t) request_id);
    plist_dict_set_item(fcup_request_node, "FCUP_Response_RequestID", request_id_node);
    plist_t url_node = plist_new_string(url);
    plist_dict_set_item(fcup_request_node, "FCUP_Response_URL", url_node);
    plist_t session_id1_node = plist_new_uint((int64_t) sessionID);
    plist_dict_set_item(fcup_request_node, "sessionID", session_id1_node);

    /* HTTP headers the Client should send when fetching the url */
    plist_t fcup_response_header_node = plist_new_dict();
    plist_t playback_session_id_node = plist_new_string(client_session_id);
    plist_dict_set_item(fcup_response_header_node, "X-Playback-Session-Id", playback_session_id_node);
    plist_t user_agent_node = plist_new_string(User_Agent);
    plist_dict_set_item(fcup_response_header_node, "User-Agent", user_agent_node);

    plist_dict_set_item(fcup_request_node, "FCUP_Response_Headers", fcup_response_header_node);
    plist_dict_set_item(req_root_node, "request", fcup_request_node);

    uint32_t uint_val;

    plist_to_xml(req_root_node, &plist_xml, &uint_val);
    *datalen = (int) uint_val;
    plist_free(req_root_node);
    /* plist_to_xml null-terminates the string at *datalen */
    assert(plist_xml[*datalen] == '\0');
    return plist_xml; //needs to be freed after use
}
+
+int fcup_request(void *conn_opaque, const char *media_url, const char *client_session_id, int request_id) {
+
+ raop_conn_t *conn = (raop_conn_t *) conn_opaque;
+ int datalen = 0;
+ int requestlen;
+
+ int socket_fd = httpd_get_connection_socket_by_type(conn->raop->httpd, CONNECTION_TYPE_PTTH, 1);
+
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "fcup_request send socket = %d", socket_fd);
+
+ /* create xml plist request data */
+ char *plist_xml = create_fcup_request(media_url, request_id, client_session_id, &datalen);
+
+ /* use http_response tools for creating the reverse http request */
+ http_response_t *request = http_response_create();
+ http_response_reverse_request_init(request, "POST", "/event", "HTTP/1.1");
+ http_response_add_header(request, "X-Apple-Session-ID", client_session_id);
+ http_response_add_header(request, "Content-Type", "text/x-apple-plist+xml");
+ http_response_finish(request, plist_xml, datalen);
+
+ free(plist_xml);
+
+ const char *http_request = http_response_get_data(request, &requestlen);
+ int send_len = send(socket_fd, http_request, requestlen, 0);
+ if (send_len < 0) {
+ int sock_err = SOCKET_GET_ERROR();
+ logger_log(conn->raop->logger, LOGGER_ERR, "fcup_request: send error %d:%s\n",
+ sock_err, strerror(sock_err));
+ http_response_destroy(request);
+ /* shut down connection? */
+ return -1;
+ }
+
+ if (logger_get_level(conn->raop->logger) >= LOGGER_DEBUG) {
+ char *request_str = utils_data_to_text(http_request, requestlen);
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "\n%s", request_str);
+ free (request_str);
+ }
+ http_response_destroy(request);
+ logger_log(conn->raop->logger, LOGGER_DEBUG,"fcup_request: send sent Request of %d bytes from socket %d\n",
+ send_len, socket_fd);
+ return 0;
+}
diff --git a/lib/http_handlers.h b/lib/http_handlers.h
new file mode 100644
index 0000000..59fd78a
--- /dev/null
+++ b/lib/http_handlers.h
@@ -0,0 +1,1001 @@
+/**
+ * Copyright (c) 2024 fduncanh
+ * All Rights Reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ */
+
+/* this file is part of raop.c and should not be included in any other file */
+
+#include "airplay_video.h"
+#include "fcup_request.h"
+
/* Handle GET /server-info: reply with an xml plist describing this AirPlay
 * server (features bitmask, MAC address, model, versions), and initialize
 * the airplay video (HLS) service for this X-Apple-Session-ID. */
static void
http_handler_server_info(raop_conn_t *conn, http_request_t *request, http_response_t *response,
                         char **response_data, int *response_datalen) {

    assert(conn->raop->dnssd);
    int hw_addr_raw_len = 0;
    const char *hw_addr_raw = dnssd_get_hw_addr(conn->raop->dnssd, &hw_addr_raw_len);

    /* "xx:xx:...:xx" text form: 3 chars per raw byte (incl. trailing '\0') */
    char *hw_addr = calloc(1, 3 * hw_addr_raw_len);
    //int hw_addr_len =
    utils_hwaddr_airplay(hw_addr, 3 * hw_addr_raw_len, hw_addr_raw, hw_addr_raw_len);

    plist_t r_node = plist_new_dict();

    /* first 12 AirPlay features bits (R to L): 0x27F = 0010 0111 1111
     * Only bits 0-6 and bit 9 are set:
     * 0. video supported
     * 1. photo supported
     * 2. video protected with FairPlay DRM
     * 3. volume control supported for video
     * 4. HLS supported
     * 5. slideshow supported
     * 6. (unknown)
     * 9. audio supported.
     */
    plist_t features_node = plist_new_uint(0x27F);
    plist_dict_set_item(r_node, "features", features_node);

    plist_t mac_address_node = plist_new_string(hw_addr);
    plist_dict_set_item(r_node, "macAddress", mac_address_node);

    plist_t model_node = plist_new_string(GLOBAL_MODEL);
    plist_dict_set_item(r_node, "model", model_node);

    /* osBuildVersion "12B435" mimics an AppleTV build number */
    plist_t os_build_node = plist_new_string("12B435");
    plist_dict_set_item(r_node, "osBuildVersion", os_build_node);

    plist_t protovers_node = plist_new_string("1.0");
    plist_dict_set_item(r_node, "protovers", protovers_node);

    plist_t source_version_node = plist_new_string(GLOBAL_VERSION);
    plist_dict_set_item(r_node, "srcvers", source_version_node);

    plist_t vv_node = plist_new_uint(strtol(AIRPLAY_VV, NULL, 10));
    plist_dict_set_item(r_node, "vv", vv_node);

    plist_t device_id_node = plist_new_string(hw_addr);
    plist_dict_set_item(r_node, "deviceid", device_id_node);

    plist_to_xml(r_node, response_data, (uint32_t *) response_datalen);

    //assert(*response_datalen == strlen(*response_data));

    /* last character (at *response_data[response_datalen - 1]) is 0x0a = '\n'
     * (*response_data[response_datalen] is '\0').
     * apsdk removes the last "\n" by overwriting it with '\0', and reducing response_datalen by 1.
     * TODO: check if this is necessary */

    plist_free(r_node);
    http_response_add_header(response, "Content-Type", "text/x-apple-plist+xml");
    free(hw_addr);

    /* initialize the airplay video service */
    const char *session_id = http_request_get_header(request, "X-Apple-Session-ID");

    /* NOTE(review): session_id may be NULL if the client omitted the header --
     * confirm airplay_video_service_init tolerates that */
    airplay_video_service_init(conn->raop, conn->raop->port, session_id);

}
+
+static void
+http_handler_scrub(raop_conn_t *conn, http_request_t *request, http_response_t *response,
+ char **response_data, int *response_datalen) {
+ const char *url = http_request_get_url(request);
+ const char *data = strstr(url, "?");
+ float scrub_position = 0.0f;
+ if (data) {
+ data++;
+ const char *position = strstr(data, "=") + 1;
+ char *end;
+ double value = strtod(position, &end);
+ if (end && end != position) {
+ scrub_position = (float) value;
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "http_handler_scrub: got position = %.6f",
+ scrub_position);
+ }
+ }
+ printf("**********************SCRUB %f ***********************\n",scrub_position);
+ conn->raop->callbacks.on_video_scrub(conn->raop->callbacks.cls, scrub_position);
+}
+
+static void
+http_handler_rate(raop_conn_t *conn, http_request_t *request, http_response_t *response,
+ char **response_data, int *response_datalen) {
+
+ const char *url = http_request_get_url(request);
+ const char *data = strstr(url, "?");
+ float rate_value = 0.0f;
+ if (data) {
+ data++;
+ const char *rate = strstr(data, "=") + 1;
+ char *end;
+ float value = strtof(rate, &end);
+ if (end && end != rate) {
+ rate_value = value;
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "http_handler_rate: got rate = %.6f", rate_value);
+ }
+ }
+ conn->raop->callbacks.on_video_rate(conn->raop->callbacks.cls, rate_value);
+}
+
+static void
+http_handler_stop(raop_conn_t *conn, http_request_t *request, http_response_t *response,
+ char **response_data, int *response_datalen) {
+ logger_log(conn->raop->logger, LOGGER_INFO, "client HTTP request POST stop");
+
+ conn->raop->callbacks.on_video_stop(conn->raop->callbacks.cls);
+}
+
+/* handles PUT /setProperty http requests from Client to Server */
+
+static void
+http_handler_set_property(raop_conn_t *conn,
+ http_request_t *request, http_response_t *response,
+ char **response_data, int *response_datalen) {
+
+ const char *url = http_request_get_url(request);
+ const char *property = url + strlen("/setProperty?");
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "http_handler_set_property: %s", property);
+
+ /* actionAtItemEnd: values:
+ 0: advance (advance to next item, if there is one)
+ 1: pause (pause playing)
+ 2: none (do nothing)
+
+ reverseEndTime (only used when rate < 0) time at which reverse playback ends
+ forwardEndTime (only used when rate > 0) time at which reverse playback ends
+ */
+
+ if (!strcmp(property, "reverseEndTime") ||
+ !strcmp(property, "forwardEndTime") ||
+ !strcmp(property, "actionAtItemEnd")) {
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "property %s is known but unhandled", property);
+
+ plist_t errResponse = plist_new_dict();
+ plist_t errCode = plist_new_uint(0);
+ plist_dict_set_item(errResponse, "errorCode", errCode);
+ plist_to_xml(errResponse, response_data, (uint32_t *) response_datalen);
+ plist_free(errResponse);
+ http_response_add_header(response, "Content-Type", "text/x-apple-plist+xml");
+ } else {
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "property %s is unknown, unhandled", property);
+ http_response_add_header(response, "Content-Length", "0");
+ }
+}
+
+/* handles GET /getProperty http requests from Client to Server. (not implemented) */
+
+static void
+http_handler_get_property(raop_conn_t *conn, http_request_t *request, http_response_t *response,
+ char **response_data, int *response_datalen) {
+ const char *url = http_request_get_url(request);
+ const char *property = url + strlen("getProperty?");
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "http_handler_get_property: %s (unhandled)", property);
+}
+
+/* this request (for a variant FairPlay decryption) cannot be handled by UxPlay */
+static void
+http_handler_fpsetup2(raop_conn_t *conn, http_request_t *request, http_response_t *response,
+ char **response_data, int *response_datalen) {
+ logger_log(conn->raop->logger, LOGGER_WARNING, "client HTTP request POST fp-setup2 is unhandled");
+ http_response_add_header(response, "Content-Type", "application/x-apple-binary-plist");
+ int req_datalen;
+ const unsigned char *req_data = (unsigned char *) http_request_get_data(request, &req_datalen);
+ logger_log(conn->raop->logger, LOGGER_ERR, "only FairPlay version 0x03 is implemented, version is 0x%2.2x",
+ req_data[4]);
+ http_response_init(response, "HTTP/1.1", 421, "Misdirected Request");
+}
+
+// called by http_handler_playback_info while preparing response to a GET /playback_info request from the client.
+
+typedef struct time_range_s {
+ double start;
+ double duration;
+} time_range_t;
+
+void time_range_to_plist(void *time_ranges, const int n_time_ranges,
+ plist_t time_ranges_node) {
+ time_range_t *tr = (time_range_t *) time_ranges;
+ for (int i = 0 ; i < n_time_ranges; i++) {
+ plist_t time_range_node = plist_new_dict();
+ plist_t duration_node = plist_new_real(tr[i].duration);
+ plist_dict_set_item(time_range_node, "duration", duration_node);
+ plist_t start_node = plist_new_real(tr[i].start);
+ plist_dict_set_item(time_range_node, "start", start_node);
+ plist_array_append_item(time_ranges_node, time_range_node);
+ }
+}
+
+// called by http_handler_playback_info while preparing response to a GET /playback_info request from the client.
+
+int create_playback_info_plist_xml(playback_info_t *playback_info, char **plist_xml) {
+
+ plist_t res_root_node = plist_new_dict();
+
+ plist_t duration_node = plist_new_real(playback_info->duration);
+ plist_dict_set_item(res_root_node, "duration", duration_node);
+
+ plist_t position_node = plist_new_real(playback_info->position);
+ plist_dict_set_item(res_root_node, "position", position_node);
+
+ plist_t rate_node = plist_new_real(playback_info->rate);
+ plist_dict_set_item(res_root_node, "rate", rate_node);
+
+ /* should these be int or bool? */
+ plist_t ready_to_play_node = plist_new_uint(playback_info->ready_to_play);
+ plist_dict_set_item(res_root_node, "readyToPlay", ready_to_play_node);
+
+ plist_t playback_buffer_empty_node = plist_new_uint(playback_info->playback_buffer_empty);
+ plist_dict_set_item(res_root_node, "playbackBufferEmpty", playback_buffer_empty_node);
+
+ plist_t playback_buffer_full_node = plist_new_uint(playback_info->playback_buffer_full);
+ plist_dict_set_item(res_root_node, "playbackBufferFull", playback_buffer_full_node);
+
+ plist_t playback_likely_to_keep_up_node = plist_new_uint(playback_info->playback_likely_to_keep_up);
+ plist_dict_set_item(res_root_node, "playbackLikelyToKeepUp", playback_likely_to_keep_up_node);
+
+ plist_t loaded_time_ranges_node = plist_new_array();
+ time_range_to_plist(playback_info->loadedTimeRanges, playback_info->num_loaded_time_ranges,
+ loaded_time_ranges_node);
+ plist_dict_set_item(res_root_node, "loadedTimeRanges", loaded_time_ranges_node);
+
+ plist_t seekable_time_ranges_node = plist_new_array();
+ time_range_to_plist(playback_info->seekableTimeRanges, playback_info->num_seekable_time_ranges,
+ seekable_time_ranges_node);
+ plist_dict_set_item(res_root_node, "seekableTimeRanges", seekable_time_ranges_node);
+
+ int len;
+ plist_to_xml(res_root_node, plist_xml, (uint32_t *) &len);
+ /* plist_xml is null-terminated, last character is '/n' */
+
+ plist_free(res_root_node);
+
+ return len;
+}
+
+
/* this handles requests from the Client for "Playback information" while the Media is playing on the
   Media Player. (The Server gets this information by monitoring the Media Player). The Client could use
   the information to e.g. update the slider it shows with progress of the player (0%-100%).
   It does not affect playing of the Media. */

static void
http_handler_playback_info(raop_conn_t *conn, http_request_t *request, http_response_t *response,
                           char **response_data, int *response_datalen)
{
    logger_log(conn->raop->logger, LOGGER_DEBUG, "http_handler_playback_info");
    //const char *session_id = http_request_get_header(request, "X-Apple-Session-ID");
    playback_info_t playback_info;

    /* fixed fields; duration and position are filled in by the callback below */
    playback_info.stallcount = 0;
    playback_info.ready_to_play = true; // ???;
    playback_info.playback_buffer_empty = false; // maybe need to get this from playbin
    playback_info.playback_buffer_full = true;
    playback_info.playback_likely_to_keep_up = true;

    /* presumably sets playback_info.duration and .position, with -1.0 meaning
     * "unavailable" -- confirm against the application's callback */
    conn->raop->callbacks.on_video_acquire_playback_info(conn->raop->callbacks.cls, &playback_info);
    if (playback_info.duration == -1.0) {
        /* video has finished, reset */
        logger_log(conn->raop->logger, LOGGER_DEBUG, "playback_info not available (finishing)");
        //httpd_remove_known_connections(conn->raop->httpd);
        http_response_set_disconnect(response,1);
        conn->raop->callbacks.video_reset(conn->raop->callbacks.cls);
        return;
    } else if (playback_info.position == -1.0) {
        logger_log(conn->raop->logger, LOGGER_DEBUG, "playback_info not available");
        return;
    }

    /* report one loaded range: from the current position to the end */
    playback_info.num_loaded_time_ranges = 1;
    time_range_t time_ranges_loaded[1];
    time_ranges_loaded[0].start = playback_info.position;
    time_ranges_loaded[0].duration = playback_info.duration - playback_info.position;
    playback_info.loadedTimeRanges = (void *) &time_ranges_loaded;

    /* and one seekable range: from the start to the current position */
    playback_info.num_seekable_time_ranges = 1;
    time_range_t time_ranges_seekable[1];
    time_ranges_seekable[0].start = 0.0;
    time_ranges_seekable[0].duration = playback_info.position;
    playback_info.seekableTimeRanges = (void *) &time_ranges_seekable;

    /* the stack arrays above are only read during this call, so the pointers
     * stored in playback_info do not outlive them */
    *response_datalen = create_playback_info_plist_xml(&playback_info, response_data);
    http_response_add_header(response, "Content-Type", "text/x-apple-plist+xml");
}
+
+/* this handles the POST /reverse request from Client to Server on a AirPlay http channel to "Upgrade"
+ to "PTTH/1.0" Reverse HTTP protocol proposed in 2009 Internet-Draft
+
+ https://datatracker.ietf.org/doc/id/draft-lentczner-rhttp-00.txt .
+
+ After the Upgrade the channel becomes a reverse http "AirPlay (reversed)" channel for
+ http requests from Server to Client.
+ */
+
+static void
+http_handler_reverse(raop_conn_t *conn, http_request_t *request, http_response_t *response,
+ char **response_data, int *response_datalen) {
+
+ /* get http socket for send */
+ int socket_fd = httpd_get_connection_socket (conn->raop->httpd, (void *) conn);
+ if (socket_fd < 0) {
+ logger_log(conn->raop->logger, LOGGER_ERR, "fcup_request failed to retrieve socket_fd from httpd");
+ /* shut down connection? */
+ }
+
+ const char *purpose = http_request_get_header(request, "X-Apple-Purpose");
+ const char *connection = http_request_get_header(request, "Connection");
+ const char *upgrade = http_request_get_header(request, "Upgrade");
+ logger_log(conn->raop->logger, LOGGER_INFO, "client requested reverse connection: %s; purpose: %s \"%s\"",
+ connection, upgrade, purpose);
+
+ httpd_set_connection_type(conn->raop->httpd, (void *) conn, CONNECTION_TYPE_PTTH);
+ int type_PTTH = httpd_count_connection_type(conn->raop->httpd, CONNECTION_TYPE_PTTH);
+
+ if (type_PTTH == 1) {
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "will use socket %d for %s connections", socket_fd, purpose);
+ http_response_init(response, "HTTP/1.1", 101, "Switching Protocols");
+ http_response_add_header(response, "Connection", "Upgrade");
+ http_response_add_header(response, "Upgrade", "PTTH/1.0");
+
+ } else {
+ logger_log(conn->raop->logger, LOGGER_ERR, "multiple TPPH connections (%d) are forbidden", type_PTTH );
+ }
+}
+
/* this copies a Media Playlist into a null-terminated string. If it has the "#YT-EXT-CONDENSED-URI"
   header, it is also expanded into the full Media Playlist format */

char *adjust_yt_condensed_playlist(const char *media_playlist) {
    /* expands a YT-EXT_CONDENSED-URL media playlist into a full media playlist
     * returns a pointer to the expanded playlist, WHICH MUST BE FREED AFTER USE */

    const char *base_uri_begin;
    const char *params_begin;
    const char *prefix_begin;
    size_t base_uri_len;
    size_t params_len;
    size_t prefix_len;
    const char* ptr = strstr(media_playlist, "#EXTM3U\n");

    /* NOTE(review): if "#EXTM3U\n" is absent, strstr returns NULL and the
     * increment below is undefined behavior -- the assert fires too late;
     * it should be assert(ptr) *before* the increment */
    ptr += strlen("#EXTM3U\n");
    assert(ptr);
    /* a playlist without the YT-EXT-CONDENSED-URL tag needs no expansion:
     * just return a null-terminated copy */
    if (strncmp(ptr, "#YT-EXT-CONDENSED-URL", strlen("#YT-EXT-CONDENSED-URL"))) {
        size_t len = strlen(media_playlist);
        char * playlist_copy = (char *) malloc(len + 1);
        memcpy(playlist_copy, media_playlist, len);
        playlist_copy[len] = '\0';
        return playlist_copy;
    }
    /* extract the quoted BASE-URI="..." attribute */
    ptr = strstr(ptr, "BASE-URI=");
    base_uri_begin = strchr(ptr, '"');
    base_uri_begin++;
    ptr = strchr(base_uri_begin, '"');
    base_uri_len = ptr - base_uri_begin;
    char *base_uri = (char *) calloc(base_uri_len + 1, sizeof(char));
    assert(base_uri);
    memcpy(base_uri, base_uri_begin, base_uri_len); //must free

    /* extract the quoted PARAMS="p1,p2,..." attribute (comma-separated names) */
    ptr = strstr(ptr, "PARAMS=");
    params_begin = strchr(ptr, '"');
    params_begin++;
    ptr = strchr(params_begin,'"');
    params_len = ptr - params_begin;
    char *params = (char *) calloc(params_len + 1, sizeof(char));
    assert(params);
    memcpy(params, params_begin, params_len); //must free

    /* extract the quoted PREFIX="..." attribute (the placeholder that marks
     * condensed chunk uris) */
    ptr = strstr(ptr, "PREFIX=");
    prefix_begin = strchr(ptr, '"');
    prefix_begin++;
    ptr = strchr(prefix_begin,'"');
    prefix_len = ptr - prefix_begin;
    char *prefix = (char *) calloc(prefix_len + 1, sizeof(char));
    assert(prefix);
    memcpy(prefix, prefix_begin, prefix_len); //must free

    /* expand params */
    /* split the comma-separated PARAMS string into nparams (start, size) pairs */
    int nparams = 0;
    int *params_size = NULL;
    const char **params_start = NULL;
    if (strlen(params)) {
        nparams = 1;
        char * comma = strchr(params, ',');
        while (comma) {
            nparams++;
            comma++;
            comma = strchr(comma, ',');
        }
        params_start = (const char **) calloc(nparams, sizeof(char *)); //must free
        params_size = (int *) calloc(nparams, sizeof(int)); //must free
        ptr = params;
        for (int i = 0; i < nparams; i++) {
            comma = strchr(ptr, ',');
            params_start[i] = ptr;
            if (comma) {
                params_size[i] = (int) (comma - ptr);
                ptr = comma;
                ptr++;
            } else {
                /* the last param runs to the end of the PARAMS string */
                params_size[i] = (int) (params + params_len - ptr);
                break;
            }
        }
    }

    /* count the #EXTINF chunk entries, to size the expanded playlist */
    int count = 0;
    ptr = strstr(media_playlist, "#EXTINF");
    while (ptr) {
        count++;
        ptr = strstr(++ptr, "#EXTINF");
    }

    size_t old_size = strlen(media_playlist);
    size_t new_size = old_size;
    new_size += count * (base_uri_len + params_len);

    /* +100 slack bytes: the '/' separators inserted per chunk below are not
     * included in new_size; calloc zero-fill also guarantees termination */
    char * new_playlist = (char *) calloc( new_size + 100, sizeof(char));
    const char *old_pos = media_playlist;
    char *new_pos = new_playlist;
    ptr = old_pos;
    ptr = strstr(old_pos, "#EXTINF:");
    size_t len = ptr - old_pos;
    /* copy header section before chunks */
    memcpy(new_pos, old_pos, len);
    old_pos += len;
    new_pos += len;
    int counter = 0;  /* NOTE(review): incremented but never used */
    while (ptr) {
        counter++;
        /* for each chunk */
        const char *end = NULL;
        char *start = strstr(ptr, prefix);
        len = start - ptr;
        /* copy first line of chunk entry */
        memcpy(new_pos, old_pos, len);
        old_pos += len;
        new_pos += len;

        /* copy base uri to replace prefix*/
        memcpy(new_pos, base_uri, base_uri_len);
        new_pos += base_uri_len;
        old_pos += prefix_len;
        ptr = strstr(old_pos, "#EXTINF:");

        /* insert the PARAMS separators on the slices line */
        end = old_pos;
        int last = nparams - 1;
        for (int i = 0; i < nparams; i++) {
            if (i != last) {
                end = strchr(end, '/');
            } else {
                end = strstr(end, "#EXT"); /* the next line starts with either #EXTINF (usually) or #EXT-X-ENDLIST (at last chunk)*/
            }
            *new_pos = '/';
            new_pos++;
            memcpy(new_pos, params_start[i], params_size[i]);
            new_pos += params_size[i];
            *new_pos = '/';
            new_pos++;

            len = end - old_pos;
            end++;

            memcpy (new_pos, old_pos, len);
            new_pos += len;
            old_pos += len;
            if (i != last) {
                old_pos++; /* last entry is not followed by "/" separator */
            }
        }
    }
    /* copy tail */

    len = media_playlist + strlen(media_playlist) - old_pos;
    memcpy(new_pos, old_pos, len);
    new_pos += len;
    old_pos += len;

    /* NOTE(review): the bytes actually written can exceed new_size by the
     * inserted '/' separators; this relies on the +100 slack above */
    new_playlist[new_size] = '\0';

    free (prefix);
    free (base_uri);
    free (params);
    if (params_size) {
        free (params_size);
    }
    if (params_start) {
        free (params_start);
    }

    return new_playlist;
}
+
+/* this adjusts the uri prefixes in the Master Playlist, for sending to the Media Player running on the Server Host */
+
char *adjust_master_playlist (char *fcup_response_data, int fcup_response_datalen, char *uri_prefix, char *uri_local_prefix) {
    /* Replace every occurrence of uri_prefix in the Master Playlist by
     * uri_local_prefix, so the Media Player on the Server host fetches media
     * from the local HLS endpoint.  Returns a newly-allocated null-terminated
     * string of the adjusted playlist; caller must free it. */

    size_t uri_prefix_len = strlen(uri_prefix);
    size_t uri_local_prefix_len = strlen(uri_local_prefix);

    /* first pass: count occurrences of uri_prefix */
    int counter = 0;
    char *ptr = strstr(fcup_response_data, uri_prefix);
    while (ptr != NULL) {
        counter++;
        ptr++;
        ptr = strstr(ptr, uri_prefix);
    }

    /* each replacement changes the length by (local - remote) prefix length;
     * unsigned modular arithmetic yields the correct final value even when
     * the local prefix is shorter */
    size_t len = uri_local_prefix_len - uri_prefix_len;
    len *= counter;
    len += fcup_response_datalen;
    char *new_master = (char *) malloc(len + 1);
    *(new_master + len) = '\0';

    char *first = fcup_response_data;
    char *new = new_master;
    char *last = strstr(first, uri_prefix);
    if (last == NULL) {
        /* fixed: with no occurrence of uri_prefix the loop below never ran,
         * and an uninitialized buffer was returned; return a plain copy */
        memcpy(new_master, fcup_response_data, fcup_response_datalen);
        return new_master;
    }
    while (last != NULL) {
        /* copy the text before this occurrence, then the local prefix */
        len = last - first;
        memcpy(new, first, len);
        first = last + uri_prefix_len;
        new += len;
        memcpy(new, uri_local_prefix, uri_local_prefix_len);
        new += uri_local_prefix_len;
        last = strstr(last + uri_prefix_len, uri_prefix);
        if (last == NULL) {
            /* copy the tail after the final occurrence */
            len = fcup_response_data + fcup_response_datalen - first;
            memcpy(new, first, len);
            break;
        }
    }
    return new_master;
}
+
+/* this parses the Master Playlist to make a table of the Media Playlist uri's that it lists */
+
/* Parse the Master Playlist and build a table of the Media Playlist uri's it
 * lists: every substring running from url_prefix through the next "m3u8".
 * On success returns 0 with *media_uri_table set to a newly-allocated array
 * of *num_uri newly-allocated uri strings (caller frees all of them).
 * Returns -1 if url_prefix does not occur or allocation fails, and 1 if a
 * url_prefix occurrence is not followed by "m3u8". */
int create_media_uri_table(const char *url_prefix, const char *master_playlist_data, int datalen,
                           char ***media_uri_table, int *num_uri) {
    (void) datalen;   /* unused: the playlist string is null-terminated */
    const size_t token_len = strlen("m3u8");

    char *ptr = strstr(master_playlist_data, url_prefix);
    if (ptr == NULL) {
        return -1;
    }

    /* first pass: count the uris */
    int count = 0;
    while (ptr != NULL) {
        char *end = strstr(ptr, "m3u8");
        if (end == NULL) {
            return 1;
        }
        /* skip past "m3u8" plus one following character (was sizeof("m3u8"),
         * which includes the NUL and so advances token_len + 1) */
        end += token_len + 1;
        count++;
        ptr = strstr(end, url_prefix);
    }

    char **table = (char **) calloc(count, sizeof(char *));
    if (!table) {
        return -1;
    }

    /* second pass: copy each uri (from url_prefix through "m3u8") */
    ptr = strstr(master_playlist_data, url_prefix);
    count = 0;
    while (ptr != NULL) {
        char *end = strstr(ptr, "m3u8");
        if (end == NULL) {
            break;   /* cannot happen: the first pass verified every entry */
        }
        end += token_len + 1;
        size_t len = (size_t) (end - ptr) - 1;   /* uri length, incl. "m3u8" */
        char *uri = (char *) calloc(len + 1, sizeof(char));
        if (!uri) {
            /* fixed: free everything instead of leaking on allocation failure */
            for (int i = 0; i < count; i++) {
                free(table[i]);
            }
            free(table);
            return -1;
        }
        memcpy(uri, ptr, len);
        table[count] = uri;
        count++;
        ptr = strstr(end, url_prefix);
    }
    *num_uri = count;

    *media_uri_table = table;
    return 0;
}
+
+/* the POST /action request from Client to Server on the AirPlay http channel follows a POST /event "FCUP Request"
+ from Server to Client on the reverse http channel, for a HLS playlist (first the Master Playlist, then the Media Playlists
+ listed in the Master Playlist. The POST /action request contains the playlist requested by the Server in
+ the preceding "FCUP Request". The FCUP Request sequence continues until all Media Playlists have been obtained by the Server */
+
+static void
+http_handler_action(raop_conn_t *conn, http_request_t *request, http_response_t *response,
+                    char **response_data, int *response_datalen) {
+
+    bool data_is_plist = false;
+    plist_t req_root_node = NULL;
+    uint64_t uint_val;
+    int request_id = 0;
+    int fcup_response_statuscode = 0;
+    bool logger_debug = (logger_get_level(conn->raop->logger) >= LOGGER_DEBUG);
+
+    /* the request must carry the X-Apple-Session-ID of the current AirPlay video session */
+    const char* session_id = http_request_get_header(request, "X-Apple-Session-ID");
+    if (!session_id) {
+        logger_log(conn->raop->logger, LOGGER_ERR, "Play request had no X-Apple-Session-ID");
+        goto post_action_error;
+    }
+    const char *apple_session_id = get_apple_session_id(conn->raop->airplay_video);
+    if (strcmp(session_id, apple_session_id)){
+        logger_log(conn->raop->logger, LOGGER_ERR, "X-Apple-Session-ID has changed:\n was:\"%s\"\n now:\"%s\"",
+                   apple_session_id, session_id);
+        goto post_action_error;
+    }
+
+    /* verify that this request contains a binary plist */
+    char *header_str = NULL;
+    http_request_get_header_string(request, &header_str);
+    if (!header_str) {
+        /* http_request_get_header_string can leave header_str NULL: guard before strstr */
+        logger_log(conn->raop->logger, LOGGER_INFO, "POST /action: request had no header string");
+        goto post_action_error;
+    }
+    logger_log(conn->raop->logger, LOGGER_DEBUG, "request header: %s", header_str);
+    data_is_plist = (strstr(header_str,"apple-binary-plist") != NULL);
+    free(header_str);
+    if (!data_is_plist) {
+        logger_log(conn->raop->logger, LOGGER_INFO, "POST /action: did not receive expected plist from client");
+        goto post_action_error;
+    }
+
+    /* extract the root_node plist */
+    int request_datalen = 0;
+    const char *request_data = http_request_get_data(request, &request_datalen);
+    if (request_datalen == 0) {
+        logger_log(conn->raop->logger, LOGGER_INFO, "POST /action: did not receive expected plist from client");
+        goto post_action_error;
+    }
+    plist_from_bin(request_data, request_datalen, &req_root_node);
+
+    /* determine type of data */
+    plist_t req_type_node = plist_dict_get_item(req_root_node, "type");
+    if (!PLIST_IS_STRING(req_type_node)) {
+        goto post_action_error;
+    }
+
+    /* three possible types are known */
+    char *type = NULL;
+    int action_type = 0;
+    plist_get_string_val(req_type_node, &type);
+    if (!type) {
+        goto post_action_error;
+    }
+    logger_log(conn->raop->logger, LOGGER_DEBUG, "action type is %s", type);
+    if (strstr(type, "unhandledURLResponse")) {
+        action_type = 1;
+    } else if (strstr(type, "playlistInsert")) {
+        action_type = 2;
+    } else if (strstr(type, "playlistRemove")) {
+        action_type = 3;
+    }
+    free (type);
+
+    plist_t req_params_node = NULL;
+    switch (action_type) {
+    case 1:
+        goto unhandledURLResponse;
+    case 2:
+        logger_log(conn->raop->logger, LOGGER_INFO, "unhandled action type playlistInsert (add new playback)");
+        goto finish;
+    case 3:
+        logger_log(conn->raop->logger, LOGGER_INFO, "unhandled action type playlistRemove (stop playback)");
+        goto finish;
+    default:
+        logger_log(conn->raop->logger, LOGGER_INFO, "unknown action type (unhandled)");
+        goto finish;
+    }
+
+ unhandledURLResponse:;
+
+    req_params_node = plist_dict_get_item(req_root_node, "params");
+    if (!PLIST_IS_DICT (req_params_node)) {
+        goto post_action_error;
+    }
+
+    /* handling type "unhandledURLResponse" (case 1) */
+    uint_val = 0;
+    int fcup_response_datalen = 0;
+
+    /* status code and request id are only used for debug logging */
+    if (logger_debug) {
+        plist_t plist_fcup_response_statuscode_node = plist_dict_get_item(req_params_node,
+                                                                          "FCUP_Response_StatusCode");
+        if (plist_fcup_response_statuscode_node) {
+            plist_get_uint_val(plist_fcup_response_statuscode_node, &uint_val);
+            fcup_response_statuscode = (int) uint_val;
+            uint_val = 0;
+            logger_log(conn->raop->logger, LOGGER_DEBUG, "FCUP_Response_StatusCode = %d",
+                       fcup_response_statuscode);
+        }
+
+        plist_t plist_fcup_response_requestid_node = plist_dict_get_item(req_params_node,
+                                                                         "FCUP_Response_RequestID");
+        if (plist_fcup_response_requestid_node) {
+            plist_get_uint_val(plist_fcup_response_requestid_node, &uint_val);
+            request_id = (int) uint_val;
+            uint_val = 0;
+            logger_log(conn->raop->logger, LOGGER_DEBUG, "FCUP_Response_RequestID = %d", request_id);
+        }
+    }
+
+    plist_t plist_fcup_response_url_node = plist_dict_get_item(req_params_node, "FCUP_Response_URL");
+    if (!PLIST_IS_STRING(plist_fcup_response_url_node)) {
+        goto post_action_error;
+    }
+    char *fcup_response_url = NULL;
+    plist_get_string_val(plist_fcup_response_url_node, &fcup_response_url);
+    if (!fcup_response_url) {
+        goto post_action_error;
+    }
+    logger_log(conn->raop->logger, LOGGER_DEBUG, "FCUP_Response_URL = %s", fcup_response_url);
+
+    plist_t plist_fcup_response_data_node = plist_dict_get_item(req_params_node, "FCUP_Response_Data");
+    if (!PLIST_IS_DATA(plist_fcup_response_data_node)){
+        /* fcup_response_url was leaked on this error path before */
+        free (fcup_response_url);
+        goto post_action_error;
+    }
+
+    uint_val = 0;
+    char *fcup_response_data = NULL;
+    plist_get_data_val(plist_fcup_response_data_node, &fcup_response_data, &uint_val);
+    fcup_response_datalen = (int) uint_val;
+
+    if (!fcup_response_data) {
+        free (fcup_response_url);
+        goto post_action_error;
+    }
+
+    if (logger_debug) {
+        logger_log(conn->raop->logger, LOGGER_DEBUG, "FCUP_Response datalen = %d", fcup_response_datalen);
+        /* the playlist data is NOT NUL-terminated: write it by length, not as a string */
+        printf("begin FCUP Response data:\n");
+        fwrite(fcup_response_data, 1, (size_t) fcup_response_datalen, stdout);
+        printf("end FCUP Response data\n");
+    }
+
+    char *ptr = strstr(fcup_response_url, "/master.m3u8");
+    if (ptr) {
+        /* this is the master playlist */
+        char *uri_prefix = get_uri_prefix(conn->raop->airplay_video);
+        char ** media_data_store = NULL;
+        int num_uri = 0;
+
+        char *uri_local_prefix = get_uri_local_prefix(conn->raop->airplay_video);
+        char *new_master = adjust_master_playlist (fcup_response_data, fcup_response_datalen, uri_prefix, uri_local_prefix);
+        store_master_playlist(conn->raop->airplay_video, new_master);
+        create_media_uri_table(uri_prefix, fcup_response_data, fcup_response_datalen, &media_data_store, &num_uri);
+        create_media_data_store(conn->raop->airplay_video, media_data_store, num_uri);
+        set_next_media_uri_id(conn->raop->airplay_video, 0);
+    } else {
+        /* this is a media playlist: make a NUL-terminated copy so it can be stored
+         * and safely scanned with string functions (the raw data is unterminated) */
+        char *playlist = (char *) calloc(fcup_response_datalen + 1, sizeof(char));
+        if (!playlist) {
+            free (fcup_response_data);
+            free (fcup_response_url);
+            goto post_action_error;
+        }
+        memcpy(playlist, fcup_response_data, fcup_response_datalen);
+        /* sum the #EXTINF: segment durations (scan the terminated copy, not the raw data) */
+        float duration = 0.0f, next;
+        int count = 0;
+        ptr = strstr(playlist, "#EXTINF:");
+        while (ptr != NULL) {
+            char *end;
+            ptr += strlen("#EXTINF:");
+            next = strtof(ptr, &end);
+            duration += next;
+            count++;
+            ptr = strstr(end, "#EXTINF:");
+        }
+        if (count) {
+            printf("\n%s:\nplaylist has %5d chunks, total duration %9.3f secs\n", fcup_response_url, count, duration);
+        }
+        int uri_num = get_next_media_uri_id(conn->raop->airplay_video);
+        --uri_num; /* (next num is current num + 1) */
+        store_media_data_playlist_by_num(conn->raop->airplay_video, playlist, uri_num);
+    }
+
+    free (fcup_response_data);
+    free (fcup_response_url);
+
+    /* request the next media playlist on the reverse channel, or start playback when done */
+    int num_uri = get_num_media_uri(conn->raop->airplay_video);
+    int uri_num = get_next_media_uri_id(conn->raop->airplay_video);
+    if (uri_num < num_uri) {
+        fcup_request((void *) conn, get_media_uri_by_num(conn->raop->airplay_video, uri_num),
+                     apple_session_id,
+                     get_next_FCUP_RequestID(conn->raop->airplay_video));
+        set_next_media_uri_id(conn->raop->airplay_video, ++uri_num);
+    } else {
+        /* NOTE(review): strcat appends into the buffer returned by get_uri_local_prefix;
+         * assumes it has room for "/master.m3u8" and is reset each session -- TODO confirm */
+        char * uri_local_prefix = get_uri_local_prefix(conn->raop->airplay_video);
+        conn->raop->callbacks.on_video_play(conn->raop->callbacks.cls,
+                                            strcat(uri_local_prefix, "/master.m3u8"),
+                                            get_start_position_seconds(conn->raop->airplay_video));
+    }
+
+ finish:
+    plist_free(req_root_node);
+    return;
+
+ post_action_error:;
+    http_response_init(response, "HTTP/1.1", 400, "Bad Request");
+
+    if (req_root_node) {
+        plist_free(req_root_node);
+    }
+
+}
+
+/* The POST /play request from the Client to Server on the AirPlay http channel contains (among other information)
+   the "Content Location" that specifies the HLS Playlists for the video to be streamed, as well as the video
+   "start position in seconds".  Once this request is received by the Server, the Server sends a POST /event
+   "FCUP Request" request to the Client on the reverse http channel, to request the HLS Master Playlist */
+
+static void
+http_handler_play(raop_conn_t *conn, http_request_t *request, http_response_t *response,
+                  char **response_data, int *response_datalen) {
+
+    char* playback_location = NULL;
+    plist_t req_root_node = NULL;
+    float start_position_seconds = 0.0f;
+    bool data_is_binary_plist = false;
+    bool data_is_text = false;
+    bool data_is_octet = false;
+
+    logger_log(conn->raop->logger, LOGGER_DEBUG, "http_handler_play");
+
+    /* the request must carry the X-Apple-Session-ID of the current AirPlay video session */
+    const char* session_id = http_request_get_header(request, "X-Apple-Session-ID");
+    if (!session_id) {
+        logger_log(conn->raop->logger, LOGGER_ERR, "Play request had no X-Apple-Session-ID");
+        goto play_error;
+    }
+    const char *apple_session_id = get_apple_session_id(conn->raop->airplay_video);
+    if (strcmp(session_id, apple_session_id)){
+        logger_log(conn->raop->logger, LOGGER_ERR, "X-Apple-Session-ID has changed:\n was:\"%s\"\n now:\"%s\"",
+                   apple_session_id, session_id);
+        goto play_error;
+    }
+
+    int request_datalen = -1;
+    const char *request_data = http_request_get_data(request, &request_datalen);
+
+    /* classify the request body from the Content-Type found in the header string */
+    if (request_datalen > 0) {
+        char *header_str = NULL;
+        http_request_get_header_string(request, &header_str);
+        if (header_str) {
+            /* guard: http_request_get_header_string can leave header_str NULL */
+            logger_log(conn->raop->logger, LOGGER_DEBUG, "request header:\n%s", header_str);
+            data_is_binary_plist = (strstr(header_str, "x-apple-binary-plist") != NULL);
+            data_is_text = (strstr(header_str, "text/parameters") != NULL);
+            data_is_octet = (strstr(header_str, "octet-stream") != NULL);
+            free (header_str);
+        }
+    }
+    if (!data_is_text && !data_is_octet && !data_is_binary_plist) {
+        goto play_error;
+    }
+
+    if (data_is_text) {
+        logger_log(conn->raop->logger, LOGGER_ERR, "Play request Content is text (unsupported)");
+        goto play_error;
+    }
+
+    if (data_is_octet) {
+        logger_log(conn->raop->logger, LOGGER_ERR, "Play request Content is octet-stream (unsupported)");
+        goto play_error;
+    }
+
+    if (data_is_binary_plist) {
+        plist_from_bin(request_data, request_datalen, &req_root_node);
+
+        /* "uuid": the client's playback session uuid */
+        plist_t req_uuid_node = plist_dict_get_item(req_root_node, "uuid");
+        if (!req_uuid_node) {
+            goto play_error;
+        } else {
+            char* playback_uuid = NULL;
+            plist_get_string_val(req_uuid_node, &playback_uuid);
+            if (!playback_uuid) {
+                goto play_error;
+            }
+            set_playback_uuid(conn->raop->airplay_video, playback_uuid);
+            free (playback_uuid);
+        }
+
+        /* "Content-Location": the uri of the HLS master playlist */
+        plist_t req_content_location_node = plist_dict_get_item(req_root_node, "Content-Location");
+        if (!req_content_location_node) {
+            goto play_error;
+        } else {
+            plist_get_string_val(req_content_location_node, &playback_location);
+            if (!playback_location) {
+                /* node was present but not a string: playback_location stays NULL */
+                goto play_error;
+            }
+        }
+
+        /* "Start-Position-Seconds" is optional: defaults to 0.0 */
+        plist_t req_start_position_seconds_node = plist_dict_get_item(req_root_node, "Start-Position-Seconds");
+        if (!req_start_position_seconds_node) {
+            logger_log(conn->raop->logger, LOGGER_INFO, "No Start-Position-Seconds in Play request");
+        } else {
+            double start_position = 0.0;
+            plist_get_real_val(req_start_position_seconds_node, &start_position);
+            start_position_seconds = (float) start_position;
+        }
+        set_start_position_seconds(conn->raop->airplay_video, start_position_seconds);
+    }
+
+    /* the uri prefix is everything before "/master.m3u8" */
+    char *ptr = strstr(playback_location, "/master.m3u8");
+    if (!ptr) {
+        /* without this guard the pointer subtraction below would be undefined */
+        logger_log(conn->raop->logger, LOGGER_ERR, "Play request Content-Location does not contain \"/master.m3u8\"");
+        goto play_error;
+    }
+    int prefix_len = (int) (ptr - playback_location);
+    set_uri_prefix(conn->raop->airplay_video, playback_location, prefix_len);
+    set_next_media_uri_id(conn->raop->airplay_video, 0);
+    /* ask the client for the master playlist on the reverse http channel */
+    fcup_request((void *) conn, playback_location, apple_session_id, get_next_FCUP_RequestID(conn->raop->airplay_video));
+
+    free (playback_location);
+
+    if (req_root_node) {
+        plist_free(req_root_node);
+    }
+    return;
+
+ play_error:;
+    if (playback_location) {
+        free (playback_location);
+    }
+    if (req_root_node) {
+        plist_free(req_root_node);
+    }
+    logger_log(conn->raop->logger, LOGGER_ERR, "Could not find valid Plist Data for /play, Unhandled");
+    http_response_init(response, "HTTP/1.1", 400, "Bad Request");
+}
+
+/* the HLS handler handles http requests GET /[uri] on the HLS channel from the media player to the Server, asking for
+ (adjusted) copies of Playlists: first the Master Playlist (adjusted to change the uri prefix to
+ "http://localhost:[port]/.......m3u8"), then the Media Playlists that the media player wishes to use.
+ If the client supplied Media playlists with the "YT-EXT-CONDENSED-URI" header, these must be adjusted into
+ the standard uncondensed form before sending with the response. The uri in the request is the uri for the
+ Media Playlist, taken from the Master Playlist, with the uri prefix removed.
+*/
+
+static void
+http_handler_hls(raop_conn_t *conn, http_request_t *request, http_response_t *response,
+                 char **response_data, int *response_datalen) {
+    /* serve (adjusted) copies of the stored HLS playlists to the local media player */
+    const char *method = http_request_get_method(request);
+    if (!method || strcmp(method, "GET")) {
+        /* previously assert(!strcmp(method, "GET")): answer 405 instead of aborting */
+        http_response_init(response, "HTTP/1.1", 405, "Method Not Allowed");
+        return;
+    }
+    const char *url = http_request_get_url(request);
+    const char* upgrade = http_request_get_header(request, "Upgrade");
+    if (upgrade) {
+        //don't accept Upgrade: h2c request ?
+        return;
+    }
+
+    if (url && !strcmp(url, "/master.m3u8")){
+        char * master_playlist = get_master_playlist(conn->raop->airplay_video);
+        if (master_playlist) {
+            size_t len = strlen(master_playlist);
+            char * data = (char *) malloc(len + 1);
+            if (data) {
+                memcpy(data, master_playlist, len);
+                data[len] = '\0';
+                *response_data = data;
+                *response_datalen = (int ) len;
+            }
+        } else {
+            logger_log(conn->raop->logger, LOGGER_ERR, "GET /master.m3u8: no stored master playlist");
+        }
+    } else if (url) {
+        char * media_playlist = NULL;
+        media_playlist = get_media_playlist_by_uri(conn->raop->airplay_video, url);
+        if (media_playlist) {
+            char *data = adjust_yt_condensed_playlist(media_playlist);
+            if (data) {
+                *response_data = data;
+                *response_datalen = (int) strlen(data);
+            }
+        } else {
+            /* previously assert(0): a missing uri now gets a 404 response below
+             * instead of crashing the server */
+            logger_log(conn->raop->logger, LOGGER_ERR, "GET %s: media playlist not found", url);
+        }
+    }
+
+    http_response_add_header(response, "Access-Control-Allow-Headers", "Content-type");
+    http_response_add_header(response, "Access-Control-Allow-Origin", "*");
+    const char *date;
+    date = gmt_time_string();
+    http_response_add_header(response, "Date", date);
+    if (*response_datalen > 0) {
+        http_response_add_header(response, "Content-Type", "application/x-mpegURL; charset=utf-8");
+    } else if (*response_datalen == 0) {
+        http_response_init(response, "HTTP/1.1", 404, "Not Found");
+    }
+}
diff --git a/lib/http_request.c b/lib/http_request.c
index 5a6343a..76bece6 100644
--- a/lib/http_request.c
+++ b/lib/http_request.c
@@ -27,6 +27,7 @@ struct http_request_s {
llhttp_t parser;
llhttp_settings_t parser_settings;
+ bool is_reverse; // if true, this is a reverse-response from client
const char *method;
char *url;
char protocol[9];
@@ -160,7 +161,7 @@ http_request_init(void)
llhttp_init(&request->parser, HTTP_REQUEST, &request->parser_settings);
request->parser.data = request;
-
+ request->is_reverse = false;
return request;
}
@@ -206,6 +207,9 @@ int
http_request_has_error(http_request_t *request)
{
assert(request);
+ if (request->is_reverse) {
+ return 0;
+ }
return (llhttp_get_errno(&request->parser) != HPE_OK);
}
@@ -213,6 +217,9 @@ const char *
http_request_get_error_name(http_request_t *request)
{
assert(request);
+ if (request->is_reverse) {
+ return NULL;
+ }
return llhttp_errno_name(llhttp_get_errno(&request->parser));
}
@@ -220,6 +227,9 @@ const char *
http_request_get_error_description(http_request_t *request)
{
assert(request);
+ if (request->is_reverse) {
+ return NULL;
+ }
return llhttp_get_error_reason(&request->parser);
}
@@ -227,6 +237,9 @@ const char *
http_request_get_method(http_request_t *request)
{
assert(request);
+ if (request->is_reverse) {
+ return NULL;
+ }
return request->method;
}
@@ -234,6 +247,9 @@ const char *
http_request_get_url(http_request_t *request)
{
assert(request);
+ if (request->is_reverse) {
+ return NULL;
+ }
return request->url;
}
@@ -241,6 +257,9 @@ const char *
http_request_get_protocol(http_request_t *request)
{
assert(request);
+ if (request->is_reverse) {
+ return NULL;
+ }
return request->protocol;
}
@@ -250,6 +269,9 @@ http_request_get_header(http_request_t *request, const char *name)
int i;
assert(request);
+ if (request->is_reverse) {
+ return NULL;
+ }
for (i=0; iheaders_size; i+=2) {
if (!strcmp(request->headers[i], name)) {
@@ -263,7 +285,6 @@ const char *
http_request_get_data(http_request_t *request, int *datalen)
{
assert(request);
-
if (datalen) {
*datalen = request->datalen;
}
@@ -277,6 +298,10 @@ http_request_get_header_string(http_request_t *request, char **header_str)
*header_str = NULL;
return 0;
}
+ if (request->is_reverse) {
+ *header_str = NULL;
+ return 0;
+ }
int len = 0;
for (int i = 0; i < request->headers_size; i++) {
len += strlen(request->headers[i]);
@@ -309,3 +334,11 @@ http_request_get_header_string(http_request_t *request, char **header_str)
assert(p == &(str[len]));
return len;
}
+
+/* true when this "request" is actually a reverse-HTTP (PTTH/1.0) response from the
+ * client, which must bypass the llhttp request parser */
+bool http_request_is_reverse(http_request_t *request) {
+    assert(request);    /* every other accessor in this file asserts request */
+    return request->is_reverse;
+}
+
+/* mark this request as a reverse-HTTP (PTTH/1.0) response from the client */
+void http_request_set_reverse(http_request_t *request) {
+    assert(request);
+    request->is_reverse = true;
+}
diff --git a/lib/http_request.h b/lib/http_request.h
index d72a6f1..dd13680 100644
--- a/lib/http_request.h
+++ b/lib/http_request.h
@@ -15,8 +15,9 @@
#ifndef HTTP_REQUEST_H
#define HTTP_REQUEST_H
-typedef struct http_request_s http_request_t;
+#include
+typedef struct http_request_s http_request_t;
http_request_t *http_request_init(void);
@@ -32,6 +33,8 @@ const char *http_request_get_protocol(http_request_t *request);
const char *http_request_get_header(http_request_t *request, const char *name);
const char *http_request_get_data(http_request_t *request, int *datalen);
int http_request_get_header_string(http_request_t *request, char **header_str);
+bool http_request_is_reverse(http_request_t *request);
+void http_request_set_reverse(http_request_t *request);
void http_request_destroy(http_request_t *request);
diff --git a/lib/http_response.c b/lib/http_response.c
index 50f1d89..a194338 100644
--- a/lib/http_response.c
+++ b/lib/http_response.c
@@ -91,6 +91,21 @@ http_response_init(http_response_t *response, const char *protocol, int code, co
http_response_add_data(response, "\r\n", 2);
}
+void
+http_response_reverse_request_init(http_response_t *request, const char *method, const char *url, const char *protocol)
+{
+    /* Reuse a response object as a reverse-HTTP (PTTH/1.0) request: reset its data
+     * buffer, then emit the request line "METHOD url PROTOCOL\r\n". */
+    assert(request);
+    request->data_length = 0;
+
+    const char *parts[] = { method, " ", url, " ", protocol, "\r\n" };
+    for (size_t i = 0; i < sizeof(parts) / sizeof(parts[0]); i++) {
+        http_response_add_data(request, parts[i], strlen(parts[i]));
+    }
+}
+
void
http_response_destroy(http_response_t *response)
{
diff --git a/lib/http_response.h b/lib/http_response.h
index fcc7899..e34ba0c 100644
--- a/lib/http_response.h
+++ b/lib/http_response.h
@@ -22,6 +22,8 @@ typedef struct http_response_s http_response_t;
http_response_t *http_response_create();
void http_response_init(http_response_t *response, const char *protocol, int code, const char *message);
+void http_response_reverse_request_init(http_response_t *request, const char *method, const char *url,
+ const char *protocol);
void http_response_add_header(http_response_t *response, const char *name, const char *value);
void http_response_finish(http_response_t *response, const char *data, int datalen);
diff --git a/lib/httpd.c b/lib/httpd.c
index 7140e2e..1799e34 100644
--- a/lib/httpd.c
+++ b/lib/httpd.c
@@ -20,12 +20,24 @@
#include
#include
#include
+#include
#include "httpd.h"
#include "netutils.h"
#include "http_request.h"
#include "compat.h"
#include "logger.h"
+#include "utils.h"
+
+
+/* human-readable names for connection_type_t values, indexed by enum value
+ * (used in httpd debug logging) */
+static const char *typename[] = {
+    [CONNECTION_TYPE_UNKNOWN] = "Unknown",
+    [CONNECTION_TYPE_RAOP] = "RAOP",
+    [CONNECTION_TYPE_AIRPLAY] = "AirPlay",
+    [CONNECTION_TYPE_PTTH] = "AirPlay (reversed)",
+    [CONNECTION_TYPE_HLS] = "HLS"
+};
+
struct http_connection_s {
int connected;
@@ -57,6 +69,20 @@ struct httpd_s {
int server_fd6;
};
+int
+httpd_get_connection_socket (httpd_t *httpd, void *user_data) {
+    /* Return the socket fd of the live connection whose user_data matches,
+     * or -1 if no such connection exists. */
+    for (int i = 0; i < httpd->max_connections; i++) {
+        http_connection_t *conn = &httpd->connections[i];
+        if (conn->connected && conn->user_data == user_data) {
+            return conn->socket_fd;
+        }
+    }
+    return -1;
+}
+
int
httpd_set_connection_type (httpd_t *httpd, void *user_data, connection_type_t type) {
for (int i = 0; i < httpd->max_connections; i++) {
@@ -87,6 +113,42 @@ httpd_count_connection_type (httpd_t *httpd, connection_type_t type) {
return count;
}
+int
+httpd_get_connection_socket_by_type (httpd_t *httpd, connection_type_t type, int instance){
+    /* Return the socket fd of the instance'th (1-based) live connection of the
+     * given type, or 0 if fewer than instance such connections exist. */
+    int seen = 0;
+    for (int i = 0; i < httpd->max_connections; i++) {
+        http_connection_t *conn = &httpd->connections[i];
+        if (!conn->connected || conn->type != type) {
+            continue;
+        }
+        if (++seen == instance) {
+            return conn->socket_fd;
+        }
+    }
+    return 0;
+}
+
+void *
+httpd_get_connection_by_type (httpd_t *httpd, connection_type_t type, int instance){
+    /* Return the user_data of the instance'th (1-based) live connection of the
+     * given type, or NULL if fewer than instance such connections exist. */
+    int seen = 0;
+    for (int i = 0; i < httpd->max_connections; i++) {
+        http_connection_t *conn = &httpd->connections[i];
+        if (!conn->connected || conn->type != type) {
+            continue;
+        }
+        if (++seen == instance) {
+            return conn->user_data;
+        }
+    }
+    return NULL;
+}
+
#define MAX_CONNECTIONS 12 /* value used in AppleTV 3*/
httpd_t *
httpd_init(logger_t *logger, httpd_callbacks_t *callbacks, int nohold)
@@ -101,7 +163,6 @@ httpd_init(logger_t *logger, httpd_callbacks_t *callbacks, int nohold)
return NULL;
}
-
httpd->nohold = (nohold ? 1 : 0);
httpd->max_connections = MAX_CONNECTIONS;
httpd->connections = calloc(httpd->max_connections, sizeof(http_connection_t));
@@ -213,7 +274,7 @@ httpd_accept_connection(httpd_t *httpd, int server_fd, int is_ipv6)
local = netutils_get_address(&local_saddr, &local_len, &local_zone_id);
remote = netutils_get_address(&remote_saddr, &remote_len, &remote_zone_id);
assert (local_zone_id == remote_zone_id);
-
+
ret = httpd_add_connection(httpd, fd, local, local_len, remote, remote_len, local_zone_id);
if (ret == -1) {
shutdown(fd, SHUT_RDWR);
@@ -235,7 +296,7 @@ httpd_remove_known_connections(httpd_t *httpd) {
if (!connection->connected || connection->type == CONNECTION_TYPE_UNKNOWN) {
continue;
}
- httpd_remove_connection(httpd, connection);
+ httpd_remove_connection(httpd, connection);
}
}
@@ -243,10 +304,11 @@ static THREAD_RETVAL
httpd_thread(void *arg)
{
httpd_t *httpd = arg;
+ char http[] = "HTTP/1.1";
char buffer[1024];
int i;
+
bool logger_debug = (logger_get_level(httpd->logger) >= LOGGER_DEBUG);
-
assert(httpd);
while (1) {
@@ -254,6 +316,7 @@ httpd_thread(void *arg)
struct timeval tv;
int nfds=0;
int ret;
+ int new_request;
MUTEX_LOCK(httpd->run_mutex);
if (!httpd->running) {
@@ -299,7 +362,7 @@ httpd_thread(void *arg)
/* Timeout happened */
continue;
} else if (ret == -1) {
- logger_log(httpd->logger, LOGGER_ERR, "httpd error in select");
+ logger_log(httpd->logger, LOGGER_ERR, "httpd error in select: %d %s", errno, strerror(errno));
break;
}
@@ -337,20 +400,93 @@ httpd_thread(void *arg)
if (!connection->request) {
connection->request = http_request_init();
assert(connection->request);
- }
+                new_request = 1;
+                if (connection->type == CONNECTION_TYPE_PTTH) {
+                    /* mark requests arriving on the reversed (PTTH) connection so the
+                     * client's responses bypass the llhttp parser; this must call the
+                     * setter (the getter has no side effect and its result was discarded) */
+                    http_request_set_reverse(connection->request);
+                }
+                logger_log(httpd->logger, LOGGER_DEBUG, "new request, connection %d, socket %d type %s",
+                           i, connection->socket_fd, typename [connection->type]);
+            } else {
+                new_request = 0;
+            }
- logger_log(httpd->logger, LOGGER_DEBUG, "httpd receiving on socket %d, connection %d", connection->socket_fd, i);
- ret = recv(connection->socket_fd, buffer, sizeof(buffer), 0);
- if (ret == 0) {
- logger_log(httpd->logger, LOGGER_INFO, "Connection closed for socket %d", connection->socket_fd);
- httpd_remove_connection(httpd, connection);
+ logger_log(httpd->logger, LOGGER_DEBUG, "httpd receiving on socket %d, connection %d",
+ connection->socket_fd, i);
+ if (logger_debug) {
+ printf("\nhttpd: current connections:\n");
+ for (int i = 0; i < httpd->max_connections; i++) {
+ http_connection_t *connection = &httpd->connections[i];
+ if(!connection->connected) {
+ continue;
+ }
+ if (!FD_ISSET(connection->socket_fd, &rfds)) {
+ printf("connection %d type %d socket %d conn %p %s\n", i,
+ connection->type, connection->socket_fd,
+ connection->user_data, typename [connection->type]);
+ } else {
+ printf("connection %d type %d socket %d conn %p %s ACTIVE CONNECTION\n", i, connection->type,
+ connection->socket_fd, connection->user_data, typename [connection->type]);
+ }
+ }
+ printf("\n");
+ }
+ /* reverse-http responses from the client must not be sent to the llhttp parser:
+ * such messages start with "HTTP/1.1" */
+ if (new_request) {
+ int readstart = 0;
+ new_request = 0;
+ while (readstart < 8) {
+ ret = recv(connection->socket_fd, buffer + readstart, sizeof(buffer) - 1 - readstart, 0);
+ if (ret == 0) {
+ logger_log(httpd->logger, LOGGER_INFO, "Connection closed for socket %d",
+ connection->socket_fd);
+ break;
+ } else if (ret == -1) {
+ if (errno == EAGAIN) {
+ continue;
+ } else {
+ int sock_err = SOCKET_GET_ERROR();
+ logger_log(httpd->logger, LOGGER_ERR, "httpd: recv socket error %d:%s",
+ sock_err, strerror(sock_err));
+ break;
+ }
+ } else {
+ readstart += ret;
+ ret = readstart;
+ }
+ }
+ if (!memcmp(buffer, http, 8)) {
+ http_request_set_reverse(connection->request);
+ }
+ } else {
+ ret = recv(connection->socket_fd, buffer, sizeof(buffer) - 1, 0);
+ if (ret == 0) {
+ logger_log(httpd->logger, LOGGER_INFO, "Connection closed for socket %d",
+ connection->socket_fd);
+ httpd_remove_connection(httpd, connection);
+ continue;
+ }
+ }
+ if (http_request_is_reverse(connection->request)) {
+ /* this is a response from the client to a
+ * GET /event reverse HTTP request from the server */
+ if (ret && logger_debug) {
+ buffer[ret] = '\0';
+ logger_log(httpd->logger, LOGGER_INFO, "<<<< received response from client"
+ " (reversed HTTP = \"PTTH/1.0\") connection"
+ " on socket %d:\n%s\n", connection->socket_fd, buffer);
+ }
+ if (ret == 0) {
+ httpd_remove_connection(httpd, connection);
+ }
continue;
}
/* Parse HTTP request from data read from connection */
http_request_add_data(connection->request, buffer, ret);
if (http_request_has_error(connection->request)) {
- logger_log(httpd->logger, LOGGER_ERR, "httpd error in parsing: %s", http_request_get_error_name(connection->request));
+ logger_log(httpd->logger, LOGGER_ERR, "httpd error in parsing: %s",
+ http_request_get_error_name(connection->request));
httpd_remove_connection(httpd, connection);
continue;
}
@@ -359,12 +495,13 @@ httpd_thread(void *arg)
if (http_request_is_complete(connection->request)) {
http_response_t *response = NULL;
// Callback the received data to raop
- if (logger_debug) {
+ if (logger_debug) {
const char *method = http_request_get_method(connection->request);
const char *url = http_request_get_url(connection->request);
const char *protocol = http_request_get_protocol(connection->request);
- logger_log(httpd->logger, LOGGER_INFO, "httpd request received on socket %d, connection %d, "
- "method = %s, url = %s, protocol = %s", connection->socket_fd, i, method, url, protocol);
+ logger_log(httpd->logger, LOGGER_INFO, "httpd request received on socket %d, "
+ "connection %d, method = %s, url = %s, protocol = %s",
+ connection->socket_fd, i, method, url, protocol);
}
httpd->callbacks.conn_request(connection->user_data, connection->request, &response);
http_request_destroy(connection->request);
diff --git a/lib/httpd.h b/lib/httpd.h
index f64d166..e00a950 100644
--- a/lib/httpd.h
+++ b/lib/httpd.h
@@ -23,7 +23,10 @@ typedef struct httpd_s httpd_t;
typedef enum connectype_type_e {
CONNECTION_TYPE_UNKNOWN,
- CONNECTION_TYPE_RAOP
+ CONNECTION_TYPE_RAOP,
+ CONNECTION_TYPE_AIRPLAY,
+ CONNECTION_TYPE_PTTH,
+ CONNECTION_TYPE_HLS
} connection_type_t;
struct httpd_callbacks_s {
@@ -39,7 +42,9 @@ void httpd_remove_known_connections(httpd_t *httpd);
int httpd_set_connection_type (httpd_t *http, void *user_data, connection_type_t type);
int httpd_count_connection_type (httpd_t *http, connection_type_t type);
-
+int httpd_get_connection_socket (httpd_t *httpd, void *user_data);
+int httpd_get_connection_socket_by_type (httpd_t *httpd, connection_type_t type, int instance);
+void *httpd_get_connection_by_type (httpd_t *httpd, connection_type_t type, int instance);
httpd_t *httpd_init(logger_t *logger, httpd_callbacks_t *callbacks, int nohold);
int httpd_is_running(httpd_t *httpd);
diff --git a/lib/raop.c b/lib/raop.c
index e9d551b..6bd3e0e 100644
--- a/lib/raop.c
+++ b/lib/raop.c
@@ -72,6 +72,12 @@ struct raop_s {
/* public key as string */
char pk_str[2*ED25519_KEY_SIZE + 1];
+
+ /* place to store media_data_store */
+ airplay_video_t *airplay_video;
+
+ /* activate support for HLS live streaming */
+ bool hls_support;
};
struct raop_conn_s {
@@ -81,7 +87,8 @@ struct raop_conn_s {
raop_rtp_mirror_t *raop_rtp_mirror;
fairplay_t *fairplay;
pairing_session_t *session;
-
+ airplay_video_t *airplay_video;
+
unsigned char *local;
int locallen;
@@ -92,11 +99,14 @@ struct raop_conn_s {
connection_type_t connection_type;
+ char *client_session_id;
+
bool have_active_remote;
};
typedef struct raop_conn_s raop_conn_t;
#include "raop_handlers.h"
+#include "http_handlers.h"
static void *
conn_init(void *opaque, unsigned char *local, int locallen, unsigned char *remote, int remotelen, unsigned int zone_id) {
@@ -147,6 +157,9 @@ conn_init(void *opaque, unsigned char *local, int locallen, unsigned char *remot
conn->remotelen = remotelen;
conn->connection_type = CONNECTION_TYPE_UNKNOWN;
+ conn->client_session_id = NULL;
+ conn->airplay_video = NULL;
+
conn->have_active_remote = false;
@@ -162,35 +175,110 @@ conn_request(void *ptr, http_request_t *request, http_response_t **response) {
char *response_data = NULL;
int response_datalen = 0;
raop_conn_t *conn = ptr;
-
+ bool hls_request = false;
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "conn_request");
bool logger_debug = (logger_get_level(conn->raop->logger) >= LOGGER_DEBUG);
+ /*
+ All requests arriving here have been parsed by llhttp to obtain
+ method | url | protocol (RTSP/1.0 or HTTP/1.1)
+
+ There are three types of connections supplying these requests:
+ Connections from the AirPlay client:
+      (1) type RAOP connections with a CSeq sequence header, and no X-Apple-Session-ID header
+      (2) type AIRPLAY connections with an X-Apple-Session-ID header and no CSeq header
+ Connections from localhost:
+ (3) type HLS internal connections from the local HLS server (gstreamer) at localhost with neither
+ of these headers, but a Host: localhost:[port] header. method = GET.
+ */
+
const char *method = http_request_get_method(request);
- const char *url = http_request_get_url(request);
- const char *protocol = http_request_get_protocol(request);
+
+ if (!method) {
+ return;
+ }
+
+/* this rejects messages from _airplay._tcp for video streaming protocol unless bool raop->hls_support is true*/
const char *cseq = http_request_get_header(request, "CSeq");
+ const char *protocol = http_request_get_protocol(request);
+ if (!cseq && !conn->raop->hls_support) {
+ logger_log(conn->raop->logger, LOGGER_INFO, "ignoring AirPlay video streaming request (use option -hls to activate HLS support)");
+ return;
+ }
+
+ const char *url = http_request_get_url(request);
+ const char *client_session_id = http_request_get_header(request, "X-Apple-Session-ID");
+ const char *host = http_request_get_header(request, "Host");
+ hls_request = (host && !cseq && !client_session_id);
if (conn->connection_type == CONNECTION_TYPE_UNKNOWN) {
- if (httpd_count_connection_type(conn->raop->httpd, CONNECTION_TYPE_RAOP)) {
- char ipaddr[40];
- utils_ipaddress_to_string(conn->remotelen, conn->remote, conn->zone_id, ipaddr, (int) (sizeof(ipaddr)));
- if (httpd_nohold(conn->raop->httpd)) {
- logger_log(conn->raop->logger, LOGGER_INFO, "\"nohold\" feature: switch to new connection request from %s", ipaddr);
- if (conn->raop->callbacks.video_reset) {
- printf("**************************video_reset*************************\n");
- conn->raop->callbacks.video_reset(conn->raop->callbacks.cls);
- }
- httpd_remove_known_connections(conn->raop->httpd);
+ if (cseq) {
+ if (httpd_count_connection_type(conn->raop->httpd, CONNECTION_TYPE_RAOP)) {
+ char ipaddr[40];
+ utils_ipaddress_to_string(conn->remotelen, conn->remote, conn->zone_id, ipaddr, (int) (sizeof(ipaddr)));
+ if (httpd_nohold(conn->raop->httpd)) {
+ logger_log(conn->raop->logger, LOGGER_INFO, "\"nohold\" feature: switch to new connection request from %s", ipaddr);
+ if (conn->raop->callbacks.video_reset) {
+ conn->raop->callbacks.video_reset(conn->raop->callbacks.cls);
+ }
+ httpd_remove_known_connections(conn->raop->httpd);
+ } else {
+ logger_log(conn->raop->logger, LOGGER_WARNING, "rejecting new connection request from %s", ipaddr);
+ *response = http_response_create();
+ http_response_init(*response, protocol, 409, "Conflict: Server is connected to another client");
+ goto finish;
+ }
+ }
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "New connection %p identified as Connection type RAOP", ptr);
+ httpd_set_connection_type(conn->raop->httpd, ptr, CONNECTION_TYPE_RAOP);
+ conn->connection_type = CONNECTION_TYPE_RAOP;
+ } else if (client_session_id) {
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "New connection %p identified as Connection type AirPlay", ptr);
+ httpd_set_connection_type(conn->raop->httpd, ptr, CONNECTION_TYPE_AIRPLAY);
+ conn->connection_type = CONNECTION_TYPE_AIRPLAY;
+ size_t len = strlen(client_session_id) + 1;
+ conn->client_session_id = (char *) malloc(len);
+ strncpy(conn->client_session_id, client_session_id, len);
+ /* airplay video has been requested: shut down any running RAOP udp services */
+ raop_conn_t *raop_conn = (raop_conn_t *) httpd_get_connection_by_type(conn->raop->httpd, CONNECTION_TYPE_RAOP, 1);
+ if (raop_conn) {
+ raop_rtp_mirror_t *raop_rtp_mirror = raop_conn->raop_rtp_mirror;
+ if (raop_rtp_mirror) {
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "New AirPlay connection: stopping RAOP mirror"
+ " service on RAOP connection %p", raop_conn);
+ raop_rtp_mirror_stop(raop_rtp_mirror);
+ }
- } else {
- logger_log(conn->raop->logger, LOGGER_WARNING, "rejecting new connection request from %s", ipaddr);
- *response = http_response_create();
- http_response_init(*response, protocol, 409, "Conflict: Server is connected to another client");
- goto finish;
- }
- }
- httpd_set_connection_type(conn->raop->httpd, ptr, CONNECTION_TYPE_RAOP);
- conn->connection_type = CONNECTION_TYPE_RAOP;
+ raop_rtp_t *raop_rtp = raop_conn->raop_rtp;
+ if (raop_rtp) {
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "New AirPlay connection: stopping RAOP audio"
+ " service on RAOP connection %p", raop_conn);
+ raop_rtp_stop(raop_rtp);
+ }
+
+ raop_ntp_t *raop_ntp = raop_conn->raop_ntp;
+ if (raop_ntp) {
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "New AirPlay connection: stopping NTP time"
+ " service on RAOP connection %p", raop_conn);
+ raop_ntp_stop(raop_ntp);
+ }
+ }
+ } else if (host) {
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "New connection %p identified as Connection type HLS", ptr);
+ httpd_set_connection_type(conn->raop->httpd, ptr, CONNECTION_TYPE_HLS);
+ conn->connection_type = CONNECTION_TYPE_HLS;
+ } else {
+ logger_log(conn->raop->logger, LOGGER_WARNING, "connection from unknown connection type");
+ }
+ }
+
+ /* this response code and message will be modified by the handler if necessary */
+ *response = http_response_create();
+ http_response_init(*response, protocol, 200, "OK");
+
+ /* is this really necessary? or is it obsolete? (added for all RTSP requests EXCEPT "RECORD") */
+ if (cseq && strcmp(method, "RECORD")) {
+ http_response_add_header(*response, "Audio-Jack-Status", "connected; type=digital");
}
if (!conn->have_active_remote) {
@@ -204,15 +292,6 @@ conn_request(void *ptr, http_request_t *request, http_response_t **response) {
}
}
- if (!method) {
- return;
- }
-
- /* this rejects unsupported messages from _airplay._tcp for video streaming protocol*/
- if (!cseq) {
- return;
- }
-
logger_log(conn->raop->logger, LOGGER_DEBUG, "\n%s %s %s", method, url, protocol);
char *header_str= NULL;
http_request_get_header_string(request, &header_str);
@@ -225,74 +304,125 @@ conn_request(void *ptr, http_request_t *request, http_response_t **response) {
const char *request_data = http_request_get_data(request, &request_datalen);
if (request_data && logger_debug) {
if (request_datalen > 0) {
+ /* logger has a buffer limit of 4096 */
if (data_is_plist) {
- plist_t req_root_node = NULL;
+ plist_t req_root_node = NULL;
plist_from_bin(request_data, request_datalen, &req_root_node);
char * plist_xml;
uint32_t plist_len;
plist_to_xml(req_root_node, &plist_xml, &plist_len);
- logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", plist_xml);
+ printf("%s\n",plist_xml);
+ //logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", plist_xml);
free(plist_xml);
plist_free(req_root_node);
} else if (data_is_text) {
char *data_str = utils_data_to_text((char *) request_data, request_datalen);
- logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", data_str);
+ printf("%s\n", data_str);
+ //logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", data_str);
free(data_str);
} else {
char *data_str = utils_data_to_string((unsigned char *) request_data, request_datalen, 16);
- logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", data_str);
+ printf("%s\n", data_str);
+ //logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", data_str);
free(data_str);
}
}
}
}
- *response = http_response_create();
- http_response_init(*response, protocol, 200, "OK");
-
- //http_response_add_header(*response, "Apple-Jack-Status", "connected; type=analog");
-
+ if (client_session_id) {
+ assert(conn->client_session_id && !strcmp(client_session_id, conn->client_session_id));
+ }
logger_log(conn->raop->logger, LOGGER_DEBUG, "Handling request %s with URL %s", method, url);
raop_handler_t handler = NULL;
- if (!strcmp(method, "GET") && !strcmp(url, "/info")) {
- handler = &raop_handler_info;
- } else if (!strcmp(method, "POST") && !strcmp(url, "/pair-pin-start")) {
- handler = &raop_handler_pairpinstart;
- } else if (!strcmp(method, "POST") && !strcmp(url, "/pair-setup-pin")) {
- handler = &raop_handler_pairsetup_pin;
- } else if (!strcmp(method, "POST") && !strcmp(url, "/pair-setup")) {
- handler = &raop_handler_pairsetup;
- } else if (!strcmp(method, "POST") && !strcmp(url, "/pair-verify")) {
- handler = &raop_handler_pairverify;
- } else if (!strcmp(method, "POST") && !strcmp(url, "/fp-setup")) {
- handler = &raop_handler_fpsetup;
- } else if (!strcmp(method, "OPTIONS")) {
- handler = &raop_handler_options;
- } else if (!strcmp(method, "SETUP")) {
- handler = &raop_handler_setup;
- } else if (!strcmp(method, "GET_PARAMETER")) {
- handler = &raop_handler_get_parameter;
- } else if (!strcmp(method, "SET_PARAMETER")) {
- handler = &raop_handler_set_parameter;
- } else if (!strcmp(method, "POST") && !strcmp(url, "/feedback")) {
- handler = &raop_handler_feedback;
- } else if (!strcmp(method, "RECORD")) {
- handler = &raop_handler_record;
- } else if (!strcmp(method, "FLUSH")) {
- handler = &raop_handler_flush;
- } else if (!strcmp(method, "TEARDOWN")) {
- handler = &raop_handler_teardown;
- } else {
- logger_log(conn->raop->logger, LOGGER_INFO, "Unhandled Client Request: %s %s", method, url);
+ if (!hls_request && !strcmp(protocol, "RTSP/1.0")) {
+ if (!strcmp(method, "POST")) {
+ if (!strcmp(url, "/feedback")) {
+ handler = &raop_handler_feedback;
+ } else if (!strcmp(url, "/pair-pin-start")) {
+ handler = &raop_handler_pairpinstart;
+ } else if (!strcmp(url, "/pair-setup-pin")) {
+ handler = &raop_handler_pairsetup_pin;
+ } else if (!strcmp(url, "/pair-setup")) {
+ handler = &raop_handler_pairsetup;
+ } else if (!strcmp(url, "/pair-verify")) {
+ handler = &raop_handler_pairverify;
+ } else if (!strcmp(url, "/fp-setup")) {
+ handler = &raop_handler_fpsetup;
+ } else if (!strcmp(url, "/getProperty")) {
+ handler = &http_handler_get_property;
+ } else if (!strcmp(url, "/audioMode")) {
+ //handler = &http_handler_audioMode;
+ }
+ } else if (!strcmp(method, "GET")) {
+ if (!strcmp(url, "/info")) {
+ handler = &raop_handler_info;
+ }
+ } else if (!strcmp(method, "OPTIONS")) {
+ handler = &raop_handler_options;
+ } else if (!strcmp(method, "SETUP")) {
+ handler = &raop_handler_setup;
+ } else if (!strcmp(method, "GET_PARAMETER")) {
+ handler = &raop_handler_get_parameter;
+ } else if (!strcmp(method, "SET_PARAMETER")) {
+ handler = &raop_handler_set_parameter;
+ } else if (!strcmp(method, "RECORD")) {
+ handler = &raop_handler_record;
+ } else if (!strcmp(method, "FLUSH")) {
+ handler = &raop_handler_flush;
+ } else if (!strcmp(method, "TEARDOWN")) {
+ handler = &raop_handler_teardown;
+ }
+ } else if (!hls_request && !strcmp(protocol, "HTTP/1.1")) {
+ if (!strcmp(method, "POST")) {
+ if (!strcmp(url, "/reverse")) {
+ handler = &http_handler_reverse;
+ } else if (!strcmp(url, "/play")) {
+ handler = &http_handler_play;
+ } else if (!strncmp (url, "/getProperty?", strlen("/getProperty?"))) {
+ handler = &http_handler_get_property;
+ } else if (!strncmp(url, "/scrub?", strlen("/scrub?"))) {
+ handler = &http_handler_scrub;
+ } else if (!strncmp(url, "/rate?", strlen("/rate?"))) {
+ handler = &http_handler_rate;
+ } else if (!strcmp(url, "/stop")) {
+ handler = &http_handler_stop;
+ } else if (!strcmp(url, "/action")) {
+ handler = &http_handler_action;
+ } else if (!strcmp(url, "/fp-setup2")) {
+ handler = &http_handler_fpsetup2;
+ }
+ } else if (!strcmp(method, "GET")) {
+ if (!strcmp(url, "/server-info")) {
+ handler = &http_handler_server_info;
+ } else if (!strcmp(url, "/playback-info")) {
+ handler = &http_handler_playback_info;
+ }
+ } else if (!strcmp(method, "PUT")) {
+ if (!strncmp (url, "/setProperty?", strlen("/setProperty?"))) {
+ handler = &http_handler_set_property;
+ } else {
+ }
+ }
+ } else if (hls_request) {
+ handler = &http_handler_hls;
}
if (handler != NULL) {
handler(conn, request, *response, &response_data, &response_datalen);
+ } else {
+ logger_log(conn->raop->logger, LOGGER_INFO,
+ "Unhandled Client Request: %s %s %s", method, url, protocol);
}
+
finish:;
- http_response_add_header(*response, "Server", "AirTunes/"GLOBAL_VERSION);
- http_response_add_header(*response, "CSeq", cseq);
+ if (!hls_request) {
+ http_response_add_header(*response, "Server", "AirTunes/"GLOBAL_VERSION);
+ if (cseq) {
+ http_response_add_header(*response, "CSeq", cseq);
+ }
+ }
http_response_finish(*response, response_data, response_datalen);
int len;
@@ -304,11 +434,14 @@ conn_request(void *ptr, http_request_t *request, http_response_t **response) {
}
header_str = utils_data_to_text(data, len);
logger_log(conn->raop->logger, LOGGER_DEBUG, "\n%s", header_str);
+
bool data_is_plist = (strstr(header_str,"apple-binary-plist") != NULL);
- bool data_is_text = (strstr(header_str,"text/parameters") != NULL);
+ bool data_is_text = (strstr(header_str,"text/") != NULL ||
+ strstr(header_str, "x-mpegURL") != NULL);
free(header_str);
if (response_data) {
if (response_datalen > 0 && logger_debug) {
+ /* logger has a buffer limit of 4096 */
if (data_is_plist) {
plist_t res_root_node = NULL;
plist_from_bin(response_data, response_datalen, &res_root_node);
@@ -316,21 +449,24 @@ conn_request(void *ptr, http_request_t *request, http_response_t **response) {
uint32_t plist_len;
plist_to_xml(res_root_node, &plist_xml, &plist_len);
plist_free(res_root_node);
- logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", plist_xml);
+ printf("%s\n", plist_xml);
+ //logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", plist_xml);
free(plist_xml);
} else if (data_is_text) {
char *data_str = utils_data_to_text((char*) response_data, response_datalen);
- logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", data_str);
+ printf("%s\n", data_str);
+ //logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", data_str);
free(data_str);
} else {
char *data_str = utils_data_to_string((unsigned char *) response_data, response_datalen, 16);
- logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", data_str);
+ printf("%s\n", data_str);
+ //logger_log(conn->raop->logger, LOGGER_DEBUG, "%s", data_str);
free(data_str);
}
}
- free(response_data);
- response_data = NULL;
- response_datalen = 0;
+ if (response_data) {
+ free(response_data);
+ }
}
}
@@ -364,6 +500,13 @@ conn_destroy(void *ptr) {
free(conn->remote);
pairing_session_destroy(conn->session);
fairplay_destroy(conn->fairplay);
+ if (conn->client_session_id) {
+ free(conn->client_session_id);
+ }
+ if (conn->airplay_video) {
+ airplay_video_service_destroy(conn->airplay_video);
+ }
+
free(conn);
}
@@ -420,6 +563,8 @@ raop_init(raop_callbacks_t *callbacks) {
raop->max_ntp_timeouts = 0;
raop->audio_delay_micros = 250000;
+ raop->hls_support = false;
+
return raop;
}
@@ -474,6 +619,7 @@ raop_init2(raop_t *raop, int nohold, const char *device_id, const char *keyfile)
void
raop_destroy(raop_t *raop) {
if (raop) {
+ raop_destroy_airplay_video(raop);
raop_stop(raop);
pairing_destroy(raop->pairing);
httpd_destroy(raop->httpd);
@@ -533,6 +679,8 @@ int raop_set_plist(raop_t *raop, const char *plist_item, const int value) {
} else if (strcmp(plist_item, "pin") == 0) {
raop->pin = value;
raop->use_pin = true;
+ } else if (strcmp(plist_item, "hls") == 0) {
+ raop->hls_support = (value > 0 ? true : false);
} else {
retval = -1;
}
@@ -604,3 +752,27 @@ void raop_remove_known_connections(raop_t * raop) {
httpd_remove_known_connections(raop->httpd);
}
+airplay_video_t *deregister_airplay_video(raop_t *raop) {
+ airplay_video_t *airplay_video = raop->airplay_video;
+ raop->airplay_video = NULL;
+ return airplay_video;
+}
+
+bool register_airplay_video(raop_t *raop, airplay_video_t *airplay_video) {
+ if (raop->airplay_video) {
+ return false;
+ }
+ raop->airplay_video = airplay_video;
+ return true;
+}
+
+airplay_video_t * get_airplay_video(raop_t *raop) {
+ return raop->airplay_video;
+}
+
+void raop_destroy_airplay_video(raop_t *raop) {
+ if (raop->airplay_video) {
+ airplay_video_service_destroy(raop->airplay_video);
+ raop->airplay_video = NULL;
+ }
+}
diff --git a/lib/raop.h b/lib/raop.h
index 1157138..e0a2062 100644
--- a/lib/raop.h
+++ b/lib/raop.h
@@ -21,6 +21,7 @@
#include "dnssd.h"
#include "stream.h"
#include "raop_ntp.h"
+#include "airplay_video.h"
#if defined (WIN32) && defined(DLL_EXPORT)
# define RAOP_API __declspec(dllexport)
@@ -36,12 +37,29 @@ typedef struct raop_s raop_t;
typedef void (*raop_log_callback_t)(void *cls, int level, const char *msg);
+
+typedef struct playback_info_s {
+ //char * uuid;
+ uint32_t stallcount;
+ double duration;
+ double position;
+ float rate;
+ bool ready_to_play;
+ bool playback_buffer_empty;
+ bool playback_buffer_full;
+ bool playback_likely_to_keep_up;
+ int num_loaded_time_ranges;
+ int num_seekable_time_ranges;
+ void *loadedTimeRanges;
+ void *seekableTimeRanges;
+} playback_info_t;
+
typedef enum video_codec_e {
VIDEO_CODEC_UNKNOWN,
VIDEO_CODEC_H264,
VIDEO_CODEC_H265
} video_codec_t;
-
+
struct raop_callbacks_s {
void* cls;
@@ -49,8 +67,7 @@ struct raop_callbacks_s {
void (*video_process)(void *cls, raop_ntp_t *ntp, video_decode_struct *data);
void (*video_pause)(void *cls);
void (*video_resume)(void *cls);
- void (*video_codec) (void *cls, video_codec_t video_codec);
-
+
/* Optional but recommended callback functions */
void (*conn_init)(void *cls);
void (*conn_destroy)(void *cls);
@@ -72,11 +89,25 @@ struct raop_callbacks_s {
void (*export_dacp) (void *cls, const char *active_remote, const char *dacp_id);
void (*video_reset) (void *cls);
void (*video_set_codec)(void *cls, video_codec_t codec);
+ /* for HLS video player controls */
+ void (*on_video_play) (void *cls, const char *location, const float start_position);
+ void (*on_video_scrub) (void *cls, const float position);
+ void (*on_video_rate) (void *cls, const float rate);
+ void (*on_video_stop) (void *cls);
+ void (*on_video_acquire_playback_info) (void *cls, playback_info_t *playback_video);
+
};
typedef struct raop_callbacks_s raop_callbacks_t;
raop_ntp_t *raop_ntp_init(logger_t *logger, raop_callbacks_t *callbacks, const char *remote,
- int remote_addr_len, unsigned short timing_rport, timing_protocol_t *time_protocol);
+ int remote_addr_len, unsigned short timing_rport,
+ timing_protocol_t *time_protocol);
+int airplay_video_service_init(raop_t *raop, unsigned short port, const char *session_id);
+
+bool register_airplay_video(raop_t *raop, airplay_video_t *airplay_video);
+airplay_video_t *get_airplay_video(raop_t *raop);
+airplay_video_t *deregister_airplay_video(raop_t *raop);
+
RAOP_API raop_t *raop_init(raop_callbacks_t *callbacks);
RAOP_API int raop_init2(raop_t *raop, int nohold, const char *device_id, const char *keyfile);
RAOP_API void raop_set_log_level(raop_t *raop, int level);
@@ -93,6 +124,7 @@ RAOP_API void raop_stop(raop_t *raop);
RAOP_API void raop_set_dnssd(raop_t *raop, dnssd_t *dnssd);
RAOP_API void raop_destroy(raop_t *raop);
RAOP_API void raop_remove_known_connections(raop_t * raop);
+RAOP_API void raop_destroy_airplay_video(raop_t *raop);
#ifdef __cplusplus
}
diff --git a/lib/raop_handlers.h b/lib/raop_handlers.h
index 98f62dd..3b2c838 100644
--- a/lib/raop_handlers.h
+++ b/lib/raop_handlers.h
@@ -197,7 +197,6 @@ raop_handler_pairpinstart(raop_conn_t *conn,
logger_log(conn->raop->logger, LOGGER_INFO, "*** CLIENT MUST NOW ENTER PIN = \"%s\" AS AIRPLAY PASSWORD", pin);
*response_data = NULL;
response_datalen = 0;
- return;
}
static void
@@ -749,13 +748,14 @@ raop_handler_setup(raop_conn_t *conn,
conn->raop_rtp_mirror = raop_rtp_mirror_init(conn->raop->logger, &conn->raop->callbacks,
conn->raop_ntp, remote, conn->remotelen, aeskey);
- // plist_t res_event_port_node = plist_new_uint(conn->raop->port);
- plist_t res_event_port_node = plist_new_uint(0);
+ /* the event port is not used in mirror mode or audio mode */
+ unsigned short event_port = 0;
+ plist_t res_event_port_node = plist_new_uint(event_port);
plist_t res_timing_port_node = plist_new_uint(timing_lport);
plist_dict_set_item(res_root_node, "timingPort", res_timing_port_node);
plist_dict_set_item(res_root_node, "eventPort", res_event_port_node);
- logger_log(conn->raop->logger, LOGGER_DEBUG, "eport = %d, tport = %d", 0, timing_lport);
+ logger_log(conn->raop->logger, LOGGER_DEBUG, "eport = %d, tport = %d", event_port, timing_lport);
}
// Process stream setup requests
diff --git a/lib/utils.c b/lib/utils.c
index f6c89f0..8f82186 100644
--- a/lib/utils.c
+++ b/lib/utils.c
@@ -282,3 +282,14 @@ int utils_ipaddress_to_string(int addresslen, const unsigned char *address, unsi
}
return ret;
}
+
+const char *gmt_time_string() {
+ static char date_buf[64];
+ memset(date_buf, 0, 64);
+
+ time_t now = time(0);
+ if (strftime(date_buf, 63, "%c GMT", gmtime(&now)))
+ return date_buf;
+ else
+ return "";
+}
diff --git a/lib/utils.h b/lib/utils.h
index be5db30..82df1f5 100644
--- a/lib/utils.h
+++ b/lib/utils.h
@@ -30,6 +30,7 @@ char *utils_data_to_string(const unsigned char *data, int datalen, int chars_per
char *utils_data_to_text(const char *data, int datalen);
void ntp_timestamp_to_time(uint64_t ntp_timestamp, char *timestamp, size_t maxsize);
void ntp_timestamp_to_seconds(uint64_t ntp_timestamp, char *timestamp, size_t maxsize);
+const char *gmt_time_string();
int utils_ipaddress_to_string(int addresslen, const unsigned char *address,
unsigned int zone_id, char *string, int len);
#endif
diff --git a/renderers/video_renderer.c b/renderers/video_renderer.c
index 3846a9b..53784a8 100644
--- a/renderers/video_renderer.c
+++ b/renderers/video_renderer.c
@@ -3,7 +3,7 @@
* Copyright (C) 2019 Florian Draschbacher
* Modified for:
* UxPlay - An open-source AirPlay mirroring server
- * Copyright (C) 2021-24 F. Duncanh
+ * Copyright (C) 2021-24 F. Duncanh
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -20,10 +20,9 @@
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
-
+#include "video_renderer.h"
#include
#include
-#include "video_renderer.h"
#define SECOND_IN_NSECS 1000000000UL
#ifdef X_DISPLAY_FIX
@@ -31,7 +30,7 @@
#include "x_display_fix.h"
static bool fullscreen = false;
static bool alt_keypress = false;
-static unsigned char X11_search_attempts;
+static unsigned char X11_search_attempts;
#endif
static GstClockTime gst_video_pipeline_base_time = GST_CLOCK_TIME_NONE;
@@ -40,6 +39,10 @@ static unsigned short width, height, width_source, height_source; /* not curren
static bool first_packet = false;
static bool sync = false;
static bool auto_videosink = true;
+static bool hls_video = false;
+#ifdef X_DISPLAY_FIX
+static bool use_x11 = false;
+#endif
static bool logger_debug = false;
static bool video_terminate = false;
@@ -51,6 +54,9 @@ struct video_renderer_s {
const char *codec;
bool autovideo, state_pending;
int id;
+ gboolean terminate;
+ gint64 duration;
+ gint buffering_level;
#ifdef X_DISPLAY_FIX
bool use_x11;
const char * server_name;
@@ -63,6 +69,7 @@ static video_renderer_t *renderer_type[NCODECS] = {0};
static int n_renderers = NCODECS;
static char h264[] = "h264";
static char h265[] = "h265";
+static char hls[] = "hls";
static void append_videoflip (GString *launch, const videoflip_t *flip, const videoflip_t *rot) {
/* videoflip image transform */
@@ -85,7 +92,7 @@ static void append_videoflip (GString *launch, const videoflip_t *flip, const vi
case LEFT:
g_string_append(launch, "videoflip video-direction=GST_VIDEO_ORIENTATION_UL_LR ! ");
break;
- case RIGHT:
+ case RIGHT:
g_string_append(launch, "videoflip video-direction=GST_VIDEO_ORIENTATION_UR_LL ! ");
break;
default:
@@ -142,26 +149,78 @@ void video_renderer_size(float *f_width_source, float *f_height_source, float *f
logger_log(logger, LOGGER_DEBUG, "begin video stream wxh = %dx%d; source %dx%d", width, height, width_source, height_source);
}
-void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
+GstElement *make_video_sink(const char *videosink, const char *videosink_options) {
+ /* used to build a videosink for playbin, using the user-specified string "videosink" */
+ GstElement *video_sink = gst_element_factory_make(videosink, "videosink");
+ if (!video_sink) {
+ return NULL;
+ }
+
+ /* process the video_sink_optons */
+ size_t len = strlen(videosink_options);
+ if (!len) {
+ return video_sink;
+ }
+
+ char *options = (char *) malloc(len + 1);
+ strncpy(options, videosink_options, len + 1);
+
+ /* remove any extension beginning with "!" */
+ char *end = strchr(options, '!');
+ if (end) {
+ *end = '\0';
+ }
+
+ /* add any fullscreen options "property=pval" included in string videosink_options*/
+ /* OK to use strtok_r in Windows with MSYS2 (POSIX); use strtok_s for MSVC */
+ char *token;
+ char *text = options;
+
+ while((token = strtok_r(text, " ", &text))) {
+ char *pval = strchr(token, '=');
+ if (pval) {
+ *pval = '\0';
+ pval++;
+ const gchar *property_name = (const gchar *) token;
+ const gchar *value = (const gchar *) pval;
+ g_print("playbin_videosink property: \"%s\" \"%s\"\n", property_name, value);
+ gst_util_set_object_arg(G_OBJECT (video_sink), property_name, value);
+ }
+ }
+ free(options);
+ return video_sink;
+}
+
+void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
const char *decoder, const char *converter, const char *videosink, const char *videosink_options,
- bool initial_fullscreen, bool video_sync, bool h265_support) {
+ bool initial_fullscreen, bool video_sync, bool h265_support, const char *uri) {
GError *error = NULL;
GstCaps *caps = NULL;
-
+ hls_video = (uri != NULL);
/* videosink choices that are auto */
auto_videosink = (strstr(videosink, "autovideosink") || strstr(videosink, "fpsdisplaysink"));
-
+
logger = render_logger;
logger_debug = (logger_get_level(logger) >= LOGGER_DEBUG);
video_terminate = false;
-
+
/* this call to g_set_application_name makes server_name appear in the X11 display window title bar, */
/* (instead of the program name uxplay taken from (argv[0]). It is only set one time. */
+
const gchar *appname = g_get_application_name();
if (!appname || strcmp(appname,server_name)) g_set_application_name(server_name);
appname = NULL;
-
- n_renderers = h265_support ? 2 : 1;
+
+ /* the renderer for hls video will only be built if a HLS uri is provided in
+ * the call to video_renderer_init, in which case the h264 and h265 mirror-mode
+ * renderers will not be built. This is because it appears that we cannot
+ * put playbin into GST_STATE_READY before knowing the uri (?), so cannot use a
+ * unified renderer structure with h264, h265 and hls */
+ if (hls_video) {
+ n_renderers = 1;
+ } else {
+ n_renderers = h265_support ? 2 : 1;
+ }
g_assert (n_renderers <= NCODECS);
for (int i = 0; i < n_renderers; i++) {
g_assert (i < 2);
@@ -170,77 +229,97 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
renderer_type[i]->autovideo = auto_videosink;
renderer_type[i]->id = i;
renderer_type[i]->bus = NULL;
- switch (i) {
- case 0:
- renderer_type[i]->codec = h264;
- caps = gst_caps_from_string(h264_caps);
- break;
- case 1:
- renderer_type[i]->codec = h265;
- caps = gst_caps_from_string(h265_caps);
- break;
- default:
- g_assert(0);
- }
- GString *launch = g_string_new("appsrc name=video_source ! ");
- g_string_append(launch, "queue ! ");
- g_string_append(launch, parser);
- g_string_append(launch, " ! ");
- g_string_append(launch, decoder);
- g_string_append(launch, " ! ");
- append_videoflip(launch, &videoflip[0], &videoflip[1]);
- g_string_append(launch, converter);
- g_string_append(launch, " ! ");
- g_string_append(launch, "videoscale ! ");
- g_string_append(launch, videosink);
- g_string_append(launch, " name=");
- g_string_append(launch, videosink);
- g_string_append(launch, "_");
- g_string_append(launch, renderer_type[i]->codec);
- g_string_append(launch, videosink_options);
- if (video_sync) {
- g_string_append(launch, " sync=true");
- sync = true;
+ if (hls_video) {
+ /* use playbin3 to play HLS video: replace "playbin3" by "playbin" to use playbin2 */
+ renderer_type[i]->pipeline = gst_element_factory_make("playbin3", "hls-playbin3");
+ g_assert(renderer_type[i]->pipeline);
+ renderer_type[i]->appsrc = NULL;
+ renderer_type[i]->codec = hls;
+ /* if we are not using autovideosink, build a videosink based on the string "videosink" */
+ if(strcmp(videosink, "autovideosink")) {
+ GstElement *playbin_videosink = make_video_sink(videosink, videosink_options);
+
+ if (!playbin_videosink) {
+ logger_log(logger, LOGGER_ERR, "video_renderer_init: failed to create playbin_videosink");
+ } else {
+ logger_log(logger, LOGGER_DEBUG, "video_renderer_init: create playbin_videosink at %p", playbin_videosink);
+ g_object_set(G_OBJECT (renderer_type[i]->pipeline), "video-sink", playbin_videosink, NULL);
+ }
+ }
+
+ g_object_set (G_OBJECT (renderer_type[i]->pipeline), "uri", uri, NULL);
} else {
- g_string_append(launch, " sync=false");
- sync = false;
- }
-
- if (!strcmp(renderer_type[i]->codec, h264)) {
- char *pos = launch->str;
- while ((pos = strstr(pos,h265))){
- pos +=3;
- *pos = '4';
+ switch (i) {
+ case 0:
+ renderer_type[i]->codec = h264;
+ caps = gst_caps_from_string(h264_caps);
+ break;
+ case 1:
+ renderer_type[i]->codec = h265;
+ caps = gst_caps_from_string(h265_caps);
+ break;
+ default:
+ g_assert(0);
}
- } else if (!strcmp(renderer_type[i]->codec, h265)) {
- char *pos = launch->str;
- while ((pos = strstr(pos,h264))){
- pos +=3;
- *pos = '5';
+ GString *launch = g_string_new("appsrc name=video_source ! ");
+ g_string_append(launch, "queue ! ");
+ g_string_append(launch, parser);
+ g_string_append(launch, " ! ");
+ g_string_append(launch, decoder);
+ g_string_append(launch, " ! ");
+ append_videoflip(launch, &videoflip[0], &videoflip[1]);
+ g_string_append(launch, converter);
+ g_string_append(launch, " ! ");
+ g_string_append(launch, "videoscale ! ");
+ g_string_append(launch, videosink);
+ g_string_append(launch, " name=");
+ g_string_append(launch, videosink);
+ g_string_append(launch, "_");
+ g_string_append(launch, renderer_type[i]->codec);
+ g_string_append(launch, videosink_options);
+ if (video_sync) {
+ g_string_append(launch, " sync=true");
+ sync = true;
+ } else {
+ g_string_append(launch, " sync=false");
+ sync = false;
}
- }
- logger_log(logger, LOGGER_DEBUG, "GStreamer video pipeline %d:\n\"%s\"", i + 1, launch->str);
- renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
- if (error) {
- g_error ("get_parse_launch error (video) :\n %s\n",error->message);
- g_clear_error (&error);
- }
- g_assert (renderer_type[i]->pipeline);
+ if (!strcmp(renderer_type[i]->codec, h264)) {
+ char *pos = launch->str;
+ while ((pos = strstr(pos,h265))){
+ pos +=3;
+ *pos = '4';
+ }
+ } else if (!strcmp(renderer_type[i]->codec, h265)) {
+ char *pos = launch->str;
+ while ((pos = strstr(pos,h264))){
+ pos +=3;
+ *pos = '5';
+ }
+ }
- GstClock *clock = gst_system_clock_obtain();
- g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
-
- gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer_type[i]->pipeline), clock);
- renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "video_source");
- g_assert(renderer_type[i]->appsrc);
+ logger_log(logger, LOGGER_DEBUG, "GStreamer video pipeline %d:\n\"%s\"", i + 1, launch->str);
+ renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
+ if (error) {
+ g_error ("get_parse_launch error (video) :\n %s\n",error->message);
+ g_clear_error (&error);
+ }
+ g_assert (renderer_type[i]->pipeline);
- g_object_set(renderer_type[i]->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
- g_string_free(launch, TRUE);
- gst_caps_unref(caps);
- gst_object_unref(clock);
+ GstClock *clock = gst_system_clock_obtain();
+ g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
+ gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer_type[i]->pipeline), clock);
+ renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "video_source");
+ g_assert(renderer_type[i]->appsrc);
+
+ g_object_set(renderer_type[i]->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
+ g_string_free(launch, TRUE);
+ gst_caps_unref(caps);
+ gst_object_unref(clock);
+ }
#ifdef X_DISPLAY_FIX
- bool use_x11 = (strstr(videosink, "xvimagesink") || strstr(videosink, "ximagesink") || auto_videosink);
+ use_x11 = (strstr(videosink, "xvimagesink") || strstr(videosink, "ximagesink") || auto_videosink);
fullscreen = initial_fullscreen;
renderer_type[i]->server_name = server_name;
renderer_type[i]->gst_window = NULL;
@@ -269,36 +348,54 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
GstState state;
if (gst_element_get_state (renderer_type[i]->pipeline, &state, NULL, 0)) {
if (state == GST_STATE_READY) {
- logger_log(logger, LOGGER_DEBUG, "Initialized GStreamer video renderer %d", i + 1);
+ logger_log(logger, LOGGER_DEBUG, "Initialized GStreamer video renderer %d", i + 1);
+ if (hls_video && i == 0) {
+ renderer = renderer_type[i];
+ }
} else {
- logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
+ logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
}
} else {
- logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
- }
+ logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
+ }
}
}
void video_renderer_pause() {
+ if (!renderer) {
+ return;
+ }
logger_log(logger, LOGGER_DEBUG, "video renderer paused");
gst_element_set_state(renderer->pipeline, GST_STATE_PAUSED);
}
void video_renderer_resume() {
+ if (!renderer) {
+ return;
+ }
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
GstState state;
/* wait with timeout 100 msec for pipeline to change state from PAUSED to PLAYING */
gst_element_get_state(renderer->pipeline, &state, NULL, 100 * GST_MSECOND);
const gchar *state_name = gst_element_state_get_name(state);
logger_log(logger, LOGGER_DEBUG, "video renderer resumed: state %s", state_name);
- gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
+ if (renderer->appsrc) {
+ gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
+ }
}
void video_renderer_start() {
- /* start both h264 and h265 pipelines; will shut down the "wrong" one when we know the codec */
+ if (hls_video) {
+ renderer->bus = gst_element_get_bus(renderer->pipeline);
+ gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
+ return;
+ }
+ /* when not hls, start both h264 and h265 pipelines; will shut down the "wrong" one when we know the codec */
for (int i = 0; i < n_renderers; i++) {
gst_element_set_state (renderer_type[i]->pipeline, GST_STATE_PLAYING);
- gst_video_pipeline_base_time = gst_element_get_base_time(renderer_type[i]->appsrc);
+ if (renderer_type[i]->appsrc) {
+ gst_video_pipeline_base_time = gst_element_get_base_time(renderer_type[i]->appsrc);
+ }
renderer_type[i]->bus = gst_element_get_bus(renderer_type[i]->pipeline);
}
renderer = NULL;
@@ -308,6 +405,23 @@ void video_renderer_start() {
#endif
}
+/* used to find any X11 Window used by the playbin (HLS) pipeline after it starts playing.
+* if use_x11 is true, called every 100 ms after playbin state is READY until the x11 window is found*/
+bool waiting_for_x11_window() {
+ if (!hls_video) {
+ return false;
+ }
+#ifdef X_DISPLAY_FIX
+ if (use_x11 && renderer->gst_window) {
+ get_x_window(renderer->gst_window, renderer->server_name);
+ if (!renderer->gst_window->window) {
+ return true; /* window still not found */
+ }
+ }
+#endif
+ return false;
+}
+
void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time) {
GstBuffer *buffer;
GstClockTime pts = (GstClockTime) *ntp_time; /*now in nsecs */
@@ -362,21 +476,28 @@ void video_renderer_flush() {
void video_renderer_stop() {
if (renderer) {
- gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
+ if (renderer->appsrc) {
+ gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
+ }
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
- }
+ //gst_element_set_state (renderer->playbin, GST_STATE_NULL);
+ }
}
-static void video_renderer_destroy_h26x(video_renderer_t *renderer) {
+void video_renderer_destroy() {
if (renderer) {
GstState state;
gst_element_get_state(renderer->pipeline, &state, NULL, 0);
if (state != GST_STATE_NULL) {
- gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
+ if (!hls_video) {
+ gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
+ }
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
}
gst_object_unref(renderer->bus);
- gst_object_unref (renderer->appsrc);
+ if (renderer->appsrc) {
+ gst_object_unref (renderer->appsrc);
+ }
gst_object_unref (renderer->pipeline);
#ifdef X_DISPLAY_FIX
if (renderer->gst_window) {
@@ -389,20 +510,7 @@ static void video_renderer_destroy_h26x(video_renderer_t *renderer) {
}
}
-
-void video_renderer_destroy() {
- for (int i = 0; i < n_renderers; i++) {
- if (renderer_type[i]) {
- video_renderer_destroy_h26x(renderer_type[i]);
- }
- }
-}
-
-/* not implemented for gstreamer */
-void video_renderer_update_background(int type) {
-}
-
-gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, void * loop) {
+gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, void *loop) {
/* identify which pipeline sent the message */
int type = -1;
@@ -413,18 +521,49 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, void
}
}
g_assert(type != -1);
-
+
if (logger_debug) {
g_print("GStreamer %s bus message: %s %s\n", renderer_type[type]->codec, GST_MESSAGE_SRC_NAME(message), GST_MESSAGE_TYPE_NAME(message));
}
+
+ if (logger_debug && hls_video) {
+ gint64 pos;
+ gst_element_query_position (renderer_type[type]->pipeline, GST_FORMAT_TIME, &pos);
+ if (GST_CLOCK_TIME_IS_VALID(pos)) {
+ g_print("GStreamer bus message %s %s; position: %" GST_TIME_FORMAT "\n", GST_MESSAGE_SRC_NAME(message),
+ GST_MESSAGE_TYPE_NAME(message), GST_TIME_ARGS(pos));
+ } else {
+ g_print("GStreamer bus message %s %s; position: none\n", GST_MESSAGE_SRC_NAME(message),
+ GST_MESSAGE_TYPE_NAME(message));
+ }
+ }
+
switch (GST_MESSAGE_TYPE (message)) {
+ case GST_MESSAGE_DURATION:
+ renderer_type[type]->duration = GST_CLOCK_TIME_NONE;
+ break;
+ case GST_MESSAGE_BUFFERING:
+ if (hls_video) {
+ gint percent = -1;
+ gst_message_parse_buffering(message, &percent);
+ if (percent >= 0) {
+ renderer_type[type]->buffering_level = percent;
+ logger_log(logger, LOGGER_DEBUG, "Buffering :%u percent done", percent);
+ if (percent < 100) {
+ gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_PAUSED);
+ } else {
+ gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_PLAYING);
+ }
+ }
+ }
+ break;
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gboolean flushing;
gst_message_parse_error (message, &err, &debug);
- logger_log(logger, LOGGER_INFO, "GStreamer error: %s", err->message);
- if (strstr(err->message,"Internal data stream error")) {
+ logger_log(logger, LOGGER_INFO, "GStreamer error: %s %s", GST_MESSAGE_SRC_NAME(message),err->message);
+ if (!hls_video && strstr(err->message,"Internal data stream error")) {
logger_log(logger, LOGGER_INFO,
"*** This is a generic GStreamer error that usually means that GStreamer\n"
"*** was unable to construct a working video pipeline.\n\n"
@@ -436,19 +575,27 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, void
}
g_error_free (err);
g_free (debug);
- gst_app_src_end_of_stream (GST_APP_SRC(renderer_type[type]->appsrc));
- flushing = TRUE;
- gst_bus_set_flushing(bus, flushing);
- gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_NULL);
- g_main_loop_quit( (GMainLoop *) loop);
+ if (renderer_type[type]->appsrc) {
+ gst_app_src_end_of_stream (GST_APP_SRC(renderer_type[type]->appsrc));
+ }
+ gst_bus_set_flushing(bus, TRUE);
+ gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_READY);
+ renderer_type[type]->terminate = TRUE;
+ g_main_loop_quit( (GMainLoop *) loop);
break;
}
case GST_MESSAGE_EOS:
/* end-of-stream */
- logger_log(logger, LOGGER_INFO, "GStreamer: End-Of-Stream");
- // g_main_loop_quit( (GMainLoop *) loop);
+ logger_log(logger, LOGGER_INFO, "GStreamer: End-Of-Stream");
+ if (hls_video) {
+ gst_bus_set_flushing(bus, TRUE);
+ gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_READY);
+ renderer_type[type]->terminate = TRUE;
+ g_main_loop_quit( (GMainLoop *) loop);
+ }
break;
         case GST_MESSAGE_STATE_CHANGED:
if (renderer_type[type]->state_pending && strstr(GST_MESSAGE_SRC_NAME(message), "pipeline")) {
GstState state;
gst_element_get_state(renderer_type[type]->pipeline, &state, NULL,0);
@@ -519,6 +666,7 @@ gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, void
}
void video_renderer_choose_codec (bool video_is_h265) {
+ g_assert(!hls_video);
/* set renderer to h264 or h265, depending on pps/sps received by raop_rtp_mirror */
video_renderer_t *renderer_new = video_is_h265 ? renderer_type[1] : renderer_type[0];
if (renderer == renderer_new) {
@@ -543,7 +691,9 @@ void video_renderer_choose_codec (bool video_is_h265) {
unsigned int video_reset_callback(void * loop) {
if (video_terminate) {
video_terminate = false;
- gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
+ if (renderer->appsrc) {
+ gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
+ }
gboolean flushing = TRUE;
gst_bus_set_flushing(renderer->bus, flushing);
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
@@ -552,6 +702,63 @@ unsigned int video_reset_callback(void * loop) {
return (unsigned int) TRUE;
}
+bool video_get_playback_info(double *duration, double *position, float *rate) {
+ gint64 pos = 0;
+ GstState state;
+ *duration = 0.0;
+ *position = -1.0;
+ *rate = 0.0f;
+ if (!renderer) {
+
+ return true;
+ }
+ gst_element_get_state(renderer->pipeline, &state, NULL, 0);
+ *rate = 0.0f;
+ switch (state) {
+ case GST_STATE_PLAYING:
+ *rate = 1.0f;
+ default:
+ break;
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID(renderer->duration)) {
+ if (!gst_element_query_duration (renderer->pipeline, GST_FORMAT_TIME, &renderer->duration)) {
+ return true;
+ }
+ }
+ *duration = ((double) renderer->duration) / GST_SECOND;
+ if (*duration) {
+ if (gst_element_query_position (renderer->pipeline, GST_FORMAT_TIME, &pos) &&
+ GST_CLOCK_TIME_IS_VALID(pos)) {
+ *position = ((double) pos) / GST_SECOND;
+ }
+ }
+
+ logger_log(logger, LOGGER_DEBUG, "********* video_get_playback_info: position %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT " %s *********",
+ GST_TIME_ARGS (pos), GST_TIME_ARGS (renderer->duration), gst_element_state_get_name(state));
+
+ return true;
+}
+
+void video_renderer_seek(float position) {
+ double pos = (double) position;
+ pos *= GST_SECOND;
+ gint64 seek_position = (gint64) pos;
+ seek_position = seek_position < 1000 ? 1000 : seek_position;
+ seek_position = seek_position > renderer->duration - 1000 ? renderer->duration - 1000: seek_position;
+ g_print("SCRUB: seek to %f secs = %" GST_TIME_FORMAT ", duration = %" GST_TIME_FORMAT "\n", position,
+ GST_TIME_ARGS(seek_position), GST_TIME_ARGS(renderer->duration));
+ gboolean result = gst_element_seek_simple(renderer->pipeline, GST_FORMAT_TIME,
+ (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT),
+ seek_position);
+ if (result) {
+ g_print("seek succeeded\n");
+ gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
+ } else {
+ g_print("seek failed\n");
+ }
+}
+
unsigned int video_renderer_listen(void *loop, int id) {
g_assert(id >= 0 && id < n_renderers);
return (unsigned int) gst_bus_add_watch(renderer_type[id]->bus,(GstBusFunc)
diff --git a/renderers/video_renderer.h b/renderers/video_renderer.h
index 23c8938..4db432a 100644
--- a/renderers/video_renderer.h
+++ b/renderers/video_renderer.h
@@ -46,23 +46,26 @@ typedef enum videoflip_e {
} videoflip_t;
typedef struct video_renderer_s video_renderer_t;
-
-void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
- const char *decoder, const char *converter, const char *videosink, const char *videosin_options,
- bool initial_fullscreen, bool video_sync, bool h265_support);
+
+void video_renderer_init (logger_t *logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
+ const char *decoder, const char *converter, const char *videosink, const char *videosink_options,
+ bool initial_fullscreen, bool video_sync, bool h265_support, const char *uri);
void video_renderer_start ();
void video_renderer_stop ();
void video_renderer_pause ();
+void video_renderer_seek(float position);
void video_renderer_resume ();
+bool video_renderer_is_paused();
void video_renderer_render_buffer (unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time);
void video_renderer_flush ();
+unsigned int video_renderer_listen(void *loop, int id);
void video_renderer_destroy ();
void video_renderer_size(float *width_source, float *height_source, float *width, float *height);
+bool waiting_for_x11_window();
+bool video_get_playback_info(double *duration, double *position, float *rate);
void video_renderer_choose_codec(bool is_h265);
-
unsigned int video_renderer_listen(void *loop, int id);
unsigned int video_reset_callback(void *loop);
-
#ifdef __cplusplus
}
#endif
diff --git a/uxplay.1 b/uxplay.1
index 419780a..4c39178 100644
--- a/uxplay.1
+++ b/uxplay.1
@@ -1,11 +1,11 @@
-.TH UXPLAY "1" "September 2024" "1.70" "User Commands"
+.TH UXPLAY "1" "December 2024" "1.71" "User Commands"
.SH NAME
uxplay \- start AirPlay server
.SH SYNOPSIS
.B uxplay
[\fI\,-n name\/\fR] [\fI\,-s wxh\/\fR] [\fI\,-p \/\fR[\fI\,n\/\fR]] [more \fI OPTIONS \/\fR ...]
.SH DESCRIPTION
-UxPlay 1.70: An open\-source AirPlay mirroring (+ audio streaming) server:
+UxPlay 1.71: An open\-source AirPlay mirroring (+ audio streaming) server:
.SH OPTIONS
.TP
.B
@@ -15,6 +15,8 @@ UxPlay 1.70: An open\-source AirPlay mirroring (+ audio streaming) server:
.TP
\fB\-h265\fR Support h265 (4K) video (with h265 versions of h264 plugins)
.TP
+\fB\-hls\fR Support HTTP Live Streaming (currently YouTube video only)
+.TP
\fB\-pin\fI[xxxx]\fRUse a 4-digit pin code to control client access (default: no)
.IP
without option, pin is random: optionally use fixed pin xxxx.
diff --git a/uxplay.cpp b/uxplay.cpp
index b2805fa..d6a2d1e 100644
--- a/uxplay.cpp
+++ b/uxplay.cpp
@@ -62,7 +62,7 @@
#include "renderers/video_renderer.h"
#include "renderers/audio_renderer.h"
-#define VERSION "1.70"
+#define VERSION "1.71"
#define SECOND_IN_USECS 1000000
#define SECOND_IN_NSECS 1000000000UL
@@ -144,6 +144,11 @@ static double db_high = 0.0;
static bool taper_volume = false;
static bool h265_support = false;
static int n_renderers = 0;
+static bool hls_support = false;
+static std::string url = "";
+static guint gst_x11_window_id = 0;
+static guint gst_hls_position_id = 0;
+static bool preserve_connections = false;
/* logging */
@@ -360,6 +365,16 @@ static gboolean reset_callback(gpointer loop) {
return TRUE;
}
+static gboolean x11_window_callback(gpointer loop) {
+ /* called while trying to find an x11 window used by playbin (HLS mode) */
+ if (waiting_for_x11_window()) {
+ return TRUE;
+ }
+ g_source_remove(gst_x11_window_id);
+ gst_x11_window_id = 0;
+ return FALSE;
+}
+
static gboolean sigint_callback(gpointer loop) {
relaunch_video = false;
g_main_loop_quit((GMainLoop *) loop);
@@ -400,6 +415,15 @@ static void main_loop() {
relaunch_video = false;
if (use_video) {
relaunch_video = true;
+ if (url.empty()) {
+ n_renderers = h265_support ? 2 : 1;
+ gst_x11_window_id = 0;
+ } else {
+ /* hls video will be rendered */
+ n_renderers = 1;
+ url.erase();
+ gst_x11_window_id = g_timeout_add(100, (GSourceFunc) x11_window_callback, (gpointer) loop);
+ }
for (int i = 0; i < n_renderers; i++) {
gst_bus_watch_id[i] = (guint) video_renderer_listen((void *)loop, i);
}
@@ -408,12 +432,12 @@ static void main_loop() {
guint video_reset_watch_id = g_timeout_add(100, (GSourceFunc) video_reset_callback, (gpointer) loop);
guint sigterm_watch_id = g_unix_signal_add(SIGTERM, (GSourceFunc) sigterm_callback, (gpointer) loop);
guint sigint_watch_id = g_unix_signal_add(SIGINT, (GSourceFunc) sigint_callback, (gpointer) loop);
- //printf("********** main_loop_run *******************\n");
g_main_loop_run(loop);
- //printf("********** main_loop_exit *******************\n");
+
for (int i = 0; i < n_renderers; i++) {
if (gst_bus_watch_id[i] > 0) g_source_remove(gst_bus_watch_id[i]);
}
+ if (gst_x11_window_id > 0) g_source_remove(gst_x11_window_id);
if (sigint_watch_id > 0) g_source_remove(sigint_watch_id);
if (sigterm_watch_id > 0) g_source_remove(sigterm_watch_id);
if (reset_watch_id > 0) g_source_remove(reset_watch_id);
@@ -582,6 +606,7 @@ static void print_info (char *name) {
printf("-n name Specify the network name of the AirPlay server\n");
printf("-nh Do not add \"@hostname\" at the end of AirPlay server name\n");
printf("-h265 Support h265 (4K) video (with h265 versions of h264 plugins)\n");
+ printf("-hls Support HTTP Live Streaming (currently Youtube video only) \n");
printf("-pin[xxxx]Use a 4-digit pin code to control client access (default: no)\n");
printf(" default pin is random: optionally use fixed pin xxxx\n");
printf("-reg [fn] Keep a register in $HOME/.uxplay.register to verify returning\n");
@@ -593,7 +618,7 @@ static void print_info (char *name) {
printf("-async no Switch off audio/(client)video timestamp synchronization\n");
printf("-db l[:h] Set minimum volume attenuation to l dB (decibels, negative);\n");
printf(" optional: set maximum to h dB (+ or -) default: -30.0:0.0 dB\n");
- printf("-taper Use a \"tapered\" AirPlay volume-control profile\n");
+ printf("-taper Use a \"tapered\" AirPlay volume-control profile\n");
printf("-s wxh[@r]Request to client for video display resolution [refresh_rate]\n");
printf(" default 1920x1080[@60] (or 3840x2160[@60] with -h265 option)\n");
printf("-o Set display \"overscanned\" mode on (not usually needed)\n");
@@ -607,6 +632,7 @@ static void print_info (char *name) {
printf("-vd ... Choose the GStreamer h264 decoder; default \"decodebin\"\n");
printf(" choices: (software) avdec_h264; (hardware) v4l2h264dec,\n");
     printf("            nvdec, nvh264dec, vaapih264dec, vtdec, etc.\n");
+ printf(" choices: avdec_h264,vaapih264dec,nvdec,nvh264dec,v4l2h264dec\n");
printf("-vc ... Choose the GStreamer videoconverter; default \"videoconvert\"\n");
printf(" another choice when using v4l2h264dec: v4l2convert\n");
printf("-vs ... Choose the GStreamer videosink; default \"autovideosink\"\n");
@@ -1145,6 +1171,8 @@ static void parse_arguments (int argc, char *argv[]) {
db_low = db1;
db_high = db2;
printf("db range %f:%f\n", db_low, db_high);
+ } else if (arg == "-hls") {
+ hls_support = true;
} else if (arg == "-h265") {
h265_support = true;
} else if (arg == "-nofreeze") {
 @@ -1356,7 +1384,7 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
}
/* after dnssd starts, reset the default feature set here
- * (overwrites features set in dnssdint.h).
+ * (overwrites features set in dnssdint.h)
* default: FEATURES_1 = 0x5A7FFEE6, FEATURES_2 = 0 */
dnssd_set_airplay_features(dnssd, 0, 0); // AirPlay video supported
 @@ -1399,7 +1427,8 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
dnssd_set_airplay_features(dnssd, 30, 1); // RAOP support: with this bit set, the AirTunes service is not required.
dnssd_set_airplay_features(dnssd, 31, 0); //
- /* bits 32-63 see https://emanualcozzi.net/docs/airplay2/features
+
+ /* bits 32-63: see https://emanualcozzi.net/docs/airplay2/features
dnssd_set_airplay_features(dnssd, 32, 0); // isCarPlay when ON,; Supports InitialVolume when OFF
dnssd_set_airplay_features(dnssd, 33, 0); // Supports Air Play Video Play Queue
dnssd_set_airplay_features(dnssd, 34, 0); // Supports Air Play from cloud (requires that bit 6 is ON)
 @@ -1412,8 +1441,7 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
dnssd_set_airplay_features(dnssd, 40, 0); // Supports Buffered Audio
dnssd_set_airplay_features(dnssd, 41, 0); // Supports PTP
-
- dnssd_set_airplay_features(dnssd, 42, 0); // Supports Screen Multi Codec (allows h265 video)
+ dnssd_set_airplay_features(dnssd, 42, 0); // Supports Screen Multi Codec (allows h265 video)
dnssd_set_airplay_features(dnssd, 43, 0); // Supports System Pairing
dnssd_set_airplay_features(dnssd, 44, 0); // is AP Valeria Screen Sender
 @@ -1440,9 +1468,15 @@ static int start_dnssd(std::vector<char> hw_addr, std::string name) {
dnssd_set_airplay_features(dnssd, 61, 0); // Supports RFC2198 redundancy
*/
+ /* needed for HLS video support */
+ dnssd_set_airplay_features(dnssd, 0, (int) hls_support);
+ dnssd_set_airplay_features(dnssd, 4, (int) hls_support);
+ // not sure about this one (bit 8, screen rotation supported):
+ //dnssd_set_airplay_features(dnssd, 8, (int) hls_support);
+
/* needed for h265 video support */
dnssd_set_airplay_features(dnssd, 42, (int) h265_support);
-
+
/* bit 27 of Features determines whether the AirPlay2 client-pairing protocol will be used (1) or not (0) */
dnssd_set_airplay_features(dnssd, 27, (int) setup_legacy_pairing);
return 0;
@@ -1475,6 +1509,8 @@ static bool check_blocked_client(char *deviceid) {
// Server callbacks
extern "C" void video_reset(void *cls) {
+ LOGD("video_reset");
+ url.erase();
reset_loop = true;
remote_clock_offset = 0;
relaunch_video = true;
@@ -1539,6 +1575,7 @@ extern "C" void conn_reset (void *cls, int timeouts, bool reset_video) {
LOGI(" Sometimes the network connection may recover after a longer delay:\n"
" the default timeout limit n = %d can be changed with the \"-reset n\" option", NTP_TIMEOUT_LIMIT);
}
+ printf("reset_video %d\n",(int) reset_video);
if (!nofreeze) {
close_window = reset_video; /* leave "frozen" window open if reset_video is false */
}
@@ -1801,6 +1838,53 @@ extern "C" bool check_register(void *cls, const char *client_pk) {
return false;
}
}
+/* control callbacks for video player (unimplemented) */
+
+extern "C" void on_video_play(void *cls, const char* location, const float start_position) {
+ /* start_position needs to be implemented */
+ url.erase();
+ url.append(location);
+ reset_loop = true;
+ relaunch_video = true;
+ preserve_connections = true;
+ LOGD("********************on_video_play: location = %s***********************", url.c_str());
+}
+
+extern "C" void on_video_scrub(void *cls, const float position) {
+ LOGI("on_video_scrub: position = %7.5f\n", position);
+ video_renderer_seek(position);
+}
+
+extern "C" void on_video_rate(void *cls, const float rate) {
+ LOGI("on_video_rate = %7.5f\n", rate);
+ if (rate == 1.0f) {
+ video_renderer_resume();
+ } else if (rate == 0.0f) {
+ video_renderer_pause();
+ } else {
+ LOGI("on_video_rate: ignoring unexpected value rate = %f\n", rate);
+ }
+}
+
+extern "C" void on_video_stop(void *cls) {
+ LOGI("on_video_stop\n");
+}
+
+extern "C" void on_video_acquire_playback_info (void *cls, playback_info_t *playback_info) {
+ int buffering_level;
+ LOGD("on_video_acquire_playback info\n");
+ bool still_playing = video_get_playback_info(&playback_info->duration, &playback_info->position,
+ &playback_info->rate);
+ LOGD("on_video_acquire_playback info done\n");
+ if (!still_playing) {
+ LOGI(" video has finished, %f", playback_info->position);
+ playback_info->position = -1.0;
+ playback_info->duration = -1.0;
+ printf("about to stop\n");
+ video_renderer_stop();
+ printf("stopped\n");
+ }
+}
extern "C" void log_callback (void *cls, int level, const char *msg) {
switch (level) {
@@ -1851,6 +1935,11 @@ static int start_raop_server (unsigned short display[5], unsigned short tcp[3],
raop_cbs.export_dacp = export_dacp;
raop_cbs.video_reset = video_reset;
raop_cbs.video_set_codec = video_set_codec;
+ raop_cbs.on_video_play = on_video_play;
+ raop_cbs.on_video_scrub = on_video_scrub;
+ raop_cbs.on_video_rate = on_video_rate;
+ raop_cbs.on_video_stop = on_video_stop;
+ raop_cbs.on_video_acquire_playback_info = on_video_acquire_playback_info;
raop = raop_init(&raop_cbs);
if (raop == NULL) {
@@ -1879,6 +1968,7 @@ static int start_raop_server (unsigned short display[5], unsigned short tcp[3],
raop_set_plist(raop, "max_ntp_timeouts", max_ntp_timeouts);
if (audiodelay >= 0) raop_set_plist(raop, "audio_delay_micros", audiodelay);
if (require_password) raop_set_plist(raop, "pin", (int) pin);
+ if (hls_support) raop_set_plist(raop, "hls", 1);
/* network port selection (ports listed as "0" will be dynamically assigned) */
raop_set_tcp_ports(raop, tcp);
@@ -2070,9 +2160,9 @@ int main (int argc, char *argv[]) {
if (videosink == "d3d11videosink" && videosink_options.empty() && use_video) {
if (fullscreen) {
- videosink_options.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_PROPERTY fullscreen=true ");
+ videosink_options.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_PROPERTY fullscreen=true ");
} else {
- videosink_options.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_ALT_ENTER ");
+ videosink_options.append(" fullscreen-toggle-mode=GST_D3D11_WINDOW_FULLSCREEN_TOGGLE_MODE_ALT_ENTER ");
}
LOGI("d3d11videosink is being used with option fullscreen-toggle-mode=alt-enter\n"
"Use Alt-Enter key combination to toggle into/out of full-screen mode");
@@ -2148,12 +2238,10 @@ int main (int argc, char *argv[]) {
} else {
LOGI("audio_disabled");
}
-
if (use_video) {
- n_renderers = h265_support ? 2 : 1;
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
video_decoder.c_str(), video_converter.c_str(), videosink.c_str(),
- videosink_options.c_str(),fullscreen, video_sync, h265_support);
+ videosink_options.c_str(), fullscreen, video_sync, h265_support, NULL);
video_renderer_start();
}
@@ -2196,7 +2284,6 @@ int main (int argc, char *argv[]) {
if (start_dnssd(server_hw_addr, server_name)) {
goto cleanup;
}
-
if (start_raop_server(display, tcp, udp, debug_log)) {
stop_dnssd();
goto cleanup;
@@ -2209,7 +2296,7 @@ int main (int argc, char *argv[]) {
reconnect:
compression_type = 0;
close_window = new_window_closing_behavior;
-
+
main_loop();
if (relaunch_video || reset_loop) {
if(reset_loop) {
@@ -2218,12 +2305,18 @@ int main (int argc, char *argv[]) {
raop_stop(raop);
}
if (use_audio) audio_renderer_stop();
- if (use_video && close_window) {
+ if (use_video && (close_window || preserve_connections)) {
video_renderer_destroy();
- raop_remove_known_connections(raop);
+ if (!preserve_connections) {
+ raop_destroy_airplay_video(raop);
+ url.erase();
+ raop_remove_known_connections(raop);
+ }
+ preserve_connections = false;
+ const char *uri = (url.empty() ? NULL : url.c_str());
video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
- video_decoder.c_str(), video_converter.c_str(), videosink.c_str(),
- videosink_options.c_str(), fullscreen, video_sync, h265_support);
+ video_decoder.c_str(), video_converter.c_str(), videosink.c_str(),
+ videosink_options.c_str(), fullscreen, video_sync, h265_support, uri);
video_renderer_start();
}
if (relaunch_video) {
diff --git a/uxplay.spec b/uxplay.spec
index 93fed46..6e9c7dc 100644
--- a/uxplay.spec
+++ b/uxplay.spec
@@ -1,5 +1,5 @@
Name: uxplay
-Version: 1.70
+Version: 1.71
Release: 1%{?dist}
%global gittag v%{version}
@@ -135,7 +135,7 @@ cd build
%{_docdir}/%{name}/llhttp/LICENSE-MIT
%changelog
-* Tue Sep 17 2024 UxPlay maintainer
+* Fri Nov 15 2024 UxPlay maintainer
Initial uxplay.spec: tested on Fedora 38, Rocky Linux 9.2, OpenSUSE
Leap 15.5, Mageia 9, OpenMandriva ROME, PCLinuxOS
-