-name: Basic ALSA configuration for systemd, using a build folder.
+name: systemd, alsa, pipewire, build folder.
on:
workflow_dispatch:
jobs:
build:
- runs-on: ubuntu-22.04
+ runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4.2.2
+ - uses: actions/checkout@v6.0.2
- name: Install Dependencies
- run: sudo apt-get -y --no-install-recommends install xmltoman libpopt-dev libconfig-dev libasound2-dev avahi-daemon libavahi-client-dev libsoxr-dev libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev
+ run: sudo apt-get -y --no-install-recommends install libpipewire-0.3-dev libplist-utils xmltoman libpopt-dev libconfig-dev libasound2-dev avahi-daemon libavahi-client-dev libsoxr-dev libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev libgcrypt-dev uuid-dev
- name: Configure
run: |
mkdir build
cd build
autoreconf -i ..
- ../configure --sysconfdir=/etc --with-alsa --with-soxr --with-avahi --with-ssl=openssl --with-systemd --with-airplay-2
+ ../configure --sysconfdir=/etc --with-alsa --with-soxr --with-avahi --with-ssl=openssl --with-systemd-startup --with-airplay-2 --with-pipewire
- name: Make
run: |
cd build
-name: Configuration (without pa, soundio or apple-alac) for systemd.
+name: systemd, alsa, ao, dummy, jack, pipe, stdout
on:
workflow_dispatch:
jobs:
build:
- runs-on: ubuntu-22.04
+ runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4.2.2
+ - uses: actions/checkout@v6.0.2
- name: Install Dependencies
- run: sudo apt-get -y --no-install-recommends install xmltoman libpopt-dev libconfig-dev libasound2-dev libao-dev libjack-dev libmosquitto-dev avahi-daemon libavahi-client-dev libsoxr-dev libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev
+ run: sudo apt-get -y --no-install-recommends install libglib2.0-dev libplist-utils xmltoman libpopt-dev libconfig-dev libasound2-dev libao-dev libjack-dev libmosquitto-dev avahi-daemon libavahi-client-dev libsoxr-dev libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev libgcrypt-dev
- name: Configure
run: |
autoreconf -fi
- ./configure --sysconfdir=/etc --with-alsa --with-ao --with-dummy --with-jack --with-pipe --with-stdout --with-soxr --with-avahi --with-ssl=openssl --with-systemd --with-dbus-interface --with-mpris-interface --with-mqtt-client --with-airplay-2
+ ./configure --sysconfdir=/etc --with-alsa --with-ao --with-dummy --with-jack --with-pipe --with-stdout --with-soxr --with-avahi --with-ssl=openssl --with-systemd-startup --with-dbus-interface --with-mpris-interface --with-mqtt-client --with-airplay-2
- name: Make
run: |
make -j
-name: Configuration (without pa, soundio or apple-alac) for systemd, using a build folder.
+name: systemd, alsa, pipewire, ao, dummy, jack, pipe, stdout, using a build folder.
on:
workflow_dispatch:
jobs:
build:
- runs-on: ubuntu-22.04
+ runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4.2.2
+ - uses: actions/checkout@v6.0.2
- name: Install Dependencies
- run: sudo apt-get -y --no-install-recommends install xmltoman libpopt-dev libconfig-dev libasound2-dev libao-dev libjack-dev libglib2.0-dev libmosquitto-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev
+ run: sudo apt-get -y --no-install-recommends install libpipewire-0.3-dev libplist-utils xmltoman libpopt-dev libconfig-dev libasound2-dev libao-dev libjack-dev libglib2.0-dev libmosquitto-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev libgcrypt-dev
- name: Configure
run: |
mkdir build
cd build
autoreconf -i ..
- ../configure --sysconfdir=/etc --with-alsa --with-ao --with-dummy --with-jack --with-pipe --with-stdout --with-soxr --with-avahi --with-ssl=openssl --with-systemd --with-dbus-interface --with-mpris-interface --with-mqtt-client --with-airplay-2
+ ../configure --sysconfdir=/etc --with-alsa --with-pipewire --with-ao --with-dummy --with-jack --with-pipe --with-stdout --with-soxr --with-avahi --with-ssl=openssl --with-systemd-startup --with-dbus-interface --with-mpris-interface --with-mqtt-client --with-airplay-2
- name: Make
run: |
cd build
-name: Configuration (but without pa, soundio, apple-alac) for a System V system.
+name: systemv, alsa, pipewire, ao, dummy, jack, pipe, stdout
on:
workflow_dispatch:
jobs:
build:
- runs-on: ubuntu-22.04
+ runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4.2.2
+ - uses: actions/checkout@v6.0.2
- name: Install Dependencies
- run: sudo apt-get -y --no-install-recommends install xmltoman libpopt-dev libdaemon-dev libconfig-dev libasound2-dev libao-dev libjack-dev libglib2.0-dev libmosquitto-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev uuid-dev libgcrypt-dev
+ run: sudo apt-get -y --no-install-recommends install libpipewire-0.3-dev libplist-utils xmltoman libpopt-dev libdaemon-dev libconfig-dev libasound2-dev libao-dev libjack-dev libglib2.0-dev libmosquitto-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev uuid-dev libgcrypt-dev
- name: Configure
run: |
autoreconf -i
- ./configure --sysconfdir=/etc --with-alsa --with-ao --with-dummy --with-libdaemon --with-jack --with-pipe --with-stdout --with-soxr --with-avahi --with-ssl=openssl --with-systemv --with-dbus-interface --with-mpris-interface --with-mqtt-client --with-airplay-2
+ ./configure --sysconfdir=/etc --with-alsa --with-pipewire --with-ao --with-dummy --with-libdaemon --with-jack --with-pipe --with-stdout --with-soxr --with-avahi --with-ssl=openssl --with-systemv-startup --with-dbus-interface --with-mpris-interface --with-mqtt-client --with-airplay-2
- name: Make
run: |
make -j
-name: Basic libao configuration for macOS with BREW -- classic only, because macOS can't host NQPTP.
+name: macos classic, libao, BREW -- classic only, because macOS can't host NQPTP.
on:
workflow_dispatch:
runs-on: macos-13
steps:
- - uses: actions/checkout@v4.2.2
+ - uses: actions/checkout@v6.0.2
- name: Install Dependencies
run: |
brew install automake
-name: Basic ALSA classic configuration for systemd, using a build folder.
+name: systemd classic, alsa, using a build folder.
on:
workflow_dispatch:
jobs:
build:
- runs-on: ubuntu-22.04
+ runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4.2.2
+ - uses: actions/checkout@v6.0.2
- name: Install Dependencies
run: sudo apt-get -y --no-install-recommends install xmltoman libpopt-dev libconfig-dev libasound2-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev
- name: Configure
mkdir build
cd build
autoreconf -i ..
- ../configure --sysconfdir=/etc --with-alsa --with-soxr --with-avahi --with-ssl=openssl --with-systemd
+ ../configure --sysconfdir=/etc --with-alsa --with-soxr --with-avahi --with-ssl=openssl --with-systemd-startup
- name: Make
run: |
cd build
-name: Classic (without pa, soundio, apple-alac) for systemd, using a build folder.
+name: systemd classic, ffmpeg, alsa, pipewire, build folder.
on:
workflow_dispatch:
jobs:
build:
- runs-on: ubuntu-22.04
+ runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4.2.2
+ - uses: actions/checkout@v6.0.2
- name: Install Dependencies
- run: sudo apt-get -y --no-install-recommends install xmltoman libpopt-dev libconfig-dev libasound2-dev libao-dev libjack-dev libglib2.0-dev libmosquitto-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev
+ run: sudo apt-get -y --no-install-recommends install xmltoman libpipewire-0.3-dev libpopt-dev libconfig-dev libasound2-dev libao-dev libjack-dev libglib2.0-dev libmosquitto-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev libavutil-dev libavcodec-dev libavformat-dev
- name: Configure
run: |
mkdir build
cd build
autoreconf -i ..
- ../configure --sysconfdir=/etc --with-alsa --with-ao --with-dummy --with-pipe --with-stdout --with-soxr --with-avahi --with-ssl=openssl --with-systemd --with-dbus-interface --with-mpris-interface --with-mqtt-client
+ ../configure --sysconfdir=/etc --with-ffmpeg --with-alsa --with-pipewire --with-ao --with-dummy --with-pipe --with-stdout --with-soxr --with-avahi --with-ssl=openssl --with-dbus-interface --with-mpris-interface --with-mqtt-client --with-systemd-startup
- name: Make
run: |
cd build
+++ /dev/null
-# Builds a docker image when a commit is made. Also pushes the build if the branch is 'master' or 'development'.
-
-# Tag pattern
-# 'master' - rolling, rolling-classic
-# 'development' - development, development-classic
-
-name: Build and push docker (push/pull request)
-
-on:
- workflow_dispatch:
- push:
- branches:
- - master
- - development
- pull_request:
- types: [opened, synchronize, reopened, ready_for_review]
-
-env:
- DOCKER_PLATFORMS: linux/386,linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64
- NQPTP_BRANCH: main
-
-jobs:
- test-build-on-pull-request:
- if: github.event_name == 'pull_request'
- runs-on: ubuntu-22.04
- steps:
- - name: Checkout Repo
- uses: actions/checkout@v4.2.2
- with:
- fetch-depth: 0
- ref: ${{github.event.pull_request.head.ref}}
- repository: ${{github.event.pull_request.head.repo.full_name}}
-
- - name: Set SHAIRPORT_SYNC_BRANCH env
- run: |
- SHAIRPORT_SYNC_BRANCH=$(git rev-parse --abbrev-ref HEAD)
- echo "Current SHAIRPORT_SYNC_BRANCH set to ${SHAIRPORT_SYNC_BRANCH}"
- echo "SHAIRPORT_SYNC_BRANCH=${SHAIRPORT_SYNC_BRANCH}" >> $GITHUB_ENV
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3.3.0
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3.8.0
-
- - name: Build (classic)
- uses: docker/build-push-action@v6.13.0
- with:
- context: ./
- file: ./docker/classic/Dockerfile
- push: false
- build-args: |
- SHAIRPORT_SYNC_BRANCH=${{ env.SHAIRPORT_SYNC_BRANCH }}
-
- - name: Build
- uses: docker/build-push-action@v6.13.0
- with:
- context: ./
- file: ./docker/Dockerfile
- push: false
- build-args: |
- SHAIRPORT_SYNC_BRANCH=${{ env.SHAIRPORT_SYNC_BRANCH }}
- NQPTP_BRANCH=${{ env.NQPTP_BRANCH }}
-
- build-and-publish:
- if: github.event_name != 'pull_request'
- runs-on: ubuntu-22.04
- steps:
- - name: Checkout
- uses: actions/checkout@v4.2.2
- with:
- fetch-depth: 0
-
- - name: Set SHAIRPORT_SYNC_BRANCH env.
- run: echo "SHAIRPORT_SYNC_BRANCH=${GITHUB_REF##*/}" >> $GITHUB_ENV
-
- - name: Is branch "master"?
- if: ${{ env.SHAIRPORT_SYNC_BRANCH == 'master' }}
- run: |
- echo "IMAGE_TAG_BASE=rolling" >> $GITHUB_ENV
-
- - name: Is branch "development"?
- if: ${{ env.SHAIRPORT_SYNC_BRANCH == 'development' }}
- run: |
- echo "NQPTP_BRANCH=development" >> $GITHUB_ENV
- echo "IMAGE_TAG_BASE=development" >> $GITHUB_ENV
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3.3.0
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3.8.0
-
- - name: Login to Docker Registry
- uses: docker/login-action@v3.3.0
- with:
- registry: ${{ secrets.DOCKER_REGISTRY }}
- username: ${{ secrets.DOCKER_REGISTRY_USER }}
- password: ${{ secrets.DOCKER_REGISTRY_TOKEN }}
-
- - name: Build and push (classic)
- uses: docker/build-push-action@v6.13.0
- with:
- context: ./
- file: ./docker/classic/Dockerfile
- platforms: ${{ env.DOCKER_PLATFORMS }}
- push: ${{ env.IMAGE_TAG_BASE != '' }}
- tags: ${{ secrets.DOCKER_IMAGE_NAME }}:${{ env.IMAGE_TAG_BASE }}-classic
- build-args: |
- SHAIRPORT_SYNC_BRANCH=${{ env.SHAIRPORT_SYNC_BRANCH }}
-
- - name: Build and push
- uses: docker/build-push-action@v6.13.0
- with:
- context: ./
- file: ./docker/Dockerfile
- platforms: ${{ env.DOCKER_PLATFORMS }}
- push: ${{ env.IMAGE_TAG_BASE != '' }}
- tags: ${{ secrets.DOCKER_IMAGE_NAME }}:${{ env.IMAGE_TAG_BASE }}
- build-args: |
- SHAIRPORT_SYNC_BRANCH=${{ env.SHAIRPORT_SYNC_BRANCH }}
- NQPTP_BRANCH=${{ env.NQPTP_BRANCH }}
+++ /dev/null
-# Builds & pushes a docker image when a tag is created.
-# Tag pattern: '[tag]' & '[tag]-classic'
-# 'latest' & 'classic' also, when master tagged.
-
-# Only pushes the tag when it matches one of the following patterns:
-# X, X.Y or X.Y.Z
-
-name: Build and push docker (tag)
-
-on:
- workflow_dispatch:
- push:
- tags:
- - '[0-9]+' # X
- - '[0-9]+\.[0-9]+' # X.Y
- - '[0-9]+\.[0-9]+\.[0-9]+' # X.Y.Z
-
-env:
- DOCKER_PLATFORMS: linux/386,linux/amd64,linux/arm/v6,linux/arm64,linux/arm/v7
- NQPTP_BRANCH: main
-
-jobs:
- main:
- runs-on: ubuntu-22.04
- steps:
- - name: Checkout
- uses: actions/checkout@v4.2.2
- with:
- fetch-depth: 0
-
- - name: Set SHAIRPORT_SYNC_BRANCH env.
- run: |
- raw=$(git branch -r --contains ${{ github.ref }})
- branch=${raw##*/}
- echo "SHAIRPORT_SYNC_BRANCH=${branch}" >> $GITHUB_ENV
-
- - name: Set tag env
- run: echo "GIT_TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
-
- - name: Is branch "master"?
- if: ${{ env.SHAIRPORT_SYNC_BRANCH == 'master' }}
- run: echo "LATEST_TAG=true" >> $GITHUB_ENV
-
- - name: Is branch "development"?
- if: ${{ env.SHAIRPORT_SYNC_BRANCH == 'development' }}
- run: |
- echo "NQPTP_BRANCH=development" >> $GITHUB_ENV
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3.3.0
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3.8.0
-
- - name: Login to Docker Registry
- uses: docker/login-action@v3.3.0
- with:
- registry: ${{ secrets.DOCKER_REGISTRY }}
- username: ${{ secrets.DOCKER_REGISTRY_USER }}
- password: ${{ secrets.DOCKER_REGISTRY_TOKEN }}
-
- - name: Build and push (classic)
- uses: docker/build-push-action@v6.13.0
- with:
- context: ./
- file: ./docker/classic/Dockerfile
- platforms: ${{ env.DOCKER_PLATFORMS }}
- push: true
- tags: |
- ${{ secrets.DOCKER_IMAGE_NAME }}:${{ env.GIT_TAG }}-classic
- ${{ env.LATEST_TAG == 'true' && format('{0}:classic', secrets.DOCKER_IMAGE_NAME) || '' }}
- build-args: |
- SHAIRPORT_SYNC_BRANCH=${{ env.SHAIRPORT_SYNC_BRANCH }}
-
- - name: Build and push
- uses: docker/build-push-action@v6.13.0
- with:
- context: ./
- file: ./docker/Dockerfile
- platforms: ${{ env.DOCKER_PLATFORMS }}
- push: true
- tags: |
- ${{ secrets.DOCKER_IMAGE_NAME }}:${{ env.GIT_TAG }}
- ${{ env.LATEST_TAG == 'true' && format('{0}:latest', secrets.DOCKER_IMAGE_NAME) || '' }}
- build-args: |
- SHAIRPORT_SYNC_BRANCH=${{ env.SHAIRPORT_SYNC_BRANCH }}
- NQPTP_BRANCH=${{ env.NQPTP_BRANCH }}
--- /dev/null
+# Builds and pushes a docker image in the following scenarios:
+# When a commit is made to 'master' or 'development'.
+# Tag pattern
+# 'master' - rolling, rolling-classic
+# 'development' - development, development-classic
+
+# When a tag is created.
+# Tag pattern: '[tag]' & '[tag]-classic'
+
+# TODO: Does not currently push 'latest' or 'classic' tags.
+
+# Builds but does not push a docker image for PRs.
+
+name: Build and conditionally push docker image
+
+on:
+ workflow_dispatch:
+ push:
+ branches:
+ - master
+ - development
+ tags:
+ - "*"
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+
+jobs:
+ docker-vars:
+ uses: ./.github/workflows/docker-vars.yaml
+
+ build-docker-image-and-publish:
+ name: Build and conditionally push docker image
+ needs:
+ - docker-vars
+ runs-on: ubuntu-22.04
+
+ strategy:
+ matrix:
+ include:
+ - name: classic
+ dockerfile: ./docker/classic/Dockerfile
+ tag_suffix: "-classic-rc"
+ build_args: |
+ SHAIRPORT_SYNC_BRANCH=.
+ - name: main
+ dockerfile: ./docker/Dockerfile
+ tag_suffix: "-rc"
+ build_args: |
+ SHAIRPORT_SYNC_BRANCH=.
+ NQPTP_BRANCH=${{ needs.docker-vars.outputs.nqptp_branch }}
+
+ steps:
+ - name: Login to Docker Registry
+ uses: docker/login-action@v3.7.0
+ with:
+ registry: ${{ secrets.DOCKER_REGISTRY }}
+ username: ${{ secrets.DOCKER_REGISTRY_USER }}
+ password: ${{ secrets.DOCKER_REGISTRY_TOKEN }}
+ if: needs.docker-vars.outputs.push_docker_image == 'true'
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3.7.0
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3.12.0
+
+ - name: Checkout shairport sync repo
+ uses: actions/checkout@v6.0.2
+ with:
+ fetch-depth: 0
+
+ - name: Build and push ${{ matrix.name }}
+ uses: docker/build-push-action@v6.18.0
+ env:
+ registry_and_name: ${{ secrets.DOCKER_REGISTRY }}/${{ secrets.DOCKER_IMAGE_NAME }}
+ with:
+ context: ./
+ file: ${{ matrix.dockerfile }}
+ platforms: ${{ needs.docker-vars.outputs.docker_platforms }}
+ push: ${{ needs.docker-vars.outputs.push_docker_image == 'true' }}
+ # Assign tags based on branch or tag type for clarity
+ tags: |
+ ${{ github.ref_type == 'branch' && github.ref_name == 'development'
+ && format('{0}:development{1}', env.registry_and_name, matrix.tag_suffix) || '' }}
+
+ ${{ github.ref_type == 'branch' && github.ref_name == 'master'
+ && format('{0}:rolling{1}', env.registry_and_name, matrix.tag_suffix) || '' }}
+
+ ${{ github.ref_type == 'tag'
+ && format('{0}:{1}{2}', env.registry_and_name, github.ref_name, matrix.tag_suffix) || '' }}
+ build-args: |
+ ${{ matrix.build_args }}
+
+# TODO: Fix pushing of 'latest' and 'classic' tags.
+# env.is_tag == 'true' && env.branch_name == 'master' && format('{0}:latest{1}', env.registry_and_name, matrix.tag_suffix) || ''
\ No newline at end of file
--- /dev/null
+on:
+ workflow_call:
+ outputs:
+ docker_platforms:
+ description: "The docker platforms to build for."
+ value: ${{ jobs.docker-vars.outputs.docker_platforms }}
+ nqptp_branch:
+ description: "The NQPTP branch."
+ value: ${{ jobs.docker-vars.outputs.nqptp_branch }}
+ push_docker_image:
+ description: "Whether to push the docker image."
+ value: ${{ jobs.docker-vars.outputs.push_docker_image }}
+
+jobs:
+ docker-vars:
+ name: Set variables required for docker build.
+ runs-on: ubuntu-22.04
+ env:
+ NQPTP_BRANCH: main
+ PUSH_DOCKER_IMAGE: "false"
+ outputs:
+ nqptp_branch: ${{ env.NQPTP_BRANCH }}
+ push_docker_image: ${{ env.PUSH_DOCKER_IMAGE }}
+ docker_platforms: linux/386,linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64
+ steps:
+ - name: Push docker image if this is a tag.
+ if: github.ref_type == 'tag'
+ run: |
+ echo "PUSH_DOCKER_IMAGE=true" >> "$GITHUB_ENV"
+
+ - name: Push docker image if this is the "master" or "development" branch.
+ if: |
+ github.ref_type == 'branch' &&
+ (
+ github.ref_name == 'master' ||
+ github.ref_name == 'development'
+ )
+ run: |
+ echo "PUSH_DOCKER_IMAGE=true" >> "$GITHUB_ENV"
+
+ - name: If 'development' branch, set NQPTP_BRANCH to 'development'.
+ if: |
+ github.ref_type == 'branch' &&
+ github.ref_name == 'development'
+ run: |
+ echo "NQPTP_BRANCH=development" >> "$GITHUB_ENV"
\ No newline at end of file
stale:
runs-on: ubuntu-latest
steps:
- - uses: actions/stale@v9.1.0
+ - uses: actions/stale@v10.1.1
with:
stale-issue-message: 'This issue has been inactive for 28 days so will be closed 7 days from now. To prevent this, please remove the "stale" label or post a comment.'
stale-pr-message: 'This PR has been inactive for 28 days so will be closed 7 days from now. To prevent this, please remove the "stale" label or post a comment.'
man/Makefile
man/Makefile.in
scripts/shairport-sync.service
-scripts/shairport-sync.service-avahi
scripts/shairport-sync
shairport-sync.core
+gitversion-stamp
gitversion.*
plist_xml_strings.*
+plists/*.c
+plists/*.h
+plists/*.x
#Some dbus files that are automatically generated
/org.gnome.ShairportSync.service
.DS_Store
shairport-sync.xcodeproj
-# separate build directory
+# separate build directories
build
# vscode
.vscode
+
+# IntelliJ/JetBrains
+.idea/
-# Adjusting Synchronisation on Shairport Sync ("SPS")
+# Adjusting Synchronisation with Shairport Sync
-Sometimes, a timing difference can be heard, where the audio coming from the SPS-powered device is slightly ahead or slightly behind another device playing in synchrony. This can sometimes be heard as an irritating "echo".
+Sometimes, a timing difference can be heard, where the audio coming from the Shairport-Sync-powered device is slightly ahead or slightly behind another device playing in synchrony. This can sometimes be heard as an irritating "echo".
This is usually due to audio amplifier delays:
* If your output device is a HDMI-connected device such as a TV or an AV Receiver (AVR), it will almost certainly delay audio by anything up to several hundred milliseconds.
-In these circumstances, if the output from the SPS device is amplified by a conventional analog-only HiFi amplifier – which has almost no delay – it will be early by comparison with audio coming from the other device.
+In these circumstances, if the output from the Shairport Sync device is amplified by a conventional analog-only HiFi amplifier – which has almost no delay – it will be early by comparison with audio coming from the other device.
-Conversely, if the output from the SPS device is passed through an AVR, then it could be late by comparison with audio amplified by a conventional audio amplifier.
+Conversely, if the output from the Shairport Sync device is passed through an AVR, then it could be late by comparison with audio amplified by a conventional audio amplifier.
The fix for this is to get Shairport Sync to compensate for delays by providing audio to the output device _slightly late_ or _slightly early_, so that when audio emerges from the amplifier, it is in exact synchrony with audio from the other devices.
The setting to look for is in the `general` section of the Shairport Sync configuration file and is called `audio_backend_latency_offset_in_seconds`. By default it is `0.0` seconds.
-For example, to delay the output from the SPS device by 100 milliseconds (0.1 seconds), set the `audio_backend_latency_offset_in_seconds` to `0.1`, so that audio is provided to your output device 100 milliseconds later than nominal synchronisation time.
+For example, to delay the output from the Shairport Sync device by 100 milliseconds (0.1 seconds), set the `audio_backend_latency_offset_in_seconds` to `0.1`, so that audio is provided to your output device 100 milliseconds later than nominal synchronisation time.
-Similarly, to get the output from the SPS device 50 milliseconds (0.05 seconds) early, set the `audio_backend_latency_offset_in_seconds` to `-0.05`, so that audio is provided to your output device 50 milliseconds earlier than nominal synchronisation time.
+Similarly, to get the output from the Shairport Sync device 50 milliseconds (0.05 seconds) early, set the `audio_backend_latency_offset_in_seconds` to `-0.05`, so that audio is provided to your output device 50 milliseconds earlier than nominal synchronisation time.
Latency adjustments should be small, not more than about ± 250 milliseconds.
and with a decent CD-quality Digital to Analog Converter (DAC).
## CPU Power and Memory
-Computer power and memory requirements are modest – a Raspberry Pi 2 or better, including the Pi Zero 2 W, is fine.
-Unfortunately, while the original Raspberry Pi and the Pi Zero are powerful enough for AirPlay operation,
-they are not really suitable for AirPlay 2 operation.
+Computer power and memory requirements are modest – a Raspberry Pi B or better, including the Pi Zero W, is fine.
## CPU Clock
For best performance, Shairport Sync requires a stable and accurate system clock.
-This is because the output DAC's output rate is normally determined by the system clock (exceptionally, some high-end USB streamers use their own built-in clocks).
+This is because the output DAC's output rate is normally determined by the system clock (exceptionally, some very high-end USB streamers use their own built-in clocks).
If the clock drifts, or if its actual frequency is far from its nominal frequency, Shairport Sync will have to do more interpolation,
which inevitably must degrade the audio fidelity, even if it is very hard to hear.
Some very old laptops are known to have inaccurate clocks and some embedded systems can suffer from temperature-related clock drift.
Recent Raspberry Pis seem to have very accurate clocks with low drift.
## Linux
-The best kind of Linux for Shairport Sync is a "bare" Linux.
-Raspberry Pi OS Lite, Debian Minimal Server, Ubuntu Server, Fedora Server and Arch Linux (Minimal Configuration) are good examples.
+The best kind of Linux for Shairport Sync is a "bare" or "headless" Linux, that is, a Linux without a graphical user interface (GUI). Raspberry Pi OS Lite, Debian Minimal Server, Ubuntu Server, Fedora Server and Arch Linux (Minimal Configuration) are good examples of suitable Linux distributions.
-Shairport Sync also runs on "desktop" Linuxes such as Raspberry Pi OS with desktop, Debian with a desktop environment,
-Ubuntu Desktop and Fedora Workstation.
-Desktop Linuxes are less suitable because they almost always use a sound server like PulseAudio or PipeWire.
-These can interfere with Shairport Sync, which needs direct and exclusive access to the audio hardware.
+Shairport Sync will also run on "desktop" Linuxes such as Ubuntu Desktop, Fedora Workstation, Raspberry Pi OS with desktop and Debian. However, Linuxes with a GUI are less suitable because they almost always use a sound server like PipeWire or PulseAudio. These can interfere with Shairport Sync, which prefers direct and exclusive access to the audio hardware.
## DAC
A good Digital to Analog Converter (DAC) will have a huge influence on the quality of the audio.
-Shairport Sync runs at 44,100 frames per second (FPS), each frame consisting of a pair of signed 16 bit linear samples, one for left and one for right.
-Shairport Sync will take advantage to 24- or 32-bit DACs if available, and will run at 44,100 or 88,200, 176,400 or 352,800 FPS if necessary,
-though there is no advantage to the higher rates. The 44,100 FPS rate was chosen to match the rate of AirPlay.
+Shairport Sync runs at the frame rate of the input audio — 44,100 or 48,000 frames per second (44,100 only if built for classic AirPlay without FFmpeg support). It will automatically output in a wide range of formats and will try to match the output rate and channel count to that of the input. If not, it can transcode and mix down automatically. Output rates, formats and channel counts can be restricted if necessary.
Good DACs are available at a very wide range of prices, from low-cost USB "Sound Cards" to very high-end HiFi streaming DACs.
In the Raspberry Pi world, many very good low-cost I2S DACs, some with integrated amplifiers, are available. The DAC powering the Pi's built-in audio jack is not great, however. While it may be good enough for trying out Shairport Sync or for casual listening, it has a very limited frequency response and can generate very large transients when it starts up. A separate DAC will transform the output quality.
**Note**
-Make sure that the DAC is capable of 44,100 FPS operation – this is really mandatory.
-Most recent DACs are okay, but some older DACs will only run at 48,000 FPS or multiples of it, and Shairport Sync can not use them.
+If you are building for classic AirPlay and do not wish to use the FFmpeg library, input and output is restricted to 44,100 frames per second. Make sure that the DAC is capable of 44,100 FPS operation.
+Most recent DACs are okay, but some older DACs will only run at 48,000 FPS or multiples of it.
+
## Maximum Output Level
The `volume_max_db` setting allows you to reduce the maximum level of DAC output to prevent possible overloading of the amplifier or preamplifier it feeds.
## Volume Range
-The volume range is the difference (technically the ratio, expressed in dB) between the highest and lowest level of the volume control. Ideally, this should give the highest volume at the high end and a barely audible sound at the lowest level. Typical volume ranges are 60 dB to 75dB. If the range is much less than this, the difference between high and low volume won't seem large enough to the listener. If the range is much more, much of the low end of the volume control range will be inaudible. (The built-in DAC of the Raspberry Pi has this problem.) Use the `volume_range_db` setting to set the volume range. If the range you request is greater than the range available in the hardware mixer, the built-in attenuator will be used to make up the difference.
+The volume range is the difference (technically the ratio, expressed in dB) between the highest and lowest level of the volume control. Ideally, this should give the highest volume at the high end and a barely audible sound at the lowest level. Typical volume ranges are 60 dB to 85 dB. If the range is much less than this, the difference between high and low volume won't seem large enough to the listener. If the range is much more, much of the low end of the volume control range will be inaudible. (The built-in DAC of the Raspberry Pi has this problem.) Use the `volume_range_db` setting to set the volume range. If the range you request is greater than the range available in the hardware mixer, the built-in attenuator will be used to make up the difference.
## Volume Control
Audio is sent at full volume in AirPlay and AirPlay 2 with separate information being sent to set the actual volume. This volume information can be used in four ways:
* It can be used to control a built-in attenuator. This is the default.
When you complete the instructions in [BUILD.md](../BUILD.md), you have a basic functioning Shairport Sync installation. If you want more control – for example, if you want to use a specific DAC, or if you want AirPlay to control the DAC's volume control – you can use settings in the configuration file (recommended) or you can use command-line options.
## The Configuration File
-Shairport Sync reads settings from a configuration file at `/etc/shairport-sync.conf` (note that in FreeBSD it will be at `/usr/local/etc/shairport-sync.conf`). When you run `$sudo make install`, a sample configuration file called `shairport-sync.conf.sample` is always installed or updated. This contains all the setting groups and all the settings available, but they all are commented out (comments begin with `//`) so that default values are used. The file contains explanations of the settings, useful hints and suggestions.
+Shairport Sync reads settings from a configuration file at `/etc/shairport-sync.conf` (in FreeBSD it will be at `/usr/local/etc/shairport-sync.conf`). When you run `$sudo make install`, a sample configuration file called `shairport-sync.conf.sample` is always installed or updated. This contains all the setting groups and all the settings available, but they all are commented out (comments begin with `//`) so that default values are used. The file contains explanations of the settings, useful hints and suggestions.
## Specifying the Output Device and Mixer Control
If you have followed the [BUILD.md](../BUILD.md) instructions, audio received by Shairport Sync will be sent to the `default` device. Depending on the configuration of your system, you may be able to specify a specific hardware output DAC and use its built-in mixer to control volume levels. This would be desirable because (1) the `default` device may be doing further processing on the audio before sending it to the hardware output device, degrading its fidelity, and (2) using the real device's hardware mixer to control volume would give Shairport Sync complete control of the volume range.
```
In this system, you can see that there are three hardware output devices, `"hw:Headphones"`, `"hw:sndrpihifiberry"` and `"hw:vc4hdmi"`.
-Using a tool like `alsamixer`, an output device can be checked to find the name of the volume control mixer. For the first device, the name of the mixer is `"Headphone"`.
+Using a tool like [`dacquery`](https://github.com/mikebrady/dacquery) or `alsamixer`, an output device can be checked to find the name of the volume control mixer. For the first device, the name of the mixer is `"Headphone"`.
These settings can be entered into the configuration file, in the `alsa` section, as follows:
```
alsa =
{
output_device = "hw:Headphones"; // the name of the alsa output device. Use "shairport-sync -h" to discover the names of ALSA hardware devices. Use "alsamixer" or "aplay" to find out the names of devices, mixers, etc.
- mixer_control_name = "Headphone"; // the name of the mixer to use to adjust output volume. If not specified, volume in adjusted in software.
+ mixer_control_name = "PCM"; // the name of the mixer to use to adjust output volume. If not specified, volume is adjusted in software.
...
}
```
If you make a syntax error, Shairport Sync will not start and will leave a message in the log. See more details below and in the comments in the configuration file.
-Please note that if your system has a sound server such as PulseAudio or PipeWire (most desktop linuxes have one of these), you may not be able to access the sound hardware directly, so you may only be able to use the `default` output.
-
-
+Please note that if your system has a sound server such as PipeWire or PulseAudio (most desktop Linuxes have one of these), you may not be able to access the sound hardware directly, so you may only be able to use the `default` output.
## More about Configuration Settings
Settings in the configuration file are grouped. For instance, there is a `general` group within which you can use the `name` tag to set the service name. Suppose you wanted to set the name of the service to `Front Room` and give the service the password `secret`, then you should do the following:
**Important:** You should *never* use an important password as the AirPlay password for a Shairport Sync player – the password is stored in Shairport Sync's configuration file in plain text and is thus completely vulnerable.
-No backend is specified here, so it will default to the `alsa` backend if more than one back end has been compiled. To route the output to PulseAudio, set:
+No backend is specified here, so it will default to the `alsa` backend if more than one back end has been compiled. To route the output to PipeWire, set:
```
- output_backend = "pa";
+ output_backend = "pipewire";
```
in the `general` group.
};
```
-The `pa` group is used to specify settings relevant to the PulseAudio backend. You can set the "Application Name" that will appear in the "Sound" control panel.
+The `pipewire` group is used to specify settings relevant to the PipeWire backend. You can set the "Application Name" that will appear in the "Sound" control panel.
-Note: Shairport Sync can take configuration settings from command line options. This is mainly for backward compatibility, but sometimes still useful. For normal use, it is recommended that you use the configuration file method.
+Note: Shairport Sync can take configuration settings from command line options. This is mainly for backward compatibility, but sometimes still useful. Where possible, it is recommended that you use the configuration file method.
### Raspberry Pi
alsa =
{
output_device = "hw:Headphones"; // the name of the alsa output device. Use "alsamixer" or "aplay" to find out the names of devices, mixers, etc.
- mixer_control_name = "Headphone"; // the name of the mixer to use to adjust output volume. If not specified, volume in adjusted in software.
+ mixer_control_name = "PCM"; // the name of the mixer to use to adjust output volume. If not specified, volume is adjusted in software.
// ... other alsa settings
```
(Remember to uncomment the lines by removing the `//` at the start of each.) When these changes have been made, restart Shairport Sync or simply reboot the system.
This gives the service the name "Joe's Stereo" and specifies that audio device `hw:0` be used.
-For best results with the `alsa` backend — including getting true mute and instant response to volume control and pause commands — you should access the hardware volume controls. Use `amixer` or `alsamixer` or similar to discover the name of the mixer control to be used as the `mixer_control_name`.
+For best results with the `alsa` backend — including getting true mute and instant response to volume control and pause commands — you should access the hardware volume controls. Use [`dacquery`](https://github.com/mikebrady/dacquery), `amixer`, `alsamixer` or similar to discover the name of the mixer control to be used as the `mixer_control_name`.
Here is an example for a Raspberry Pi using its internal soundcard — device hw:0 — that drives the headphone jack:
```
* You can vary the resync threshold, or turn resync off completely, with the `general` `resync_threshold_in_seconds` setting.
### Tolerance
-Playback synchronisation is allowed to wander — to "drift" — a small amount before attempting to correct it. The default is 0.002 seconds, i.e. 2 ms. The smaller the tolerance, the more likely it is that overcorrection will occur. Overcorrection is when more corrections (insertions and deletions) are made than are strictly necessary to keep the stream in sync. Use the `statistics` setting to monitor correction levels. Corrections should not greatly exceed net corrections.
+Playback synchronisation is allowed to wander — to "drift" — a small amount before attempting to correct it. The default is 0.002 seconds, i.e. 2 ms. The smaller the tolerance, the more likely it is that overcorrection will occur. Overcorrection is when more corrections (insertions and deletions) are made than are strictly necessary to keep the stream in sync. Use the `statistics` setting to monitor correction levels. Corrections should not greatly exceed net corrections.
* You can vary the tolerance with the `general` `drift_tolerance_in_seconds` setting.
## Command Line Arguments
# Working with PulseAudio or PipeWire
-Many Linux systems have [PulseAudio](https://www.freedesktop.org/wiki/Software/PulseAudio/) or [PipeWire](https://pipewire.org) installed as [sound servers](https://en.wikipedia.org/wiki/Sound_server), typically providing
-audio facilities for the system's GUI. Unfortunately, they cause problems for Shairport Sync. As you'll see, these problems can be worked around.
-
-The following remarks apply to PulseAudio and PipeWire, so, for simplicity, let's refer to PulseAudio only.
-
-To understand the problems, first consider a Linux system that _does not_ have PulseAudio installed.
-1. Sound is managed by the Advanced Linux Sound Architecture ("[ALSA](https://www.alsa-project.org/wiki/Main_Page)") subsystem.
-2. The ALSA subsystem is loaded and becomes available at system startup.
-3. The ALSA `"default"` output device is mapped to the system's audio output DAC.
-
-Shairport Sync loads when system startup is complete and provides its service, routing audio to the ALSA default device.
-
-Now, consider a Linux system with a Graphical User Interface (GUI) that has PulseAudio installed.
-1. As before, sound is managed by the ALSA subsystem.
-2. As before, The ALSA subsystem is loaded and becomes available at system startup.
-3. PulseAudio loads and becomes a client of ALSA.
-4. The PulseAudio service becomes available after a user has logged in through the GUI. Importantly, if you don't log in through the GUI, you won't have a PulseAudio service.
-5. The ALSA `"default"` device no longer connects to a real output device. Instead audio sent to the `"default"` device is routed into PulseAudio. Importantly, if you have no PulseAudio service, then the ALSA default device either doesn't exist at all or goes nowhere.
-
-# The Problem
-When Shairport Sync is installed as a system service, it starts after the system has booted up.
-It runs under a low-priviliged user called `shairport-sync`.
-Unfortunately, per (4) above, since it was not logged in through the GUI, it won't have a PulseAudio service. If you are using the `"pa"` backend, Shairport Sync may well crash.
-Per (5) above, the ALSA `"default"` device either won't exist or -- if it does exist -- won't work. Hence, using the `"alsa"` backend, Shairport Sync will either terminate or remain silent.
-
-# The Fixes
-There are three ways to address this problem:
-1. Stop using Shairport Sync as a system service. Instead, launch it from a terminal window after log-in through the GUI.
-Alternatively, create a user service startup script so that it will be launched automatically after the user has logged in. This means that the system can not run without user intervention.
-2. If you are using the `"alsa"` backend, set the output device to a real ALSA hardware output device. Use `$ shairport-sync -h` (or, better, [`sps-alsa-explore`](https://github.com/mikebrady/sps-alsa-explore)) to get a list of output devices, and use the configuration file to set the device. There is a possibility this might prevent PulseAudio from working properly.
-# The Best Fix
-3. The best of all fixes is, if possible, to avoid using a system containing PulseAudio altogether. Linux operating systems typically have a "server", "lite" or "headless" version that has no GUI and no PulseAudio.
+Many Linux systems, especially desktop Linuxes with a GUI, have [PipeWire](https://pipewire.org) or [PulseAudio](https://www.freedesktop.org/wiki/Software/PulseAudio/) installed as [sound servers](https://en.wikipedia.org/wiki/Sound_server).
+PipeWire and PulseAudio are widely used and have the great advantage of being easily able to mix audio from multiple sources. The slight downside is that audio may be further processed (e.g. transcoded) on its way to the output device.
+
+To use PipeWire or PulseAudio-based systems, Shairport Sync must be set up as a user service.
+
+### Considerations
+1. Shairport Sync will work without modification in a PipeWire- or PulseAudio-based system if built with the default ALSA backend. This is because PipeWire and PulseAudio both provide a default ALSA pseudo-device to receive and play audio from ALSA-compatible programs.
+2. Shairport Sync can be built with "native" PipeWire or PulseAudio backends by adding the `--with-pipewire` or `--with-pulseaudio` configuration flags when it is being built. This has the advantage of bypassing the ALSA compatibility layer.
+3. To check if PipeWire support is built into Shairport Sync, check that the string `PipeWire` is included in the version string. (Enter `$ shairport-sync -V` to get the version string.) Similarly, the version string will include `PulseAudio` if the PulseAudio backend is built in.
+4. Remember to specify which backend Shairport Sync should use in the configuration file or on the command line.
+
+## Automatic Startup of Shairport Sync
+
+The main thing to remember about PipeWire and PulseAudio sound servers is that the services they offer only become available when a user logs in -- that is, they are set up as _user services_.
+Shairport Sync relies on them, so it must also be set up as a user service; it can not be set up as a system service because the PipeWire or PulseAudio services needed by Shairport Sync are not available when system services are launched just after system startup.
+
+### Starting Shairport Sync as a User Service
+To make Shairport Sync start as a user service (assuming you built and installed Shairport Sync using the BUILD.md guide), ensure you are logged in as the appropriate user and enter:
+```
+$ systemctl --user enable shairport-sync
+```
+Make sure it is _not enabled_ as a system service:
+```
+# systemctl disable shairport-sync
+```
+and then reboot.
+
+#### Problems with User Service
+Shairport Sync will function perfectly well as a user service, but there are a number of things to bear in mind:
+
+1. The AirPlay service will only be available when the user is logged in. When the user logs out, Shairport Sync will terminate and the AirPlay service will disappear.
+2. If the Linux system is a desktop Linux with a GUI, audio will be sent to the default output only when the user is logged in through the GUI.
+
+Automatic user login may help address these problems.
# Advanced Topics
Here you will find links to some advanced features and things you can do with Shairport Sync.
-* [Finish Setting Up](InitialConfiguration.md).
-* [Working with PulseAudio or PipeWire](https://github.com/mikebrady/shairport-sync/blob/development/ADVANCED%20TOPICS/PulseAudioAndPipeWire.md).
+* [Working with PulseAudio or PipeWire](PulseAudioAndPipeWire.md).
* [Adjusting Sync](AdjustingSync.md) – advance or delay the timing of the output from Shairport Sync to compensate for amplifier delays.
-* [Get The Best](GetTheBest.md) from your system.
* [Metadata](Metadata.md).
* [Events](Events.md).
* [Statistics](Statistics.md).
This is the net amount of interpolation done by Shairport Sync to keep the audio stream in sync in the last interval, i.e. the number of frames added **minus** the number of frames removed from the audio stream, relative to the total number of frames output, expressed in parts per million (PPM). For reference, adding or removing one frame per second into a 44,100 frames per second stream is 22.68 ppm.
##### All Sync PPM
This is the total amount of interpolation done by Shairport Sync to keep the audio stream in sync in the last interval, i.e. the number of frames added **plus** the number of frames removed from the audio stream, relative to the total number of frames output, expressed in parts per million (PPM). The magnitude of this should be the same as the `Net Sync PPM`. If it is much larger it means that Shairport Sync is overcorrecting for sync errors – try increasing the drift tolerance to reduce it.
+##### Av Sync Window (ms)
+This is an estimate, in milliseconds, of how often the audio system updates the latency data it provides.
##### Packets
This is the number of packets of audio frames received since the start of the session. A packet normally contains 352 ± 1 audio frames.
##### Missing
# AirPlay 2
-**Shairport Sync** (as of [v4.1](https://github.com/mikebrady/shairport-sync/releases/tag/4.1) and newer) offers **AirPlay 2** support for audio sources on:
+Please be aware that an official specification of AirPlay 2 has not been made public. What follows is based on what has been discovered so far.
+
+### Streams
+AirPlay 2 supports two stream types -- "Realtime Audio" and "Buffered Audio".
+- Realtime audio is played after a short delay. The delay is usually around two seconds. (This is similar to the older Classic AirPlay format.)
+- Buffered audio starts playing after a very short delay. Audio can arrive faster than it is played and is buffered locally in the Shairport Sync device.
+
+### Formats
+AirPlay 2 supports a variety of audio formats. This is a summary of what is known to work so far.
+ - Compression
+ - ALAC (Apple Lossless Advanced Codec) encoding is used for lossless streaming,
+ - AAC (Advanced Audio Coding) is used for lossy streaming.
+ - (BTW you can check your perception of lossy and lossless formats [here](http://abx.digitalfeed.net).)
+ - Encoding
+    - 16- and 24-bit signed linear PCM samples ("S16" and "S24"),
+ - 24-bit floating point PCM samples ("F24").
+ - Rates
+ - Sample rates of 44,100 and 48,000 frames per second (fps).
+ - Channels
+ - Stereo, 5.1 and 7.1 Surround Sound.
+
+# Shairport Sync
+This information relates to Shairport Sync Version 5.0 onwards.
+
+Shairport Sync offers AirPlay 2 support for audio sources on:
- iOS devices,
- Macs from macOS 10.15 (Catalina) onwards,
- HomePod minis,
-- and Apple TVs.
+- Apple TVs.
## What Works
-- AirPlay 2 audio for iOS, HomePod mini, AppleTV and Mac players.
- * Audio is synchronised with other AirPlay 2 devices.
- * Two types of audio are received by Shairport Sync – "Realtime" streams of CD quality ALAC (like "classic" AirPlay) and "Buffered Audio" streams of AAC stereo at 44,100 frames per second.
- * The selection of stream type is made by the player.
- * Realtime streams generally have a latency of about two seconds. Buffered Audio streams typically have a latency of half a second or less.
- * In AirPlay 2 mode, Shairport Sync reverts to "classic" AirPlay when iTunes on macOS or macOS Music plays to multiple speakers and one of more of them is compatible with AirPlay only.
-
-- Devices running Shairport Sync in AirPlay 2 mode can be [added](https://github.com/mikebrady/shairport-sync/blob/development/ADDINGTOHOME.md) to the Home app.
+- Audio synchronised with other AirPlay 2 devices.
+- AirPlay 2 audio playback:
+ - Basic
+ - ALAC/S16/44100/2 realtime audio (stereo),
+ - AAC/F24/44100/2 buffered audio (stereo).
+ - Better
+ - AAC/F24/48000/2 buffered audio (stereo).
+ - Lossless
+ - ALAC/S24/48000/2 buffered audio (stereo).
+ - Surround
+ - AAC/F24/48000/5.1 and AAC/F24/48000/7.1 buffered audio (surround sound).
+- Transcoding: the output will be transcoded if necessary (e.g. from 44,100 to 48,000 fps) to match the output device's rate.
+- Mixdown: mixdown to fewer output channels (e.g. 5.1 to stereo) is automatic but can be controlled.
+- Shairport Sync can revert to "classic" AirPlay if necessary.
+- Devices running Shairport Sync in AirPlay 2 mode can be [added](https://github.com/mikebrady/shairport-sync/blob/master/ADDINGTOHOME.md) to the Home app.
+- Shairport Sync can be built to support classic AirPlay (aka "AirPlay 1") only. Classic AirPlay offers only one format, ALAC/S16/44100/2, but output transcoding is available to, for example, 48000 fps.
## What Does Not Work
-- No AirPlay 2 for Windows iTunes.
+- High-Definition Lossless -- 96,000 and 192,000 fps material -- is not supported.
+- Dolby Atmos is not supported.
+- AirPlay 2 for Windows iTunes is not supported.
- Remote control facilities are not implemented.
- AirPlay 2 from macOS prior to 10.15 (Catalina) is not supported.
+- Multiple instances of the AirPlay 2 version of Shairport Sync can not be hosted on the same system. It seems that AirPlay 2 clients are confused by having multiple AirPlay 2 players at the same IP addresses.
+
## General
Shairport Sync uses a companion application called [NQPTP](https://github.com/mikebrady/nqptp) ("Not Quite PTP")
for timing and synchronisation in AirPlay 2. NQPTP must have exclusive access to ports `319` and `320`.
-Lossless and High Definition Lossless material is transcoded to AAC before it reaches Shairport Sync.
-
## What You Need
-AirPlay 2 support needs a slightly more powerful CPU for decoding and synchronisation and more memory for bigger buffers and larger libraries. A system with the power of a Raspberry Pi 2 or Raspberry Pi Zero 2 W, or better, is recommended.
+For AirPlay 2, a system with the power of a Raspberry Pi B, or better, is recommended.
Here are some guidelines:
-* Full access, including network capabilities or `root` privileges, to a system at least as powerful as a Raspberry Pi 2 or a Raspberry Pi Zero 2 W.
+* Full access, including low-port-number network access or `root` privileges, to a system at least as powerful as a Raspberry Pi B.
* Ports 319 and 320 must be free to use (i.e. they must not be in use by another service such as a PTP service) and must not be blocked by a firewall.
-* An up-to-date system. This is important, as some of the libraries must be the latest available.
-* Shairport Sync will not run in AirPlay 2 mode on a Mac because NQPTP, on which it relies, needs ports 319 and 320, which are already used by macOS.
-* A version of the [FFmpeg](https://www.ffmpeg.org) library with an AAC decoder capable of decoding Floating Planar -- `fltp` -- material. There is a guide [here](TROUBLESHOOTING.md#aac-decoder-issues-airplay-2-only) to help you find out if your system has it.
-* An audio output. The device must be capable of running at 44,100 frames per second. You can use [`sps-alsa-explore`](https://github.com/mikebrady/sps-alsa-explore) to test the suitability of hardware ALSA audio devices on your device.
-Other backends continue to work as with "classic" Shairport Sync.
-- Multiple instances of the AirPlay 2 version of Shairport Sync can not be hosted on the same system. It seems that AirPlay 2 clients are confused by having multiple AirPlay 2 players at the same IP addresses.
+* An up-to-date Linux, FreeBSD or OpenBSD system. This is important, as some of the libraries must be the latest available.
+
+* Due to realtime timing requirements, Shairport Sync does not work well on virtual machines outputting to ALSA, PipeWire or PulseAudio. For the same reason, Shairport Sync does not work very well with Bluetooth. YMMV of course, and you can have success where timing is not crucial, such as outputting to `stdout` or to a unix pipe.
+* Shairport Sync can not run in AirPlay 2 mode on a Mac because NQPTP, on which it relies, needs ports 319 and 320, which are already used by macOS.
+* A version of the [FFmpeg](https://www.ffmpeg.org) library with an AAC decoder capable of decoding Floating Planar -- `fltp` -- material must be in your system. There is a guide [here](TROUBLESHOOTING.md#aac-decoder-issues-airplay-2-only) to help you find out if your system has it.
+* An audio output. For preference, the output device should be capable of accepting stereo or multichannel at 44,100 and 48,000 frames per second. With FFmpeg support, audio will be transcoded and mixed to match output device capabilities as necessary.
+* You can use [`dacquery`](https://github.com/mikebrady/dacquery) to test the suitability of hardware ALSA audio devices on your system.
+#### An Ideal System
+For the highest quality audio with the highest fidelity and a minimum of audio processing, an ideal system would be a bare Linux system without a GUI and without PipeWire or PulseAudio, such as Raspberry Pi OS (Lite) or similar. In this case, Shairport Sync connects directly to the ALSA output DAC. For testing, the built-in DAC or a low-cost USB DAC is usually sufficient.
+
+A problem with this setup is that Shairport Sync expects exclusive access to the audio device. If you have other audio sources, this can be problematic. In such a situation, an otherwise-bare system as described above, but with PipeWire added, can be used, so that all audio sources output to the PipeWire system, which takes care of mixing. Take care to ensure that the Unix users running the audio applications that use PipeWire are members of the `pipewire` group.
## Guides
* A building guide is available at [BUILD.md](BUILD.md).
# Build and Install Shairport Sync
This guide is for a basic installation of Shairport Sync in a recent (2018 onwards) Linux or FreeBSD.
-Shairport Sync can be built as an AirPlay 2 player (with [some limitations](AIRPLAY2.md#features-and-limitations)) or as "classic" Shairport Sync – a player for the older, but still supported, AirPlay (aka "AirPlay 1") protocol. Check ["What You Need"](AIRPLAY2.md#what-you-need) for some basic system requirements.
+## Important Note -- Upgrading to Version 5!
+
+If you have been using Shairport Sync prior to Version 5.0 and are rebuilding or reinstalling Shairport Sync, be aware that a few important things have changed.
+While the overall operation of Shairport Sync has not changed much, it is really important to fully remove existing startup scripts and to check and update configuration files. Some important changes are highlighted here:
+
+1. The default sample rate has changed from 44,100 to 48,000 for buffered audio. Real-time audio streams remain at 44,100.
+2. Shairport Sync will also play surround sound (5.1 and 7.1) and lossless (48k) audio.
+3. Shairport Sync will automatically switch output rates and formats to correspond to input rates and formats. This can be controlled.
+4. Many build flags have changed: for example `--with-systemd` is now `--with-systemd-startup`.
+5. Many configuration settings names and facilities have changed. For example `convolution` is now `convolution_enabled`. Another example is that convolution is now multi-threaded, so a new `convolution_thread_pool_size` setting is available.
+6. Installation has changed: when Shairport Sync and NQPTP are installed, their startup scripts have changed to provide them with more suitable privileges. You must remove any existing startup scripts.
+7. Jack Audio is deprecated and will be removed in a future update. Consider using PipeWire instead.
+
+## 0. General
+
+Shairport Sync can be built as an AirPlay 2 player (with [some limitations](AIRPLAY2.md#features-and-limitations)) or as classic Shairport Sync – a player for the older, but still supported, "classic" AirPlay (aka "AirPlay 1") protocol. Check ["What You Need"](AIRPLAY2.md#what-you-need) for some basic system requirements.
+
+Note that Shairport Sync does not work well in virtual machines -- YMMV.
Overall, you'll be building and installing two programs – Shairport Sync itself and [NQPTP](https://github.com/mikebrady/nqptp), a companion app that Shairport Sync uses for AirPlay 2 timing. If you are building classic Shairport Sync, NQPTP is unnecessary and can be omitted.
```
# rm /usr/local/bin/shairport-sync
```
-Do this until no more copies of `shairport-sync` are found.
+Do this until no more copies of `shairport-sync` are found. Shairport Sync could also be found in `/usr/bin/shairport-sync` or elsewhere.
+#### Remove Old Configuration Files
+If you want to preserve any configuration settings you have made, you should make a note of them and then delete the configuration file.
+This is suggested because there may be new configuration options available, which will be present but disabled in the
+updated configuration file that will be installed.
+You can then apply your previous settings to the updated configuration file.
+
+The configuration file is typically at `/etc/shairport-sync.conf` or `/usr/local/etc/shairport-sync.conf`.
+
#### Remove Old Service Files
You should also remove any of the following service files that may be present:
* `/etc/systemd/system/shairport-sync.service`
* `/etc/systemd/user/shairport-sync.service`
* `/lib/systemd/system/shairport-sync.service`
* `/lib/systemd/user/shairport-sync.service`
+* `/etc/dbus-1/system.d/shairport-sync-dbus.conf`
+* `/etc/dbus-1/system.d/shairport-sync-mpris.conf`
* `/etc/init.d/shairport-sync`
-
+* `~/.config/systemd/user/shairport-sync.service`
+
New service files will be installed if necessary at the `# make install` stage.
+
+(In FreeBSD, there is no need to remove the file at `/usr/local/etc/rc.d/shairport-sync` – it's always replaced in the `make install` step.)
#### Reboot after Cleaning Up
-If you removed any installations of Shairport Sync or any of its service files in the last two steps, you should reboot.
+If you removed any installations of Shairport Sync or any of its service or configuration files in the last three steps, you should reboot.
## 2. Get Tools and Libraries
Okay, now let's get the tools and libraries for building and installing Shairport Sync (and NQPTP).
# apt upgrade # this is optional but recommended
# apt install --no-install-recommends build-essential git autoconf automake libtool \
libpopt-dev libconfig-dev libasound2-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev \
- libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev uuid-dev libgcrypt-dev xxd
+ libplist-dev libsodium-dev uuid-dev libgcrypt-dev xxd libplist-utils \
+ libavutil-dev libavcodec-dev libavformat-dev
```
If you are building classic Shairport Sync, the list of packages is shorter:
```
# apt update
# apt upgrade # this is optional but recommended
# apt-get install --no-install-recommends build-essential git autoconf automake libtool \
- libpopt-dev libconfig-dev libasound2-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev
+ libpopt-dev libconfig-dev libasound2-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev \
+ libavutil-dev libavcodec-dev libavformat-dev
```
Building on Ubuntu 24.10 or Debian 13 ("Trixie") and later -- and possibly on other distributions -- requires `systemd-dev`. It does no harm to attempt to install it -- the install will simply fail if the package doesn't exist:
```
# apt install --no-install-recommends systemd-dev # it's okay if this fails because the package doesn't exist
```
+
### Fedora (Fedora 40)
-For AirPlay 2 operation, _before you install the libraries_, please ensure the you have [enabled](https://docs.fedoraproject.org/en-US/quick-docs/rpmfusion-setup) RPM Fusion software repositories to the "Nonfree" level. If this is not done, the FFmpeg libraries will lack a suitable AAC decoder, preventing Shairport Sync from working in AirPlay 2 mode.
+Important: to get the correct version of FFmpeg, _before you install the libraries_, please ensure that you have [enabled](https://docs.fedoraproject.org/en-US/quick-docs/rpmfusion-setup) RPM Fusion software repositories to the "Nonfree" level. If this is not done, the FFmpeg libraries will lack a suitable AAC decoder, preventing Shairport Sync from working in AirPlay 2 mode.
```
# yum update
# yum install --allowerasing make automake gcc gcc-c++ \
# yum update
# yum install make automake gcc gcc-c++ \
git autoconf automake avahi-devel libconfig-devel openssl-devel popt-devel soxr-devel \
- alsa-lib-devel
+ ffmpeg ffmpeg-devel alsa-lib-devel
```
### Arch Linux
After you have installed the libraries, note that you should enable and start the `avahi-daemon` service.
If you are building classic Shairport Sync, the list of packages is shorter:
```
# pacman -Syu
-# pacman -Sy git base-devel alsa-lib popt libsoxr avahi libconfig
+# pacman -Sy git base-devel alsa-lib popt libsoxr avahi libconfig ffmpeg
```
Enable and start the `avahi-daemon` service.
```
# systemctl start avahi-daemon
```
### FreeBSD
-First, update everything:
+This is for FreeBSD 14.3. First, update everything:
```
# freebsd-update fetch
# freebsd-update install
-# pkg
# pkg update
+# pkg upgrade
```
-Next, install the Avahi subsystem. FYI, `avahi-app` is chosen because it doesn’t require X11. `nss_mdns` is included to allow FreeBSD to resolve mDNS-originated addresses – it's not actually needed by Shairport Sync. Thanks to [reidransom](https://gist.github.com/reidransom/6033227) for this.
+Next, install the Avahi subsystem.
+FYI, `avahi-app` is chosen because it doesn’t require X11. If you are using a GUI with your FreeBSD system, select `avahi` rather than `avahi-app`.
+`nss_mdns` is included to allow FreeBSD to resolve mDNS-originated addresses – it's not actually needed by Shairport Sync. Thanks to [reidransom](https://gist.github.com/reidransom/6033227) for this.
```
# pkg install avahi-app nss_mdns
```
```
Reboot for these changes to take effect.
-Next, install the packages that are needed for Shairport Sync and NQPTP:
+Next, install the packages that are needed for Shairport Sync and NQPTP. If you will be using `--with-sndio` instead of `--with-alsa` -- see below -- you can omit the `alsa-utils` package.
+
+If you are building Shairport Sync for AirPlay 2, install the following packages:
```
# pkg install git autotools pkgconf popt libconfig openssl alsa-utils libsoxr \
- libplist libsodium ffmpeg e2fsprogs-libuuid vim
+ libplist libsodium ffmpeg libuuid vim
```
+
If you are building classic Shairport Sync, the list of packages is shorter:
```
-# pkg install git autotools pkgconf popt libconfig openssl alsa-utils libsoxr
+# pkg install git autotools pkgconf popt libconfig openssl alsa-utils ffmpeg
```
## 3. Build
### NQPTP
### Shairport Sync
#### Build and Install
-Download Shairport Sync, branch and configure, compile and install it. Before executing the commands, please note the following:
+Download Shairport Sync and configure, compile and install it. Before executing the commands, please note the following:
-* If building for FreeBSD, replace `--with-systemd` with `--with-os=freebsd --with-freebsd-service`.
-* Omit the `--with-airplay-2` from the `./configure` options if you are building classic Shairport Sync.
-* If you wish to add extra features, for example an extra audio backend, take a look at the [configuration flags](CONFIGURATION%20FLAGS.md). For this walkthrough, though, please do not remove the `--with-alsa` flag.
+##### Build Options
+* **FreeBSD:** For FreeBSD, replace `--with-systemd-startup` with `--with-os=freebsd --with-freebsd-startup`.
+ * Optionally, replace `--with-alsa` with the backend for FreeBSD's native audio system `--with-sndio`.
+ * If you omit the `--sysconfdir=/etc` entry, `/usr/local/etc` will be used as the `sysconfdir`, which is conventionally used in FreeBSD.
+* **Classic Shairport Sync:** For classic Shairport Sync, replace `--with-airplay-2` with `--with-ffmpeg`.
+ * You can actually omit `--with-ffmpeg` when building classic Shairport Sync, but it is not recommended. While you'll save space (you can omit the FFMpeg libraries in the build and run-time environments), transcoding, e.g. from 44,100 to 48,000 frames per second, will not be available and less well-maintained and less secure decoders will be used.
+* **Extra Features:** If you wish to add extra features, for example an extra audio backend, take a look at the [configuration flags](CONFIGURATION%20FLAGS.md). For this walkthrough, though, please do not change too much!
```
$ git clone https://github.com/mikebrady/shairport-sync.git
$ cd shairport-sync
-$ autoreconf -fi
+$ autoreconf -fi # about 1.5 minutes on a Raspberry Pi B
$ ./configure --sysconfdir=/etc --with-alsa \
- --with-soxr --with-avahi --with-ssl=openssl --with-systemd --with-airplay-2
-$ make
+ --with-soxr --with-avahi --with-ssl=openssl --with-systemd-startup --with-airplay-2
+$ make # just over 7 minutes on a Raspberry Pi B
# make install
```
-By the way, the `autoreconf` step may take quite a while – please be patient!
-
## 4. Test
-At this point, Shairport Sync should be built and installed but not running. If the user you are logged in as is a member of the unix `audio` group, Shairport Sync should run from the command line:
+At this point, Shairport Sync should be built and installed but not running. Now you can test it out from the command line. Before you start testing, though, if you have built Shairport Sync for AirPlay 2 operation, ensure `NQPTP` is running.
+
+To check the installation, enter:
```
$ shairport-sync
```
* Add the `-v` command line option to get some diagnostics.
* Add the `--statistics` option to get some information about the audio received.
-The AirPlay service should appear on the network and the audio you play should come through to the default ALSA device. (Use `alsamixer` or similar to adjust levels.)
-If you have problems, please check the items in Final Notes below, or in the [TROUBLESHOOTING.md](TROUBLESHOOTING.md) guide.
+The AirPlay service should appear on the network and the audio you play should come through to the default audio output device. You can use the AirPlay volume control or the system's volume controls or a command-line tool like `alsamixer` to adjust levels.
-Note: Shairport Sync will run indefinitely -- use Control-C it to stop it.
+If you have problems, please check the hints in Final Notes below, or in the [TROUBLESHOOTING.md](TROUBLESHOOTING.md) guide.
-## 5. Enable and Start Service
-If your system has a Graphical User Interface (GUI) it probably uses PulseAudio or PipeWire for audio services. If that is the case, please review [Working with PulseAudio or PipeWire](https://github.com/mikebrady/shairport-sync/blob/master/ADVANCED%20TOPICS/PulseAudioAndPipeWire.md).
-Otherwise, once you are happy that Shairport Sync runs from the command line, you should enable and start the `shairport-sync` service. This will launch Shairport Sync automatically as a background "daemon" service when the system powers up:
+When you are finished running Shairport Sync, use Control-C to stop it.
+## 5. Enable and Start Service
### Linux
+If your system uses either PipeWire or PulseAudio as sound servers (most "desktop" Linuxes use one or the other), Shairport Sync must be started as a user service. This is because the PipeWire or PulseAudio services -- needed by Shairport Sync -- are user services themselves, and they must be running before Shairport Sync starts. That implies that Shairport Sync must be started as a user service.
+
+#### Checking for PipeWire or PulseAudio
+If PipeWire is installed and running, the following command should return status information, as follows:
+```
+$ systemctl --user status pipewire
+● pipewire.service - PipeWire Multimedia Service
+ Loaded: loaded (/usr/lib/systemd/user/pipewire.service; enabled; preset: enabled)
+...
+```
+If not, it will return something like this:
+```
+$ systemctl --user status pipewire
+Unit pipewire.service could not be found.
+```
+Similarly, if PulseAudio is installed, the following command should return status information:
+```
+$ systemctl --user status pulseaudio
+```
+If PipeWire or PulseAudio is installed, you must enable Shairport Sync as a _user service_ only.
+
+#### Enable Shairport Sync as a User Service
+
+To enable Shairport Sync as a user service that starts automatically when the user logs in, reboot, ensure you are logged in as that user and then run the following convenience script:
+```
+$ sh user-service-install.sh
+```
+This will run a few checks, install a user startup script and start Shairport Sync immediately. (Run `$ sh user-service-install.sh --dry-run` initially if you prefer...)
+
+##### User Service Limitations.
+1. If Shairport Sync is installed as a user service, it is activated when that user logs in and deactivated when the user logs out.
+On an unattended system, this difficulty can be overcome by using automatic user login.
+2. If your system has a GUI (that is, if it's a "desktop Linux"), then audio will only be routed to the speakers when the user is logged in through the GUI.
+
+#### Enable Shairport Sync as a System Service
+
+If your system does not have either PipeWire or PulseAudio installed (see how to check above), then you can enable Shairport Sync as a system service that starts automatically when the system boots up. To do so -- assuming you have followed the build guide successfully -- enter the following command:
```
# systemctl enable shairport-sync
```
+This enables Shairport Sync to start automatically when the system boots up. Please remember, this will not work if PipeWire or PulseAudio are installed on your system.
+
+You should not enable Shairport Sync as a user service and a system service at the same time!
+
### FreeBSD
To make the `shairport-sync` daemon load at startup, add the following line to `/etc/rc.conf`:
```
shairport_sync_enable="YES"
```
## 6. Check
-Reboot the machine. The AirPlay service should once again be visible on the network and audio will be sent to the default ALSA device.
+Reboot the machine. The AirPlay service should once again be visible on the network and audio will be sent to the default audio output.
## 7. Final Notes
A number of system settings can affect Shairport Sync. Please review them as follows:
+### ALSA Output Devices
+If your system is _not_ using PipeWire or PulseAudio (see [above](#checking-for-pipewire-or-pulseaudio)), it probably uses ALSA directly. The following hints might be useful:
+1. To access ALSA output devices, a user must be in the `audio` group. To add the current user to the `audio` group, enter:
+ ```
+ $ sudo usermod -aG audio $USER
+ ```
+2. Ensure that the ALSA default output device is not muted and has the volume turned up -- `alsamixer` is very useful for this.
+3. To explore the ALSA output devices on your system, consider using the [dacquery](https://github.com/mikebrady/dacquery) tool.
### Power Saving
If your computer has an `Automatic Suspend` Power Saving Option, you should experiment with disabling it, because your computer has to be available for AirPlay service at all times.
### WiFi Power Management – Linux
-If you are using WiFi, you should turn off WiFi Power Management:
+If you are using WiFi, you should turn off WiFi Power Management. On a Raspberry Pi, for example, you can use the following commands:
```
# iwconfig wlan0 power off
```
### Add to Home
With AirPlay 2, you can follow the steps in [ADDINGTOHOME.md](ADDINGTOHOME.md) to add your device to the Apple Home system.
### Wait, there's more...
-Instead of using default values for everything, you can use the configuration file to get finer control over the setup, particularly the output device and mixer control -- see [Finish Setting Up](ADVANCED%20TOPICS/InitialConfiguration.md).
+
+At this point, you should have a basic functioning Shairport Sync installation. If you want more control – for example, using the ALSA backend, if you want to use a specific DAC, or if you want AirPlay to control the DAC's volume control – you can use settings in the configuration file or you can use command-line options.
+
+#### Configuration Sample File
+When you run `# make install`, a configuration file is installed if one doesn't already exist. Additionally, a sample configuration file called `shairport-sync.conf.sample` is _always_ installed. This contains all the setting groups and all the settings available, commented out so that default values are used. The file contains explanations of the settings, useful hints and suggestions. The configuration file and the sample configuration file are installed in the `sysconfdir` you specified at the `./configure...` step above.
Please take a look at [Advanced Topics](ADVANCED%20TOPICS/README.md) for some ideas about what else you can do to enhance the operation of Shairport Sync. For example, you can adjust synchronisation to compensate for delays in your system.
## The Basic Idea
-The basic idea is to use a small Linux computer to create an isolated WiFi network (a "car network") and run Shairport Sync on it to provide an AirPlay service. An iPhone or an iPad with cellular capability can simultaneously connect to internet radio, YouTube, Apple Music, Spotify, etc. over the cellular network and send AirPlay audio through the car network to the AirPlay service provided by Shairport Sync. This sends the audio to the computer's DAC which is connected to the AUX input of your car audio.
+The basic idea is to use a small Linux computer to create an isolated WiFi network (a "Car WiFi Network") and run Shairport Sync on it to provide an AirPlay service. An iPhone or an iPad with cellular capability can simultaneously connect to internet radio, YouTube, Apple Music, Spotify, etc. over its cellular network connection and send AirPlay audio through the Car WiFi Network to the AirPlay service provided by Shairport Sync. This sends the audio to the computer's sound card or DAC, which is connected to the AUX input of your car audio.
Please note that Android phones and tablets can not, so far, do this trick of using the two networks simultaneously.
## Example
+This example is based on Raspberry Pi OS Lite (Trixie). Note that some of the details will vary if you are using a different version of Linux.
+
If you are updating an existing installation, please refer to the [updating](#updating) section below.
-In this example, a Raspberry Pi Zero 2 W and a Pimoroni PHAT DAC are used. Shairport Sync will be built for AirPlay 2 operation, but you can build it for "classic" AirPlay (aka AirPlay 1) operation if you prefer. A Pi Zero W is powerful enough for classic AirPlay.
+In this example, a Raspberry Pi Zero W and a Pimoroni PHAT DAC are used. Shairport Sync will be built for AirPlay 2 operation. You can build classic Shairport Sync to support only the original "classic" AirPlay (aka AirPlay 1) if you prefer.
-Please note that some of the details of setting up networks are specific to the version of Linux used.
### Prepare the initial SD Image
-* Download Raspberry Pi OS (Lite) and install it onto an SD Card using `Raspberry Pi Imager`. The Lite version is preferable to the Desktop version as it doesn't include a sound server like PulseAudio or PipeWire that can prevent direct access to the audio output device.
+* Download Raspberry Pi OS Lite (Trixie) and install it onto an SD Card using `Raspberry Pi Imager`. The Lite version is preferable to the Desktop version as it doesn't include a sound server like PulseAudio or PipeWire that is not needed in this case.
* Before writing the image to the card, use the Settings control on `Raspberry Pi Imager` to set hostname, enable SSH and provide a username and password to use while building the system. Similarly, you can specify a wireless network the Pi will connect to while building the system. Later on, the Pi will be configured to start its own isolated network.
* The next few steps are to add the overlay needed for the sound card. This may not be necessary in your case, but in this example a Pimoroni PHAT is being used. If you do not need to add an overlay, skip these steps.
* Mount the card on a Linux machine. Two drives should appear – a `boot` drive and a `rootfs` drive.
* `cd` to the `boot` drive (since my username is `mike`, it will be `$ cd /media/mike/boot`).
+ * From there, cd to the `firmware` subdirectory, i.e. `/boot/firmware`.
* Edit the `config.txt` file to add the overlay needed for the sound card. This may not be necessary in your case, but in this example a Pimoroni PHAT is being used and it needs the following entry to be added:
```
dtoverlay=hifiberry-dac
After a short time, the Pi should appear on your network – it may take a couple of minutes. To check, try to `ping` it at the `<hostname>.local`, e.g. if the hostname is `bmw` then use `$ ping bmw.local`. Once it has appeared, you can SSH into it and configure it.
### Boot, Configure, Update
-The first thing to do on a Pi would be to use the `raspi-config` tool to expand the file system to use the entire card. Next, do the usual update and upgrade:
+The first thing to do on a Pi would be to do the usual update and upgrade:
```
# apt-get update
# apt-get upgrade
Let's get the tools and libraries for building and installing Shairport Sync (and NQPTP).
```
-# apt install --no-install-recommends build-essential git xmltoman autoconf automake libtool \
+# apt update
+# apt upgrade # this is optional but recommended
+# apt install --no-install-recommends build-essential git autoconf automake libtool \
libpopt-dev libconfig-dev libasound2-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev \
- libplist-dev libsodium-dev libavutil-dev libavcodec-dev libavformat-dev uuid-dev libgcrypt-dev xxd
+ libplist-dev libsodium-dev uuid-dev libgcrypt-dev xxd libplist-utils \
+ libavutil-dev libavcodec-dev libavformat-dev systemd-dev
```
If you are building classic Shairport Sync, the list of packages is shorter:
```
-# apt-get install --no-install-recommends build-essential git xmltoman autoconf automake libtool \
- libpopt-dev libconfig-dev libasound2-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev
+# apt update
+# apt upgrade # this is optional but recommended
+# apt-get install --no-install-recommends build-essential git autoconf automake libtool \
+ libpopt-dev libconfig-dev libasound2-dev avahi-daemon libavahi-client-dev libssl-dev libsoxr-dev \
+ libavutil-dev libavcodec-dev libavformat-dev systemd-dev
```
+Note: older versions of the Raspberry Pi OS don't have -- and don't need -- the `systemd-dev` package. If it is reported as unknown, omit it from the installation.
#### NQPTP
-Skip this section if you are building classic Shairport Sync – NQPTP is not needed for classic Shairport Sync.
+Skip this step if you are building classic Shairport Sync – NQPTP is not needed for classic Shairport Sync.
-Download, install, enable and start NQPTP from [here](https://github.com/mikebrady/nqptp) following the guide for Linux.
+Download, install, enable and start NQPTP from [here](https://github.com/mikebrady/nqptp/tree/development) following the guide for Linux.
#### Shairport Sync
Download Shairport Sync, configure, compile and install it.
-* Omit the `--with-airplay-2` from the `./configure` options if you are building classic Shairport Sync.
+* Replace `--with-airplay-2` from the `./configure` options with `--with-ffmpeg` if you are building classic Shairport Sync.
```
$ git clone https://github.com/mikebrady/shairport-sync.git
$ cd shairport-sync
+$ git checkout development
$ autoreconf -fi
$ ./configure --sysconfdir=/etc --with-alsa \
- --with-soxr --with-avahi --with-ssl=openssl --with-systemd --with-airplay-2
+ --with-soxr --with-avahi --with-ssl=openssl --with-systemd-startup --with-airplay-2
$ make
# make install
# systemctl enable shairport-sync
INTERFACESv6=""
```
### Set up the Startup Sequence
-Configure the startup sequence by adding commands to `/etc/rc.local` to start `hostapd` and the `dhcp` automatically after startup. Its contents should look like this:
+Configure the startup sequence by adding commands to `/etc/rc.local`. If `/etc/rc.local` does not already exist, create it with owner and group `root` and permissions `755`. The commands will start the necessary services automatically after startup, depending on the `MODE` -- `DEV` for development mode, and `RUN` for normal operating mode.
+
+The contents of `/etc/rc.local` should look like this:
```
#!/bin/sh -e
#
exit 0 # normal exit here
```
-#### Disable Unused Services - Optional
-These optional steps have been tested on a Raspberry Pi only -- they have not been tested on other systems.
-Some services are not necessary for this setup and can be disabled as follows:
-```
-# systemctl disable keyboard-setup
-# systemctl disable triggerhappy
-# systemctl disable dphys-swapfile
-```
-
-
-#### Disable Unused Services - Mandatory
-You now need to disable some services; that is, you need to stop them starting automatically on power-up. This is because they either interfere with the system's operation in WiFi Access Point mode, or because they won't work when the system isn't connected to the Internet. Only one of the `NetworkManager` and the `dhcpcd` service will be present in your system, but it's no harm to try to disable both.
+#### Disable Services - Mandatory
+You now need to disable some services; that is, you need to stop them starting automatically on power-up. This is because they interfere with the system's operation in WiFi Access Point mode, or because they don't work when the system isn't connected to the Internet. A further possibility is that they will be started by the script in `/etc/rc.local` when appropriate. Only one of the `NetworkManager` and the `dhcpcd` service will be present in your system, but it's no harm to try to disable both.
```
# systemctl disable dhcpcd
# systemctl disable NetworkManager
# systemctl disable wpa_supplicant
# systemctl disable systemd-timesyncd
```
-Lastly, note that the WiFi credentials you used initially to connect to your network (e.g. your home network) will have been stored in the system in plain text. This is convenient for when you want to reconnect to update (see later), but if you prefer to delete them, they will be in `/etc/wpa_supplicant/wpa_supplicant.conf`.
+
+#### Disable Unused Services - Optional
+These optional steps have been tested on Raspberry Pi OS Lite (Trixie) only -- they have not been tested on other systems.
+Some services are not necessary for this setup and can be disabled as follows:
+```
+# systemctl disable keyboard-setup
+```
+#### Security Note
+The WiFi credentials you used initially to connect to your network (e.g. your home network) will have been stored in the system.
+This is convenient for when you want to reconnect to update (see later), but if you prefer to delete them, use `nmtui` or `nmcli` to remove them.
#### Optional: Read-only mode – Raspberry Pi Specific
This optional step is applicable to a Raspberry Pi only. Run `sudo raspi-config` and then choose `Performance Options` > `Overlay Filesystem` and choose to enable the overlay filesystem, and to set the boot partition to be write-protected. (The idea here is that this offers more protection against files being corrupted by the sudden removal of power.)
+Note that some packages may be installed to enable read-only mode, so the Pi needs to be connected to the internet for this step. Thus, it may be necessary to temporarily enable and disable read-only mode while connected to your network before re-enabling it after restarting in its final `RUN` mode as a stand-alone AirPlay receiver.
+
### Final Step
When you are finished, carefully power down the machine before unplugging it from power:
```
### Ready
Install the Raspberry Pi in your car. It should be powered from a source that is switched off when you leave the car, otherwise the slight current drain will eventually flatten the car's battery.
-When the power source is switched on -- typically when you start the car -- it will take around 35 seconds for the system to become available (timing based on a Raspberry Pi Zero 2 W running Bookworm).
+When the power source is switched on -- typically when you start the car -- it will take around 75 seconds for the system to become available using a Raspberry Pi Zero W, about 35 seconds with a Pi Zero 2 W.
### Enjoy!
---
## Updating
From time to time, you may wish to update this installation. Assuming you haven't deleted your original WiFi network credentials, the easiest thing is to temporarily reconnect to the network you used when you created the system. You can then update the operating system and libraries in the normal way and then update Shairport Sync.
-However, if you're *upgrading* the operating system to e.g. from Bullseye to Bookworm, the names and index numbers of the output devices may change, and the names of the mixer controls may also change. You can use [`sps-alsa-explore`](https://github.com/mikebrady/sps-alsa-explore) to discover device names and mixer names.
+However, if you're *upgrading* the operating system to e.g. from Bullseye to Bookworm, the names and index numbers of the output devices may change, and the names of the mixer controls may also change. You can use [`dacquery`](https://github.com/mikebrady/dacquery) to discover device names and mixer names.
#### Exit Raspberry Pi Read-Only Mode
If it's a Raspberry Pi and you have optionally enabled the read-only mode, you must take the device out of Read-only mode:
| ---- |
| `--with-airplay-2` |
-AirPlay 2 is the current version of the AirPlay protocol. It offers multi-room operation and integration with the Home application. However, the Shairport Sync implementation is doesn't support iTunes on Windows, and its integration with the Home app and support for remote control is incomplete. Additionally, it requires a somewhat more powerful computer system (Raspberry Pi 2 equivalent or better) and a recent (2018 or later) version of a Debian-like Linux, Alpine Linux or FreeBSD. It has not been tested on other Linux distributions such as OpenWrt. Finally, AirPlay 2 can be lossy – in one mode of operation, audio is encoded in 256kbps AAC.
+_AirPlay 2_ is the current version of the AirPlay protocol. It offers multi-room operation and integration with the Home application. However, the Shairport Sync implementation doesn't support iTunes on Windows, and its integration with the Home app and support for remote control is incomplete. Additionally, it requires a fairly recent (2018 or later) version of a Debian-like Linux, Alpine Linux or FreeBSD. It has not been tested on other Linux distributions such as OpenWrt. Finally, AirPlay 2 can be lossy – in one mode of operation, audio is encoded in 256kbps AAC.
-AirPlay (aka AirPlay 1) is an older version of the AirPlay protocol. If offers multi-room operation to iTunes on macOS or Windows, the Music app on macOS and some third-party computer applications such as [OwnTone](https://owntone.github.io/owntone-server/). It will run on lower powered machines, e.g. the original Raspberry Pi and many embedded devices. This version of AirPlay is lossless – audio is received in 44,100 frames per second 16-bit interleaved stereo ALAC format. It is compatible with a wider range of Linux distributions, back to around 2012. However, support for this version of AirPlay seems to be gradually waning. It does not offer multi-room operation to iOS, iPadOS or AppleTV and is incompatible with HomePod. It is not integrated with the Home app.
+_AirPlay_ (without the "2", aka "classic AirPlay" or "AirPlay 1") is an older version of the AirPlay protocol. It offers multi-room operation to iTunes on macOS or Windows, the Music app on macOS and some third-party computer applications such as [OwnTone](https://owntone.github.io/owntone-server/). It will run on lower powered machines, including many embedded devices. This version of AirPlay is lossless – audio is received in 44,100 frames per second 16-bit interleaved stereo ALAC format. It is compatible with a wider range of Linux distributions, back to around 2012. However, support for this version of AirPlay seems to be gradually waning. It does not offer multi-room operation to iOS, iPadOS or AppleTV and is incompatible with HomePod. It is not integrated with the Home app.
-To build Shairport Sync for AirPlay 2, include the `--with-airplay-2` option in the `./configure ...` options. You will also have to include extra libraries. Omitting this option will cause Shairport Sync to be built for the older AirPlay protocol.
+To build Shairport Sync for AirPlay 2, include the `--with-airplay-2` option in the `./configure ...` options. You will also have to include extra libraries. Omitting this option will cause Shairport Sync to be built for the older AirPlay protocol. Include the `--with-ffmpeg` option to build for classic AirPlay but using the FFmpeg libraries for decoding and for transcoding. This is recommended for classic AirPlay because it is better maintained and more flexible. Unfortunately, the FFmpeg library is very bulky and so many embedded devices simply don't have space for it.
## Audio Output
| Flags |
| ----- |
| `--with-alsa` |
| `--with-sndio` |
-| `--with-pa` |
-| `--with-pw` |
-| `--with-ao` |
+| `--with-pipewire` |
+| `--with-pulseaudio` |
| `--with-jack` |
-| `--with-soundio` |
+| `--with-ao` |
| `--with-stdout` |
| `--with-pipe` |
- `--with-alsa` Output to the Advanced Linux Sound Architecture ([ALSA](https://www.alsa-project.org/wiki/Main_Page)) system. This is recommended for highest quality.
- `--with-sndio` Output to the FreeBSD-native [sndio](https://sndio.org) system.
-- `--with-pa` Include the PulseAudio audio back end.
-- `--with-pw` Output to the [PipeWire](https://pipewire.org) system.
-- `--with-ao` Output to the [libao](https://xiph.org/ao/) system. No synchronisation.
+- `--with-pipewire` Output to the [PipeWire](https://pipewire.org) sound server.
+- `--with-pulseaudio` Include the [PulseAudio](https://www.freedesktop.org/wiki/Software/PulseAudio) sound server.
- `--with-jack` Output to the [Jack Audio](https://jackaudio.org) system.
-- `--with-soundio` Include an optional backend module for audio to be output through the [`soundio`](http://libsound.io) system. No synchronisation.
+- `--with-ao` Output to the [libao](https://xiph.org/ao/) system. No synchronisation.
- `--with-stdout` Include an optional backend module to enable raw audio to be output through standard output (`STDOUT`).
- `--with-pipe` Include an optional backend module to enable raw audio to be output through a unix pipe.
### PulseAudio and PipeWire
-Many recent Linux distributions with a GUI -- "desktop" Linuxes -- use PulseAudio or PipeWire to handle sound. There are two things to consider with these sound servers:
-1. They may not always be available: a sound server generally becomes available when a user logs in via the GUI and disappears when the user logs out; it is not available when the system starts up and it is not available to non-GUI users. This means that Shairport Sync can not run as a daemon (see "Daemonisation" below) using a sound server unless the sound server is configured as a system-wide service.
-2. The fidelity of the audio is unknown: once audio is delivered to the sound server, it is unknown what happens to it as it is processed through PulseAudio to arrives eventually at the loudspeakers.
+If your system uses either PipeWire or PulseAudio as sound servers, Shairport Sync must be started as a user service. This is because the PipeWire or PulseAudio services -- needed by Shairport Sync -- are user services themselves, and they must be running before Shairport Sync starts. That implies that Shairport Sync must be started as a user service.
-It should be noted that both PulseAudio and PipeWire provide a default ALSA pseudo device that enables ALSA-compatible programs to send audio. Shairport Sync can therefore use the ALSA backend with PulseAudio- or PipeWire-based systems.
+PipeWire and PulseAudio provide a default ALSA pseudo device, so Shairport Sync can use the ALSA backend with PipeWire- or PulseAudio-based systems.
## Audio Options
| Flags |
| `--with-soxr` |
| `--with-apple-alac` |
| `--with-convolution` |
+| `--with-ffmpeg` |
-- `--with-soxr` Allows Shairport Sync to use [libsoxr](https://sourceforge.net/p/soxr/wiki/Home/)-based resampling for improved interpolation. Recommended.
-- `--with-apple-alac` Allows Shairport Sync to use the Apple ALAC Decoder. Requires [`libalac`](https://github.com/mikebrady/alac).
-- `--with-convolution` Includes a convolution filter that can be used to apply effects such as frequency and phase correction, and a loudness filter that compensates for the non-linearity of the human auditory system. Requires `libsndfile`.
+- `--with-soxr` Enables Shairport Sync to use [libsoxr](https://sourceforge.net/p/soxr/wiki/Home/)-based resampling for improved interpolation. Recommended.
+- `--with-apple-alac` Enables Shairport Sync to use the Apple ALAC Decoder. Requires [`libalac`](https://github.com/mikebrady/alac). Deprecated due to security issues.
+- `--with-convolution` Includes a convolution filter that can be used to apply effects such as frequency and phase correction, and a loudness filter that compensates for the non-linearity of the human auditory system. Requires `libsndfile`. Note that this is only available with audio at 44100 frames per second at present.
+- `--with-ffmpeg` Enables classic Shairport Sync to use the [FFmpeg](https://ffmpeg.org) ALAC decoder and the FFmpeg software resampler to transcode, e.g. from 44100 to 48000 frames per second. Requires FFmpeg libraries. (Note: this is for Classic AirPlay only -- `--with-ffmpeg` is automatically enabled for AirPlay 2.)
## Metadata
| Flags |
### Automatic Start
| Flags |
| ----- |
-| `--with-systemd` |
+| `--with-systemd-startup` |
| `--with-systemdsystemunitdir=<dir>` |
-| `--with-systemv` |
-| `--with-freebsd-service` |
-| `--with-sygwin-service` |
+| `--with-systemv-startup` |
+| `--with-freebsd-startup` |
+| `--with-cygwin-service` |
-Daemon programs such as Shairport Sync need to be started automatically, so that the service they provide becomes available without further intervention. Typically this is done using startup scripts. Four options are provided – two for Linux, one for FreeBSD and one for CYGWIN. In Linux, the choice depends on whether [systemd](https://en.wikipedia.org/wiki/Systemd) is used or not. If `systemd` is installed, then the `--with-systemd` option is suggested. If not, the `--with-systemv` option is suggested.
+Daemon programs such as Shairport Sync should be started automatically so that the service they provide becomes available without further intervention. Typically this is done using startup scripts. Four options are provided – two for Linux, one for FreeBSD and one for CYGWIN. In Linux, the choice depends on whether [systemd](https://en.wikipedia.org/wiki/Systemd) is used or not. If `systemd` is installed, then the `--with-systemd-startup` option is suggested. If not, the `--with-systemv-startup` option is suggested.
-- `--with-systemd` Includes a script to create a Shairport Sync service that can optionally launch automatically at startup on `systemd`-based Linuxes. Default is not to to install. Note: an associated special-purpose option allows you to specify where the `systemd` service file will be placed:
+- `--with-systemd-startup` Includes scripts to create a Shairport Sync service that can optionally launch automatically at startup or at user login on `systemd`-based Linuxes. Default is not to install. Note: an associated special-purpose option allows you to specify where the `systemd` system startup file will be placed:
- `--with-systemdsystemunitdir=<dir>` Specifies the directory for `systemd` service files.
-- `--with-systemv` Includes a script to create a Shairport Sync service that can optionally launch automatically at startup on System V based Linuxes. Default is not to to install.
-- `--with-freebsd-service` Includes a script to create a Shairport Sync service that can optionally launch automatically at startup on FreeBSD. Default is not to to install.
+- `--with-systemv-startup` Includes a script to create a Shairport Sync service that can optionally launch automatically at startup on System V based Linuxes. Default is not to install.
+- `--with-freebsd-startup` Includes a script to create a Shairport Sync service that can optionally launch automatically at startup on FreeBSD. Default is not to install.
- `--with-cygwin-service` Includes a script to create a Shairport Sync service that can optionally launch automatically at startup on CYGWIN. Default is not to install.
### Cryptography
-// ==================================================================================
-// Copyright (c) 2016 HiFi-LoFi
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is furnished
-// to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-// ==================================================================================
-
-#include "AudioFFT.h"
-
-#include <cassert>
-#include <cmath>
-#include <cstring>
-
-
-#if defined(AUDIOFFT_APPLE_ACCELERATE)
- #define AUDIOFFT_APPLE_ACCELERATE_USED
- #include <Accelerate/Accelerate.h>
- #include <vector>
-#elif defined (AUDIOFFT_FFTW3)
- #define AUDIOFFT_FFTW3_USED
- #include <fftw3.h>
-#else
- #if !defined(AUDIOFFT_OOURA)
- #define AUDIOFFT_OOURA
- #endif
- #define AUDIOFFT_OOURA_USED
- #include <vector>
-#endif
-
-
-namespace audiofft
-{
-
- namespace details
- {
-
- static bool IsPowerOf2(size_t val)
- {
- return (val == 1 || (val & (val-1)) == 0);
- }
-
-
- template<typename TypeDest, typename TypeSrc>
- void ConvertBuffer(TypeDest* dest, const TypeSrc* src, size_t len)
- {
- for (size_t i=0; i<len; ++i)
- {
- dest[i] = static_cast<TypeDest>(src[i]);
- }
- }
-
-
- template<typename TypeDest, typename TypeSrc, typename TypeFactor>
- void ScaleBuffer(TypeDest* dest, const TypeSrc* src, const TypeFactor factor, size_t len)
- {
- for (size_t i=0; i<len; ++i)
- {
- dest[i] = static_cast<TypeDest>(static_cast<TypeFactor>(src[i]) * factor);
- }
- }
-
-
- // ================================================================
-
-
-#ifdef AUDIOFFT_OOURA_USED
-
- /**
- * @internal
- * @class OouraFFT
- * @brief FFT implementation based on the great radix-4 routines by Takuya Ooura
- */
- class OouraFFT : public AudioFFTImpl
- {
- public:
- OouraFFT() :
- AudioFFTImpl(),
- _size(0),
- _ip(),
- _w(),
- _buffer()
- {
- }
-
- virtual void init(size_t size) override
- {
- if (_size != size)
- {
- _ip.resize(2 + static_cast<int>(std::sqrt(static_cast<double>(size))));
- _w.resize(size / 2);
- _buffer.resize(size);
- _size = size;
-
- const int size4 = static_cast<int>(_size) / 4;
- makewt(size4, _ip.data(), _w.data());
- makect(size4, _ip.data(), _w.data() + size4);
- }
- }
-
- virtual void fft(const float* data, float* re, float* im) override
- {
- // Convert into the format as required by the Ooura FFT
- ConvertBuffer(&_buffer[0], data, _size);
-
- rdft(static_cast<int>(_size), +1, _buffer.data(), _ip.data(), _w.data());
-
- // Convert back to split-complex
- {
- double* b = &_buffer[0];
- double* bEnd = b + _size;
- float *r = re;
- float *i = im;
- while (b != bEnd)
- {
- *(r++) = static_cast<float>(*(b++));
- *(i++) = static_cast<float>(-(*(b++)));
- }
- }
- const size_t size2 = _size / 2;
- re[size2] = -im[0];
- im[0] = 0.0;
- im[size2] = 0.0;
- }
-
- virtual void ifft(float* data, const float* re, const float* im) override
- {
- // Convert into the format as required by the Ooura FFT
- {
- double* b = &_buffer[0];
- double* bEnd = b + _size;
- const float *r = re;
- const float *i = im;
- while (b != bEnd)
- {
- *(b++) = static_cast<double>(*(r++));
- *(b++) = -static_cast<double>(*(i++));
- }
- _buffer[1] = re[_size / 2];
- }
-
- rdft(static_cast<int>(_size), -1, _buffer.data(), _ip.data(), _w.data());
-
- // Convert back to split-complex
- ScaleBuffer(data, &_buffer[0], 2.0 / static_cast<double>(_size), _size);
- }
-
- private:
- size_t _size;
- std::vector<int> _ip;
- std::vector<double> _w;
- std::vector<double> _buffer;
-
- void rdft(int n, int isgn, double *a, int *ip, double *w)
- {
- int nw = ip[0];
- int nc = ip[1];
-
- if (isgn >= 0)
- {
- if (n > 4)
- {
- bitrv2(n, ip + 2, a);
- cftfsub(n, a, w);
- rftfsub(n, a, nc, w + nw);
- }
- else if (n == 4)
- {
- cftfsub(n, a, w);
- }
- double xi = a[0] - a[1];
- a[0] += a[1];
- a[1] = xi;
- }
- else
- {
- a[1] = 0.5 * (a[0] - a[1]);
- a[0] -= a[1];
- if (n > 4)
- {
- rftbsub(n, a, nc, w + nw);
- bitrv2(n, ip + 2, a);
- cftbsub(n, a, w);
- }
- else if (n == 4)
- {
- cftfsub(n, a, w);
- }
- }
- }
-
-
- /* -------- initializing routines -------- */
-
- void makewt(int nw, int *ip, double *w)
- {
- int j, nwh;
- double delta, x, y;
-
- ip[0] = nw;
- ip[1] = 1;
- if (nw > 2) {
- nwh = nw >> 1;
- delta = atan(1.0) / nwh;
- w[0] = 1;
- w[1] = 0;
- w[nwh] = cos(delta * nwh);
- w[nwh + 1] = w[nwh];
- if (nwh > 2) {
- for (j = 2; j < nwh; j += 2) {
- x = cos(delta * j);
- y = sin(delta * j);
- w[j] = x;
- w[j + 1] = y;
- w[nw - j] = y;
- w[nw - j + 1] = x;
- }
- bitrv2(nw, ip + 2, w);
- }
- }
- }
-
-
- void makect(int nc, int *ip, double *c)
- {
- int j, nch;
- double delta;
-
- ip[1] = nc;
- if (nc > 1) {
- nch = nc >> 1;
- delta = atan(1.0) / nch;
- c[0] = cos(delta * nch);
- c[nch] = 0.5 * c[0];
- for (j = 1; j < nch; j++) {
- c[j] = 0.5 * cos(delta * j);
- c[nc - j] = 0.5 * sin(delta * j);
- }
- }
- }
-
-
- /* -------- child routines -------- */
-
-
- void bitrv2(int n, int *ip, double *a)
- {
- int j, j1, k, k1, l, m, m2;
- double xr, xi, yr, yi;
-
- ip[0] = 0;
- l = n;
- m = 1;
- while ((m << 3) < l) {
- l >>= 1;
- for (j = 0; j < m; j++) {
- ip[m + j] = ip[j] + l;
- }
- m <<= 1;
- }
- m2 = 2 * m;
- if ((m << 3) == l) {
- for (k = 0; k < m; k++) {
- for (j = 0; j < k; j++) {
- j1 = 2 * j + ip[k];
- k1 = 2 * k + ip[j];
- xr = a[j1];
- xi = a[j1 + 1];
- yr = a[k1];
- yi = a[k1 + 1];
- a[j1] = yr;
- a[j1 + 1] = yi;
- a[k1] = xr;
- a[k1 + 1] = xi;
- j1 += m2;
- k1 += 2 * m2;
- xr = a[j1];
- xi = a[j1 + 1];
- yr = a[k1];
- yi = a[k1 + 1];
- a[j1] = yr;
- a[j1 + 1] = yi;
- a[k1] = xr;
- a[k1 + 1] = xi;
- j1 += m2;
- k1 -= m2;
- xr = a[j1];
- xi = a[j1 + 1];
- yr = a[k1];
- yi = a[k1 + 1];
- a[j1] = yr;
- a[j1 + 1] = yi;
- a[k1] = xr;
- a[k1 + 1] = xi;
- j1 += m2;
- k1 += 2 * m2;
- xr = a[j1];
- xi = a[j1 + 1];
- yr = a[k1];
- yi = a[k1 + 1];
- a[j1] = yr;
- a[j1 + 1] = yi;
- a[k1] = xr;
- a[k1 + 1] = xi;
- }
- j1 = 2 * k + m2 + ip[k];
- k1 = j1 + m2;
- xr = a[j1];
- xi = a[j1 + 1];
- yr = a[k1];
- yi = a[k1 + 1];
- a[j1] = yr;
- a[j1 + 1] = yi;
- a[k1] = xr;
- a[k1 + 1] = xi;
- }
- } else {
- for (k = 1; k < m; k++) {
- for (j = 0; j < k; j++) {
- j1 = 2 * j + ip[k];
- k1 = 2 * k + ip[j];
- xr = a[j1];
- xi = a[j1 + 1];
- yr = a[k1];
- yi = a[k1 + 1];
- a[j1] = yr;
- a[j1 + 1] = yi;
- a[k1] = xr;
- a[k1 + 1] = xi;
- j1 += m2;
- k1 += m2;
- xr = a[j1];
- xi = a[j1 + 1];
- yr = a[k1];
- yi = a[k1 + 1];
- a[j1] = yr;
- a[j1 + 1] = yi;
- a[k1] = xr;
- a[k1 + 1] = xi;
- }
- }
- }
- }
-
-
- void cftfsub(int n, double *a, double *w)
- {
- int j, j1, j2, j3, l;
- double x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
-
- l = 2;
- if (n > 8) {
- cft1st(n, a, w);
- l = 8;
- while ((l << 2) < n) {
- cftmdl(n, l, a, w);
- l <<= 2;
- }
- }
- if ((l << 2) == n) {
- for (j = 0; j < l; j += 2) {
- j1 = j + l;
- j2 = j1 + l;
- j3 = j2 + l;
- x0r = a[j] + a[j1];
- x0i = a[j + 1] + a[j1 + 1];
- x1r = a[j] - a[j1];
- x1i = a[j + 1] - a[j1 + 1];
- x2r = a[j2] + a[j3];
- x2i = a[j2 + 1] + a[j3 + 1];
- x3r = a[j2] - a[j3];
- x3i = a[j2 + 1] - a[j3 + 1];
- a[j] = x0r + x2r;
- a[j + 1] = x0i + x2i;
- a[j2] = x0r - x2r;
- a[j2 + 1] = x0i - x2i;
- a[j1] = x1r - x3i;
- a[j1 + 1] = x1i + x3r;
- a[j3] = x1r + x3i;
- a[j3 + 1] = x1i - x3r;
- }
- } else {
- for (j = 0; j < l; j += 2) {
- j1 = j + l;
- x0r = a[j] - a[j1];
- x0i = a[j + 1] - a[j1 + 1];
- a[j] += a[j1];
- a[j + 1] += a[j1 + 1];
- a[j1] = x0r;
- a[j1 + 1] = x0i;
- }
- }
- }
-
-
- void cftbsub(int n, double *a, double *w)
- {
- int j, j1, j2, j3, l;
- double x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
-
- l = 2;
- if (n > 8) {
- cft1st(n, a, w);
- l = 8;
- while ((l << 2) < n) {
- cftmdl(n, l, a, w);
- l <<= 2;
- }
- }
- if ((l << 2) == n) {
- for (j = 0; j < l; j += 2) {
- j1 = j + l;
- j2 = j1 + l;
- j3 = j2 + l;
- x0r = a[j] + a[j1];
- x0i = -a[j + 1] - a[j1 + 1];
- x1r = a[j] - a[j1];
- x1i = -a[j + 1] + a[j1 + 1];
- x2r = a[j2] + a[j3];
- x2i = a[j2 + 1] + a[j3 + 1];
- x3r = a[j2] - a[j3];
- x3i = a[j2 + 1] - a[j3 + 1];
- a[j] = x0r + x2r;
- a[j + 1] = x0i - x2i;
- a[j2] = x0r - x2r;
- a[j2 + 1] = x0i + x2i;
- a[j1] = x1r - x3i;
- a[j1 + 1] = x1i - x3r;
- a[j3] = x1r + x3i;
- a[j3 + 1] = x1i + x3r;
- }
- } else {
- for (j = 0; j < l; j += 2) {
- j1 = j + l;
- x0r = a[j] - a[j1];
- x0i = -a[j + 1] + a[j1 + 1];
- a[j] += a[j1];
- a[j + 1] = -a[j + 1] - a[j1 + 1];
- a[j1] = x0r;
- a[j1 + 1] = x0i;
- }
- }
- }
-
-
- void cft1st(int n, double *a, double *w)
- {
- int j, k1, k2;
- double wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
- double x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
-
- x0r = a[0] + a[2];
- x0i = a[1] + a[3];
- x1r = a[0] - a[2];
- x1i = a[1] - a[3];
- x2r = a[4] + a[6];
- x2i = a[5] + a[7];
- x3r = a[4] - a[6];
- x3i = a[5] - a[7];
- a[0] = x0r + x2r;
- a[1] = x0i + x2i;
- a[4] = x0r - x2r;
- a[5] = x0i - x2i;
- a[2] = x1r - x3i;
- a[3] = x1i + x3r;
- a[6] = x1r + x3i;
- a[7] = x1i - x3r;
- wk1r = w[2];
- x0r = a[8] + a[10];
- x0i = a[9] + a[11];
- x1r = a[8] - a[10];
- x1i = a[9] - a[11];
- x2r = a[12] + a[14];
- x2i = a[13] + a[15];
- x3r = a[12] - a[14];
- x3i = a[13] - a[15];
- a[8] = x0r + x2r;
- a[9] = x0i + x2i;
- a[12] = x2i - x0i;
- a[13] = x0r - x2r;
- x0r = x1r - x3i;
- x0i = x1i + x3r;
- a[10] = wk1r * (x0r - x0i);
- a[11] = wk1r * (x0r + x0i);
- x0r = x3i + x1r;
- x0i = x3r - x1i;
- a[14] = wk1r * (x0i - x0r);
- a[15] = wk1r * (x0i + x0r);
- k1 = 0;
- for (j = 16; j < n; j += 16) {
- k1 += 2;
- k2 = 2 * k1;
- wk2r = w[k1];
- wk2i = w[k1 + 1];
- wk1r = w[k2];
- wk1i = w[k2 + 1];
- wk3r = wk1r - 2 * wk2i * wk1i;
- wk3i = 2 * wk2i * wk1r - wk1i;
- x0r = a[j] + a[j + 2];
- x0i = a[j + 1] + a[j + 3];
- x1r = a[j] - a[j + 2];
- x1i = a[j + 1] - a[j + 3];
- x2r = a[j + 4] + a[j + 6];
- x2i = a[j + 5] + a[j + 7];
- x3r = a[j + 4] - a[j + 6];
- x3i = a[j + 5] - a[j + 7];
- a[j] = x0r + x2r;
- a[j + 1] = x0i + x2i;
- x0r -= x2r;
- x0i -= x2i;
- a[j + 4] = wk2r * x0r - wk2i * x0i;
- a[j + 5] = wk2r * x0i + wk2i * x0r;
- x0r = x1r - x3i;
- x0i = x1i + x3r;
- a[j + 2] = wk1r * x0r - wk1i * x0i;
- a[j + 3] = wk1r * x0i + wk1i * x0r;
- x0r = x1r + x3i;
- x0i = x1i - x3r;
- a[j + 6] = wk3r * x0r - wk3i * x0i;
- a[j + 7] = wk3r * x0i + wk3i * x0r;
- wk1r = w[k2 + 2];
- wk1i = w[k2 + 3];
- wk3r = wk1r - 2 * wk2r * wk1i;
- wk3i = 2 * wk2r * wk1r - wk1i;
- x0r = a[j + 8] + a[j + 10];
- x0i = a[j + 9] + a[j + 11];
- x1r = a[j + 8] - a[j + 10];
- x1i = a[j + 9] - a[j + 11];
- x2r = a[j + 12] + a[j + 14];
- x2i = a[j + 13] + a[j + 15];
- x3r = a[j + 12] - a[j + 14];
- x3i = a[j + 13] - a[j + 15];
- a[j + 8] = x0r + x2r;
- a[j + 9] = x0i + x2i;
- x0r -= x2r;
- x0i -= x2i;
- a[j + 12] = -wk2i * x0r - wk2r * x0i;
- a[j + 13] = -wk2i * x0i + wk2r * x0r;
- x0r = x1r - x3i;
- x0i = x1i + x3r;
- a[j + 10] = wk1r * x0r - wk1i * x0i;
- a[j + 11] = wk1r * x0i + wk1i * x0r;
- x0r = x1r + x3i;
- x0i = x1i - x3r;
- a[j + 14] = wk3r * x0r - wk3i * x0i;
- a[j + 15] = wk3r * x0i + wk3i * x0r;
- }
- }
-
-
- void cftmdl(int n, int l, double *a, double *w)
- {
- int j, j1, j2, j3, k, k1, k2, m, m2;
- double wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
- double x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
-
- m = l << 2;
- for (j = 0; j < l; j += 2) {
- j1 = j + l;
- j2 = j1 + l;
- j3 = j2 + l;
- x0r = a[j] + a[j1];
- x0i = a[j + 1] + a[j1 + 1];
- x1r = a[j] - a[j1];
- x1i = a[j + 1] - a[j1 + 1];
- x2r = a[j2] + a[j3];
- x2i = a[j2 + 1] + a[j3 + 1];
- x3r = a[j2] - a[j3];
- x3i = a[j2 + 1] - a[j3 + 1];
- a[j] = x0r + x2r;
- a[j + 1] = x0i + x2i;
- a[j2] = x0r - x2r;
- a[j2 + 1] = x0i - x2i;
- a[j1] = x1r - x3i;
- a[j1 + 1] = x1i + x3r;
- a[j3] = x1r + x3i;
- a[j3 + 1] = x1i - x3r;
- }
- wk1r = w[2];
- for (j = m; j < l + m; j += 2) {
- j1 = j + l;
- j2 = j1 + l;
- j3 = j2 + l;
- x0r = a[j] + a[j1];
- x0i = a[j + 1] + a[j1 + 1];
- x1r = a[j] - a[j1];
- x1i = a[j + 1] - a[j1 + 1];
- x2r = a[j2] + a[j3];
- x2i = a[j2 + 1] + a[j3 + 1];
- x3r = a[j2] - a[j3];
- x3i = a[j2 + 1] - a[j3 + 1];
- a[j] = x0r + x2r;
- a[j + 1] = x0i + x2i;
- a[j2] = x2i - x0i;
- a[j2 + 1] = x0r - x2r;
- x0r = x1r - x3i;
- x0i = x1i + x3r;
- a[j1] = wk1r * (x0r - x0i);
- a[j1 + 1] = wk1r * (x0r + x0i);
- x0r = x3i + x1r;
- x0i = x3r - x1i;
- a[j3] = wk1r * (x0i - x0r);
- a[j3 + 1] = wk1r * (x0i + x0r);
- }
- k1 = 0;
- m2 = 2 * m;
- for (k = m2; k < n; k += m2) {
- k1 += 2;
- k2 = 2 * k1;
- wk2r = w[k1];
- wk2i = w[k1 + 1];
- wk1r = w[k2];
- wk1i = w[k2 + 1];
- wk3r = wk1r - 2 * wk2i * wk1i;
- wk3i = 2 * wk2i * wk1r - wk1i;
- for (j = k; j < l + k; j += 2) {
- j1 = j + l;
- j2 = j1 + l;
- j3 = j2 + l;
- x0r = a[j] + a[j1];
- x0i = a[j + 1] + a[j1 + 1];
- x1r = a[j] - a[j1];
- x1i = a[j + 1] - a[j1 + 1];
- x2r = a[j2] + a[j3];
- x2i = a[j2 + 1] + a[j3 + 1];
- x3r = a[j2] - a[j3];
- x3i = a[j2 + 1] - a[j3 + 1];
- a[j] = x0r + x2r;
- a[j + 1] = x0i + x2i;
- x0r -= x2r;
- x0i -= x2i;
- a[j2] = wk2r * x0r - wk2i * x0i;
- a[j2 + 1] = wk2r * x0i + wk2i * x0r;
- x0r = x1r - x3i;
- x0i = x1i + x3r;
- a[j1] = wk1r * x0r - wk1i * x0i;
- a[j1 + 1] = wk1r * x0i + wk1i * x0r;
- x0r = x1r + x3i;
- x0i = x1i - x3r;
- a[j3] = wk3r * x0r - wk3i * x0i;
- a[j3 + 1] = wk3r * x0i + wk3i * x0r;
- }
- wk1r = w[k2 + 2];
- wk1i = w[k2 + 3];
- wk3r = wk1r - 2 * wk2r * wk1i;
- wk3i = 2 * wk2r * wk1r - wk1i;
- for (j = k + m; j < l + (k + m); j += 2) {
- j1 = j + l;
- j2 = j1 + l;
- j3 = j2 + l;
- x0r = a[j] + a[j1];
- x0i = a[j + 1] + a[j1 + 1];
- x1r = a[j] - a[j1];
- x1i = a[j + 1] - a[j1 + 1];
- x2r = a[j2] + a[j3];
- x2i = a[j2 + 1] + a[j3 + 1];
- x3r = a[j2] - a[j3];
- x3i = a[j2 + 1] - a[j3 + 1];
- a[j] = x0r + x2r;
- a[j + 1] = x0i + x2i;
- x0r -= x2r;
- x0i -= x2i;
- a[j2] = -wk2i * x0r - wk2r * x0i;
- a[j2 + 1] = -wk2i * x0i + wk2r * x0r;
- x0r = x1r - x3i;
- x0i = x1i + x3r;
- a[j1] = wk1r * x0r - wk1i * x0i;
- a[j1 + 1] = wk1r * x0i + wk1i * x0r;
- x0r = x1r + x3i;
- x0i = x1i - x3r;
- a[j3] = wk3r * x0r - wk3i * x0i;
- a[j3 + 1] = wk3r * x0i + wk3i * x0r;
- }
- }
- }
-
-
- void rftfsub(int n, double *a, int nc, double *c)
- {
- int j, k, kk, ks, m;
- double wkr, wki, xr, xi, yr, yi;
-
- m = n >> 1;
- ks = 2 * nc / m;
- kk = 0;
- for (j = 2; j < m; j += 2) {
- k = n - j;
- kk += ks;
- wkr = 0.5 - c[nc - kk];
- wki = c[kk];
- xr = a[j] - a[k];
- xi = a[j + 1] + a[k + 1];
- yr = wkr * xr - wki * xi;
- yi = wkr * xi + wki * xr;
- a[j] -= yr;
- a[j + 1] -= yi;
- a[k] += yr;
- a[k + 1] -= yi;
- }
- }
-
-
- void rftbsub(int n, double *a, int nc, double *c)
- {
- int j, k, kk, ks, m;
- double wkr, wki, xr, xi, yr, yi;
-
- a[1] = -a[1];
- m = n >> 1;
- ks = 2 * nc / m;
- kk = 0;
- for (j = 2; j < m; j += 2) {
- k = n - j;
- kk += ks;
- wkr = 0.5 - c[nc - kk];
- wki = c[kk];
- xr = a[j] - a[k];
- xi = a[j + 1] + a[k + 1];
- yr = wkr * xr + wki * xi;
- yi = wkr * xi - wki * xr;
- a[j] -= yr;
- a[j + 1] = yi - a[j + 1];
- a[k] += yr;
- a[k + 1] = yi - a[k + 1];
- }
- a[m + 1] = -a[m + 1];
- }
-
- OouraFFT(const OouraFFT&) = delete;
- OouraFFT& operator=(const OouraFFT&) = delete;
- };
-
- std::unique_ptr<AudioFFTImpl> MakeAudioFFTImpl()
- {
- return std::unique_ptr<OouraFFT>(new OouraFFT());
- }
-
-
-#endif // AUDIOFFT_OOURA_USED
-
-
- // ================================================================
-
-
-#ifdef AUDIOFFT_APPLE_ACCELERATE_USED
-
-
- /**
- * @internal
- * @class AppleAccelerateFFT
- * @brief FFT implementation using the Apple Accelerate framework internally
- */
- class AppleAccelerateFFT : public AudioFFTImpl
- {
- public:
- AppleAccelerateFFT() :
- AudioFFTImpl(),
- _size(0),
- _powerOf2(0),
- _fftSetup(0),
- _re(),
- _im()
- {
- }
-
- virtual ~AppleAccelerateFFT()
- {
- init(0);
- }
-
- virtual void init(size_t size) override
- {
- if (_fftSetup)
- {
- vDSP_destroy_fftsetup(_fftSetup);
- _size = 0;
- _powerOf2 = 0;
- _fftSetup = 0;
- _re.clear();
- _im.clear();
- }
-
- if (size > 0)
- {
- _size = size;
- _powerOf2 = 0;
- while ((1 << _powerOf2) < _size)
- {
- ++_powerOf2;
- }
- _fftSetup = vDSP_create_fftsetup(_powerOf2, FFT_RADIX2);
- _re.resize(_size / 2);
- _im.resize(_size / 2);
- }
- }
-
- virtual void fft(const float* data, float* re, float* im) override
- {
- const size_t size2 = _size / 2;
- DSPSplitComplex splitComplex;
- splitComplex.realp = re;
- splitComplex.imagp = im;
- vDSP_ctoz(reinterpret_cast<const COMPLEX*>(data), 2, &splitComplex, 1, size2);
- vDSP_fft_zrip(_fftSetup, &splitComplex, 1, _powerOf2, FFT_FORWARD);
- const float factor = 0.5f;
- vDSP_vsmul(re, 1, &factor, re, 1, size2);
- vDSP_vsmul(im, 1, &factor, im, 1, size2);
- re[size2] = im[0];
- im[0] = 0.0f;
- im[size2] = 0.0f;
- }
-
- virtual void ifft(float* data, const float* re, const float* im) override
- {
- const size_t size2 = _size / 2;
- ::memcpy(_re.data(), re, size2 * sizeof(float));
- ::memcpy(_im.data(), im, size2 * sizeof(float));
- _im[0] = re[size2];
- DSPSplitComplex splitComplex;
- splitComplex.realp = _re.data();
- splitComplex.imagp = _im.data();
- vDSP_fft_zrip(_fftSetup, &splitComplex, 1, _powerOf2, FFT_INVERSE);
- vDSP_ztoc(&splitComplex, 1, reinterpret_cast<COMPLEX*>(data), 2, size2);
- const float factor = 1.0f / static_cast<float>(_size);
- vDSP_vsmul(data, 1, &factor, data, 1, _size);
- }
-
- private:
- size_t _size;
- size_t _powerOf2;
- FFTSetup _fftSetup;
- std::vector<float> _re;
- std::vector<float> _im;
-
- AppleAccelerateFFT(const AppleAccelerateFFT&) = delete;
- AppleAccelerateFFT& operator=(const AppleAccelerateFFT&) = delete;
- };
-
-
- std::unique_ptr<AudioFFTImpl> MakeAudioFFTImpl()
- {
- return std::unique_ptr<AppleAccelerateFFT>(new AppleAccelerateFFT());
- }
-
-
-#endif // AUDIOFFT_APPLE_ACCELERATE_USED
-
-
- // ================================================================
-
-
-#ifdef AUDIOFFT_FFTW3_USED
-
-
- /**
- * @internal
- * @class FFTW3FFT
- * @brief FFT implementation using FFTW3 internally (see fftw.org)
- */
- class FFTW3FFT : public AudioFFTImpl
- {
- public:
- FFTW3FFT() :
- AudioFFTImpl(),
- _size(0),
- _complexSize(0),
- _planForward(0),
- _planBackward(0),
- _data(0),
- _re(0),
- _im(0)
- {
- }
-
- virtual ~FFTW3FFT()
- {
- init(0);
- }
-
- virtual void init(size_t size) override
- {
- if (_size != size)
- {
- if (_size > 0)
- {
- fftwf_destroy_plan(_planForward);
- fftwf_destroy_plan(_planBackward);
- _planForward = 0;
- _planBackward = 0;
- _size = 0;
- _complexSize = 0;
-
- if (_data)
- {
- fftwf_free(_data);
- _data = 0;
- }
-
- if (_re)
- {
- fftwf_free(_re);
- _re = 0;
- }
-
- if (_im)
- {
- fftwf_free(_im);
- _im = 0;
- }
- }
-
- if (size > 0)
- {
- _size = size;
- _complexSize = AudioFFT::ComplexSize(_size);
- const size_t complexSize = AudioFFT::ComplexSize(_size);
- _data = reinterpret_cast<float*>(fftwf_malloc(_size * sizeof(float)));
- _re = reinterpret_cast<float*>(fftwf_malloc(complexSize * sizeof(float)));
- _im = reinterpret_cast<float*>(fftwf_malloc(complexSize * sizeof(float)));
-
- fftw_iodim dim;
- dim.n = static_cast<int>(size);
- dim.is = 1;
- dim.os = 1;
- _planForward = fftwf_plan_guru_split_dft_r2c(1, &dim, 0, 0, _data, _re, _im, FFTW_MEASURE);
- _planBackward = fftwf_plan_guru_split_dft_c2r(1, &dim, 0, 0, _re, _im, _data, FFTW_MEASURE);
- }
- }
- }
-
- virtual void fft(const float* data, float* re, float* im) override
- {
- ::memcpy(_data, data, _size * sizeof(float));
- fftwf_execute_split_dft_r2c(_planForward, _data, _re, _im);
- ::memcpy(re, _re, _complexSize * sizeof(float));
- ::memcpy(im, _im, _complexSize * sizeof(float));
- }
-
- void ifft(float* data, const float* re, const float* im)
- {
- ::memcpy(_re, re, _complexSize * sizeof(float));
- ::memcpy(_im, im, _complexSize * sizeof(float));
- fftwf_execute_split_dft_c2r(_planBackward, _re, _im, _data);
- ScaleBuffer(data, _data, 1.0f / static_cast<float>(_size), _size);
- }
-
- private:
- size_t _size;
- size_t _complexSize;
- fftwf_plan _planForward;
- fftwf_plan _planBackward;
- float* _data;
- float* _re;
- float* _im;
-
- FFTW3FFT(const FFTW3FFT&) = delete;
- FFTW3FFT& operator=(const FFTW3FFT&) = delete;
- };
-
-
- std::unique_ptr<AudioFFTImpl> MakeAudioFFTImpl()
- {
- return std::unique_ptr<FFTW3FFT>(new FFTW3FFT());
- }
-
-
-#endif // AUDIOFFT_FFTW3_USED
-
- } // End of namespace details
-
-
- // =============================================================
-
-
- AudioFFT::AudioFFT() :
- _impl(details::MakeAudioFFTImpl())
- {
- }
-
-
- void AudioFFT::init(size_t size)
- {
- assert(details::IsPowerOf2(size));
- _impl->init(size);
- }
-
-
- void AudioFFT::fft(const float* data, float* re, float* im)
- {
- _impl->fft(data, re, im);
- }
-
-
- void AudioFFT::ifft(float* data, const float* re, const float* im)
- {
- _impl->ifft(data, re, im);
- }
-
-
- size_t AudioFFT::ComplexSize(size_t size)
- {
- return (size / 2) + 1;
- }
-
-} // End of namespace
+// ==================================================================================\r
+// Copyright (c) 2017 HiFi-LoFi\r
+//\r
+// Permission is hereby granted, free of charge, to any person obtaining a copy\r
+// of this software and associated documentation files (the "Software"), to deal\r
+// in the Software without restriction, including without limitation the rights\r
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\r
+// copies of the Software, and to permit persons to whom the Software is furnished\r
+// to do so, subject to the following conditions:\r
+//\r
+// The above copyright notice and this permission notice shall be included in\r
+// all copies or substantial portions of the Software.\r
+//\r
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\r
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\r
+// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\r
+// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\r
+// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\r
+// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\r
+// ==================================================================================\r
+\r
+#include "AudioFFT.h"\r
+\r
+#include <cassert>\r
+#include <cmath>\r
+#include <cstring>\r
+\r
+\r
+#if defined(AUDIOFFT_APPLE_ACCELERATE)\r
+ #define AUDIOFFT_APPLE_ACCELERATE_USED\r
+ #include <Accelerate/Accelerate.h>\r
+ #include <vector>\r
+#elif defined (AUDIOFFT_FFTW3)\r
+ #define AUDIOFFT_FFTW3_USED\r
+ #include <fftw3.h>\r
+#else\r
+ #if !defined(AUDIOFFT_OOURA)\r
+ #define AUDIOFFT_OOURA\r
+ #endif\r
+ #define AUDIOFFT_OOURA_USED\r
+ #include <vector>\r
+#endif\r
+\r
+\r
+namespace audiofft\r
+{\r
+\r
+ namespace detail\r
+ {\r
+\r
+ class AudioFFTImpl\r
+ {\r
+ public:\r
+ AudioFFTImpl() = default;\r
+ AudioFFTImpl(const AudioFFTImpl&) = delete;\r
+ AudioFFTImpl& operator=(const AudioFFTImpl&) = delete;\r
+ virtual ~AudioFFTImpl() = default;\r
+ virtual void init(size_t size) = 0;\r
+ virtual void fft(const float* data, float* re, float* im) = 0;\r
+ virtual void ifft(float* data, const float* re, const float* im) = 0;\r
+ };\r
+\r
+\r
+ constexpr bool IsPowerOf2(size_t val)\r
+ {\r
+ return (val == 1 || (val & (val-1)) == 0);\r
+ }\r
+\r
+\r
+ template<typename TypeDest, typename TypeSrc>\r
+ void ConvertBuffer(TypeDest* dest, const TypeSrc* src, size_t len)\r
+ {\r
+ for (size_t i=0; i<len; ++i)\r
+ {\r
+ dest[i] = static_cast<TypeDest>(src[i]);\r
+ }\r
+ }\r
+\r
+\r
+ template<typename TypeDest, typename TypeSrc, typename TypeFactor>\r
+ void ScaleBuffer(TypeDest* dest, const TypeSrc* src, const TypeFactor factor, size_t len)\r
+ {\r
+ for (size_t i=0; i<len; ++i)\r
+ {\r
+ dest[i] = static_cast<TypeDest>(static_cast<TypeFactor>(src[i]) * factor);\r
+ }\r
+ }\r
+\r
+ } // End of namespace detail\r
+\r
+\r
+ // ================================================================\r
+\r
+\r
+#ifdef AUDIOFFT_OOURA_USED\r
+\r
+ /**\r
+ * @internal\r
+ * @class OouraFFT\r
+ * @brief FFT implementation based on the great radix-4 routines by Takuya Ooura\r
+ */\r
+ class OouraFFT : public detail::AudioFFTImpl\r
+ {\r
+ public:\r
+ OouraFFT() :\r
+ detail::AudioFFTImpl(),\r
+ _size(0),\r
+ _ip(),\r
+ _w(),\r
+ _buffer()\r
+ {\r
+ }\r
+\r
+ OouraFFT(const OouraFFT&) = delete;\r
+ OouraFFT& operator=(const OouraFFT&) = delete;\r
+\r
+ virtual void init(size_t size) override\r
+ {\r
+ if (_size != size)\r
+ {\r
+ _ip.resize(2 + static_cast<int>(std::sqrt(static_cast<double>(size))));\r
+ _w.resize(size / 2);\r
+ _buffer.resize(size);\r
+ _size = size;\r
+\r
+ const int size4 = static_cast<int>(_size) / 4;\r
+ makewt(size4, _ip.data(), _w.data());\r
+ makect(size4, _ip.data(), _w.data() + size4);\r
+ }\r
+ }\r
+\r
+ virtual void fft(const float* data, float* re, float* im) override\r
+ {\r
+ // Convert into the format as required by the Ooura FFT\r
+ detail::ConvertBuffer(_buffer.data(), data, _size);\r
+\r
+ rdft(static_cast<int>(_size), +1, _buffer.data(), _ip.data(), _w.data());\r
+\r
+ // Convert back to split-complex\r
+ {\r
+ double* b = _buffer.data();\r
+ double* bEnd = b + _size;\r
+ float *r = re;\r
+ float *i = im;\r
+ while (b != bEnd)\r
+ {\r
+ *(r++) = static_cast<float>(*(b++));\r
+ *(i++) = static_cast<float>(-(*(b++)));\r
+ }\r
+ }\r
+ const size_t size2 = _size / 2;\r
+ re[size2] = -im[0];\r
+ im[0] = 0.0;\r
+ im[size2] = 0.0;\r
+ }\r
+\r
+ virtual void ifft(float* data, const float* re, const float* im) override\r
+ {\r
+ // Convert into the format as required by the Ooura FFT\r
+ {\r
+ double* b = _buffer.data();\r
+ double* bEnd = b + _size;\r
+ const float *r = re;\r
+ const float *i = im;\r
+ while (b != bEnd)\r
+ {\r
+ *(b++) = static_cast<double>(*(r++));\r
+ *(b++) = -static_cast<double>(*(i++));\r
+ }\r
+ _buffer[1] = re[_size / 2];\r
+ }\r
+\r
+ rdft(static_cast<int>(_size), -1, _buffer.data(), _ip.data(), _w.data());\r
+\r
+ // Convert back to split-complex\r
+ detail::ScaleBuffer(data, _buffer.data(), 2.0 / static_cast<double>(_size), _size);\r
+ }\r
+\r
+ private:\r
+ size_t _size;\r
+ std::vector<int> _ip;\r
+ std::vector<double> _w;\r
+ std::vector<double> _buffer;\r
+\r
+ void rdft(int n, int isgn, double *a, int *ip, double *w)\r
+ {\r
+ int nw = ip[0];\r
+ int nc = ip[1];\r
+\r
+ if (isgn >= 0)\r
+ {\r
+ if (n > 4)\r
+ {\r
+ bitrv2(n, ip + 2, a);\r
+ cftfsub(n, a, w);\r
+ rftfsub(n, a, nc, w + nw);\r
+ }\r
+ else if (n == 4)\r
+ {\r
+ cftfsub(n, a, w);\r
+ }\r
+ double xi = a[0] - a[1];\r
+ a[0] += a[1];\r
+ a[1] = xi;\r
+ }\r
+ else\r
+ {\r
+ a[1] = 0.5 * (a[0] - a[1]);\r
+ a[0] -= a[1];\r
+ if (n > 4)\r
+ {\r
+ rftbsub(n, a, nc, w + nw);\r
+ bitrv2(n, ip + 2, a);\r
+ cftbsub(n, a, w);\r
+ }\r
+ else if (n == 4)\r
+ {\r
+ cftfsub(n, a, w);\r
+ }\r
+ }\r
+ }\r
+\r
+\r
+ /* -------- initializing routines -------- */\r
+\r
+ void makewt(int nw, int *ip, double *w)\r
+ {\r
+ int j, nwh;\r
+ double delta, x, y;\r
+\r
+ ip[0] = nw;\r
+ ip[1] = 1;\r
+ if (nw > 2) {\r
+ nwh = nw >> 1;\r
+ delta = atan(1.0) / nwh;\r
+ w[0] = 1;\r
+ w[1] = 0;\r
+ w[nwh] = cos(delta * nwh);\r
+ w[nwh + 1] = w[nwh];\r
+ if (nwh > 2) {\r
+ for (j = 2; j < nwh; j += 2) {\r
+ x = cos(delta * j);\r
+ y = sin(delta * j);\r
+ w[j] = x;\r
+ w[j + 1] = y;\r
+ w[nw - j] = y;\r
+ w[nw - j + 1] = x;\r
+ }\r
+ bitrv2(nw, ip + 2, w);\r
+ }\r
+ }\r
+ }\r
+\r
+\r
+ void makect(int nc, int *ip, double *c)\r
+ {\r
+ int j, nch;\r
+ double delta;\r
+\r
+ ip[1] = nc;\r
+ if (nc > 1) {\r
+ nch = nc >> 1;\r
+ delta = atan(1.0) / nch;\r
+ c[0] = cos(delta * nch);\r
+ c[nch] = 0.5 * c[0];\r
+ for (j = 1; j < nch; j++) {\r
+ c[j] = 0.5 * cos(delta * j);\r
+ c[nc - j] = 0.5 * sin(delta * j);\r
+ }\r
+ }\r
+ }\r
+\r
+\r
+ /* -------- child routines -------- */\r
+\r
+\r
+ void bitrv2(int n, int *ip, double *a)\r
+ {\r
+ int j, j1, k, k1, l, m, m2;\r
+ double xr, xi, yr, yi;\r
+\r
+ ip[0] = 0;\r
+ l = n;\r
+ m = 1;\r
+ while ((m << 3) < l) {\r
+ l >>= 1;\r
+ for (j = 0; j < m; j++) {\r
+ ip[m + j] = ip[j] + l;\r
+ }\r
+ m <<= 1;\r
+ }\r
+ m2 = 2 * m;\r
+ if ((m << 3) == l) {\r
+ for (k = 0; k < m; k++) {\r
+ for (j = 0; j < k; j++) {\r
+ j1 = 2 * j + ip[k];\r
+ k1 = 2 * k + ip[j];\r
+ xr = a[j1];\r
+ xi = a[j1 + 1];\r
+ yr = a[k1];\r
+ yi = a[k1 + 1];\r
+ a[j1] = yr;\r
+ a[j1 + 1] = yi;\r
+ a[k1] = xr;\r
+ a[k1 + 1] = xi;\r
+ j1 += m2;\r
+ k1 += 2 * m2;\r
+ xr = a[j1];\r
+ xi = a[j1 + 1];\r
+ yr = a[k1];\r
+ yi = a[k1 + 1];\r
+ a[j1] = yr;\r
+ a[j1 + 1] = yi;\r
+ a[k1] = xr;\r
+ a[k1 + 1] = xi;\r
+ j1 += m2;\r
+ k1 -= m2;\r
+ xr = a[j1];\r
+ xi = a[j1 + 1];\r
+ yr = a[k1];\r
+ yi = a[k1 + 1];\r
+ a[j1] = yr;\r
+ a[j1 + 1] = yi;\r
+ a[k1] = xr;\r
+ a[k1 + 1] = xi;\r
+ j1 += m2;\r
+ k1 += 2 * m2;\r
+ xr = a[j1];\r
+ xi = a[j1 + 1];\r
+ yr = a[k1];\r
+ yi = a[k1 + 1];\r
+ a[j1] = yr;\r
+ a[j1 + 1] = yi;\r
+ a[k1] = xr;\r
+ a[k1 + 1] = xi;\r
+ }\r
+ j1 = 2 * k + m2 + ip[k];\r
+ k1 = j1 + m2;\r
+ xr = a[j1];\r
+ xi = a[j1 + 1];\r
+ yr = a[k1];\r
+ yi = a[k1 + 1];\r
+ a[j1] = yr;\r
+ a[j1 + 1] = yi;\r
+ a[k1] = xr;\r
+ a[k1 + 1] = xi;\r
+ }\r
+ } else {\r
+ for (k = 1; k < m; k++) {\r
+ for (j = 0; j < k; j++) {\r
+ j1 = 2 * j + ip[k];\r
+ k1 = 2 * k + ip[j];\r
+ xr = a[j1];\r
+ xi = a[j1 + 1];\r
+ yr = a[k1];\r
+ yi = a[k1 + 1];\r
+ a[j1] = yr;\r
+ a[j1 + 1] = yi;\r
+ a[k1] = xr;\r
+ a[k1 + 1] = xi;\r
+ j1 += m2;\r
+ k1 += m2;\r
+ xr = a[j1];\r
+ xi = a[j1 + 1];\r
+ yr = a[k1];\r
+ yi = a[k1 + 1];\r
+ a[j1] = yr;\r
+ a[j1 + 1] = yi;\r
+ a[k1] = xr;\r
+ a[k1 + 1] = xi;\r
+ }\r
+ }\r
+ }\r
+ }\r
+\r
+\r
+ void cftfsub(int n, double *a, double *w)\r
+ {\r
+ int j, j1, j2, j3, l;\r
+ double x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;\r
+\r
+ l = 2;\r
+ if (n > 8) {\r
+ cft1st(n, a, w);\r
+ l = 8;\r
+ while ((l << 2) < n) {\r
+ cftmdl(n, l, a, w);\r
+ l <<= 2;\r
+ }\r
+ }\r
+ if ((l << 2) == n) {\r
+ for (j = 0; j < l; j += 2) {\r
+ j1 = j + l;\r
+ j2 = j1 + l;\r
+ j3 = j2 + l;\r
+ x0r = a[j] + a[j1];\r
+ x0i = a[j + 1] + a[j1 + 1];\r
+ x1r = a[j] - a[j1];\r
+ x1i = a[j + 1] - a[j1 + 1];\r
+ x2r = a[j2] + a[j3];\r
+ x2i = a[j2 + 1] + a[j3 + 1];\r
+ x3r = a[j2] - a[j3];\r
+ x3i = a[j2 + 1] - a[j3 + 1];\r
+ a[j] = x0r + x2r;\r
+ a[j + 1] = x0i + x2i;\r
+ a[j2] = x0r - x2r;\r
+ a[j2 + 1] = x0i - x2i;\r
+ a[j1] = x1r - x3i;\r
+ a[j1 + 1] = x1i + x3r;\r
+ a[j3] = x1r + x3i;\r
+ a[j3 + 1] = x1i - x3r;\r
+ }\r
+ } else {\r
+ for (j = 0; j < l; j += 2) {\r
+ j1 = j + l;\r
+ x0r = a[j] - a[j1];\r
+ x0i = a[j + 1] - a[j1 + 1];\r
+ a[j] += a[j1];\r
+ a[j + 1] += a[j1 + 1];\r
+ a[j1] = x0r;\r
+ a[j1 + 1] = x0i;\r
+ }\r
+ }\r
+ }\r
+\r
+\r
+ void cftbsub(int n, double *a, double *w)\r
+ {\r
+ int j, j1, j2, j3, l;\r
+ double x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;\r
+\r
+ l = 2;\r
+ if (n > 8) {\r
+ cft1st(n, a, w);\r
+ l = 8;\r
+ while ((l << 2) < n) {\r
+ cftmdl(n, l, a, w);\r
+ l <<= 2;\r
+ }\r
+ }\r
+ if ((l << 2) == n) {\r
+ for (j = 0; j < l; j += 2) {\r
+ j1 = j + l;\r
+ j2 = j1 + l;\r
+ j3 = j2 + l;\r
+ x0r = a[j] + a[j1];\r
+ x0i = -a[j + 1] - a[j1 + 1];\r
+ x1r = a[j] - a[j1];\r
+ x1i = -a[j + 1] + a[j1 + 1];\r
+ x2r = a[j2] + a[j3];\r
+ x2i = a[j2 + 1] + a[j3 + 1];\r
+ x3r = a[j2] - a[j3];\r
+ x3i = a[j2 + 1] - a[j3 + 1];\r
+ a[j] = x0r + x2r;\r
+ a[j + 1] = x0i - x2i;\r
+ a[j2] = x0r - x2r;\r
+ a[j2 + 1] = x0i + x2i;\r
+ a[j1] = x1r - x3i;\r
+ a[j1 + 1] = x1i - x3r;\r
+ a[j3] = x1r + x3i;\r
+ a[j3 + 1] = x1i + x3r;\r
+ }\r
+ } else {\r
+ for (j = 0; j < l; j += 2) {\r
+ j1 = j + l;\r
+ x0r = a[j] - a[j1];\r
+ x0i = -a[j + 1] + a[j1 + 1];\r
+ a[j] += a[j1];\r
+ a[j + 1] = -a[j + 1] - a[j1 + 1];\r
+ a[j1] = x0r;\r
+ a[j1 + 1] = x0i;\r
+ }\r
+ }\r
+ }\r
+\r
+\r
+ void cft1st(int n, double *a, double *w)\r
+ {\r
+ int j, k1, k2;\r
+ double wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;\r
+ double x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;\r
+\r
+ x0r = a[0] + a[2];\r
+ x0i = a[1] + a[3];\r
+ x1r = a[0] - a[2];\r
+ x1i = a[1] - a[3];\r
+ x2r = a[4] + a[6];\r
+ x2i = a[5] + a[7];\r
+ x3r = a[4] - a[6];\r
+ x3i = a[5] - a[7];\r
+ a[0] = x0r + x2r;\r
+ a[1] = x0i + x2i;\r
+ a[4] = x0r - x2r;\r
+ a[5] = x0i - x2i;\r
+ a[2] = x1r - x3i;\r
+ a[3] = x1i + x3r;\r
+ a[6] = x1r + x3i;\r
+ a[7] = x1i - x3r;\r
+ wk1r = w[2];\r
+ x0r = a[8] + a[10];\r
+ x0i = a[9] + a[11];\r
+ x1r = a[8] - a[10];\r
+ x1i = a[9] - a[11];\r
+ x2r = a[12] + a[14];\r
+ x2i = a[13] + a[15];\r
+ x3r = a[12] - a[14];\r
+ x3i = a[13] - a[15];\r
+ a[8] = x0r + x2r;\r
+ a[9] = x0i + x2i;\r
+ a[12] = x2i - x0i;\r
+ a[13] = x0r - x2r;\r
+ x0r = x1r - x3i;\r
+ x0i = x1i + x3r;\r
+ a[10] = wk1r * (x0r - x0i);\r
+ a[11] = wk1r * (x0r + x0i);\r
+ x0r = x3i + x1r;\r
+ x0i = x3r - x1i;\r
+ a[14] = wk1r * (x0i - x0r);\r
+ a[15] = wk1r * (x0i + x0r);\r
+ k1 = 0;\r
+ for (j = 16; j < n; j += 16) {\r
+ k1 += 2;\r
+ k2 = 2 * k1;\r
+ wk2r = w[k1];\r
+ wk2i = w[k1 + 1];\r
+ wk1r = w[k2];\r
+ wk1i = w[k2 + 1];\r
+ wk3r = wk1r - 2 * wk2i * wk1i;\r
+ wk3i = 2 * wk2i * wk1r - wk1i;\r
+ x0r = a[j] + a[j + 2];\r
+ x0i = a[j + 1] + a[j + 3];\r
+ x1r = a[j] - a[j + 2];\r
+ x1i = a[j + 1] - a[j + 3];\r
+ x2r = a[j + 4] + a[j + 6];\r
+ x2i = a[j + 5] + a[j + 7];\r
+ x3r = a[j + 4] - a[j + 6];\r
+ x3i = a[j + 5] - a[j + 7];\r
+ a[j] = x0r + x2r;\r
+ a[j + 1] = x0i + x2i;\r
+ x0r -= x2r;\r
+ x0i -= x2i;\r
+ a[j + 4] = wk2r * x0r - wk2i * x0i;\r
+ a[j + 5] = wk2r * x0i + wk2i * x0r;\r
+ x0r = x1r - x3i;\r
+ x0i = x1i + x3r;\r
+ a[j + 2] = wk1r * x0r - wk1i * x0i;\r
+ a[j + 3] = wk1r * x0i + wk1i * x0r;\r
+ x0r = x1r + x3i;\r
+ x0i = x1i - x3r;\r
+ a[j + 6] = wk3r * x0r - wk3i * x0i;\r
+ a[j + 7] = wk3r * x0i + wk3i * x0r;\r
+ wk1r = w[k2 + 2];\r
+ wk1i = w[k2 + 3];\r
+ wk3r = wk1r - 2 * wk2r * wk1i;\r
+ wk3i = 2 * wk2r * wk1r - wk1i;\r
+ x0r = a[j + 8] + a[j + 10];\r
+ x0i = a[j + 9] + a[j + 11];\r
+ x1r = a[j + 8] - a[j + 10];\r
+ x1i = a[j + 9] - a[j + 11];\r
+ x2r = a[j + 12] + a[j + 14];\r
+ x2i = a[j + 13] + a[j + 15];\r
+ x3r = a[j + 12] - a[j + 14];\r
+ x3i = a[j + 13] - a[j + 15];\r
+ a[j + 8] = x0r + x2r;\r
+ a[j + 9] = x0i + x2i;\r
+ x0r -= x2r;\r
+ x0i -= x2i;\r
+ a[j + 12] = -wk2i * x0r - wk2r * x0i;\r
+ a[j + 13] = -wk2i * x0i + wk2r * x0r;\r
+ x0r = x1r - x3i;\r
+ x0i = x1i + x3r;\r
+ a[j + 10] = wk1r * x0r - wk1i * x0i;\r
+ a[j + 11] = wk1r * x0i + wk1i * x0r;\r
+ x0r = x1r + x3i;\r
+ x0i = x1i - x3r;\r
+ a[j + 14] = wk3r * x0r - wk3i * x0i;\r
+ a[j + 15] = wk3r * x0i + wk3i * x0r;\r
+ }\r
+ }\r
+\r
+\r
+ void cftmdl(int n, int l, double *a, double *w)\r
+ {\r
+ int j, j1, j2, j3, k, k1, k2, m, m2;\r
+ double wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;\r
+ double x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;\r
+\r
+ m = l << 2;\r
+ for (j = 0; j < l; j += 2) {\r
+ j1 = j + l;\r
+ j2 = j1 + l;\r
+ j3 = j2 + l;\r
+ x0r = a[j] + a[j1];\r
+ x0i = a[j + 1] + a[j1 + 1];\r
+ x1r = a[j] - a[j1];\r
+ x1i = a[j + 1] - a[j1 + 1];\r
+ x2r = a[j2] + a[j3];\r
+ x2i = a[j2 + 1] + a[j3 + 1];\r
+ x3r = a[j2] - a[j3];\r
+ x3i = a[j2 + 1] - a[j3 + 1];\r
+ a[j] = x0r + x2r;\r
+ a[j + 1] = x0i + x2i;\r
+ a[j2] = x0r - x2r;\r
+ a[j2 + 1] = x0i - x2i;\r
+ a[j1] = x1r - x3i;\r
+ a[j1 + 1] = x1i + x3r;\r
+ a[j3] = x1r + x3i;\r
+ a[j3 + 1] = x1i - x3r;\r
+ }\r
+ wk1r = w[2];\r
+ for (j = m; j < l + m; j += 2) {\r
+ j1 = j + l;\r
+ j2 = j1 + l;\r
+ j3 = j2 + l;\r
+ x0r = a[j] + a[j1];\r
+ x0i = a[j + 1] + a[j1 + 1];\r
+ x1r = a[j] - a[j1];\r
+ x1i = a[j + 1] - a[j1 + 1];\r
+ x2r = a[j2] + a[j3];\r
+ x2i = a[j2 + 1] + a[j3 + 1];\r
+ x3r = a[j2] - a[j3];\r
+ x3i = a[j2 + 1] - a[j3 + 1];\r
+ a[j] = x0r + x2r;\r
+ a[j + 1] = x0i + x2i;\r
+ a[j2] = x2i - x0i;\r
+ a[j2 + 1] = x0r - x2r;\r
+ x0r = x1r - x3i;\r
+ x0i = x1i + x3r;\r
+ a[j1] = wk1r * (x0r - x0i);\r
+ a[j1 + 1] = wk1r * (x0r + x0i);\r
+ x0r = x3i + x1r;\r
+ x0i = x3r - x1i;\r
+ a[j3] = wk1r * (x0i - x0r);\r
+ a[j3 + 1] = wk1r * (x0i + x0r);\r
+ }\r
+ k1 = 0;\r
+ m2 = 2 * m;\r
+ for (k = m2; k < n; k += m2) {\r
+ k1 += 2;\r
+ k2 = 2 * k1;\r
+ wk2r = w[k1];\r
+ wk2i = w[k1 + 1];\r
+ wk1r = w[k2];\r
+ wk1i = w[k2 + 1];\r
+ wk3r = wk1r - 2 * wk2i * wk1i;\r
+ wk3i = 2 * wk2i * wk1r - wk1i;\r
+ for (j = k; j < l + k; j += 2) {\r
+ j1 = j + l;\r
+ j2 = j1 + l;\r
+ j3 = j2 + l;\r
+ x0r = a[j] + a[j1];\r
+ x0i = a[j + 1] + a[j1 + 1];\r
+ x1r = a[j] - a[j1];\r
+ x1i = a[j + 1] - a[j1 + 1];\r
+ x2r = a[j2] + a[j3];\r
+ x2i = a[j2 + 1] + a[j3 + 1];\r
+ x3r = a[j2] - a[j3];\r
+ x3i = a[j2 + 1] - a[j3 + 1];\r
+ a[j] = x0r + x2r;\r
+ a[j + 1] = x0i + x2i;\r
+ x0r -= x2r;\r
+ x0i -= x2i;\r
+ a[j2] = wk2r * x0r - wk2i * x0i;\r
+ a[j2 + 1] = wk2r * x0i + wk2i * x0r;\r
+ x0r = x1r - x3i;\r
+ x0i = x1i + x3r;\r
+ a[j1] = wk1r * x0r - wk1i * x0i;\r
+ a[j1 + 1] = wk1r * x0i + wk1i * x0r;\r
+ x0r = x1r + x3i;\r
+ x0i = x1i - x3r;\r
+ a[j3] = wk3r * x0r - wk3i * x0i;\r
+ a[j3 + 1] = wk3r * x0i + wk3i * x0r;\r
+ }\r
+ wk1r = w[k2 + 2];\r
+ wk1i = w[k2 + 3];\r
+ wk3r = wk1r - 2 * wk2r * wk1i;\r
+ wk3i = 2 * wk2r * wk1r - wk1i;\r
+ for (j = k + m; j < l + (k + m); j += 2) {\r
+ j1 = j + l;\r
+ j2 = j1 + l;\r
+ j3 = j2 + l;\r
+ x0r = a[j] + a[j1];\r
+ x0i = a[j + 1] + a[j1 + 1];\r
+ x1r = a[j] - a[j1];\r
+ x1i = a[j + 1] - a[j1 + 1];\r
+ x2r = a[j2] + a[j3];\r
+ x2i = a[j2 + 1] + a[j3 + 1];\r
+ x3r = a[j2] - a[j3];\r
+ x3i = a[j2 + 1] - a[j3 + 1];\r
+ a[j] = x0r + x2r;\r
+ a[j + 1] = x0i + x2i;\r
+ x0r -= x2r;\r
+ x0i -= x2i;\r
+ a[j2] = -wk2i * x0r - wk2r * x0i;\r
+ a[j2 + 1] = -wk2i * x0i + wk2r * x0r;\r
+ x0r = x1r - x3i;\r
+ x0i = x1i + x3r;\r
+ a[j1] = wk1r * x0r - wk1i * x0i;\r
+ a[j1 + 1] = wk1r * x0i + wk1i * x0r;\r
+ x0r = x1r + x3i;\r
+ x0i = x1i - x3r;\r
+ a[j3] = wk3r * x0r - wk3i * x0i;\r
+ a[j3 + 1] = wk3r * x0i + wk3i * x0r;\r
+ }\r
+ }\r
+ }\r
+\r
+\r
+ void rftfsub(int n, double *a, int nc, double *c)\r
+ {\r
+ int j, k, kk, ks, m;\r
+ double wkr, wki, xr, xi, yr, yi;\r
+\r
+ m = n >> 1;\r
+ ks = 2 * nc / m;\r
+ kk = 0;\r
+ for (j = 2; j < m; j += 2) {\r
+ k = n - j;\r
+ kk += ks;\r
+ wkr = 0.5 - c[nc - kk];\r
+ wki = c[kk];\r
+ xr = a[j] - a[k];\r
+ xi = a[j + 1] + a[k + 1];\r
+ yr = wkr * xr - wki * xi;\r
+ yi = wkr * xi + wki * xr;\r
+ a[j] -= yr;\r
+ a[j + 1] -= yi;\r
+ a[k] += yr;\r
+ a[k + 1] -= yi;\r
+ }\r
+ }\r
+\r
+\r
+ void rftbsub(int n, double *a, int nc, double *c)\r
+ {\r
+ int j, k, kk, ks, m;\r
+ double wkr, wki, xr, xi, yr, yi;\r
+\r
+ a[1] = -a[1];\r
+ m = n >> 1;\r
+ ks = 2 * nc / m;\r
+ kk = 0;\r
+ for (j = 2; j < m; j += 2) {\r
+ k = n - j;\r
+ kk += ks;\r
+ wkr = 0.5 - c[nc - kk];\r
+ wki = c[kk];\r
+ xr = a[j] - a[k];\r
+ xi = a[j + 1] + a[k + 1];\r
+ yr = wkr * xr + wki * xi;\r
+ yi = wkr * xi - wki * xr;\r
+ a[j] -= yr;\r
+ a[j + 1] = yi - a[j + 1];\r
+ a[k] += yr;\r
+ a[k + 1] = yi - a[k + 1];\r
+ }\r
+ a[m + 1] = -a[m + 1];\r
+ }\r
+ };\r
+\r
+\r
+ /**\r
+ * @internal\r
+ * @brief Concrete FFT implementation\r
+ */\r
+ typedef OouraFFT AudioFFTImplementation;\r
+\r
+\r
+#endif // AUDIOFFT_OOURA_USED\r
+\r
+\r
+ // ================================================================\r
+\r
+\r
+#ifdef AUDIOFFT_APPLE_ACCELERATE_USED\r
+\r
+\r
+ /**\r
+ * @internal\r
+ * @class AppleAccelerateFFT\r
+ * @brief FFT implementation using the Apple Accelerate framework internally\r
+ */\r
+ class AppleAccelerateFFT : public detail::AudioFFTImpl\r
+ {\r
+ public:\r
+ AppleAccelerateFFT() :\r
+ detail::AudioFFTImpl(),\r
+ _size(0),\r
+ _powerOf2(0),\r
+ _fftSetup(0),\r
+ _re(),\r
+ _im()\r
+ {\r
+ }\r
+\r
+ AppleAccelerateFFT(const AppleAccelerateFFT&) = delete;\r
+ AppleAccelerateFFT& operator=(const AppleAccelerateFFT&) = delete;\r
+\r
+ virtual ~AppleAccelerateFFT()\r
+ {\r
+ init(0);\r
+ }\r
+\r
+ virtual void init(size_t size) override\r
+ {\r
+ if (_fftSetup)\r
+ {\r
+ vDSP_destroy_fftsetup(_fftSetup);\r
+ _size = 0;\r
+ _powerOf2 = 0;\r
+ _fftSetup = 0;\r
+ _re.clear();\r
+ _im.clear();\r
+ }\r
+\r
+ if (size > 0)\r
+ {\r
+ _size = size;\r
+ _powerOf2 = 0;\r
+ while ((1 << _powerOf2) < _size)\r
+ {\r
+ ++_powerOf2;\r
+ }\r
+ _fftSetup = vDSP_create_fftsetup(_powerOf2, FFT_RADIX2);\r
+ _re.resize(_size / 2);\r
+ _im.resize(_size / 2);\r
+ }\r
+ }\r
+\r
+ virtual void fft(const float* data, float* re, float* im) override\r
+ {\r
+ const size_t size2 = _size / 2;\r
+ DSPSplitComplex splitComplex;\r
+ splitComplex.realp = re;\r
+ splitComplex.imagp = im;\r
+ vDSP_ctoz(reinterpret_cast<const COMPLEX*>(data), 2, &splitComplex, 1, size2);\r
+ vDSP_fft_zrip(_fftSetup, &splitComplex, 1, _powerOf2, FFT_FORWARD);\r
+ const float factor = 0.5f;\r
+ vDSP_vsmul(re, 1, &factor, re, 1, size2);\r
+ vDSP_vsmul(im, 1, &factor, im, 1, size2);\r
+ re[size2] = im[0];\r
+ im[0] = 0.0f;\r
+ im[size2] = 0.0f;\r
+ }\r
+\r
+ virtual void ifft(float* data, const float* re, const float* im) override\r
+ {\r
+ const size_t size2 = _size / 2;\r
+ ::memcpy(_re.data(), re, size2 * sizeof(float));\r
+ ::memcpy(_im.data(), im, size2 * sizeof(float));\r
+ _im[0] = re[size2];\r
+ DSPSplitComplex splitComplex;\r
+ splitComplex.realp = _re.data();\r
+ splitComplex.imagp = _im.data();\r
+ vDSP_fft_zrip(_fftSetup, &splitComplex, 1, _powerOf2, FFT_INVERSE);\r
+ vDSP_ztoc(&splitComplex, 1, reinterpret_cast<COMPLEX*>(data), 2, size2);\r
+ const float factor = 1.0f / static_cast<float>(_size);\r
+ vDSP_vsmul(data, 1, &factor, data, 1, _size);\r
+ }\r
+\r
+ private:\r
+ size_t _size;\r
+ size_t _powerOf2;\r
+ FFTSetup _fftSetup;\r
+ std::vector<float> _re;\r
+ std::vector<float> _im;\r
+ };\r
+\r
+\r
+ /**\r
+ * @internal\r
+ * @brief Concrete FFT implementation\r
+ */\r
+ typedef AppleAccelerateFFT AudioFFTImplementation;\r
+\r
+\r
+#endif // AUDIOFFT_APPLE_ACCELERATE_USED\r
+\r
+\r
+ // ================================================================\r
+\r
+\r
+#ifdef AUDIOFFT_FFTW3_USED\r
+\r
+\r
+ /**\r
+ * @internal\r
+ * @class FFTW3FFT\r
+ * @brief FFT implementation using FFTW3 internally (see fftw.org)\r
+ */\r
+ class FFTW3FFT : public detail::AudioFFTImpl\r
+ {\r
+ public:\r
+ FFTW3FFT() :\r
+ detail::AudioFFTImpl(),\r
+ _size(0),\r
+ _complexSize(0),\r
+ _planForward(0),\r
+ _planBackward(0),\r
+ _data(0),\r
+ _re(0),\r
+ _im(0)\r
+ {\r
+ }\r
+\r
+ FFTW3FFT(const FFTW3FFT&) = delete;\r
+ FFTW3FFT& operator=(const FFTW3FFT&) = delete;\r
+\r
+ virtual ~FFTW3FFT()\r
+ {\r
+ init(0);\r
+ }\r
+\r
+ virtual void init(size_t size) override\r
+ {\r
+ if (_size != size)\r
+ {\r
+ if (_size > 0)\r
+ {\r
+ fftwf_destroy_plan(_planForward);\r
+ fftwf_destroy_plan(_planBackward);\r
+ _planForward = 0;\r
+ _planBackward = 0;\r
+ _size = 0;\r
+ _complexSize = 0;\r
+\r
+ if (_data)\r
+ {\r
+ fftwf_free(_data);\r
+ _data = 0;\r
+ }\r
+\r
+ if (_re)\r
+ {\r
+ fftwf_free(_re);\r
+ _re = 0;\r
+ }\r
+\r
+ if (_im)\r
+ {\r
+ fftwf_free(_im);\r
+ _im = 0;\r
+ }\r
+ }\r
+\r
+ if (size > 0)\r
+ {\r
+ \r
+ _size = size;\r
+ _complexSize = AudioFFT::ComplexSize(_size);\r
+ const size_t complexSize = AudioFFT::ComplexSize(_size);\r
+ _data = reinterpret_cast<float*>(fftwf_malloc(_size * sizeof(float)));\r
+ _re = reinterpret_cast<float*>(fftwf_malloc(complexSize * sizeof(float)));\r
+ _im = reinterpret_cast<float*>(fftwf_malloc(complexSize * sizeof(float)));\r
+ fftwf_set_timelimit(0.01);\r
+ fftw_iodim dim;\r
+ dim.n = static_cast<int>(size);\r
+ dim.is = 1;\r
+ dim.os = 1;\r
+ _planForward = fftwf_plan_guru_split_dft_r2c(1, &dim, 0, 0, _data, _re, _im, FFTW_MEASURE);\r
+ _planBackward = fftwf_plan_guru_split_dft_c2r(1, &dim, 0, 0, _re, _im, _data, FFTW_MEASURE);\r
+ }\r
+ }\r
+ }\r
+\r
+ virtual void fft(const float* data, float* re, float* im) override\r
+ {\r
+ ::memcpy(_data, data, _size * sizeof(float));\r
+ fftwf_execute_split_dft_r2c(_planForward, _data, _re, _im);\r
+ ::memcpy(re, _re, _complexSize * sizeof(float));\r
+ ::memcpy(im, _im, _complexSize * sizeof(float));\r
+ }\r
+\r
+ virtual void ifft(float* data, const float* re, const float* im) override\r
+ {\r
+ ::memcpy(_re, re, _complexSize * sizeof(float));\r
+ ::memcpy(_im, im, _complexSize * sizeof(float));\r
+ fftwf_execute_split_dft_c2r(_planBackward, _re, _im, _data);\r
+ detail::ScaleBuffer(data, _data, 1.0f / static_cast<float>(_size), _size);\r
+ }\r
+\r
+ private:\r
+ size_t _size;\r
+ size_t _complexSize;\r
+ fftwf_plan _planForward;\r
+ fftwf_plan _planBackward;\r
+ float* _data;\r
+ float* _re;\r
+ float* _im;\r
+ };\r
+\r
+\r
+ /**\r
+ * @internal\r
+ * @brief Concrete FFT implementation\r
+ */\r
+ typedef FFTW3FFT AudioFFTImplementation;\r
+\r
+\r
+#endif // AUDIOFFT_FFTW3_USED\r
+\r
+\r
+ // =============================================================\r
+\r
+\r
+ AudioFFT::AudioFFT() :\r
+ _impl(new AudioFFTImplementation())\r
+ {\r
+ }\r
+\r
+\r
+ AudioFFT::~AudioFFT()\r
+ {\r
+ }\r
+\r
+\r
+ void AudioFFT::init(size_t size)\r
+ {\r
+ assert(detail::IsPowerOf2(size));\r
+ _impl->init(size);\r
+ }\r
+\r
+\r
+ void AudioFFT::fft(const float* data, float* re, float* im)\r
+ {\r
+ _impl->fft(data, re, im);\r
+ }\r
+\r
+\r
+ void AudioFFT::ifft(float* data, const float* re, const float* im)\r
+ {\r
+ _impl->ifft(data, re, im);\r
+ }\r
+\r
+\r
+ size_t AudioFFT::ComplexSize(size_t size)\r
+ {\r
+ return (size / 2) + 1;\r
+ }\r
+\r
+} // End of namespace\r
-// ==================================================================================
-// Copyright (c) 2016 HiFi-LoFi
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is furnished
-// to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-// ==================================================================================
-
-#ifndef _AUDIOFFT_H
-#define _AUDIOFFT_H
-
-
-/**
-* AudioFFT provides real-to-complex/complex-to-real FFT routines.
-*
-* Features:
-*
-* - Real-complex FFT and complex-real inverse FFT for power-of-2-sized real data.
-*
-* - Uniform interface to different FFT implementations (currently Ooura, FFTW3 and Apple Accelerate).
-*
-* - Complex data is handled in "split-complex" format, i.e. there are separate
-* arrays for the real and imaginary parts which can be useful for SIMD optimizations
-* (split-complex arrays have to be of length (size/2+1) representing bins from DC
-* to Nyquist frequency).
-*
-* - Output is "ready to use" (all scaling etc. is already handled internally).
-*
-* - No allocations/deallocations after the initialization which makes it usable
-* for real-time audio applications (that's what I wrote it for and using it).
-*
-*
-* How to use it in your project:
-*
-* - Add the .h and .cpp file to your project - that's all.
-*
-* - To get extra speed, you can link FFTW3 to your project and define
-* AUDIOFFT_FFTW3 (however, please check whether your project suits the
-* according license).
-*
-* - To get the best speed on Apple platforms, you can link the Apple
-* Accelerate framework to your project and define
-* AUDIOFFT_APPLE_ACCELERATE (however, please check whether your
-* project suits the according license).
-*
-*
-* Remarks:
-*
-* - AudioFFT is not intended to be the fastest FFT, but to be a fast-enough
-* FFT suitable for most audio applications.
-*
-* - AudioFFT uses the quite liberal MIT license.
-*
-*
-* Example usage:
-* @code
-* #include "AudioFFT.h"
-*
-* void Example()
-* {
-* const size_t fftSize = 1024; // Needs to be power of 2!
-*
-* std::vector<float> input(fftSize, 0.0f);
-* std::vector<float> re(audiofft::AudioFFT::ComplexSize(fftSize));
-* std::vector<float> im(audiofft::AudioFFT::ComplexSize(fftSize));
-* std::vector<float> output(fftSize);
-*
-* audiofft::AudioFFT fft;
-* fft.init(1024);
-* fft.fft(input.data(), re.data(), im.data());
-* fft.ifft(output.data(), re.data(), im.data());
-* }
-* @endcode
-*/
-
-
-#include <cstddef>
-#include <memory>
-
-
-namespace audiofft
-{
-
- namespace details
- {
-
- class AudioFFTImpl
- {
- public:
- AudioFFTImpl() = default;
- virtual ~AudioFFTImpl() = default;
- virtual void init(size_t size) = 0;
- virtual void fft(const float* data, float* re, float* im) = 0;
- virtual void ifft(float* data, const float* re, const float* im) = 0;
-
- private:
- AudioFFTImpl(const AudioFFTImpl&) = delete;
- AudioFFTImpl& operator=(const AudioFFTImpl&) = delete;
- };
- }
-
-
- // ======================================================
-
-
- /**
- * @class AudioFFT
- * @brief Performs 1D FFTs
- */
- class AudioFFT
- {
- public:
- /**
- * @brief Constructor
- */
- AudioFFT();
-
- /**
- * @brief Initializes the FFT object
- * @param size Size of the real input (must be power 2)
- */
- void init(size_t size);
-
- /**
- * @brief Performs the forward FFT
- * @param data The real input data (has to be of the length as specified in init())
- * @param re The real part of the complex output (has to be of length as returned by ComplexSize())
- * @param im The imaginary part of the complex output (has to be of length as returned by ComplexSize())
- */
- void fft(const float* data, float* re, float* im);
-
- /**
- * @brief Performs the inverse FFT
- * @param data The real output data (has to be of the length as specified in init())
- * @param re The real part of the complex input (has to be of length as returned by ComplexSize())
- * @param im The imaginary part of the complex input (has to be of length as returned by ComplexSize())
- */
- void ifft(float* data, const float* re, const float* im);
-
- /**
- * @brief Calculates the necessary size of the real/imaginary complex arrays
- * @param size The size of the real data
- * @return The size of the real/imaginary complex arrays
- */
- static size_t ComplexSize(size_t size);
-
- private:
- std::unique_ptr<details::AudioFFTImpl> _impl;
-
- AudioFFT(const AudioFFT&) = delete;
- AudioFFT& operator=(const AudioFFT&) = delete;
- };
-
-
- /**
- * @deprecated
- * @brief Let's keep an AudioFFTBase type around for now because it has been here already in the 1st version in order to avoid breaking existing code.
- */
- typedef AudioFFT AudioFFTBase;
-
-} // End of namespace
-
-#endif // Header guard
+// ==================================================================================\r
+// Copyright (c) 2017 HiFi-LoFi\r
+//\r
+// Permission is hereby granted, free of charge, to any person obtaining a copy\r
+// of this software and associated documentation files (the "Software"), to deal\r
+// in the Software without restriction, including without limitation the rights\r
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\r
+// copies of the Software, and to permit persons to whom the Software is furnished\r
+// to do so, subject to the following conditions:\r
+//\r
+// The above copyright notice and this permission notice shall be included in\r
+// all copies or substantial portions of the Software.\r
+//\r
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\r
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\r
+// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\r
+// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\r
+// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\r
+// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\r
+// ==================================================================================\r
+\r
+#ifndef _AUDIOFFT_H\r
+#define _AUDIOFFT_H\r
+\r
+\r
+/**\r
+* AudioFFT provides real-to-complex/complex-to-real FFT routines.\r
+*\r
+* Features:\r
+*\r
+* - Real-complex FFT and complex-real inverse FFT for power-of-2-sized real data.\r
+*\r
+* - Uniform interface to different FFT implementations (currently Ooura, FFTW3 and Apple Accelerate).\r
+*\r
+* - Complex data is handled in "split-complex" format, i.e. there are separate\r
+* arrays for the real and imaginary parts which can be useful for SIMD optimizations\r
+* (split-complex arrays have to be of length (size/2+1) representing bins from DC\r
+* to Nyquist frequency).\r
+*\r
+* - Output is "ready to use" (all scaling etc. is already handled internally).\r
+*\r
+* - No allocations/deallocations after the initialization which makes it usable\r
+* for real-time audio applications (that's what I wrote it for and using it).\r
+*\r
+*\r
+* How to use it in your project:\r
+*\r
+* - Add the .h and .cpp file to your project - that's all.\r
+*\r
+* - To get extra speed, you can link FFTW3 to your project and define\r
+* AUDIOFFT_FFTW3 (however, please check whether your project suits the\r
+* according license).\r
+*\r
+* - To get the best speed on Apple platforms, you can link the Apple\r
+* Accelerate framework to your project and define\r
+* AUDIOFFT_APPLE_ACCELERATE (however, please check whether your\r
+* project suits the according license).\r
+*\r
+*\r
+* Remarks:\r
+*\r
+* - AudioFFT is not intended to be the fastest FFT, but to be a fast-enough\r
+* FFT suitable for most audio applications.\r
+*\r
+* - AudioFFT uses the quite liberal MIT license.\r
+*\r
+*\r
+* Example usage:\r
+* @code\r
+* #include "AudioFFT.h"\r
+*\r
+* void Example()\r
+* {\r
+* const size_t fftSize = 1024; // Needs to be power of 2!\r
+*\r
+* std::vector<float> input(fftSize, 0.0f);\r
+* std::vector<float> re(audiofft::AudioFFT::ComplexSize(fftSize));\r
+* std::vector<float> im(audiofft::AudioFFT::ComplexSize(fftSize));\r
+* std::vector<float> output(fftSize);\r
+*\r
+* audiofft::AudioFFT fft;\r
+* fft.init(1024);\r
+* fft.fft(input.data(), re.data(), im.data());\r
+* fft.ifft(output.data(), re.data(), im.data());\r
+* }\r
+* @endcode\r
+*/\r
+\r
+\r
+#include <cstddef>\r
+#include <memory>\r
+\r
+\r
+namespace audiofft\r
+{\r
+\r
+ namespace detail\r
+ {\r
+ class AudioFFTImpl;\r
+ }\r
+\r
+\r
+ // =============================================================\r
+\r
+\r
+ /**\r
+ * @class AudioFFT\r
+ * @brief Performs 1D FFTs\r
+ */\r
+ class AudioFFT\r
+ {\r
+ public:\r
+ /**\r
+ * @brief Constructor\r
+ */\r
+ AudioFFT();\r
+\r
+ AudioFFT(const AudioFFT&) = delete;\r
+ AudioFFT& operator=(const AudioFFT&) = delete;\r
+\r
+ /**\r
+ * @brief Destructor\r
+ */\r
+ ~AudioFFT();\r
+\r
+ /**\r
+ * @brief Initializes the FFT object\r
+ * @param size Size of the real input (must be power 2)\r
+ */\r
+ void init(size_t size);\r
+\r
+ /**\r
+ * @brief Performs the forward FFT\r
+ * @param data The real input data (has to be of the length as specified in init())\r
+ * @param re The real part of the complex output (has to be of length as returned by ComplexSize())\r
+ * @param im The imaginary part of the complex output (has to be of length as returned by ComplexSize())\r
+ */\r
+ void fft(const float* data, float* re, float* im);\r
+\r
+ /**\r
+ * @brief Performs the inverse FFT\r
+ * @param data The real output data (has to be of the length as specified in init())\r
+ * @param re The real part of the complex input (has to be of length as returned by ComplexSize())\r
+ * @param im The imaginary part of the complex input (has to be of length as returned by ComplexSize())\r
+ */\r
+ void ifft(float* data, const float* re, const float* im);\r
+\r
+ /**\r
+ * @brief Calculates the necessary size of the real/imaginary complex arrays\r
+ * @param size The size of the real data\r
+ * @return The size of the real/imaginary complex arrays\r
+ */\r
+ static size_t ComplexSize(size_t size);\r
+\r
+ private:\r
+ std::unique_ptr<detail::AudioFFTImpl> _impl;\r
+ };\r
+\r
+\r
+ /**\r
+ * @deprecated\r
+ * @brief Let's keep an AudioFFTBase type around for now because it has been here already in the 1st version in order to avoid breaking existing code.\r
+ */\r
+ typedef AudioFFT AudioFFTBase;\r
+\r
+} // End of namespace\r
+\r
+#endif // Header guard\r
--- /dev/null
+#include "ConvolverThreadPool.h"
+#include "FFTConvolver.h"
+#include "config.h"
+
+ConvolverThreadPool::ConvolverThreadPool()
+ : _convolvers(), _threads(), _taskQueue(), _queueMutex(), _condition(), _completionCV(),
+ _stop(false), _activeTasks(0) {}
+
+ConvolverThreadPool::~ConvolverThreadPool() {
+ shutdown();
+
+ // Delete all convolver instances
+ for (size_t i = 0; i < _convolvers.size(); ++i) {
+ delete _convolvers[i];
+ }
+ _convolvers.clear();
+}
+
+bool ConvolverThreadPool::init(size_t numThreads, size_t numConvolvers) {
+ // Clean up any existing threads first
+ shutdown();
+
+ // Validate parameters
+ if (numThreads == 0 || numConvolvers == 0) {
+ return false;
+ }
+
+ // Delete any existing convolvers
+ for (size_t i = 0; i < _convolvers.size(); ++i) {
+ delete _convolvers[i];
+ }
+ _convolvers.clear();
+
+ // Reset state
+ _stop = false;
+ _activeTasks = 0;
+
+ // Create convolver instances (but don't initialize them yet)
+ _convolvers.resize(numConvolvers);
+ for (size_t i = 0; i < numConvolvers; ++i) {
+ _convolvers[i] = new FFTConvolver();
+ }
+
+ // Create worker threads
+ _threads.reserve(numThreads);
+ for (size_t i = 0; i < numThreads; i++) {
+#ifndef COMPILE_FOR_OSX
+ pthread_setname_np(pthread_self(), "convolver");
+#endif
+ _threads.emplace_back([this]() { workerThread(); });
+ }
+
+ return true;
+}
+
+// Mutex to protect FFTW plan creation (required)
+std::mutex fftwMutex;
+
+bool ConvolverThreadPool::initConvolver(size_t convolverId, size_t blockSize, const Sample *ir,
+ size_t irLen) {
+ if (convolverId >= _convolvers.size()) {
+ return false;
+ }
+
+ // Make sure no tasks are using this convolver
+ waitForAll();
+ std::lock_guard<std::mutex> lock(fftwMutex);
+ return _convolvers[convolverId]->init(blockSize, ir, irLen);
+}
+
+bool ConvolverThreadPool::initAllConvolvers(size_t blockSize, const Sample *ir, size_t irLen) {
+ // Make sure no tasks are running
+ waitForAll();
+
+ for (size_t i = 0; i < _convolvers.size(); ++i) {
+ if (!_convolvers[i]->init(blockSize, ir, irLen)) {
+ return false;
+ }
+ }
+
+ return true;
+}
+
+void ConvolverThreadPool::processAsync(size_t convolverId, const Sample *input, Sample *output,
+ size_t len) {
+ assert(convolverId < _convolvers.size());
+
+ {
+ std::lock_guard<std::mutex> lock(_queueMutex);
+
+ // Create the task
+ _taskQueue.push([this, convolverId, input, output, len]() {
+ _convolvers[convolverId]->process(input, output, len);
+ });
+
+ ++_activeTasks;
+ }
+
+ // Wake up one worker thread
+ _condition.notify_one();
+}
+
+void ConvolverThreadPool::waitForAll() {
+ std::unique_lock<std::mutex> lock(_queueMutex);
+ _completionCV.wait(lock, [this]() { return _taskQueue.empty() && _activeTasks == 0; });
+}
+
+void ConvolverThreadPool::clearState(size_t convolverId) {
+ assert(convolverId < _convolvers.size());
+
+ // Make sure no tasks are running before clearing state
+ waitForAll();
+
+ // _convolvers[convolverId]->clearState();
+}
+
+void ConvolverThreadPool::clearAllStates() {
+ // Make sure no tasks are running before clearing state
+ waitForAll();
+
+ for (size_t i = 0; i < _convolvers.size(); ++i) {
+ _convolvers[i]->clearState();
+ }
+}
+
+void ConvolverThreadPool::workerThread() {
+ while (true) {
+ std::function<void()> task;
+
+ // Wait for a task or stop signal
+ {
+ std::unique_lock<std::mutex> lock(_queueMutex);
+ _condition.wait(lock, [this]() { return _stop || !_taskQueue.empty(); });
+
+ // Exit if stopping and no more tasks
+ if (_stop && _taskQueue.empty()) {
+ return;
+ }
+
+ // Get the next task
+ task = std::move(_taskQueue.front());
+ _taskQueue.pop();
+ }
+
+ // Execute the task (outside the lock for better parallelism)
+ task();
+
+ // Mark task as complete
+ {
+ std::lock_guard<std::mutex> lock(_queueMutex);
+ --_activeTasks;
+ _completionCV.notify_all();
+ }
+ }
+}
+
+void ConvolverThreadPool::shutdown() {
+ // Signal threads to stop
+ {
+ std::lock_guard<std::mutex> lock(_queueMutex);
+ _stop = true;
+ }
+ _condition.notify_all();
+
+ // Wait for all threads to finish
+ for (auto &thread : _threads) {
+ if (thread.joinable()) {
+ thread.join();
+ }
+ }
+
+ // Clear thread vector
+ _threads.clear();
+
+ // Clear remaining tasks - properly this time
+ {
+ std::lock_guard<std::mutex> lock(_queueMutex);
+ while (!_taskQueue.empty()) {
+ _taskQueue.pop();
+ }
+ }
+
+ // Reset state
+ _activeTasks = 0;
+ _stop = false;
+}
\ No newline at end of file
--- /dev/null
+#ifndef CONVOLVER_THREAD_POOL_H
+#define CONVOLVER_THREAD_POOL_H
+
+#include "FFTConvolver.h"
+#include <cassert>
+#include <condition_variable>
+#include <functional>
+#include <mutex>
+#include <queue>
+#include <thread>
+#include <vector>
+
+// Use the fftconvolver namespace
+using fftconvolver::FFTConvolver;
+using fftconvolver::Sample;
+
+class ConvolverThreadPool {
+private:
+ std::vector<FFTConvolver *> _convolvers;
+ std::vector<std::thread> _threads;
+ std::queue<std::function<void()>> _taskQueue;
+ std::mutex _queueMutex;
+ std::condition_variable _condition;
+ std::condition_variable _completionCV;
+ bool _stop;
+ size_t _activeTasks;
+
+public:
+ ConvolverThreadPool();
+ ~ConvolverThreadPool();
+
+ // Initialize the thread pool (creates convolvers but doesn't initialize them)
+ // numThreads: number of worker threads (level of parallelism)
+ // numConvolvers: total number of convolver instances
+ bool init(size_t numThreads, size_t numConvolvers);
+
+ // Initialize a specific convolver with its IR
+ // convolverId: which convolver to initialize
+ // blockSize: block size for convolution
+ // ir: impulse response data
+ // irLen: length of impulse response
+ bool initConvolver(size_t convolverId, size_t blockSize, const Sample *ir, size_t irLen);
+
+ // Initialize all convolvers with the same IR
+ bool initAllConvolvers(size_t blockSize, const Sample *ir, size_t irLen);
+
+ // Queue a convolution task (non-blocking)
+ void processAsync(size_t convolverId, const Sample *input, Sample *output, size_t len);
+
+ // Wait for all queued tasks to complete (blocking)
+ void waitForAll();
+
+ // Clear the state of a specific convolver
+ void clearState(size_t convolverId);
+
+ // Clear the state of all convolvers
+ void clearAllStates();
+
+ // Get the number of convolvers
+ size_t getNumConvolvers() const { return _convolvers.size(); }
+
+ // Get the number of threads
+ size_t getNumThreads() const { return _threads.size(); }
+
+ void shutdown();
+
+private:
+ void workerThread();
+ // void shutdown();
+};
+
+#endif // CONVOLVER_THREAD_POOL_H
\ No newline at end of file
_inputBufferFill = 0;
}
+void FFTConvolver::clearState()
+{
+ if (_segCount == 0)
+ {
+ return; // Not initialized
+ }
+
+ _inputBuffer.setZero();
+ _inputBufferFill = 0;
+ _overlap.setZero();
+
+ for (size_t i = 0; i < _segCount; ++i)
+ {
+ _segments[i]->setZero();
+ }
+
+ _preMultiplied.setZero();
+ _conv.setZero();
+ _current = 0;
+}
bool FFTConvolver::init(size_t blockSize, const Sample* ir, size_t irLen)
{
// ==================================================================================
-// Copyright (c) 2012 HiFi-LoFi
+// Copyright (c) 2017 HiFi-LoFi
//
-// This is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is furnished
+// to do so, subject to the following conditions:
//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
//
-// You should have received a copy of the GNU General Public License
-// along with this program. If not, see <http://www.gnu.org/licenses/>.
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ==================================================================================
#ifndef _FFTCONVOLVER_FFTCONVOLVER_H
*/
void reset();
+ /**
+ * @brief Clears audio history
+ */
+
+ void clearState();
+
private:
size_t _blockSize;
size_t _segSize;
// ==================================================================================
-// Copyright (c) 2012 HiFi-LoFi
+// Copyright (c) 2017 HiFi-LoFi
//
-// This is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is furnished
+// to do so, subject to the following conditions:
//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
//
-// You should have received a copy of the GNU General Public License
-// along with this program. If not, see <http://www.gnu.org/licenses/>.
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ==================================================================================
#include "Utilities.h"
// ==================================================================================
-// Copyright (c) 2012 HiFi-LoFi
+// Copyright (c) 2017 HiFi-LoFi
//
-// This is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is furnished
+// to do so, subject to the following conditions:
//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
//
-// You should have received a copy of the GNU General Public License
-// along with this program. If not, see <http://www.gnu.org/licenses/>.
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ==================================================================================
#ifndef _FFTCONVOLVER_UTILITIES_H
_size = size;
}
}
- if (_data)
- setZero();
+ setZero();
}
size_t size() const
-#include <pthread.h>
-#include <sndfile.h>
#include "convolver.h"
+#include "ConvolverThreadPool.h"
#include "FFTConvolver.h"
#include "Utilities.h"
+#include <pthread.h>
+#include <sndfile.h>
+#include <unistd.h>
extern "C" void _warn(const char *filename, const int linenumber, const char *format, ...);
-extern "C" void _debug(const char *filename, const int linenumber, int level, const char *format, ...);
+extern "C" void _debug(const char *filename, const int linenumber, int level, const char *format,
+ ...);
#define warn(...) _warn(__FILE__, __LINE__, __VA_ARGS__)
#define debug(...) _debug(__FILE__, __LINE__, __VA_ARGS__)
-fftconvolver::FFTConvolver convolver_l;
-fftconvolver::FFTConvolver convolver_r;
+// Create and initialize the thread pool
+ConvolverThreadPool pool;
-// always lock use this when accessing the playing conn value
-pthread_mutex_t convolver_lock = PTHREAD_MUTEX_INITIALIZER;
+void convolver_pool_init(size_t numThreads, size_t numConvolvers) {
+ if (!pool.init(numThreads, numConvolvers)) {
+ debug(1, "failed to initialize thread pool!");
+ } else {
+ debug(1, "thread pool initialized with %u threads and %u convolvers.", numThreads,
+ numConvolvers);
+ }
+}
+void convolver_pool_closedown() {
+ pool.shutdown(); // Just shutdown, don't delete
+ debug(3, "thread pool shut down");
+}
-int convolver_init(const char* filename, int max_length) {
+int convolver_init(const char *filename, unsigned char channel_count,
+ double max_length_in_seconds, size_t block_size) {
+ debug(3, "convolver_init");
int success = 0;
- SF_INFO info;
+ SF_INFO info = {}; // Zero everything, including format
if (filename) {
- SNDFILE* file = sf_open(filename, SFM_READ, &info);
+ SNDFILE *file = sf_open(filename, SFM_READ, &info);
if (file) {
-
- if (info.samplerate == 44100) {
- if ((info.channels == 1) || (info.channels == 2)) {
- const size_t size = info.frames > max_length ? max_length : info.frames;
- float buffer[size*info.channels];
-
+ size_t max_length = (size_t)(max_length_in_seconds * info.samplerate);
+ const size_t size =
+ (unsigned int)info.frames > max_length ? max_length : (unsigned int)info.frames;
+ float *buffer = (float*)malloc(sizeof(float) * size * info.channels);
+ if (buffer != NULL) {
+ // float buffer[size * info.channels];
+ float *abuffer = (float*)malloc(sizeof(float) * size);
+ if (abuffer != NULL) {
size_t l = sf_readf_float(file, buffer, size);
if (l != 0) {
- pthread_mutex_lock(&convolver_lock);
- convolver_l.reset(); // it is possible that init could be called more than once
- convolver_r.reset(); // so it could be necessary to remove all previous settings
-
+ unsigned int cc;
if (info.channels == 1) {
- convolver_l.init(352, buffer, size);
- convolver_r.init(352, buffer, size);
- } else {
- // deinterleave
- float buffer_l[size];
- float buffer_r[size];
-
- unsigned int i;
- for (i=0; i<size; ++i)
- {
- buffer_l[i] = buffer[2*i+0];
- buffer_r[i] = buffer[2*i+1];
+ for (cc = 0; cc < channel_count; cc++) {
+ if (!pool.initConvolver(cc, block_size, buffer, size)) {
+ debug(1, "new convolver failed to initialize convolver %u ", cc);
+ }
+ }
+ } else if (info.channels == channel_count) {
+ // we have to deinterleave the ir file channels for each convolver
+ // float abuffer[size];
+ for (cc = 0; cc < channel_count; cc++) {
+ unsigned int i;
+ for (i = 0; i < size; ++i) {
+ abuffer[i] = buffer[channel_count * i + cc];
+ }
+ if (!pool.initConvolver(cc, block_size, abuffer, size)) {
+ debug(1, "new convolver failed to initialize convolver %u ", cc);
+ }
}
-
- convolver_l.init(352, buffer_l, size);
- convolver_r.init(352, buffer_r, size);
-
}
- pthread_mutex_unlock(&convolver_lock);
success = 1;
}
- debug(1, "IR initialized from \"%s\" with %d channels and %d samples", filename, info.channels, size);
+ debug(2,
+ "convolution impulse response filter initialized from \"%s\" with %d channel%s and "
+ "%d samples",
+ filename, info.channels, info.channels == 1 ? "" : "s", size);
+ sf_close(file);
+ free((void*)abuffer);
} else {
- warn("Impulse file \"%s\" contains %d channels. Only 1 or 2 is supported.", filename, info.channels);
+ debug(1, "failed to init convolvers because insufficient memory was available");
}
+ free((void*)buffer);
} else {
- warn("Impulse file \"%s\" sample rate is %d Hz. Only 44100 Hz is supported", filename, info.samplerate);
+ warn("failed to init convolvers because insufficient memory was available");
}
+ } else {
+ warn("Convolution impulse response filter file \"%s\" can not be opened. Please check that "
+ "it exists, is a valid sound file and has appropriate access permissions.",
+ filename);
+ }
+ }
+ return success;
+}
+
+void convolver_process(unsigned int channel, float *data, int length) {
+ pool.processAsync(channel, data, data, length);
+}
+
+void convolver_wait_for_all() { pool.waitForAll(); }
+
+void convolver_clear_state() {
+ pool.clearAllStates();
+}
+
+const unsigned int max_channels = 8;
+fftconvolver::FFTConvolver convolvers[max_channels];
+
+// fftconvolver::FFTConvolver convolver_l;
+// fftconvolver::FFTConvolver convolver_r;
+
+// always acquire this lock when accessing the shared convolver state
+/*
+
+pthread_mutex_t convolver_lock = PTHREAD_MUTEX_INITIALIZER;
+
+int convolver_init(const char *filename, unsigned char channel_count, double max_length_in_seconds,
+ size_t block_size) {
+ debug(1, "convolver_init");
+ int success = 0;
+ SF_INFO info;
+ if (filename) {
+ SNDFILE *file = sf_open(filename, SFM_READ, &info);
+ if (file) {
+ size_t max_length = (size_t)(max_length_in_seconds * info.samplerate);
+ const size_t size =
+ (unsigned int)info.frames > max_length ? max_length : (unsigned int)info.frames;
+ float buffer[size * info.channels];
+
+ size_t l = sf_readf_float(file, buffer, size);
+ if (l != 0) {
+ pthread_mutex_lock(&convolver_lock);
+
+ unsigned int cc;
+ for (cc = 0; cc < channel_count; cc++) {
+ convolvers[cc].reset();
+ }
+
+ if (info.channels == 1) {
+ for (cc = 0; cc < channel_count; cc++) {
+ convolvers[cc].init(block_size, buffer, size);
+ }
+ } else if (info.channels == channel_count) {
+ // we have to deinterleave the ir file channels for each convolver
+ for (cc = 0; cc < channel_count; cc++) {
+ float abuffer[size];
+ unsigned int i;
+ for (i = 0; i < size; ++i) {
+ abuffer[i] = buffer[channel_count * i + cc];
+ }
+ convolvers[cc].init(block_size, abuffer, size);
+ }
+ }
+ pthread_mutex_unlock(&convolver_lock);
+ success = 1;
+ }
+ debug(2,
+ "convolution impulse response filter initialized from \"%s\" with %d channel%s and "
+ "%d samples",
+ filename, info.channels, info.channels == 1 ? "" : "s", size);
sf_close(file);
+ } else {
+ warn("Convolution impulse response filter file \"%s\" can not be opened. Please check that "
+ "it exists, is a valid sound file and has appropriate access permissions.",
+ filename);
}
}
return success;
}
+void convolver_reset() {
+ debug(1, "convolver_reset");
+ pthread_mutex_lock(&convolver_lock);
+ unsigned int cc;
+ for (cc = 0; cc < max_channels; cc++) {
+ convolvers[cc].reset();
+ }
+ // convolver_l.reset(); // it is possible that init could be called more than once
+ // convolver_r.reset(); // so it could be necessary to remove all previous settings
+ pthread_mutex_unlock(&convolver_lock);
+}
+
+void convolver_clear_state() {
+ debug(1, "convolver_clear_state");
+ pthread_mutex_lock(&convolver_lock);
+ unsigned int cc;
+ for (cc = 0; cc < max_channels; cc++) {
+ convolvers[cc].clearState();
+ }
+ // convolver_l.reset(); // it is possible that init could be called more than once
+ // convolver_r.reset(); // so it could be necessary to remove all previous settings
+ pthread_mutex_unlock(&convolver_lock);
+}
+
+void convolver_process(unsigned int channel, float *data, int length) {
+ pthread_mutex_lock(&convolver_lock);
+ convolvers[channel].process(data, data, length);
+ pthread_mutex_unlock(&convolver_lock);
+ usleep(100);
+}
-void convolver_process_l(float* data, int length) {
+void convolver_process_l(float *data, int length) {
pthread_mutex_lock(&convolver_lock);
convolver_l.process(data, data, length);
pthread_mutex_unlock(&convolver_lock);
}
-void convolver_process_r(float* data, int length) {
+void convolver_process_r(float *data, int length) {
pthread_mutex_lock(&convolver_lock);
convolver_r.process(data, data, length);
pthread_mutex_unlock(&convolver_lock);
}
+*/
\ No newline at end of file
#ifdef __cplusplus
extern "C" {
#endif
+
+ #include <stddef.h>
-int convolver_init(const char* file, int max_length);
-void convolver_process_l(float* data, int length);
-void convolver_process_r(float* data, int length);
+// int convolver_init(const char* file, unsigned char channel_count, double max_length_in_seconds, size_t block_size);
+void convolver_reset();
+//void convolver_clear_state();
+// void convolver_process(unsigned int channel, float *data, int length);
+// void convolver_process_l(float* data, int length);
+// void convolver_process_r(float* data, int length);
+
+void convolver_pool_init(size_t numThreads, size_t numConvolvers);
+void convolver_pool_closedown();
+int convolver_init(const char* file, unsigned char channel_count, double max_length_in_seconds, size_t block_size);
+void convolver_process(unsigned int channel, float *data, int length);
+void convolver_clear_state();
+void convolver_wait_for_all();
#ifdef __cplusplus
}
// publish_raw = "no"; // Whether to publish all available metadata under the codes given in the 'metadata' docs.
publish_parsed = "yes"; // Whether to publish a small (but useful) subset of metadata under human-understandable topics.
publish_cover = "yes"; // Whether to publish the cover over MQTT in binary form. This may lead to a bit of load on the broker.
+// publish_retain = "no"; // Whether to set the retain flag on published MQTT messages. When enabled, the broker stores the last message for each topic so new subscribers receive the most recent value immediately.
// enable_remote = "no"; // Whether to remote control via MQTT. RC is available under `topic`/remote.
};
```
ARFLAGS = cr
-man_MANS = $(top_srcdir)/man/shairport-sync.1
-
-lib_pair_ap_a_CFLAGS = -Wall -g -DCONFIG_GCRYPT -pthread
+# To be able to ref to the user's own home directory:
+homedir = @HOME@
+
+# Note that xmlmantohtml isn't producing correct HTML.
+# Uncomment the if/else and SUBDIR lines in this stanza to make xmltoman build shairport-sync.1
+# if USE_XMLTOMAN
+# SUBDIRS = man
+# else
+ man_MANS = $(top_srcdir)/man/shairport-sync.1
+# endif
+
+lib_pair_ap_a_CFLAGS = -Wall -g -DCONFIG_GCRYPT -pthread
lib_tinyhttp_a_CFLAGS = -pthread
lib_dbus_interface_a_CFLAGS = -pthread
lib_mpris_interface_a_CFLAGS = -pthread
-
bin_PROGRAMS = shairport-sync
+# BUILT_SOURCES get built before anything else (?)
+
BUILT_SOURCES =
-noinst_HEADERS =
CLEANFILES =
shairport_sync_LDADD =
noinst_LIBRARIES =
# See below for the flags for the test client program
-shairport_sync_SOURCES = shairport.c rtsp.c mdns.c common.c rtp.c player.c alac.c audio.c loudness.c activity_monitor.c
+shairport_sync_SOURCES = shairport.c bonjour_strings.c rtsp.c mdns.c common.c rtp.c player.c audio.c loudness.c activity_monitor.c utilities/debug.c utilities/network_utilities.c
if BUILD_FOR_DARWIN
AM_CXXFLAGS = -I/usr/local/include -Wno-multichar -Wall -Wextra -Wno-deprecated-declarations -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
- AM_CFLAGS = -Wno-multichar -Wall -Wextra -Wno-deprecated-declarations -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
+ AM_CFLAGS = --include=utilities/debug.h -Wno-multichar -Wall -Wextra -Wno-deprecated-declarations -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
else
if BUILD_FOR_FREEBSD
AM_CXXFLAGS = -I/usr/local/include -Wno-multichar -Wall -Wextra -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
- AM_CFLAGS = -Wno-multichar -Wall -Wextra -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
+ AM_CFLAGS = --include=utilities/debug.h -Wno-multichar -Wall -Wextra -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
else
if BUILD_FOR_OPENBSD
AM_CXXFLAGS = -I/usr/local/include -Wno-multichar -Wall -Wextra -Wno-clobbered -Wno-psabi -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
- AM_CFLAGS = -Wno-multichar -Wall -Wextra -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
+ AM_CFLAGS = --include=utilities/debug.h -Wno-multichar -Wall -Wextra -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
else
- AM_CXXFLAGS = -fno-common -Wno-multichar -Wall -Wextra -Wno-clobbered -Wno-psabi -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
- AM_CFLAGS = -fno-common -Wno-multichar -Wall -Wextra -Wno-clobbered -Wno-psabi -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
- shairport_sync_SOURCES += scripts/shairport-sync.service scripts/shairport-sync.service-avahi scripts/shairport-sync
+ AM_CXXFLAGS = -Wshadow -fno-common -Wno-multichar -Wall -Wextra -Wformat -Wformat=2 -Wno-clobbered -Wno-psabi -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
+ AM_CFLAGS = --include=utilities/debug.h -Wshadow -fno-common -Wno-multichar -Wall -Wextra -Wformat -Wformat=2 -Wno-clobbered -Wno-psabi -pthread -DSYSCONFDIR=\"$(sysconfdir)\"
endif
endif
endif
-# include information generated by 'git describe --tags --dirty --broken' if requested
+# include version information from git if available
if USE_GIT_VERSION
-shairport_sync_SOURCES += gitversion.c
-gitversion.h: .git/index
- printf "// Do not edit!\n" > gitversion.h
- printf "// This file is automatically generated by 'git describe --tags --dirty --broken', if available.\n" >> gitversion.h
- printf " char git_version_string[] = \"" >> gitversion.h
- git describe --tags --dirty --broken | tr -d '[[:space:]]' >> gitversion.h
- printf "\";\n" >> gitversion.h
-gitversion.c: gitversion.h
- touch gitversion.c
-BUILT_SOURCES += gitversion.c gitversion.h
-noinst_HEADERS += $(BUILT_SOURCES)
-# Correctly clean the generated headers, but keep the xml description
-CLEANFILES += $(BUILT_SOURCES)
+## Check if the git version information has changed and rebuild gitversion.h if so
+.PHONY: gitversion-check
+gitversion-check:
+ $(top_srcdir)/verify-gitversion
+
+BUILT_SOURCES += gitversion-check
+CLEANFILES += gitversion-stamp gitversion.h
+endif
+
+if USE_HAMMERTON
+ shairport_sync_SOURCES += alac.c
endif
if USE_APPLE_ALAC
shairport_sync_SOURCES += audio_soundio.c
endif
-if USE_PA
+if USE_PULSEAUDIO
shairport_sync_SOURCES += audio_pa.c
endif
-if USE_PW
+if USE_PIPEWIRE
shairport_sync_SOURCES += audio_pw.c
endif
if USE_CONVOLUTION
-shairport_sync_SOURCES += FFTConvolver/AudioFFT.cpp FFTConvolver/FFTConvolver.cpp FFTConvolver/Utilities.cpp FFTConvolver/convolver.cpp
+shairport_sync_SOURCES += FFTConvolver/AudioFFT.cpp FFTConvolver/FFTConvolver.cpp FFTConvolver/Utilities.cpp FFTConvolver/convolver.cpp FFTConvolver/ConvolverThreadPool.cpp
AM_CXXFLAGS += -std=c++11
endif
endif
if USE_AIRPLAY_2
-shairport_sync_SOURCES += ptp-utilities.c plist_xml_strings.c
+shairport_sync_SOURCES += ap2_buffered_audio_processor.c ap2_event_receiver.c ap2_rc_event_receiver.c ptp-utilities.c utilities/buffered_read.c utilities/structured_buffer.c utilities/mod23.c plists/get_info_response.c
shairport_sync_LDADD += lib_pair_ap.a
lib_pair_ap_a_SOURCES = pair_ap/pair.c pair_ap/pair_fruit.c pair_ap/pair_homekit.c pair_ap/pair-tlv.c
noinst_LIBRARIES += lib_pair_ap.a
-plist_xml_strings.h: plists/get_info_response.xml
- printf "// Do not edit!\n" > plist_xml_strings.h
- printf "// This file is automatically generated from files in the plists folder.\n\n" >> plist_xml_strings.h
- xxd -i $(top_srcdir)/plists/get_info_response.xml - | sed -e 's/[^ ]*plists_get_info_response_xml/plists_get_info_response_xml/g' >> plist_xml_strings.h
- printf "\n" >> plist_xml_strings.h
-plist_xml_strings.c: plist_xml_strings.h
- touch plist_xml_strings.c
-BUILT_SOURCES += plist_xml_strings.c plist_xml_strings.h
-noinst_HEADERS += $(BUILT_SOURCES)
-# Correctly clean the generated headers, but keep the xml description
-CLEANFILES += $(BUILT_SOURCES)
+plists/get_info_response.h: plists/get_info_response.xml
+ sh $(top_srcdir)/xml_plist_codegen.sh $(top_srcdir)/plists/get_info_response.xml $(abs_builddir)/plists
+plists/get_info_response.c: plists/get_info_response.h
+ touch plists/get_info_response.c
+
+BUILT_SOURCES += plists/get_info_response.h plists/get_info_response.c
+CLEANFILES += plists/get_info_response.h plists/get_info_response.c
+
endif
if USE_DBUS
lib_dbus_interface_a_SOURCES = dbus-interface.c
shairport_sync_SOURCES += dbus-service.c
BUILT_SOURCES += dbus-interface.h dbus-interface.c
-# We don't want to install this header
-noinst_HEADERS += $(BUILT_SOURCES)
-# Correctly clean the generated headers, but keep the xml description
-CLEANFILES += $(BUILT_SOURCES)
-
-
+CLEANFILES += dbus-interface.h dbus-interface.c
dbus-interface.c: org.gnome.ShairportSync.xml
gdbus-codegen --interface-prefix org.gnome --generate-c-code dbus-interface $(top_srcdir)/org.gnome.ShairportSync.xml
lib_mpris_interface_a_SOURCES = mpris-interface.c
shairport_sync_SOURCES += mpris-service.c
BUILT_SOURCES += mpris-interface.h mpris-interface.c
-# We don't want to install this header
-noinst_HEADERS += $(BUILT_SOURCES)
-# Correctly clean the generated headers, but keep the xml description
-CLEANFILES += $(BUILT_SOURCES)
+CLEANFILES += mpris-interface.h mpris-interface.c
mpris-interface.c: org.mpris.MediaPlayer2.xml
gdbus-codegen --interface-prefix org.mpris --generate-c-code mpris-interface $(top_srcdir)/org.mpris.MediaPlayer2.xml
if USE_DBUS
-if INSTALL_CYGWIN_SERVICE
+if INSTALL_CYGWIN_STARTUP
DBUS_POLICY_FILE = scripts/shairport-sync-dbus-policy-cygwin.conf
else
DBUS_POLICY_FILE = scripts/shairport-sync-dbus-policy.conf
-endif # INSTALL_CYGWIN_SERVICE
+endif # INSTALL_CYGWIN_STARTUP
DBUS_POLICY_INSTALL_TARGET = dbus-policy-install-local
if USE_MPRIS
-if INSTALL_CYGWIN_SERVICE
+if INSTALL_CYGWIN_STARTUP
MPRIS_POLICY_FILE = scripts/shairport-sync-mpris-policy-cygwin.conf
else
MPRIS_POLICY_FILE = scripts/shairport-sync-mpris-policy.conf
-endif # INSTALL_CYGWIN_SERVICE
+endif # INSTALL_CYGWIN_STARTUP
MPRIS_POLICY_INSTALL_TARGET = mpris-policy-install-local
$(INSTALL_GROUP_TARGET):
getent group shairport-sync &>/dev/null || groupadd -r shairport-sync &>/dev/null
-if INSTALL_CREATE_USER_GROUP
INSTALL_USER_TARGET = install-user-local
-else
-INSTALL_USER_TARGET =
-endif
$(INSTALL_USER_TARGET): $(INSTALL_GROUP_TARGET)
- getent passwd shairport-sync &>/dev/null || useradd -r -M -g shairport-sync -s /usr/sbin/nologin -G audio shairport-sync &>/dev/null
+ getent passwd shairport-sync &> /dev/null || useradd -r -M -g shairport-sync -s /usr/sbin/nologin -G audio shairport-sync &>/dev/null
-if INSTALL_SYSTEMV
+if INSTALL_SYSTEMV_STARTUP
INSTALL_SYSTEMV_TARGET = install-systemv-local
install -d $(DESTDIR)$(sysconfdir)/init.d
[ -e $(DESTDIR)$(sysconfdir)/init.d/shairport-sync ] || install -m 0755 scripts/shairport-sync $(DESTDIR)$(sysconfdir)/init.d
-endif # INSTALL_SYSTEMV
+endif # INSTALL_SYSTEMV_STARTUP
-if INSTALL_SYSTEMD
+if INSTALL_SYSTEMD_STARTUP
-if USE_AVAHI
-SYSTEMD_SERVICE = shairport-sync.service-avahi
-else
SYSTEMD_SERVICE = shairport-sync.service
-endif # USE_AVAHI
INSTALL_SYSTEMD_TARGET = install-systemd-local
# will be stored in a scripts folder in the _build_ folder
# which will be the source folder if you're not using a separate build folder
+# Install a system service script, but don't replace an existing script
$(INSTALL_SYSTEMD_TARGET): scripts/$(SYSTEMD_SERVICE) $(INSTALL_USER_TARGET)
install -d $(DESTDIR)$(systemdsystemunitdir)
[ -e $(DESTDIR)$(systemdsystemunitdir)/shairport-sync.service ] || install -m 0644 scripts/$(SYSTEMD_SERVICE) $(DESTDIR)$(systemdsystemunitdir)/shairport-sync.service
-endif # INSTALL_SYSTEMD
+endif # INSTALL_SYSTEMD_STARTUP
-if INSTALL_FREEBSD_SERVICE
+if INSTALL_FREEBSD_STARTUP
# Choose a uid and gid of 801 completely arbitrarity, except that it should be below 1000. FreeBSD doesn't seem to allow you to say "an ID in the range of..."
install-freebsd-user-local:
install -d $(DESTDIR)/usr/local/etc/rc.d/
install -m 0555 $(top_srcdir)/scripts/shairport-sync.freebsd $(DESTDIR)/usr/local/etc/rc.d/shairport_sync
-endif # INSTALL_FREEBSD_SERVICE
+endif # INSTALL_FREEBSD_STARTUP
-if INSTALL_CYGWIN_SERVICE
+if INSTALL_CYGWIN_STARTUP
INSTALL_CYGWIN_TARGET = install-cygwin-local
install -d $(DESTDIR)/usr/local/bin
[ -e $(DESTDIR)/usr/local/bin/shairport-sync-config ] || install -m 0755 $(top_srcdir)/scripts/shairport-sync-config $(DESTDIR)/usr/local/bin/
-endif # INSTALL_CYGWIN_SERVICE
+endif # INSTALL_CYGWIN_STARTUP
install-config-files: $(CONFIG_FILE_INSTALL_TARGET) \
$(DBUS_POLICY_INSTALL_TARGET) \
$(MPRIS_POLICY_INSTALL_TARGET) \
$(INSTALL_SYSTEMV_TARGET) \
$(INSTALL_SYSTEMD_TARGET) \
+ $(INSTALL_SYSTEMD_USER_TARGET) \
$(INSTALL_FREEBSD_TARGET) \
$(INSTALL_CYGWIN_TARGET)
* Some advanced topics and developed in [ADVANCED TOPICS](https://github.com/mikebrady/shairport-sync/tree/master/ADVANCED%20TOPICS).
# Features
-* Outputs AirPlay audio to [ALSA](https://www.alsa-project.org/wiki/Main_Page), [sndio](http://www.sndio.org), [PulseAudio](https://www.freedesktop.org/wiki/Software/PulseAudio/), [Jack Audio](http://jackaudio.org), to a unix pipe or to `STDOUT`. It also has experimental support for [PipeWire](https://pipewire.org) and limited support for [libao](https://xiph.org/ao/) and for [libsoundio](http://libsound.io).
+* Outputs AirPlay audio to [ALSA](https://www.alsa-project.org/wiki/Main_Page), [sndio](http://www.sndio.org), [PipeWire](https://pipewire.org), [PulseAudio](https://www.freedesktop.org/wiki/Software/PulseAudio/), [Jack Audio](http://jackaudio.org), to a unix pipe or to `STDOUT`. It also has limited support for [libao](https://xiph.org/ao/).
* Metadata — Shairport Sync can deliver metadata supplied by the source, such as Album Name, Artist Name, Cover Art, etc. through a pipe or UDP socket to a recipient application program — see https://github.com/mikebrady/shairport-sync-metadata-reader for a sample recipient. Sources that supply metadata include iTunes and the Music app in macOS and iOS.
* An interface to [MQTT](https://en.wikipedia.org/wiki/MQTT), a popular protocol for Inter Process Communication, Machine-to-Machine, Internet of Things and Home Automation projects. The interface provides access to metadata and artwork, and has limited remote control.
* Digital Signal Processing facilities – please see the [DSP Wiki Page Guide](https://github.com/mikebrady/shairport-sync/wiki/Digital-Signal-Processing-with-Shairport-Sync). (Thanks to [Yann Pomarède](https://github.com/yannpom) for the code and to [Paul Wieland](https://github.com/PaulWieland) for the guide.)
* An [MPRIS](https://specifications.freedesktop.org/mpris-spec/2.2/)-like interface, partially complete and very functional, including access to metadata and artwork, and limited remote control.
* A native D-Bus interface, including access to metadata and artwork, limited remote control and system settings.
* Better Volume Control — Shairport Sync offers finer control at very top and very bottom of the volume range. See http://tangentsoft.net/audio/atten.html for a good discussion of audio "attenuators", upon which volume control in Shairport Sync is modelled. See also the diagram of the volume transfer function in the documents folder. In addition, Shairport Sync can offer an extended volume control range on devices with a restricted range.
-* Support for the [Apple ALAC decoder](https://macosforge.github.io/alac/) (library available [here](https://github.com/mikebrady/alac)).
-* Output bit depths of 8, 16, 24 and 32 bits, rather than the standard 16 bits.
-* Output frame rates of 44,100, 88,200, 176,000 or 352,000 frames per second.
+* Flexible output rates, formats and channels with built-in transcoding.
Some features require configuration at build time – see [CONFIGURATION FLAGS.md](CONFIGURATION%20FLAGS.md).
# Status
Shairport Sync was designed to [run best](ADVANCED%20TOPICS/GetTheBest.md) on stable, dedicated, stand-alone low-power "headless" systems with ALSA as the audio system and with a decent CD-quality Digital to Analog Converter (DAC).
-Shairport Sync runs on recent (2018 onwards) Linux systems, FreeBSD from 12.1 onwards and OpenBSD. It requires a system with the power of a Raspberry Pi 2 or a Pi Zero 2 or better.
+Shairport Sync runs on recent (2018 onwards) Linux systems, FreeBSD from 12.1 onwards and OpenBSD. It requires a system with the power of a Raspberry Pi B or better.
Classic Shairport Sync runs on a wider variety of Linux sytems, including OpenWrt and Cygwin and it also runs on OpenBSD. Many embedded devices are powerful enough to power classic Shairport Sync.
Shairport Sync is a substantial rewrite of the fantastic work done in Shairport 1.0 by James Wah (aka [abrasive](https://github.com/abrasive)), James Laird and others — please see [this list](https://github.com/abrasive/shairport/blob/master/README.md#contributors-to-version-1x) of the contributors to Shairport 1.x and Shairport 0.x. From a "heritage" point of view, Shairport Sync is a fork of Shairport 1.0.
-For the development of AirPlay 2 support, special thanks are due to:
+For the development of AirPlay 2 support in Version 4.x, special thanks are due to:
* [JD Smith](https://github.com/jdtsmith) for really thorough testing, support and encouragement.
* [ejurgensen](https://github.com/ejurgensen) for advice and [code to deal with pairing and encryption](https://github.com/ejurgensen/pair_ap).
* [ckdo](https://github.com/ckdo) for pointing the way, particularly with pairing and encryption protocols, with a [functional Python implementation](https://github.com/ckdo/airplay2-receiver) of AirPlay 2.
Thanks to everyone who has supported and improved Shairport Sync over the years.
# More about Shairport Sync
-The audio that Shairport Sync receives is sent to the computer's sound system, to a named unix pipe or to `STDOUT`. By far the best sound system to use is ALSA. This is because ALSA can give direct access to the Digital to Analog Converter (DAC) hardware of the machine. Audio samples can be sent through ALSA directly to the DAC, maximising fidelity, and accurate timing information can be obtained from the DAC, maximising synchronisation. Direct access to hardware is given through ALSA devices with names beginning with `hw:`.
+The audio that Shairport Sync receives is sent to the computer's sound system, to a named unix pipe or to `STDOUT`. By far the best sound system to use is ALSA. This is because ALSA can give direct access to the Digital to Analog Converter (DAC) hardware of the machine. Audio samples can be sent through ALSA directly to the DAC, maximising fidelity, and accurate timing information can be obtained from the DAC, maximising synchronisation.
## Synchronised Audio
-Shairport Sync offers *full audio synchronisation*. Full audio synchronisation means that audio is played on the output device at exactly the time specified by the audio source. To accomplish this, Shairport Sync needs access to audio systems – such as ALSA on Linux and `sndio` on FreeBSD and OpenBSD – that provide very accurate timing information about audio being streamed to output devices. Ideally, Shairport Sync should have direct access to the output device used, which should be a real sound card capable of working with 44,100, 88,200 or 176,400 samples per second, interleaved PCM stereo of 8, 16, 24 or 32 bits. Using the ALSA sound system, Shairport Sync will choose the greatest bit depth available at 44,100 samples per second, resorting to multiples of 44,100 if it is not available. You'll get a message in the log if there's a problem. With all other sound systems, a sample rate of 44,100 is chosen with a bit depth of 16 bit.
+Shairport Sync offers *full audio synchronisation*. Full audio synchronisation means that audio is played on the output device at exactly the time specified by the audio source. To accomplish this, Shairport Sync needs access to audio systems – such as ALSA on Linux and `sndio` on FreeBSD and OpenBSD – that provide very accurate timing information about audio being streamed to output devices. Ideally, Shairport Sync should have direct access to the output device used, which should be a real sound card capable of working with 44,100 or 48,000 samples ("frames") per second, interleaved PCM stereo of 8, 16, 24 or 32 bits. Shairport Sync will choose a suitable output rate, format and channel count. This can be done manually or automatically.
-Shairport Sync works well with PulseAudio, a widely used sound server found on many desktop Linuxes. While the timing information is not as accurate as that of ALSA or `sndio`, it is often impractical to remove or disable PulseAudio.
+Shairport Sync works well with sound servers such as PipeWire and PulseAudio, which are widely used on desktop Linux systems. While their timing information is not as accurate as that of ALSA or `sndio`, it is often impractical to bypass these systems.
For other use cases, Shairport Sync can provide synchronised audio output to a unix pipe or to `STDOUT`, or to audio systems that do not provide timing information. This could perhaps be described as *partial audio synchronisation*, where synchronised audio is provided by Shairport Sync, but what happens to it in the subsequent processing chain, before it reaches the listener's ear, is outside the control of Shairport Sync.
## Latency, "Stuffing", Timing
AirPlay protocols use an agreed *latency* – a time lag or delay – between the time represented by a sound sample's `timestamp` and the time it is actually played by the audio output device, typically a Digital to Analog Converter (DAC). Latency gives players time to compensate for network delays, processing time variations and so on. The latency is specified by the audio source when it negotiates with Shairport Sync. AirPlay sources set a latency of around 2.0 to 2.25 seconds. AirPlay 2 can use shorter latencies, around half a second.
-As mentioned previously, Shairport Sync implements full audio synchronisation when used with ALSA, `sndio` or PulseAudio audio systems. This is done by monitoring the timestamps present in data coming from the audio source and the timing information coming back from the audio system itself. To maintain the latency required for exact synchronisation, if the output device is running slow relative to the source, Shairport Sync will delete frames of audio to allow the device to keep up. If the output device is running fast, Shairport Sync will insert ("stuff") extra frames to keep time. The number of frames inserted or deleted is so small as to be almost inaudible on normal audio material. Frames are inserted or deleted as necessary at pseudorandom intervals. Alternatively, with `libsoxr` support, Shairport Sync can resample the audio feed to ensure the output device can keep up. This is less obtrusive than insertion and deletion but requires a good deal of processing power — most embedded devices probably can't support it. If your computer is fast enough, Shairport Sync will, by default, automatically choose this method.
+As mentioned previously, Shairport Sync implements full audio synchronisation when used with ALSA, `sndio`, PipeWire or PulseAudio audio systems. This is done by monitoring the timestamps present in data coming from the audio source and the timing information coming back from the audio system itself. To maintain the latency required for exact synchronisation, Shairport Sync will perform interpolation – effectively shortening or lengthening the stream of audio to exactly match the output rate to the input rate. If the output device is running too slow or too fast relative to the source, Shairport Sync will resample sequences of audio frames to add or remove frames as needed. Higher quality resampling can be achieved with `libsoxr` support, but this requires a good deal of processing power — most embedded devices probably can't support it. If your computer is fast enough, Shairport Sync will automatically choose this method.
-Stuffing is not done for partial audio synchronisation – the audio samples are simply presented at exactly the right time to the next stage in the processing chain.
+Interpolation is not done for partial audio synchronisation – the audio samples are simply presented at exactly the right time to the next stage in the processing chain.
Timestamps are referenced relative to the source computer's clock – the "source clock", but timing must be done relative to the clock of the computer running Shairport Sync – the "local clock". So, Shairport Sync synchronises the source clock and the local clock, usually to within a fraction of a millisecond. In AirPlay 2, this is done with the assistance of a companion application called [NQPTP](https://github.com/mikebrady/nqptp) using a [PTP](https://en.wikipedia.org/wiki/Precision_Time_Protocol)-based timing protocol. In classic AirPlay, a variant of [NTP](https://en.wikipedia.org/wiki/Network_Time_Protocol) synchronisation protocols is used.
*
*
* This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2019
+ * Copyright (c) Mike Brady 2019--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
rc = pthread_cond_init(&activity_monitor_cv, NULL);
if (rc)
- die("activity_monitor: error %d initialising activity_monitor_cv.");
+ die("activity_monitor: error %d initialising activity_monitor_cv.", rc);
pthread_cleanup_push(activity_thread_cleanup_handler, arg);
uint64_t sec;
void activity_monitor_start() {
  // Create the activity monitor thread (named "activity_mon") and flag it as running.
  // debug(1,"activity_monitor_start");
-  pthread_create(&activity_monitor_thread, NULL, activity_monitor_thread_code, NULL);
+  named_pthread_create(&activity_monitor_thread, NULL, activity_monitor_thread_code, NULL,
+                       "activity_mon");
  activity_monitor_running = 1;
}
/* read warm-up samples */
if (predictor_coef_num > 0) {
- int i;
- for (i = 0; i < predictor_coef_num; i++) {
+ int li;
+ for (li = 0; li < predictor_coef_num; li++) {
int32_t val;
- val = buffer_out[i] + error_buffer[i + 1];
+ val = buffer_out[li] + error_buffer[li + 1];
val = SIGN_EXTENDED32(val, readsamplesize);
- buffer_out[i + 1] = val;
+ buffer_out[li + 1] = val;
}
}
}
if (uncompressed_bytes) {
- int i;
- for (i = 0; i < outputsamples; i++) {
- alac->uncompressed_bytes_buffer_a[i] = readbits(alac, uncompressed_bytes * 8);
+ int li;
+ for (li = 0; li < outputsamples; li++) {
+ alac->uncompressed_bytes_buffer_a[li] = readbits(alac, uncompressed_bytes * 8);
}
}
/*********************/
if (uncompressed_bytes) { /* see mono case */
- int i;
- for (i = 0; i < outputsamples; i++) {
- alac->uncompressed_bytes_buffer_a[i] = readbits(alac, uncompressed_bytes * 8);
- alac->uncompressed_bytes_buffer_b[i] = readbits(alac, uncompressed_bytes * 8);
+ int li;
+ for (li = 0; li < outputsamples; li++) {
+ alac->uncompressed_bytes_buffer_a[li] = readbits(alac, uncompressed_bytes * 8);
+ alac->uncompressed_bytes_buffer_b[li] = readbits(alac, uncompressed_bytes * 8);
}
}
--- /dev/null
+/*
+ * AirPlay 2 Buffered Audio Processor. This file is part of Shairport Sync
+ * Copyright (c) Mike Brady 2025
+ * All rights reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#include "ap2_buffered_audio_processor.h"
+#include "common.h"
+#include "player.h"
+#include "rtp.h"
+#include "utilities/buffered_read.h"
+#include "utilities/mod23.h"
+#include <sodium.h>
+#include <stdint.h>
+
+#ifdef CONFIG_CONVOLUTION
+#include "FFTConvolver/convolver.h"
+#endif
+
+// Write the fixed 7-byte ADTS header into packet[0..6] for a raw AAC frame.
+// packetLen must already include the 7 header bytes; rate selects the sampling
+// frequency index; channel_configuration is the 3-bit ADTS channel config.
+void addADTStoPacket(uint8_t *packet, int packetLen, int rate, int channel_configuration) {
+  // https://stackoverflow.com/questions/18862715/how-to-generate-the-aac-adts-elementary-stream-with-android-mediacodec
+  // with thanks!
+
+  // See https://wiki.multimedia.cx/index.php/Understanding_AAC
+  // see also https://wiki.multimedia.cx/index.php/ADTS for the ADTS layout
+  // see https://wiki.multimedia.cx/index.php/MPEG-4_Audio#Sampling_Frequencies for sampling
+  // frequencies
+
+  /**
+   * Add ADTS header at the beginning of each and every AAC packet.
+   * This is needed as the packet is raw AAC data.
+   *
+   * Note the packetLen must count in the ADTS header itself.
+   **/
+
+  // profile 2 (AAC LC per the Understanding_AAC link above); the header's
+  // 2-bit profile field stores (profile - 1).
+  int profile = 2;
+  // freqIdx 4 = 44,100 Hz, 3 = 48,000 Hz; an unsupported rate is logged and
+  // falls back to the 44,100 Hz index (freqIdx stays 4).
+  int freqIdx = 4;
+  if (rate == 44100)
+    freqIdx = 4;
+  else if (rate == 48000)
+    freqIdx = 3;
+  else
+    debug(1, "Unsupported AAC sample rate %d.", rate);
+
+  // Channel Configuration
+  // https://wiki.multimedia.cx/index.php/MPEG-4_Audio#Channel_Configurations
+  // clang-format off
+  // 0: Defined in AOT Specifc Config
+  // 1: 1 channel: front-center
+  // 2: 2 channels: front-left, front-right
+  // 3: 3 channels: front-center, front-left, front-right
+  // 4: 4 channels: front-center, front-left, front-right, back-center
+  // 5: 5 channels: front-center, front-left, front-right, back-left, back-right
+  // 6: 6 channels: front-center, front-left, front-right, back-left, back-right, LFE-channel
+  // 7: 8 channels: front-center, front-left, front-right, side-left, side-right, back-left, back-right, LFE-channel
+  // 8-15: Reserved
+  // clang-format on
+
+  int chanCfg = channel_configuration; // CPE
+
+  // fill in ADTS data
+  // NOTE(review): 0xF9 in byte 1 selects the MPEG-2-style header variant with
+  // CRC absent -- presumably deliberate; confirm against the ADTS reference.
+  packet[0] = 0xFF;
+  packet[1] = 0xF9;
+  packet[2] = ((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2);
+  packet[3] = ((chanCfg & 3) << 6) + (packetLen >> 11);
+  packet[4] = (packetLen & 0x7FF) >> 3;
+  packet[5] = ((packetLen & 7) << 5) + 0x1F;
+  packet[6] = 0xFC;
+}
+
+// Thread cleanup handler for rtp_buffered_audio_processor: closes the buffered
+// audio TCP socket and records the closure in the connection record.
+// arg is the connection's rtsp_conn_info *. (The former
+// __attribute__((unused)) annotation on arg was stale -- arg is used -- and
+// has been removed.)
+void rtp_buffered_audio_cleanup_handler(void *arg) {
+  debug(2, "Buffered Audio Receiver Cleanup Start.");
+  rtsp_conn_info *conn = (rtsp_conn_info *)arg;
+  close(conn->buffered_audio_socket);
+  debug(3, "Connection %d: closing TCP Buffered Audio port: %u.", conn->connection_number,
+        conn->local_buffered_audio_port);
+  conn->buffered_audio_socket = 0;
+  debug(2, "Connection %d: rtp_buffered_audio_processor exit.", conn->connection_number);
+}
+
+/* AirPlay 2 buffered-audio receiver thread.
+ *
+ * Reads length-prefixed audio blocks from the buffered-audio TCP socket (via a
+ * helper buffered_tcp_reader thread), deciphers each block with the session
+ * key (ChaCha20-Poly1305 IETF), prepends an ADTS header to AAC payloads,
+ * honours immediate and deferred flush requests, and hands correctly-timed
+ * blocks to the player with player_put_packet(). arg is the connection's
+ * rtsp_conn_info *. The thread finishes when the socket closes or a read
+ * error occurs.
+ *
+ * Fixes in this revision:
+ *  - restored the '&' address-of operators in the get_ptp_anchor_local_time_info()
+ *    call, which had been corrupted by HTML-entity mangling ("&curren" -> U+00A4);
+ *  - the "deferred flush cancelled" diagnostic used an assignment
+ *    (".active = 0", always false) instead of a comparison, so it could never
+ *    be emitted; now tests ".active != 0".
+ */
+void *rtp_buffered_audio_processor(void *arg) {
+  rtsp_conn_info *conn = (rtsp_conn_info *)arg;
+  // #include <syscall.h>
+  // debug(1, "Connection %d: rtp_buffered_audio_processor PID %d start", conn->connection_number,
+  // syscall(SYS_gettid));
+  conn->incoming_ssrc = 0; // reset
+  conn->resampler_ssrc = 0;
+
+  // turn off all flush requests that might have been pending in the connection. Not sure if this is
+  // right...
+  unsigned int fr = 0;
+  for (fr = 0; fr < MAX_DEFERRED_FLUSH_REQUESTS; fr++) {
+    conn->ap2_deferred_flush_requests[fr].inUse = 0;
+    conn->ap2_deferred_flush_requests[fr].active = 0;
+  }
+  conn->ap2_immediate_flush_requested = 0;
+
+  pthread_cleanup_push(rtp_buffered_audio_cleanup_handler, arg);
+
+  // NOTE(review): the allocation failures below are only logged at debug level;
+  // the subsequent memset/use would dereference NULL -- consider die() instead.
+  pthread_t *buffered_reader_thread = malloc(sizeof(pthread_t));
+  if (buffered_reader_thread == NULL)
+    debug(1, "cannot allocate a buffered_reader_thread!");
+  memset(buffered_reader_thread, 0, sizeof(pthread_t));
+  pthread_cleanup_push(malloc_cleanup, &buffered_reader_thread);
+
+  buffered_tcp_desc *buffered_audio = malloc(sizeof(buffered_tcp_desc));
+  if (buffered_audio == NULL)
+    debug(1, "cannot allocate a buffered_tcp_desc!");
+  // initialise the buffered tcp descriptor
+
+  memset(buffered_audio, 0, sizeof(buffered_tcp_desc));
+  pthread_cleanup_push(malloc_cleanup, &buffered_audio);
+
+  if (pthread_mutex_init(&buffered_audio->mutex, NULL))
+    debug(1, "Connection %d: error %d initialising buffered_audio mutex.", conn->connection_number,
+          errno);
+  pthread_cleanup_push(mutex_cleanup, &buffered_audio->mutex);
+
+  if (pthread_cond_init(&buffered_audio->not_empty_cv, NULL))
+    die("Connection %d: error %d initialising not_empty cv.", conn->connection_number, errno);
+  pthread_cleanup_push(cv_cleanup, &buffered_audio->not_empty_cv);
+
+  if (pthread_cond_init(&buffered_audio->not_full_cv, NULL))
+    die("Connection %d: error %d initialising not_full cv.", conn->connection_number, errno);
+  pthread_cleanup_push(cv_cleanup, &buffered_audio->not_full_cv);
+
+  // initialise the buffer data structure
+  buffered_audio->buffer_max_size = conn->ap2_audio_buffer_size;
+  buffered_audio->buffer = malloc(conn->ap2_audio_buffer_size);
+  if (buffered_audio->buffer == NULL)
+    debug(1, "cannot allocate an audio buffer of %zu bytes!", buffered_audio->buffer_max_size);
+  pthread_cleanup_push(malloc_cleanup, &buffered_audio->buffer);
+
+  // pthread_mutex_lock(&conn->buffered_audio_mutex);
+  buffered_audio->toq = buffered_audio->buffer;
+  buffered_audio->eoq = buffered_audio->buffer;
+
+  buffered_audio->sock_fd = conn->buffered_audio_socket;
+
+  named_pthread_create(buffered_reader_thread, NULL, &buffered_tcp_reader, buffered_audio,
+                       "ap2_buf_rdr_%d", conn->connection_number);
+  pthread_cleanup_push(thread_cleanup, buffered_reader_thread);
+
+  const size_t leading_free_space_length =
+      256; // leave this many bytes free to make room for prefixes that might be added later
+  uint8_t packet[32 * 1024];
+  unsigned char m[32 * 1024 + leading_free_space_length];
+
+  unsigned char *payload_pointer = NULL;
+  unsigned long long payload_length = 0;
+  uint32_t payload_ssrc =
+      SSRC_NONE; // this is the SSRC of the payload, needed to decide if it should be muted
+  uint32_t previous_ssrc = SSRC_NONE;
+
+  uint32_t seq_no =
+      0; // audio packet number. Initialised to avoid a "possibly uninitialised" warning.
+  uint32_t previous_seqno = 0;
+  uint16_t sequence_number_for_player = 0;
+
+  uint32_t timestamp = 0; // initialised to avoid a "possibly uninitialised" warning.
+  uint32_t previous_timestamp = 0;
+
+  uint32_t expected_timestamp = 0;
+  uint64_t previous_buffer_should_be_time = 0;
+
+  ssize_t nread;
+
+  int new_audio_block_needed = 0; // goes true when a block is needed, false one is read in, but
+                                  // will be made true by flushing or by playing the block
+  int finished = 0;
+
+  uint64_t blocks_read_since_play_began = 0;
+  uint64_t blocks_read = 0;
+
+  int ap2_immediate_flush_requested = 0; // for diagnostics, probably
+
+  uint32_t first_timestamp_in_this_sequence = 0;
+  int packets_played_in_this_sequence = 0;
+
+  int play_enabled = 0;
+  // double requested_lead_time = 0.0; // normal lead time minimum -- maybe it should be about 0.1
+
+  // wait until our timing information is valid
+  while (have_ptp_timing_information(conn) == 0)
+    usleep(1000);
+
+  reset_buffer(conn); // in case there is any garbage in the player
+
+  // main loop: track play state, read a block when needed, apply flushes, then
+  // decipher the block and hand it to the player
+  do {
+
+    if ((play_enabled == 0) && (conn->ap2_play_enabled != 0)) {
+      // play newly started
+      debug(2, "Play started.");
+      new_audio_block_needed = 1;
+      blocks_read_since_play_began = 0;
+    }
+
+    if ((play_enabled != 0) && (conn->ap2_play_enabled == 0)) {
+      debug(2, "Play stopped.");
+      packets_played_in_this_sequence = 0; // not all blocks read are played...
+#ifdef CONFIG_CONVOLUTION
+      convolver_clear_state();
+#endif
+      reset_buffer(conn); // stop play ASAP
+    }
+
+    play_enabled = conn->ap2_play_enabled;
+
+    // now, if get_next_block is non-zero, read a block. We may flush or use it
+
+    if (new_audio_block_needed != 0) {
+      // a block is preceded by its length in a uint16_t
+      uint16_t data_len;
+      // here we read from the buffer that our thread has been reading
+
+      size_t bytes_remaining_in_buffer;
+      nread =
+          read_sized_block(buffered_audio, &data_len, sizeof(data_len), &bytes_remaining_in_buffer);
+      data_len = ntohs(data_len);
+
+      // diagnostic
+      if ((conn->ap2_audio_buffer_minimum_size < 0) ||
+          (bytes_remaining_in_buffer < (size_t)conn->ap2_audio_buffer_minimum_size))
+        conn->ap2_audio_buffer_minimum_size = bytes_remaining_in_buffer;
+
+      if (nread > 0) {
+        // get the block itself
+        // debug(1,"buffered audio packet of size %u detected.", data_len - 2);
+        nread = read_sized_block(buffered_audio, packet, data_len - 2, &bytes_remaining_in_buffer);
+
+        // diagnostic
+        if ((conn->ap2_audio_buffer_minimum_size < 0) ||
+            (bytes_remaining_in_buffer < (size_t)conn->ap2_audio_buffer_minimum_size))
+          conn->ap2_audio_buffer_minimum_size = bytes_remaining_in_buffer;
+        // debug(1, "buffered audio packet of size %u received.", nread);
+
+        if (nread > 0) {
+          // got the block
+          blocks_read++; // note, this doesn't mean they are valid audio blocks
+          blocks_read_since_play_began++; // 1 means previous seq_no and timestamps are invalid
+
+          // get the sequence number
+          // see https://en.wikipedia.org/wiki/Real-time_Transport_Protocol#Packet_header
+          // the Marker bit is always set, and it and the remaining 23 bits form the sequence number
+
+          previous_seqno = seq_no;
+          seq_no = nctohl(&packet[0]) & 0x7FFFFF;
+
+          previous_timestamp = timestamp;
+          timestamp = nctohl(&packet[4]);
+
+          if (payload_ssrc != SSRC_NONE)
+            previous_ssrc = payload_ssrc;
+          payload_ssrc = nctohl(&packet[8]);
+
+          if ((payload_ssrc != previous_ssrc) && (payload_ssrc != SSRC_NONE)) {
+            if (ssrc_is_recognised(payload_ssrc) == 0) {
+              debug(2, "Unrecognised SSRC: %u.", payload_ssrc);
+            } else {
+              debug(2, "Connection %d: incoming audio encoding is%s \"%s\".",
+                    conn->connection_number, previous_ssrc == SSRC_NONE ? "" : " switching to",
+                    get_ssrc_name(payload_ssrc));
+            }
+          }
+
+          // NOTE(review): this can log "Unrecognised SSRC" a second time for the
+          // same block when the SSRC has just changed -- possibly intentional.
+          if ((payload_ssrc != previous_ssrc) && (ssrc_is_recognised(payload_ssrc) == 0)) {
+            debug(2, "Unrecognised SSRC: %u.", payload_ssrc);
+          }
+
+          if (blocks_read_since_play_began == 1) {
+            debug(2, "Preparing initial decoding chain for %s.", get_ssrc_name(payload_ssrc));
+            prepare_decoding_chain(conn, payload_ssrc); // needed to set the input rate...
+            sequence_number_for_player =
+                seq_no & 0xffff; // this is arbitrary -- the sequence_number_for_player numbers will
+                                 // be sequential irrespective of seq_no jumps...
+          }
+
+          if (blocks_read_since_play_began > 1) {
+
+            uint32_t t_expected_seqno = (previous_seqno + 1) & 0x7fffff;
+            if (t_expected_seqno != seq_no) {
+              debug(2,
+                    "reading block %u, the sequence number differs from the expected sequence "
+                    "number %u. The previous sequence number was %u",
+                    seq_no, t_expected_seqno, previous_seqno);
+            }
+            uint32_t t_expected_timestamp =
+                previous_timestamp + get_ssrc_block_length(previous_ssrc);
+            int32_t diff = timestamp - t_expected_timestamp;
+            if (diff != 0) {
+              debug(2, "reading block %u, the timestamp %u differs from expected_timestamp %u.",
+                    seq_no, timestamp, t_expected_timestamp);
+            }
+          }
+          new_audio_block_needed = 0; // block has been read.
+        }
+      }
+
+      if (nread == 0) {
+        // nread is 0 -- the port has been closed
+        debug(2, "Connection %d: buffered audio port closed!", conn->connection_number);
+        finished = 1;
+      } else if (nread < 0) {
+        char errorstring[1024];
+        strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+        debug(1, "error in rtp_buffered_audio_processor %d: \"%s\". Could not recv a data_len .",
+              errno, errorstring);
+        finished = 1;
+      }
+    }
+
+    if (finished == 0) {
+      pthread_cleanup_debug_mutex_lock(&conn->flush_mutex, 25000,
+                                       1); // 25 ms is a long time to wait!
+      if (blocks_read != 0) {
+        if (conn->ap2_immediate_flush_requested != 0) {
+          if (ap2_immediate_flush_requested == 0) {
+            debug(2, "immediate flush started at sequence number %u until sequence number of %u.",
+                  seq_no, conn->ap2_immediate_flush_until_sequence_number);
+          }
+          if ((blocks_read != 0) &&
+              (a_minus_b_mod23(seq_no, conn->ap2_immediate_flush_until_sequence_number) > 0)) {
+            debug(1,
+                  "immediate flush may have escaped its endpoint! Seq_no is %u, "
+                  "conn->ap2_immediate_flush_until_sequence_number is %u.",
+                  seq_no, conn->ap2_immediate_flush_until_sequence_number);
+          }
+
+          if ((blocks_read != 0) &&
+              (a_minus_b_mod23(seq_no, conn->ap2_immediate_flush_until_sequence_number) >= 0)) {
+            debug(2,
+                  "immediate flush completed at seq_no: %u, "
+                  "conn->ap2_immediate_flush_until_sequence_number: %u.",
+                  seq_no, conn->ap2_immediate_flush_until_sequence_number);
+
+            conn->ap2_immediate_flush_requested = 0;
+            ap2_immediate_flush_requested = 0;
+
+            // turn off all deferred requests. Not sure if this is right...
+            unsigned int f = 0;
+            for (f = 0; f < MAX_DEFERRED_FLUSH_REQUESTS; f++) {
+              // fix: this condition previously used ".active = 0" (assignment,
+              // always false), so the cancellation message could never appear
+              if ((conn->ap2_deferred_flush_requests[f].inUse != 0) &&
+                  (conn->ap2_deferred_flush_requests[f].active != 0)) {
+                debug(1,
+                      "deferred flush cancelled by an immediate flush: flushFromTS: %12u, flushFromSeq: %12u, "
+                      "flushUntilTS: %12u, flushUntilSeq: %12u, timestamp: %12u.",
+                      conn->ap2_deferred_flush_requests[f].flushFromTS,
+                      conn->ap2_deferred_flush_requests[f].flushFromSeq,
+                      conn->ap2_deferred_flush_requests[f].flushUntilTS,
+                      conn->ap2_deferred_flush_requests[f].flushUntilSeq, timestamp);
+              }
+              conn->ap2_deferred_flush_requests[f].inUse = 0;
+              conn->ap2_deferred_flush_requests[f].active = 0;
+            }
+
+          } else {
+            debug(3, "immediate flush of block %u until block %u", seq_no,
+                  conn->ap2_immediate_flush_until_sequence_number);
+            ap2_immediate_flush_requested = 1;
+            new_audio_block_needed = 1; //
+          }
+        }
+      }
+
+      // now, even if an immediate flush has been requested and is active, we still need to process
+      // deferred flush requests as they may refer to sequences that are going to be purged anyway
+
+      unsigned int f = 0;
+      for (f = 0; f < MAX_DEFERRED_FLUSH_REQUESTS; f++) {
+        if (conn->ap2_deferred_flush_requests[f].inUse != 0) {
+          if ((conn->ap2_deferred_flush_requests[f].flushFromSeq == seq_no) &&
+              (conn->ap2_deferred_flush_requests[f].flushUntilSeq != seq_no)) {
+            debug(2,
+                  "deferred flush activated: flushFromTS: %12u, flushFromSeq: %12u, "
+                  "flushUntilTS: %12u, flushUntilSeq: %12u, timestamp: %12u.",
+                  conn->ap2_deferred_flush_requests[f].flushFromTS,
+                  conn->ap2_deferred_flush_requests[f].flushFromSeq,
+                  conn->ap2_deferred_flush_requests[f].flushUntilTS,
+                  conn->ap2_deferred_flush_requests[f].flushUntilSeq, timestamp);
+            conn->ap2_deferred_flush_requests[f].active = 1;
+            new_audio_block_needed = 1;
+          }
+          if (conn->ap2_deferred_flush_requests[f].flushUntilSeq == seq_no) {
+            debug(2,
+                  "deferred flush terminated: flushFromTS: %12u, flushFromSeq: %12u, "
+                  "flushUntilTS: %12u, flushUntilSeq: %12u, timestamp: %12u.",
+                  conn->ap2_deferred_flush_requests[f].flushFromTS,
+                  conn->ap2_deferred_flush_requests[f].flushFromSeq,
+                  conn->ap2_deferred_flush_requests[f].flushUntilTS,
+                  conn->ap2_deferred_flush_requests[f].flushUntilSeq, timestamp);
+            conn->ap2_deferred_flush_requests[f].active = 0;
+            conn->ap2_deferred_flush_requests[f].inUse = 0;
+          } else if (a_minus_b_mod23(seq_no, conn->ap2_deferred_flush_requests[f].flushUntilSeq) >
+                     0) {
+            // now, do a modulo 2^23 unsigned int calculation to see if we may have overshot the
+            // flushUntilSeq
+            debug(2,
+                  "deferred flush terminated due to overshoot at block %u: flushFromTS: %12u, "
+                  "flushFromSeq: %12u, "
+                  "flushUntilTS: %12u, flushUntilSeq: %12u, timestamp: %12u.",
+                  seq_no, conn->ap2_deferred_flush_requests[f].flushFromTS,
+                  conn->ap2_deferred_flush_requests[f].flushFromSeq,
+                  conn->ap2_deferred_flush_requests[f].flushUntilTS,
+                  conn->ap2_deferred_flush_requests[f].flushUntilSeq, timestamp);
+            conn->ap2_deferred_flush_requests[f].active = 0;
+            conn->ap2_deferred_flush_requests[f].inUse = 0;
+            debug(2, "immediate flush was %s.", ap2_immediate_flush_requested == 0 ? "off" : "on");
+          } else if (conn->ap2_deferred_flush_requests[f].active != 0) {
+            new_audio_block_needed = 1;
+            debug(3,
+                  "deferred flush of block: %u. flushFromTS: %12u, flushFromSeq: %12u, "
+                  "flushUntilTS: %12u, flushUntilSeq: %12u, timestamp: %12u.",
+                  seq_no, conn->ap2_deferred_flush_requests[f].flushFromTS,
+                  conn->ap2_deferred_flush_requests[f].flushFromSeq,
+                  conn->ap2_deferred_flush_requests[f].flushUntilTS,
+                  conn->ap2_deferred_flush_requests[f].flushUntilSeq, timestamp);
+          }
+        }
+      }
+      pthread_cleanup_pop(1); // the mutex
+
+      // now, if the block is not invalidated by the flush code, see if we need
+      // to decode it and pass it to the player
+      if (new_audio_block_needed == 0) {
+        // is there space in the player thread's buffer system?
+        size_t player_buffer_occupancy = get_audio_buffer_occupancy(conn);
+        // debug(1,"player buffer size and occupancy: %u and %u", player_buffer_size,
+        // player_buffer_occupancy);
+
+        // If we are playing and there is room in the player buffer, go ahead and decode the block
+        // and send it to the player. Otherwise, keep the block and sleep for a while.
+        if ((play_enabled != 0) &&
+            (((1.0 * player_buffer_occupancy * conn->frames_per_packet) / conn->input_rate) <=
+             config.audio_decoded_buffer_desired_length)) {
+          uint64_t buffer_should_be_time;
+          frame_to_local_time(timestamp, &buffer_should_be_time, conn);
+
+          // try to identify blocks that are timed to before the last buffer, and drop 'em
+          int64_t time_from_last_buffer_time =
+              buffer_should_be_time - previous_buffer_should_be_time;
+
+          if ((packets_played_in_this_sequence == 0) || (time_from_last_buffer_time > 0)) {
+            int64_t lead_time = buffer_should_be_time - get_absolute_time_in_ns();
+            payload_length = 0;
+            if (ssrc_is_recognised(payload_ssrc) != 0) {
+              // prepare_decoding_chain(conn, payload_ssrc);
+              unsigned long long new_payload_length = 0;
+              payload_pointer = m + leading_free_space_length;
+              if ((lead_time < (int64_t)30000000000L) &&
+                  (lead_time >= 0)) { // only decipher the packet if it's not too late or too early
+                int response = -1; // guess that there is a problem
+                if (conn->session_key != NULL) {
+                  unsigned char nonce[12];
+                  memset(nonce, 0, sizeof(nonce));
+                  memcpy(
+                      nonce + 4, packet + nread - 8,
+                      8); // front-pad the 8-byte nonce received to get the 12-byte nonce expected
+
+                  // https://libsodium.gitbook.io/doc/secret-key_cryptography/aead/chacha20-poly1305/ietf_chacha20-poly1305_construction
+                  // Note: the eight-byte nonce must be front-padded out to 12 bytes.
+
+                  // Leave leading_free_space_length bytes at the start for possible headers like an
+                  // ADTS header (7 bytes)
+                  memset(m, 0, leading_free_space_length);
+                  response = crypto_aead_chacha20poly1305_ietf_decrypt(
+                      payload_pointer,     // where the decrypted payload will start
+                      &new_payload_length, // mlen_p
+                      NULL,                // nsec,
+                      packet +
+                          12, // the ciphertext starts 12 bytes in and is followed by the MAC tag,
+                      nread - (8 + 12), // clen -- the last 8 bytes are the nonce
+                      packet + 4,       // authenticated additional data
+                      8,                // authenticated additional data length
+                      nonce,
+                      conn->session_key); // *k
+                  if (response != 0)
+                    debug(1, "Error decrypting audio packet %u -- packet length %zd.", seq_no,
+                          nread);
+                } else {
+                  debug(2, "No session key, so the audio packet can not be deciphered -- skipped.");
+                }
+
+                if ((response == 0) && (new_payload_length > 0)) {
+                  // now we have the deciphered block, so send it to the player if we can
+                  payload_length = new_payload_length;
+
+                  if (ssrc_is_aac(payload_ssrc)) {
+                    payload_pointer =
+                        payload_pointer - 7; // including the 7-byte leader for the ADTS
+                    payload_length = payload_length + 7;
+
+                    // now, fill in the 7-byte ADTS information, which seems to be needed by the
+                    // decoder we made room for it in the front of the buffer by filling from m + 7.
+                    int channelConfiguration = 2; // 2: 2 channels: front-left, front-right
+                    if (payload_ssrc == AAC_48000_F24_5P1)
+                      channelConfiguration = 6; // 6: 6 channels: front-center, front-left,
+                                                // front-right, back-left, back-right, LFE-channel
+                    else if (payload_ssrc == AAC_48000_F24_7P1)
+                      channelConfiguration =
+                          7; // 7: 8 channels: front-center, front-left, front-right,
+                             // side-left, side-right, back-left, back-right, LFE-channel
+                    addADTStoPacket(payload_pointer, payload_length, conn->input_rate,
+                                    channelConfiguration);
+                  }
+                  int mute =
+                      ((packets_played_in_this_sequence == 0) && (ssrc_is_aac(payload_ssrc)));
+                  if (mute) {
+                    debug(2, "Connection %d: muting first AAC block -- block %u -- timestamp %u.",
+                          conn->connection_number, seq_no, timestamp);
+                  }
+                  int32_t timestamp_difference = 0;
+                  if (packets_played_in_this_sequence == 0) {
+                    // first_block_in_this_sequence = seq_no;
+                    first_timestamp_in_this_sequence = timestamp;
+                    debug(2,
+                          "Connection %d: "
+                          "first block %u, first timestamp %u.",
+                          conn->connection_number, seq_no, timestamp);
+                  } else {
+                    timestamp_difference = timestamp - expected_timestamp;
+                    if (timestamp_difference != 0) {
+                      debug(2,
+                            "Connection %d: "
+                            "unexpected timestamp in block %u. Actual: %u, expected: %u "
+                            "difference: %d, "
+                            "%f ms. "
+                            "Positive means later, i.e. a gap. First timestamp was %u, payload "
+                            "type: \"%s\".",
+                            conn->connection_number, seq_no, timestamp, expected_timestamp,
+                            timestamp_difference, 1000.0 * timestamp_difference / conn->input_rate,
+                            first_timestamp_in_this_sequence, get_ssrc_name(payload_ssrc));
+                      // mute the first packet after a discontinuity
+                      if (ssrc_is_aac(payload_ssrc)) {
+                        debug(2,
+                              "Connection %d: muting first AAC block -- block %u -- following a "
+                              "timestamp discontinuity, timestamp %u.",
+                              conn->connection_number, seq_no, timestamp);
+                        mute = 1;
+                      }
+                    }
+                  }
+                  int skip_this_block = 0;
+                  if (timestamp_difference < 0) {
+
+                    // uncomment this to work back to replace buffers that have been already decoded
+                    // and placed in the player queue with the incoming new buffers this is a bit
+                    // trickier, but maybe the new buffers are better than the previous ones they
+                    // will replace (?)
+                    /*
+                    seq_t revised_seqno = get_revised_seqno(conn, timestamp);
+                    if (revised_seqno != sequence_number_for_player) {
+                      debug(1, "revised seqno calculated: conn->ab_read: %u, revised_seqno: %u,
+                    conn->ab_write: %u.", conn->ab_read, revised_seqno, conn->ab_write);
+                      clear_buffers_from(conn, revised_seqno);
+                      sequence_number_for_player = revised_seqno;
+                      timestamp_difference = 0;
+                    }
+                    */
+
+                    // uncomment this to drop incoming new buffers that are too old and for whose
+                    // timings buffers have already been decoded and placed in the player queue this
+                    // is easier, but maybe the new late buffers are better than the previous ones
+                    // (?)
+
+                    int32_t abs_timestamp_difference = -timestamp_difference;
+                    if ((size_t)abs_timestamp_difference > get_ssrc_block_length(payload_ssrc)) {
+                      skip_this_block = 1;
+                      debug(2,
+                            "skipping block %u because it is too old. Timestamp "
+                            "difference: %d, length of block: %zu.",
+                            seq_no, timestamp_difference, get_ssrc_block_length(payload_ssrc));
+                    }
+                  }
+                  if (skip_this_block == 0) {
+                    uint32_t packet_size = player_put_packet(
+                        payload_ssrc, sequence_number_for_player, timestamp, payload_pointer,
+                        payload_length, mute, timestamp_difference, conn);
+                    debug(3, "block %u, timestamp %u, length %u sent to the player.", seq_no,
+                          timestamp, packet_size);
+                    sequence_number_for_player++; // simply increment
+                    expected_timestamp = timestamp + packet_size; // for the next time
+                    packets_played_in_this_sequence++;
+                  }
+                }
+              } else {
+                debug(3,
+                      "skipped deciphering block %u with timestamp %u because its lead time is "
+                      "out of range at %f "
+                      "seconds.",
+                      seq_no, timestamp, lead_time * 1.0E-9);
+                uint32_t currentAnchorRTP = 0;
+                uint64_t currentAnchorLocalTime = 0;
+                // fix: the '&' operators below had been corrupted into U+00A4
+                // by HTML-entity mangling of "&curren..."
+                if (get_ptp_anchor_local_time_info(conn, &currentAnchorRTP,
+                                                   &currentAnchorLocalTime) == clock_ok) {
+                  debug(3, "anchorRTP: %u, anchorLocalTime: %" PRIu64 ".", currentAnchorRTP,
+                        currentAnchorLocalTime);
+                } else {
+                  debug(3, "Clock not okay");
+                }
+              }
+            } else {
+              debug(2, "Unrecognised or invalid ssrc: %s.", get_ssrc_name(payload_ssrc));
+            }
+          } else {
+            debug(1, "dropping buffer that should have played before the last actually played.");
+          }
+          new_audio_block_needed = 1; // the block has been used up and is no longer current
+        } else {
+          usleep(20000); // wait for a while
+        }
+      }
+    }
+  } while (finished == 0);
+  // debug(1, "Connection %d: rtp_buffered_audio_processor PID %d exiting", conn->connection_number,
+  // syscall(SYS_gettid));
+  pthread_cleanup_pop(1); // buffered_tcp_reader thread creation
+  pthread_cleanup_pop(1); // buffer malloc
+  pthread_cleanup_pop(1); // not_full_cv
+  pthread_cleanup_pop(1); // not_empty_cv
+  pthread_cleanup_pop(1); // mutex
+  pthread_cleanup_pop(1); // descriptor malloc
+  pthread_cleanup_pop(1); // pthread_t malloc
+  pthread_cleanup_pop(1); // do the cleanup.
+  // debug(1, "Connection %d: rtp_buffered_audio_processor PID %d finish", conn->connection_number,
+  // syscall(SYS_gettid));
+  pthread_exit(NULL);
+}
--- /dev/null
+// Public declaration for the AirPlay 2 buffered-audio processor thread.
+#ifndef _AP2_BUFFERED_AUDIO_PROCESSOR_H
+#define _AP2_BUFFERED_AUDIO_PROCESSOR_H
+
+// pthread start routine; see the matching .c file for the definition.
+// NOTE(review): presumably arg is a pointer to the connection's
+// rtsp_conn_info, as with the other receiver threads -- confirm.
+void *rtp_buffered_audio_processor(void *arg);
+
+#endif // _AP2_BUFFERED_AUDIO_PROCESSOR_H
--- /dev/null
+/*
+ * Apple AirPlay 2 Event Receiver. This file is part of Shairport Sync.
+ * Copyright (c) Mike Brady 2014--2025
+ * All rights reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#include "ap2_event_receiver.h"
+#include "bonjour_strings.h"
+#include "common.h"
+#include "player.h"
+#include "ptp-utilities.h"
+#include "rtsp.h"
+#include "utilities/network_utilities.h"
+#include "utilities/structured_buffer.h"
+
+// pthread cleanup handler for the ap2_event_receiver thread.
+// arg is the connection's rtsp_conn_info.
+// Frees the per-connection allocations (airplay_gid, dacp_active_remote,
+// ap2_client_name), clears groupContainsGroupLeader and, when metadata
+// support is compiled in, sends a 'disc' (disconnect) metadata notification
+// carrying the client's IP address.
+void ap2_event_receiver_cleanup_handler(void *arg) {
+  rtsp_conn_info *conn = (rtsp_conn_info *)arg;
+  // debug(1, "Connection %d: AP2 Event Receiver Cleanup start.", conn->connection_number);
+  // only update these things if you're (still) the principal conn
+
+#ifdef CONFIG_METADATA
+  // this is here to ensure it's only performed once during a teardown of a ptp stream
+  send_ssnc_metadata('disc', conn->client_ip_string, strlen(conn->client_ip_string), 1);
+#endif
+
+  if (conn->airplay_gid != NULL) {
+    free(conn->airplay_gid);
+    conn->airplay_gid = NULL;
+  }
+  conn->groupContainsGroupLeader = 0;
+  if (conn->dacp_active_remote != NULL) {
+    free(conn->dacp_active_remote);
+    conn->dacp_active_remote = NULL;
+  }
+  if (conn->ap2_client_name) {
+    free(conn->ap2_client_name);
+    conn->ap2_client_name = NULL;
+  }
+  // The block below (updating bonjour/mdns state when this is the principal
+  // connection) is deliberately disabled; kept for reference.
+  /*
+  pthread_rwlock_wrlock(&principal_conn_lock); // don't let the principal_conn be changed
+  pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
+  if (principal_conn)
+    debug(1, "principal_conn: %d.", principal_conn->connection_number);
+  else
+    debug(1, "principal_conn: is NULL.");
+  if (principal_conn == conn) {
+    config.airplay_statusflags &= (0xffffffff - (1 << 11)); // DeviceSupportsRelay
+    build_bonjour_strings(conn);
+    debug(1, "Connection %d: SETUP mdns_update on %s.", conn->connection_number,
+          get_category_string(conn->airplay_stream_category));
+    mdns_update(NULL, secondary_txt_records);
+    principal_conn = NULL;
+  }
+  pthread_cleanup_pop(1); // release the principal_conn lock
+  */
+  debug(2, "Connection %d: AP2 Event Receiver Cleanup is complete.", conn->connection_number);
+}
+
+// Thread routine for the AirPlay 2 event channel. arg is the connection's
+// rtsp_conn_info. Accepts a single TCP connection on conn->event_socket,
+// sends the client an encrypted "updateInfo" binary plist (including the
+// txtAirPlay record), then reads encrypted packets until the client closes
+// the connection (read of 0) -- read errors are logged but do not stop the
+// loop. On exit (normal or cancellation) the pushed cleanup handlers close
+// the socket, run ap2_event_receiver_cleanup_handler and free the
+// structured buffer.
+void *ap2_event_receiver(void *arg) {
+  // #include <syscall.h>
+  // debug(1, "rtp_event_receiver PID %d", syscall(SYS_gettid));
+  rtsp_conn_info *conn = (rtsp_conn_info *)arg;
+  debug(2, "Connection %d: AP2 Event Receiver started", conn->connection_number);
+  structured_buffer *sbuf = sbuf_new(4096);
+  if (sbuf != NULL) {
+    pthread_cleanup_push(sbuf_cleanup, sbuf);
+
+    // The block below (advertising DeviceSupportsRelay when this is the
+    // principal connection) is deliberately disabled; kept for reference.
+    /*
+    // only update these things if you're (still) the principal conn
+    pthread_rwlock_wrlock(&principal_conn_lock); // don't let the principal_conn be changed
+    pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
+    if (principal_conn == conn) {
+      config.airplay_statusflags |= 1 << 11; // DeviceSupportsRelay
+      // config.airplay_statusflags |= 1 << 17; // ReceiverSessionIsActive
+      build_bonjour_strings(conn);
+      debug(2, "Connection %d: SETUP mdns_update on %s.", conn->connection_number,
+            get_category_string(conn->airplay_stream_category));
+      mdns_update(NULL, secondary_txt_records);
+    }
+    pthread_cleanup_pop(1); // release the principal_conn lock
+    */
+    pthread_cleanup_push(ap2_event_receiver_cleanup_handler, arg);
+
+    // listen(conn->event_socket, 5); // this is now done in the handle_setup_2 code
+
+    uint8_t packet[4096];
+    ssize_t nread;
+    SOCKADDR remote_addr;
+    memset(&remote_addr, 0, sizeof(remote_addr));
+    socklen_t addr_size = sizeof(remote_addr);
+
+    int fd = eintr_checked_accept(conn->event_socket, (struct sockaddr *)&remote_addr, &addr_size);
+    debug(2,
+          "Connection %d: ap2_event_receiver accepted a connection on socket %d and moved to a new "
+          "socket %d.",
+          conn->connection_number, conn->event_socket, fd);
+    intptr_t pfd = fd;
+    pthread_cleanup_push(socket_cleanup, (void *)pfd);
+    int finished = 0;
+    do {
+
+      plist_t value_plist = generateInfoPlist(conn);
+      if (value_plist != NULL) {
+        void *txtData = NULL;
+        size_t txtDataLength = 0;
+        generateTxtDataValueInfo(conn, &txtData, &txtDataLength);
+        plist_dict_set_item(value_plist, "txtAirPlay", plist_new_data(txtData, txtDataLength));
+        free(txtData);
+        plist_t update_info_plist = plist_new_dict();
+        if (update_info_plist != NULL) {
+          plist_dict_set_item(update_info_plist, "type", plist_new_string("updateInfo"));
+          plist_dict_set_item(update_info_plist, "value", value_plist);
+          char *plistString = NULL;
+          uint32_t plistStringLength = 0;
+          plist_to_bin(update_info_plist, &plistString, &plistStringLength);
+          if (plistString != NULL) {
+            char *plist_as_string = plist_as_xml_text(update_info_plist);
+            if (plist_as_string != NULL) {
+              debug(3, "Plist is: \"%s\".", plist_as_string);
+              free(plist_as_string);
+            }
+            sbuf_printf(sbuf, "POST /command RTSP/1.0\r\nContent-Length: %u\r\n",
+                        plistStringLength);
+            sbuf_printf(sbuf, "Content-Type: application/x-apple-binary-plist\r\n\r\n");
+            sbuf_append(sbuf, plistString, plistStringLength);
+
+            free(plistString); // should be plist_to_bin_free, but it's not defined in older
+                               // libraries
+            char *b = 0;
+            size_t l = 0;
+            sbuf_buf_and_length(sbuf, &b, &l);
+            ssize_t wres =
+                write_encrypted(fd, &conn->ap2_pairing_context.event_cipher_bundle, b, l);
+            if ((wres == -1) || ((size_t)wres != l))
+              debug(1, "Encrypted write error");
+
+            sbuf_clear(sbuf);
+          } else {
+            debug(1, "plist string not created!");
+          }
+          plist_free(update_info_plist);
+        } else {
+          debug(1, "Could not build an updateInfo plist");
+        }
+        // value_plist was inserted into update_info_plist with
+        // plist_dict_set_item, which takes ownership, so it must not be
+        // freed separately here.
+        // plist_free(value_plist);
+      } else {
+        debug(1, "Could not build a value plist");
+      }
+
+      while (finished == 0) {
+        // Read at most sizeof(packet) - 1 bytes so that the NUL terminator
+        // written at packet[nread] below is always in bounds. (Previously
+        // the read length was sizeof(packet), so a full read made
+        // packet[nread] a one-byte out-of-bounds write.)
+        nread = read_encrypted(fd, &conn->ap2_pairing_context.event_cipher_bundle, packet,
+                               sizeof(packet) - 1);
+
+        // nread = recv(fd, packet, sizeof(packet), 0);
+
+        if (nread < 0) {
+          char errorstring[1024];
+          strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+          debug(1,
+                "Connection %d: error in ap2_event_receiver %d: \"%s\". Could not recv a packet.",
+                conn->connection_number, errno, errorstring);
+          // if ((config.diagnostic_drop_packet_fraction == 0.0) ||
+          //    (drand48() > config.diagnostic_drop_packet_fraction)) {
+        } else if (nread > 0) {
+
+          // ssize_t plen = nread;
+          packet[nread] = '\0'; // in bounds: nread <= sizeof(packet) - 1
+          debug(3, "Connection %d: Packet Received on Event Port with contents: \"%s\".",
+                conn->connection_number, packet);
+        } else {
+          // a read of zero bytes means the peer closed the connection
+          debug(2, "Connection %d: Event Port connection closed by client",
+                conn->connection_number);
+          finished = 1;
+        }
+      }
+
+    } while (finished == 0);
+
+    debug(3, "Connection %d: AP2 Event Receiver RTP thread starting \"normal\" exit.",
+          conn->connection_number);
+    pthread_cleanup_pop(1); // close the socket
+
+    pthread_cleanup_pop(1); // do the cleanup
+    pthread_cleanup_pop(1); // delete the structured buffer
+    debug(2, "Connection %d: AP2 Event Receiver RTP thread \"normal\" exit.",
+          conn->connection_number);
+  } else {
+    debug(1, "Could not allocate a structured buffer!");
+  }
+  conn->ap2_event_receiver_exited = 1;
+  pthread_exit(NULL);
+}
--- /dev/null
+// Public declaration for the AirPlay 2 event-channel receiver thread.
+#ifndef _AP2_EVENT_RECEIVER_H
+#define _AP2_EVENT_RECEIVER_H
+
+// pthread start routine; arg is a pointer to the connection's rtsp_conn_info
+// (see the cast at the top of ap2_event_receiver in the .c file).
+void *ap2_event_receiver(void *arg);
+
+#endif // _AP2_EVENT_RECEIVER_H
--- /dev/null
+/*
+ * Apple AirPlay 2 Remote Control (RC) Event Receiver. This file is part of Shairport Sync.
+ * Copyright (c) Mike Brady 2014--2025
+ * All rights reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#include "ap2_rc_event_receiver.h"
+#include "common.h"
+#include "player.h"
+#include "rtsp.h"
+#include "utilities/network_utilities.h"
+#include "utilities/structured_buffer.h"
+
+// pthread cleanup handler for the ap2_rc_event_receiver thread.
+// arg is the connection's rtsp_conn_info. Currently only logs that the
+// cleanup has run.
+void ap2_rc_event_receiver_cleanup_handler(void *arg) {
+  rtsp_conn_info *conn = (rtsp_conn_info *)arg;
+  // Say "RC" here so this log line is distinguishable from the plain
+  // ap2_event_receiver cleanup message (the previous text was a copy/paste
+  // of that message).
+  debug(2, "Connection %d: AP2 RC Event Receiver Cleanup.", conn->connection_number);
+}
+
+// Thread routine for the AirPlay 2 Remote Control (RC) event channel.
+// arg is the connection's rtsp_conn_info. Accepts a single TCP connection
+// on conn->event_socket, sends the client an encrypted "updateInfo" binary
+// plist (including the txtAirPlay record), then reads encrypted packets
+// until the client closes the connection (read of 0) -- read errors are
+// logged but do not stop the loop. On exit the pushed cleanup handlers
+// close the socket, run ap2_rc_event_receiver_cleanup_handler and free the
+// structured buffer.
+void *ap2_rc_event_receiver(void *arg) {
+  // #include <syscall.h>
+  // debug(1, "rtp_event_receiver PID %d", syscall(SYS_gettid));
+  rtsp_conn_info *conn = (rtsp_conn_info *)arg;
+  if (conn->airplay_stream_category == remote_control_stream)
+    debug(2, "Connection %d (RC): AP2 RC Event Receiver started", conn->connection_number);
+  else
+    debug(2, "Connection %d: AP2 RC Event Receiver started", conn->connection_number);
+
+  structured_buffer *sbuf = sbuf_new(4096);
+  if (sbuf != NULL) {
+    pthread_cleanup_push(sbuf_cleanup, sbuf);
+
+    pthread_cleanup_push(ap2_rc_event_receiver_cleanup_handler, arg);
+
+    // listen(conn->event_socket, 5); // this is now done in the handle_setup_2 code
+
+    uint8_t packet[4096];
+    ssize_t nread;
+    SOCKADDR remote_addr;
+    memset(&remote_addr, 0, sizeof(remote_addr));
+    socklen_t addr_size = sizeof(remote_addr);
+
+    int fd = eintr_checked_accept(conn->event_socket, (struct sockaddr *)&remote_addr, &addr_size);
+    debug(2,
+          "Connection %d: ap2_rc_event_receiver accepted a connection on socket %d and moved to a "
+          "new "
+          "socket %d.",
+          conn->connection_number, conn->event_socket, fd);
+    intptr_t pfd = fd;
+    pthread_cleanup_push(socket_cleanup, (void *)pfd);
+    int finished = 0;
+    do {
+
+      plist_t value_plist = generateInfoPlist(conn);
+      if (value_plist != NULL) {
+        void *txtData = NULL;
+        size_t txtDataLength = 0;
+        generateTxtDataValueInfo(conn, &txtData, &txtDataLength);
+        plist_dict_set_item(value_plist, "txtAirPlay", plist_new_data(txtData, txtDataLength));
+        free(txtData);
+        plist_t update_info_plist = plist_new_dict();
+        if (update_info_plist != NULL) {
+          plist_dict_set_item(update_info_plist, "type", plist_new_string("updateInfo"));
+          plist_dict_set_item(update_info_plist, "value", value_plist);
+          char *plistString = NULL;
+          uint32_t plistStringLength = 0;
+          plist_to_bin(update_info_plist, &plistString, &plistStringLength);
+          if (plistString != NULL) {
+            char *plist_as_string = plist_as_xml_text(update_info_plist);
+            if (plist_as_string != NULL) {
+              debug(3, "Plist is: \"%s\".", plist_as_string);
+              free(plist_as_string);
+            }
+            sbuf_printf(sbuf, "POST /command RTSP/1.0\r\nContent-Length: %u\r\n",
+                        plistStringLength);
+            sbuf_printf(sbuf, "Content-Type: application/x-apple-binary-plist\r\n\r\n");
+            sbuf_append(sbuf, plistString, plistStringLength);
+
+            free(plistString); // should be plist_to_bin_free, but it's not defined in older
+                               // libraries
+            char *b = 0;
+            size_t l = 0;
+            sbuf_buf_and_length(sbuf, &b, &l);
+            ssize_t wres =
+                write_encrypted(fd, &conn->ap2_pairing_context.event_cipher_bundle, b, l);
+            if ((wres == -1) || ((size_t)wres != l))
+              debug(1, "Encrypted write error");
+
+            sbuf_clear(sbuf);
+          } else {
+            debug(1, "plist string not created!");
+          }
+          plist_free(update_info_plist);
+        } else {
+          debug(1, "Could not build an updateInfo plist");
+        }
+        // value_plist was inserted into update_info_plist with
+        // plist_dict_set_item, which takes ownership, so it must not be
+        // freed separately here.
+        // plist_free(value_plist);
+      } else {
+        debug(1, "Could not build a value plist");
+      }
+
+      while (finished == 0) {
+        // Read at most sizeof(packet) - 1 bytes so that the NUL terminator
+        // written at packet[nread] below is always in bounds. (Previously
+        // the read length was sizeof(packet), so a full read made
+        // packet[nread] a one-byte out-of-bounds write.)
+        nread = read_encrypted(fd, &conn->ap2_pairing_context.event_cipher_bundle, packet,
+                               sizeof(packet) - 1);
+
+        // nread = recv(fd, packet, sizeof(packet), 0);
+
+        if (nread < 0) {
+          char errorstring[1024];
+          strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+          debug(
+              1,
+              "Connection %d: error in ap2_rc_event_receiver %d: \"%s\". Could not recv a packet.",
+              conn->connection_number, errno, errorstring);
+          // if ((config.diagnostic_drop_packet_fraction == 0.0) ||
+          //    (drand48() > config.diagnostic_drop_packet_fraction)) {
+        } else if (nread > 0) {
+
+          // ssize_t plen = nread;
+          packet[nread] = '\0'; // in bounds: nread <= sizeof(packet) - 1
+          debug(2,
+                "Connection %d: ap2_rc_event_receiver Packet Received on Event Port with contents: "
+                "\"%s\".",
+                conn->connection_number, packet);
+        } else {
+          // a read of zero bytes means the peer closed the connection
+          debug(2, "Connection %d: ap2_rc_event_receiver Event Port connection closed by client",
+                conn->connection_number);
+          finished = 1;
+        }
+      }
+
+    } while (finished == 0);
+    pthread_cleanup_pop(1); // close the socket
+    pthread_cleanup_pop(1); // do the cleanup
+    pthread_cleanup_pop(1); // delete the structured buffer
+    debug(2, "Connection %d: AP2 ap2_rc_event_receiver \"normal\" exit.", conn->connection_number);
+  } else {
+    debug(1, "Could not allocate a structured buffer!");
+  }
+  conn->ap2_event_receiver_exited = 1;
+  pthread_exit(NULL);
+}
--- /dev/null
+// RC means Remote Control
+// Public declaration for the AirPlay 2 Remote Control event-channel
+// receiver thread.
+
+#ifndef _AP2_RC_EVENT_RECEIVER_H
+#define _AP2_RC_EVENT_RECEIVER_H
+
+// pthread start routine; arg is a pointer to the connection's rtsp_conn_info
+// (see the cast at the top of ap2_rc_event_receiver in the .c file).
+void *ap2_rc_event_receiver(void *arg);
+
+#endif // _AP2_RC_EVENT_RECEIVER_H
/*
* This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2019
+ * Copyright (c) Mike Brady 2019--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
/*
* This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2019
+ * Copyright (c) Mike Brady 2019--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
/*
* Audio driver handler. This file is part of Shairport.
* Copyright (c) James Laird 2013
- * Modifications (c) Mike Brady 2014 -- 2019
+ * Modifications (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#include "common.h"
#include "config.h"
#include <stdio.h>
+#include <stdlib.h>
#include <string.h>
#ifdef CONFIG_JACK
#ifdef CONFIG_SOUNDIO
extern audio_output audio_soundio;
#endif
-#ifdef CONFIG_PW
+#ifdef CONFIG_PIPEWIRE
extern audio_output audio_pw;
#endif
-#ifdef CONFIG_PA
+#ifdef CONFIG_PULSEAUDIO
extern audio_output audio_pa;
#endif
#ifdef CONFIG_ALSA
#ifdef CONFIG_SNDIO
&audio_sndio,
#endif
-#ifdef CONFIG_PW
+#ifdef CONFIG_PIPEWIRE
&audio_pw,
#endif
-#ifdef CONFIG_PA
+#ifdef CONFIG_PULSEAUDIO
&audio_pa,
#endif
#ifdef CONFIG_JACK
}
}
-void parse_general_audio_options(void) {
+void parse_audio_options(const char *named_stanza, uint32_t default_format_set,
+ uint32_t default_rate_set, uint32_t default_channel_set) {
/* this must be called after the output device has been initialised, so that the default values
* are set before any options are chosen */
int value;
/* Get the desired buffer size setting (deprecated). */
if (config_lookup_int(config.cfg, "general.audio_backend_buffer_desired_length", &value)) {
- if ((value < 0) || (value > 66150)) {
- inform("The setting general.audio_backend_buffer_desired_length is deprecated. "
- "Use alsa.audio_backend_buffer_desired_length_in_seconds instead.");
- die("Invalid audio_backend_buffer_desired_length value: \"%d\". It "
- "should be between 0 and "
- "66150, default is %d",
- value, (int)(config.audio_backend_buffer_desired_length * 44100));
- } else {
- inform("The setting general.audio_backend_buffer_desired_length is deprecated. "
- "Use general.audio_backend_buffer_desired_length_in_seconds instead.");
- config.audio_backend_buffer_desired_length = 1.0 * value / 44100;
- }
+ inform("The setting general.audio_backend_buffer_desired_length is no longer supported. "
+ "Please use general.audio_backend_buffer_desired_length_in_seconds instead.");
}
- /* Get the desired buffer size setting in seconds. */
+ /* Get the desired backend buffer size setting in seconds. */
+ /* This is the size of the buffer in the output system, e.g. in the DAC itself in ALSA */
if (config_lookup_float(config.cfg, "general.audio_backend_buffer_desired_length_in_seconds",
&dvalue)) {
if (dvalue < 0) {
}
}
+ /* Get the desired decoded buffer size setting in seconds. */
+ /* This is the size of the buffer of decoded and deciphered audio held in the player's output
+ * queue prior to sending it to the output system */
+ if (config_lookup_float(config.cfg, "general.audio_decoded_buffer_desired_length_in_seconds",
+ &dvalue)) {
+ if (dvalue < 0) {
+ die("Invalid audio_decoded_buffer_desired_length_in_seconds value: \"%f\". It "
+ "should be 0.0 or greater."
+ " The default is %.3f seconds",
+ dvalue, config.audio_decoded_buffer_desired_length);
+ } else {
+ config.audio_decoded_buffer_desired_length = dvalue;
+ }
+ }
+
/* Get the minimum buffer size for fancy interpolation setting in seconds. */
if (config_lookup_float(config.cfg,
"general.audio_backend_buffer_interpolation_threshold_in_seconds",
/* Get the latency offset (deprecated). */
if (config_lookup_int(config.cfg, "general.audio_backend_latency_offset", &value)) {
- if ((value < -66150) || (value > 66150)) {
- inform("The setting general.audio_backend_latency_offset is deprecated. "
- "Use general.audio_backend_latency_offset_in_seconds instead.");
- die("Invalid audio_backend_latency_offset value: \"%d\". It "
- "should be between -66150 and +66150, default is 0",
- value);
- } else {
- inform("The setting general.audio_backend_latency_offset is deprecated. "
- "Use general.audio_backend_latency_offset_in_seconds instead.");
- config.audio_backend_latency_offset = 1.0 * value / 44100;
- }
+
+ inform("The setting general.audio_backend_latency_offset is no longer supported. "
+ "Please use general.audio_backend_latency_offset_in_seconds instead.");
}
/* Get the latency offset in seconds. */
}
}
}
+
+ if (named_stanza != NULL) {
+ config.format_set = get_format_settings(named_stanza, "output_format");
+ config.rate_set = get_rate_settings(named_stanza, "output_rate");
+ config.channel_set = get_channel_settings(named_stanza, "output_channels");
+ }
+ // use the supplied defaults if no settings were given
+ if (config.format_set == 0)
+ config.format_set = default_format_set;
+ if (config.rate_set == 0)
+ config.rate_set = default_rate_set;
+ if (config.channel_set == 0)
+ config.channel_set = default_channel_set;
+}
+
+// Read the setting <stanza_name>.<setting_name> (e.g. "alsa.output_format")
+// from the configuration file and return a bit set of the selected
+// sps_format_t formats (bit (1 << f) set means format f is permitted).
+// "auto" selects SPS_FORMAT_SET (all valid formats); "S16", "S24" and "S32"
+// each select both-endianness variants of that width (S24 also selects the
+// packed 3-byte variants); any other entry must exactly match an
+// sps_format_description_string() name or a warning is issued.
+// Returns 0 when config.cfg is NULL or the setting is absent.
+uint32_t get_format_settings(const char *stanza_name, const char *setting_name) {
+  uint32_t format_set = 0;
+  if (config.cfg != NULL) {
+    char setting_path[256];
+    snprintf(setting_path, sizeof(setting_path) - 1, "%s.%s", stanza_name, setting_name);
+    // get any format settings -- can be "auto", a single format e.g. "S16" or a list of formats
+    config_setting_t *format_setting = config_lookup(config.cfg, setting_path);
+    if (format_setting != NULL) {
+      const char **format_settings;
+      // negative count encodes the index of the offending argument
+      int format_settings_count =
+          config_get_string_settings_as_string_array(format_setting, &format_settings);
+      if (format_settings_count > 0) {
+        int i;
+        for (i = 0; i < format_settings_count; i++) {
+          debug(3, "format setting %u: \"%s\".", i, format_settings[i]);
+          if (strcmp(format_settings[i], "auto") == 0) {
+            if (format_settings_count != 1)
+              warn("in the \"%s\" setting in the \"%s\" section of the configuration file, "
+                   "multiple formats, including \"auto\", "
+                   "are specified, but \"auto\" includes all "
+                   "formats anyway...",
+                   setting_name, stanza_name);
+            format_set = SPS_FORMAT_SET; // all valid formats
+          } else if (strcmp(format_settings[i], "S16") == 0) {
+            format_set |= (1 << SPS_FORMAT_S16_LE) | (1 << SPS_FORMAT_S16_BE);
+          } else if (strcmp(format_settings[i], "S24") == 0) {
+            format_set |= (1 << SPS_FORMAT_S24_LE) | (1 << SPS_FORMAT_S24_BE) |
+                          (1 << SPS_FORMAT_S24_3LE) | (1 << SPS_FORMAT_S24_3BE);
+          } else if (strcmp(format_settings[i], "S32") == 0) {
+            format_set |= (1 << SPS_FORMAT_S32_LE) | (1 << SPS_FORMAT_S32_BE);
+          } else {
+            // try an exact match against every native format's description
+            sps_format_t f;
+            int valid = 0;
+            for (f = SPS_FORMAT_LOWEST; f <= SPS_FORMAT_HIGHEST_NATIVE; f++) {
+              if (strcmp(format_settings[i], sps_format_description_string(f)) == 0) {
+                format_set |= (1 << f);
+                valid = 1;
+              }
+            }
+            if (valid == 0) {
+              warn("in the \"%s\" setting in the \"%s\" section of the configuration file, an "
+                   "invalid format: \"%s\" has been detected.",
+                   setting_name, stanza_name, format_settings[i]);
+            }
+          }
+        }
+        free(format_settings);
+      } else {
+        debug(1,
+              "in the \"%s\" setting in the \"%s\" section of the configuration file, an error has "
+              "been detected at argument %d.",
+              setting_name, stanza_name, -format_settings_count);
+      }
+    }
+  }
+  // Build a human-readable comma-separated list of the selected formats,
+  // purely for the debug log below. Bits are cleared from t_format_set as
+  // they are printed so the separator logic knows when the last one is done.
+  // NOTE(review): this loop's bounds (SPS_FORMAT_UNKNOWN..SPS_FORMAT_S32_BE)
+  // differ from the validation loop's (SPS_FORMAT_LOWEST..
+  // SPS_FORMAT_HIGHEST_NATIVE) -- confirm they cover the same enum range.
+  // NOTE(review): p is advanced by the full strlen of each name; if the
+  // names ever exceeded buf (256 bytes) the snprintf would truncate but p
+  // would still advance -- assumed unreachable with the current format set.
+  sps_format_t f;
+  uint32_t t_format_set = format_set;
+  char buf[256];
+  char *p = buf;
+  for (f = SPS_FORMAT_UNKNOWN; f <= SPS_FORMAT_S32_BE; f++) {
+    if ((t_format_set & (1 << f)) != 0) {
+      snprintf(p, sizeof(buf) - (p - buf) - 1, "%s", sps_format_description_string(f));
+      p = p + strlen(sps_format_description_string(f));
+      t_format_set -= (1 << f);
+      if (t_format_set != 0) {
+        snprintf(p, sizeof(buf) - (p - buf) - 1, ", ");
+        p = p + strlen(", ");
+      }
+    }
+  }
+  if (p != buf) {
+    *p = '.';
+    p++;
+  }
+  *p = '\0';
+  if (strlen(buf) == 0)
+    debug(3, "No \"%s\" output format settings.", stanza_name);
+  else
+    debug(3, "The \"%s\" output format settings are: \"%s\".", stanza_name, buf);
+  return format_set;
+}
+
+// Read the setting <stanza_name>.<setting_name> (e.g. "alsa.output_rate")
+// from the configuration file and return a bit set of the selected
+// sps_rate_t rates (bit (1 << r) set means rate r is permitted).
+// The setting is either the string "auto" -- selecting SPS_RATE_SET when
+// built with FFmpeg, otherwise SPS_RATE_NON_FFMPEG_SET -- or one or more
+// integers that must each equal an sps_rate_actual_rate() value (and, in a
+// non-FFmpeg build, lie in SPS_RATE_NON_FFMPEG_SET).
+// Returns 0 when config.cfg is NULL or the setting is absent.
+uint32_t get_rate_settings(const char *stanza_name, const char *setting_name) {
+  uint32_t rate_set = 0;
+  if (config.cfg != NULL) {
+    char setting_path[256];
+    snprintf(setting_path, sizeof(setting_path) - 1, "%s.%s", stanza_name, setting_name);
+    config_setting_t *rate_setting = config_lookup(config.cfg, setting_path);
+    if (rate_setting != NULL) {
+      if (config_setting_type(rate_setting) == CONFIG_TYPE_STRING) {
+        // see if it is "auto"
+        if (strcmp(config_setting_get_string(rate_setting), "auto") == 0) {
+#ifdef CONFIG_FFMPEG
+          rate_set = SPS_RATE_SET; // all valid rates
+#else
+          rate_set = SPS_RATE_NON_FFMPEG_SET;
+#endif
+        } else {
+          // any string other than "auto" is invalid
+          warn("In the \"%s\" setting in the \"%s\" section of the configuration file, an invalid "
+               "character string -- \"%s\" -- has been detected. (Note that numbers must not be "
+               "enclosed in quotes.)",
+               setting_name, stanza_name, config_setting_get_string(rate_setting));
+        }
+      } else {
+        // not a string -- treat it as an integer or a list of integers
+        int *rates;
+        // negative count encodes the index of the offending argument
+        int rate_settings_count = config_get_int_settings_as_int_array(rate_setting, &rates);
+        if (rate_settings_count > 0) {
+          debug(3, "%d rate settings found.", rate_settings_count);
+          int i;
+          for (i = 0; i < rate_settings_count; i++) {
+            debug(3, "rate setting %d: %d.", i, rates[i]);
+            sps_rate_t r;
+            int valid = 0;
+            // find the sps_rate_t whose actual rate matches the given number
+            for (r = SPS_RATE_5512; r <= SPS_RATE_384000; r++) {
+              if ((unsigned int)rates[i] == sps_rate_actual_rate(r)) {
+                valid = 1;
+
+#ifdef CONFIG_FFMPEG
+                rate_set |= (1 << r);
+#else
+                // without FFmpeg, only the rates in SPS_RATE_NON_FFMPEG_SET
+                // can actually be used
+                if (((1 << r) & SPS_RATE_NON_FFMPEG_SET) != 0) {
+                  rate_set |= (1 << r);
+                } else {
+                  warn("In the \"%s\" setting in the \"%s\" section of the configuration file, "
+                       "the rate selected -- %d -- can not be used because Shairport Sync has been "
+                       "built without FFmpeg support.",
+                       setting_name, stanza_name, rates[i]);
+                }
+#endif
+              }
+            }
+            if (valid == 0) {
+              warn("In the \"%s\" setting in the \"%s\" section of the configuration file, an "
+                   "invalid rate: %d has been detected.",
+                   setting_name, stanza_name, rates[i]);
+            }
+          }
+          free(rates);
+        } else {
+          warn("in the \"%s\" setting in the \"%s\" section of the configuration file, an error "
+               "has been detected at argument %d. (Note that numbers must not be enclosed in "
+               "quotes.)",
+               setting_name, stanza_name, -rate_settings_count);
+        }
+      }
+    }
+  }
+  // Build a human-readable comma-separated list of the selected rates,
+  // purely for the debug log below. Bits are cleared from t_rate_set as they
+  // are printed so the separator logic knows when the last one is done.
+  // NOTE(review): p is advanced by the full strlen of each number; assumed
+  // never to exceed buf (256 bytes) with the known rate set.
+  sps_rate_t r;
+  char buf[256];
+  char *p = buf;
+  uint32_t t_rate_set = rate_set;
+  char numbuf[32];
+  for (r = SPS_RATE_UNKNOWN; r <= SPS_RATE_384000; r++) {
+    if ((t_rate_set & (1 << r)) != 0) {
+      snprintf(numbuf, sizeof(numbuf) - 1, "%u", sps_rate_actual_rate(r));
+      snprintf(p, sizeof(buf) - (p - buf) - 1, "%s", numbuf);
+      p = p + strlen(numbuf);
+      t_rate_set -= (1 << r);
+      if (t_rate_set != 0) {
+        snprintf(p, sizeof(buf) - (p - buf) - 1, ", ");
+        p = p + strlen(", ");
+      }
+    }
+  }
+  if (p != buf) {
+    *p = '.';
+    p++;
+  }
+  *p = '\0';
+  if (strlen(buf) == 0)
+    debug(3, "No \"%s\" output rate settings.", stanza_name);
+  else
+    debug(3, "The \"%s\" output rate settings are: \"%s\".", stanza_name, buf);
+  return rate_set;
+}
+
+// Read the setting <stanza_name>.<setting_name> (e.g. "alsa.output_channels")
+// from the configuration file and return a bit set of the selected channel
+// counts (bit (1 << n) set means an n-channel configuration is permitted).
+// The setting is either the string "auto" -- selecting SPS_CHANNEL_SET when
+// built with FFmpeg, otherwise SPS_CHANNNEL_NON_FFMPEG_SET (two channels
+// only) -- or one or more integers in the range 1..8 (in a non-FFmpeg build,
+// additionally restricted to SPS_CHANNNEL_NON_FFMPEG_SET).
+// Returns 0 when config.cfg is NULL or the setting is absent.
+// NOTE(review): "CHANNNEL" (triple N) is spelled that way consistently --
+// presumably it matches the macro's definition elsewhere; confirm.
+uint32_t get_channel_settings(const char *stanza_name, const char *setting_name) {
+  uint32_t channel_set = 0;
+  if (config.cfg != NULL) {
+    char setting_path[256];
+    snprintf(setting_path, sizeof(setting_path) - 1, "%s.%s", stanza_name, setting_name);
+    // now get the channels count set
+    config_setting_t *channels_setting = config_lookup(config.cfg, setting_path);
+    if (channels_setting != NULL) {
+      if (config_setting_type(channels_setting) == CONFIG_TYPE_STRING) {
+        // see if it is "auto"
+        if (strcmp(config_setting_get_string(channels_setting), "auto") == 0) {
+#ifdef CONFIG_FFMPEG
+          channel_set = SPS_CHANNEL_SET; // all valid channels
+#else
+          channel_set = SPS_CHANNNEL_NON_FFMPEG_SET; // just two channels
+#endif
+        } else {
+          // any string other than "auto" is invalid
+          warn("in the \"%s\" setting in the \"%s\" section of the configuration file, an invalid "
+               "setting: \"%s\" has been detected.",
+               setting_name, stanza_name, config_setting_get_string(channels_setting));
+        }
+      } else {
+        // not a string -- treat it as an integer or a list of integers
+        int *channel_counts;
+        // negative count encodes the index of the offending argument
+        int channel_counts_count =
+            config_get_int_settings_as_int_array(channels_setting, &channel_counts);
+        if (channel_counts_count > 0) {
+          debug(3, "%d channel count settings found.", channel_counts_count);
+          int i;
+          for (i = 0; i < channel_counts_count; i++) {
+            debug(3, "channel count setting %d: %d.", i, channel_counts[i]);
+
+            if ((channel_counts[i] >= 1) && (channel_counts[i] <= 8)) {
+#ifdef CONFIG_FFMPEG
+              channel_set |= (1 << channel_counts[i]);
+#else
+              // without FFmpeg, only the channel counts in
+              // SPS_CHANNNEL_NON_FFMPEG_SET can actually be used
+              if (((1 << channel_counts[i]) & SPS_CHANNNEL_NON_FFMPEG_SET) != 0) {
+                channel_set |= (1 << channel_counts[i]);
+              } else {
+                warn("in the \"%s\" setting in the \"%s\" section of the configuration file, "
+                     "the channel count selected -- %d -- can not be used because Shairport Sync "
+                     "has been built without FFmpeg support.",
+                     setting_name, stanza_name, channel_counts[i]);
+              }
+#endif
+            } else {
+              warn("in the \"%s\" setting in the \"%s\" section of the configuration file, an "
+                   "invalid channel count: %d has been detected.",
+                   setting_name, stanza_name, channel_counts[i]);
+            }
+          }
+          free(channel_counts);
+        } else {
+          debug(1,
+                "in the \"%s\" setting in the \"%s\" section of the configuration file, an error "
+                "has been detected at argument %d.",
+                setting_name, stanza_name, -channel_counts_count);
+        }
+      }
+    }
+  }
+  // Build a human-readable comma-separated list of the selected channel
+  // counts, purely for the debug log below. Bits are cleared from
+  // t_channel_set as they are printed so the separator logic knows when the
+  // last one is done.
+  char buf[256];
+  char *p = buf;
+  char numbuf[32];
+  unsigned int c;
+  uint32_t t_channel_set = channel_set;
+  for (c = 0; c <= 8; c++) {
+    if ((t_channel_set & (1 << c)) != 0) {
+      snprintf(numbuf, sizeof(numbuf) - 1, "%u", c);
+      snprintf(p, sizeof(buf) - (p - buf) - 1, "%s", numbuf);
+      p = p + strlen(numbuf);
+      t_channel_set -= (1 << c);
+      if (t_channel_set != 0) {
+        snprintf(p, sizeof(buf) - (p - buf) - 1, ", ");
+        p = p + strlen(", ");
+      }
+    }
+  }
+  if (p != buf) {
+    *p = '.';
+    p++;
+  }
+  *p = '\0';
+  if (strlen(buf) == 0)
+    debug(3, "No \"%s\" output channel settings.", stanza_name);
+  else
+    debug(3, "The \"%s\" output channel settings are: \"%s\".", stanza_name, buf);
+  return channel_set;
+}
+
+sps_format_t check_configuration_with_formats(
+ unsigned int channels, unsigned int rate, sps_format_t format,
+ int (*check_configuration)(unsigned int channels, unsigned int rate, unsigned int format)) {
+ sps_format_t response = SPS_FORMAT_UNKNOWN; // this is encoded as zero
+
+ // first, check that the channels are permissible...
+ if ((config.channel_set & (1 << channels)) != 0) {
+ // now check the rate...
+ sps_rate_t r = SPS_RATE_LOWEST;
+ int rate_is_permissible = 0;
+ while ((rate_is_permissible == 0) && (r <= SPS_RATE_HIGHEST)) {
+ if (((config.rate_set & (1 << r)) != 0) && (rate == sps_rate_actual_rate(r)))
+ rate_is_permissible = 1;
+ else
+ r++;
+ }
+ if (rate_is_permissible != 0) {
+ // check the actual requested format first with the initial_search array
+ // and if it fails, then check in the subsequent_search array.
+ // the choice of subsequent search array is determined by the machine's endianness
+
+ // clang-format off
+ sps_format_t ordered_initial_search_U8[] = {SPS_FORMAT_U8, SPS_FORMAT_S8};
+ sps_format_t ordered_initial_search_S8[] = {SPS_FORMAT_S8, SPS_FORMAT_U8};
+ sps_format_t ordered_subsequent_search_8_LE[] = {SPS_FORMAT_S16_LE, SPS_FORMAT_S16_BE, SPS_FORMAT_S24_LE, SPS_FORMAT_S24_BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S24_3BE, SPS_FORMAT_S32_LE, SPS_FORMAT_S32_BE};
+ sps_format_t ordered_subsequent_search_8_BE[] = {SPS_FORMAT_S16_BE, SPS_FORMAT_S16_LE, SPS_FORMAT_S24_BE, SPS_FORMAT_S24_LE, SPS_FORMAT_S24_3BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S32_LE};
+
+ sps_format_t ordered_initial_search_S16_LE[] = {SPS_FORMAT_S16_LE, SPS_FORMAT_S16_BE};
+ sps_format_t ordered_initial_search_S16_BE[] = {SPS_FORMAT_S16_BE, SPS_FORMAT_S16_LE};
+ sps_format_t ordered_subsequent_search_S16_LE[] = {SPS_FORMAT_S24_LE, SPS_FORMAT_S24_BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S24_3BE, SPS_FORMAT_S32_LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S8, SPS_FORMAT_U8};
+ sps_format_t ordered_subsequent_search_S16_BE[] = {SPS_FORMAT_S24_BE, SPS_FORMAT_S24_LE, SPS_FORMAT_S24_3BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S32_LE, SPS_FORMAT_S8, SPS_FORMAT_U8};
+
+ sps_format_t ordered_initial_search_S24_LE[] = {SPS_FORMAT_S24_LE, SPS_FORMAT_S24_BE};
+ sps_format_t ordered_initial_search_S24_BE[] = {SPS_FORMAT_S24_BE, SPS_FORMAT_S24_LE};
+ sps_format_t ordered_subsequent_search_S24_LE[] = {SPS_FORMAT_S24_3LE, SPS_FORMAT_S24_3BE, SPS_FORMAT_S32_LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S16_LE, SPS_FORMAT_S16_BE, SPS_FORMAT_S8, SPS_FORMAT_U8};
+ sps_format_t ordered_subsequent_search_S24_BE[] = {SPS_FORMAT_S24_3BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S32_LE, SPS_FORMAT_S16_BE, SPS_FORMAT_S16_LE, SPS_FORMAT_S8, SPS_FORMAT_U8};
+
+ sps_format_t ordered_initial_search_S24_3le[] = {SPS_FORMAT_S24_3LE, SPS_FORMAT_S24_3BE};
+ sps_format_t ordered_initial_search_S24_3be[] = {SPS_FORMAT_S24_3BE, SPS_FORMAT_S24_3LE};
+ sps_format_t ordered_subsequent_search_S24_3le[] = {SPS_FORMAT_S24_LE, SPS_FORMAT_S24_BE, SPS_FORMAT_S32_LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S16_LE, SPS_FORMAT_S16_BE, SPS_FORMAT_S8, SPS_FORMAT_U8};
+ sps_format_t ordered_subsequent_search_S24_3be[] = {SPS_FORMAT_S24_BE, SPS_FORMAT_S24_LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S32_LE, SPS_FORMAT_S16_BE, SPS_FORMAT_S16_LE, SPS_FORMAT_S8, SPS_FORMAT_U8};
+
+ sps_format_t ordered_initial_search_S32_LE[] = {SPS_FORMAT_S32_LE, SPS_FORMAT_S32_BE};
+ sps_format_t ordered_initial_search_S32_BE[] = {SPS_FORMAT_S32_BE, SPS_FORMAT_S32_LE};
+ sps_format_t ordered_subsequent_search_S32_LE[] = {SPS_FORMAT_S24_LE, SPS_FORMAT_S24_BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S24_3BE, SPS_FORMAT_S16_LE, SPS_FORMAT_S16_BE, SPS_FORMAT_S8, SPS_FORMAT_U8};
+ sps_format_t ordered_subsequent_search_S32_BE[] = {SPS_FORMAT_S24_BE, SPS_FORMAT_S24_LE, SPS_FORMAT_S24_3BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S16_BE, SPS_FORMAT_S16_LE, SPS_FORMAT_S8, SPS_FORMAT_U8};
+ // clang-format on
+
+ // set up the initial_search pointer
+ sps_format_t *initial_formats_to_check = NULL;
+ unsigned int number_of_initial_formats_to_check = 0;
+
+ // set up the subsequent_search pointer
+ sps_format_t *subsequent_formats_to_check = NULL;
+ unsigned int number_of_subsequent_formats_to_check = 0;
+
+ // Set up the initial and subsequent search pointers.
+
+ // Searching is done with an initial search and, if necessary, a subsequent search.
+ // Searching stops when a suitable format is confirmed as being acceptable.
+
+ // The initial search checks the requested format first, followed by its equivalents of the
+ // same bit size. The subsequent search puts the formats to check in order of suitability but
+ // with the native-endian format first. SPS_FORMAT_S16, SPS_FORMAT_S24 and SPS_FORMAT_S32 are
+ // treated the same as their equivalent with native endianness. AUTO is intended to be treated as
+ // SPS_FORMAT_S32 with native endianness.
+
+ switch (format) {
+ case SPS_FORMAT_U8:
+ case SPS_FORMAT_S8:
+ if (config.endianness == SS_LITTLE_ENDIAN) {
+ subsequent_formats_to_check = ordered_subsequent_search_8_LE;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_8_LE) / sizeof(sps_format_t);
+ } else {
+ subsequent_formats_to_check = ordered_subsequent_search_8_BE;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_8_BE) / sizeof(sps_format_t);
+ }
+ if (format == SPS_FORMAT_S8) {
+ initial_formats_to_check = ordered_initial_search_S8;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S8) / sizeof(sps_format_t);
+ } else {
+ initial_formats_to_check = ordered_initial_search_U8;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_U8) / sizeof(sps_format_t);
+ }
+ break;
+ case SPS_FORMAT_S16:
+ case SPS_FORMAT_S16_LE:
+ case SPS_FORMAT_S16_BE:
+ if (config.endianness == SS_LITTLE_ENDIAN) {
+ subsequent_formats_to_check = ordered_subsequent_search_S16_LE;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_S16_LE) / sizeof(sps_format_t);
+ if (format == SPS_FORMAT_S16) {
+ initial_formats_to_check = ordered_initial_search_S16_LE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S16_LE) / sizeof(sps_format_t);
+ }
+ } else {
+ subsequent_formats_to_check = ordered_subsequent_search_S16_BE;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_S16_BE) / sizeof(sps_format_t);
+ if (format == SPS_FORMAT_S16) {
+ initial_formats_to_check = ordered_initial_search_S16_BE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S16_BE) / sizeof(sps_format_t);
+ }
+ }
+ if (format == SPS_FORMAT_S16_LE) {
+ initial_formats_to_check = ordered_initial_search_S16_LE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S16_LE) / sizeof(sps_format_t);
+ } else if (format == SPS_FORMAT_S16_BE) {
+ initial_formats_to_check = ordered_initial_search_S16_BE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S16_BE) / sizeof(sps_format_t);
+ }
+ break;
+ case SPS_FORMAT_S24:
+ case SPS_FORMAT_S24_LE:
+ case SPS_FORMAT_S24_BE:
+ if (config.endianness == SS_LITTLE_ENDIAN) {
+ subsequent_formats_to_check = ordered_subsequent_search_S24_LE;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_S24_LE) / sizeof(sps_format_t);
+ if (format == SPS_FORMAT_S24) {
+ initial_formats_to_check = ordered_initial_search_S24_LE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S24_LE) / sizeof(sps_format_t);
+ }
+ } else {
+ subsequent_formats_to_check = ordered_subsequent_search_S24_BE;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_S24_BE) / sizeof(sps_format_t);
+ if (format == SPS_FORMAT_S24) {
+ initial_formats_to_check = ordered_initial_search_S24_BE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S24_BE) / sizeof(sps_format_t);
+ }
+ }
+ if (format == SPS_FORMAT_S24_LE) {
+ initial_formats_to_check = ordered_initial_search_S24_LE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S24_LE) / sizeof(sps_format_t);
+ } else if (format == SPS_FORMAT_S24_BE) {
+ initial_formats_to_check = ordered_initial_search_S24_BE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S24_BE) / sizeof(sps_format_t);
+ }
+ break;
+ case SPS_FORMAT_S32:
+ case SPS_FORMAT_S32_LE:
+ case SPS_FORMAT_S32_BE:
+ // case SPS_FORMAT_AUTO: // NOTE(review): commented out, so AUTO falls to default despite the AUTO checks below -- confirm
+ if (config.endianness == SS_LITTLE_ENDIAN) {
+ subsequent_formats_to_check = ordered_subsequent_search_S32_LE;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_S32_LE) / sizeof(sps_format_t);
+ if ((format == SPS_FORMAT_S32) || (format == SPS_FORMAT_AUTO)) {
+ initial_formats_to_check = ordered_initial_search_S32_LE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S32_LE) / sizeof(sps_format_t);
+ }
+ } else {
+ subsequent_formats_to_check = ordered_subsequent_search_S32_BE;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_S32_BE) / sizeof(sps_format_t);
+ if ((format == SPS_FORMAT_S32) || (format == SPS_FORMAT_AUTO)) {
+ initial_formats_to_check = ordered_initial_search_S32_BE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S32_BE) / sizeof(sps_format_t);
+ }
+ }
+ if (format == SPS_FORMAT_S32_LE) {
+ initial_formats_to_check = ordered_initial_search_S32_LE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S32_LE) / sizeof(sps_format_t);
+ } else if (format == SPS_FORMAT_S32_BE) {
+ initial_formats_to_check = ordered_initial_search_S32_BE;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S32_BE) / sizeof(sps_format_t);
+ }
+ break;
+ case SPS_FORMAT_S24_3LE:
+ case SPS_FORMAT_S24_3BE:
+ if (config.endianness == SS_LITTLE_ENDIAN) {
+ subsequent_formats_to_check = ordered_subsequent_search_S24_3le;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_S24_3le) / sizeof(sps_format_t);
+ } else {
+ subsequent_formats_to_check = ordered_subsequent_search_S24_3be;
+ number_of_subsequent_formats_to_check =
+ sizeof(ordered_subsequent_search_S24_3be) / sizeof(sps_format_t);
+ }
+ if (format == SPS_FORMAT_S24_3LE) {
+ initial_formats_to_check = ordered_initial_search_S24_3le;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S24_3le) / sizeof(sps_format_t);
+ } else {
+ initial_formats_to_check = ordered_initial_search_S24_3be;
+ number_of_initial_formats_to_check =
+ sizeof(ordered_initial_search_S24_3be) / sizeof(sps_format_t);
+ }
+ break;
+ default:
+ debug(1, "unknown format %u to check.", format);
+ break;
+ }
+
+ if ((initial_formats_to_check == NULL) || (subsequent_formats_to_check == NULL)) {
+ debug(1, "error initialising for format %s.", sps_format_description_string(format));
+ }
+
+ // here, we know the channel and rate are permissible according to the settings,
+ // but we don't yet know about each of the formats
+ // do initial search
+ unsigned int i = 0;
+ while ((i < number_of_initial_formats_to_check) && (response == SPS_FORMAT_UNKNOWN)) {
+ // only call check_configuration() if the format is permissible
+ if ((((1 << initial_formats_to_check[i]) & config.format_set) != 0) &&
+ ((check_configuration == NULL) ||
+ (check_configuration(channels, rate, initial_formats_to_check[i]) == 0)))
+ response = initial_formats_to_check[i];
+ else
+ i++;
+ }
+ // if no joy, do subsequent search
+ if (response == 0) {
+ i = 0;
+ while ((i < number_of_subsequent_formats_to_check) && (response == SPS_FORMAT_UNKNOWN)) {
+ // only call check_configuration() if the format is permissible
+ if ((((1 << subsequent_formats_to_check[i]) & config.format_set) != 0) &&
+ ((check_configuration == NULL) ||
+ (check_configuration(channels, rate, subsequent_formats_to_check[i]) == 0)))
+ response = subsequent_formats_to_check[i];
+ else
+ i++;
+ }
+ }
+ }
+ }
+ return response;
+}
+
+/*
+
+sps_format_t all_formats_to_check_le[] = {SPS_FORMAT_S32_LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S24_LE,
+ SPS_FORMAT_S24_BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S24_3BE,
+ SPS_FORMAT_S16_LE, SPS_FORMAT_S16_BE};
+// look at little-endian formats first
+sps_format_t all_formats_to_check_le[] = {SPS_FORMAT_S32_LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S24_LE,
+ SPS_FORMAT_S24_BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S24_3BE,
+ SPS_FORMAT_S16_LE, SPS_FORMAT_S16_BE};
+// look at big-endian formats first
+sps_format_t all_formats_to_check_be[] = {SPS_FORMAT_S32_BE, SPS_FORMAT_S32_LE, SPS_FORMAT_S24_BE,
+ SPS_FORMAT_S24_LE, SPS_FORMAT_S24_3BE, SPS_FORMAT_S24_3LE,
+ SPS_FORMAT_S16_BE, SPS_FORMAT_S16_LE};
+// look at little-endian formats first
+sps_format_t s32_formats_to_check_le[] = {SPS_FORMAT_S32_LE, SPS_FORMAT_S32_BE};
+// look at big-endian formats first
+sps_format_t s32_formats_to_check_be[] = {SPS_FORMAT_S32_BE, SPS_FORMAT_S32_LE};
+// look at little-endian formats first
+sps_format_t s24_formats_to_check_le[] = {
+ SPS_FORMAT_S24_LE,
+ SPS_FORMAT_S24_BE,
+ SPS_FORMAT_S24_3LE,
+ SPS_FORMAT_S24_3BE,
+};
+// look at big-endian formats first
+sps_format_t s24_formats_to_check_be[] = {
+ SPS_FORMAT_S24_BE,
+ SPS_FORMAT_S24_LE,
+ SPS_FORMAT_S24_3BE,
+ SPS_FORMAT_S24_3LE,
+};
+// look at little-endian formats first
+sps_format_t s16_formats_to_check_le[] = {SPS_FORMAT_S16_LE, SPS_FORMAT_S16_BE};
+// look at big-endian formats first
+sps_format_t s16_formats_to_check_be[] = {SPS_FORMAT_S16_BE, SPS_FORMAT_S16_LE};
+
+sps_format_t single_format_to_check[1];
+
+sps_format_t check_configuration_with_formats(
+ unsigned int channels, unsigned int rate, sps_format_t format,
+ int (*check_configuration)(unsigned int channels, unsigned int rate, unsigned int format)) {
+ sps_format_t response = SPS_FORMAT_UNKNOWN; // this is encoded as zero
+
+ // assume there is one single format to check
+ unsigned int number_of_formats_to_check = 1;
+ sps_format_t single_format_to_check[] = {format};
+ sps_format_t *formats_to_check = single_format_to_check;
+
+ // now, check to see if there are multiple formats to check, e.g. if the format is given as AUTO
+ // or if it doesn't specify _LE or _BE. Try to favour the host endianness:
+
+ if (config.endianness == SS_LITTLE_ENDIAN) {
+ switch (format) {
+ case SPS_FORMAT_AUTO:
+ case SPS_FORMAT_S32:
+ case SPS_FORMAT_32_LE:
+ case SPS_FORMAT_32_BE:
+ formats_to_check = all_formats_to_check_le;
+ number_of_formats_to_check = sizeof(all_formats_to_check_le) / sizeof(sps_format_t);
+ break;
+ case SPS_FORMAT_S24:
+ case SPS_FORMAT_S24_LE:
+ case SPS_FORMAT_S24_BE:
+ formats_to_check = s32_formats_to_check_le;
+ number_of_formats_to_check = sizeof(s32_formats_to_check_le) / sizeof(sps_format_t);
+ break;
+ case SPS_FORMAT_S24:
+ formats_to_check = s24_formats_to_check_le;
+ number_of_formats_to_check = sizeof(s24_formats_to_check_le) / sizeof(sps_format_t);
+ break;
+ case SPS_FORMAT_S16:
+ formats_to_check = s16_formats_to_check_le;
+ number_of_formats_to_check = sizeof(s16_formats_to_check_le) / sizeof(sps_format_t);
+ break;
+ default:
+ break;
+ };
+ } else { // big-endian
+ switch (format) {
+ case SPS_FORMAT_AUTO:
+ formats_to_check = all_formats_to_check_be;
+ number_of_formats_to_check = sizeof(all_formats_to_check_be) / sizeof(sps_format_t);
+ break;
+ case SPS_FORMAT_S32:
+ formats_to_check = s32_formats_to_check_be;
+ number_of_formats_to_check = sizeof(s32_formats_to_check_be) / sizeof(sps_format_t);
+ break;
+ case SPS_FORMAT_S24:
+ formats_to_check = s24_formats_to_check_be;
+ number_of_formats_to_check = sizeof(s24_formats_to_check_be) / sizeof(sps_format_t);
+ break;
+ case SPS_FORMAT_S16:
+ formats_to_check = s16_formats_to_check_be;
+ number_of_formats_to_check = sizeof(s16_formats_to_check_be) / sizeof(sps_format_t);
+ break;
+ default:
+ break;
+ };
+ }
+
+ unsigned int i = 0;
+ while ((i < number_of_formats_to_check) && (response == SPS_FORMAT_UNKNOWN)) {
+ if (check_configuration(channels, rate, formats_to_check[i]) == 0)
+ response = formats_to_check[i];
+ else
+ i++;
+ }
+
+ return response;
+}
+
+*/
+
+// This looks for a suitable configuration in the right order and checks possible
+// configurations using the check_configuration method passed in.
+
+// If it's the same as the most recent request, it sends back the cached reply.
+// The cache is keyed to the address of the checker method in order to distinguish
+// between requests from different backends. It's moot, as only one back end can be
+// active, but anyway... ya never just know.
+
+// but it does mean that it only remembers the last response, whichever backend it was for
+
+typedef struct {
+ int32_t encoded_request; // this will have the channels and the rate requested but not the format
+ int32_t encoded_response; // this will have the channels, rate and format suggested
+ int (*check_configuration)(unsigned int channels, unsigned int rate, unsigned int format);
+} configuration_request_and_response_t;
+
+static configuration_request_and_response_t search_for_suitable_configuration_last_response = {
+ 0, 0, NULL};
+
+// First it looks for an exact match of channels, rate or with a rate that is an even multiple.
+// If that fails it will look for an exact match of channels with the next highest rate
+// If that fails it will look for an exact match of channels with the next lowest rate
+// If that fails it increases the number channels, if available, and checks all over again
+// Finally, if that fails it reduces the number channels, if possible, and checks all over again.
+
+// Regarding the requested format: ask for the exact format only if no audio processing will be
+// done in the software or (as far as can be known) in the hardware mixer.
+// So, if ignore_volume_control is set and volume_max_db is
+// not set, and the number of output channels is greater than or equal to those requested,
+// and mixing to mono is not requested and the rate being checked is the rate requested,
+ // then look for the exact format requested.
+// Otherwise ask for the deepest format, which could be useful
+// for attenuation, mixing, transcoding or other audio processing.
+// (Haven't included checking for convolution here.)
+
+int32_t search_for_suitable_configuration(unsigned int channels, unsigned int rate,
+ unsigned int format,
+ int (*check_configuration)(unsigned int in_channels,
+ unsigned int in_rate,
+ unsigned int in_format)) {
+ int32_t reply = 0; // means nothing was found
+ if ((channels == 0) || (rate == 0))
+ debug(1, "invalid channel %u or rate %u parameters to search_for_suitable_configuration",
+ channels, rate);
+ int32_t encoded_request = CHANNELS_TO_ENCODED_FORMAT(channels) | RATE_TO_ENCODED_FORMAT(rate) |
+ FORMAT_TO_ENCODED_FORMAT(format);
+
+ if ((search_for_suitable_configuration_last_response.check_configuration ==
+ check_configuration) && // same method (implies same backend)
+ (search_for_suitable_configuration_last_response.encoded_request ==
+ encoded_request)) { // same request
+ reply = search_for_suitable_configuration_last_response
+ .encoded_response; // provide cached response...
+ }
+ if (reply == 0) { // no luck with the last response generated, if any...
+ // check for native number of channels or more... and then, if not successful, with fewer
+ // channels
+#ifdef CONFIG_FFMPEG
+ unsigned int channel_count_check_sequence[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 7, 6, 5, 4, 3, 2, 1};
+ unsigned int rates[] = {44100, 48000, 88200, 96000, 176400, 192000, 352800, 384000,
+ 64000, 32000, 22050, 16000, 11025, 8000, 5512};
+#else
+ unsigned int channel_count_check_sequence[] = {0, 1, 2};
+// unsigned int rates[] = {44100};
+#endif
+
+ unsigned int channel_count_index = channels;
+ // start looking with the required number of channels
+ unsigned int local_channels;
+ unsigned int local_rate;
+ sps_format_t local_format = SPS_FORMAT_UNKNOWN;
+ uint32_t channel_check_bit_field = 0; // set (1 << channel_count) when check done.
+ // Use it to stop checking the same channel count on the way down as well as on the way up...
+ while ((local_format == SPS_FORMAT_UNKNOWN) &&
+ (channel_count_index < (sizeof(channel_count_check_sequence) / sizeof(unsigned int)))) {
+ local_channels = channel_count_check_sequence[channel_count_index];
+ // if we haven't already checked....
+ if ((channel_check_bit_field & (1 << local_channels)) == 0) {
+ channel_check_bit_field |= (1 << local_channels); // flag that we have checked it
+ int rate_multiplier = 2;
+ local_rate = rate; // begin with the requested rate
+ debug(3, "check for an exact multiple of %u with %u channels.", rate, local_channels);
+ while ((local_rate <= 384000) && (local_format == SPS_FORMAT_UNKNOWN)) {
+ if (
+ // clang-format off
+ // check for the exact format only under these conditions, otherwise look for the best
+ (config.ignore_volume_control != 0) &&
+ (config.volume_max_db_set == 0) &&
+ (local_rate == rate) &&
+ (local_channels >= channels) &&
+ (config.playback_mode != ST_mono)
+ // clang-format on
+ ) {
+ // debug(1, "check exact");
+ local_format = check_configuration_with_formats(
+ local_channels, local_rate, (sps_format_t)format, check_configuration);
+ } else {
+ // debug(1, "check best");
+ local_format = check_configuration_with_formats(local_channels, local_rate,
+ SPS_FORMAT_S32, check_configuration);
+ }
+
+ if (local_format == SPS_FORMAT_UNKNOWN) {
+ local_rate = rate * rate_multiplier;
+ rate_multiplier = rate_multiplier * 2;
+ }
+ }
+#ifdef CONFIG_FFMPEG
+ if (local_format == SPS_FORMAT_UNKNOWN) {
+ debug(3, "check for an next highest rate above %u with %u channels.", rate,
+ local_channels);
+ unsigned int rate_pointer = 0;
+ while ((rate_pointer < sizeof(rates) / sizeof(unsigned int)) &&
+ (local_format == SPS_FORMAT_UNKNOWN)) {
+ local_rate = rates[rate_pointer];
+ if (local_rate > rate) {
+ local_format = check_configuration_with_formats(
+ local_channels, local_rate, (sps_format_t)format, check_configuration);
+ }
+ if (local_format == SPS_FORMAT_UNKNOWN) {
+ rate_pointer++;
+ }
+ }
+ }
+
+ if (local_format == SPS_FORMAT_UNKNOWN) {
+ int rate_pointer = (int)(sizeof(rates) / sizeof(unsigned int) - 1);
+ debug(3, "check for an next lowest rate below %u with %u channels.", rate,
+ local_channels);
+ while ((rate_pointer >= 0) && (local_format == SPS_FORMAT_UNKNOWN)) {
+ local_rate = rates[rate_pointer];
+ if (local_rate < rate) {
+ local_format = check_configuration_with_formats(
+ local_channels, local_rate, (sps_format_t)format, check_configuration);
+ }
+ if (local_format == SPS_FORMAT_UNKNOWN) {
+ rate_pointer--;
+ }
+ }
+ }
+#endif
+ }
+ // if unsuccessful, try with, firstly, more channels, and then, later, with fewer channels
+ if (local_format == SPS_FORMAT_UNKNOWN)
+ channel_count_index++;
+ }
+ if (local_format != SPS_FORMAT_UNKNOWN) {
+ reply = CHANNELS_TO_ENCODED_FORMAT(local_channels) | RATE_TO_ENCODED_FORMAT(local_rate) |
+ FORMAT_TO_ENCODED_FORMAT(local_format);
+
+ // cache the response
+ search_for_suitable_configuration_last_response.check_configuration = check_configuration;
+ search_for_suitable_configuration_last_response.encoded_request = encoded_request;
+ search_for_suitable_configuration_last_response.encoded_response = reply;
+
+ debug(3,
+ "output configuration search for request: %s/%u/%u, succeeded with response: %u/%s/%u.",
+ sps_format_description_string(format), rate, channels,
+ RATE_FROM_ENCODED_FORMAT(reply), // rate
+ sps_format_description_string(FORMAT_FROM_ENCODED_FORMAT(reply)), // format
+ CHANNELS_FROM_ENCODED_FORMAT(reply) // channels
+ );
+
+ } else {
+ debug(1, "output configuration search for request: %s/%u/%u failed.",
+ sps_format_description_string(format), rate, channels);
+ }
+ }
+ return reply;
}
} play_samples_type;
typedef struct {
- double current_volume_dB;
+ unsigned int channels;
+ unsigned int rate;
+ unsigned int format; // this contains an sps_format_t enum but "common.h" is not imported here.
+ char short_description[32]; // e.g. S32/44100/8.
+ char channel_map[128];
+} output_configuration_t;
+
+typedef struct {
int32_t minimum_volume_dB;
int32_t maximum_volume_dB;
-} audio_parameters;
+} volume_range_t;
+
+typedef struct {
+ volume_range_t *volume_range;
+} output_parameters_t;
+// backend interface
typedef struct {
+ // may be NULL if not implemented
void (*help)(void);
char *name;
-
// start of program
int (*init)(int argc, char **argv);
// at end of program
void (*deinit)(void);
- void (*prepare_to_play)(void); // sent when audio is received for the first time -- advance warning.
- int (*prepare)(void); // looks and sets stuff in the config data structure
+ int (*prepare)(void); // send when a play session is about to begin
+
+ // Get an encoded configuration for the given channels, rate and depth
+ // The response may have a different rate, format and channel count!
+ // See FROM_ENCODED_FORMAT macros in common.h for the encoding scheme
+ // An encoded configuration will always be positive.
+ // 0 if not successful
+ // may be NULL if not implemented
+ // sps_format is not typed because the definition is in common.h, which can't be included here,
+ // sigh.
+ int32_t (*get_configuration)(unsigned int channels, unsigned int rate, unsigned int sps_format);
+
+ // Set the output configuration
+ // Returns 0 and a channel map (if available) if successful
+ // may be NULL if not implemented.
+ // Set channel_map to NULL if you don't want it.
+ // Otherwise, a space-separated channel map string will be returned
+ // and you are responsible for freeing it.
+ // If there isn't a channel map a NULL will be returned.
+ int (*configure)(int32_t encoded_output_format, char **channel_map);
void (*start)(int sample_rate, int sample_format);
int (*play)(void *buf, int samples, int sample_type, uint32_t timestamp, uint64_t playtime);
void (*stop)(void);
- // may be null if no implemented
+ // may be NULL if not implemented
int (*is_running)(
void); // if implemented, will return 0 if everything is okay, non-zero otherwise
- // may be null if not implemented
+ // may be NULL if not implemented
void (*flush)(void);
// returns the delay before the next frame to be sent to the device would actually be audible.
- // almost certainly wrong if the buffer is empty, so put silent buffers into it to make it busy.
- // will change dynamically, so keep watching it. Implemented in ALSA only.
+ // almost certainly wrong if the buffer is empty, so play silent buffers to make it busy.
+ // will change dynamically, so keep watching it.
// returns a negative error code if there's a problem
- int (*delay)(long *the_delay); // snd_pcm_sframes_t is a signed long
+ int (*delay)(long *the_delay); // snd_pcm_sframes_t is a long
int (*stats)(uint64_t *raw_measurement_time, uint64_t *corrected_measurement_time,
uint64_t *delay,
uint64_t *frames_sent_to_dac); // use this to get the true rate of the DAC
// may be NULL, in which case soft volume is applied
void (*volume)(double vol);
- // may be NULL, in which case soft volume parameters are used
- void (*parameters)(audio_parameters *info);
+ // may be NULL, in which case defaults are used
+ output_parameters_t *(*parameters)();
// may be NULL, in which case software muting is used.
// also, will return a 1 if it is actually using the mute facility, 0 otherwise
} audio_output;
+// this looks for a suitable configuration in the right order and checks possible
+// configurations using the check_configuration method passed in.
+int32_t search_for_suitable_configuration(
+ unsigned int channels, unsigned int rate, unsigned int format,
+ int (*check_configuration)(unsigned int channels, unsigned int rate, unsigned int format));
+
audio_output *audio_get_output(const char *name);
void audio_ls_outputs(void);
-void parse_general_audio_options(void);
+void parse_audio_options(const char *named_stanza, uint32_t default_format_set,
+ uint32_t default_rate_set,
+ uint32_t default_channel_set); // look in "general" and in the named stanza
+uint32_t get_format_settings(const char *stanza_name, const char *setting_name);
+uint32_t get_rate_settings(const char *stanza_name, const char *setting_name);
+uint32_t get_channel_settings(const char *stanza_name, const char *setting_name);
#endif //_AUDIO_H
/*
* libalsa output driver. This file is part of Shairport.
* Copyright (c) Muffinman, Skaman 2013
- * Copyright (c) Mike Brady 2014 -- 2024
+ * Copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
typedef struct {
snd_pcm_format_t alsa_code;
- int frame_size;
+ int sample_size;
} format_record;
+// This array is of all the formats known to Shairport Sync, in order of the SPS_FORMAT definitions,
+// with their equivalent ALSA codes and their sample sizes in bytes.
+// If just one format is requested, then its entry is searched for in the array and checked on the
+// device
+// If auto format is requested, then each entry in turn is tried until a working format is found.
+// So, it should be in the search order.
+
+format_record fr[] = {
+ {SND_PCM_FORMAT_UNKNOWN, 0}, // unknown
+ {SND_PCM_FORMAT_S8, 1}, {SND_PCM_FORMAT_U8, 1}, {SND_PCM_FORMAT_S16_LE, 2},
+ {SND_PCM_FORMAT_S16_BE, 2}, {SND_PCM_FORMAT_S24_LE, 4}, {SND_PCM_FORMAT_S24_BE, 4},
+ {SND_PCM_FORMAT_S24_3LE, 3}, {SND_PCM_FORMAT_S24_3BE, 3}, {SND_PCM_FORMAT_S32_LE, 4},
+ {SND_PCM_FORMAT_S32_BE, 4}, {SND_PCM_FORMAT_UNKNOWN, 0}, // auto
+ {SND_PCM_FORMAT_UNKNOWN, 0}, // illegal
+};
+
int output_method_signalled = 0; // for reporting whether it's using mmap or not
int delay_type_notified = -1; // for controlling the reporting of whether the output device can do
// precision delays (e.g. alsa->pulsaudio virtual devices can't)
int use_monotonic_clock = 0; // this value will be set when the hardware is initialised
+static int32_t current_encoded_output_format; // ms 8 bits: channels; next 16 bits: rate/100;
+ // rightmost 8 bits: sps_format
+
+static char public_channel_map[128] = "";
+
+// static output_configuration_t alsa_configuration; // sample
+// static output_configuration_t *current_alsa_configuration;
+
+static volume_range_t volume_range = {0, 0};
+static output_parameters_t output_parameters = {NULL};
+
static void help(void);
static int init(int argc, char **argv);
static void deinit(void);
+static int prepare(void);
+static int32_t get_configuration(unsigned int channels, unsigned int rate, unsigned int sps_format);
+static int configure(int32_t requested_encoded_format, char **channel_map);
static void start(int i_sample_rate, int i_sample_format);
static int play(void *buf, int samples, __attribute__((unused)) int sample_type,
__attribute__((unused)) uint32_t timestamp,
static void volume(double vol);
static void do_volume(double vol);
-static int prepare(void);
static int do_play(void *buf, int samples);
-static void parameters(audio_parameters *info);
+static output_parameters_t *parameters();
static int mute(int do_mute); // returns true if it actually is allowed to use the mute
static double set_volume;
-audio_output audio_alsa = {
- .name = "alsa",
- .help = &help,
- .init = &init,
- .deinit = &deinit,
- .prepare = &prepare,
- .start = &start,
- .stop = &stop,
- .is_running = NULL,
- .flush = &flush,
- .delay = &delay,
- .play = &play,
- .stats = &stats, // will also include frames of silence sent to stop
- // standby mode
- // .rate_info = NULL,
- .mute = NULL, // a function will be provided if it can, and is allowed to,
- // do hardware mute
- .volume = NULL, // a function will be provided if it can do hardware volume
- .parameters = NULL}; // a function will be provided if it can do hardware volume
+audio_output audio_alsa = {.name = "alsa",
+ .help = &help,
+ .init = &init,
+ .deinit = &deinit,
+ .prepare = &prepare,
+ .get_configuration = &get_configuration,
+ .configure = &configure,
+ .start = &start,
+ .stop = &stop,
+ .is_running = NULL,
+ .flush = &flush,
+ .delay = &delay,
+ .play = &play,
+ .stats = &stats, // will also include frames of silence sent to stop
+ // standby mode
+ // .rate_info = NULL,
+ .mute = NULL, // a function will be provided if it can, and is allowed
+ // to, do hardware mute
+ .volume =
+ NULL, // a function will be provided if it can do hardware volume
+ .parameters = ¶meters};
+static int do_open();
+static int do_close();
pthread_mutex_t alsa_mutex = PTHREAD_MUTEX_INITIALIZER;
pthread_mutex_t alsa_mixer_mutex = PTHREAD_MUTEX_INITIALIZER;
int mute_requested_externally = 0;
int mute_requested_internally = 0;
-// for tracking how long the output device has stalled
-uint64_t stall_monitor_start_time; // zero if not initialised / not started /
- // zeroed by flush
-long stall_monitor_frame_count; // set to delay at start of time, incremented by
- // any writes
-uint64_t stall_monitor_error_threshold; // if the time is longer than this, it's
- // an error
+// for tracking if the output device has stalled
+uint64_t stall_monitor_new_frame_count_time; // when the delay was last measured
+long stall_monitor_new_frame_count; // the delay when measured last plus all subsequently added
+ // frames
+uint64_t stall_monitor_error_threshold; // if no frames have been output in a time longer than this,
+ // it's an error
snd_output_t *output = NULL;
-int frame_size; // in bytes for interleaved stereo
int alsa_device_initialised; // boolean to ensure the initialisation is only
// done once
snd_pcm_t *alsa_handle = NULL;
int alsa_handle_status =
- ENODEV; // if alsa_handle is NULL, this should say why with a unix error code
+ -ENODEV; // if alsa_handle is NULL, this should say why with a unix error code
snd_pcm_hw_params_t *alsa_params = NULL;
snd_pcm_sw_params_t *alsa_swparams = NULL;
snd_ctl_t *ctl = NULL;
int volume_based_mute_is_active =
0; // set when muting is being done by a setting the volume to a magic value
+sps_format_t disable_standby_mode_default_format;
+int disable_standby_mode_default_rate;
+int disable_standby_mode_default_channels;
+
// use this to allow the use of snd_pcm_writei or snd_pcm_mmap_writei
snd_pcm_sframes_t (*alsa_pcm_write)(snd_pcm_t *, const void *, snd_pcm_uframes_t) = snd_pcm_writei;
}
}
+// Human-readable names for snd_pcm_type_t values, indexed by the enum's
+// numeric value; used only when logging the output device's type.
+static char *device_types[] = {
+    "SND_PCM_TYPE_HW", "SND_PCM_TYPE_HOOKS", "SND_PCM_TYPE_MULTI",
+    "SND_PCM_TYPE_FILE", "SND_PCM_TYPE_NULL", "SND_PCM_TYPE_SHM",
+    "SND_PCM_TYPE_INET", "SND_PCM_TYPE_COPY", "SND_PCM_TYPE_LINEAR",
+    "SND_PCM_TYPE_ALAW", "SND_PCM_TYPE_MULAW", "SND_PCM_TYPE_ADPCM",
+    "SND_PCM_TYPE_RATE", "SND_PCM_TYPE_ROUTE", "SND_PCM_TYPE_PLUG",
+    "SND_PCM_TYPE_SHARE", "SND_PCM_TYPE_METER", "SND_PCM_TYPE_MIX",
+    "SND_PCM_TYPE_DROUTE", "SND_PCM_TYPE_LBSERVER", "SND_PCM_TYPE_LINEAR_FLOAT",
+    "SND_PCM_TYPE_LADSPA", "SND_PCM_TYPE_DMIX", "SND_PCM_TYPE_JACK",
+    "SND_PCM_TYPE_DSNOOP", "SND_PCM_TYPE_DSHARE", "SND_PCM_TYPE_IEC958",
+    "SND_PCM_TYPE_SOFTVOL", "SND_PCM_TYPE_IOPLUG", "SND_PCM_TYPE_EXTPLUG",
+    "SND_PCM_TYPE_MMAP_EMUL"};
+
+static int permissible_configuration_check_done = 0;
+
+static uint16_t permissible_configurations[SPS_RATE_HIGHEST + 1][SPS_FORMAT_HIGHEST_NATIVE + 1]
+ [8 + 1];
+
+// Probe the output device once (guarded by permissible_configuration_check_done)
+// to discover which channel counts, sample rates and sample formats it supports.
+// Unsupported entries are cleared from config.channel_set / config.rate_set /
+// config.format_set, and each rate/format/channel combination is recorded in
+// permissible_configurations[r][f][c] (0 = allowed, EINVAL = not allowed).
+// Holds alsa_mutex for the duration of the probe.
+// Returns 0 on success, otherwise the (negative) error code from an ALSA call.
+static int get_permissible_configuration_settings() {
+  int ret = 0;
+  if (permissible_configuration_check_done == 0) {
+    // stopwatch start -- used to log a warning if the probe takes too long
+    uint64_t hto = get_absolute_time_in_ns();
+    snd_pcm_hw_params_t *local_alsa_params = NULL;
+    snd_pcm_hw_params_alloca(&local_alsa_params);
+    snd_pcm_info_t *local_alsa_info;
+    snd_pcm_info_alloca(&local_alsa_info);
+    pthread_cleanup_debug_mutex_lock(&alsa_mutex, 50000, 0);
+    snd_pcm_t *temporary_alsa_handle = NULL;
+    ret = snd_pcm_open(&temporary_alsa_handle, alsa_out_dev, SND_PCM_STREAM_PLAYBACK, 0);
+    if (ret == 0) {
+      snd_pcm_type_t device_type = snd_pcm_type(temporary_alsa_handle);
+      ret = snd_pcm_info(temporary_alsa_handle, local_alsa_info);
+      if (ret == 0) {
+        int card_number = snd_pcm_info_get_card(local_alsa_info);
+
+        if (card_number >= 0) {
+          debug(2, "output device is card %d.", card_number);
+          char device_name[64] = "";
+          snprintf(device_name, sizeof(device_name) - 1, "hw:%d", card_number);
+          snd_ctl_t *handle;
+          int err = snd_ctl_open(&handle, device_name, 0);
+
+          if (err == 0) {
+            snd_ctl_card_info_t *info;
+            snd_ctl_card_info_alloca(&info);
+            err = snd_ctl_card_info(handle, info);
+            if (err == 0) {
+              debug(2, "card name: \"%s\", long name: \"%s\".", snd_ctl_card_info_get_name(info),
+                    snd_ctl_card_info_get_longname(info));
+            }
+            snd_ctl_close(handle);
+          }
+        }
+
+        debug(
+            2, "device: \"%s\", name: \"%s\", type: \"%s\", id: \"%s\", CARD=%d,DEV=%u,SUBDEV=%u.",
+            alsa_out_dev, snd_pcm_info_get_name(local_alsa_info), device_types[device_type],
+            snd_pcm_info_get_id(local_alsa_info), snd_pcm_info_get_card(local_alsa_info),
+            snd_pcm_info_get_device(local_alsa_info), snd_pcm_info_get_subdevice(local_alsa_info));
+
+        // check what numbers of channels the device can provide...
+        unsigned int c;
+        // The Raspberry Pi built-in audio jack advertises 8-channel ability,
+        // but it is not actually capable of doing it.
+        // It can handle one- and two-channel stuff.
+        if (strcmp("bcm2835 Headphones", snd_pcm_info_get_name(local_alsa_info)) == 0) {
+          debug(1, "the output is to the Raspberry Pi built-in jack -- no more than two channels "
+                   "will be used.");
+          for (c = 3; c <= 8; c++)
+            config.channel_set &=
+                ~(1 << c); // the pi built-in jack DAC can't accommodate this number of channels
+        }
+        for (c = 1; c <= 8; c++) {
+          // if it's in the channel set -- either due to a setting in the configuration file or by
+          // default, check it...
+          if ((config.channel_set & (1 << c)) != 0) {
+            snd_pcm_hw_free(temporary_alsa_handle); // remove any previous configurations
+            ret = snd_pcm_hw_params_any(temporary_alsa_handle, local_alsa_params);
+            if (ret < 0)
+              debug(1, "Broken configuration for \"%s\": no configurations available: %s\n",
+                    alsa_out_dev, snd_strerror(ret));
+            ret = snd_pcm_hw_params_set_rate_resample(temporary_alsa_handle, local_alsa_params, 0);
+            ret = snd_pcm_hw_params_test_channels(temporary_alsa_handle, local_alsa_params, c);
+            if (ret == 0) {
+              debug(3, "\"%s\" can handle %u channels.", alsa_out_dev, c);
+            } else {
+              // the device can't handle this number of channels
+              debug(3, "\"%s\" can not handle %u channels.", alsa_out_dev, c);
+              config.channel_set &=
+                  ~(1 << c); // the alsa device can't accommodate this number of channels
+            }
+          }
+        }
+
+        // check what speeds the device can handle
+        sps_rate_t r;
+        for (r = SPS_RATE_LOWEST; r <= SPS_RATE_HIGHEST; r++) {
+          // if it's in the rate set -- either due to a setting in the configuration file or by
+          // default, check it...
+          if ((config.rate_set & (1 << r)) != 0) {
+            snd_pcm_hw_free(temporary_alsa_handle); // remove any previous configurations
+            ret = snd_pcm_hw_params_any(temporary_alsa_handle, local_alsa_params);
+            if (ret < 0)
+              debug(1, "Broken configuration for \"%s\": no configurations available: %s\n",
+                    alsa_out_dev, snd_strerror(ret));
+            ret = snd_pcm_hw_params_set_rate_resample(temporary_alsa_handle, local_alsa_params, 0);
+            ret = snd_pcm_hw_params_test_rate(temporary_alsa_handle, local_alsa_params,
+                                              sps_rate_actual_rate(r),
+                                              0); // 0 means exact rate only
+            if (ret == 0) {
+              debug(3, "\"%s\" can handle a rate of %u fps.", alsa_out_dev,
+                    sps_rate_actual_rate(r));
+            } else {
+              debug(3, "\"%s\" can not handle a rate of %u fps.", alsa_out_dev,
+                    sps_rate_actual_rate(r));
+              config.rate_set &= ~(1 << r); // the alsa device doesn't do this rate
+            }
+          }
+        }
+
+        // check what formats the device can handle
+        sps_format_t f;
+        for (f = SPS_FORMAT_LOWEST; f <= SPS_FORMAT_HIGHEST_NATIVE; f++) {
+          // if it's in the format set -- either due to a setting in the configuration file or by
+          // default, check it...
+          if ((config.format_set & (1 << f)) != 0) {
+            snd_pcm_hw_free(temporary_alsa_handle); // remove any previous configurations
+            ret = snd_pcm_hw_params_any(temporary_alsa_handle, local_alsa_params);
+            if (ret < 0)
+              debug(1, "Broken configuration for \"%s\": no configurations available: %s\n",
+                    alsa_out_dev, snd_strerror(ret));
+            ret = snd_pcm_hw_params_set_rate_resample(temporary_alsa_handle, local_alsa_params, 0);
+            ret = snd_pcm_hw_params_test_format(temporary_alsa_handle, local_alsa_params,
+                                                fr[f].alsa_code);
+            if (ret == 0) {
+              debug(3, "\"%s\" can handle the %s format.", alsa_out_dev,
+                    sps_format_description_string(f));
+            } else {
+              debug(3, "\"%s\" can not handle the %s format.", alsa_out_dev,
+                    sps_format_description_string(f));
+              config.format_set &= ~(1 << f); // the alsa device doesn't do this format
+            }
+          }
+        }
+
+        // now we have the channels, rates and formats, but we need to check each combination
+        // set the permissible_configurations array (r/f/c) to EINVAL
+        for (r = SPS_RATE_LOWEST; r <= SPS_RATE_HIGHEST; r++)
+          for (f = SPS_FORMAT_LOWEST; f <= SPS_FORMAT_HIGHEST_NATIVE; f++)
+            for (c = 0; c <= 8; c++) {
+              permissible_configurations[r][f][c] = EINVAL;
+            }
+        // now check each combination of permitted rate/format/channel and see if it's really
+        // allowed
+        for (r = SPS_RATE_LOWEST; r <= SPS_RATE_HIGHEST; r++) {
+          if ((config.rate_set & (1 << r)) != 0) {
+            for (f = SPS_FORMAT_LOWEST; f <= SPS_FORMAT_HIGHEST_NATIVE; f++) {
+              if ((config.format_set & (1 << f)) != 0) {
+                for (c = 0; c <= 8; c++) {
+                  if ((config.channel_set & (1 << c)) != 0) {
+                    // debug(1, "check %u/%s/%u.", sps_rate_actual_rate(r),
+                    // sps_format_description_string(f), c);
+                    // Note: only do the check if the device is not a plug-in,
+                    // i.e. not of type SND_PCM_TYPE_PLUG. If it is a plugin, this
+                    // check may take way too long and will likely be unnecessary
+                    // anyway. Similarly for a NULL or an IOPLUG device.
+
+                    if ((device_type == SND_PCM_TYPE_PLUG) || (device_type == SND_PCM_TYPE_NULL) ||
+                        (device_type == SND_PCM_TYPE_IOPLUG)) {
+                      permissible_configurations[r][f][c] = 0;
+                    } else {
+                      snd_pcm_hw_free(temporary_alsa_handle); // remove any previous configurations
+                      ret = snd_pcm_hw_params_any(temporary_alsa_handle, local_alsa_params);
+                      if (ret == 0) {
+                        ret = snd_pcm_hw_params_set_rate_resample(temporary_alsa_handle,
+                                                                  local_alsa_params, 0);
+                        ret = snd_pcm_hw_params_test_channels(temporary_alsa_handle,
+                                                              local_alsa_params, c);
+                        if (ret == 0) {
+                          ret = snd_pcm_hw_params_test_rate(
+                              temporary_alsa_handle, local_alsa_params, sps_rate_actual_rate(r),
+                              0); // 0 means exact rate only
+                          if (ret == 0) {
+                            ret = snd_pcm_hw_params_test_format(temporary_alsa_handle,
+                                                                local_alsa_params, fr[f].alsa_code);
+                            if (ret == 0) {
+                              // debug(1, "passed: \"%s\", format: %s, rate: %u channels: %u.",
+                              // alsa_out_dev, sps_format_description_string(f),
+                              // sps_rate_actual_rate(r), c);
+                              permissible_configurations[r][f][c] =
+                                  0; // i.e. no error, so remove the EINVAL
+                            } else {
+                              debug(1, "Can't set format %s for \"%s\": %s.",
+                                    sps_format_description_string(f), alsa_out_dev,
+                                    snd_strerror(ret));
+                            }
+                          } else {
+                            debug(1, "Can't set rate of %u for \"%s\": %s.",
+                                  sps_rate_actual_rate(r), alsa_out_dev, snd_strerror(ret));
+                          }
+                        } else {
+                          debug(1, "Can't set channel count of %u for \"%s\": %s.", c, alsa_out_dev,
+                                snd_strerror(ret));
+                        }
+                      } else {
+                        debug(1, "Broken configuration for \"%s\": %s.", alsa_out_dev,
+                              snd_strerror(ret));
+                      }
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+        // close the device
+        snd_pcm_close(temporary_alsa_handle);
+        permissible_configuration_check_done = 1;
+        ret = 0; // all good here, even if the last ret was an error
+      }
+    }
+    pthread_cleanup_pop(1); // unlock the mutex
+    if (ret != 0) {
+      char errorstring[1024];
+      // ret is negative (ALSA convention), so negate it for strerror_r
+      strerror_r(-ret, (char *)errorstring, sizeof(errorstring));
+      debug(1, "get_permissible_configuration_settings: error %d (\"%s\").", ret, errorstring);
+    }
+    int64_t hot = get_absolute_time_in_ns() - hto;
+    if (hot > 200000000)
+      debug(1,
+            "get_permissible_configuration_settings: permissible configurations check took %f ms.",
+            0.000001 * hot);
+  }
+  return ret;
+}
+
static int precision_delay_and_status(snd_pcm_state_t *state, snd_pcm_sframes_t *delay,
yndk_type *using_update_timestamps);
static int standard_delay_and_status(snd_pcm_state_t *state, snd_pcm_sframes_t *delay,
// was able to use the (non-zero) update timestamps
int frames_of_silence = 4410;
- size_t size_of_silence_buffer = frames_of_silence * frame_size;
+ size_t size_of_silence_buffer =
+ frames_of_silence *
+ fr[FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format)].sample_size *
+ CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format);
void *silence = malloc(size_of_silence_buffer);
if (silence == NULL) {
debug(1, "alsa: precision_delay_available -- failed to "
"allocate memory for a "
"silent frame buffer.");
} else {
- pthread_cleanup_push(malloc_cleanup, silence);
+ pthread_cleanup_push(malloc_cleanup, &silence);
int use_dither = 0;
if ((alsa_mix_ctrl == NULL) && (config.ignore_volume_control == 0) &&
(config.airplay_volume != 0.0))
use_dither = 1;
dither_random_number_store =
- generate_zero_frames(silence, frames_of_silence, config.output_format,
+ generate_zero_frames(silence, frames_of_silence,
use_dither, // i.e. with dither
- dither_random_number_store);
+ dither_random_number_store, current_encoded_output_format);
do_play(silence, frames_of_silence);
pthread_cleanup_pop(1);
// now we can get the delay, and we'll note if it uses update timestamps
int frames_sent_break_occurred;
// if a device name ends in ",DEV=0", drop it. Then if it also begins with "CARD=", drop that too.
-static void simplify_and_printf_mutable_device_name(char *device_name) {
+static void simplify_and_printf_device_name(char *device_name) {
if (strstr(device_name, ",DEV=0") == device_name + strlen(device_name) - strlen(",DEV=0")) {
char *shortened_device_name = str_replace(device_name, ",DEV=0", "");
char *simplified_device_name = str_replace(shortened_device_name, "CARD=", "");
}
static void help(void) {
-
printf(" -d output-device set the output device, default is \"default\".\n"
" -c mixer-control set the mixer control name, default is to use no mixer.\n"
" -m mixer-device set the mixer device, default is the output device.\n"
}
}
if (hdmi_str != NULL) {
- simplify_and_printf_mutable_device_name(hdmi_str);
+ simplify_and_printf_device_name(hdmi_str);
} else if (hw_str != NULL) {
- simplify_and_printf_mutable_device_name(hw_str);
+ simplify_and_printf_device_name(hw_str);
}
if (hdmi_str != NULL)
free(hdmi_str);
if (hw_alsa_out_dev != NULL)
free(hw_alsa_out_dev);
hw_alsa_out_dev = str_replace(alsa_out_dev, "hdmi:", "hw:");
-} // ugh -- not static!
+}
// assuming pthread cancellation is disabled
// returns zero of all is okay, a Unx error code if there's a problem
return response;
}
-// This array is a sequence of the output rates to be tried if automatic speed selection is
-// requested.
-// There is no benefit to upconverting the frame rate, other than for compatibility.
-// The lowest rate that the DAC is capable of is chosen.
-
-unsigned int auto_speed_output_rates[] = {
- 44100,
- 88200,
- 176400,
- 352800,
-};
-
-// This array is of all the formats known to Shairport Sync, in order of the SPS_FORMAT definitions,
-// with their equivalent alsa codes and their frame sizes.
-// If just one format is requested, then its entry is searched for in the array and checked on the
-// device
-// If auto format is requested, then each entry in turn is tried until a working format is found.
-// So, it should be in the search order.
-
-format_record fr[] = {
- {SND_PCM_FORMAT_UNKNOWN, 0}, // unknown
- {SND_PCM_FORMAT_S8, 2}, {SND_PCM_FORMAT_U8, 2}, {SND_PCM_FORMAT_S16, 4},
- {SND_PCM_FORMAT_S16_LE, 4}, {SND_PCM_FORMAT_S16_BE, 4}, {SND_PCM_FORMAT_S24, 8},
- {SND_PCM_FORMAT_S24_LE, 8}, {SND_PCM_FORMAT_S24_BE, 8}, {SND_PCM_FORMAT_S24_3LE, 6},
- {SND_PCM_FORMAT_S24_3BE, 6}, {SND_PCM_FORMAT_S32, 8}, {SND_PCM_FORMAT_S32_LE, 8},
- {SND_PCM_FORMAT_S32_BE, 8}, {SND_PCM_FORMAT_UNKNOWN, 0}, // auto
- {SND_PCM_FORMAT_UNKNOWN, 0}, // illegal
-};
-
-// This array is the sequence of formats to be tried if automatic selection of the format is
-// requested.
-// Ideally, audio should pass through Shairport Sync unaltered, apart from occasional interpolation.
-// If the user chooses a hardware mixer, then audio could go straight through, unaltered, as signed
-// 16 bit stereo.
-// However, the user might, at any point, select an option that requires modification, such as
-// stereo to mono mixing,
-// additional volume attenuation, convolution, and so on. For this reason,
-// we look for the greatest depth the DAC is capable of, since upconverting it is completely
-// lossless.
-// If audio processing is required, then the dither that must be added will
-// be added at the lowest possible level.
-// Hence, selecting the greatest bit depth is always either beneficial or neutral.
-
-sps_format_t auto_format_check_sequence[] = {
- SPS_FORMAT_S32, SPS_FORMAT_S32_LE, SPS_FORMAT_S32_BE, SPS_FORMAT_S24, SPS_FORMAT_S24_LE,
- SPS_FORMAT_S24_BE, SPS_FORMAT_S24_3LE, SPS_FORMAT_S24_3BE, SPS_FORMAT_S16, SPS_FORMAT_S16_LE,
- SPS_FORMAT_S16_BE, SPS_FORMAT_S8, SPS_FORMAT_U8,
-};
-
// assuming pthread cancellation is disabled
-// if do_auto_setting is true and auto format or auto speed has been requested,
-// select the settings as appropriate and store them
-static int actual_open_alsa_device(int do_auto_setup) {
+static int actual_open_alsa_device() {
// the alsa mutex is already acquired when this is called
- const snd_pcm_uframes_t minimal_buffer_headroom =
- 352 * 2; // we accept this much headroom in the hardware buffer, but we'll
- // accept less
- /*
- const snd_pcm_uframes_t requested_buffer_headroom =
- minimal_buffer_headroom + 2048; // we ask for this much headroom in the
- // hardware buffer, but we'll accept
- less
- */
-
- int ret, dir = 0;
- unsigned int
- actual_sample_rate; // this will be given the rate requested and will be given the actual rate
- // snd_pcm_uframes_t frames = 441 * 10;
- snd_pcm_uframes_t actual_buffer_length;
- snd_pcm_access_t access;
-
- // ensure no calls are made to the alsa device enquiring about the buffer
- // length if
- // synchronisation is disabled.
- if (config.no_sync != 0)
- audio_alsa.delay = NULL;
-
- // ensure no calls are made to the alsa device enquiring about the buffer
- // length if
- // synchronisation is disabled.
- if (config.no_sync != 0)
- audio_alsa.delay = NULL;
-
- ret = snd_pcm_open(&alsa_handle, alsa_out_dev, SND_PCM_STREAM_PLAYBACK, 0);
- // EHOSTDOWN seems to signify that it's a PipeWire pseudo device that can't be accessed by this
- // user. So, try the first device ALSA device and log it.
- if ((ret == -EHOSTDOWN) && (strcmp(alsa_out_dev, "default") == 0)) {
- ret = snd_pcm_open(&alsa_handle, "hw:0", SND_PCM_STREAM_PLAYBACK, 0);
- if ((ret == 0) || (ret == -EBUSY)) {
- // being busy should be okay
- inform("the default ALSA device is inaccessible -- \"hw:0\" used instead.", alsa_out_dev);
- set_alsa_out_dev("hw:0");
+ int result = 0;
+ if (current_encoded_output_format != 0) {
+
+ unsigned int rate = RATE_FROM_ENCODED_FORMAT(current_encoded_output_format);
+ sps_format_t format = (sps_format_t)FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format);
+ unsigned int channels = CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format);
+
+ int ret, dir = 0;
+    // unsigned int actual_sample_rate; // will be given the requested rate, then the actual rate
+    // snd_pcm_uframes_t frames = 441 * 10;
+ snd_pcm_uframes_t actual_buffer_length_in_frames;
+ snd_pcm_access_t access;
+
+ ret = snd_pcm_open(&alsa_handle, alsa_out_dev, SND_PCM_STREAM_PLAYBACK, 0);
+ // EHOSTDOWN seems to signify that it's a PipeWire pseudo device that can't be accessed by this
+ // user. So, try the first device ALSA device and log it.
+ if ((ret == -EHOSTDOWN) && (strcmp(alsa_out_dev, "default") == 0)) {
+ ret = snd_pcm_open(&alsa_handle, "hw:0", SND_PCM_STREAM_PLAYBACK, 0);
+ if ((ret == 0) || (ret == -EBUSY)) {
+ // being busy should be okay
+ inform("the default ALSA device is inaccessible -- \"hw:0\" used instead.");
+ set_alsa_out_dev("hw:0");
+ }
}
- }
- if (ret == 0) {
- if (alsa_handle_status == -EBUSY)
- warn("The output device \"%s\" is no longer busy and will be used by Shairport Sync.",
- alsa_out_dev);
- alsa_handle_status = ret; // all cool
- } else {
- alsa_handle = NULL; // to be sure to be sure
- if (ret == -EBUSY) {
- if (alsa_handle_status != -EBUSY)
- warn("The output device \"%s\" is busy and can't be used by Shairport Sync at present.",
+ if (ret == 0) {
+ if (alsa_handle_status == -EBUSY)
+ warn("The output device \"%s\" is no longer busy and will be used by Shairport Sync.",
alsa_out_dev);
- debug(2, "the alsa output_device \"%s\" is busy.", alsa_out_dev);
+ alsa_handle_status = ret; // all cool
+ } else {
+ alsa_handle = NULL; // to be sure to be sure
+ if (ret == -EBUSY) {
+ if (alsa_handle_status != -EBUSY)
+ warn("The output device \"%s\" is busy and can't be used by Shairport Sync at present.",
+ alsa_out_dev);
+ debug(2, "the alsa output_device \"%s\" is busy.", alsa_out_dev);
+ }
+ alsa_handle_status = ret;
+ frames_sent_break_occurred = 1;
+ return ret;
}
- alsa_handle_status = ret;
- frames_sent_break_occurred = 1;
- return ret;
- }
- snd_pcm_hw_params_alloca(&alsa_params);
- snd_pcm_sw_params_alloca(&alsa_swparams);
+ snd_pcm_hw_params_alloca(&alsa_params);
+ snd_pcm_sw_params_alloca(&alsa_swparams);
- ret = snd_pcm_hw_params_any(alsa_handle, alsa_params);
- if (ret < 0) {
- die("audio_alsa: Broken configuration for device \"%s\": no configurations "
- "available",
- alsa_out_dev);
- return ret;
- }
-
- if ((config.no_mmap == 0) &&
- (snd_pcm_hw_params_set_access(alsa_handle, alsa_params, SND_PCM_ACCESS_MMAP_INTERLEAVED) >=
- 0)) {
- if (output_method_signalled == 0) {
- debug(3, "Output written using MMAP");
- output_method_signalled = 1;
- }
- access = SND_PCM_ACCESS_MMAP_INTERLEAVED;
- alsa_pcm_write = snd_pcm_mmap_writei;
- } else {
- if (output_method_signalled == 0) {
- debug(3, "Output written with RW");
- output_method_signalled = 1;
+ ret = snd_pcm_hw_params_any(alsa_handle, alsa_params);
+ if (ret < 0) {
+ die("audio_alsa: Broken configuration for device \"%s\": no configurations "
+ "available",
+ alsa_out_dev);
+ return ret;
}
- access = SND_PCM_ACCESS_RW_INTERLEAVED;
- alsa_pcm_write = snd_pcm_writei;
- }
-
- ret = snd_pcm_hw_params_set_access(alsa_handle, alsa_params, access);
- if (ret < 0) {
- die("audio_alsa: Access type not available for device \"%s\": %s", alsa_out_dev,
- snd_strerror(ret));
- return ret;
- }
-
- ret = snd_pcm_hw_params_set_channels(alsa_handle, alsa_params, 2);
- if (ret < 0) {
- die("audio_alsa: Channels count (2) not available for device \"%s\": %s", alsa_out_dev,
- snd_strerror(ret));
- return ret;
- }
-
- snd_pcm_format_t sf;
- if ((do_auto_setup == 0) || (config.output_format_auto_requested == 0)) { // no auto format
- if ((config.output_format > SPS_FORMAT_UNKNOWN) && (config.output_format < SPS_FORMAT_AUTO)) {
- sf = fr[config.output_format].alsa_code;
- frame_size = fr[config.output_format].frame_size;
+ if ((config.no_mmap == 0) &&
+ (snd_pcm_hw_params_set_access(alsa_handle, alsa_params, SND_PCM_ACCESS_MMAP_INTERLEAVED) >=
+ 0)) {
+ if (output_method_signalled == 0) {
+ debug(3, "Output written using MMAP");
+ output_method_signalled = 1;
+ }
+ access = SND_PCM_ACCESS_MMAP_INTERLEAVED;
+ alsa_pcm_write = snd_pcm_mmap_writei;
} else {
- warn("alsa: unexpected output format %d. Set to S16_LE.", config.output_format);
- config.output_format = SPS_FORMAT_S16_LE;
- sf = fr[config.output_format].alsa_code;
- frame_size = fr[config.output_format].frame_size;
+ if (output_method_signalled == 0) {
+ debug(3, "Output written with RW");
+ output_method_signalled = 1;
+ }
+ access = SND_PCM_ACCESS_RW_INTERLEAVED;
+ alsa_pcm_write = snd_pcm_writei;
}
- ret = snd_pcm_hw_params_set_format(alsa_handle, alsa_params, sf);
+
+ ret = snd_pcm_hw_params_set_access(alsa_handle, alsa_params, access);
if (ret < 0) {
- die("audio_alsa: Alsa sample format %d not available for device \"%s\": %s", sf, alsa_out_dev,
- snd_strerror(ret));
+ die("alsa: access type not available for device \"%s\": %s", alsa_out_dev, snd_strerror(ret));
return ret;
}
- } else { // auto format
- int number_of_formats_to_try;
- sps_format_t *formats;
- formats = auto_format_check_sequence;
- number_of_formats_to_try = sizeof(auto_format_check_sequence) / sizeof(sps_format_t);
- int i = 0;
- int format_found = 0;
- sps_format_t trial_format = SPS_FORMAT_UNKNOWN;
- while ((i < number_of_formats_to_try) && (format_found == 0)) {
- trial_format = formats[i];
- sf = fr[trial_format].alsa_code;
- frame_size = fr[trial_format].frame_size;
- ret = snd_pcm_hw_params_set_format(alsa_handle, alsa_params, sf);
- if (ret == 0)
- format_found = 1;
- else
- i++;
- }
- if (ret == 0) {
- config.output_format = trial_format;
- debug(2, "alsa: output format chosen is \"%s\".",
- sps_format_description_string(config.output_format));
- } else {
- die("audio_alsa: Could not automatically set the output format for device \"%s\": %s",
- alsa_out_dev, snd_strerror(ret));
+
+ ret = snd_pcm_hw_params_set_channels(alsa_handle, alsa_params, channels);
+ if (ret < 0) {
+ die("alsa: %u channels not available for device \"%s\": %s", channels, alsa_out_dev,
+ snd_strerror(ret));
return ret;
}
- }
- if ((do_auto_setup == 0) || (config.output_rate_auto_requested == 0)) { // no auto format
- actual_sample_rate =
- config.output_rate; // this is the requested rate -- it'll be changed to the actual rate
- ret = snd_pcm_hw_params_set_rate_near(alsa_handle, alsa_params, &actual_sample_rate, &dir);
+ snd_pcm_format_t sf = fr[format].alsa_code;
+ ret = snd_pcm_hw_params_set_format(alsa_handle, alsa_params, sf);
if (ret < 0) {
- die("audio_alsa: The frame rate of %i frames per second is not available for playback: %s",
- config.output_rate, snd_strerror(ret));
+ die("alsa: sample format %s not available for device \"%s\": %s",
+ sps_format_description_string(format), alsa_out_dev, snd_strerror(ret));
return ret;
}
- } else {
- int number_of_speeds_to_try;
- unsigned int *speeds;
- speeds = auto_speed_output_rates;
- number_of_speeds_to_try = sizeof(auto_speed_output_rates) / sizeof(int);
+ // on some devices, it seems that setting the rate directly doesn't work.
+ dir = 0;
+ unsigned int actual_sample_rate = rate;
+ ret = snd_pcm_hw_params_set_rate_near(alsa_handle, alsa_params, &actual_sample_rate, &dir);
- int i = 0;
- int speed_found = 0;
+ if ((ret < 0) || (actual_sample_rate != rate)) {
+ die("alsa: The frame rate of %i frames per second is not available for playback: %s", rate,
+ snd_strerror(ret));
+ return ret;
+ }
- while ((i < number_of_speeds_to_try) && (speed_found == 0)) {
- actual_sample_rate = speeds[i];
- ret = snd_pcm_hw_params_set_rate_near(alsa_handle, alsa_params, &actual_sample_rate, &dir);
- if (ret == 0) {
- speed_found = 1;
- if (actual_sample_rate != speeds[i])
- die("The output DAC can not be set to %d frames per second (fps). The nearest speed "
- "available is %d fps.",
- speeds[i], actual_sample_rate);
+ if (set_period_size_request != 0) {
+ debug(1, "Attempting to set the period size to %lu", period_size_requested);
+ ret = snd_pcm_hw_params_set_period_size_near(alsa_handle, alsa_params, &period_size_requested,
+ &dir);
+ if (ret < 0) {
+ warn("alsa: cannot set period size of %lu: %s", period_size_requested, snd_strerror(ret));
+ return ret;
} else {
- i++;
+ snd_pcm_uframes_t actual_period_size;
+ snd_pcm_hw_params_get_period_size(alsa_params, &actual_period_size, &dir);
+ if (actual_period_size != period_size_requested)
+ inform("Actual period size set to a different value than requested. "
+ "Requested: %lu, actual "
+ "setting: %lu",
+ period_size_requested, actual_period_size);
}
}
- if (ret == 0) {
- config.output_rate = actual_sample_rate;
- debug(2, "alsa: output speed chosen is %d.", config.output_rate);
- } else {
- die("audio_alsa: Could not automatically set the output rate for device \"%s\": %s",
- alsa_out_dev, snd_strerror(ret));
- return ret;
+
+    if (set_buffer_size_request != 0) {
+      debug(1, "Attempting to set the buffer size to %lu", buffer_size_requested);
+      ret =
+          snd_pcm_hw_params_set_buffer_size_near(alsa_handle, alsa_params, &buffer_size_requested);
+      if (ret < 0) {
+        warn("alsa: cannot set buffer size of %lu: %s", buffer_size_requested, snd_strerror(ret));
+        return ret;
+      } else {
+        snd_pcm_uframes_t actual_buffer_size;
+        snd_pcm_hw_params_get_buffer_size(alsa_params, &actual_buffer_size);
+        // this stanza reports the buffer size, not the period size
+        if (actual_buffer_size != buffer_size_requested)
+          inform("Actual buffer size set to a different value than requested. "
+                 "Requested: %lu, actual "
+                 "setting: %lu",
+                 buffer_size_requested, actual_buffer_size);
+      }
+    }
- }
- if (set_period_size_request != 0) {
- debug(1, "Attempting to set the period size to %lu", period_size_requested);
- ret = snd_pcm_hw_params_set_period_size_near(alsa_handle, alsa_params, &period_size_requested,
- &dir);
+ ret = snd_pcm_hw_params(alsa_handle, alsa_params);
if (ret < 0) {
- warn("audio_alsa: cannot set period size of %lu: %s", period_size_requested,
- snd_strerror(ret));
+ die("alsa: Unable to set hw parameters for device \"%s\": %s.", alsa_out_dev,
+ snd_strerror(ret));
return ret;
- } else {
+ }
+
+ // check parameters after attempting to set them
+
+ if (set_period_size_request != 0) {
snd_pcm_uframes_t actual_period_size;
snd_pcm_hw_params_get_period_size(alsa_params, &actual_period_size, &dir);
if (actual_period_size != period_size_requested)
"setting: %lu",
period_size_requested, actual_period_size);
}
- }
- if (set_buffer_size_request != 0) {
- debug(1, "Attempting to set the buffer size to %lu", buffer_size_requested);
- ret = snd_pcm_hw_params_set_buffer_size_near(alsa_handle, alsa_params, &buffer_size_requested);
- if (ret < 0) {
- warn("audio_alsa: cannot set buffer size of %lu: %s", buffer_size_requested,
- snd_strerror(ret));
- return ret;
- } else {
+ if (set_buffer_size_request != 0) {
snd_pcm_uframes_t actual_buffer_size;
snd_pcm_hw_params_get_buffer_size(alsa_params, &actual_buffer_size);
if (actual_buffer_size != buffer_size_requested)
"setting: %lu",
buffer_size_requested, actual_buffer_size);
}
- }
- ret = snd_pcm_hw_params(alsa_handle, alsa_params);
- if (ret < 0) {
- die("audio_alsa: Unable to set hw parameters for device \"%s\": %s.", alsa_out_dev,
- snd_strerror(ret));
- return ret;
- }
+ use_monotonic_clock = snd_pcm_hw_params_is_monotonic(alsa_params);
- // check parameters after attempting to set them
+ ret = snd_pcm_hw_params_get_buffer_size(alsa_params, &actual_buffer_length_in_frames);
+ if (ret < 0) {
+ warn("alsa: unable to get hw buffer length for device \"%s\": %s.", alsa_out_dev,
+ snd_strerror(ret));
+ return ret;
+ }
- if (set_period_size_request != 0) {
- snd_pcm_uframes_t actual_period_size;
- snd_pcm_hw_params_get_period_size(alsa_params, &actual_period_size, &dir);
- if (actual_period_size != period_size_requested)
- inform("Actual period size set to a different value than requested. "
- "Requested: %lu, actual "
- "setting: %lu",
- period_size_requested, actual_period_size);
- }
+ ret = snd_pcm_sw_params_current(alsa_handle, alsa_swparams);
+ if (ret < 0) {
+ warn("alsa: unable to get current sw parameters for device \"%s\": "
+ "%s.",
+ alsa_out_dev, snd_strerror(ret));
+ return ret;
+ }
- if (set_buffer_size_request != 0) {
- snd_pcm_uframes_t actual_buffer_size;
- snd_pcm_hw_params_get_buffer_size(alsa_params, &actual_buffer_size);
- if (actual_buffer_size != buffer_size_requested)
- inform("Actual period size set to a different value than requested. "
- "Requested: %lu, actual "
- "setting: %lu",
- buffer_size_requested, actual_buffer_size);
- }
+ ret = snd_pcm_sw_params_set_tstamp_mode(alsa_handle, alsa_swparams, SND_PCM_TSTAMP_ENABLE);
+ if (ret < 0) {
+ warn("alsa: can't enable timestamp mode of device: \"%s\": %s.", alsa_out_dev,
+ snd_strerror(ret));
+ return ret;
+ }
- if (actual_sample_rate != config.output_rate) {
- die("Can't set the output DAC to the requested frame rate of %d fps.", config.output_rate);
- return -EINVAL;
- }
+ /* write the sw parameters */
+ ret = snd_pcm_sw_params(alsa_handle, alsa_swparams);
+ if (ret < 0) {
+ warn("alsa: unable to set software parameters of device: \"%s\": %s.", alsa_out_dev,
+ snd_strerror(ret));
+ return ret;
+ }
- use_monotonic_clock = snd_pcm_hw_params_is_monotonic(alsa_params);
+ ret = snd_pcm_prepare(alsa_handle);
+ if (ret < 0) {
+ warn("alsa: unable to prepare the device: \"%s\": %s.", alsa_out_dev, snd_strerror(ret));
+ return ret;
+ }
- ret = snd_pcm_hw_params_get_buffer_size(alsa_params, &actual_buffer_length);
- if (ret < 0) {
- warn("audio_alsa: Unable to get hw buffer length for device \"%s\": %s.", alsa_out_dev,
- snd_strerror(ret));
- return ret;
- }
+ if (config.use_precision_timing == YNA_YES)
+ delay_and_status = precision_delay_and_status;
+ else if (config.use_precision_timing == YNA_AUTO) {
+ if (precision_delay_available()) {
+ delay_and_status = precision_delay_and_status;
+ debug(2, "alsa: precision timing selected for \"auto\" mode");
+ }
+ }
- ret = snd_pcm_sw_params_current(alsa_handle, alsa_swparams);
- if (ret < 0) {
- warn("audio_alsa: Unable to get current sw parameters for device \"%s\": "
- "%s.",
- alsa_out_dev, snd_strerror(ret));
- return ret;
- }
+ if (alsa_characteristics_already_listed == 0) {
+ alsa_characteristics_already_listed = 1;
+ int log_level = 2; // the level at which debug information should be output
+ // int rc;
+ snd_pcm_access_t access_type;
+ snd_pcm_format_t format_type;
+ snd_pcm_subformat_t subformat_type;
+ // unsigned int val, val2;
+ unsigned int uval, uval2;
+ int sval;
+ int direction;
+ snd_pcm_uframes_t frames;
- ret = snd_pcm_sw_params_set_tstamp_mode(alsa_handle, alsa_swparams, SND_PCM_TSTAMP_ENABLE);
- if (ret < 0) {
- warn("audio_alsa: Can't enable timestamp mode of device: \"%s\": %s.", alsa_out_dev,
- snd_strerror(ret));
- return ret;
- }
+ debug(log_level, "PCM handle name = '%s'", snd_pcm_name(alsa_handle));
- /* write the sw parameters */
- ret = snd_pcm_sw_params(alsa_handle, alsa_swparams);
- if (ret < 0) {
- warn("audio_alsa: Unable to set software parameters of device: \"%s\": %s.", alsa_out_dev,
- snd_strerror(ret));
- return ret;
- }
+ debug(log_level, "alsa device parameters:");
- ret = snd_pcm_prepare(alsa_handle);
- if (ret < 0) {
- warn("audio_alsa: Unable to prepare the device: \"%s\": %s.", alsa_out_dev, snd_strerror(ret));
- return ret;
- }
+ snd_pcm_hw_params_get_access(alsa_params, &access_type);
+ debug(log_level, " access type = %s", snd_pcm_access_name(access_type));
- if (actual_buffer_length < config.audio_backend_buffer_desired_length + minimal_buffer_headroom) {
- /*
- // the dac buffer is too small, so let's try to set it
- buffer_size =
- config.audio_backend_buffer_desired_length + requested_buffer_headroom;
- ret = snd_pcm_hw_params_set_buffer_size_near(alsa_handle, alsa_params,
- &buffer_size);
- if (ret < 0)
- die("audio_alsa: Unable to set hw buffer size to %lu for device \"%s\": "
- "%s.",
- config.audio_backend_buffer_desired_length +
- requested_buffer_headroom,
- alsa_out_dev, snd_strerror(ret));
- if (config.audio_backend_buffer_desired_length + minimal_buffer_headroom >
- buffer_size) {
- die("audio_alsa: Can't set hw buffer size to %lu or more for device "
- "\"%s\". Requested size: %lu, granted size: %lu.",
- config.audio_backend_buffer_desired_length + minimal_buffer_headroom,
- alsa_out_dev, config.audio_backend_buffer_desired_length +
- requested_buffer_headroom,
- buffer_size);
- }
- */
- debug(1,
- "The alsa buffer is smaller (%lu bytes) than the desired backend "
- "buffer "
- "length (%ld) you have chosen.",
- actual_buffer_length, config.audio_backend_buffer_desired_length);
- }
+ snd_pcm_hw_params_get_format(alsa_params, &format_type);
+ debug(log_level, " format = '%s' (%s)", snd_pcm_format_name(format_type),
+ snd_pcm_format_description(format_type));
- if (config.use_precision_timing == YNA_YES)
- delay_and_status = precision_delay_and_status;
- else if (config.use_precision_timing == YNA_AUTO) {
- if (precision_delay_available()) {
- delay_and_status = precision_delay_and_status;
- debug(2, "alsa: precision timing selected for \"auto\" mode");
- }
- }
+ snd_pcm_hw_params_get_subformat(alsa_params, &subformat_type);
+ debug(log_level, " subformat = '%s' (%s)", snd_pcm_subformat_name(subformat_type),
+ snd_pcm_subformat_description(subformat_type));
- if (alsa_characteristics_already_listed == 0) {
- alsa_characteristics_already_listed = 1;
- int log_level = 2; // the level at which debug information should be output
- // int rc;
- snd_pcm_access_t access_type;
- snd_pcm_format_t format_type;
- snd_pcm_subformat_t subformat_type;
- // unsigned int val, val2;
- unsigned int uval, uval2;
- int sval;
- int dir;
- snd_pcm_uframes_t frames;
-
- debug(log_level, "PCM handle name = '%s'", snd_pcm_name(alsa_handle));
-
- // ret = snd_pcm_hw_params_any(alsa_handle, alsa_params);
- // if (ret < 0) {
- // die("audio_alsa: Cannpot get configuration for
- // device
- //\"%s\":
- // no
- // configurations
- //"
- // "available",
- // alsa_out_dev);
- // }
-
- debug(log_level, "alsa device parameters:");
-
- snd_pcm_hw_params_get_access(alsa_params, &access_type);
- debug(log_level, " access type = %s", snd_pcm_access_name(access_type));
-
- snd_pcm_hw_params_get_format(alsa_params, &format_type);
- debug(log_level, " format = '%s' (%s)", snd_pcm_format_name(format_type),
- snd_pcm_format_description(format_type));
-
- snd_pcm_hw_params_get_subformat(alsa_params, &subformat_type);
- debug(log_level, " subformat = '%s' (%s)", snd_pcm_subformat_name(subformat_type),
- snd_pcm_subformat_description(subformat_type));
-
- snd_pcm_hw_params_get_channels(alsa_params, &uval);
- debug(log_level, " number of channels = %u", uval);
-
- sval = snd_pcm_hw_params_get_sbits(alsa_params);
- debug(log_level, " number of significant bits = %d", sval);
-
- snd_pcm_hw_params_get_rate(alsa_params, &uval, &dir);
- switch (dir) {
- case -1:
- debug(log_level, " rate = %u frames per second (<).", uval);
- break;
- case 0:
- debug(log_level, " rate = %u frames per second (precisely).", uval);
- break;
- case 1:
- debug(log_level, " rate = %u frames per second (>).", uval);
- break;
- }
+ snd_pcm_hw_params_get_channels(alsa_params, &uval);
+ debug(log_level, " number of channels = %u", uval);
- if ((snd_pcm_hw_params_get_rate_numden(alsa_params, &uval, &uval2) == 0) && (uval2 != 0))
- // watch for a divide by zero too!
- debug(log_level, " precise (rational) rate = %.3f frames per second (i.e. %u/%u).", uval,
- uval2, ((double)uval) / uval2);
- else
- debug(log_level, " precise (rational) rate information unavailable.");
+ sval = snd_pcm_hw_params_get_sbits(alsa_params);
+ debug(log_level, " number of significant bits = %d", sval);
- snd_pcm_hw_params_get_period_time(alsa_params, &uval, &dir);
- switch (dir) {
- case -1:
- debug(log_level, " period_time = %u us (<).", uval);
- break;
- case 0:
- debug(log_level, " period_time = %u us (precisely).", uval);
- break;
- case 1:
- debug(log_level, " period_time = %u us (>).", uval);
- break;
- }
+ snd_pcm_hw_params_get_rate(alsa_params, &uval, &direction);
+ switch (direction) {
+ case -1:
+ debug(log_level, " rate = %u frames per second (<).", uval);
+ break;
+ case 0:
+ debug(log_level, " rate = %u frames per second (precisely).", uval);
+ break;
+ case 1:
+ debug(log_level, " rate = %u frames per second (>).", uval);
+ break;
+ }
- snd_pcm_hw_params_get_period_size(alsa_params, &frames, &dir);
- switch (dir) {
- case -1:
- debug(log_level, " period_size = %lu frames (<).", frames);
- break;
- case 0:
- debug(log_level, " period_size = %lu frames (precisely).", frames);
- break;
- case 1:
- debug(log_level, " period_size = %lu frames (>).", frames);
- break;
- }
+ if ((snd_pcm_hw_params_get_rate_numden(alsa_params, &uval, &uval2) == 0) && (uval2 != 0))
+ // watch for a divide by zero too!
+ debug(log_level, " precise (rational) rate = %.3f frames per second (i.e. %u/%u).", (1.0 * uval) / uval2, uval,
+ uval2);
+ else
+ debug(log_level, " precise (rational) rate information unavailable.");
- snd_pcm_hw_params_get_buffer_time(alsa_params, &uval, &dir);
- switch (dir) {
- case -1:
- debug(log_level, " buffer_time = %u us (<).", uval);
- break;
- case 0:
- debug(log_level, " buffer_time = %u us (precisely).", uval);
- break;
- case 1:
- debug(log_level, " buffer_time = %u us (>).", uval);
- break;
- }
+ snd_pcm_hw_params_get_period_time(alsa_params, &uval, &direction);
+ switch (direction) {
+ case -1:
+ debug(log_level, " period_time = %u us (<).", uval);
+ break;
+ case 0:
+ debug(log_level, " period_time = %u us (precisely).", uval);
+ break;
+ case 1:
+ debug(log_level, " period_time = %u us (>).", uval);
+ break;
+ }
- snd_pcm_hw_params_get_buffer_size(alsa_params, &frames);
- switch (dir) {
- case -1:
- debug(log_level, " buffer_size = %lu frames (<).", frames);
- break;
- case 0:
- debug(log_level, " buffer_size = %lu frames (precisely).", frames);
- break;
- case 1:
- debug(log_level, " buffer_size = %lu frames (>).", frames);
- break;
- }
+ snd_pcm_hw_params_get_period_size(alsa_params, &frames, &direction);
+ switch (direction) {
+ case -1:
+ debug(log_level, " period_size = %lu frames (<).", frames);
+ break;
+ case 0:
+ debug(log_level, " period_size = %lu frames (precisely).", frames);
+ break;
+ case 1:
+ debug(log_level, " period_size = %lu frames (>).", frames);
+ break;
+ }
- snd_pcm_hw_params_get_periods(alsa_params, &uval, &dir);
- switch (dir) {
- case -1:
- debug(log_level, " periods_per_buffer = %u (<).", uval);
- break;
- case 0:
- debug(log_level, " periods_per_buffer = %u (precisely).", uval);
- break;
- case 1:
- debug(log_level, " periods_per_buffer = %u (>).", uval);
- break;
+ snd_pcm_hw_params_get_buffer_time(alsa_params, &uval, &direction);
+ switch (direction) {
+ case -1:
+ debug(log_level, " buffer_time = %u us (<).", uval);
+ break;
+ case 0:
+ debug(log_level, " buffer_time = %u us (precisely).", uval);
+ break;
+ case 1:
+ debug(log_level, " buffer_time = %u us (>).", uval);
+ break;
+ }
+
+ snd_pcm_hw_params_get_buffer_size(alsa_params, &frames);
+ // snd_pcm_hw_params_get_buffer_size() takes no direction argument, so no
+ // (<)/(>) qualifier applies here -- switching on the stale "direction" value
+ // left over from the buffer_time query above would print a meaningless suffix.
+ debug(log_level, " buffer_size = %lu frames.", frames);
+
+ snd_pcm_hw_params_get_periods(alsa_params, &uval, &direction);
+ switch (direction) {
+ case -1:
+ debug(log_level, " periods_per_buffer = %u (<).", uval);
+ break;
+ case 0:
+ debug(log_level, " periods_per_buffer = %u (precisely).", uval);
+ break;
+ case 1:
+ debug(log_level, " periods_per_buffer = %u (>).", uval);
+ break;
+ }
}
+ stall_monitor_new_frame_count = 0;
+ stall_monitor_new_frame_count_time = 0;
+ } else {
+ debug(1, "no current_alsa_configuration");
+ result = -1;
}
- return 0;
+ return result;
}
-static int open_alsa_device(int do_auto_setup) {
+static int open_alsa_device() {
int result;
int oldState;
pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState); // make this un-cancellable
- result = actual_open_alsa_device(do_auto_setup);
+ result = actual_open_alsa_device();
+ if (result != 0)
+ debug(1, "Error in open_alsa_device: %d.", result);
pthread_setcancelstate(oldState, NULL);
return result;
}
// if there's a hardware mixer, it needs to be initialised before use
if (alsa_mix_ctrl == NULL) {
audio_alsa.volume = NULL;
- audio_alsa.parameters = NULL;
audio_alsa.mute = NULL;
+ output_parameters.volume_range = NULL; // until we know, we won't offer a volume range
} else {
debug(2, "alsa: hardware mixer prepare");
int oldState;
if (snd_mixer_selem_get_playback_dB_range(alsa_mix_elem, &alsa_mix_mindb,
&alsa_mix_maxdb) == 0) {
- audio_alsa.volume = &volume; // insert the volume function now we
- // know it can do dB stuff
- audio_alsa.parameters = ¶meters; // likewise the parameters stuff
if (alsa_mix_mindb == SND_CTL_TLV_DB_GAIN_MUTE) {
// For instance, the Raspberry Pi does this
debug(2, "Lowest dB value is a mute");
// going to be SND_CTL_TLV_DB_GAIN_MUTE, right?
// debug(1, "Try minimum volume + 1 as lowest true attenuation
// value");
- if (snd_mixer_selem_ask_playback_vol_dB(alsa_mix_elem, alsa_mix_minv + 1,
- &alsa_mix_mindb) != 0)
- debug(1, "Can't get dB value corresponding to a minimum volume "
- "+ 1.");
+
+ // now we need to find a lowest dB value that isn't a mute
+ // so we'll work from alsa_mix_minv upwards until we get a db value
+ // that is not SND_CTL_TLV_DB_GAIN_MUTE
+
+ long cv;
+ for (cv = alsa_mix_minv;
+ cv <= alsa_mix_maxv && (alsa_mix_mindb == SND_CTL_TLV_DB_GAIN_MUTE); cv++) {
+ if (snd_mixer_selem_ask_playback_vol_dB(alsa_mix_elem, cv, &alsa_mix_mindb) != 0)
+ debug(1, "Can't get dB value corresponding to volume %ld.", cv);
+ }
}
debug(3, "Hardware mixer has dB volume from %f to %f.", (1.0 * alsa_mix_mindb) / 100.0,
(1.0 * alsa_mix_maxdb) / 100.0);
+
+ audio_alsa.volume = &volume; // insert the volume function now we
+ // know it can do dB stuff
+ volume_range.minimum_volume_dB = alsa_mix_mindb;
+ volume_range.maximum_volume_dB = alsa_mix_maxdb;
+ output_parameters.volume_range = &volume_range;
+
} else {
// use the linear scale and do the db conversion ourselves
warn("The hardware mixer specified -- \"%s\" -- does not have "
- "a dB volume scale, and so can not be used by Shairport Sync.",
+ "a dB volume scale.",
alsa_mix_ctrl);
- /*
+
if ((response = snd_ctl_open(&ctl, alsa_mix_dev, 0)) < 0) {
warn("Cannot open control \"%s\"", alsa_mix_dev);
}
"from %f to %f.",
alsa_mix_ctrl, (1.0 * alsa_mix_mindb) / 100.0, (1.0 * alsa_mix_maxdb) / 100.0);
has_softvol = 1;
- audio_alsa.volume = &volume; // insert the volume function now
- // we know it can do dB stuff
- audio_alsa.parameters = ¶meters; // likewise the parameters stuff
+ audio_alsa.volume = &volume; // insert the volume function now we
+ // know it can do dB stuff
+ volume_range.minimum_volume_dB = alsa_mix_mindb;
+ volume_range.maximum_volume_dB = alsa_mix_maxdb;
+ output_parameters.volume_range = &volume_range;
} else {
- debug(1, "Cannot get a dB range from the volume control \"%s\"", alsa_mix_ctrl);
+ debug(1, "Cannot get the dB range from the volume control \"%s\"", alsa_mix_ctrl);
}
}
- */
}
}
if (((config.alsa_use_hardware_mute == 1) &&
static int alsa_device_init() { return prepare_mixer(); }
+static void
+snd_error_quiet(__attribute__((unused)) const char *file, __attribute__((unused)) int line,
+ __attribute__((unused)) const char *func, __attribute__((unused)) int err,
+ __attribute__((unused)) const char *fmt, __attribute__((unused)) va_list arg) {
+ // return NULL;
+}
+
static int init(int argc, char **argv) {
+ snd_lib_error_set_handler((snd_lib_error_handler_t)snd_error_quiet);
+ current_encoded_output_format = 0;
// for debugging
snd_output_stdio_attach(&output, stdout, 0);
-
// debug(2,"audio_alsa init called.");
- int response = 0; // this will be what we return to the caller.
+ // int response = 0; // this will be what we return to the caller.
alsa_device_initialised = 0;
const char *str;
int value;
// set up default values first
+ config.no_mmap = 1; // some devices don't implement this properly and crash when data is dropped
alsa_backend_state = abm_disconnected; // startup state
debug(2, "alsa: init() -- alsa_backend_state => abm_disconnected.");
set_period_size_request = 0;
config.audio_backend_latency_offset = 0;
config.audio_backend_buffer_desired_length = 0.200;
config.audio_backend_buffer_interpolation_threshold_in_seconds =
- 0.120; // below this, basic interpolation will be used to save time.
+ 0.060; // below this, basic interpolation will be used to save time.
config.alsa_maximum_stall_time = 0.200; // 200 milliseconds -- if it takes longer, it's a problem
config.disable_standby_mode_silence_threshold =
0.040; // start sending silent frames if the delay goes below this time
- config.disable_standby_mode_silence_scan_interval = 0.004; // check silence threshold this often
- stall_monitor_error_threshold =
- (uint64_t)1000000 * config.alsa_maximum_stall_time; // stall time max to microseconds;
- stall_monitor_error_threshold = (stall_monitor_error_threshold << 32) / 1000000; // now in fp form
- debug(1, "alsa: alsa_maximum_stall_time of %f sec.", config.alsa_maximum_stall_time);
-
- stall_monitor_start_time = 0;
- stall_monitor_frame_count = 0;
+ // on slower single-core machines, it doesn't make sense to make this much less than about 40 ms,
+ // as, whatever the setting, the scheduler may let it sleep for much longer -- up to 80
+ // milliseconds.
+ config.disable_standby_mode_silence_scan_interval = 0.030; // check silence threshold this often
config.disable_standby_mode = disable_standby_off;
config.keep_dac_busy = 0;
- config.use_precision_timing = YNA_AUTO;
+ config.use_precision_timing = YNA_NO;
+
+ disable_standby_mode_default_format = SPS_FORMAT_S32_LE;
+#ifdef CONFIG_AIRPLAY_2
+ disable_standby_mode_default_rate = 48000;
+#else
+ disable_standby_mode_default_rate = 44100;
+#endif
+ disable_standby_mode_default_channels = 2;
// get settings from settings file first, allow them to be overridden by
// command line options
- // do the "general" audio options. Note, these options are in the "general"
- // stanza!
- parse_general_audio_options();
+ // get settings from settings file, passing in defaults for format_set, rate_set and channel_set
+ // Note, these options may be in the "general" stanza or the named stanza
+#ifdef CONFIG_FFMPEG
+ parse_audio_options("alsa", SPS_FORMAT_SET, SPS_RATE_SET, SPS_CHANNEL_SET);
+#else
+ parse_audio_options("alsa", SPS_FORMAT_NON_FFMPEG_SET, SPS_RATE_NON_FFMPEG_SET,
+ SPS_CHANNNEL_NON_FFMPEG_SET);
+#endif
if (config.cfg != NULL) {
double dvalue;
/* Get the Output Device Name. */
- if (config_lookup_string(config.cfg, "alsa.output_device", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "alsa.output_device", &str)) {
alsa_out_dev = (char *)str;
}
/* Get the Mixer Type setting. */
- if (config_lookup_string(config.cfg, "alsa.mixer_type", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "alsa.mixer_type", &str)) {
inform("The alsa mixer_type setting is deprecated and has been ignored. "
"FYI, using the \"mixer_control_name\" setting automatically "
"chooses a hardware mixer.");
}
/* Get the Mixer Device Name. */
- if (config_lookup_string(config.cfg, "alsa.mixer_device", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "alsa.mixer_device", &str)) {
alsa_mix_dev = (char *)str;
}
/* Get the Mixer Control Name. */
- if (config_lookup_string(config.cfg, "alsa.mixer_control_name", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "alsa.mixer_control_name", &str)) {
alsa_mix_ctrl = (char *)str;
}
else {
warn("Invalid disable_synchronization option choice \"%s\". It should "
"be \"yes\" or "
- "\"no\". It is set to \"no\".");
+ "\"no\". It is set to \"no\".", str);
config.no_sync = 0;
}
}
else {
warn("Invalid mute_using_playback_switch option choice \"%s\". It "
"should be \"yes\" or "
- "\"no\". It is set to \"no\".");
+ "\"no\". It is set to \"no\".", str);
config.alsa_use_hardware_mute = 0;
}
}
else {
warn("Invalid use_hardware_mute_if_available option choice \"%s\". It "
"should be \"yes\" or "
- "\"no\". It is set to \"no\".");
+ "\"no\". It is set to \"no\".", str);
config.alsa_use_hardware_mute = 0;
}
}
- /* Get the output format, using the same names as aplay does*/
- if (config_lookup_string(config.cfg, "alsa.output_format", &str)) {
- int temp_output_format_auto_requested = config.output_format_auto_requested;
- config.output_format_auto_requested = 0; // assume a valid format will be given.
- if (strcasecmp(str, "S16") == 0)
- config.output_format = SPS_FORMAT_S16;
- else if (strcasecmp(str, "S16_LE") == 0)
- config.output_format = SPS_FORMAT_S16_LE;
- else if (strcasecmp(str, "S16_BE") == 0)
- config.output_format = SPS_FORMAT_S16_BE;
- else if (strcasecmp(str, "S24") == 0)
- config.output_format = SPS_FORMAT_S24;
- else if (strcasecmp(str, "S24_LE") == 0)
- config.output_format = SPS_FORMAT_S24_LE;
- else if (strcasecmp(str, "S24_BE") == 0)
- config.output_format = SPS_FORMAT_S24_BE;
- else if (strcasecmp(str, "S24_3LE") == 0)
- config.output_format = SPS_FORMAT_S24_3LE;
- else if (strcasecmp(str, "S24_3BE") == 0)
- config.output_format = SPS_FORMAT_S24_3BE;
- else if (strcasecmp(str, "S32") == 0)
- config.output_format = SPS_FORMAT_S32;
- else if (strcasecmp(str, "S32_LE") == 0)
- config.output_format = SPS_FORMAT_S32_LE;
- else if (strcasecmp(str, "S32_BE") == 0)
- config.output_format = SPS_FORMAT_S32_BE;
- else if (strcasecmp(str, "U8") == 0)
- config.output_format = SPS_FORMAT_U8;
- else if (strcasecmp(str, "S8") == 0)
- config.output_format = SPS_FORMAT_S8;
- else if (strcasecmp(str, "auto") == 0)
- config.output_format_auto_requested = 1;
- else {
- config.output_format_auto_requested =
- temp_output_format_auto_requested; // format was invalid; recall the original setting
- warn("Invalid output format \"%s\". It should be \"auto\", \"U8\", \"S8\", "
- "\"S16\", \"S24\", \"S24_LE\", \"S24_BE\", "
- "\"S24_3LE\", \"S24_3BE\" or "
- "\"S32\", \"S32_LE\", \"S32_BE\". It remains set to \"%s\".",
- str,
- config.output_format_auto_requested == 1
- ? "auto"
- : sps_format_description_string(config.output_format));
- }
- }
-
- if (config_lookup_string(config.cfg, "alsa.output_rate", &str)) {
- if (strcasecmp(str, "auto") == 0) {
- config.output_rate_auto_requested = 1;
- } else {
- if (config.output_rate_auto_requested == 1)
- warn("Invalid output rate \"%s\". It should be \"auto\", 44100, 88200, 176400 or 352800. "
- "It remains set to \"auto\". Note: numbers should not be placed in quotes.",
- str);
- else
- warn("Invalid output rate \"%s\". It should be \"auto\", 44100, 88200, 176400 or 352800. "
- "It remains set to %d. Note: numbers should not be placed in quotes.",
- str, config.output_rate);
- }
- }
-
- /* Get the output rate, which must be a multiple of 44,100*/
- if (config_lookup_int(config.cfg, "alsa.output_rate", &value)) {
- debug(1, "alsa output rate is %d frames per second", value);
- switch (value) {
- case 44100:
- case 88200:
- case 176400:
- case 352800:
- config.output_rate = value;
- config.output_rate_auto_requested = 0;
- break;
- default:
- if (config.output_rate_auto_requested == 1)
- warn("Invalid output rate \"%d\". It should be \"auto\", 44100, 88200, 176400 or 352800. "
- "It remains set to \"auto\".",
- value);
- else
- warn("Invalid output rate \"%d\".It should be \"auto\", 44100, 88200, 176400 or 352800. "
- "It remains set to %d.",
- value, config.output_rate);
- }
- }
-
/* Get the use_mmap_if_available setting. */
if (config_lookup_string(config.cfg, "alsa.use_mmap_if_available", &str)) {
if (strcasecmp(str, "no") == 0)
else {
warn("Invalid use_mmap_if_available option choice \"%s\". It should be "
"\"yes\" or \"no\". "
- "It remains set to \"yes\".");
+ "It remains set to \"yes\".", str);
config.no_mmap = 0;
}
}
}
}
+ /* Get the optional disable_standby_mode_default_rate. */
+ if (config_lookup_int(config.cfg, "alsa.disable_standby_mode_default_rate", &value)) {
+ if (value < 0) {
+ warn("Invalid alsa disable_standby_mode_default_rate setting %d. It "
+ "must be greater than 0. Default is %d. No setting is made.",
+ value, disable_standby_mode_default_rate);
+ } else {
+ disable_standby_mode_default_rate = value;
+ }
+ }
+
+ /* Get the optional disable_standby_mode_default_channels. */
+ if (config_lookup_int(config.cfg, "alsa.disable_standby_mode_default_channels", &value)) {
+ if (value < 0) {
+ warn("Invalid alsa disable_standby_mode_default_channels setting %d. It "
+ "must be greater than 0. Default is %d. No setting is made.",
+ value, disable_standby_mode_default_channels);
+ } else {
+ disable_standby_mode_default_channels = value;
+ }
+ }
+
if (config_lookup_string(config.cfg, "alsa.use_precision_timing", &str)) {
if ((strcasecmp(str, "no") == 0) || (strcasecmp(str, "off") == 0) ||
(strcasecmp(str, "never") == 0))
else {
warn("Invalid use_precision_timing option choice \"%s\". It should be "
"\"yes\", \"auto\" or \"no\". "
- "It remains set to \"%s\".",
+ "It remains set to \"%s\".", str,
config.use_precision_timing == YNA_NO ? "no"
: config.use_precision_timing == YNA_AUTO ? "auto"
: "yes");
}
+ inform("Note: the \"alsa\" setting \"use_precision_timing\" is deprecated and will be "
+ "removed in a future update.");
}
-
- debug(1, "alsa: disable_standby_mode is \"%s\".",
- config.disable_standby_mode == disable_standby_off ? "never"
- : config.disable_standby_mode == disable_standby_always ? "always"
- : "auto");
- debug(1, "alsa: disable_standby_mode_silence_threshold is %f seconds.",
- config.disable_standby_mode_silence_threshold);
- debug(1, "alsa: disable_standby_mode_silence_scan_interval is %f seconds.",
- config.disable_standby_mode_silence_scan_interval);
}
optind = 1; // optind=0 is equivalent to optind=1 plus special behaviour
warn("Invalid audio argument: \"%s\" -- ignored", argv[optind]);
}
- debug(1, "alsa: output device name is \"%s\".", alsa_out_dev);
-
-
+ debug(2, "alsa: output device name is \"%s\".", alsa_out_dev);
+
// now, we need a version of the alsa_out_dev that substitutes "hw:" for "hdmi" if it's
// there. It seems hw:1 would be a valid devcie name where hdmi:1 would not
-
+
if (alsa_out_dev != NULL)
hw_alsa_out_dev = str_replace(alsa_out_dev, "hdmi:", "hw:");
-
+
+ debug(2, "alsa: disable_standby_mode is \"%s\".",
+ config.disable_standby_mode == disable_standby_off ? "never"
+ : config.disable_standby_mode == disable_standby_always ? "always"
+ : "auto");
+ debug(2, "alsa: disable_standby_mode_silence_threshold is %f seconds.",
+ config.disable_standby_mode_silence_threshold);
+ debug(2, "alsa: disable_standby_mode_silence_scan_interval is %f seconds.",
+ config.disable_standby_mode_silence_scan_interval);
+
+ stall_monitor_error_threshold =
+ (uint64_t)(config.alsa_maximum_stall_time * 1000000000); // stall time max to nanoseconds;
+
// so, now, if the option to keep the DAC running has been selected, start a
// thread to monitor the
// length of the queue
// if the queue gets too short, stuff it with silence
-
- pthread_create(&alsa_buffer_monitor_thread, NULL, &alsa_buffer_monitor_thread_code, NULL);
+ return named_pthread_create_with_priority(&alsa_buffer_monitor_thread, 4,
+ &alsa_buffer_monitor_thread_code, NULL, "alsa_buf_mon");
+}
+
+static int check_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ // we know that the format/rate/channel count are legitimate but the combination
+ // may not be permitted.
+ // now see if the individual formats, rates and channel count are permissible
+ sps_rate_t r = SPS_RATE_LOWEST;
+ int found = 0;
+ // we know the rate is there, we just have to find it.
+ while ((r <= SPS_RATE_HIGHEST) && (found == 0)) {
+ if ((sps_rate_actual_rate(r) == rate) && ((config.rate_set & (1 << r)) != 0))
+ found = 1;
+ else
+ r++;
+ }
+
+ // guard against indexing past the table if, contrary to expectation, the rate was not found
+ int response = (found != 0) ? permissible_configurations[r][format][channels] : 1;
+ if (response != 0)
+ debug(3, "check %u/%s/%u returns %d.", rate, sps_format_description_string(format), channels,
+ response);
+ return response;
+}
+
+/*
+static int check_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ int response = check_settings(format, rate, channels);
+ debug(3, "alsa: configuration: %u/%s/%u %s.", rate, sps_format_description_string(format),
+ channels, response == 0 ? "is okay" : "can not be configured");
+ return response;
+}
+*/
+
+static char *get_channel_map_str() {
+ if (alsa_handle != NULL) {
+
+ snd_pcm_chmap_t *channel_map = snd_pcm_get_chmap(alsa_handle);
+ if (channel_map) {
+
+ int j, k;
+ // check for duplicates and replace subsequent duplicates with
+ // SND_CHMAP_UNKNOWN
+
+ for (j = 0; j < (int)channel_map->channels; j++) {
+ for (k = 0; k < j; k++) {
+ if ((channel_map->pos[k] != SND_CHMAP_UNKNOWN) &&
+ (channel_map->pos[k] == channel_map->pos[j])) {
+ debug(3,
+ "alsa: There is an error in the built-in %u channel map of the "
+ "output device:"
+ " channel %d has the same name as channel "
+ "%d (\"%s\"). Both names have been changed to \"UNKNOWN\".",
+ channel_map->channels, j, k, snd_pcm_chmap_name(channel_map->pos[k]));
+ channel_map->pos[j] = SND_CHMAP_UNKNOWN;
+ channel_map->pos[k] = SND_CHMAP_UNKNOWN;
+ }
+ }
+ }
+
+ unsigned int i;
+ for (i = 0; i < channel_map->channels; i++) {
+ debug(3, "channel %d is %d, name: \"%s\", long name: \"%s\".", i, channel_map->pos[i],
+ snd_pcm_chmap_name(channel_map->pos[i]),
+ snd_pcm_chmap_long_name(channel_map->pos[i]));
+ }
+ if (snd_pcm_chmap_print(channel_map, sizeof(public_channel_map), public_channel_map) < 0)
+ public_channel_map[0] = '\0'; // if there's any problem
+ debug(3,
+ "channel count: %d, channel name list: "
+ "\"%s\".",
+ channel_map->channels, public_channel_map);
+ free(channel_map);
+ } else {
+ debug(2, "no channel map -- if it's two-channel, assume standard stereo.");
+ if (CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format) == 2) {
+ // if it's a two-channel configuration, assume it's stereo and return
+ // a standard mapping
+ snprintf(public_channel_map, sizeof(public_channel_map) - 1, "FL FR");
+ }
+ }
+ }
+ return public_channel_map;
+}
+
+static int configure(int32_t requested_encoded_format, char **channel_map) {
+ int response = 0;
+ int oldState;
+ pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState); // make this un-cancellable
+ pthread_cleanup_debug_mutex_lock(&alsa_mutex, 200000, 0);
+ if (current_encoded_output_format != requested_encoded_format) {
+ if (current_encoded_output_format == 0)
+ debug(2, "alsa: setting output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ else
+ // note -- can't use short_format_description twice in one call because it returns the same
+ // string buffer each time
+ debug(3, "alsa: changing output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ do_close();
+ current_encoded_output_format = requested_encoded_format;
+ response = do_open();
+ }
+ if ((response == 0) && (channel_map != NULL)) {
+ *channel_map = get_channel_map_str();
+ }
+ debug_mutex_unlock(&alsa_mutex, 0);
+ pthread_cleanup_pop(0);
+ pthread_setcancelstate(oldState, NULL);
+ if (response != 0)
+ debug(1, "alsa: could not open the output device with configuration %s",
+ short_format_description(requested_encoded_format));
return response;
}
static void deinit(void) {
int oldState;
pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState); // make this un-cancellable
- // debug(2,"audio_alsa deinit called.");
- stop();
+ debug(2, "audio_alsa deinit called.");
+ pthread_cleanup_debug_mutex_lock(&alsa_mutex, 10000, 1);
+ if (alsa_handle != NULL) {
+ debug(3, "alsa: closing the output device.");
+ do_close();
+ } else {
+ debug(3, "alsa: output device already closed.");
+ }
+ pthread_cleanup_pop(1); // release the mutex
debug(2, "Cancel buffer monitor thread.");
pthread_cancel(alsa_buffer_monitor_thread);
- debug(3, "Join buffer monitor thread.");
+ debug(2, "Join buffer monitor thread.");
pthread_join(alsa_buffer_monitor_thread, NULL);
+ debug(2, "Joined buffer monitor thread.");
pthread_setcancelstate(oldState, NULL);
if (hw_alsa_out_dev != NULL)
free(hw_alsa_out_dev);
}
+static int prepare() { return get_permissible_configuration_settings(); }
+
static int set_mute_state() {
int response = 1; // some problem expected, e.g. no mixer or not allowed to use it or disconnected
int oldState;
return response;
}
+static output_parameters_t *parameters() { return &output_parameters; }
+
static void start(__attribute__((unused)) int i_sample_rate,
__attribute__((unused)) int i_sample_format) {
- debug(3, "audio_alsa start called.");
- // frame_index = 0;
- // measurement_data_is_valid = 0;
-
- stall_monitor_start_time = 0;
- stall_monitor_frame_count = 0;
if (alsa_device_initialised == 0) {
- debug(1, "alsa: start() calling alsa_device_init.");
+ debug(2, "alsa: start() calling alsa_device_init.");
alsa_device_init();
alsa_device_initialised = 1;
}
// delay_temp = 0;
ret = 0;
}
- } // else {
- // debug(1, "alsa_handle is NULL in standard_delay_and_status.");
- // }
-
- stall_monitor_start_time = 0; // zero if not initialised / not started / zeroed by flush
- stall_monitor_frame_count = 0; // set to delay at start of time, incremented by any writes
+ }
if (delay != NULL)
*delay = delay_temp;
if ((state_temp == SND_PCM_STATE_RUNNING) || (state_temp == SND_PCM_STATE_DRAINING)) {
- // uint64_t update_timestamp_ns =
- // update_timestamp.tv_sec * (uint64_t)1000000000 + update_timestamp.tv_nsec;
-
uint64_t update_timestamp_ns = update_timestamp.tv_sec;
update_timestamp_ns = update_timestamp_ns * 1000000000;
update_timestamp_ns = update_timestamp_ns + update_timestamp.tv_nsec;
time_now_ns = time_now_ns * 1000000000;
time_now_ns = time_now_ns + tn.tv_nsec;
+ // if the delay is not zero and if stall_monitor_new_frame_count_time is non-zero, then
+ // if the delay is longer than the stall threshold
+ // and the delay is the same as it was, a stall has occurred.
+
+ if ((stall_monitor_new_frame_count_time != 0) && (delay_temp != 0)) {
+
+ uint64_t time_since_last_measurement = time_now_ns - stall_monitor_new_frame_count_time;
+ if ((time_since_last_measurement > stall_monitor_error_threshold) &&
+ (stall_monitor_new_frame_count == delay_temp)) {
+ debug(1, "DAC has stalled for %f seconds with a frame count of %ld.",
+ time_since_last_measurement * 1E-9, delay_temp);
+ debug(1, "time_now_ns: %" PRIu64 ", stall_monitor_new_frame_count_time: %" PRIu64 ".",
+ time_now_ns, stall_monitor_new_frame_count_time);
+ }
+ }
+
+ // for the next time...
+ stall_monitor_new_frame_count = delay_temp;
+ stall_monitor_new_frame_count_time = time_now_ns;
+
+ /*
// see if it's stalled
if ((stall_monitor_start_time != 0) && (stall_monitor_frame_count == delay_temp)) {
if (((update_timestamp_ns - stall_monitor_start_time) >
stall_monitor_error_threshold) ||
((time_now_ns - stall_monitor_start_time) > stall_monitor_error_threshold)) {
- debug(2,
- "DAC seems to have stalled with time_now_ns: %" PRIX64
- ", update_timestamp_ns: %" PRIX64 ", stall_monitor_start_time %" PRIX64
- ", stall_monitor_error_threshold %" PRIX64 ".",
+ debug(1,
+ "DAC seems to have stalled with time_now_ns: %" PRIu64
+ ", update_timestamp_ns: %" PRIu64 ", stall_monitor_start_time %" PRIu64
+ ", stall_monitor_error_threshold %" PRIu64 ", delay_temp %u.",
time_now_ns, update_timestamp_ns, stall_monitor_start_time,
- stall_monitor_error_threshold);
- debug(2,
- "DAC seems to have stalled with time_now: %lx,%lx"
- ", update_timestamp: %lx,%lx, stall_monitor_start_time %" PRIX64
- ", stall_monitor_error_threshold %" PRIX64 ".",
- tn.tv_sec, tn.tv_nsec, update_timestamp.tv_sec, update_timestamp.tv_nsec,
- stall_monitor_start_time, stall_monitor_error_threshold);
- ret = sps_extra_code_output_stalled;
+ stall_monitor_error_threshold, delay_temp);
+ // ret = sps_extra_code_output_stalled;
}
- } else {
+ } // else {
stall_monitor_start_time = update_timestamp_ns;
stall_monitor_frame_count = delay_temp;
- }
+ // }
+ */
if (ret == 0) {
uint64_t delta = time_now_ns - update_timestamp_ns;
- // uint64_t frames_played_since_last_interrupt =
- // ((uint64_t)config.output_rate * delta) / 1000000000;
-
- uint64_t frames_played_since_last_interrupt = config.output_rate;
+ uint64_t frames_played_since_last_interrupt =
+ RATE_FROM_ENCODED_FORMAT(current_encoded_output_format);
frames_played_since_last_interrupt = frames_played_since_last_interrupt * delta;
frames_played_since_last_interrupt = frames_played_since_last_interrupt / 1000000000;
} else { // not running, thus no delay information, thus can't check for
// stall
delay_temp = 0;
- stall_monitor_start_time = 0; // zero if not initialised / not started / zeroed by flush
- stall_monitor_frame_count = 0; // set to delay at start of time, incremented by any writes
-
- // not running, thus no delay information, thus can't check for frame
- // rates
- // frame_index = 0; // we'll be starting over...
- // measurement_data_is_valid = 0;
+ stall_monitor_new_frame_count_time = 0;
}
} else {
- debug(1, "alsa: can't get device's status.");
+ debug(1, "alsa: can't get device's status -- error %d.", ret);
}
} else {
*delay = delay_temp;
if (state != NULL)
*state = state_temp;
+ debug(3, "precision_delay_and_status returning state: %d and delay %ld.", state_temp, delay_temp);
return ret;
}
pthread_setcancelstate(oldState, NULL);
uint64_t hd = my_delay; // note: snd_pcm_sframes_t is a long
*the_delay = hd;
+ // if (ret != 0)
+ // debug(1, "frames_sent_break_occurred? value is %d.", ret);
return ret;
}
(state != SND_PCM_STATE_XRUN)) {
debug(1, "alsa: DAC in odd SND_PCM_STATE_* %d prior to writing.", state);
}
+ if (state == SND_PCM_STATE_XRUN) {
+ debug(1, "alsa: DAC in SND_PCM_STATE_XRUN prior to writing.");
+ ret = snd_pcm_recover(alsa_handle, ret, 1);
+ }
snd_pcm_state_t prior_state = state; // keep this for afterwards....
- // debug(3, "alsa: write %d frames.", samples);
+ debug(3, "alsa: write %d frames.", samples);
ret = alsa_pcm_write(alsa_handle, buf, samples);
+ if (ret == -EIO) {
+ debug(1, "alsa: I/O Error.");
+ usleep(20000); // give it a breather...
+ }
if (ret > 0)
frames_sent_for_playing += ret; // this is the number of frames accepted
if (ret == samples) {
- stall_monitor_frame_count += samples;
+ stall_monitor_new_frame_count += samples;
} else {
frames_sent_break_occurred = 1; // note than an output error has occurred
if (ret == -EPIPE) { /* underrun */
return ret;
}
-static int do_open(int do_auto_setup) {
+static int do_open() {
int ret = 0;
if (alsa_backend_state != abm_disconnected)
- debug(1, "alsa: do_open() -- opening the output device when it is already "
+ debug(1, "alsa: do_open() -- asking to open the output device when it is already "
"connected");
if (alsa_handle == NULL) {
- // debug(1,"alsa: do_open() -- opening the output device");
- ret = open_alsa_device(do_auto_setup);
+ debug(3, "alsa: do_open() -- opening the output device");
+ ret = open_alsa_device();
if (ret == 0) {
mute_requested_internally = 0;
if (audio_alsa.volume)
frames_sent_break_occurred = 1; // there is a discontinuity with
// any previously-reported frame count
frames_sent_for_playing = 0;
+ debug(3, "alsa: do_open() -- alsa_backend_state => abm_connected");
alsa_backend_state = abm_connected; // only do this if it really opened it.
} else {
if ((ret == -ENOENT) || (ret == -ENODEV)) // if the device isn't there...
}
static int do_close() {
- debug(2, "alsa: do_close()");
if (alsa_backend_state == abm_disconnected)
- debug(1, "alsa: do_close() -- closing the output device when it is already "
- "disconnected");
+ debug(3, "alsa: do_close() -- output device is already disconnected");
int derr = 0;
if (alsa_handle) {
- // debug(1,"alsa: do_close() -- closing the output device");
+ debug(3, "alsa: do_close() -- closing the output device");
if ((derr = snd_pcm_drop(alsa_handle)))
debug(1, "Error %d (\"%s\") dropping output device.", derr, snd_strerror(derr));
- usleep(10000); // wait for the hardware to do its trick. BTW, this make the function pthread
+ usleep(20000); // wait for the hardware to do its trick. BTW, this make the function pthread
// cancellable
if ((derr = snd_pcm_hw_free(alsa_handle)))
debug(1, "Error %d (\"%s\") freeing the output device hardware.", derr, snd_strerror(derr));
- debug(2, "alsa: do_close() -- closing alsa handle");
+ debug(3, "alsa: do_close() -- closing alsa handle");
if ((derr = snd_pcm_close(alsa_handle)))
debug(1, "Error %d (\"%s\") closing the output device.", derr, snd_strerror(derr));
alsa_handle = NULL;
- alsa_handle_status = ENODEV; // no device open
+ alsa_handle_status = -ENODEV; // no device open
} else {
- debug(1, "alsa: do_close() -- output device already closed.");
+ debug(3, "alsa: do_close() -- output device is already closed.");
}
+ debug(3, "alsa: do_close() -- alsa_backend_state => abm_disconnected.");
alsa_backend_state = abm_disconnected;
return derr;
}
static int sub_flush() {
if (alsa_backend_state == abm_disconnected)
- debug(1, "alsa: do_flush() -- closing the output device when it is already "
+ debug(1, "alsa: do_flush() -- asking to flush the output device when it is already "
"disconnected");
int derr = 0;
if (alsa_handle) {
- debug(2, "alsa: do_flush() -- flushing the output device");
+ debug(3, "alsa: do_flush() -- flushing the output device");
frames_sent_break_occurred = 1;
if ((derr = snd_pcm_drop(alsa_handle)))
debug(1, "Error %d (\"%s\") dropping output device.", derr, snd_strerror(derr));
if ((derr = snd_pcm_prepare(alsa_handle)))
debug(1, "Error %d (\"%s\") preparing output device after flush.", derr, snd_strerror(derr));
+ stall_monitor_new_frame_count = 0;
+ stall_monitor_new_frame_count_time = 0;
+ if (alsa_backend_state != abm_connected)
+ debug(2, "alsa: sub_flush() -- alsa_backend_state => abm_connected.");
alsa_backend_state = abm_connected;
} else {
debug(1, "alsa: do_flush() -- output device already closed.");
// DAC must be
// connected
- // debug(3,"audio_alsa play called.");
int ret = 0;
pthread_cleanup_debug_mutex_lock(&alsa_mutex, 50000, 0);
if (alsa_backend_state == abm_disconnected) {
- ret = do_open(0); // don't try to auto setup
+ ret = do_open();
if (ret == 0)
debug(2, "alsa: play() -- opened output device");
}
return ret;
}
-static int prepare(void) {
- // this will leave the DAC open / connected.
- int ret = 0;
-
- pthread_cleanup_debug_mutex_lock(&alsa_mutex, 50000, 0);
-
- if (alsa_backend_state == abm_disconnected) {
- if (alsa_device_initialised == 0) {
- // debug(1, "alsa: prepare() calling alsa_device_init.");
- alsa_device_init();
- alsa_device_initialised = 1;
- }
- ret = do_open(1); // do auto setup
- if (ret == 0)
- debug(2, "alsa: prepare() -- opened output device");
- }
-
- debug_mutex_unlock(&alsa_mutex, 0);
- pthread_cleanup_pop(0); // release the mutex
- return ret;
-}
-
static void flush(void) {
- // debug(2,"audio_alsa flush called.");
pthread_cleanup_debug_mutex_lock(&alsa_mutex, 10000, 1);
if (alsa_backend_state != abm_disconnected) { // must be playing or connected...
// do nothing for a flush if config.keep_dac_busy is true
pthread_cleanup_pop(0); // release the mutex
}
-static void parameters(audio_parameters *info) {
- info->minimum_volume_dB = alsa_mix_mindb;
- info->maximum_volume_dB = alsa_mix_maxdb;
-}
-
static void do_volume(double vol) { // caller is assumed to have the alsa_mutex when
// using this function
debug(3, "Setting volume db to %f.", vol);
do_volume(vol);
}
-/*
-static void linear_volume(double vol) {
- debug(2, "Setting linear volume to %f.", vol);
- set_volume = vol;
- if ((alsa_mix_ctrl == NULL) && alsa_mix_handle) {
- double linear_volume = pow(10, vol);
- // debug(1,"Linear volume is %f.",linear_volume);
- long int_vol = alsa_mix_minv + (alsa_mix_maxv - alsa_mix_minv) *
-linear_volume;
- // debug(1,"Setting volume to %ld, for volume input of %f.",int_vol,vol);
- if (alsa_mix_handle) {
- if (snd_mixer_selem_set_playback_volume_all(alsa_mix_elem, int_vol) != 0)
- die("Failed to set playback volume");
-
- }
- }
-}
-*/
-
static int mute(int mute_state_requested) { // these would be for external reasons, not
// because of the
// state of the backend.
*/
static void *alsa_buffer_monitor_thread_code(__attribute__((unused)) void *arg) {
+ // #include <syscall.h>
+ // debug(1, "alsa_buffer_monitor_thread_code PID %d", syscall(SYS_gettid));
+ // Wait until the output configuration has been set by the main program
+ debug(2, "alsa: alsa_buffer_monitor_thread_code started.");
int frame_count = 0;
int error_count = 0;
int error_detected = 0;
+ int64_t sleep_time_actual_ns = 0; // actual sleep time since last check, or zero
int okb = -1;
+
+ while (config.keep_dac_busy == 0)
+ usleep(10000);
+
+ debug(1, "alsa: get initial disable standby parameters for rate/channels: %u/%u.",
+ disable_standby_mode_default_rate, disable_standby_mode_default_channels);
+ while (get_permissible_configuration_settings() != 0) {
+ debug(1, "wait 50 ms to check again for success");
+ usleep(50000);
+ }
+
+ current_encoded_output_format =
+ get_configuration(disable_standby_mode_default_channels, disable_standby_mode_default_rate,
+ disable_standby_mode_default_format);
+
+ debug(1, "alsa: disable standby initial parameters: %s.",
+ short_format_description(current_encoded_output_format));
+
// if too many play errors occur early on, we will turn off the disable standby mode
while (error_detected == 0) {
int keep_dac_busy_has_just_gone_off = 0;
if (okb != config.keep_dac_busy) {
- if ((okb != 0) && (config.keep_dac_busy == 0))
+ if ((okb != 0) && (config.keep_dac_busy == 0)) {
keep_dac_busy_has_just_gone_off = 1;
+ }
debug(2, "keep_dac_busy is now \"%s\"", config.keep_dac_busy == 0 ? "no" : "yes");
okb = config.keep_dac_busy;
}
if ((config.keep_dac_busy != 0) && (alsa_device_initialised == 0)) {
- debug(2, "alsa: alsa_buffer_monitor_thread_code() calling "
- "alsa_device_init.");
+ debug(2, "alsa: alsa_buffer_monitor_thread_code() preparing for use and initialising.");
alsa_device_init();
alsa_device_initialised = 1;
}
int sleep_time_us = (int)(config.disable_standby_mode_silence_scan_interval * 1000000);
+ if (sleep_time_actual_ns > ((8 * sleep_time_us * 1000) / 4))
+ debug(1,
+ "alsa_buffer_monitor_thread_code sleep was %.6f sec but request was for %.6f sec. "
+ "Disabling standby may not work properly!",
+ sleep_time_actual_ns * 0.000000001, config.disable_standby_mode_silence_scan_interval);
pthread_cleanup_debug_mutex_lock(&alsa_mutex, 200000, 0);
// check possible state transitions here
if ((alsa_backend_state == abm_disconnected) && (config.keep_dac_busy != 0)) {
// open the dac and move to abm_connected mode
- if (do_open(1) == 0) // no automatic setup of rate and speed if necessary
- debug(2, "alsa: alsa_buffer_monitor_thread_code() -- output device opened; "
- "alsa_backend_state => abm_connected");
+ if (do_open() == 0) {
+ debug(2,
+ "alsa: alsa_buffer_monitor_thread_code() -- output device opened; "
+ "alsa_backend_state from abm_disconnected => abm_connected. error_detected = %d",
+ error_detected);
+ } else {
+ debug(1, "alsa_buffer_monitor_thread_code: can't open output device -- terminating");
+ error_detected = 1;
+ }
} else if ((alsa_backend_state != abm_disconnected) && (keep_dac_busy_has_just_gone_off != 0)) {
- stall_monitor_start_time = 0;
- // frame_index = 0;
- // measurement_data_is_valid = 0;
debug(2, "alsa: alsa_buffer_monitor_thread_code() -- closing the output "
"device");
do_close();
- debug(2, "alsa: alsa_buffer_monitor_thread_code() -- alsa_backend_state "
- "=> abm_disconnected");
}
// now, if the backend is not in the abm_disconnected state
// and config.keep_dac_busy is true (at the present, this has to be the case
// to be in the
// abm_connected state in the first place...) then do the silence-filling
// thing, if needed /* only if the output device is capable of precision delay */.
- if ((alsa_backend_state != abm_disconnected) &&
- (config.keep_dac_busy != 0) /* && precision_delay_available() */) {
+ if ((alsa_backend_state != abm_disconnected) && (config.keep_dac_busy != 0) &&
+ (error_detected == 0) /* && precision_delay_available() */) {
int reply;
long buffer_size = 0;
snd_pcm_state_t state;
debug(1, "alsa: alsa_buffer_monitor_thread_code delay error %d: \"%s\".", reply,
(char *)errorstring);
}
- long buffer_size_threshold =
- (long)(config.disable_standby_mode_silence_threshold * config.output_rate);
+ uint64_t current_delay = 0;
+ if (buffer_size < 0) {
+ debug(1, "delay of less than 0: %ld.", buffer_size);
+ current_delay = 0;
+ } else {
+ current_delay = buffer_size;
+ }
+ if (current_delay < minimum_dac_queue_size) {
+ minimum_dac_queue_size = current_delay; // update for display later
+ }
+
+ long buffer_size_threshold = (long)(config.disable_standby_mode_silence_threshold *
+ RATE_FROM_ENCODED_FORMAT(current_encoded_output_format));
+ // debug(1, "current_delay: %" PRIu64 ", buffer_size: %ld, buffer_size_threshold %ld,
+ // frames_of_silence: %d.", current_delay, buffer_size, buffer_size_threshold,
+ // buffer_size_threshold - buffer_size + current_alsa_configuration->rate / 10);
+
size_t size_of_silence_buffer;
+ // debug(1, "buffer_size %d, buffer_size_threshold %d.", buffer_size,
+ // buffer_size_threshold);
if (buffer_size < buffer_size_threshold) {
- int frames_of_silence = 1024;
- size_of_silence_buffer = frames_of_silence * frame_size;
- void *silence = malloc(size_of_silence_buffer);
+ int frames_of_silence = buffer_size_threshold - buffer_size +
+ RATE_FROM_ENCODED_FORMAT(current_encoded_output_format) / 10;
+ size_of_silence_buffer =
+ frames_of_silence *
+ fr[FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format)].sample_size *
+ CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format);
+ void *silence = calloc(size_of_silence_buffer, 1);
if (silence == NULL) {
warn("disable_standby_mode has been turned off because a memory allocation error "
"occurred.");
error_detected = 1;
} else {
int ret;
- pthread_cleanup_push(malloc_cleanup, silence);
+ pthread_cleanup_push(malloc_cleanup, &silence);
int use_dither = 0;
- if ((alsa_mix_ctrl == NULL) && (config.ignore_volume_control == 0) &&
- (config.airplay_volume != 0.0))
+ if ((alsa_mix_ctrl == NULL) &&
+ (((config.ignore_volume_control == 0) && (config.airplay_volume != 0.0)) ||
+ (config.playback_mode == ST_mono)))
use_dither = 1;
+
dither_random_number_store =
- generate_zero_frames(silence, frames_of_silence, config.output_format,
+ generate_zero_frames(silence, frames_of_silence,
use_dither, // i.e. with dither
- dither_random_number_store);
+ dither_random_number_store, current_encoded_output_format);
+
ret = do_play(silence, frames_of_silence);
+ debug(3, "Played %u frames of silence on %u channels, equal to %zu bytes.",
+ frames_of_silence, CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format),
+ size_of_silence_buffer);
frame_count++;
pthread_cleanup_pop(1); // free malloced buffer
if (ret < 0) {
}
debug_mutex_unlock(&alsa_mutex, 0);
pthread_cleanup_pop(0); // release the mutex
- usleep(sleep_time_us); // has a cancellation point in it
+ uint64_t tsb = get_absolute_time_in_ns();
+ usleep(sleep_time_us); // has a cancellation point in it
+ sleep_time_actual_ns = get_absolute_time_in_ns() - tsb;
}
pthread_exit(NULL);
}
+
+static int32_t get_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ // pass in SPS_FORMAT_AUTO because we want the best (deepest) format.
+
+ // first, check that the device is there!
+ snd_pcm_t *temp_alsa_handle = NULL;
+ int response = snd_pcm_open(&temp_alsa_handle, alsa_out_dev, SND_PCM_STREAM_PLAYBACK, 0);
+ ;
+ if ((response == 0) && (temp_alsa_handle != NULL)) {
+ response = snd_pcm_close(temp_alsa_handle);
+ if (response != 0) {
+ char errorstring[1024];
+ strerror_r(-response, (char *)errorstring, sizeof(errorstring));
+ debug(1, "error %d closing probed alsa output device \"%s\".", -response, alsa_out_dev);
+ }
+ } else if (response == -EBUSY) {
+ response = 0; // busy is okay -- it means the device exists
+ } else {
+ char errorstring[1024];
+ strerror_r(-response, (char *)errorstring, sizeof(errorstring));
+ debug(3,
+ "the alsa output device called \"%s\" can not be accessed. Error %d (\"%s\"). Maybe it "
+ "doesn't exist or is not ready yet...",
+ alsa_out_dev, -response, errorstring);
+ }
+ // if we can access the device, then search for configurations
+ if (response == 0)
+ response = search_for_suitable_configuration(channels, rate, format, &check_configuration);
+ return response;
+}
/*
* libao output driver. This file is part of Shairport.
* Copyright (c) James Laird 2013
- * Copyright (c) Mike Brady 2014 -- 2022
+ * Copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#include <stdio.h>
#include <unistd.h>
+typedef struct {
+ sps_format_t sps_format;
+ int bits_per_sample;
+ int byte_format;
+} sps_ao_t;
+
+// these are the only formats that audio_pw will ever allow itself to be configured with
+static sps_ao_t format_lookup[] = {{SPS_FORMAT_S16_LE, 16, AO_FMT_LITTLE},
+ {SPS_FORMAT_S16_BE, 16, AO_FMT_BIG},
+ {SPS_FORMAT_S32_LE, 32, AO_FMT_LITTLE},
+ {SPS_FORMAT_S32_BE, 32, AO_FMT_BIG}};
+
+// use an SPS_FORMAT_... to find an entry in the format_lookup table or return NULL
+static sps_ao_t *sps_format_lookup(sps_format_t to_find) {
+ sps_ao_t *response = NULL;
+ unsigned int i = 0;
+ while ((response == NULL) && (i < sizeof(format_lookup) / sizeof(sps_ao_t))) {
+ if (format_lookup[i].sps_format == to_find)
+ response = &format_lookup[i];
+ else
+ i++;
+ }
+ return response;
+}
+
ao_device *dev = NULL;
ao_option *ao_opts = NULL;
-ao_sample_format fmt;
+ao_sample_format ao_output_format;
int driver = 0;
+static int32_t current_encoded_output_format = 0;
+
+static int check_settings(sps_format_t sample_format, unsigned int sample_rate,
+ unsigned int channel_count) {
+
+ // debug(1, "ao check_settings: configuration: %u/%s/%u.", sample_rate,
+ // sps_format_description_string(sample_format), channel_count);
+
+ int response = EINVAL;
+
+ sps_ao_t *format_info = sps_format_lookup(sample_format);
+ if (format_info != NULL) {
+
+ ao_sample_format check_fmt;
+ memset(&check_fmt, 0, sizeof(check_fmt));
+ check_fmt.bits = format_info->bits_per_sample;
+ check_fmt.rate = sample_rate;
+ check_fmt.channels = channel_count;
+ check_fmt.byte_format = format_info->byte_format;
+ // fmt.matrix = strdup("L,R");
+ ao_device *check_dev = ao_open_live(driver, &check_fmt, ao_opts);
+ if (check_dev != NULL) {
+ debug(3, "ao: check_settings %u/%s/%u works!", sample_rate,
+ sps_format_description_string(sample_format), channel_count);
+ if (ao_close(check_dev) == 0)
+ debug(1, "ao check_settings: error at ao_close");
+ response = 0;
+ } else {
+ debug(3, "ao: check_settings %u/%s/%u is not available!", sample_rate,
+ sps_format_description_string(sample_format), channel_count);
+ }
+
+ // response = 0;
+ }
+
+ // debug(1, "ao check_settings: configuration: %u/%s/%u %s.", sample_rate,
+ // sps_format_description_string(sample_format), channel_count,
+ // response == 0 ? "is okay" : "can not be configured");
+ return response;
+}
+
+static int check_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ return check_settings(format, rate, channels);
+}
+
+static int32_t get_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ uint64_t start_time = get_absolute_time_in_ns();
+ int32_t response =
+ search_for_suitable_configuration(channels, rate, format, &check_configuration);
+ int64_t elapsed_time = get_absolute_time_in_ns() - start_time;
+ debug(1, "ao: get_configuration took %0.3f mS.", elapsed_time * 0.000001);
+ return response;
+}
+
+static int configure(int32_t requested_encoded_format, char **channel_map) {
+ debug(3, "ao: configure %s.", short_format_description(requested_encoded_format));
+ int response = EINVAL;
+ if (current_encoded_output_format != requested_encoded_format) {
+ uint64_t start_time = get_absolute_time_in_ns();
+ if (current_encoded_output_format == 0)
+ debug(1, "ao: setting output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ else
+ // note -- can't use short_format_description twice in one call because it returns the same
+ // string buffer each time
+ debug(1, "ao: changing output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ current_encoded_output_format = requested_encoded_format;
+ sps_ao_t *format_info =
+ sps_format_lookup(FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format));
+
+ if (format_info == NULL)
+ die("ao: can't find format information!");
+
+ if (dev != NULL) {
+ if (ao_close(dev) == 0) {
+ debug(1, "ao configure: error closing existing device ao_close");
+ }
+ }
+ dev = NULL;
+ memset(&ao_output_format, 0, sizeof(ao_output_format));
+ ao_output_format.bits = format_info->bits_per_sample;
+ ao_output_format.rate = RATE_FROM_ENCODED_FORMAT(current_encoded_output_format);
+ ao_output_format.channels = CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format);
+ ao_output_format.byte_format = format_info->byte_format;
+ switch (CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format)) {
+ case 2:
+ ao_output_format.matrix = strdup("L,R");
+ break;
+ case 6:
+ ao_output_format.matrix = strdup("L,R,C,LFE,BR,BL");
+ break;
+ case 8:
+ ao_output_format.matrix = strdup("L,R,C,LFE,BR,BL,SL,SR");
+ break;
+ default:
+ break;
+ }
+ // fmt.matrix = strdup("L,R");
+ /*
+ dev = ao_open_live(driver, &ao_output_format, ao_opts);
+ if (dev != NULL) {
+ debug(
+ 1, "ao: configure %u/%s/%u opened!",
+ RATE_FROM_ENCODED_FORMAT(current_encoded_output_format),
+ sps_format_description_string(FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format)),
+ CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format));
+ response = 0;
+ } else {
+ debug(
+ 1, "ao: configure %u/%s/%u is not available!",
+ RATE_FROM_ENCODED_FORMAT(current_encoded_output_format),
+ sps_format_description_string(FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format)),
+ CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format));
+ }
+ */
+ int64_t elapsed_time = get_absolute_time_in_ns() - start_time;
+ debug(3, "ao: configure took %0.3f mS.", elapsed_time * 0.000001);
+ } else {
+ debug(3, "ao: setting output configuration -- configuration unchanged, so nothing done.");
+ }
+ if ((response == 0) && (channel_map != NULL)) {
+ *channel_map = NULL; // nothing back here
+ }
+ return response;
+}
static void help(void) {
printf(" -d driver set the output driver\n"
}
static int init(int argc, char **argv) {
- int oldState;
- pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState); // make this un-cancellable
ao_initialize();
driver = ao_default_driver_id();
if (driver == -1) {
- warn("libao can not find a usable driver!");
+ warn("the ao backend can not find a usable output device with the default driver!");
} else {
// set up default values first
// get settings from settings file first, allow them to be overridden by
// command line options
- // do the "general" audio options. Note, these options are in the "general" stanza!
- parse_general_audio_options();
+ // get settings from settings file, passing in defaults for format_set, rate_set and channel_set
+ // Note, these options may be in the "general" stanza or the named stanza
+#ifdef CONFIG_FFMPEG
+ parse_audio_options("ao", SPS_FORMAT_SET, SPS_RATE_SET, SPS_CHANNEL_SET);
+#else
+ parse_audio_options("ao", SPS_FORMAT_NON_FFMPEG_SET, SPS_RATE_NON_FFMPEG_SET,
+ SPS_CHANNNEL_NON_FFMPEG_SET);
+#endif
optind = 1; // optind=0 is equivalent to optind=1 plus special behaviour
argv--; // so we shift the arguments to satisfy getopt()
if (optind < argc)
die("Invalid audio argument: %s", argv[optind]);
- memset(&fmt, 0, sizeof(fmt));
-
- fmt.bits = 16;
- fmt.rate = 44100;
- fmt.channels = 2;
- fmt.byte_format = AO_FMT_NATIVE;
- fmt.matrix = strdup("L,R");
+ // get_permissible_configuration_settings();
}
- pthread_setcancelstate(oldState, NULL);
return 0;
}
static void deinit(void) {
- int oldState;
- pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState); // make this un-cancellable
if (dev != NULL)
ao_close(dev);
dev = NULL;
ao_shutdown();
- pthread_setcancelstate(oldState, NULL);
-}
-
-static void start(__attribute__((unused)) int sample_rate,
- __attribute__((unused)) int sample_format) {
- // debug(1,"libao start");
}
static int play(void *buf, int samples, __attribute__((unused)) int sample_type,
__attribute__((unused)) uint32_t timestamp,
__attribute__((unused)) uint64_t playtime) {
int response = 0;
+
+ if (dev == NULL) {
+ debug(1, "ao play(): ao_open_live to play %d samples, bytes per sample: %u, channels: %u",
+ samples, ao_output_format.bits / 8, ao_output_format.channels);
+ dev = ao_open_live(driver, &ao_output_format, ao_opts);
+ }
+ // debug(1,"ao play(): play %d samples, bytes per sample: %u, channels: %u", samples,
+ // ao_output_format.bits/8, ao_output_format.channels);
int oldState;
pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState); // make this un-cancellable
- if (driver != -1) {
- if (dev == NULL)
- dev = ao_open_live(driver, &fmt, ao_opts);
- if (dev != NULL)
- response = ao_play(dev, buf, samples * 4);
- }
+ response = ao_play(dev, buf, samples * ao_output_format.bits / 8 * ao_output_format.channels);
pthread_setcancelstate(oldState, NULL);
return response;
}
static void stop(void) {
- // debug(1,"libao stop");
- int oldState;
- pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState); // make this un-cancellable
if (dev != NULL) {
+ debug(1, "ao stop(): ao_close");
ao_close(dev);
dev = NULL;
}
- pthread_setcancelstate(oldState, NULL);
}
audio_output audio_ao = {.name = "ao",
.help = &help,
.init = &init,
.deinit = &deinit,
- .prepare = NULL,
- .start = &start,
+ .configure = &configure,
+ .get_configuration = &get_configuration,
+ // .start = &start,
.stop = &stop,
.is_running = NULL,
.flush = NULL,
* All rights reserved.
*
* Modifications for audio synchronisation
- * and related work, copyright (c) Mike Brady 2014 -- 2022
+ * and related work, copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
.help = NULL,
.init = &init,
.deinit = &deinit,
- .prepare = NULL,
+ .configure = NULL,
.start = &start,
.stop = &stop,
.is_running = NULL,
/*
* jack output driver. This file is part of Shairport Sync.
- * Copyright (c) 2019 -- 2024 Mike Brady <4265913+mikebrady@users.noreply.github.com>,
- * Jörn Nettingsmeier <nettings@luchtbeweging.nl>
+ * Copyright (c) 2019--2025 Mike Brady <4265913+mikebrady@users.noreply.github.com>,
+ * Jörn Nettingsmeier <nettings@luchtbeweging.nl>
*
* All rights reserved.
*
jack_port_t *port[NPORTS];
const char *port_name[NPORTS] = {"out_L", "out_R"};
-
int sps_sample_rate;
jack_client_t *client;
// instead.
config.audio_backend_buffer_interpolation_threshold_in_seconds = 0.25;
- // Do the "general" audio options. Note, these options are in the "general" stanza!
- parse_general_audio_options();
+ // this ensures that the Shairport Sync system will provide only 44100/S16/2 to this backend.
+ parse_audio_options(NULL, (1 << SPS_FORMAT_S16_LE), (1 << SPS_RATE_44100), (1 << 2));
+
#ifdef CONFIG_SOXR
config.jack_soxr_resample_quality = -1; // don't resample by default
#endif
// Now the options specific to the backend, from the "jack" stanza:
if (config.cfg != NULL) {
const char *str;
- if (config_lookup_string(config.cfg, "jack.client_name", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "jack.client_name", &str)) {
config.jack_client_name = (char *)str;
}
- if (config_lookup_string(config.cfg, "jack.autoconnect_pattern", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "jack.autoconnect_pattern", &str)) {
config.jack_autoconnect_pattern = (char *)str;
}
#ifdef CONFIG_SOXR
- if (config_lookup_string(config.cfg, "jack.soxr_resample_quality", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "jack.soxr_resample_quality", &str)) {
debug(1, "SOXR quality %s", str);
config.jack_soxr_resample_quality = parse_soxr_quality_name(str);
}
soxr_delete(soxr);
}
soxr_error_t e = NULL;
- soxr = soxr_create(sps_sample_rate, jack_sample_rate, NPORTS, &e, &io_spec, &quality_spec, NULL);
+ soxr =
+ soxr_create(sps_sample_rate, jack_sample_rate, NPORTS, &e, &io_spec, &quality_spec, NULL);
if (!soxr) {
die("Unable to create soxr resampler for JACK: %s", e);
}
// jack_latency is set by the graph() callback, it's the average of the maximum
// latencies of all our output ports. Adjust this constant baseline delay according
// to the buffer fill level:
- int64_t the_delay_in_jack_frames = jack_latency + audio_occupancy_now - frames_processed_since_latest_latency_check;
+ int64_t the_delay_in_jack_frames =
+ jack_latency + audio_occupancy_now - frames_processed_since_latest_latency_check;
int64_t the_delay_in_sps_frames = (the_delay_in_jack_frames * sps_sample_rate) / jack_sample_rate;
*the_delay = the_delay_in_sps_frames;
- // debug(2, "reporting a delay of %ld frames at Shairport Sync's rate of %d FPS.",*the_delay, sps_sample_rate);
+ // debug(2, "reporting a delay of %ld frames at Shairport Sync's rate of %d FPS.",*the_delay,
+ // sps_sample_rate);
return 0;
}
.help = NULL,
.init = &jack_init,
.deinit = &jack_deinit,
- .prepare = NULL,
+ .configure = NULL,
.start = &jack_start,
.stop = NULL,
.is_running = NULL,
/*
- * Asynchronous PulseAudio Backend. This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2017-2023
+ * PulseAudio Backend. This file is part of Shairport Sync.
+ * Copyright (c) Mike Brady 2017--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#include <string.h>
#include <unistd.h>
-// note -- these are hacked and hardwired into this code.
-#define FORMAT PA_SAMPLE_S16NE
-#define RATE 44100
+typedef struct {
+ pa_sample_format_t pa_format;
+ sps_format_t sps_format;
+ unsigned int bytes_per_sample;
+} pa_sps_t;
-// Four seconds buffer -- should be plenty
-#define buffer_allocation RATE * 4 * 2 * 2
+// these are the only formats that audio_pw will ever allow itself to be configured with
+static pa_sps_t format_lookup[] = {{PA_SAMPLE_S16LE, SPS_FORMAT_S16_LE, 2},
+ {PA_SAMPLE_S16BE, SPS_FORMAT_S16_BE, 2},
+ {PA_SAMPLE_S32LE, SPS_FORMAT_S32_LE, 4},
+ {PA_SAMPLE_S32BE, SPS_FORMAT_S32_BE, 4}};
+
+#define CHANNEL_MAP_SIZE 1024
+static char channel_map[CHANNEL_MAP_SIZE + 1];
static pthread_mutex_t buffer_mutex = PTHREAD_MUTEX_INITIALIZER;
pa_mainloop_api *mainloop_api;
pa_context *context;
pa_stream *stream;
+
+static int32_t current_encoded_output_format = 0;
+const char *default_channel_layouts = NULL;
static char *audio_lmb, *audio_umb, *audio_toq, *audio_eoq;
-static size_t audio_size = buffer_allocation;
-static size_t audio_occupancy;
+static size_t audio_size, audio_occupancy;
+
+// use an SPS_FORMAT_... to find an entry in the format_lookup table or return NULL
+static pa_sps_t *sps_format_lookup(sps_format_t to_find) {
+ pa_sps_t *response = NULL;
+ unsigned int i = 0;
+ while ((response == NULL) && (i < sizeof(format_lookup) / sizeof(pa_sps_t))) {
+ if (format_lookup[i].sps_format == to_find)
+ response = &format_lookup[i];
+ else
+ i++;
+ }
+ return response;
+}
void context_state_cb(pa_context *context, void *mainloop);
void stream_state_cb(pa_stream *s, void *mainloop);
}
}
-static void connect_stream() {
- // debug(1, "connect_stream");
- uint32_t buffer_size_in_bytes = (uint32_t)2 * 2 * RATE * 0.1; // hard wired in here
- // debug(1, "pa_buffer size is %u bytes.", buffer_size_in_bytes);
+static int check_settings(sps_format_t sample_format, unsigned int sample_rate,
+ unsigned int channel_count) {
- pa_threaded_mainloop_lock(mainloop);
- // Create a playback stream
- pa_sample_spec sample_specifications;
- sample_specifications.format = FORMAT;
- sample_specifications.rate = RATE;
- sample_specifications.channels = 2;
-
- pa_channel_map map;
- pa_channel_map_init_stereo(&map);
-
- stream = pa_stream_new(context, "Playback", &sample_specifications, &map);
- pa_stream_set_state_callback(stream, stream_state_cb, mainloop);
- pa_stream_set_write_callback(stream, stream_write_cb, mainloop);
- // pa_stream_set_latency_update_callback(stream, stream_latency_cb, mainloop);
-
- // recommended settings, i.e. server uses sensible values
- pa_buffer_attr buffer_attr;
- buffer_attr.maxlength = (uint32_t)-1;
- buffer_attr.tlength = buffer_size_in_bytes;
- buffer_attr.prebuf = (uint32_t)0;
- buffer_attr.minreq = (uint32_t)-1;
-
- pa_stream_flags_t stream_flags;
- stream_flags = PA_STREAM_START_CORKED | PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_NOT_MONOTONIC |
- PA_STREAM_AUTO_TIMING_UPDATE | PA_STREAM_ADJUST_LATENCY;
-
- int connect_result;
-
- if (config.pa_sink) {
- // Connect stream to the sink specified in the config
- connect_result =
- pa_stream_connect_playback(stream, config.pa_sink, &buffer_attr, stream_flags, NULL, NULL);
- } else {
- // Connect stream to the default audio output sink
- connect_result =
- pa_stream_connect_playback(stream, NULL, &buffer_attr, stream_flags, NULL, NULL);
+ // debug(1, "pa check_settings: configuration: %u/%s/%u.", sample_rate,
+ // sps_format_description_string(sample_format), channel_count);
+
+ int response = EINVAL;
+
+ pa_sps_t *format_info = sps_format_lookup(sample_format);
+ if (format_info != NULL) {
+ // here, try to create a stream of the given format.
+
+ pa_threaded_mainloop_lock(mainloop);
+ // Create a playback stream
+ pa_sample_spec sample_specifications;
+ sample_specifications.format = format_info->pa_format;
+ sample_specifications.rate = sample_rate;
+ sample_specifications.channels = channel_count;
+
+ pa_channel_map map;
+ pa_channel_map_init_auto(&map, sample_specifications.channels, PA_CHANNEL_MAP_DEFAULT);
+
+ pa_stream *check_settings_stream =
+ pa_stream_new(context, "Playback", &sample_specifications, &map);
+
+ if (check_settings_stream != NULL) {
+ response = 0; // success
+ pa_stream_unref(check_settings_stream);
+ }
+ pa_threaded_mainloop_unlock(mainloop);
+
+ // response = 0;
}
- if (connect_result != 0)
- die("could not connect to the pulseaudio playback stream -- the error message is \"%s\".",
- pa_strerror(pa_context_errno(context)));
+ // debug(1, "pa check_settings: configuration: %u/%s/%u %s.", sample_rate,
+ // sps_format_description_string(sample_format), channel_count,
+ // response == 0 ? "is okay" : "can not be configured");
+ return response;
+}
- // Wait for the stream to be ready
- for (;;) {
- pa_stream_state_t stream_state = pa_stream_get_state(stream);
- if (!PA_STREAM_IS_GOOD(stream_state))
- die("stream state is no longer good while waiting for stream to become ready -- the error "
- "message is \"%s\".",
+static int check_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ return check_settings(format, rate, channels);
+}
+
+static int32_t get_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ uint64_t start_time = get_absolute_time_in_ns();
+ int32_t response =
+ search_for_suitable_configuration(channels, rate, format, &check_configuration);
+ int64_t elapsed_time = get_absolute_time_in_ns() - start_time;
+ debug(3, "pa: get_configuration took %0.3f mS.", elapsed_time * 0.000001);
+ return response;
+}
+
+static int configure(int32_t requested_encoded_format, char **resulting_channel_map) {
+ debug(3, "pa: configure %s.", short_format_description(requested_encoded_format));
+ int response = 0;
+ if (current_encoded_output_format != requested_encoded_format) {
+ uint64_t start_time = get_absolute_time_in_ns();
+ if (current_encoded_output_format == 0)
+ debug(3, "pa: setting output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ else
+ // note -- can't use short_format_description twice in one call because it returns the same
+ // string buffer each time
+ debug(3, "pa: changing output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ current_encoded_output_format = requested_encoded_format;
+ pa_sps_t *format_info =
+ sps_format_lookup(FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format));
+
+ if (format_info == NULL)
+ die("pa: can't find format information!");
+
+ if (audio_lmb != NULL) {
+ free(audio_lmb); // release previous buffer
+ }
+
+ if (stream != NULL) {
+ // debug(1, "pa: stopping and releasing the current stream...");
+ pa_threaded_mainloop_lock(mainloop);
+ if (pa_stream_is_corked(stream) == 0) {
+ // debug(1,"Flush and cork for flush.");
+ pa_stream_flush(stream, stream_success_cb, NULL);
+ pa_stream_cork(stream, 1, stream_success_cb, mainloop);
+ }
+ pa_stream_disconnect(stream);
+ pa_stream_unref(stream);
+ pa_threaded_mainloop_unlock(mainloop);
+ stream = NULL;
+ }
+
+ audio_size = RATE_FROM_ENCODED_FORMAT(current_encoded_output_format) *
+ format_info->bytes_per_sample *
+                 CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format) * 1; // one second
+ audio_lmb = malloc(audio_size);
+ if (audio_lmb == NULL)
+ die("Can't allocate %zd bytes for pulseaudio buffer.", audio_size);
+ audio_toq = audio_eoq = audio_lmb;
+ audio_umb = audio_lmb + audio_size;
+ audio_occupancy = 0;
+
+ pa_threaded_mainloop_lock(mainloop);
+ // Create a playback stream
+ pa_sample_spec sample_specifications;
+ sample_specifications.format = format_info->pa_format;
+ sample_specifications.rate = RATE_FROM_ENCODED_FORMAT(current_encoded_output_format);
+ sample_specifications.channels = CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format);
+
+ uint32_t buffer_size_in_bytes =
+ (uint32_t)CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format) *
+ format_info->bytes_per_sample * RATE_FROM_ENCODED_FORMAT(current_encoded_output_format) /
+ 10;
+
+ pa_channel_map map;
+
+ pa_channel_map *pacm = NULL;
+
+ // if we've not asked specifically for native formats, ask for the alsa format...
+ if ((default_channel_layouts == NULL) || (strcasecmp(default_channel_layouts, "alsa") == 0)) {
+ pacm = pa_channel_map_init_auto(&map, sample_specifications.channels, PA_CHANNEL_MAP_ALSA);
+ }
+
+ // ask for the native format...
+ if (pacm == NULL) {
+ pacm = pa_channel_map_init_auto(&map, sample_specifications.channels, PA_CHANNEL_MAP_DEFAULT);
+ }
+
+ // ask for some format...
+ if (pacm == NULL) {
+ pacm =
+ pa_channel_map_init_extend(&map, sample_specifications.channels, PA_CHANNEL_MAP_DEFAULT);
+ }
+
+ if (resulting_channel_map != NULL) { // if needed...
+ // PA_CHANNEL_MAP_ALSA gives default channel maps that correspond to the FFmpeg defaults.
+ // make up a channel map
+ channel_map[0] = '\0';
+ int c;
+ for (c = 0; c < map.channels; c++) {
+ switch (map.map[c]) {
+
+ case PA_CHANNEL_POSITION_MONO:
+ strncat(channel_map, "FC", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_FRONT_LEFT:
+ strncat(channel_map, "FL", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_FRONT_RIGHT:
+ strncat(channel_map, "FR", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_FRONT_CENTER:
+ strncat(channel_map, "FC", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_REAR_CENTER:
+ strncat(channel_map, "BC", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_REAR_LEFT:
+ strncat(channel_map, "BL", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_REAR_RIGHT:
+ strncat(channel_map, "BR", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_LFE:
+ strncat(channel_map, "LFE", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER:
+ strncat(channel_map, "FLC", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER:
+ strncat(channel_map, "FRC", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_SIDE_LEFT:
+ strncat(channel_map, "SL", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_SIDE_RIGHT:
+ strncat(channel_map, "SR", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX0:
+ strncat(channel_map, "AUX0", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX1:
+ strncat(channel_map, "AUX1", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX2:
+ strncat(channel_map, "AUX2", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX3:
+ strncat(channel_map, "AUX3", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX4:
+ strncat(channel_map, "AUX4", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX5:
+ strncat(channel_map, "AUX5", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX6:
+ strncat(channel_map, "AUX6", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX7:
+ strncat(channel_map, "AUX7", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX8:
+ strncat(channel_map, "AUX8", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX9:
+ strncat(channel_map, "AUX9", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX10:
+ strncat(channel_map, "AUX10", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX11:
+ strncat(channel_map, "AUX11", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX12:
+ strncat(channel_map, "AUX12", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX13:
+ strncat(channel_map, "AUX13", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX14:
+ strncat(channel_map, "AUX14", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX15:
+ strncat(channel_map, "AUX15", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX16:
+ strncat(channel_map, "AUX16", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX17:
+ strncat(channel_map, "AUX17", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX18:
+ strncat(channel_map, "AUX18", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX19:
+ strncat(channel_map, "AUX19", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX20:
+ strncat(channel_map, "AUX20", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX21:
+ strncat(channel_map, "AUX21", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX22:
+ strncat(channel_map, "AUX22", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX23:
+ strncat(channel_map, "AUX23", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX24:
+ strncat(channel_map, "AUX24", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX25:
+ strncat(channel_map, "AUX25", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX26:
+ strncat(channel_map, "AUX26", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX27:
+ strncat(channel_map, "AUX27", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX28:
+ strncat(channel_map, "AUX28", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX29:
+ strncat(channel_map, "AUX29", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX30:
+ strncat(channel_map, "AUX30", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_AUX31:
+ strncat(channel_map, "AUX31", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_TOP_CENTER:
+ strncat(channel_map, "TC", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_TOP_FRONT_LEFT:
+ strncat(channel_map, "TFL", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_TOP_FRONT_RIGHT:
+ strncat(channel_map, "TFR", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_TOP_FRONT_CENTER:
+ strncat(channel_map, "TFC", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_TOP_REAR_LEFT:
+ strncat(channel_map, "TBL", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_TOP_REAR_RIGHT:
+ strncat(channel_map, "TBR", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ case PA_CHANNEL_POSITION_TOP_REAR_CENTER:
+ strncat(channel_map, "TBC", sizeof(channel_map) - 1 - strlen(channel_map));
+ break;
+ default:
+ break;
+ }
+ if (c != (map.channels - 1))
+ strncat(channel_map, " ", sizeof(channel_map) - 1 - strlen(channel_map));
+ }
+
+ debug(1, "audio_pa: channel map for %d channels is \"%s\".", sample_specifications.channels,
+ channel_map);
+ }
+
+ stream = pa_stream_new(context, "Playback", &sample_specifications, &map);
+ pa_stream_set_state_callback(stream, stream_state_cb, mainloop);
+ pa_stream_set_write_callback(stream, stream_write_cb, mainloop);
+
+ // recommended settings, i.e. server uses sensible values
+ pa_buffer_attr buffer_attr;
+ buffer_attr.maxlength = (uint32_t)-1;
+ buffer_attr.tlength = buffer_size_in_bytes;
+ buffer_attr.prebuf = (uint32_t)0;
+ buffer_attr.minreq = (uint32_t)-1;
+
+ pa_stream_flags_t stream_flags;
+ stream_flags = PA_STREAM_START_CORKED | PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_NOT_MONOTONIC |
+ PA_STREAM_AUTO_TIMING_UPDATE | PA_STREAM_ADJUST_LATENCY;
+
+ int connect_result;
+
+ if (config.pa_sink) {
+ // Connect stream to the sink specified in the config
+ connect_result = pa_stream_connect_playback(stream, config.pa_sink, &buffer_attr,
+ stream_flags, NULL, NULL);
+ } else {
+ // Connect stream to the default audio output sink
+ connect_result =
+ pa_stream_connect_playback(stream, NULL, &buffer_attr, stream_flags, NULL, NULL);
+ }
+
+ if (connect_result != 0)
+ die("could not connect to the pulseaudio playback stream -- the error message is \"%s\".",
pa_strerror(pa_context_errno(context)));
- if (stream_state == PA_STREAM_READY)
- break;
- pa_threaded_mainloop_wait(mainloop);
- }
- pa_threaded_mainloop_unlock(mainloop);
+ // Wait for the stream to be ready
+ for (;;) {
+ pa_stream_state_t stream_state = pa_stream_get_state(stream);
+ if (!PA_STREAM_IS_GOOD(stream_state))
+ die("stream state is no longer good while waiting for stream to become ready -- the error "
+ "message is \"%s\".",
+ pa_strerror(pa_context_errno(context)));
+ if (stream_state == PA_STREAM_READY)
+ break;
+ pa_threaded_mainloop_wait(mainloop);
+ }
+
+ pa_threaded_mainloop_unlock(mainloop);
+
+  // at this point the stream is connected and has reached the PA_STREAM_READY state
+
+ int64_t elapsed_time = get_absolute_time_in_ns() - start_time;
+ debug(3, "pa: configuration took %0.3f mS.", elapsed_time * 0.000001);
+ } else {
+ debug(3, "pa: setting output configuration -- configuration unchanged, so nothing done.");
+ }
+ if ((response == 0) && (resulting_channel_map != NULL)) {
+ *resulting_channel_map = channel_map;
+ }
+ return response;
}
static int init(__attribute__((unused)) int argc, __attribute__((unused)) char **argv) {
config.audio_backend_latency_offset = 0;
- // get settings from settings file
-
- // do the "general" audio options. Note, these options are in the "general" stanza!
- parse_general_audio_options();
+ // get settings from settings file, passing in defaults for format_set, rate_set and channel_set
+ // Note, these options may be in the "general" stanza or the named stanza
+#ifdef CONFIG_FFMPEG
+ parse_audio_options("pulseaudio", SPS_FORMAT_SET, SPS_RATE_SET, SPS_CHANNEL_SET);
+#else
+ parse_audio_options("pulseaudio", SPS_FORMAT_NON_FFMPEG_SET, SPS_RATE_NON_FFMPEG_SET,
+ SPS_CHANNNEL_NON_FFMPEG_SET);
+#endif
// now the specific options
if (config.cfg != NULL) {
const char *str;
/* Get the PulseAudio server name. */
- if (config_lookup_string(config.cfg, "pa.server", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "pulseaudio.server", &str)) {
config.pa_server = (char *)str;
}
+    // get the default channel layouts basis -- "alsa" or "pulseaudio" (native).
+
+ if (config_lookup_non_empty_string(config.cfg, "pulseaudio", &default_channel_layouts)) {
+ if ((strcasecmp(default_channel_layouts, "alsa") == 0) ||
+ (strcasecmp(default_channel_layouts, "pulseaudio") == 0)) {
+ debug(1, "pulseaudio default_channel_layouts setting: \"%s\".", default_channel_layouts);
+ } else {
+ debug(1, "Invalid pulseaudio default_channel_layouts setting. Must be \"alsa\" or "
+ "\"pulseaudio\".");
+ default_channel_layouts = NULL;
+ }
+ };
+
/* Get the Application Name. */
- if (config_lookup_string(config.cfg, "pa.application_name", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "pulseaudio.application_name", &str)) {
config.pa_application_name = (char *)str;
}
/* Get the PulseAudio sink name. */
- if (config_lookup_string(config.cfg, "pa.sink", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "pulseaudio.sink", &str)) {
config.pa_sink = (char *)str;
}
}
// finish collecting settings
- // allocate space for the audio buffer
- audio_lmb = malloc(audio_size);
- if (audio_lmb == NULL)
- die("Can't allocate %d bytes for pulseaudio buffer.", audio_size);
- audio_toq = audio_eoq = audio_lmb;
- audio_umb = audio_lmb + audio_size;
- audio_occupancy = 0;
+ stream = NULL; // no stream
+ audio_lmb = NULL; // no buffer
// Get a mainloop and its context
+
mainloop = pa_threaded_mainloop_new();
if (mainloop == NULL)
die("could not create a pa_threaded_mainloop.");
break;
pa_threaded_mainloop_wait(mainloop);
}
-
pa_threaded_mainloop_unlock(mainloop);
- connect_stream();
- check_pa_stream_status(stream, "audio_pa initialisation.");
return 0;
}
static void deinit(void) {
- check_pa_stream_status(stream, "audio_pa deinitialisation.");
- pa_stream_disconnect(stream);
- pa_threaded_mainloop_stop(mainloop);
- pa_threaded_mainloop_free(mainloop);
- // debug(1, "pa deinit done");
+ // debug(1, "pa deinit");
+ if (stream != NULL) {
+ check_pa_stream_status(stream, "audio_pa deinitialisation.");
+ pa_stream_disconnect(stream);
+ pa_threaded_mainloop_stop(mainloop);
+ pa_threaded_mainloop_free(mainloop);
+ debug(1, "pa deinit done");
+ }
}
+/*
static void start(__attribute__((unused)) int sample_rate,
__attribute__((unused)) int sample_format) {
check_pa_stream_status(stream, "audio_pa start.");
}
+*/
static int play(void *buf, int samples, __attribute__((unused)) int sample_type,
__attribute__((unused)) uint32_t timestamp,
// debug(1,"pa_play of %d samples.",samples);
// copy the samples into the queue
check_pa_stream_status(stream, "audio_pa play.");
- size_t bytes_to_transfer = samples * 2 * 2;
+
+ pa_sps_t *format_info =
+ sps_format_lookup(FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format));
+ size_t bytes_to_transfer = samples * format_info->bytes_per_sample *
+ CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format);
pthread_mutex_lock(&buffer_mutex);
size_t bytes_available = audio_size - audio_occupancy;
if (bytes_available < bytes_to_transfer)
bytes_to_transfer = bytes_available;
- if (bytes_to_transfer > 0) {
+ if ((bytes_to_transfer > 0) && (audio_lmb != NULL)) {
size_t space_to_end_of_buffer = audio_umb - audio_eoq;
if (space_to_end_of_buffer >= bytes_to_transfer) {
memcpy(audio_eoq, buf, bytes_to_transfer);
audio_occupancy += bytes_to_transfer;
}
- if ((audio_occupancy >= 11025 * 2 * 2) && (pa_stream_is_corked(stream))) {
+ if ((audio_occupancy >=
+ (RATE_FROM_ENCODED_FORMAT(current_encoded_output_format) * format_info->bytes_per_sample *
+ CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format)) /
+ 4) &&
+ (pa_stream_is_corked(stream))) {
// debug(1,"Uncorked");
pthread_mutex_unlock(&buffer_mutex);
pa_threaded_mainloop_lock(mainloop);
}
int pa_delay(long *the_delay) {
+ // debug(1, "pa delay");
check_pa_stream_status(stream, "audio_pa delay.");
// debug(1,"pa_delay");
long result = 0;
pa_threaded_mainloop_lock(mainloop);
int gl = pa_stream_get_latency(stream, &latency, &negative);
pa_threaded_mainloop_unlock(mainloop);
- if (gl == PA_ERR_NODATA) {
- // debug(1, "No latency data yet.");
+ if (gl == -PA_ERR_NODATA) {
reply = -ENODEV;
} else if (gl != 0) {
- // debug(1,"Error %d getting latency.",gl);
reply = -EIO;
} else {
- result = (audio_occupancy / (2 * 2)) + (latency * 44100) / 1000000;
+ pa_sps_t *format_info =
+ sps_format_lookup(FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format));
+ // convert audio_occupancy bytes to frames and latency microseconds into frames
+ result = (audio_occupancy / (format_info->bytes_per_sample *
+ CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format))) +
+ (latency * RATE_FROM_ENCODED_FORMAT(current_encoded_output_format)) / 1000000;
reply = 0;
}
*the_delay = result;
}
static void flush(void) {
- check_pa_stream_status(stream, "audio_pa flush.");
- pa_threaded_mainloop_lock(mainloop);
- if (pa_stream_is_corked(stream) == 0) {
- // debug(1,"Flush and cork for flush.");
- pa_stream_flush(stream, stream_success_cb, NULL);
- pa_stream_cork(stream, 1, stream_success_cb, mainloop);
+ // debug(1, "pa flush");
+ if (stream != NULL) {
+ check_pa_stream_status(stream, "audio_pa flush.");
+ pa_threaded_mainloop_lock(mainloop);
+ if (pa_stream_is_corked(stream) == 0) {
+ // debug(1,"Flush and cork for flush.");
+ pa_stream_flush(stream, stream_success_cb, NULL);
+ pa_stream_cork(stream, 1, stream_success_cb, mainloop);
+ }
+ pa_threaded_mainloop_unlock(mainloop);
}
- pa_threaded_mainloop_unlock(mainloop);
pthread_mutex_lock(&buffer_mutex);
audio_toq = audio_eoq = audio_lmb;
audio_umb = audio_lmb + audio_size;
}
static void stop(void) {
- check_pa_stream_status(stream, "audio_pa stop.");
- // Cork the stream so it will stop playing
- pa_threaded_mainloop_lock(mainloop);
- if (pa_stream_is_corked(stream) == 0) {
- // debug(1,"Flush and cork for stop.");
- pa_stream_flush(stream, stream_success_cb, NULL);
- pa_stream_cork(stream, 1, stream_success_cb, mainloop);
+ // debug(1, "pa stop");
+ if (stream != NULL) {
+ check_pa_stream_status(stream, "audio_pa stop.");
+ // Cork the stream so it will stop playing
+ pa_threaded_mainloop_lock(mainloop);
+ if (pa_stream_is_corked(stream) == 0) {
+ // debug(1,"Flush and cork for stop.");
+ pa_stream_flush(stream, stream_success_cb, NULL);
+ pa_stream_cork(stream, 1, stream_success_cb, mainloop);
+ }
+ pa_threaded_mainloop_unlock(mainloop);
}
- pa_threaded_mainloop_unlock(mainloop);
pthread_mutex_lock(&buffer_mutex);
audio_toq = audio_eoq = audio_lmb;
audio_umb = audio_lmb + audio_size;
pthread_mutex_unlock(&buffer_mutex);
}
-audio_output audio_pa = {.name = "pa",
- .help = NULL,
- .init = &init,
- .deinit = &deinit,
- .prepare = NULL,
- .start = &start,
- .stop = &stop,
- .is_running = NULL,
- .flush = &flush,
- .delay = &pa_delay,
- .stats = NULL,
- .play = &play,
- .volume = NULL,
- .parameters = NULL,
- .mute = NULL};
-
-void context_state_cb(__attribute__((unused)) pa_context *context, void *mainloop) {
+void context_state_cb(__attribute__((unused)) pa_context *local_context, void *local_mainloop) {
// debug(1,"context_state_cb called.");
- pa_threaded_mainloop_signal(mainloop, 0);
+ pa_threaded_mainloop_signal(local_mainloop, 0);
}
-void stream_state_cb(__attribute__((unused)) pa_stream *s, void *mainloop) {
+void stream_state_cb(__attribute__((unused)) pa_stream *s, void *local_mainloop) {
// debug(1,"stream_state_cb called.");
- pa_threaded_mainloop_signal(mainloop, 0);
+ pa_threaded_mainloop_signal(local_mainloop, 0);
}
-void stream_write_cb(pa_stream *stream, size_t requested_bytes,
+void stream_write_cb(pa_stream *local_stream, size_t requested_bytes,
__attribute__((unused)) void *userdata) {
- check_pa_stream_status(stream, "audio_pa stream_write_cb.");
+ // debug(1, "pa stream_write_cb");
+ check_pa_stream_status(local_stream, "audio_pa stream_write_cb.");
int bytes_to_transfer = requested_bytes;
- // int bytes_transferred = 0;
+ int bytes_transferred = 0;
uint8_t *buffer = NULL;
int ret = 0;
pthread_mutex_lock(&buffer_mutex);
pthread_cleanup_push(mutex_unlock, (void *)&buffer_mutex);
while ((bytes_to_transfer > 0) && (audio_occupancy > 0) && (ret == 0)) {
- if (pa_stream_is_suspended(stream))
- debug(1, "stream is suspended");
+ if (pa_stream_is_suspended(local_stream))
+ debug(1, "local_stream is suspended");
size_t bytes_we_can_transfer = bytes_to_transfer;
+ if (audio_occupancy == 0) {
+ pa_stream_cork(local_stream, 1, stream_success_cb, mainloop);
+ debug(1, "stream_write_cb: corked");
+ }
if (audio_occupancy < bytes_we_can_transfer) {
// debug(1, "Underflow? We have %d bytes but we are asked for %d bytes", audio_occupancy,
// bytes_we_can_transfer);
- pa_stream_cork(stream, 1, stream_success_cb, mainloop);
- // debug(1, "Corked");
bytes_we_can_transfer = audio_occupancy;
}
// bytes we can transfer will never be greater than the bytes available
- ret = pa_stream_begin_write(stream, (void **)&buffer, &bytes_we_can_transfer);
- if ((ret == 0) && (buffer != NULL)) {
+ ret = pa_stream_begin_write(local_stream, (void **)&buffer, &bytes_we_can_transfer);
+ if ((ret == 0) && (buffer != NULL) && (audio_lmb != NULL)) {
if (bytes_we_can_transfer <= (size_t)(audio_umb - audio_toq)) {
- // the bytes are all in a row in the audio buffer
+      // the bytes are all in a row in the audio buffer
memcpy(buffer, audio_toq, bytes_we_can_transfer);
audio_toq += bytes_we_can_transfer;
- ret = pa_stream_write(stream, buffer, bytes_we_can_transfer, NULL, 0LL, PA_SEEK_RELATIVE);
+ ret = pa_stream_write(local_stream, buffer, bytes_we_can_transfer, NULL, 0LL,
+ PA_SEEK_RELATIVE);
} else {
// the bytes are in two places in the audio buffer
size_t first_portion_to_write = audio_umb - audio_toq;
memcpy(buffer, audio_toq, first_portion_to_write);
uint8_t *new_buffer = buffer + first_portion_to_write;
memcpy(new_buffer, audio_lmb, bytes_we_can_transfer - first_portion_to_write);
- ret = pa_stream_write(stream, buffer, bytes_we_can_transfer, NULL, 0LL, PA_SEEK_RELATIVE);
+ ret = pa_stream_write(local_stream, buffer, bytes_we_can_transfer, NULL, 0LL,
+ PA_SEEK_RELATIVE);
audio_toq = audio_lmb + bytes_we_can_transfer - first_portion_to_write;
}
- // bytes_transferred += bytes_we_can_transfer;
+ bytes_transferred += bytes_we_can_transfer;
audio_occupancy -= bytes_we_can_transfer;
bytes_to_transfer -= bytes_we_can_transfer;
}
if (ret != 0)
debug(1, "error writing to pa buffer");
// debug(1,"<<<Frames requested %d, written to pa: %d, corked status:
- // %d.",requested_bytes/4,bytes_transferred/4,pa_stream_is_corked(stream));
+ // %d.",requested_bytes/4,bytes_transferred/4,pa_stream_is_corked(local_stream));
}
-void stream_success_cb(__attribute__((unused)) pa_stream *stream,
+void stream_success_cb(__attribute__((unused)) pa_stream *local_stream,
__attribute__((unused)) int success,
__attribute__((unused)) void *userdata) {
return;
}
+
+audio_output audio_pa = {.name = "pulseaudio",
+ .help = NULL,
+ .init = &init,
+ .deinit = &deinit,
+ .start = NULL,
+ .configure = &configure,
+ .get_configuration = &get_configuration,
+ .stop = &stop,
+ .is_running = NULL,
+ .flush = &flush,
+ .delay = &pa_delay,
+ .stats = NULL,
+ .play = &play,
+ .volume = NULL,
+ .parameters = NULL,
+ .mute = NULL};
* All rights reserved.
*
* Modifications for audio synchronisation
- * and related work, copyright (c) Mike Brady 2014
+ * and related work, copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#include <unistd.h>
static int fd = -1;
+static int warned = 0;
+static unsigned int bytes_per_frame = 0;
char *pipename = NULL;
char *default_pipe_name = "/tmp/shairport-sync-audio";
// if it's got a reader, write to it.
if (fd > 0) {
// int rc = non_blocking_write(fd, buf, samples * 4);
- int rc = write(fd, buf, samples * 4);
- if ((rc < 0) && (errno != EPIPE)) {
+ if (bytes_per_frame == 0)
+ debug(1, "pipe: bytes per frame not initialised before play()!");
+ int rc = write(fd, buf, samples * bytes_per_frame);
+ if ((rc < 0) && (errno != EPIPE) && (warned == 0)) {
strerror_r(errno, (char *)errorstring, 1024);
- debug(1, "audio_pip play: error %d writing to the pipe named \"%s\": \"%s\".", errno,
- pipename, errorstring);
+ debug(1, "error %d writing to the pipe named \"%s\": \"%s\".", errno, pipename, errorstring);
+ warned = 1;
}
}
return 0;
config.audio_backend_buffer_desired_length = 1.0;
config.audio_backend_latency_offset = 0;
- // do the "general" audio options. Note, these options are in the "general" stanza!
- parse_general_audio_options();
-
+ // get settings from settings file, passing in defaults for format_set, rate_set and channel_set
+ // Note, these options may be in the "general" stanza or the named stanza
+#ifdef CONFIG_AIRPLAY_2
+ parse_audio_options("pipe", (1 << SPS_FORMAT_S32_LE), (1 << SPS_RATE_48000), (1 << 2));
+#else
+ parse_audio_options("pipe", (1 << SPS_FORMAT_S16_LE), (1 << SPS_RATE_44100), (1 << 2));
+#endif
if (config.cfg != NULL) {
/* Get the Output Pipename. */
const char *str;
- if (config_lookup_string(config.cfg, "pipe.name", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "pipe.name", &str)) {
pipename = (char *)str;
+ } else {
+ die("pipename needed");
}
}
printf(" Provide the pipe's pathname. The default is \"%s\".\n", default_pipe_name);
}
+static int32_t get_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ // use the standard format/rate/channel search to get a suitable configuration. No
+ // check_configuration() method needs to be passed to search_for_suitable_configuration() because
+ // it will always return a valid choice based on any settings and the defaults
+ return search_for_suitable_configuration(channels, rate, format, NULL);
+}
+
+static int configure(int32_t requested_encoded_format, __attribute__((unused)) char **channel_map) {
+ int response = 0;
+ unsigned int bytes_per_sample =
+ sps_format_sample_size(FORMAT_FROM_ENCODED_FORMAT(requested_encoded_format));
+ if (bytes_per_sample == 0) {
+ debug(1, "pipe: unknown output format.");
+ bytes_per_sample = 4; // emergency hack
+ response = EINVAL;
+ }
+ bytes_per_frame = bytes_per_sample * CHANNELS_FROM_ENCODED_FORMAT(requested_encoded_format);
+ return response;
+}
+
audio_output audio_pipe = {.name = "pipe",
.help = &help,
.init = &init,
.deinit = &deinit,
- .prepare = NULL,
+ .get_configuration = &get_configuration,
+ .configure = &configure,
.start = &start,
.stop = &stop,
.is_running = NULL,
/*
* Asynchronous PipeWire Backend. This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2023
+ * Copyright (c) Mike Brady 2024--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#include <pipewire/pipewire.h>
#include <spa/param/audio/format-utils.h>
-// note -- these are hardwired into this code.
-#define DEFAULT_FORMAT SPA_AUDIO_FORMAT_S16_LE
-#define DEFAULT_BYTES_PER_SAMPLE 2
-
-#define DEFAULT_RATE 44100
-#define DEFAULT_CHANNELS 2
-#define DEFAULT_BUFFER_SIZE_IN_SECONDS 4
-
-// Four seconds buffer -- should be plenty
-#define buffer_allocation DEFAULT_RATE * DEFAULT_BUFFER_SIZE_IN_SECONDS * DEFAULT_BYTES_PER_SAMPLE * DEFAULT_CHANNELS
-
+static char channel_map_mono[] = "FC";
+static char channel_map_stereo[] = "FL FR";
+static char channel_map_2p1[] = "FL FR LFE";
+static char channel_map_4p0[] = "FL FR FC BC";
+static char channel_map_5p0[] = "FL FR FC BL BR";
+static char channel_map_5p1[] = "FL FR FC LFE BL BR";
+static char channel_map_6p1[] = "FL FR FC LFE BC SL SR";
+static char channel_map_7p1[] = "FL FR FC LFE BL BR SL SR";
+
+typedef struct {
+ enum spa_audio_format spa_format;
+ sps_format_t sps_format;
+ unsigned int bytes_per_sample;
+} spa_sps_t;
+
+// these are the only formats that audio_pw will ever allow itself to be configured with
+static spa_sps_t format_lookup[] = {{SPA_AUDIO_FORMAT_S16_LE, SPS_FORMAT_S16_LE, 2},
+ {SPA_AUDIO_FORMAT_S16_BE, SPS_FORMAT_S16_BE, 2},
+ {SPA_AUDIO_FORMAT_S32_LE, SPS_FORMAT_S32_LE, 4},
+ {SPA_AUDIO_FORMAT_S32_BE, SPS_FORMAT_S32_BE, 4}};
+
+#define BUFFER_SIZE_IN_SECONDS 1
+
+static uint8_t buffer[1024];
static pthread_mutex_t buffer_mutex = PTHREAD_MUTEX_INITIALIZER;
+static int32_t current_encoded_output_format = 0;
static char *audio_lmb, *audio_umb, *audio_toq, *audio_eoq;
-static size_t audio_size = buffer_allocation;
+static size_t audio_size = 0;
static size_t audio_occupancy;
static int enable_fill;
static int stream_is_active;
-
-struct timing_data {
- int pw_time_is_valid; // set when the pw_time has been set
- struct pw_time time_info; // information about the last time a process callback occurred
- size_t frames; // the number of frames sent at that time
-};
-
-// to avoid using a mutex, write the same data twice and check they are the same
-// to ensure they are consistent. Make sure the first is written strictly before the second
-// using __sync_synchronize();
-struct timing_data timing_data_1, timing_data_2;
+static int on_process_is_running = 0;
struct data {
struct pw_thread_loop *loop;
struct pw_stream *stream;
+ unsigned int rate;
+ unsigned int bytes_per_sample;
+ unsigned int channels;
};
// the pipewire global data structure
-struct data data = {NULL, NULL};
-
-/*
-static void on_state_changed(__attribute__((unused)) void *userdata, enum pw_stream_state old,
- enum pw_stream_state state,
- __attribute__((unused)) const char *error) {
- // struct pw_data *pw = userdata;
- debug(3, "pw: stream state changed %s -> %s", pw_stream_state_as_string(old),
- pw_stream_state_as_string(state));
+struct data data = {NULL, NULL, 0, 0, 0};
+
+// use an SPS_FORMAT_... to find an entry in the format_lookup table or return NULL
+static spa_sps_t *sps_format_lookup(sps_format_t to_find) {
+ spa_sps_t *response = NULL;
+ unsigned int i = 0;
+ while ((response == NULL) && (i < sizeof(format_lookup) / sizeof(spa_sps_t))) {
+ if (format_lookup[i].sps_format == to_find)
+ response = &format_lookup[i];
+ else
+ i++;
+ }
+ return response;
}
-*/
static void on_process(void *userdata) {
- struct data *data = userdata;
+ struct data *local_data = userdata;
int n_frames = 0;
pthread_mutex_lock(&buffer_mutex);
+ // debug(1, "on_process called.");
+
+ if (stream_is_active == 0)
+ debug(1, "on_process called while stream inactive!");
+
+ on_process_is_running = 1;
if ((audio_occupancy > 0) || (enable_fill)) {
// get a buffer to see how big it can be
- struct pw_buffer *b = pw_stream_dequeue_buffer(data->stream);
+ struct pw_buffer *b = pw_stream_dequeue_buffer(local_data->stream);
if (b == NULL) {
pw_log_warn("out of buffers: %m");
die("PipeWire failure -- out of buffers!");
struct spa_buffer *buf = b->buffer;
uint8_t *dest = buf->datas[0].data;
if (dest != NULL) {
- int stride = DEFAULT_BYTES_PER_SAMPLE * DEFAULT_CHANNELS;
-
+ int stride = local_data->bytes_per_sample * local_data->channels;
+
// note: the requested field is the number of frames, not bytes, requested
int max_possible_frames = SPA_MIN(b->requested, buf->datas[0].maxsize / stride);
memset(dest, 0, bytes_we_can_transfer);
n_frames = max_possible_frames;
}
+
buf->datas[0].chunk->offset = 0;
buf->datas[0].chunk->stride = stride;
buf->datas[0].chunk->size = n_frames * stride;
- pw_stream_queue_buffer(data->stream, b);
- debug(3, "Queueing %d frames for output.", n_frames);
+ pw_stream_queue_buffer(local_data->stream, b);
+
} // (else the first data block does not contain a data pointer)
}
pthread_mutex_unlock(&buffer_mutex);
-
- timing_data_1.frames = n_frames;
- if (pw_stream_get_time_n(data->stream, &timing_data_1.time_info, sizeof(struct timing_data)) == 0)
- timing_data_1.pw_time_is_valid = 1;
- else
- timing_data_1.pw_time_is_valid = 0;
- __sync_synchronize();
- memcpy((char *)&timing_data_2, (char *)&timing_data_1, sizeof(struct timing_data));
- __sync_synchronize();
}
static const struct pw_stream_events stream_events = {PW_VERSION_STREAM_EVENTS,
.process = on_process};
-// PW_VERSION_STREAM_EVENTS, .process = on_process, .state_changed = on_state_changed};
static void deinit(void) {
  // Tear the PipeWire machinery down in reverse order of construction,
  // then release the local transfer buffer.
  pw_thread_loop_stop(data.loop);
  if (data.stream != NULL)
    pw_stream_destroy(data.stream);
  pw_thread_loop_destroy(data.loop);
  pw_deinit();
  on_process_is_running = 0;
  if (audio_lmb != NULL)
    free(audio_lmb); // deallocate the transfer buffer
}
static int init(__attribute__((unused)) int argc, __attribute__((unused)) char **argv) {
// set up default values first
- memset(&timing_data_1, 0, sizeof(struct timing_data));
- memset(&timing_data_2, 0, sizeof(struct timing_data));
- config.audio_backend_buffer_desired_length = 0.35;
+ config.audio_backend_buffer_desired_length = 0.5;
config.audio_backend_buffer_interpolation_threshold_in_seconds =
0.02; // below this, soxr interpolation will not occur -- it'll be basic interpolation
// instead.
config.audio_backend_latency_offset = 0;
- // get settings from settings file
- // do the "general" audio options. Note, these options are in the "general" stanza!
- parse_general_audio_options();
-
- // now any PipeWire-specific options
- if (config.cfg != NULL) {
- const char *str;
-
- // Get the optional Application Name, if provided.
- if (config_lookup_string(config.cfg, "pw.application_name", &str)) {
- config.pw_application_name = (char *)str;
- }
+ // get settings from settings file, passing in defaults for format_set, rate_set and channel_set
+ // Note, these options may be in the "general" stanza or the named stanza
+#ifdef CONFIG_FFMPEG
+ parse_audio_options("pipewire", SPS_FORMAT_SET, SPS_RATE_SET, SPS_CHANNEL_SET);
+#else
+ parse_audio_options("pipewire", SPS_FORMAT_NON_FFMPEG_SET, SPS_RATE_NON_FFMPEG_SET,
+ SPS_CHANNNEL_NON_FFMPEG_SET);
+#endif
+
+ // now any PipeWire-specific options
+ if (config.cfg != NULL) {
+ const char *str;
+
+ // Get the optional Application Name, if provided.
+ if (config_lookup_non_empty_string(config.cfg, "pipewire.application_name", &str)) {
+ config.pw_application_name = (char *)str;
+ }
- // Get the optional PipeWire node name, if provided.
- if (config_lookup_string(config.cfg, "pw.node_name", &str)) {
- config.pw_node_name = (char *)str;
- }
+ // Get the optional PipeWire node name, if provided.
+ if (config_lookup_non_empty_string(config.cfg, "pipewire.node_name", &str)) {
+ config.pw_node_name = (char *)str;
+ }
- // Get the optional PipeWire sink target name, if provided.
- if (config_lookup_string(config.cfg, "pw.sink_target", &str)) {
- config.pw_sink_target = (char *)str;
- }
+ // Get the optional PipeWire sink target name, if provided.
+ if (config_lookup_non_empty_string(config.cfg, "pipewire.sink_target", &str)) {
+ config.pw_sink_target = (char *)str;
}
-
+ }
+
// finished collecting settings
- // allocate space for the audio buffer
- audio_lmb = malloc(audio_size);
- if (audio_lmb == NULL)
- die("Can't allocate %d bytes for PipeWire buffer.", audio_size);
- audio_toq = audio_eoq = audio_lmb;
- audio_umb = audio_lmb + audio_size;
- audio_occupancy = 0;
- // debug(1, "init enable_fill");
+ audio_lmb = NULL;
+ audio_size = 0;
+ current_encoded_output_format = 0;
enable_fill = 1;
- const struct spa_pod *params[1];
- uint8_t buffer[1024];
- struct pw_properties *props;
- struct spa_pod_builder b = SPA_POD_BUILDER_INIT(buffer, sizeof(buffer));
-
int largc = 0;
pw_init(&largc, NULL);
pw_thread_loop_lock(data.loop);
- pw_thread_loop_start(data.loop);
-
- char* appname = config.pw_application_name;
+ char *appname = config.pw_application_name;
if (appname == NULL)
appname = "Shairport Sync";
-
- char* nodename = config.pw_node_name;
+
+ char *nodename = config.pw_node_name;
if (nodename == NULL)
nodename = "Shairport Sync";
- props = pw_properties_new(PW_KEY_MEDIA_TYPE, "Audio", PW_KEY_MEDIA_CATEGORY, "Playback",
- PW_KEY_MEDIA_ROLE, "Music", PW_KEY_APP_NAME, appname,
- PW_KEY_NODE_NAME, nodename, NULL);
-
+ struct pw_properties *props = pw_properties_new(
+ PW_KEY_MEDIA_TYPE, "Audio", PW_KEY_MEDIA_CATEGORY, "Playback", PW_KEY_MEDIA_ROLE, "Music",
+ PW_KEY_APP_NAME, appname, PW_KEY_NODE_NAME, nodename, NULL);
+
if (config.pw_sink_target != NULL) {
debug(3, "setting sink target to \"%s\".", config.pw_sink_target);
pw_properties_set(props, PW_KEY_TARGET_OBJECT, config.pw_sink_target);
data.stream = pw_stream_new_simple(pw_thread_loop_get_loop(data.loop), config.appName, props,
&stream_events, &data);
+ pw_thread_loop_start(data.loop);
+
+ on_process_is_running = 0;
- // Make one parameter with the supported formats. The SPA_PARAM_EnumFormat
- // id means that this is a format enumeration (of 1 value).
- params[0] = spa_format_audio_raw_build(&b, SPA_PARAM_EnumFormat,
- &SPA_AUDIO_INFO_RAW_INIT(.format = DEFAULT_FORMAT,
- .channels = DEFAULT_CHANNELS,
- .rate = DEFAULT_RATE));
-
- // Now connect this stream. We ask that our process function is
- // called in a realtime thread.
- pw_stream_connect(data.stream, PW_DIRECTION_OUTPUT, PW_ID_ANY,
- PW_STREAM_FLAG_AUTOCONNECT | PW_STREAM_FLAG_MAP_BUFFERS |
- PW_STREAM_FLAG_RT_PROCESS | PW_STREAM_FLAG_INACTIVE,
- params, 1);
- stream_is_active = 0;
pw_thread_loop_unlock(data.loop);
return 0;
}
-static void start(__attribute__((unused)) int sample_rate,
- __attribute__((unused)) int sample_format) {
+static int check_settings(sps_format_t sample_format, unsigned int sample_rate,
+ unsigned int channel_count) {
+ // we know the formats with be big- or little-ended.
+ // we will accept only S32_..., S16_...
+
+ int response = EINVAL;
+
+ if (sps_format_lookup(sample_format) != NULL)
+ response = 0;
+
+ debug(3, "pw: configuration: %u/%s/%u %s.", sample_rate,
+ sps_format_description_string(sample_format), channel_count,
+ response == 0 ? "is okay" : "can not be configured");
+ return response;
}
-static void prepare_to_play() {
- // debug(1, "prepare to play");
- if (stream_is_active == 0) {
// Adapter with the argument order expected by search_for_suitable_configuration().
static int check_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
  return check_settings(format, rate, channels);
}
+
+static int32_t get_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ return search_for_suitable_configuration(channels, rate, format, &check_configuration);
+}
+
+static int configure(int32_t requested_encoded_format, char **resulting_channel_map) {
+ // debug(2, "pw: configure %s.", short_format_description(requested_encoded_format));
+ int response = 0;
+ char *channel_map = NULL;
+ // if (1) {
+ if (current_encoded_output_format != requested_encoded_format) {
+ uint64_t start_time = get_absolute_time_in_ns();
+ if (current_encoded_output_format == 0)
+ debug(2, "pw: setting output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ else
+ // note -- can't use short_format_description twice in one call because it returns the same
+ // string buffer each time
+ debug(2, "pw: changing output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ current_encoded_output_format = requested_encoded_format;
+ spa_sps_t *format_info =
+ sps_format_lookup(FORMAT_FROM_ENCODED_FORMAT(current_encoded_output_format));
+
+ if (format_info == NULL)
+ die("Can't find format information!");
+ // enum spa_audio_format spa_format = format_info->spa_format;
+ data.bytes_per_sample = format_info->bytes_per_sample;
+ data.channels = CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format);
+ data.rate = RATE_FROM_ENCODED_FORMAT(current_encoded_output_format);
+
pw_thread_loop_lock(data.loop);
- pw_stream_set_active(data.stream, true);
+ enable_fill = 0;
+
+ if (pw_stream_get_state(data.stream, NULL) != PW_STREAM_STATE_UNCONNECTED) {
+ response = pw_stream_disconnect(data.stream);
+ if (response != 0) {
+ debug(1, "error %d disconnecting stream.", response);
+ }
+ }
+
+ if (audio_lmb != NULL) {
+ // debug(3, "deallocating existing audio_pw.c buffer.");
+ free(audio_lmb);
+ }
+
+ audio_size = data.rate * BUFFER_SIZE_IN_SECONDS * data.bytes_per_sample * data.channels;
+ // allocate space for the audio buffer
+ audio_lmb = malloc(audio_size);
+ if (audio_lmb == NULL)
+ die("Can't allocate %zd bytes for PipeWire buffer.", audio_size);
+ audio_toq = audio_eoq = audio_lmb;
+ audio_umb = audio_lmb + audio_size;
+ audio_occupancy = 0;
+
+ // Make one parameter with the supported formats. The SPA_PARAM_EnumFormat
+ // id means that this is a format enumeration (of 1 value).
+ struct spa_pod_builder b = SPA_POD_BUILDER_INIT(buffer, sizeof(buffer));
+
+ const struct spa_pod *params[1];
+ // create a stream with the default channel layout corresponding to
+ // the number of channels
+ switch (CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format)) {
+ case 1:
+ channel_map = channel_map_mono;
+ params[0] = spa_format_audio_raw_build(
+ &b, SPA_PARAM_EnumFormat,
+ // we are giving the position of 8 channels here, even if we need less than that.
+ &SPA_AUDIO_INFO_RAW_INIT(.format = format_info->spa_format, .channels = data.channels,
+ .rate = data.rate, .position = {SPA_AUDIO_CHANNEL_FC}));
+ break;
+ case 2:
+ channel_map = channel_map_stereo;
+ params[0] = spa_format_audio_raw_build(
+ &b, SPA_PARAM_EnumFormat,
+ // we are giving the position of 8 channels here, even if we need less than that.
+ &SPA_AUDIO_INFO_RAW_INIT(.format = format_info->spa_format, .channels = data.channels,
+ .rate = data.rate,
+ .position = {SPA_AUDIO_CHANNEL_FL, SPA_AUDIO_CHANNEL_FR}));
+ break;
+ case 3:
+ channel_map = channel_map_2p1;
+ params[0] = spa_format_audio_raw_build(
+ &b, SPA_PARAM_EnumFormat,
+ // we are giving the position of 8 channels here, even if we need less than that.
+ &SPA_AUDIO_INFO_RAW_INIT(.format = format_info->spa_format, .channels = data.channels,
+ .rate = data.rate,
+ .position = {SPA_AUDIO_CHANNEL_FL, SPA_AUDIO_CHANNEL_FR,
+ SPA_AUDIO_CHANNEL_LFE}));
+ break;
+ case 4:
+ channel_map = channel_map_4p0;
+ params[0] = spa_format_audio_raw_build(
+ &b, SPA_PARAM_EnumFormat,
+ // we are giving the position of 8 channels here, even if we need less than that.
+ &SPA_AUDIO_INFO_RAW_INIT(.format = format_info->spa_format, .channels = data.channels,
+ .rate = data.rate,
+ .position = {SPA_AUDIO_CHANNEL_FL, SPA_AUDIO_CHANNEL_FR,
+ SPA_AUDIO_CHANNEL_FC, SPA_AUDIO_CHANNEL_BC}));
+ break;
+ case 5:
+ channel_map = channel_map_5p0;
+ params[0] = spa_format_audio_raw_build(
+ &b, SPA_PARAM_EnumFormat,
+ // we are giving the position of 8 channels here, even if we need less than that.
+ &SPA_AUDIO_INFO_RAW_INIT(.format = format_info->spa_format, .channels = data.channels,
+ .rate = data.rate,
+ .position = {SPA_AUDIO_CHANNEL_FL, SPA_AUDIO_CHANNEL_FR,
+ SPA_AUDIO_CHANNEL_FC, SPA_AUDIO_CHANNEL_RL,
+ SPA_AUDIO_CHANNEL_RR}));
+ break;
+ case 6:
+ channel_map = channel_map_5p1;
+ params[0] = spa_format_audio_raw_build(
+ &b, SPA_PARAM_EnumFormat,
+ &SPA_AUDIO_INFO_RAW_INIT(.format = format_info->spa_format, .channels = data.channels,
+ .rate = data.rate,
+ .position = {SPA_AUDIO_CHANNEL_FL, SPA_AUDIO_CHANNEL_FR,
+ SPA_AUDIO_CHANNEL_FC, SPA_AUDIO_CHANNEL_LFE,
+ SPA_AUDIO_CHANNEL_RL, SPA_AUDIO_CHANNEL_RR}));
+ break;
+ case 7:
+ channel_map = channel_map_6p1;
+ params[0] = spa_format_audio_raw_build(
+ &b, SPA_PARAM_EnumFormat,
+ &SPA_AUDIO_INFO_RAW_INIT(.format = format_info->spa_format, .channels = data.channels,
+ .rate = data.rate,
+ .position = {SPA_AUDIO_CHANNEL_FL, SPA_AUDIO_CHANNEL_FR,
+ SPA_AUDIO_CHANNEL_FC, SPA_AUDIO_CHANNEL_LFE,
+ SPA_AUDIO_CHANNEL_BC, SPA_AUDIO_CHANNEL_SL,
+ SPA_AUDIO_CHANNEL_SR}));
+ break;
+ case 8:
+ channel_map = channel_map_7p1;
+ params[0] = spa_format_audio_raw_build(
+ &b, SPA_PARAM_EnumFormat,
+ &SPA_AUDIO_INFO_RAW_INIT(.format = format_info->spa_format, .channels = data.channels,
+ .rate = data.rate,
+ .position = {SPA_AUDIO_CHANNEL_FL, SPA_AUDIO_CHANNEL_FR,
+ SPA_AUDIO_CHANNEL_FC, SPA_AUDIO_CHANNEL_LFE,
+ SPA_AUDIO_CHANNEL_RL, SPA_AUDIO_CHANNEL_RR,
+ SPA_AUDIO_CHANNEL_SL, SPA_AUDIO_CHANNEL_SR}));
+ break;
+ default:
+ channel_map = NULL;
+ params[0] = spa_format_audio_raw_build(
+ &b, SPA_PARAM_EnumFormat,
+ // we are giving the position of 8 channels here, even if we need less than that.
+ &SPA_AUDIO_INFO_RAW_INIT(.format = format_info->spa_format, .channels = data.channels,
+ .rate = data.rate,
+ .position = {SPA_AUDIO_CHANNEL_FL, SPA_AUDIO_CHANNEL_FR,
+ SPA_AUDIO_CHANNEL_FC, SPA_AUDIO_CHANNEL_LFE,
+ SPA_AUDIO_CHANNEL_RL, SPA_AUDIO_CHANNEL_RR,
+ SPA_AUDIO_CHANNEL_SL, SPA_AUDIO_CHANNEL_SR}));
+ break;
+ }
+
+ // Now connect this stream. We ask that our process function is
+ // called in a realtime thread.
+ pw_stream_connect(data.stream, PW_DIRECTION_OUTPUT, PW_ID_ANY,
+ PW_STREAM_FLAG_AUTOCONNECT | PW_STREAM_FLAG_MAP_BUFFERS |
+ PW_STREAM_FLAG_RT_PROCESS,
+ params, 1);
+ stream_is_active = 0;
+ enable_fill = 1;
pw_thread_loop_unlock(data.loop);
- stream_is_active = 1;
- debug(3, "prepare to play activating stream");
+ int64_t elapsed_time = get_absolute_time_in_ns() - start_time;
+ debug(3, "pw: configuration took %0.3f mS.", elapsed_time * 0.000001);
+ } else {
+ debug(2, "pw: setting output configuration -- configuration unchanged, so nothing done.");
+ }
+ if ((response == 0) && (resulting_channel_map != NULL)) {
+ *resulting_channel_map = channel_map;
}
+ return response;
}
static int play(__attribute__((unused)) void *buf, int samples,
__attribute__((unused)) uint64_t playtime) {
if (stream_is_active == 0) {
pw_thread_loop_lock(data.loop);
+ on_process_is_running = 0;
pw_stream_set_active(data.stream, true);
pw_thread_loop_unlock(data.loop);
stream_is_active = 1;
- debug(3, "set stream active");
+ // debug(1, "set stream active");
}
// copy the samples into the queue
- debug(3, "play %u samples; %u bytes already in the buffer.", samples, audio_occupancy);
- size_t bytes_to_transfer = samples * DEFAULT_CHANNELS * DEFAULT_BYTES_PER_SAMPLE;
+ // debug(3, "play %u samples; %u samples already in the buffer.", samples, audio_occupancy /
+ // (data.bytes_per_sample * data.channels));
+ size_t bytes_to_transfer = samples * data.channels * data.bytes_per_sample;
pthread_mutex_lock(&buffer_mutex);
size_t bytes_available = audio_size - audio_occupancy;
if (bytes_available < bytes_to_transfer)
return 0;
}
-int delay(long *the_delay) {
+static int delay(long *the_delay) {
long result = 0;
- int reply = 0;
- // find out what's already in the PipeWire system and when
- struct timing_data timing_data;
- int loop_count = 1;
- do {
- memcpy(&timing_data, (char *)&timing_data_1, sizeof(struct timing_data));
- __sync_synchronize();
- if (memcmp(&timing_data, (char *)&timing_data_2, sizeof(struct timing_data)) != 0) {
- usleep(2); // microseconds
- loop_count++;
- __sync_synchronize();
+ int reply = -ENODEV; // ENODATA is not defined in FreeBSD
+
+ if (on_process_is_running == 0) {
+ debug(3, "pw_processor not running");
+ }
+
+ if ((stream_is_active == 0) && (on_process_is_running != 0)) {
+ debug(3, "stream not active but on_process_is_running is true.");
+ }
+ if (on_process_is_running != 0) {
+
+ struct pw_time stream_time_info_1, stream_time_info_2;
+ ssize_t audio_occupancy_now;
+
+ // get stable pw_time info to ensure we get an audio occupancy figure
+ // that relates to the pw_time we have.
+ // we do this by getting a pw_time before and after getting the occupancy
+ // and accepting the information if they are both the same
+
+ int loop_count = 1;
+ int non_matching;
+ int stream_time_valid_if_zero;
+ do {
+ stream_time_valid_if_zero =
+ pw_stream_get_time_n(data.stream, &stream_time_info_1, sizeof(struct pw_time));
+ audio_occupancy_now = audio_occupancy;
+ pw_stream_get_time_n(data.stream, &stream_time_info_2, sizeof(struct pw_time));
+
+ non_matching = memcmp(&stream_time_info_1, &stream_time_info_2, sizeof(struct pw_time));
+ if (non_matching != 0) {
+ loop_count++;
+ }
+ } while (((non_matching != 0) || (stream_time_valid_if_zero != 0)) && (loop_count < 10));
+
+ if (non_matching != 0) {
+ debug(1, "can't get a stable pw_time!");
}
- } while ((memcmp(&timing_data, (char *)&timing_data_2, sizeof(struct timing_data)) != 0) &&
- (loop_count < 10));
- long total_delay_now_frames_long = 0;
- if ((loop_count < 10) && (timing_data.pw_time_is_valid != 0)) {
- struct timespec time_now;
- clock_gettime(CLOCK_MONOTONIC, &time_now);
- int64_t interval_from_process_time_to_now =
- SPA_TIMESPEC_TO_NSEC(&time_now) - timing_data.time_info.now;
- int64_t delay_in_ns = timing_data.time_info.delay + timing_data.time_info.buffered;
- delay_in_ns = delay_in_ns * 1000000000;
- delay_in_ns = delay_in_ns * timing_data.time_info.rate.num;
- delay_in_ns = delay_in_ns / timing_data.time_info.rate.denom;
-
- int64_t total_delay_now_ns = delay_in_ns - interval_from_process_time_to_now;
- int64_t total_delay_now_frames = (total_delay_now_ns * DEFAULT_RATE) / 1000000000 + timing_data.frames;
- total_delay_now_frames_long = total_delay_now_frames;
- debug(3, "total delay in frames: %ld.", total_delay_now_frames_long);
-
- if (timing_data.time_info.queued != 0) {
- debug(1, "buffers queued: %d", timing_data.time_info.queued);
+ if (stream_time_valid_if_zero != 0) {
+ debug(1, "can't get valid stream info");
+ }
+ if (stream_time_info_1.rate.denom == 0) {
+ debug(2, "non valid stream_time_info_1");
}
- /*
- debug(3,
- "interval_from_process_time_to_now: %" PRId64 " ns, "
- "delay_in_ns: %" PRId64 ", queued: %" PRId64 ", buffered: %" PRId64 ".",
- // delay_timing_data.time_info.rate.num, delay_timing_data.time_info.rate.denom,
- interval_from_process_time_to_now, delay_in_ns,
- timing_data.time_info.queued, timing_data.time_info.buffered);
- */
- } else {
- warn("Shairport Sync's PipeWire backend can not get timing information from the PipeWire "
- "system. Is PipeWire running?");
+ if ((non_matching == 0) && (stream_time_valid_if_zero == 0) &&
+ (stream_time_info_1.rate.denom != 0)) {
+ int64_t interval_from_pw_time_to_now_ns =
+ pw_stream_get_nsec(data.stream) - stream_time_info_1.now;
+
+ uint64_t frames_possibly_played_since_measurement =
+ ((interval_from_pw_time_to_now_ns * data.rate) + 500000000L) / 1000000000L;
+
+ uint64_t net_delay_in_frames = stream_time_info_1.queued + stream_time_info_1.buffered;
+
+ uint64_t fixed_delay_ns =
+ (stream_time_info_1.delay * stream_time_info_1.rate.num * 1000000000L) /
+ stream_time_info_1.rate.denom; // ns;
+ uint64_t fixed_delay_in_frames = ((fixed_delay_ns * data.rate) + 500000000L) / 1000000000L;
+
+ net_delay_in_frames = net_delay_in_frames + fixed_delay_in_frames +
+ audio_occupancy_now / (data.bytes_per_sample * data.channels) -
+ frames_possibly_played_since_measurement;
+
+ result = net_delay_in_frames;
+ reply = 0;
+ }
}
- pthread_mutex_lock(&buffer_mutex);
- result = total_delay_now_frames_long + audio_occupancy / (DEFAULT_BYTES_PER_SAMPLE * DEFAULT_CHANNELS);
- pthread_mutex_unlock(&buffer_mutex);
*the_delay = result;
return reply;
}
pw_stream_set_active(data.stream, false);
pw_thread_loop_unlock(data.loop);
stream_is_active = 0;
- debug(3, "set stream inactive");
+ // debug(1, "set stream inactive");
}
}
-audio_output audio_pw = {.name = "pw",
+audio_output audio_pw = {.name = "pipewire",
.help = NULL,
.init = &init,
.deinit = &deinit,
- .prepare = NULL,
- .start = &start,
+ .start = NULL,
+ .get_configuration = &get_configuration,
+ .configure = &configure,
.stop = &stop,
.is_running = NULL,
.flush = &flush,
.delay = &delay,
.stats = NULL,
.play = &play,
- .prepare_to_play = &prepare_to_play,
.volume = NULL,
.parameters = NULL,
.mute = NULL};
* sndio output driver. This file is part of Shairport Sync.
* Copyright (c) 2013 Dimitri Sokolyuk <demon@dim13.org>
* Copyright (c) 2017 Tobias Kortkamp <t@tobik.me>
- *
- * Modifications for audio synchronisation
- * and related work, copyright (c) Mike Brady 2014 -- 2024
+ * Copyright (c) 2014--2025 Mike Brady
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software for any
#include "audio.h"
#include "common.h"
+#include <errno.h>
#include <pthread.h>
#include <sndio.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
+// see https://sndio.org/tips.html#section_2_1, section 11 Glossary
+// don't have any information for 7 and 8 channels
+static char channel_map_1[] = "FL";
+static char channel_map_2[] = "FL FR";
+static char channel_map_3[] = "FL FR BL";
+static char channel_map_4[] = "FL FR BL BR";
+static char channel_map_5[] = "FL FR BL BR FC";
+static char channel_map_6[] = "FL FR BL BR FC LFE";
+
static pthread_mutex_t sndio_mutex = PTHREAD_MUTEX_INITIALIZER;
+
+static int current_encoded_output_format;
static struct sio_hdl *hdl;
+static const char *output_device_name;
+static unsigned int output_device_driver_bufsiz; // parameters for opening the output device
+static unsigned int output_device_driver_round; // parameters for opening the output device
static int is_running;
+
static int framesize;
static size_t played;
static size_t written;
struct sio_par par;
struct sndio_formats {
- const char *name;
- sps_format_t fmt;
- unsigned int rate;
- unsigned int bits;
- unsigned int bps;
- unsigned int sig;
- unsigned int le;
+ sps_format_t sps_format;
+ unsigned int bits; // bits per sample
+ unsigned int sig; // signed = 1,
+ unsigned int le; // is little endian
};
-static struct sndio_formats formats[] = {{"S8", SPS_FORMAT_S8, 44100, 8, 1, 1, SIO_LE_NATIVE},
- {"U8", SPS_FORMAT_U8, 44100, 8, 1, 0, SIO_LE_NATIVE},
- {"S16", SPS_FORMAT_S16, 44100, 16, 2, 1, SIO_LE_NATIVE},
- {"AUTOMATIC", SPS_FORMAT_S16, 44100, 16, 2, 1,
- SIO_LE_NATIVE}, // TODO: make this really automatic?
- {"S24", SPS_FORMAT_S24, 44100, 24, 4, 1, SIO_LE_NATIVE},
- {"S24_3LE", SPS_FORMAT_S24_3LE, 44100, 24, 3, 1, 1},
- {"S24_3BE", SPS_FORMAT_S24_3BE, 44100, 24, 3, 1, 0},
- {"S32", SPS_FORMAT_S32, 44100, 32, 4, 1, SIO_LE_NATIVE}};
+static struct sndio_formats format_lookup[] = {{SPS_FORMAT_S16_LE, 16, 1, 1},
+ {SPS_FORMAT_S16_BE, 16, 1, 0},
+ {SPS_FORMAT_S32_LE, 32, 1, 1},
+ {SPS_FORMAT_S32_BE, 32, 1, 0}};
+
+// use an SPS_FORMAT_... to find an entry in the format_lookup table or return NULL
+static struct sndio_formats *sps_format_lookup(sps_format_t to_find) {
+ struct sndio_formats *response = NULL;
+ unsigned int i = 0;
+ while ((response == NULL) && (i < sizeof(format_lookup) / sizeof(struct sndio_formats))) {
+ if (format_lookup[i].sps_format == to_find)
+ response = &format_lookup[i];
+ else
+ i++;
+ }
+ return response;
+}
+
+static uint16_t permissible_configurations[SPS_RATE_HIGHEST + 1][SPS_FORMAT_HIGHEST_NATIVE + 1]
+ [8 + 1];
+
+static int get_permissible_configuration_settings() {
+ int ret = 0;
+ uint64_t hto = get_absolute_time_in_ns();
+ pthread_cleanup_debug_mutex_lock(&sndio_mutex, 1000, 1);
+ struct sio_hdl *hdl = sio_open(output_device_name, SIO_PLAY, 0);
+ if (hdl != NULL) {
+ struct sio_cap cap;
+ // memset(&cap, 0, sizeof(struct sio_cap));
+ ret = sio_getcap(hdl, &cap);
+ if (ret == 1) {
+ sps_format_t f;
+ sps_rate_t r;
+ unsigned int c;
+ // check what numbers of channels the device can provide...
+ for (c = 1; c <= 8; c++) {
+ // if it's in the channel set -- either due to a setting in the configuration file or by
+ // default, check it...
+ if ((config.channel_set & (1 << c)) != 0) {
+
+ unsigned int s = 0;
+ int fo = 0;
+ while ((s < SIO_NCHAN) && (fo == 0)) {
+ if (cap.pchan[s] == c)
+ fo = 1;
+ else
+ s++;
+ }
+ if (fo == 0) {
+ debug(3, "sndio: output device can't deal with %u channels.", c);
+ config.channel_set &= ~(1 << c); // remove this channel count
+ } else {
+ debug(3, "sndio: output device can have %u channels.", c);
+ }
+ }
+ }
+
+ // check what speeds the device can handle
+ for (r = SPS_RATE_LOWEST; r <= SPS_RATE_HIGHEST; r++) {
+ // if it's in the rate set -- either due to a setting in the configuration file or by
+ // default, check it...
+ if ((config.rate_set & (1 << r)) != 0) {
+ unsigned int s = 0;
+ int fo = 0;
+ while ((s < SIO_NRATE) && (fo == 0)) {
+ if (cap.rate[s] == sps_rate_actual_rate(r))
+ fo = 1;
+ else
+ s++;
+ }
+ if (fo == 0) {
+ debug(3, "sndio: output device can't be set to %u fps.", sps_rate_actual_rate(r));
+ config.rate_set &= ~(1 << r); // remove this rate
+ } else {
+ debug(3, "sndio: output device can be set to %u fps.", sps_rate_actual_rate(r));
+ }
+ }
+ }
+
+ // check what formats the device can handle
+ for (f = SPS_FORMAT_LOWEST; f <= SPS_FORMAT_HIGHEST_NATIVE; f++) {
+ // if it's in the format set -- either due to a setting in the configuration file or by
+ // default, check it...
+ if ((config.format_set & (1 << f)) != 0) {
+ struct sndio_formats *format = sps_format_lookup(f);
+ unsigned int s = 0;
+ int found = 0;
+ if (format != NULL) {
+ while ((s < SIO_NENC) && (found == 0)) {
+ if ((cap.enc[s].bits == format->bits) && (cap.enc[s].le == format->le) &&
+ (cap.enc[s].sig == format->sig)) {
+ // debug(1, "bits: %u, %u. le: %u, %u. sig: %u, %u.", cap.enc[s].bits, format->bits,
+ // cap.enc[s].le, format->le, cap.enc[s].sig, format->sig);
+ found = 1;
+ } else {
+ s++;
+ }
+ }
+ } else {
+ debug(3, "sndio: no entry for format %s.", sps_format_description_string(f));
+ }
+ if (found == 0) {
+ if (format != 0)
+ debug(3, "sndio: output device can't be set to format %s.",
+ sps_format_description_string(f));
+ config.format_set &= ~(1 << f); // remove this format
+ } else {
+ debug(3, "sndio: output device can be set to format %s.",
+ sps_format_description_string(f));
+ }
+ }
+ }
+ // for (f = SPS_FORMAT_LOWEST; f <= SPS_FORMAT_HIGHEST_NATIVE; f++) {
+ // if ((config.format_set & (1 << f)) != 0)
+ // debug(1, "format %s okay.", sps_format_description_string(f));
+ // }
+ // now we have the channels, rates and formats, but we need to check each combination
+ // set the permissible_configurations array (r/f/c) to EINVAL
+ for (r = SPS_RATE_LOWEST; r <= SPS_RATE_HIGHEST; r++)
+ for (f = SPS_FORMAT_LOWEST; f <= SPS_FORMAT_HIGHEST_NATIVE; f++)
+ for (c = 0; c <= 8; c++) {
+ permissible_configurations[r][f][c] = EINVAL;
+ }
+ // now check each combination of permitted rate/format/channel and see if it's really allowed
+ for (r = SPS_RATE_LOWEST; r <= SPS_RATE_HIGHEST; r++) {
+ if ((config.rate_set & (1 << r)) != 0) {
+ for (f = SPS_FORMAT_LOWEST; f <= SPS_FORMAT_HIGHEST_NATIVE; f++) {
+ if ((config.format_set & (1 << f)) != 0) {
+ for (c = 0; c <= 8; c++) {
+ if ((config.channel_set & (1 << c)) != 0) {
+ // debug(1, "check %u/%s/%u.", sps_rate_actual_rate(r),
+ // sps_format_description_string(f), c);
+ struct sio_par proposed_par, actual_par;
+ struct sndio_formats *format_info = sps_format_lookup(f);
+ sio_initpar(&proposed_par);
+ proposed_par.rate = sps_rate_actual_rate(r);
+ proposed_par.pchan = c;
+ proposed_par.bits = format_info->bits;
+ proposed_par.bps = SIO_BPS(par.bits);
+ proposed_par.le = format_info->le;
+ proposed_par.sig = format_info->sig;
+ if (sio_setpar(hdl, &proposed_par) == 1) {
+ if (sio_getpar(hdl, &actual_par) == 1) {
+ if ((actual_par.rate == proposed_par.rate) &&
+ (actual_par.pchan == proposed_par.pchan) &&
+ (actual_par.bits == proposed_par.bits) &&
+ (actual_par.le == proposed_par.le) &&
+ (actual_par.sig == proposed_par.sig)) {
+ permissible_configurations[r][f][c] =
+ 0; // i.e. no error, so remove the EINVAL
+ } else {
+ debug(3, "sndio: check_setting: could not set format exactly");
+ }
+ } else {
+ debug(1,
+ "sndio: check_setting: could not get response for a proposed format");
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ pthread_cleanup_pop(1); // let the handle go...
+ ret = 0; // all good here, even if the last ret was an error
+ int64_t hot = get_absolute_time_in_ns() - hto;
+ debug(2, "sndio: get_permissible_configuration_settings took %f ms.", 0.000001 * hot);
+ return ret;
+}
+
+static int check_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ // Check whether this rate/format/channels combination was found usable by the
+ // earlier device probe. Returns 0 if permitted, otherwise the error code
+ // (EINVAL) left in permissible_configurations by the probe.
+ // we know that the format/rate/channel count are legitimate but the combination
+ // may not be permitted.
+ // now see if the individual formats, rates and channel count are permissible
+ sps_rate_t r = SPS_RATE_LOWEST;
+ int found = 0;
+ // we know the rate is there, we just have to find it.
+ while ((r <= SPS_RATE_HIGHEST) && (found == 0)) {
+ if ((sps_rate_actual_rate(r) == rate) && ((config.rate_set & (1 << r)) != 0))
+ found = 1;
+ else
+ r++;
+ }
+
+ // NOTE(review): if the rate is in fact not found above, r ends up as
+ // SPS_RATE_HIGHEST + 1 and the lookup below indexes past the first dimension
+ // of permissible_configurations -- confirm callers always pass a rate that is
+ // present in config.rate_set.
+ int response = permissible_configurations[r][format][channels];
+
+ if (response != 0)
+ debug(3, "check %u/%s/%u returns %d.", rate, sps_format_description_string(format), channels,
+ response);
+ return response;
+}
+
+static int32_t get_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ // Delegate to the shared search routine, passing check_configuration() so that
+ // combinations the sndio device cannot actually be set to are vetoed.
+ return search_for_suitable_configuration(channels, rate, format, &check_configuration);
+}
+
+static int configure(int32_t requested_encoded_format, char **channel_map) {
+ // Set the sndio output device to the requested encoded format
+ // (rate/format/channels) if it differs from the current configuration, and,
+ // on success, report the channel map for the chosen channel count via
+ // *channel_map (when channel_map is non-NULL). Returns 0 on success, 1 on failure.
+ int response = 0;
+ debug(3, "sndio: configure %s.", short_format_description(requested_encoded_format));
+
+ // if (1) {
+ if (current_encoded_output_format != requested_encoded_format) {
+ if (current_encoded_output_format == 0)
+ debug(2, "sndio: setting output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ else
+ // note -- can't use short_format_description twice in one call because it returns the same
+ // string buffer each time
+ debug(2, "sndio: changing output configuration to %s.",
+ short_format_description(requested_encoded_format));
+ current_encoded_output_format = requested_encoded_format;
+
+ struct sndio_formats *format_info =
+ sps_format_lookup(FORMAT_FROM_ENCODED_FORMAT(requested_encoded_format));
+
+ if (format_info == NULL)
+ die("sndio: can't find format information!");
+
+ // if playback is in progress, flush the device and reset the timing state
+ // before changing parameters
+ if (is_running != 0) {
+ debug(1, "sndio: the output device is running while changing configuration");
+ if (sio_flush(hdl) != 1)
+ debug(1, "sndio: unable to flush");
+ written = played = is_running = 0;
+ time_of_last_onmove_cb = 0;
+ at_least_one_onmove_cb_seen = 0;
+ }
+
+ struct sio_par par;
+ memset(&par, 0, sizeof(struct sio_par));
+ sio_initpar(&par);
+ par.rate = RATE_FROM_ENCODED_FORMAT(requested_encoded_format);
+ par.pchan = CHANNELS_FROM_ENCODED_FORMAT(requested_encoded_format);
+ par.bits = format_info->bits;
+ par.bps = SIO_BPS(par.bits);
+ par.le = format_info->le;
+ par.sig = format_info->sig;
+ debug(3, "Requested %u/%u/%u/%u/%u (rate/bits/signed/le/channels)", par.rate, par.bits, par.sig,
+ par.le, par.pchan);
+ // NOTE(review): if sio_setpar() itself fails (returns != 1) there is no else
+ // branch below, so response remains 0 and the failure is reported as success --
+ // confirm this is intentional.
+ if (sio_setpar(hdl, &par) == 1) {
+ struct sio_par apar;
+ // read back what the device actually accepted and require an exact match
+ if (sio_getpar(hdl, &apar) == 1) {
+ debug(3, "Got %u/%u/%u/%u/%u (rate/bits/signed/le/channels)", apar.rate, apar.bits,
+ apar.sig, apar.le, apar.pchan);
+ if ((apar.rate == par.rate) && (apar.pchan == par.pchan) && (apar.bits == par.bits) &&
+ (apar.le == par.le) && (apar.sig == par.sig)) {
+ framesize = apar.bps * apar.pchan;
+ // config.audio_backend_buffer_desired_length = 1.0 * par.bufsz / par.rate;
+ debug(
+ 2,
+ "bufsiz is %u, rate is %u: computed buffer length is %f, actual buffer length is %f.",
+ apar.bufsz, apar.rate, (1.0 * apar.bufsz) / apar.rate,
+ config.audio_backend_buffer_desired_length);
+ } else {
+ debug(1, "sndio: configure: could not set format exactly");
+ response = 1;
+ }
+ } else {
+ // i.e. sio_getpar() failed
+ debug(1, "sndio: configure: could not set format");
+ response = 1;
+ }
+ }
+ }
+ // report the channel map corresponding to the configured channel count
+ if ((response == 0) && (channel_map != NULL)) {
+ switch (CHANNELS_FROM_ENCODED_FORMAT(current_encoded_output_format)) {
+ case 1:
+ *channel_map = channel_map_1;
+ break;
+ case 2:
+ *channel_map = channel_map_2;
+ break;
+ case 3:
+ *channel_map = channel_map_3;
+ break;
+ case 4:
+ *channel_map = channel_map_4;
+ break;
+ case 5:
+ *channel_map = channel_map_5;
+ break;
+ case 6:
+ *channel_map = channel_map_6;
+ break;
+ // NOTE(review): 7- and 8-channel counts reuse channel_map_6 -- confirm this is
+ // intentional (no channel_map_7 / channel_map_8 in view).
+ case 7:
+ *channel_map = channel_map_6;
+ break;
+ case 8:
+ *channel_map = channel_map_6;
+ break;
+ default:
+ *channel_map = NULL;
+ break;
+ }
+ }
+ return response;
+}
static void help() {
printf(" -d output-device set the output device [default|rsnd/0|rsnd/1...]\n");
}
-void onmove_cb(__attribute__((unused)) void *arg, int delta) {
+static void onmove_cb(__attribute__((unused)) void *arg, int delta) {
time_of_last_onmove_cb = get_absolute_time_in_ns();
at_least_one_onmove_cb_seen = 1;
played += delta;
}
static int init(int argc, char **argv) {
- int found, opt, round, rate, bufsz;
- unsigned int i;
- const char *devname, *tmp;
-
- // set up default values first
-
- sio_initpar(&par);
- par.rate = 44100;
- par.pchan = 2;
- par.bits = 16;
- par.bps = SIO_BPS(par.bits);
- par.le = 1;
- par.sig = 1;
- devname = SIO_DEVANY;
+ // debug(1, "sndio: init");
+ current_encoded_output_format = 0;
+ is_running = 0;
+ // defaults
config.audio_backend_buffer_desired_length = 1.0;
config.audio_backend_buffer_interpolation_threshold_in_seconds =
0.25; // below this, soxr interpolation will not occur -- it'll be basic interpolation
// instead.
config.audio_backend_latency_offset = 0;
+ output_device_name = SIO_DEVANY;
+ output_device_driver_bufsiz = 0; // hmm, not sure about this...
+ output_device_driver_round = 0;
+ int opt;
- // get settings from settings file
-
- // do the "general" audio options. Note, these options are in the "general" stanza!
- parse_general_audio_options();
-
- // get the specific settings
+ // get settings from settings file, passing in defaults for format_set, rate_set and channel_set
+ // Note, these options may be in the "general" stanza or the named stanza
+#ifdef CONFIG_FFMPEG
+ parse_audio_options("sndio", SPS_FORMAT_SET, SPS_RATE_SET, SPS_CHANNEL_SET);
+#else
+ parse_audio_options("sndio", SPS_FORMAT_NON_FFMPEG_SET, SPS_RATE_NON_FFMPEG_SET,
+ SPS_CHANNNEL_NON_FFMPEG_SET);
+#endif
+ // get specific settings
+ int value;
if (config.cfg != NULL) {
- if (!config_lookup_string(config.cfg, "sndio.device", &devname))
- devname = SIO_DEVANY;
- if (config_lookup_int(config.cfg, "sndio.rate", &rate)) {
- if (rate % 44100 == 0 && rate >= 44100 && rate <= 352800) {
- par.rate = rate;
- } else {
- die("sndio: output rate must be a multiple of 44100 and 44100 <= rate <= "
- "352800");
- }
- }
- if (config_lookup_int(config.cfg, "sndio.bufsz", &bufsz)) {
- if (bufsz > 0) {
- par.appbufsz = bufsz;
+ if (!config_lookup_non_empty_string(config.cfg, "sndio.device", &output_device_name))
+ output_device_name = SIO_DEVANY;
+
+ if (config_lookup_int(config.cfg, "sndio.bufsz", &value)) {
+ if (value > 0) {
+ output_device_driver_bufsiz = value;
} else {
die("sndio: bufsz must be > 0");
}
}
- if (config_lookup_int(config.cfg, "sndio.round", &round)) {
- if (round > 0) {
- par.round = round;
+
+ if (config_lookup_int(config.cfg, "sndio.round", &value)) {
+ if (value > 0) {
+ output_device_driver_round = value;
} else {
die("sndio: round must be > 0");
}
}
- if (config_lookup_string(config.cfg, "sndio.format", &tmp)) {
- for (i = 0, found = 0; i < sizeof(formats) / sizeof(formats[0]); i++) {
- if (strcasecmp(formats[i].name, tmp) == 0) {
- config.output_format = formats[i].fmt;
- found = 1;
- break;
- }
- }
- if (!found)
- die("Invalid output format \"%s\". Should be one of: S8, U8, S16, S24, "
- "S24_3LE, S24_3BE, S32, Automatic",
- tmp);
- }
}
+
optind = 1; // optind=0 is equivalent to optind=1 plus special behaviour
argv--; // so we shift the arguments to satisfy getopt()
argc++;
while ((opt = getopt(argc, argv, "d:")) > 0) {
switch (opt) {
case 'd':
- devname = optarg;
+ output_device_name = optarg;
break;
default:
help();
}
if (optind < argc)
die("Invalid audio argument: %s", argv[optind]);
- pthread_cleanup_debug_mutex_lock(&sndio_mutex, 1000, 1);
- // pthread_mutex_lock(&sndio_mutex);
- debug(1, "sndio: output device name is \"%s\".", devname);
- debug(1, "sndio: rate: %u.", par.rate);
- debug(1, "sndio: bits: %u.", par.bits);
-
- is_running = 0;
- hdl = sio_open(devname, SIO_PLAY, 0);
+ /*
+ written = played = 0;
+ time_of_last_onmove_cb = 0;
+ at_least_one_onmove_cb_seen = 0;
+ */
+ get_permissible_configuration_settings();
+ debug(2, "sndio: output device name is \"%s\".", output_device_name);
+ hdl = sio_open(output_device_name, SIO_PLAY, 0);
if (!hdl)
die("sndio: cannot open audio device");
-
- written = played = 0;
- time_of_last_onmove_cb = 0;
- at_least_one_onmove_cb_seen = 0;
-
- for (i = 0; i < sizeof(formats) / sizeof(formats[0]); i++) {
- if (formats[i].fmt == config.output_format) {
- par.bits = formats[i].bits;
- par.bps = formats[i].bps;
- par.sig = formats[i].sig;
- par.le = formats[i].le;
- break;
- }
- }
-
- if (!sio_setpar(hdl, &par) || !sio_getpar(hdl, &par))
- die("sndio: failed to set audio parameters");
- for (i = 0, found = 0; i < sizeof(formats) / sizeof(formats[0]); i++) {
- if (formats[i].bits == par.bits && formats[i].bps == par.bps && formats[i].sig == par.sig &&
- formats[i].le == par.le && formats[i].rate == par.rate) {
- config.output_format = formats[i].fmt;
- found = 1;
- break;
- }
- }
- if (!found)
- die("sndio: could not set output device to the required format and rate.");
-
- framesize = par.bps * par.pchan;
- config.output_rate = par.rate;
- if (par.rate == 0) {
- die("sndio: par.rate set to zero.");
- }
-
- config.audio_backend_buffer_desired_length = 1.0 * par.bufsz / par.rate;
- config.audio_backend_latency_offset = 0;
-
sio_onmove(hdl, onmove_cb, NULL);
-
- // pthread_mutex_unlock(&sndio_mutex);
- pthread_cleanup_pop(1); // unlock the mutex
- if (framesize == 0) {
- die("sndio: framesize set to zero.");
- }
+ // debug(1, "sndio: init done");
return 0;
}
static int play(void *buf, int frames, __attribute__((unused)) int sample_type,
__attribute__((unused)) uint32_t timestamp,
__attribute__((unused)) uint64_t playtime) {
+
if (frames > 0) {
pthread_cleanup_debug_mutex_lock(&sndio_mutex, 1000, 1);
if (is_running == 0) {
}
static void stop() {
+
pthread_cleanup_debug_mutex_lock(&sndio_mutex, 1000, 1);
if (hdl != NULL) {
if (is_running != 0) {
int get_delay(long *delay) {
int response = 0;
+
size_t estimated_extra_frames_output = 0;
if (at_least_one_onmove_cb_seen) { // when output starts, the onmove_cb callback will be made
// calculate the difference in time between now and when the last callback occurred,
if (is_running != 0) {
result = get_delay(delay);
} else {
- debug(1, "sndio: output device is not open for delay!");
+ debug(2, "sndio: output device is not open for delay!");
if (delay != NULL)
*delay = 0;
}
debug(1, "sndio: unable to flush");
written = played = is_running = 0;
} else {
- debug(1, "sndio: flush: not running.");
+ debug(2, "sndio: flush: not running.");
}
} else {
debug(1, "sndio: output device is not open for flush!");
.help = &help,
.init = &init,
.deinit = &deinit,
- .prepare = NULL,
+ .configure = &configure,
+ .get_configuration = &get_configuration,
.start = NULL,
.stop = &stop,
.is_running = NULL,
if (frame_count_min > fill_count) {
int frame_count = frame_count_min;
if ((err = soundio_outstream_begin_write(outstream, &areas, &frame_count))) {
- debug(0, "[--->>] begin write error: %s", soundio_strerror(err));
+ debug(1, "[--->>] begin write error: %s", soundio_strerror(err));
}
for (int frame = 0; frame < frame_count; frame += 1) {
for (int ch = 0; ch < outstream->layout.channel_count; ch += 1) {
}
}
if ((err = soundio_outstream_end_write(outstream)))
- debug(0, "[--->>] end write error: %s", soundio_strerror(err));
+ debug(1, "[--->>] end write error: %s", soundio_strerror(err));
return;
}
int frame_count = frames_left;
if ((err = soundio_outstream_begin_write(outstream, &areas, &frame_count)))
- debug(0, "[--->>] begin write error: %s", soundio_strerror(err));
+ debug(1, "[--->>] begin write error: %s", soundio_strerror(err));
if (frame_count <= 0)
break;
}
if ((err = soundio_outstream_end_write(outstream)))
- debug(0, "[--->>] end write error: %s", soundio_strerror(err));
+ debug(1, "[--->>] end write error: %s", soundio_strerror(err));
frames_left -= frame_count;
}
static void underflow_callback(__attribute__((unused)) struct SoundIoOutStream *outstream) {
static int count = 0;
- debug(0, "underflow %d\n", ++count);
+ debug(1, "underflow %d\n", ++count);
}
static int init(__attribute__((unused)) int argc, __attribute__((unused)) char **argv) {
config.audio_backend_buffer_desired_length = 2.0;
config.audio_backend_latency_offset = 0;
- // get settings from settings file
-
- // do the "general" audio options. Note, these options are in the "general" stanza!
- parse_general_audio_options();
+ // this ensures that the Shairport Sync system will provide only 44100/S16/2 to this backend.
+ parse_audio_options(NULL, (1 << SPS_FORMAT_S16_LE), (1 << SPS_RATE_44100), (1 << 2));
// get the specific settings
soundio = soundio_create();
if (!soundio) {
- debug(0, "out of memory\n");
+ debug(1, "out of memory\n");
return 1;
}
- if ((err = soundio_connect_backend(soundio, SoundIoBackendCoreAudio))) {
- debug(0, "error connecting: %s", soundio_strerror(err));
+ if ((err = soundio_connect_backend(soundio, SoundIoBackendAlsa))) {
+ debug(1, "error connecting: %s", soundio_strerror(err));
return 1;
}
soundio_flush_events(soundio);
int default_out_device_index = soundio_default_output_device_index(soundio);
if (default_out_device_index < 0) {
- debug(0, "no output device found");
+ debug(1, "no output device found");
return 1;
}
device = soundio_get_output_device(soundio, default_out_device_index);
if (!device) {
- debug(0, "out of memory");
+ debug(1, "out of memory");
return 1;
}
- debug(0, "Output device: %s\n", device->name);
+ debug(1, "Output device: %s\n", device->name);
return 0;
}
soundio_ring_buffer_destroy(ring_buffer);
soundio_device_unref(device);
soundio_destroy(soundio);
- debug(0, "soundio audio deinit\n");
+ debug(1, "soundio audio deinit\n");
}
static void start(int sample_rate, int sample_format) {
outstream->layout.channel_count = 2;
outstream->write_callback = write_callback;
outstream->underflow_callback = underflow_callback;
- // outstream->software_latency = 0;
+ outstream->software_latency = 0;
if ((err = soundio_outstream_open(outstream))) {
- debug(0, "unable to open device: %s", soundio_strerror(err));
+ debug(1, "unable to open device: %s", soundio_strerror(err));
}
if (outstream->layout_error)
- debug(0, "unable to set channel layout: %s\n", soundio_strerror(outstream->layout_error));
+ debug(1, "unable to set channel layout: %s\n", soundio_strerror(outstream->layout_error));
int capacity = outstream->sample_rate * outstream->bytes_per_frame;
ring_buffer = soundio_ring_buffer_create(soundio, capacity);
if (!ring_buffer)
- debug(0, "unable to create ring buffer: out of memory");
+ debug(1, "unable to create ring buffer: out of memory");
char *buf = soundio_ring_buffer_write_ptr(ring_buffer);
memset(buf, 0, capacity);
soundio_ring_buffer_advance_write_ptr(ring_buffer, capacity);
if ((err = soundio_outstream_start(outstream))) {
- debug(0, "unable to start outstream: %s", soundio_strerror(err));
+ debug(1, "unable to start outstream: %s", soundio_strerror(err));
}
debug(1, "libsoundio output started\n");
return 0;
}
-static void parameters(audio_parameters *info) {
- info->minimum_volume_dB = -30.0;
- info->maximum_volume_dB = 0.0;
- debug(2, "Parameters\n");
- debug(2, "Current Volume dB: %f\n", info->current_volume_dB);
- debug(2, "Minimum Volume dB: %d\n", info->minimum_volume_dB);
- debug(2, "Maximum Volume dB: %d\n", info->maximum_volume_dB);
-}
-
static void stop(void) {
soundio_outstream_destroy(outstream);
soundio_ring_buffer_clear(ring_buffer);
.help = NULL,
.init = &init,
.deinit = &deinit,
- .prepare = NULL,
+ .configure = NULL,
.start = &start,
.stop = &stop,
.is_running = NULL,
.stats = NULL,
.play = &play,
.volume = NULL,
- .parameters = ¶meters,
+ .parameters = NULL,
.mute = NULL};
/*
* stdout output driver. This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2015
+ * Copyright (c) Mike Brady 2015--2025
*
* Based on pipe output driver
* Copyright (c) James Laird 2013
#include <stdlib.h>
#include <unistd.h>
-static int fd = -1;
+static int fd = STDOUT_FILENO;
static int warned = 0;
-
-static void start(__attribute__((unused)) int sample_rate,
- __attribute__((unused)) int sample_format) {
- fd = STDOUT_FILENO;
- warned = 0;
-}
+static unsigned int bytes_per_frame = 0;
static int play(void *buf, int samples, __attribute__((unused)) int sample_type,
__attribute__((unused)) uint32_t timestamp,
__attribute__((unused)) uint64_t playtime) {
char errorstring[1024];
- int rc = write(fd, buf, samples * 4);
+ if (bytes_per_frame == 0)
+ debug(1, "stdout: bytes per frame not initialised before play()!");
+ int rc = write(fd, buf, samples * bytes_per_frame);
if ((rc < 0) && (warned == 0)) {
strerror_r(errno, (char *)errorstring, 1024);
- warn("Error %d writing to stdout (fd: %d): \"%s\".", errno, fd, errorstring);
+ warn("error %d writing to stdout (fd: %d): \"%s\".", errno, fd, errorstring);
warned = 1;
}
return rc;
}
-static void stop(void) {
- // Do nothing when play stops
-}
-
static int init(__attribute__((unused)) int argc, __attribute__((unused)) char **argv) {
// set up default values first
config.audio_backend_buffer_desired_length = 1.0;
config.audio_backend_latency_offset = 0;
- // get settings from settings file
- // do the "general" audio options. Note, these options are in the "general" stanza!
- parse_general_audio_options();
+ // get settings from settings file, passing in defaults for format_set, rate_set and channel_set
+ // Note, these options may be in the "general" stanza or the named stanza
+#ifdef CONFIG_AIRPLAY_2
+ parse_audio_options("stdout", (1 << SPS_FORMAT_S32_LE), (1 << SPS_RATE_48000), (1 << 2));
+#else
+ parse_audio_options("stdout", (1 << SPS_FORMAT_S16_LE), (1 << SPS_RATE_44100), (1 << 2));
+#endif
return 0;
}
-static void deinit(void) {
- // don't close stdout
+static int32_t get_configuration(unsigned int channels, unsigned int rate, unsigned int format) {
+ // use the standard format/rate/channel search to get a suitable configuration. No
+ // check_configuration() method needs to be passed to search_for_suitable_configuration() because
+ // it will always return a valid choice based on any settings and the defaults
+ // (stdout can accept anything -- it just writes bytes).
+ // Presumably returns an encoded format, or a negative value on error (see
+ // short_format_description) -- TODO confirm against search_for_suitable_configuration.
+ return search_for_suitable_configuration(channels, rate, format, NULL);
+}
+
+static int configure(int32_t requested_encoded_format, __attribute__((unused)) char **channel_map) {
+ // Record the frame size implied by the requested encoded format so that play()
+ // can convert a frame count into a byte count. Returns 0 on success, EINVAL if
+ // the sample format is unknown.
+ int response = 0;
+ unsigned int bytes_per_sample =
+ sps_format_sample_size(FORMAT_FROM_ENCODED_FORMAT(requested_encoded_format));
+ if (bytes_per_sample == 0) {
+ debug(1, "stdout: unknown output format.");
+ bytes_per_sample = 4; // emergency hack
+ response = EINVAL;
+ }
+ bytes_per_frame = bytes_per_sample * CHANNELS_FROM_ENCODED_FORMAT(requested_encoded_format);
+ return response;
+}
audio_output audio_stdout = {.name = "stdout",
.help = NULL,
.init = &init,
- .deinit = &deinit,
- .prepare = NULL,
- .start = &start,
- .stop = &stop,
+ .deinit = NULL,
+ .get_configuration = &get_configuration,
+ .configure = &configure,
+ .start = NULL,
+ .stop = NULL,
.is_running = NULL,
.flush = NULL,
.delay = NULL,
--- /dev/null
+/*
+ * Bonjour strings manager. This file is part of Shairport Sync.
+ * Copyright (c) Mike Brady 2014--2025
+ * All rights reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#include "bonjour_strings.h"
+#include "common.h"
+
+// NULL-terminated lists of TXT-record entry strings handed to the mDNS layer.
+char *txt_records[128];
+char *secondary_txt_records[128];
+
+// mDNS advertisement strings
+
+// Create these strings and then keep them updated.
+// When necessary, update the mDNS service records, using e.g. Avahi
+// from these sources.
+
+// Persistent backing buffers for the formatted TXT-record entries; the lists
+// above hold pointers into these, so they must have static storage duration
+// because Avahi may read them at any time (see build_bonjour_strings).
+char fwString[128];
+char ap1_featuresString[128];
+char ap1StatusFlagsString[128];
+char ap1ModelString[128];
+char ap1SrcversString[128];
+char pkString[128];
+
+#ifdef CONFIG_AIRPLAY_2
+char deviceIdString[128];
+char featuresString[128];
+char statusflagsString[128];
+char piString[128];
+char gidString[128];
+char psiString[128];
+char fexString[128];
+char modelString[128];
+char srcversString[128];
+char osversString[128];
+char ap1OsversString[128];
+#endif
+
+#ifdef CONFIG_AIRPLAY_2
+void build_bonjour_strings(rtsp_conn_info *conn) {
+#else
+void build_bonjour_strings(__attribute((unused)) rtsp_conn_info *conn) {
+#endif
+
+ // (Re)build the NULL-terminated TXT-record lists from the current config.
+ // In AirPlay 2 builds, conn (when non-NULL) supplies per-connection group
+ // information (airplay_gid, groupContainsGroupLeader).
+ // Watch out here, the strings that form each entry
+ // need to be permanent, because we don't know
+ // when avahi will look at them.
+ // bnprintf is (should be) the same as snprintf except that it returns a pointer to the resulting
+ // character string. So this rather odd arrangement below allows you to use a snprintf for
+ // convenience but get the character string as a result, both as a store for the item so that
+ // Avahi can see it in future and as a pointer
+ int entry_number = 0;
+
+ // the txt_records entries are for the _raop._tcp characteristics
+ // the secondary_txt_records are for the _airplay._tcp items.
+
+#ifdef CONFIG_AIRPLAY_2
+ txt_records[entry_number++] = "cn=0,1";
+ txt_records[entry_number++] = "da=true";
+ txt_records[entry_number++] = "et=0,1";
+
+ // split the 64-bit feature bits into two 32-bit halves for the "ft=" entry
+ uint64_t features_hi = config.airplay_features;
+ features_hi = (features_hi >> 32) & 0xffffffff;
+ uint64_t features_lo = config.airplay_features;
+ features_lo = features_lo & 0xffffffff;
+
+ txt_records[entry_number++] =
+ bnprintf(ap1_featuresString, sizeof(ap1_featuresString), "ft=0x%" PRIX64 ",0x%" PRIX64 "",
+ features_lo, features_hi);
+
+ txt_records[entry_number++] =
+ bnprintf(fwString, sizeof(fwString), "fv=%s", config.firmware_version);
+ txt_records[entry_number++] = bnprintf(ap1StatusFlagsString, sizeof(ap1StatusFlagsString),
+ "sf=0x%" PRIX32, config.airplay_statusflags);
+#ifdef CONFIG_METADATA
+ // "md=" advertises metadata support; "1" (cover art) only when enabled
+ if (config.get_coverart == 0)
+ txt_records[entry_number++] = "md=0,2";
+ else
+ txt_records[entry_number++] = "md=0,1,2";
+#endif
+ txt_records[entry_number++] =
+ bnprintf(ap1ModelString, sizeof(ap1ModelString), "am=%s", config.model);
+ txt_records[entry_number++] = bnprintf(pkString, sizeof(pkString), "pk=%s", config.pk_string);
+ txt_records[entry_number++] = "tp=UDP";
+ txt_records[entry_number++] = "vn=65537";
+ txt_records[entry_number++] =
+ bnprintf(ap1SrcversString, sizeof(ap1SrcversString), "vs=%s", config.srcvers);
+ txt_records[entry_number++] =
+ bnprintf(ap1OsversString, sizeof(ap1OsversString), "ov=%s", config.osvers);
+ txt_records[entry_number++] = NULL;
+
+#else
+ // here, just replicate what happens in mdns.h when using those #defines
+ txt_records[entry_number++] =
+ bnprintf(ap1StatusFlagsString, sizeof(ap1StatusFlagsString), "sf=0x4");
+ txt_records[entry_number++] =
+ bnprintf(fwString, sizeof(fwString), "fv=%s", config.firmware_version);
+ txt_records[entry_number++] =
+ bnprintf(ap1ModelString, sizeof(ap1ModelString), "am=%s", config.model);
+ txt_records[entry_number++] = bnprintf(ap1SrcversString, sizeof(ap1SrcversString), "vs=105.1");
+ txt_records[entry_number++] = "tp=TCP,UDP";
+ txt_records[entry_number++] = "vn=65537";
+#ifdef CONFIG_METADATA
+ if (config.get_coverart == 0)
+ txt_records[entry_number++] = "md=0,2";
+ else
+ txt_records[entry_number++] = "md=0,1,2";
+#endif
+ txt_records[entry_number++] = "ss=16";
+ txt_records[entry_number++] = "sr=44100";
+ txt_records[entry_number++] = "da=true";
+ txt_records[entry_number++] = "sv=false";
+ txt_records[entry_number++] = "et=0,1";
+ txt_records[entry_number++] = "ek=1";
+ txt_records[entry_number++] = "cn=0,1";
+ txt_records[entry_number++] = "ch=2";
+ txt_records[entry_number++] = "txtvers=1";
+ if (config.password == 0)
+ txt_records[entry_number++] = "pw=false";
+ else
+ txt_records[entry_number++] = "pw=true";
+ txt_records[entry_number++] = NULL;
+#endif
+
+#ifdef CONFIG_AIRPLAY_2
+ // make up a secondary set of text records
+ entry_number = 0;
+
+ secondary_txt_records[entry_number++] = "acl=0";
+ secondary_txt_records[entry_number++] = "btaddr=00:00:00:00:00:00";
+ secondary_txt_records[entry_number++] =
+ bnprintf(deviceIdString, sizeof(deviceIdString), "deviceid=%s", config.airplay_device_id);
+ secondary_txt_records[entry_number++] =
+ bnprintf(fexString, sizeof(fexString), "fex=%s", config.airplay_fex);
+ secondary_txt_records[entry_number++] =
+ bnprintf(featuresString, sizeof(featuresString), "features=0x%" PRIX64 ",0x%" PRIX64 "",
+ features_lo, features_hi); // features_hi and features_lo already calculated.
+ secondary_txt_records[entry_number++] = bnprintf(statusflagsString, sizeof(statusflagsString),
+ "flags=0x%" PRIX32, config.airplay_statusflags);
+ // "gid=": use the connection's group id when grouped, else fall back to our pi
+ if ((conn != NULL) && (conn->airplay_gid != 0)) {
+ snprintf(gidString, sizeof(gidString), "gid=%s", conn->airplay_gid);
+ } else {
+ snprintf(gidString, sizeof(gidString), "gid=%s", config.airplay_pi);
+ }
+ secondary_txt_records[entry_number++] = gidString;
+
+ if ((conn != NULL) && (conn->groupContainsGroupLeader != 0)) {
+ secondary_txt_records[entry_number++] = "igl=0";
+ secondary_txt_records[entry_number++] = "gcgl=1";
+ } else {
+ secondary_txt_records[entry_number++] = "igl=0";
+ secondary_txt_records[entry_number++] = "gcgl=0";
+ }
+ // if ((conn != NULL) && (conn->airplay_gid != 0)) // if it's in a group
+ // secondary_txt_records[entry_number++] = "isGroupLeader=0";
+ secondary_txt_records[entry_number++] =
+ bnprintf(modelString, sizeof(modelString), "model=%s", config.model);
+ secondary_txt_records[entry_number++] = "protovers=1.1";
+ secondary_txt_records[entry_number++] =
+ bnprintf(piString, sizeof(piString), "pi=%s", config.airplay_pi);
+ secondary_txt_records[entry_number++] =
+ bnprintf(psiString, sizeof(psiString), "psi=%s", config.airplay_psi);
+ secondary_txt_records[entry_number++] = pkString; // already calculated
+ secondary_txt_records[entry_number++] =
+ bnprintf(srcversString, sizeof(srcversString), "srcvers=%s", config.srcvers);
+ secondary_txt_records[entry_number++] =
+ bnprintf(osversString, sizeof(osversString), "osvers=%s", config.osvers);
+ secondary_txt_records[entry_number++] = "vv=2";
+ secondary_txt_records[entry_number++] = fwString; // already calculated
+ secondary_txt_records[entry_number++] = NULL;
+#endif
+}
--- /dev/null
+// NOTE(review): identifiers beginning with an underscore followed by an
+// uppercase letter are reserved in C -- consider BONJOUR_STRINGS_H_ instead.
+#ifndef _BONJOUR_STRINGS_H
+#define _BONJOUR_STRINGS_H
+
+#include "player.h"
+
+// NULL-terminated lists of mDNS TXT-record strings: txt_records for the
+// _raop._tcp service, secondary_txt_records for the _airplay._tcp service
+// (the latter is populated only in AirPlay 2 builds).
+extern char *txt_records[128];
+extern char *secondary_txt_records[128];
+
+// (Re)build the TXT-record lists from the current configuration; in AirPlay 2
+// builds, conn (when non-NULL) supplies per-connection group information.
+#ifdef CONFIG_AIRPLAY_2
+void build_bonjour_strings(rtsp_conn_info *conn);
+#else
+void build_bonjour_strings(__attribute((unused)) rtsp_conn_info *conn);
+#endif
+
+#endif // _BONJOUR_STRINGS_H
* Utility routines. This file is part of Shairport.
* Copyright (c) James Laird 2013
* The volume to attenuation function vol2attn copyright (c) Mike Brady 2014
- * Further changes and additions (c) Mike Brady 2014 -- 2021
+ * Further changes and additions (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <time.h>
#include <unistd.h>
+#include <net/if.h>
#include <ifaddrs.h>
#ifdef COMPILE_FOR_LINUX
#include <netinet/in.h>
#endif
+#ifdef CONFIG_CONVOLUTION
+#include <ctype.h>
+#include <sndfile.h>
+#endif
+
#ifdef CONFIG_OPENSSL
#include <openssl/aes.h> // needed for older AES stuff
#include <openssl/bio.h> // needed for BIO_new_mem_buf
config_t config_file_stuff;
int type_of_exit_cleanup;
-uint64_t ns_time_at_startup, ns_time_at_last_debug_message;
-
-// always lock use this when accessing the ns_time_at_last_debug_message
-static pthread_mutex_t debug_timing_lock = PTHREAD_MUTEX_INITIALIZER;
+uint64_t minimum_dac_queue_size;
pthread_mutex_t the_conn_lock = PTHREAD_MUTEX_INITIALIZER;
+// Bytes occupied by one sample of each sps_format_t, indexed by the enum value.
+// Note that S24_LE/S24_BE occupy 4 bytes (24 bits in a 32-bit word), while the
+// packed S24_3LE/S24_3BE variants occupy 3.
+unsigned int sps_format_sample_size_array[] = {
+ 0, // unknown
+ 1, 1, // S8, U8
+ 2, 2, // S16_LE, S16_BE,
+ 4, 4, // S24_LE, S24_BE,
+ 3, 3, // S24_3LE, S24_3BE,
+ 4, 4, // S32_LE, S32_BE,
+ 2, 4, 4, // S16, S24, S32
+ 0, 0 // Auto, Invalid
+};
+
+// Return the size in bytes of one sample of the given format, or 0 for
+// unknown/auto formats or any value beyond SPS_FORMAT_AUTO.
+unsigned int sps_format_sample_size(sps_format_t format) {
+ unsigned int response = 0;
+ if (format <= SPS_FORMAT_AUTO)
+ response = sps_format_sample_size_array[format];
+ return response;
+}
+
const char *sps_format_description_string_array[] = {
- "unknown", "S8", "U8", "S16", "S16_LE", "S16_BE", "S24", "S24_LE",
- "S24_BE", "S24_3LE", "S24_3BE", "S32", "S32_LE", "S32_BE", "auto", "invalid"};
+ "unknown", "S8", "U8", "S16_LE", "S16_BE", "S24_LE", "S24_BE", "S24_3LE",
+ "S24_3BE", "S32_LE", "S32_BE", "S16", "S24", "S32", "auto", "invalid"};
const char *sps_format_description_string(sps_format_t format) {
if (format <= SPS_FORMAT_AUTO)
return sps_format_description_string_array[SPS_FORMAT_INVALID];
}
-// true if Shairport Sync is supposed to be sending output to the output device, false otherwise
+// Map an sps_rate_t enum value to its sample rate in frames per second.
+// Returns 0 (after logging) for an unrecognised enum value.
+unsigned int sps_rate_actual_rate(sps_rate_t rate) {
+ unsigned int response = 0;
+ switch (rate) {
+ case SPS_RATE_5512:
+ response = 5512;
+ break;
+ case SPS_RATE_8000:
+ response = 8000;
+ break;
+ case SPS_RATE_11025:
+ response = 11025;
+ break;
+ case SPS_RATE_16000:
+ response = 16000;
+ break;
+ case SPS_RATE_22050:
+ response = 22050;
+ break;
+ case SPS_RATE_32000:
+ response = 32000;
+ break;
+ case SPS_RATE_44100:
+ response = 44100;
+ break;
+ case SPS_RATE_48000:
+ response = 48000;
+ break;
+ case SPS_RATE_64000:
+ response = 64000;
+ break;
+ case SPS_RATE_88200:
+ response = 88200;
+ break;
+ case SPS_RATE_96000:
+ response = 96000;
+ break;
+ case SPS_RATE_176400:
+ response = 176400;
+ break;
+ case SPS_RATE_192000:
+ response = 192000;
+ break;
+ case SPS_RATE_352800:
+ response = 352800;
+ break;
+ case SPS_RATE_384000:
+ response = 384000;
+ break;
+ default:
+ debug(1, "unrecognised SPS_RATE_: %u.", rate);
+ break;
+ }
+ return response;
+}
+
+// Shared buffer for short_format_description(); every call overwrites it, so
+// the function is not reentrant and its result cannot be used twice in one
+// printf-style call (see the note in the sndio configure()).
+char sfd[32];
+// Render an encoded format as "rate/format/channels" (e.g. "44100/S16_LE/2"),
+// or "error N" when the encoded value is negative. Returns a pointer to the
+// shared static buffer above.
+const char *short_format_description(int32_t encoded_format) {
+ if (encoded_format < 0)
+ snprintf(sfd, sizeof(sfd) - 1, "error %d", encoded_format);
+ else
+ snprintf(
+ sfd, sizeof(sfd) - 1, "%u/%s/%u", RATE_FROM_ENCODED_FORMAT(encoded_format),
+ sps_format_description_string((sps_format_t)(FORMAT_FROM_ENCODED_FORMAT(encoded_format))),
+ CHANNELS_FROM_ENCODED_FORMAT(encoded_format));
+ return (const char *)sfd;
+}
+// true if Shairport Sync is supposed to be sending output to the output device, false otherwise
static volatile int requested_connection_state_to_output = 1;
// this stuff is to direct logging to syslog via libdaemon or directly
shairport_cfg config;
-volatile int debuglev = 0;
-
sigset_t pselect_sigset;
+// note -- don't use this to shutdown from dbus -- see its own code in dbus-service.c
+void sps_shutdown(type_of_exit_type shutdown_type) { // TOE_normal, TOE_emergency
+ type_of_exit_cleanup = shutdown_type;
+ if (type_of_exit_cleanup == TOE_emergency) {
+ debug(1, "emergency shutdown requested");
+ exit(EXIT_FAILURE);
+ } else {
+ debug(1, "normal shutdown requested");
+ exit(EXIT_SUCCESS);
+ }
+}
+
int usleep_uncancellable(useconds_t usec) {
int response;
int oldState;
ret = errno;
close(local_socket);
char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ getErrorText((char *)errorstring, sizeof(errorstring));
warn("error %d: \"%s\". Could not bind a port!", errno, errorstring);
} else {
uint16_t sport;
ret = errno;
close(local_socket);
char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ getErrorText((char *)errorstring, sizeof(errorstring));
warn("error %d: \"%s\". Could not retrieve socket's port!", errno, errorstring);
} else {
#ifdef AF_INET6
if (ret < 0) {
close(local_socket);
char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ getErrorText((char *)errorstring, sizeof(errorstring));
die("error %d: \"%s\". Could not bind a UDP port! Check the udp_port_range is large enough -- "
"it must be "
"at least 3, and 10 or more is suggested -- or "
void set_requested_connection_state_to_output(int v) { requested_connection_state_to_output = v; }
-char *generate_preliminary_string(char *buffer, size_t buffer_length, double tss, double tsl,
- const char *filename, const int linenumber, const char *prefix) {
- char *insertion_point = buffer;
- if (config.debugger_show_elapsed_time) {
- snprintf(insertion_point, buffer_length, "% 20.9f", tss);
- insertion_point = insertion_point + strlen(insertion_point);
- }
- if (config.debugger_show_relative_time) {
- snprintf(insertion_point, buffer_length, "% 20.9f", tsl);
- insertion_point = insertion_point + strlen(insertion_point);
- }
- if (config.debugger_show_file_and_line) {
- snprintf(insertion_point, buffer_length, " \"%s:%d\"", filename, linenumber);
- insertion_point = insertion_point + strlen(insertion_point);
- }
- if (prefix) {
- snprintf(insertion_point, buffer_length, "%s", prefix);
- insertion_point = insertion_point + strlen(insertion_point);
- }
- return insertion_point;
-}
-
-void _die(const char *thefilename, const int linenumber, const char *format, ...) {
- int oldState;
- pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
-
- char b[16384];
- b[0] = 0;
- char *s;
- if (debuglev) {
- pthread_mutex_lock(&debug_timing_lock);
- uint64_t time_now = get_absolute_time_in_ns();
- uint64_t time_since_start = time_now - ns_time_at_startup;
- uint64_t time_since_last_debug_message = time_now - ns_time_at_last_debug_message;
- ns_time_at_last_debug_message = time_now;
- pthread_mutex_unlock(&debug_timing_lock);
- char *basec = strdup(thefilename);
- char *filename = basename(basec);
- s = generate_preliminary_string(b, sizeof(b), 1.0 * time_since_start / 1000000000,
- 1.0 * time_since_last_debug_message / 1000000000, filename,
- linenumber, " *fatal error: ");
- free(basec);
- } else {
- strncpy(b, "fatal error: ", sizeof(b));
- s = b + strlen(b);
- }
- va_list args;
- va_start(args, format);
- vsnprintf(s, sizeof(b) - (s - b), format, args);
- va_end(args);
- sps_log(LOG_ERR, "%s", b);
- pthread_setcancelstate(oldState, NULL);
- type_of_exit_cleanup = TOE_emergency;
- exit(EXIT_FAILURE);
-}
-
-void _warn(const char *thefilename, const int linenumber, const char *format, ...) {
- int oldState;
- pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
- char b[16384];
- b[0] = 0;
- char *s;
- if (debuglev) {
- pthread_mutex_lock(&debug_timing_lock);
- uint64_t time_now = get_absolute_time_in_ns();
- uint64_t time_since_start = time_now - ns_time_at_startup;
- uint64_t time_since_last_debug_message = time_now - ns_time_at_last_debug_message;
- ns_time_at_last_debug_message = time_now;
- pthread_mutex_unlock(&debug_timing_lock);
- char *basec = strdup(thefilename);
- char *filename = basename(basec);
- s = generate_preliminary_string(b, sizeof(b), 1.0 * time_since_start / 1000000000,
- 1.0 * time_since_last_debug_message / 1000000000, filename,
- linenumber, " *warning: ");
- free(basec);
- } else {
- strncpy(b, "warning: ", sizeof(b));
- s = b + strlen(b);
- }
- va_list args;
- va_start(args, format);
- vsnprintf(s, sizeof(b) - (s - b), format, args);
- va_end(args);
- sps_log(LOG_WARNING, "%s", b);
- pthread_setcancelstate(oldState, NULL);
-}
-
-void _debug(const char *thefilename, const int linenumber, int level, const char *format, ...) {
- if (level > debuglev)
- return;
- int oldState;
- pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
-
- char b[16384];
- b[0] = 0;
- pthread_mutex_lock(&debug_timing_lock);
- uint64_t time_now = get_absolute_time_in_ns();
- uint64_t time_since_start = time_now - ns_time_at_startup;
- uint64_t time_since_last_debug_message = time_now - ns_time_at_last_debug_message;
- ns_time_at_last_debug_message = time_now;
- pthread_mutex_unlock(&debug_timing_lock);
- char *basec = strdup(thefilename);
- char *filename = basename(basec);
- char *s = generate_preliminary_string(b, sizeof(b), 1.0 * time_since_start / 1000000000,
- 1.0 * time_since_last_debug_message / 1000000000, filename,
- linenumber, " ");
- free(basec);
- va_list args;
- va_start(args, format);
- vsnprintf(s, sizeof(b) - (s - b), format, args);
- va_end(args);
- sps_log(LOG_INFO, b); // LOG_DEBUG is hard to read on macOS terminal
- pthread_setcancelstate(oldState, NULL);
-}
-
-void _inform(const char *thefilename, const int linenumber, const char *format, ...) {
- int oldState;
- pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
- char b[16384];
- b[0] = 0;
- char *s;
- if (debuglev) {
- pthread_mutex_lock(&debug_timing_lock);
- uint64_t time_now = get_absolute_time_in_ns();
- uint64_t time_since_start = time_now - ns_time_at_startup;
- uint64_t time_since_last_debug_message = time_now - ns_time_at_last_debug_message;
- ns_time_at_last_debug_message = time_now;
- pthread_mutex_unlock(&debug_timing_lock);
- char *basec = strdup(thefilename);
- char *filename = basename(basec);
- s = generate_preliminary_string(b, sizeof(b), 1.0 * time_since_start / 1000000000,
- 1.0 * time_since_last_debug_message / 1000000000, filename,
- linenumber, " ");
- free(basec);
- } else {
- s = b;
- }
- va_list args;
- va_start(args, format);
- vsnprintf(s, sizeof(b) - (s - b), format, args);
- va_end(args);
- sps_log(LOG_INFO, "%s", b);
- pthread_setcancelstate(oldState, NULL);
-}
-
-void _debug_print_buffer(const char *thefilename, const int linenumber, int level, void *vbuf,
- size_t buf_len) {
- if (level > debuglev)
- return;
- char *buf = (char *)vbuf;
- char *obf =
- malloc(buf_len * 4 + 1); // to be on the safe side -- 4 characters on average for each byte
- if (obf != NULL) {
- char *obfp = obf;
- unsigned int obfc;
- for (obfc = 0; obfc < buf_len; obfc++) {
- snprintf(obfp, 3, "%02X", buf[obfc]);
- obfp += 2;
- if (obfc != buf_len - 1) {
- if (obfc % 32 == 31) {
- snprintf(obfp, 5, " || ");
- obfp += 4;
- } else if (obfc % 16 == 15) {
- snprintf(obfp, 4, " | ");
- obfp += 3;
- } else if (obfc % 4 == 3) {
- snprintf(obfp, 2, " ");
- obfp += 1;
- }
- }
- };
- *obfp = 0;
- _debug(thefilename, linenumber, level, "%s", obf);
- free(obf);
- }
+void getErrorText(char *destinationString, size_t destinationStringLength) {
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wunused-result"
+ strerror_r(errno, destinationString, destinationStringLength);
+#pragma GCC diagnostic pop
}
// The following two functions are adapted slightly and with thanks from Jonathan Leffler's sample
return (status);
}
+// including a simple base64 encoder to minimise malloc/free activity
+
+// From Stack Overflow, with thanks:
+// http://stackoverflow.com/questions/342409/how-do-i-base64-encode-decode-in-c
+// minor mods to make independent of C99.
+// more significant changes make it not malloc memory
+// needs to initialise the encoding table first
+
+// add _so to end of name to avoid confusion with polarssl's implementation
+
+static char encoding_table[] = {'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
+ 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
+ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
+ 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
+ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'};
+
+static size_t mod_table[] = {0, 2, 1};
+
+// pass in a pointer to the data, its length, a pointer to the output buffer and
+// a pointer to an int
+// containing its maximum length
+// the actual length will be returned.
+
+char *base64_encode_so(const unsigned char *data, size_t input_length, char *encoded_data,
+ size_t *output_length) {
+
+ size_t calculated_output_length = 4 * ((input_length + 2) / 3);
+ if (calculated_output_length > *output_length)
+ return (NULL);
+ *output_length = calculated_output_length;
+
+ size_t i, j;
+ for (i = 0, j = 0; i < input_length;) {
+
+ uint32_t octet_a = i < input_length ? (unsigned char)data[i++] : 0;
+ uint32_t octet_b = i < input_length ? (unsigned char)data[i++] : 0;
+ uint32_t octet_c = i < input_length ? (unsigned char)data[i++] : 0;
+
+ uint32_t triple = (octet_a << 0x10) + (octet_b << 0x08) + octet_c;
+
+ encoded_data[j++] = encoding_table[(triple >> 3 * 6) & 0x3F];
+ encoded_data[j++] = encoding_table[(triple >> 2 * 6) & 0x3F];
+ encoded_data[j++] = encoding_table[(triple >> 1 * 6) & 0x3F];
+ encoded_data[j++] = encoding_table[(triple >> 0 * 6) & 0x3F];
+ }
+
+ for (i = 0; i < mod_table[input_length % 3]; i++)
+ encoded_data[*output_length - 1 - i] = '=';
+
+ return encoded_data;
+}
+
+// with thanks!
+//
+
#ifdef CONFIG_MBEDTLS
char *base64_enc(uint8_t *input, int length) {
char *buf = NULL;
// input,strlen(input),inbuf,inbufsize);
int rc = mbedtls_base64_decode(NULL, 0, &dlen, (unsigned char *)inbuf, inbufsize);
if (rc && (rc != MBEDTLS_ERR_BASE64_BUFFER_TOO_SMALL))
- debug(1, "Error %d getting decode length, result is %d.", rc, dlen);
+ debug(1, "Error %d getting decode length, result is %zu.", rc, dlen);
else {
// debug(1,"Decode size is %d.",dlen);
buf = malloc(dlen);
if (EVP_PKEY_sign(ctx, NULL, &ol, (const unsigned char *)input, inlen) > 0) { // 1.0.2
out = (unsigned char *)malloc(ol);
if (EVP_PKEY_sign(ctx, out, &ol, (const unsigned char *)input, inlen) > 0) { // 1.0.2
- debug(3, "success with output length of %lu.", ol);
+ debug(3, "success with output length of %zu.", ol);
} else {
debug(1, "error 2 \"%s\" with EVP_PKEY_sign:",
ERR_error_string(ERR_get_error(), NULL));
rc = mbedtls_pk_parse_key(&pkctx, (unsigned char *)super_secret_key, sizeof(super_secret_key),
NULL, 0, mbedtls_ctr_drbg_random, &ctr_drbg);
#else
- rc = mbedtls_pk_parse_key(&pkctx, (unsigned char *)super_secret_key, sizeof(super_secret_key),
+ rc = mbedtls_pk_parse_key(&pkctx, (unsigned char *)super_secret_key, sizeof(super_secret_key),
NULL, 0);
#endif
switch (mode) {
case RSA_MODE_AUTH:
- mbedtls_rsa_set_padding(trsa, MBEDTLS_RSA_PKCS_V15, MBEDTLS_MD_NONE);
+ mbedtls_rsa_set_padding(trsa, MBEDTLS_RSA_PKCS_V15, MBEDTLS_MD_NONE);
outbuf = malloc(trsa->MBEDTLS_PRIVATE_V3_ONLY(len));
#if MBEDTLS_VERSION_MAJOR == 3
- rc = mbedtls_rsa_pkcs1_encrypt(trsa, mbedtls_ctr_drbg_random, &ctr_drbg,
- inlen, input, outbuf);
+ rc = mbedtls_pk_sign(&pkctx, MBEDTLS_MD_NONE, input, inlen, outbuf, mbedtls_pk_get_len(&pkctx), &olen, mbedtls_ctr_drbg_random, &ctr_drbg);
+ *outlen = olen;
#else
rc = mbedtls_rsa_pkcs1_encrypt(trsa, mbedtls_ctr_drbg_random, &ctr_drbg, MBEDTLS_RSA_PRIVATE,
inlen, input, outbuf);
+ *outlen = trsa->len;
#endif
if (rc != 0)
debug(1, "mbedtls_pk_encrypt error %d.", rc);
- *outlen = trsa->MBEDTLS_PRIVATE_V3_ONLY(len);
break;
case RSA_MODE_KEY:
mbedtls_rsa_set_padding(trsa, MBEDTLS_RSA_PKCS_V21, MBEDTLS_MD_SHA1);
outbuf = malloc(trsa->MBEDTLS_PRIVATE_V3_ONLY(len));
#if MBEDTLS_VERSION_MAJOR == 3
- rc = mbedtls_rsa_pkcs1_decrypt(trsa, mbedtls_ctr_drbg_random, &ctr_drbg,
- &olen, input, outbuf, trsa->MBEDTLS_PRIVATE_V3_ONLY(len));
+ rc = mbedtls_rsa_pkcs1_decrypt(trsa, mbedtls_ctr_drbg_random, &ctr_drbg, &olen, input, outbuf,
+ trsa->MBEDTLS_PRIVATE_V3_ONLY(len));
#else
rc = mbedtls_rsa_pkcs1_decrypt(trsa, mbedtls_ctr_drbg_random, &ctr_drbg, MBEDTLS_RSA_PRIVATE,
&olen, input, outbuf, trsa->len);
}
#endif
+int config_lookup_non_empty_string(const config_t *cfg, const char *path, const char **value) {
+ int response = config_lookup_string(cfg, path, value);
+ if (response == CONFIG_TRUE) {
+ if ((value != NULL) && ((*value == NULL) || (*value[0] == 0))) {
+ warn("The \"%s\" parameter is an empty string and has been ignored.", path);
+ response = CONFIG_FALSE;
+ }
+ }
+ return response;
+}
+
int config_set_lookup_bool(config_t *cfg, char *where, int *dst) {
const char *str = 0;
if (config_lookup_string(cfg, where, &str)) {
}
}
+// remember to free the returned array of strings.
+// you don't need to free the strings themselves -- they belong to libconfig.
+unsigned int config_get_string_settings_as_string_array(config_setting_t *setting,
+ const char ***result) {
+ unsigned int count = 0;
+ int error = 0;
+ *result = NULL;
+ const char **arr = NULL;
+ if (setting != NULL) { // definitely a setting
+ const char *str = config_setting_get_string(setting);
+ if (str != NULL) { // definitely a string
+ arr = malloc(sizeof(const char *));
+ arr[0] = str;
+ count = 1;
+ } else { // it might be a list, an array or a group
+ count = config_setting_length(setting);
+ if (count != 0) {
+ arr = malloc(sizeof(const char *) * count);
+ unsigned int i;
+ for (i = 0; i < count; i++) {
+ config_setting_t *item = config_setting_get_elem(setting, i);
+ if (config_setting_type(item) == CONFIG_TYPE_STRING)
+ arr[i] = config_setting_get_string(item);
+ else
+ error = i + 1;
+ }
+ } else {
+ error = 1;
+ }
+ }
+ }
+ if (error != 0) {
+ if (arr != NULL) {
+ free(arr);
+ }
+ count = -error; // signify an error
+ } else {
+ *result = arr;
+ }
+ return count;
+}
+
+// remember to free the returned array of ints.
+unsigned int config_get_int_settings_as_int_array(config_setting_t *setting, int **result) {
+ int error = 0;
+ unsigned int count = 0;
+ *result = NULL;
+ int *arr = NULL;
+ if (setting != NULL) { // definitely a setting there
+ if (config_setting_type(setting) == CONFIG_TYPE_INT) {
+ arr = malloc(sizeof(int));
+ arr[0] = config_setting_get_int(setting);
+ count = 1;
+ } else if (config_setting_is_aggregate(setting) == CONFIG_TRUE) {
+ count = config_setting_length(setting);
+ if (count != 0) {
+ arr = malloc(sizeof(int) * count);
+ unsigned int i;
+ for (i = 0; i < count; i++) {
+ config_setting_t *item = config_setting_get_elem(setting, i);
+ if (config_setting_type(item) == CONFIG_TYPE_INT)
+ arr[i] = config_setting_get_int(item);
+ else
+ error = i + 1;
+ }
+ }
+ } else {
+ error = 1; // subtract 1 from the error number to get the element number
+ }
+ }
+ if (error != 0) {
+ if (arr != NULL) {
+ free(arr);
+ }
+ count = -error; // signify an error
+ } else {
+ *result = arr;
+ }
+ return count;
+}
+
+// Look for the item in the setting which could be either a string or an array or list or group of
+// strings. Result: 0 means there is a setting but no match, 1 means there's no setting, 2 means
+// "auto" was found, 3 means a match.
+int check_string_or_list_setting(config_setting_t *setting, const char *item) {
+ int result = 1; // means there is no setting at all (so the caller should implement the default)
+ if (setting != NULL) { // definitely a setting
+ const char *str = config_setting_get_string(setting);
+ debug(3, "check \"%s\" against \"%s\"", str == NULL ? "(not a string)" : str, item);
+ if (str != NULL) { // definitely a string
+ if (strcasecmp(str, item) == 0) {
+ result = 3; // an exact match
+ } else if (strcasecmp(str, "auto") == 0) {
+ result = 2; // auto
+ } else {
+ result = 0; // a string that is not a match
+ }
+ } else { // it might be a list, an array or a group
+ int i = 0;
+ result = 0; // presume there is no match
+ // keep looking, even if "auto" has been found, to see if the exact match (preferred) is there
+ // too.
+ while (((result == 0) || (result == 2)) && (i < config_setting_length(setting))) {
+ const char *str2 = config_setting_get_string_elem(setting, i);
+ if (str2 != NULL) { // definitely a string
+ if (strcasecmp(str2, "auto") == 0) {
+ result = 2; // auto
+ } else if (strcasecmp(str2, item) == 0) {
+ result = 3; // an exact match
+ }
+ }
+ i++; // will point to 1 past the found item or last item.
+ }
+ }
+ }
+ return result;
+}
+
+// Look for the item in the setting which could be either an int or an array or list or group of
+// ints. Result: 0 means there is a setting but no match, 1 means there's no setting, 2 means "auto"
+// was found, 3 means a match.
+int check_int_or_list_setting(config_setting_t *setting, const int item) {
+ int result = 1; // means there is no setting at all
+ if (setting != NULL) { // definitely a setting
+ int setting_type = config_setting_type(setting);
+ if (setting_type == CONFIG_TYPE_STRING) {
+ if (strcasecmp(config_setting_get_string(setting), "auto") == 0) {
+ result = 2; // auto
+ } else {
+ result = 0; // a string that can not be a match
+ }
+ } else if (setting_type == CONFIG_TYPE_INT) {
+ if (item == config_setting_get_int(setting))
+ result = 3; // an exact match
+ else
+ result = 0; // a setting but not a match
+ } else { // it might be a list, an array or a group
+ int i = 0;
+ result = 0; // presume there is no match (there is a setting)
+ // keep looking, even if "auto" has been found, to see if the exact match (preferred) is there
+ // too.
+ while (((result == 0) || (result == 2)) && (i < config_setting_length(setting))) {
+ config_setting_t *sub_setting = config_setting_get_elem(setting, i);
+ int sub_setting_type = config_setting_type(sub_setting);
+ if (sub_setting_type == CONFIG_TYPE_STRING) {
+ if (strcasecmp(config_setting_get_string(sub_setting), "auto") == 0) {
+ result = 2; // auto
+ }
+ } else if (sub_setting_type == CONFIG_TYPE_INT) {
+ if (item == config_setting_get_int(sub_setting))
+ result = 3; // an exact match
+ }
+ i++; // will point to 1 past the found item or last item.
+ }
+ }
+ }
+ return result;
+}
+
void command_set_volume(double volume) {
// this has a cancellation point if waiting is enabled
if (config.cmd_set_volume) {
int flags = fcntl(fdis, F_GETFL);
if (flags == -1) {
char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ getErrorText((char *)errorstring, sizeof(errorstring));
debug(1, "try_to_open_pipe -- error %d (\"%s\") getting flags of pipe: \"%s\".", errno,
(char *)errorstring, pathname);
} else {
flags = fcntl(fdis, F_SETFL, flags & ~O_NONBLOCK);
if (flags == -1) {
char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ getErrorText((char *)errorstring, sizeof(errorstring));
debug(1, "try_to_open_pipe -- error %d (\"%s\") unsetting NONBLOCK of pipe: \"%s\".", errno,
(char *)errorstring, pathname);
}
rem = req;
} while ((result == -1) && (errno == EINTR));
if (result == -1)
- debug(1, "Error in sps_nanosleep of %d sec and %ld nanoseconds: %d.", sec, nanosec, errno);
+ debug(1, "Error in sps_nanosleep of %" PRIdMAX " sec and %ld nanoseconds: %d.", (intmax_t)sec, nanosec, errno);
}
// Mac OS X doesn't have pthread_mutex_timedlock
int _debug_mutex_lock(pthread_mutex_t *mutex, useconds_t dally_time, const char *mutexname,
const char *filename, const int line, int debuglevel) {
- if ((debuglevel > debuglev) || (debuglevel == 0))
+ if ((debuglevel > debug_level()) || (debuglevel == 0))
return pthread_mutex_lock(mutex);
int oldState;
pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
int _debug_mutex_unlock(pthread_mutex_t *mutex, const char *mutexname, const char *filename,
const int line, int debuglevel) {
- if ((debuglevel > debuglev) || (debuglevel == 0))
+ if ((debuglevel > debug_level()) || (debuglevel == 0))
return pthread_mutex_unlock(mutex);
int oldState;
pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
snprintf(dstring, sizeof(dstring), "%s:%d", filename, line);
debug(debuglevel, "mutex_unlock \"%s\" at \"%s\".", mutexname, dstring);
int r = pthread_mutex_unlock(mutex);
- if ((debuglevel != 0) && (r != 0))
- debug(1, "error %d: \"%s\" unlocking mutex \"%s\" at \"%s\".", r,
- strerror_r(r, errstr, sizeof(errstr)), mutexname, dstring);
+ if ((debuglevel != 0) && (r != 0)) {
+ if (strerror_r(r, errstr, sizeof(errstr)) == 0) {
+ debug(1, "error %d: \"%s\" unlocking mutex \"%s\" at \"%s\".", r, errstr, mutexname, dstring);
+ } else {
+ debug(1, "error %d: unlocking mutex \"%s\" at \"%s\".", r, mutexname, dstring);
+ }
+ }
pthread_setcancelstate(oldState, NULL);
return r;
}
void malloc_cleanup(void *arg) {
- // debug(1, "malloc cleanup freeing %" PRIxPTR ".", arg);
- free(arg);
+ // the address of the malloc variable is passed in case a realloc is done as some time
+ // debug(1, "malloc cleanup called.");
+ void **allocation = arg;
+ void *ref = *allocation;
+ if (ref != NULL)
+ free(ref);
}
#ifdef CONFIG_AIRPLAY_2
void socket_cleanup(void *arg) {
intptr_t fdp = (intptr_t)arg;
- debug(3, "socket_cleanup called for socket: %" PRIdPTR ".", fdp);
+ int soc = fdp;
+ debug(3, "socket_cleanup called for socket: %d.", soc);
close(fdp);
}
pthread_mutex_destroy(mutex);
}
-void mutex_unlock(void *arg) { pthread_mutex_unlock((pthread_mutex_t *)arg); }
-
void rwlock_unlock(void *arg) { pthread_rwlock_unlock((pthread_rwlock_t *)arg); }
+void mutex_unlock(void *arg) { pthread_mutex_unlock((pthread_mutex_t *)arg); }
+
void thread_cleanup(void *arg) {
debug(3, "thread_cleanup called.");
pthread_t *thread = (pthread_t *)arg;
#ifdef CONFIG_APPLE_ALAC
strcat(version_string, "-alac");
#endif
+#ifndef CONFIG_AIRPLAY_2
+#ifdef CONFIG_FFMPEG
+ strcat(version_string, "-FFmpeg");
+#endif
+#endif
#ifdef CONFIG_LIBDAEMON
strcat(version_string, "-libdaemon");
#endif
#ifdef CONFIG_AO
strcat(version_string, "-ao");
#endif
-#ifdef CONFIG_PA
- strcat(version_string, "-pa");
+#ifdef CONFIG_PULSEAUDIO
+ strcat(version_string, "-PulseAudio");
#endif
-#ifdef CONFIG_PW
- strcat(version_string, "-pw");
+#ifdef CONFIG_PIPEWIRE
+ strcat(version_string, "-PipeWire");
#endif
#ifdef CONFIG_SOUNDIO
strcat(version_string, "-soundio");
return version_string;
}
-int64_t generate_zero_frames(char *outp, size_t number_of_frames, sps_format_t format,
- int with_dither, int64_t random_number_in) {
- // return the last random number used
- // assuming the buffer has been assigned
-
- // add a TPDF dither -- see
- // http://educypedia.karadimov.info/library/DitherExplained.pdf
- // and the discussion around https://www.hydrogenaud.io/forums/index.php?showtopic=16963&st=25
-
- // I think, for a 32 --> 16 bits, the range of
- // random numbers needs to be from -2^16 to 2^16, i.e. from -65536 to 65536 inclusive, not from
- // -32768 to +32767
-
- // Actually, what would be generated here is from -65535 to 65535, i.e. one less on the limits.
-
- // See the original paper at
- // http://www.ece.rochester.edu/courses/ECE472/resources/Papers/Lipshitz_1992.pdf
- // by Lipshitz, Wannamaker and Vanderkooy, 1992.
-
- int64_t dither_mask = 0;
- switch (format) {
- case SPS_FORMAT_S32:
- case SPS_FORMAT_S32_LE:
- case SPS_FORMAT_S32_BE:
- dither_mask = (int64_t)1 << (64 - 32);
- break;
- case SPS_FORMAT_S24:
- case SPS_FORMAT_S24_LE:
- case SPS_FORMAT_S24_BE:
- case SPS_FORMAT_S24_3LE:
- case SPS_FORMAT_S24_3BE:
- dither_mask = (int64_t)1 << (64 - 24);
- break;
- case SPS_FORMAT_S16:
- case SPS_FORMAT_S16_LE:
- case SPS_FORMAT_S16_BE:
- dither_mask = (int64_t)1 << (64 - 16);
- break;
- case SPS_FORMAT_S8:
- case SPS_FORMAT_U8:
- dither_mask = (int64_t)1 << (64 - 8);
- break;
- case SPS_FORMAT_UNKNOWN:
- die("Unexpected SPS_FORMAT_UNKNOWN while calculating dither mask.");
- break;
- case SPS_FORMAT_AUTO:
- die("Unexpected SPS_FORMAT_AUTO while calculating dither mask.");
- break;
- case SPS_FORMAT_INVALID:
- die("Unexpected SPS_FORMAT_INVALID while calculating dither mask.");
- break;
- }
- dither_mask -= 1;
-
+int64_t generate_zero_frames(char *outp, size_t number_of_frames, int with_dither,
+ int64_t random_number_in, uint32_t encoded_output_format) {
int64_t previous_random_number = random_number_in;
- char *p = outp;
- size_t sample_number;
- r64_lock; // the random number generator is not thread safe, so we need to lock it while using it
- for (sample_number = 0; sample_number < number_of_frames * 2; sample_number++) {
-
- int64_t hyper_sample = 0;
- int64_t r = r64i();
+ if (encoded_output_format != 0) {
+ unsigned int channels = CHANNELS_FROM_ENCODED_FORMAT(encoded_output_format);
+ sps_format_t format = (sps_format_t)FORMAT_FROM_ENCODED_FORMAT(encoded_output_format);
+ // return the last random number used
+ // assuming the buffer has been assigned
- int64_t tpdf = (r & dither_mask) - (previous_random_number & dither_mask);
+ // add a TPDF dither -- see
+ // http://educypedia.karadimov.info/library/DitherExplained.pdf
+ // and the discussion around https://www.hydrogenaud.io/forums/index.php?showtopic=16963&st=25
- // add dither if permitted -- no need to check for clipping, as the sample is, uh, zero
+ // I think, for a 32 --> 16 bits, the range of
+ // random numbers needs to be from -2^16 to 2^16, i.e. from -65536 to 65536 inclusive, not from
+ // -32768 to +32767
- if (with_dither != 0)
- hyper_sample += tpdf;
+ // Actually, what would be generated here is from -65535 to 65535, i.e. one less on the limits.
- // move the result to the desired position in the int64_t
- char *op = p;
- int sample_length; // this is the length of the sample
+ // See the original paper at
+ // http://www.ece.rochester.edu/courses/ECE472/resources/Papers/Lipshitz_1992.pdf
+ // by Lipshitz, Wannamaker and Vanderkooy, 1992.
+ int64_t dither_mask = 0;
switch (format) {
case SPS_FORMAT_S32:
- hyper_sample >>= (64 - 32);
- *(int32_t *)op = hyper_sample;
- sample_length = 4;
- break;
case SPS_FORMAT_S32_LE:
- *op++ = (uint8_t)(hyper_sample >> (64 - 32)); // 32 bits, ls byte
- *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 8)); // 32 bits, less significant middle byte
- *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 16)); // 32 bits, more significant middle byte
- *op = (uint8_t)(hyper_sample >> (64 - 32 + 24)); // 32 bits, ms byte
- sample_length = 4;
- break;
case SPS_FORMAT_S32_BE:
- *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 24)); // 32 bits, ms byte
- *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 16)); // 32 bits, more significant middle byte
- *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 8)); // 32 bits, less significant middle byte
- *op = (uint8_t)(hyper_sample >> (64 - 32)); // 32 bits, ls byte
- sample_length = 4;
- break;
- case SPS_FORMAT_S24_3LE:
- *op++ = (uint8_t)(hyper_sample >> (64 - 24)); // 24 bits, ls byte
- *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 8)); // 24 bits, middle byte
- *op = (uint8_t)(hyper_sample >> (64 - 24 + 16)); // 24 bits, ms byte
- sample_length = 3;
- break;
- case SPS_FORMAT_S24_3BE:
- *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 16)); // 24 bits, ms byte
- *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 8)); // 24 bits, middle byte
- *op = (uint8_t)(hyper_sample >> (64 - 24)); // 24 bits, ls byte
- sample_length = 3;
+ dither_mask = (int64_t)1 << (64 - 32);
break;
case SPS_FORMAT_S24:
- hyper_sample >>= (64 - 24);
- *(int32_t *)op = hyper_sample;
- sample_length = 4;
- break;
case SPS_FORMAT_S24_LE:
- *op++ = (uint8_t)(hyper_sample >> (64 - 24)); // 24 bits, ls byte
- *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 8)); // 24 bits, middle byte
- *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 16)); // 24 bits, ms byte
- *op = 0;
- sample_length = 4;
- break;
case SPS_FORMAT_S24_BE:
- *op++ = 0;
- *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 16)); // 24 bits, ms byte
- *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 8)); // 24 bits, middle byte
- *op = (uint8_t)(hyper_sample >> (64 - 24)); // 24 bits, ls byte
- sample_length = 4;
+ case SPS_FORMAT_S24_3LE:
+ case SPS_FORMAT_S24_3BE:
+ dither_mask = (int64_t)1 << (64 - 24);
break;
+ case SPS_FORMAT_S16:
case SPS_FORMAT_S16_LE:
- *op++ = (uint8_t)(hyper_sample >> (64 - 16));
- *op++ = (uint8_t)(hyper_sample >> (64 - 16 + 8)); // 16 bits, ms byte
- sample_length = 2;
- break;
case SPS_FORMAT_S16_BE:
- *op++ = (uint8_t)(hyper_sample >> (64 - 16 + 8)); // 16 bits, ms byte
- *op = (uint8_t)(hyper_sample >> (64 - 16));
- sample_length = 2;
- break;
- case SPS_FORMAT_S16:
- *(int16_t *)op = (int16_t)(hyper_sample >> (64 - 16));
- sample_length = 2;
+ dither_mask = (int64_t)1 << (64 - 16);
break;
case SPS_FORMAT_S8:
- *op = (int8_t)(hyper_sample >> (64 - 8));
- sample_length = 1;
- break;
case SPS_FORMAT_U8:
- *op = 128 + (uint8_t)(hyper_sample >> (64 - 8));
- sample_length = 1;
+ dither_mask = (int64_t)1 << (64 - 8);
+ break;
+ case SPS_FORMAT_UNKNOWN:
+ die("Unexpected SPS_FORMAT_UNKNOWN while calculating dither mask.");
+ break;
+ case SPS_FORMAT_AUTO:
+ die("Unexpected SPS_FORMAT_AUTO while calculating dither mask.");
+ break;
+ case SPS_FORMAT_INVALID:
+ die("Unexpected SPS_FORMAT_INVALID while calculating dither mask.");
break;
- default:
- sample_length = 0; // stop a compiler warning
- die("Unexpected SPS_FORMAT_* with index %d while outputting silence", format);
}
- p += sample_length;
- previous_random_number = r;
+ dither_mask -= 1;
+
+ char *p = outp;
+ size_t sample_number;
+ r64_lock; // the random number generator is not thread safe, so we need to lock it while using
+ // it
+ for (sample_number = 0; sample_number < number_of_frames * channels; sample_number++) {
+
+ int64_t hyper_sample = 0;
+ int64_t r = r64i();
+
+ int64_t tpdf = (r & dither_mask) - (previous_random_number & dither_mask);
+
+ // add dither if permitted -- no need to check for clipping, as the sample is, uh, zero
+
+ if (with_dither != 0)
+ hyper_sample += tpdf;
+
+ /*
+ {
+ // hack to generate low level white noise instead of adding dither
+ hyper_sample = r;
+ hyper_sample = hyper_sample / (1 << 8); // keep the sign
+ }
+ */
+
+ // move the result to the desired position in the int64_t
+ char *op = p;
+ int sample_length; // this is the length of the sample
+
+ switch (format) {
+ case SPS_FORMAT_S32:
+ hyper_sample >>= (64 - 32);
+ *(int32_t *)op = hyper_sample;
+ sample_length = 4;
+ break;
+ case SPS_FORMAT_S32_LE:
+ *op++ = (uint8_t)(hyper_sample >> (64 - 32)); // 32 bits, ls byte
+ *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 8)); // 32 bits, less significant middle byte
+ *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 16)); // 32 bits, more significant middle byte
+ *op = (uint8_t)(hyper_sample >> (64 - 32 + 24)); // 32 bits, ms byte
+ sample_length = 4;
+ break;
+ case SPS_FORMAT_S32_BE:
+ *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 24)); // 32 bits, ms byte
+ *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 16)); // 32 bits, more significant middle byte
+ *op++ = (uint8_t)(hyper_sample >> (64 - 32 + 8)); // 32 bits, less significant middle byte
+ *op = (uint8_t)(hyper_sample >> (64 - 32)); // 32 bits, ls byte
+ sample_length = 4;
+ break;
+ case SPS_FORMAT_S24_3LE:
+ *op++ = (uint8_t)(hyper_sample >> (64 - 24)); // 24 bits, ls byte
+ *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 8)); // 24 bits, middle byte
+ *op = (uint8_t)(hyper_sample >> (64 - 24 + 16)); // 24 bits, ms byte
+ sample_length = 3;
+ break;
+ case SPS_FORMAT_S24_3BE:
+ *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 16)); // 24 bits, ms byte
+ *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 8)); // 24 bits, middle byte
+ *op = (uint8_t)(hyper_sample >> (64 - 24)); // 24 bits, ls byte
+ sample_length = 3;
+ break;
+ case SPS_FORMAT_S24:
+ hyper_sample >>= (64 - 24);
+ *(int32_t *)op = hyper_sample;
+ sample_length = 4;
+ break;
+ case SPS_FORMAT_S24_LE:
+ *op++ = (uint8_t)(hyper_sample >> (64 - 24)); // 24 bits, ls byte
+ *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 8)); // 24 bits, middle byte
+ *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 16)); // 24 bits, ms byte
+ *op = 0;
+ sample_length = 4;
+ break;
+ case SPS_FORMAT_S24_BE:
+ *op++ = 0;
+ *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 16)); // 24 bits, ms byte
+ *op++ = (uint8_t)(hyper_sample >> (64 - 24 + 8)); // 24 bits, middle byte
+ *op = (uint8_t)(hyper_sample >> (64 - 24)); // 24 bits, ls byte
+ sample_length = 4;
+ break;
+ case SPS_FORMAT_S16_LE:
+ *op++ = (uint8_t)(hyper_sample >> (64 - 16));
+ *op++ = (uint8_t)(hyper_sample >> (64 - 16 + 8)); // 16 bits, ms byte
+ sample_length = 2;
+ break;
+ case SPS_FORMAT_S16_BE:
+ *op++ = (uint8_t)(hyper_sample >> (64 - 16 + 8)); // 16 bits, ms byte
+ *op = (uint8_t)(hyper_sample >> (64 - 16));
+ sample_length = 2;
+ break;
+ case SPS_FORMAT_S16:
+ *(int16_t *)op = (int16_t)(hyper_sample >> (64 - 16));
+ sample_length = 2;
+ break;
+ case SPS_FORMAT_S8:
+ *op = (int8_t)(hyper_sample >> (64 - 8));
+ sample_length = 1;
+ break;
+ case SPS_FORMAT_U8:
+ *op = 128 + (uint8_t)(hyper_sample >> (64 - 8));
+ sample_length = 1;
+ break;
+ default:
+ sample_length = 0; // stop a compiler warning
+ die("Unexpected SPS_FORMAT_* with index %d while outputting silence", format);
+ }
+ p += sample_length;
+ previous_random_number = r;
+ }
+ r64_unlock;
+ } else {
+ debug(1, "No output configuration!");
}
- r64_unlock;
return previous_random_number;
}
return response;
}
-// the difference between two unsigned 32-bit modulo values as a signed 32-bit result
-// now, if the two numbers are constrained to be within 2^(n-1)-1 of one another,
-// we can use their as a signed 2^n bit number which will be positive
-// if the first number is the same or "after" the second, and
-// negative otherwise
-
-int32_t mod32Difference(uint32_t a, uint32_t b) {
- int32_t result = a - b;
- return result;
-}
-
int get_device_id(uint8_t *id, int int_length) {
uint64_t wait_time = 10000000000L; // wait up to this (ns) long to get a MAC address
#ifdef AF_PACKET
if ((ifa->ifa_addr) && (ifa->ifa_addr->sa_family == AF_PACKET)) {
struct sockaddr_ll *s = (struct sockaddr_ll *)ifa->ifa_addr;
- if ((strcmp(ifa->ifa_name, "lo") != 0)) {
+ if (
+ ((ifa->ifa_flags & IFF_UP) != 0) &&
+ ((ifa->ifa_flags & IFF_RUNNING) != 0) &&
+ ((ifa->ifa_flags & IFF_LOOPBACK) == 0) &&
+ (ifa->ifa_addr != 0)
+ ) {
found = 1;
response = 0;
for (i = 0; ((i < s->sll_halen) && (i < int_length)); i++) {
warn("Can't create a device ID -- no valid MAC address can be found.");
return response;
}
+
+// Format a printf-style string into a fixed-size caller-supplied buffer with
+// pthread cancellation disabled for the duration, so a cancellation request
+// can not land inside vsnprintf. Returns the buffer so the call can be used
+// inside an expression.
+// NOTE(review): max_bytes (ssize_t) is passed straight to vsnprintf, whose
+// size parameter is size_t -- a negative value would wrap to a huge size.
+// Confirm that no caller can pass a negative max_bytes.
+char *bnprintf(char *buffer, ssize_t max_bytes, const char *format, ...) {
+  int oldState;
+  // block cancellation so this thread can not be cancelled mid-format
+  pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
+  va_list args;
+  va_start(args, format);
+  vsnprintf(buffer, max_bytes, format, args);
+  va_end(args);
+  // restore the caller's previous cancellation state
+  pthread_setcancelstate(oldState, NULL);
+  // debug(1,"bnprintf string is: \"%s\"", buffer);
+  return buffer;
+}
+
+// Set the name of the given thread from a printf-style format.
+// On Linux the kernel limits thread names to 15 characters plus the NUL, so
+// the formatted name is truncated to fit actual_name[16].
+// On macOS the two-argument pthread_setname_np is unavailable, so this is a
+// no-op that returns 0 (and the parameters go unused -- NOTE(review): this
+// may trigger unused-parameter warnings on macOS builds; confirm flags).
+// Returns 0 on success or the pthread_setname_np error code.
+int do_pthread_setname(pthread_t *restrict thread, const char *format, ...) {
+#ifdef COMPILE_FOR_OSX
+  return 0;
+#else
+  // pthread_setname_np/2 not defined in macOS
+  char actual_name[16];
+  va_list args;
+  va_start(args, format);
+  vsnprintf(actual_name, sizeof(actual_name), format, args);
+  va_end(args);
+  return pthread_setname_np(*thread, actual_name);
+#endif
+}
+
+// Create a thread and name it from the printf-style format (truncated to 15
+// characters plus NUL, the Linux limit). Naming happens only if creation
+// succeeds and only on non-macOS builds; the pthread_setname_np result is
+// deliberately ignored -- the name is advisory, for debugging.
+// Returns the pthread_create result (0 on success).
+int named_pthread_create(pthread_t *thread, const pthread_attr_t *attr,
+                         void *(*start_routine)(void *), void *arg, const char *format, ...) {
+  char actual_name[16];
+  va_list args;
+  va_start(args, format);
+  vsnprintf(actual_name, sizeof(actual_name), format, args);
+  va_end(args);
+  int response = pthread_create(thread, attr, start_routine, arg);
+  if (response != 0) {
+    debug(1, "error creating thread \"%s\"", actual_name);
+  }
+#ifndef COMPILE_FOR_OSX
+  else {
+    pthread_setname_np(*thread, actual_name);
+  }
+#endif
+  return response;
+}
+
+// Create a thread named from the printf-style format (truncated to 15
+// characters plus NUL on Linux) and try to give it SCHED_FIFO scheduling at
+// the given priority. If setting up the realtime attributes fails, or if
+// creating the prioritised thread fails (e.g. a permissions error), it falls
+// back to creating an ordinary thread, informing the user once per run.
+// Returns the final pthread_create result; on non-macOS builds it dies if
+// even the fallback creation fails.
+int named_pthread_create_with_priority(pthread_t *thread, int priority,
+                                       void *(*start_routine)(void *), void *arg,
+                                       const char *format, ...) {
+
+  // if this gets a permissions error, it'll try to create a thread without any special
+  // priority or scheduling
+
+  // remembered across calls so the realtime warning is issued only once
+  static int failed_to_set_rt = 0;
+
+  struct sched_param param;
+  pthread_attr_t attr;
+  int ret = 0;
+  int attr_initialized = 0; // true once pthread_attr_init has succeeded
+
+  char actual_name[16];
+  va_list args;
+  va_start(args, format);
+  vsnprintf(actual_name, sizeof(actual_name), format, args);
+  va_end(args);
+
+  /* Initialize pthread attributes (default values) */
+  ret = pthread_attr_init(&attr);
+  if (ret == 0) {
+    attr_initialized = 1;
+    /* Set scheduler policy and priority of pthread */
+    ret = pthread_attr_setschedpolicy(&attr, SCHED_FIFO);
+    if (ret == 0) {
+      param.sched_priority = priority;
+      ret = pthread_attr_setschedparam(&attr, &param);
+      if (ret == 0) {
+        /* Use scheduling parameters of attr rather than inheriting them */
+        ret = pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED);
+        if (ret != 0) {
+          debug(1, "pthread setinheritsched failed");
+        }
+      } else {
+        debug(1, "pthread setschedparam failed");
+      }
+    } else {
+      debug(1, "pthread setschedpolicy failed");
+    }
+  } else {
+    debug(1, "init pthread attributes failed");
+  }
+  // ret == 0 if creating and setting up the attributes was successful
+  if (ret == 0)
+    ret = pthread_create(thread, &attr, start_routine, arg);
+  // destroy the attributes whenever init succeeded, even if a later setup
+  // step failed -- previously the attr object leaked on that path
+  if (attr_initialized != 0)
+    pthread_attr_destroy(&attr);
+  // ret will be non-zero if there was a problem creating the attribute or creating the
+  // prioritized thread
+  if (ret != 0) {
+    ret = pthread_create(thread, NULL, start_routine, arg);
+    if (failed_to_set_rt == 0) {
+      inform("Can not set realtime properties of a thread.");
+      failed_to_set_rt = 1;
+    }
+  }
+#ifndef COMPILE_FOR_OSX
+  if (ret == 0) {
+    pthread_setname_np(*thread, actual_name);
+  } else {
+    die("named_pthread_create_with_priority failed with error %d", ret);
+  }
+#endif
+  return ret;
+}
+
+#ifdef CONFIG_CONVOLUTION
+/* Parse a comma-separated list of filenames with optional single or double
+ * quotes and backslash escapes.
+ * Returns a malloc'd array of ir_file_info_t structs -- the caller must free
+ * both the array and the filename strings (see free_ir_filenames).
+ * *file_count is set to the number of filenames found.
+ * Returns NULL on invalid arguments or allocation failure, with any partial
+ * results already freed.
+ */
+ir_file_info_t *parse_ir_filenames(const char *input, unsigned int *file_count) {
+  if (!input || !file_count)
+    return NULL;
+
+  *file_count = 0;
+  unsigned int capacity = 10;
+  ir_file_info_t *files = malloc(capacity * sizeof(ir_file_info_t));
+  if (!files)
+    return NULL;
+
+  const char *p = input;
+
+  while (*p) {
+    /* Skip whitespace before filename */
+    while (isspace((unsigned char)*p))
+      p++;
+    if (!*p)
+      break;
+
+    /* Check if we need to resize the array */
+    if (*file_count >= capacity) {
+      capacity *= 2;
+      ir_file_info_t *temp = realloc(files, capacity * sizeof(ir_file_info_t));
+      if (!temp) {
+        for (unsigned int i = 0; i < *file_count; i++)
+          free(files[i].filename);
+        free(files);
+        return NULL;
+      }
+      files = temp;
+    }
+
+    /* Parse one filename into a growable buffer */
+    char quote_char = 0;
+    char *buffer = NULL;
+    size_t buf_len = 0;
+    size_t buf_cap = 64;
+
+    if (*p == '"' || *p == '\'') {
+      quote_char = *p; /* remember which quote must close the name */
+      p++;
+    }
+
+    buffer = malloc(buf_cap);
+    if (!buffer) {
+      for (unsigned int i = 0; i < *file_count; i++)
+        free(files[i].filename);
+      free(files);
+      return NULL;
+    }
+
+    /* A quoted name ends at the matching quote; an unquoted name ends at a
+     * comma or at the end of the input. A backslash escapes the next
+     * character in either case. */
+    while (*p && *p != (quote_char ? quote_char : ',')) {
+      if (*p == '\\' && *(p + 1))
+        p++; /* skip the backslash; copy the escaped character verbatim */
+      if (buf_len >= buf_cap - 1) {
+        buf_cap *= 2;
+        char *temp = realloc(buffer, buf_cap);
+        if (!temp) {
+          free(buffer);
+          for (unsigned int i = 0; i < *file_count; i++)
+            free(files[i].filename);
+          free(files);
+          return NULL;
+        }
+        buffer = temp;
+      }
+      buffer[buf_len++] = *p++;
+    }
+
+    if (quote_char) {
+      if (*p == quote_char)
+        p++; /* Skip the closing quote */
+    } else {
+      /* Trim trailing whitespace from an unquoted name */
+      while (buf_len > 0 && isspace((unsigned char)buffer[buf_len - 1]))
+        buf_len--;
+    }
+    buffer[buf_len] = '\0';
+
+    files[*file_count].samplerate = 0;
+    files[*file_count].channels = 0; /* was left uninitialised for quoted names */
+    // files[*file_count].evaluation = ev_unchecked;
+    files[*file_count].filename = buffer;
+    (*file_count)++;
+
+    /* Skip the separating comma and any surrounding whitespace */
+    while (isspace((unsigned char)*p))
+      p++;
+    if (*p == ',') {
+      p++;
+      while (isspace((unsigned char)*p))
+        p++;
+    }
+  }
+
+  return files;
+}
+
+/* Do a quick sanity check on the files -- see if they can be opened as sound files.
+ * For each file that libsndfile can open, record its samplerate and channel
+ * count back into the files[] array and log its details at option_print_level.
+ * Files that can not be opened produce a warn() for the user and a debug line.
+ * NOTE(review): the duration calculation divides by sfinfo.samplerate --
+ * assumed non-zero whenever sf_open succeeds; confirm against libsndfile. */
+void sanity_check_ir_files(const int option_print_level, ir_file_info_t *files,
+                           unsigned int count) {
+  if (files != NULL) {
+    debug(option_print_level, "convolution impulse response files: %d found.", count);
+    for (unsigned int i = 0; i < count; i++) {
+
+      SF_INFO sfinfo = {};
+      // sfinfo.format = 0;
+
+      SNDFILE *file = sf_open(files[i].filename, SFM_READ, &sfinfo);
+      if (file) {
+        // files[i].evaluation = ev_okay;
+        // remember the file's properties for later convolver setup
+        files[i].samplerate = sfinfo.samplerate;
+        files[i].channels = sfinfo.channels;
+        debug(option_print_level,
+              "convolution impulse response file \"%s\": %" PRId64
+              " frames (%.1f seconds), %d channel%s at %d frames per second.",
+              files[i].filename, sfinfo.frames, (float)sfinfo.frames / sfinfo.samplerate,
+              sfinfo.channels, sfinfo.channels == 1 ? "" : "s", sfinfo.samplerate);
+        sf_close(file);
+      } else {
+        // files[i].evaluation = ev_invalid;
+        debug(option_print_level, "convolution impulse response file \"%s\" %s", files[i].filename,
+              sf_strerror(NULL));
+        warn("Error accessing the convolution impulse response file \"%s\". %s", files[i].filename,
+             sf_strerror(NULL));
+      }
+    }
+  } else {
+    debug(option_print_level, "no convolution impulse response files found.");
+  }
+}
+
+/* Release an ir_file_info_t array produced by parse_ir_filenames, including
+ * every filename string it owns. Safe to call with a NULL files pointer. */
+void free_ir_filenames(ir_file_info_t *files, unsigned int file_count) {
+  if (files != NULL) {
+    unsigned int index = 0;
+    while (index < file_count) {
+      free(files[index].filename);
+      index++;
+    }
+    free(files);
+  }
+}
+#endif
#ifndef _COMMON_H
#define _COMMON_H
-#include <libconfig.h>
-#include <pthread.h>
-#include <signal.h>
-#include <stdint.h>
-#include <sys/socket.h>
-#include <sys/stat.h>
-#include <unistd.h>
-
-#include "audio.h"
+#define _GNU_SOURCE
+
+#include <sys/types.h> // for mode_t
+#include <unistd.h> // for useconds_t
+
#include "config.h"
#include "definitions.h"
#include "mdns.h"
#define SAFAMILY sa_family
#endif
+#if defined(CONFIG_CONVOLUTION)
+// Evaluation states for an impulse response file.
+// NOTE(review): currently unreferenced in this chunk -- the evaluation field
+// of ir_file_info_t is commented out; confirm this enum is still needed.
+typedef enum { ev_unchecked, ev_okay, ev_invalid } ir_file_evaluation;
+
+// Describes one convolution impulse response file parsed from the settings.
+typedef struct {
+  unsigned int samplerate; // initialized to 0, will be filter frame rate
+  unsigned int channels;   // channel count, filled in when the file is opened
+  char *filename;          // the parsed filename
+} ir_file_info_t;
+#endif
+
#if defined(CONFIG_DBUS_INTERFACE) || defined(CONFIG_MPRIS_INTERFACE)
+#include <glib.h>
typedef enum {
- DBT_system = 0, // use the session bus
- DBT_session, // use the system bus
-} dbus_session_type;
+ DBT_default = 0,
+ DBT_system, // use the system bus
+ DBT_session, // use the session bus
+} dbus_message_bus_t;
#endif
typedef enum {
TOE_normal,
TOE_emergency,
- TOE_dbus // a request was made on a D-Bus interface (the native D-Bus or MPRIS interfaces)-- don't
- // wait for the dbus thread to exit
} type_of_exit_type;
#define sps_extra_code_output_stalled 32768
typedef enum {
ST_basic = 0, // straight deletion or insertion of a frame in a 352-frame packet
+ ST_vernier, // interpolate from 352/1024 samples to 353/1025 or 351/1023
ST_soxr, // use libsoxr to make a 352 frame packet one frame longer or shorter
ST_auto, // use soxr if compiled for it and if the soxr_index is low enough
} stuffing_type;
typedef enum {
decoder_hammerton = 0,
decoder_apple_alac,
+ decoder_ffmpeg_alac,
} decoders_supported_type;
typedef enum {
// the following enum is for the formats recognised -- currently only S16LE is recognised for input,
// so these are output only for the present
+// ensure sps_format_sample_size_array and sps_format_description_string_array are in sync with
+// this!
typedef enum {
SPS_FORMAT_UNKNOWN = 0,
SPS_FORMAT_S8,
+ SPS_FORMAT_LOWEST = SPS_FORMAT_S8,
SPS_FORMAT_U8,
- SPS_FORMAT_S16,
SPS_FORMAT_S16_LE,
SPS_FORMAT_S16_BE,
- SPS_FORMAT_S24,
SPS_FORMAT_S24_LE,
SPS_FORMAT_S24_BE,
SPS_FORMAT_S24_3LE,
SPS_FORMAT_S24_3BE,
- SPS_FORMAT_S32,
SPS_FORMAT_S32_LE,
SPS_FORMAT_S32_BE,
+ SPS_FORMAT_HIGHEST_NATIVE = SPS_FORMAT_S32_BE,
+ SPS_FORMAT_S16,
+ SPS_FORMAT_S24,
+ SPS_FORMAT_S32,
SPS_FORMAT_AUTO,
SPS_FORMAT_INVALID,
} sps_format_t;
+typedef enum {
+ SPS_RATE_UNKNOWN = 0,
+ SPS_RATE_5512,
+ SPS_RATE_LOWEST = SPS_RATE_5512,
+ SPS_RATE_8000,
+ SPS_RATE_11025,
+ SPS_RATE_16000,
+ SPS_RATE_22050,
+ SPS_RATE_32000,
+ SPS_RATE_44100,
+ SPS_RATE_48000,
+ SPS_RATE_64000,
+ SPS_RATE_88200,
+ SPS_RATE_96000,
+ SPS_RATE_176400,
+ SPS_RATE_192000,
+ SPS_RATE_352800,
+ SPS_RATE_384000,
+ SPS_RATE_HIGHEST = SPS_RATE_384000,
+ SPS_RATE_ILLEGAL,
+} sps_rate_t;
+
+// these sets omit the _UNKNOWN, _AUTO and _ILLEGAL values
+#define SPS_FORMAT_SET (((1 << (SPS_FORMAT_HIGHEST_NATIVE + 1)) - 1) - (1 << SPS_FORMAT_UNKNOWN))
+#define SPS_RATE_SET (((1 << (SPS_RATE_HIGHEST + 1)) - 1) - (1 << SPS_RATE_UNKNOWN))
+#define SPS_CHANNEL_SET (((1 << (8 + 1)) - 1) - (1 << 0)) // channels 1 to 8, not 0-based!
+
+#ifndef CONFIG_AIRPLAY_2
+#define SPS_FORMAT_NON_FFMPEG_SET SPS_FORMAT_SET
+#define SPS_RATE_NON_FFMPEG_SET \
+ ((1 << SPS_RATE_44100) | (1 << SPS_RATE_88200) | (1 << SPS_RATE_176400) | (1 << SPS_RATE_352800))
+#define SPS_CHANNNEL_NON_FFMPEG_SET (1 << 2)
+#endif
+
+// the encoded rate field holds rate/2 in 19 bits (mask 0x7FFFF), so rates up
+// to 1048574 fps can be represented; the rate must be an even number
+#define RATE_FROM_ENCODED_FORMAT(encoded_format) (((encoded_format >> 6) & 0x7FFFF) * 2)
+#define RATE_TO_ENCODED_FORMAT(rate) (((rate / 2) & 0x7FFFF) << 6)
+
+// up to 127 channels
+#define CHANNELS_FROM_ENCODED_FORMAT(encoded_format) ((encoded_format >> 25) & 0x7F)
+#define CHANNELS_TO_ENCODED_FORMAT(channels) ((channels & 0x7F) << 25)
+
+// up to 64 different SPS_FORMATs
+#define FORMAT_FROM_ENCODED_FORMAT(encoded_format) (encoded_format & 0x3F)
+#define FORMAT_TO_ENCODED_FORMAT(format) (format & 0x3F)
+
+const char *short_format_description(int32_t encoded_format);
const char *sps_format_description_string(sps_format_t format);
+unsigned int sps_format_sample_size(sps_format_t format);
+unsigned int sps_rate_actual_rate(sps_rate_t rate);
+
typedef struct {
double missing_port_dacp_scan_interval_seconds; // if no DACP port number can be found, check at
// these intervals
int endianness;
double airplay_volume; // stored here for reloading when necessary
double default_airplay_volume;
- double high_threshold_airplay_volume;
- uint64_t last_access_to_volume_info_time;
- int limit_to_high_volume_threshold_time_in_minutes; // revert to the high threshold volume level
- // if the existing volume level exceeds this
- // and hasn't been used for this amount of
- // time (0 means never revert)
char *appName; // normally the app is called shairport-syn, but it may be symlinked
char *password;
char *service_name; // the name for the shairport service, e.g. "Shairport Sync Version %v running
// on host %h"
-#ifdef CONFIG_PA
+#ifdef CONFIG_PULSEAUDIO
char *pa_server; // the pulseaudio server address that Shairport Sync will play on.
char *pa_application_name; // the name under which Shairport Sync shows up as an "Application" in
// the Sound Preferences in most desktop Linuxes.
- // Defaults to "Shairport Sync".
+ // Defaults to "Shairport Sync". Shairport Sync must be playing to see it.
char *pa_sink; // the name (or id) of the sink that Shairport Sync will play on.
#endif
-#ifdef CONFIG_PW
- char *pw_application_name; // the name under which Shairport Sync shows up as an "Application" in
- // the Sound Preferences in most desktop Linuxes.
- // Defaults to "Shairport Sync".
+#ifdef CONFIG_PIPEWIRE
+ char *pw_application_name; // the name under which Shairport Sync shows up as an "Application" in
+ // the Sound Preferences in most desktop Linuxes.
+ // Defaults to "Shairport Sync".
- char *pw_node_name; // defaults to the application's name, usually "shairport-sync".
+ char *pw_node_name; // defaults to the application's name, usually "shairport-sync".
char *pw_sink_target; // leave this unset if you don't want to change the sink_target.
#endif
#ifdef CONFIG_METADATA
int mqtt_publish_raw;
int mqtt_publish_parsed;
int mqtt_publish_cover;
+ int mqtt_publish_retain;
int mqtt_enable_remote;
int mqtt_enable_autodiscovery;
char *mqtt_autodiscovery_prefix;
int ignore_volume_control;
int volume_max_db_set; // set to 1 if a maximum volume db has been set
int volume_max_db;
- int no_sync; // disable synchronisation, even if it's available
- int no_mmap; // disable use of mmap-based output, even if it's available
- double resync_threshold; // if it gets out of whack by more than this number of seconds, do a
- // resync. if zero, never do a resync.
- double resync_recovery_time; // if sync is late, drop the delay but also drop the following frames
- // up to the resync_recovery_time
+ int no_sync; // disable synchronisation, even if it's available
+ int no_mmap; // disable use of mmap-based output, even if it's available
+ double resync_threshold; // if it gets out of whack by more than this number of seconds, do a
+ // resync. if zero, never do a resync.
int allow_session_interruption;
int timeout; // while in play mode, exit if no packets of audio come in for more than this number
// of seconds . Zero means never exit.
int soxr_delay_threshold; // the soxr delay must be less or equal to this for soxr interpolation
// to be enabled under the auto setting
int decoders_supported;
- int use_apple_decoder; // set to 1 if you want to use the apple decoder instead of the original by
- // David Hammerton
+ int decoder_in_use;
// char *logfile;
// char *errfile;
char *configfile;
double audio_backend_buffer_interpolation_threshold_in_seconds; // below this, soxr interpolation
// will not occur -- it'll be
// basic interpolation instead.
+ double audio_decoded_buffer_desired_length; // the length of the buffer of fully decoded audio
+ // prior to being sent to the output device
double disable_standby_mode_silence_threshold; // below this, silence will be added to the output
// buffer
double disable_standby_mode_silence_scan_interval; // check the threshold this often
// before using
// sw attenuation
volume_control_profile_type volume_control_profile;
-
int output_format_auto_requested; // true if the configuration requests auto configuration
- sps_format_t output_format;
- int output_rate_auto_requested; // true if the configuration requests auto configuration
- unsigned int output_rate;
+ int output_rate_auto_requested; // true if the configuration requests auto configuration
+ uint32_t current_output_configuration;
+ // these are the formats/rate and channel configurations permitted by the settings or defaults
+ uint32_t format_set;
+ uint32_t rate_set;
+ uint32_t channel_set;
#ifdef CONFIG_CONVOLUTION
- int convolution;
- int convolver_valid;
- char *convolution_ir_file;
+ int convolution_enabled;
+ unsigned int convolution_rate; // 0 means the convolver has never been initialised, so ignore
+ // convolver_valid.
+ // but if this is the same as the current rate and convolver_valid is false, it means that an
+ // attempt to initialise the convolver has failed.
+ size_t convolution_block_size;
+ unsigned int convolution_ir_file_count;
+ ir_file_info_t *convolution_ir_files; // NULL or an array of information about all the impulse
+ // response files loaded
+ int convolution_ir_files_updated; // set to true if the convolution_ir_files are changed. Cleared
+ // when the convolver has been initialised
+ int convolver_valid; // set to true if the convolver can be initialised
+ unsigned int convolution_threads; // number of threads in the convolver thread pool
float convolution_gain;
- int convolution_max_length;
+ double convolution_max_length_in_seconds;
#endif
- int loudness;
+ int loudness_enabled;
float loudness_reference_volume_db;
int alsa_use_hardware_mute;
double alsa_maximum_stall_time;
volatile int keep_dac_busy;
yna_type use_precision_timing; // defaults to no
-#if defined(CONFIG_DBUS_INTERFACE)
- dbus_session_type dbus_service_bus_type;
-#endif
-#if defined(CONFIG_MPRIS_INTERFACE)
- dbus_session_type mpris_service_bus_type;
-#endif
-
#ifdef CONFIG_METADATA_HUB
char *cover_art_cache_dir;
int retain_coverart;
// can't use IP numbers as they might be given to different devices
// can't get hold of MAC addresses.
// can't define the nvll linked list struct here
+ char *firmware_version;
+ // use these in information requests
+ char *model;
+ char *srcvers;
+ char *osvers;
#ifdef CONFIG_AIRPLAY_2
uint64_t airplay_features;
uint32_t airplay_statusflags;
+ char *airplay_fex; // a base64-encoded version of the airplay_features in little-endian form
char *airplay_device_id; // for the Bonjour advertisement and the GETINFO PList
char *airplay_pin; // non-NULL, 4 char PIN, if required for pairing
char *airplay_pi; // UUID in the Bonjour advertisement and the GETINFO Plist
+ char *airplay_pgid; // UUID in the txtAirPlay data sent on the event channel
+ char *airplay_psi; // type 4 fixed UUID
+ uint8_t airplay_pk[32]; // public key
+ char *pk_string;
char *nqptp_shared_memory_interface_name; // client name for nqptp service
#endif
int unfixable_error_reported; // only report once.
+
+ uint64_t eight_channel_layout; // non-zero means enabled and is a channel layout
+ uint64_t six_channel_layout; // non-zero means enabled and is a channel layout
+
+ int mixdown_enable;
+ uint64_t mixdown_channel_layout; // if mixdown_enable is true, 0 signifies auto, based on number
+ // of channels
+ int output_channel_mapping_enable; // 0 means off, non-zero means on. If on and
+ // output_channel_map_size is 0, use the device's channel map
+ const char *output_channel_map[8]; // names of the output channels
+ unsigned int output_channel_map_size; // number of output channels
+
+#if defined(CONFIG_DBUS_INTERFACE) || defined(CONFIG_MPRIS_INTERFACE)
+ GMainLoop *glib_worker_loop;
+ // for clean quitting from a dbus interface quit request (from the DBus or MPRIS interfaces)
+ int quit_requested_from_glib_mainloop; // remember that it initialised to zero.
+ dbus_message_bus_t dbus_default_message_bus;
+
+#if defined(CONFIG_DBUS_INTERFACE)
+ dbus_message_bus_t dbus_service_bus_type;
+#endif
+#if defined(CONFIG_MPRIS_INTERFACE)
+ dbus_message_bus_t mpris_service_bus_type;
+#endif
+
+#endif
+
} shairport_cfg;
uint32_t nctohl(const uint8_t *p); // read 4 characters from *p and do ntohl on them
#define inform(...) _inform(__FILE__, __LINE__, __VA_ARGS__)
#define debug_print_buffer(...) _debug_print_buffer(__FILE__, __LINE__, __VA_ARGS__)
+// Thanks to https://stackoverflow.com/a/1597129 for the inspiration for this identifier generation
+#define MAKEUNIQUEID2(x, y) x##y
+#define MADEID(x, y) MAKEUNIQUEID2(x, y)
+
+// do X once, and never again until the app is restarted
+#define once(X) \
+ static int MADEID(once_flag_, __LINE__) = 0; \
+ if (MADEID(once_flag_, __LINE__) == 0) { \
+ X; \
+ MADEID(once_flag_, __LINE__) = 1; \
+ }
+
+// do X once, and then ignore repeated calls until they stop for more than one second
+#define once_per_1_second_burst(X) \
+ static uint64_t MADEID(time_, __LINE__) = 0; \
+ int64_t MADEID(interval_, __LINE__) = get_absolute_time_in_ns() - MADEID(time_, __LINE__); \
+ if ((MADEID(time_, __LINE__) == 0) || (MADEID(interval_, __LINE__) > 1000000000L)) \
+ X; \
+ MADEID(time_, __LINE__) = get_absolute_time_in_ns()
+
+void getErrorText(char *destinationString, size_t destinationStringLength);
+
uint8_t *base64_dec(char *input, int *outlen);
char *base64_enc(uint8_t *input, int length);
+char *base64_encode_so(const unsigned char *data, size_t input_length, char *encoded_data,
+ size_t *output_length);
+
#define RSA_MODE_AUTH (0)
#define RSA_MODE_KEY (1)
uint8_t *rsa_apply(uint8_t *input, int inlen, int *outlen, int mode);
uint64_t get_monotonic_time_in_ns(void); // NTP-disciplined
// time at startup for debugging timing
-extern uint64_t ns_time_at_startup, ns_time_at_last_debug_message;
+// extern uint64_t ns_time_at_startup, ns_time_at_last_debug_message;
// this is for reading an unsigned 32 bit number, such as an RTP timestamp
extern config_t config_file_stuff;
extern int type_of_exit_cleanup; // normal, emergency, dbus requested...
+extern uint64_t minimum_dac_queue_size;
+
+int config_lookup_non_empty_string(const config_t *cfg, const char *path, const char **value);
int config_set_lookup_bool(config_t *cfg, char *where, int *dst);
+int check_string_or_list_setting(config_setting_t *setting, const char *item);
+int check_int_or_list_setting(config_setting_t *setting, const int item);
+
+unsigned int config_get_string_settings_as_string_array(config_setting_t *setting,
+ const char ***result);
+unsigned int config_get_int_settings_as_int_array(config_setting_t *setting, int **result);
void command_start(void);
void command_stop(void);
int mkpath(const char *path, mode_t mode);
-void shairport_shutdown();
+void sps_shutdown(type_of_exit_type shutdown_type); // TOE_normal, TOE_emergency, TOE_dbus
extern sigset_t pselect_sigset;
#define config_unlock pthread_mutex_unlock(&config.lock)
+int do_pthread_setname(pthread_t *restrict thread, const char *format, ...);
+
+int named_pthread_create(pthread_t *restrict thread, const pthread_attr_t *restrict attr,
+ void *(*start_routine)(void *), void *restrict arg, const char *format,
+ ...);
+int named_pthread_create_with_priority(pthread_t *thread, int priority,
+ void *(*start_routine)(void *), void *arg,
+ const char *format, ...);
+
extern pthread_mutex_t r64_mutex;
#define r64_lock pthread_mutex_lock(&r64_mutex)
char *get_version_string(); // mallocs a string space -- remember to free it afterwards
-int64_t generate_zero_frames(char *outp, size_t number_of_frames, sps_format_t format,
- int with_dither, int64_t random_number_in);
+int64_t generate_zero_frames(char *outp, size_t number_of_frames, int with_dither,
+ int64_t random_number_in, uint32_t encoded_output_format);
void malloc_cleanup(void *arg);
// analogous to strndup;
void *memdup(const void *mem, size_t size);
-// the difference between two unsigned 32-bit modulo values as a signed 32-bit result
-// now, if the two numbers are constrained to be within 2^(n-1)-1 of one another,
-// we can use their as a signed 2^n bit number which will be positive
-// if the first number is the same or "after" the second, and
-// negative otherwise
+int get_device_id(uint8_t *id, int int_length);
-int32_t mod32Difference(uint32_t a, uint32_t b);
+char *bnprintf(char *buffer, ssize_t max_bytes, const char *format, ...);
-int get_device_id(uint8_t *id, int int_length);
+#ifdef CONFIG_CONVOLUTION
+
+/* Parse comma-separated filenames with optional quotes from the input string
+ * Returns array of ir_file_info_t structs (caller must free both array and filenames)
+ * count is set to number of filenames found
+ * Returns NULL on error
+ */
+ir_file_info_t *parse_ir_filenames(const char *input, unsigned int *file_count);
+// Access: files[i].filename, files[i].samplerate, files[i].channels
+
+/* Do a quick sanity check on the files -- see if they can be opened as sound files */
+void sanity_check_ir_files(const int option_print_level, ir_file_info_t *files, unsigned int count);
+
+/* Free the array returned by parse_filenames */
+void free_ir_filenames(ir_file_info_t *files, unsigned int file_count);
+
+#endif
#ifdef CONFIG_USE_GIT_VERSION_STRING
extern char git_version_string[];
-# -*- Autoconf -*-
# Process this file with autoconf to produce a configure script.
AC_PREREQ([2.50])
-AC_INIT([shairport-sync], [4.3.7], [4265913+mikebrady@users.noreply.github.com])
+AC_INIT([shairport-sync], [5.0], [4265913+mikebrady@users.noreply.github.com])
+: ${CFLAGS="-O3"}
+: ${CXXFLAGS="-O3"}
AM_INIT_AUTOMAKE([subdir-objects])
AC_CONFIG_SRCDIR([shairport.c])
AC_CONFIG_HEADERS([config.h])
AC_PROG_RANLIB
-
-AC_CHECK_PROGS([GIT], [git])
-if test -n "$GIT" && test -e "$srcdir/.git/index" ; then
- AC_DEFINE([CONFIG_USE_GIT_VERSION_STRING], 1, [Use the version string produced by running 'git describe --dirty'.])
-fi
-AM_CONDITIONAL([USE_GIT_VERSION], [test -n "$GIT" && test -e "$srcdir/.git/index" ])
+AC_SUBST([HOME], [$HOME])
# Derived from the Avahi configure.ac file
# Specifying the OS type, defaulting to linux.
AC_CHECK_TOOL(AR, ar, :)
AC_PROG_INSTALL
+# see if we have git -- not fatal if we don't
+AC_CHECK_PROGS([GIT], [git])
+if test -n "$GIT" && test -e "$srcdir/.git/index" ; then
+ AC_DEFINE([CONFIG_USE_GIT_VERSION_STRING], 1, [Use the version string produced by running 'git describe --dirty'.])
+fi
+AM_CONDITIONAL([USE_GIT_VERSION], [test -n "$GIT" && test -e "$srcdir/.git/index" ])
+
+# see if we have xmltoman -- not fatal if we don't
+AC_CHECK_PROGS([XMLTOMAN], [xmltoman])
+AM_CONDITIONAL([USE_XMLTOMAN], [ test -n "$XMLTOMAN" ])
+
PKG_PROG_PKG_CONFIG([0.9.0])
# Checks for libraries.
fi
AM_CONDITIONAL([USE_STDOUT], [test "x$with_stdout" = "xyes"])
-AC_ARG_WITH([pipe],[AS_HELP_STRING([--with-pipe],[include the pipe audio back end])])
+AC_ARG_WITH([pipe],[AS_HELP_STRING([--with-pipe],[build the unix pipe backend (not the PipeWire backend!) into Shairport Sync])])
if test "x$with_pipe" = "xyes" ; then
AC_DEFINE([CONFIG_PIPE], 1, [Include an audio backend to output to a unix pipe.])
fi
# Check to see if we should include the System V initscript
-AC_ARG_WITH([systemv],[AS_HELP_STRING([--with-systemv],[install a System V startup script during a make install])])
-AM_CONDITIONAL([INSTALL_SYSTEMV], [test "x$with_systemv" = "xyes"])
+AC_ARG_WITH([systemv-startup],[AS_HELP_STRING([--with-systemv-startup],[install a System V startup script during a make install])])
+AM_CONDITIONAL([INSTALL_SYSTEMV_STARTUP], [test "x$with_systemv_startup" = "xyes"])
# Check to see if we should include the systemd stuff to define it as a service
-AC_ARG_WITH([systemd],[AS_HELP_STRING([--with-systemd],[install a systemd startup script during a make install])])
-AM_CONDITIONAL([INSTALL_SYSTEMD], [test "x$with_systemd" = "xyes"])
+AC_ARG_WITH([systemd-startup],[AS_HELP_STRING([--with-systemd-startup],[install systemd startup scripts to start Shairport Sync after system startup or after user login])])
+AM_CONDITIONAL([INSTALL_SYSTEMD_STARTUP], [test "x$with_systemd_startup" = "xyes"])
-AC_ARG_WITH([freebsd-service],[AS_HELP_STRING([--with-freebsd-service],[install a FreeBSD startup script during a make install])])
-AM_CONDITIONAL([INSTALL_FREEBSD_SERVICE], [test "x$with_freebsd_service" = "xyes"])
+AC_ARG_WITH([freebsd-startup],[AS_HELP_STRING([--with-freebsd-startup],[install a FreeBSD startup script to start Shairport Sync after system startup])])
+AM_CONDITIONAL([INSTALL_FREEBSD_STARTUP], [test "x$with_freebsd_startup" = "xyes"])
-AC_ARG_WITH([cygwin-service],[AS_HELP_STRING([--with-cygwin-service],[install a CYGWIN config script during a make install])])
-AM_CONDITIONAL([INSTALL_CYGWIN_SERVICE], [test "x$with_cygwin_service" = "xyes"])
+AC_ARG_WITH([cygwin-startup],[AS_HELP_STRING([--with-cygwin-startup],[install a CYGWIN startup script to start Shairport Sync after startup])])
+AM_CONDITIONAL([INSTALL_CYGWIN_STARTUP], [test "x$with_cygwin_startup" = "xyes"])
AC_ARG_WITH([external-mdns],[AS_HELP_STRING([--with-external-mdns],[support the use of 'avahi-publish-service' or 'mDNSPublish' to advertise the service on Bonjour/ZeroConf])])
if test "x$with_external_mdns" = xyes ; then
AC_MSG_RESULT(include external mdns support)
- AC_DEFINE([CONFIG_EXTERNAL_MDNS], 1, [Use 'avahi-publish-service' or 'mDNSPublish' to advertise.])
+ AC_DEFINE([CONFIG_EXTERNAL_MDNS], 1, [Use 'avahi-publish-service' or 'mDNSPublish' to advertise the service on Bonjour/ZeroConf])
fi
AM_CONDITIONAL([USE_EXTERNAL_MDNS], [test "x$with_external_mdns" = "xyes" ])
AC_ARG_WITH([configfiles],[AS_HELP_STRING([--with-configfiles],[install configuration files during a make install])], ,[with_configfiles=yes])
AM_CONDITIONAL([INSTALL_CONFIG_FILES], [test "x$with_configfiles" = "xyes"])
-# Look for Apple ALAC flag
-AC_ARG_WITH(apple-alac,[AS_HELP_STRING([--with-apple-alac],[include support for the Apple ALAC decoder])])
-if test "x${with_apple_alac}" = "xyes" ; then
- AC_DEFINE([CONFIG_APPLE_ALAC], 1, [Include support for using the Apple ALAC Decoder])
- if test "x${with_pkg_config}" = xyes ; then
- PKG_CHECK_MODULES([alac], [alac], [LIBS="${alac_LIBS} ${LIBS}"], AC_MSG_ERROR(Apple ALAC Decoder support requires the ALAC library. See https://github.com/mikebrady/alac.))
- else
- AC_CHECK_LIB([alac], [BitBufferInit], , AC_MSG_ERROR(Apple ALAC Decoder support requires the ALAC library. See https://github.com/mikebrady/alac.))
- fi
-fi
-AM_CONDITIONAL([USE_APPLE_ALAC], [test "x${with_apple_alac}" = "xyes"])
-
# Look for piddir flag
AC_ARG_WITH(piddir, [AS_HELP_STRING([--with-piddir=<pathname>],[Specify a pathname to a directory in which to write the PID file.])])
if test "x${with_piddir}" != "x" ; then
AC_DEFINE([CONFIG_AVAHI], 1, [Include Avahi-based mDNS support.])
AC_CHECK_LIB([avahi-client], [avahi_client_new], , AC_MSG_ERROR(Avahi support requires the avahi-client library!))
AC_CHECK_LIB([avahi-common],[avahi_strerror], , AC_MSG_ERROR(Avahi support requires the avahi-common library!))
- AC_CONFIG_FILES([scripts/shairport-sync.service-avahi])
-else
- AC_CONFIG_FILES([scripts/shairport-sync.service])
+ systemd_after_args="${systemd_after_args} avahi-daemon.service"
+ systemd_requires_args="${systemd_requires_args} avahi-daemon.service"
fi
AM_CONDITIONAL([USE_AVAHI], [test "x$with_avahi" = "xyes"])
AC_ARG_WITH(sndio, [AS_HELP_STRING([--with-sndio],[choose SNDIO API support])])
if test "x$with_sndio" = "xyes" ; then
AC_DEFINE([CONFIG_SNDIO], 1, [Include a sndio-compatible audio backend.])
- AC_CHECK_LIB([sndio], [sio_open], , AC_MSG_ERROR(SNDIO support requires the sndio library -- libsndio-dev suggested))
+ AC_CHECK_LIB([sndio], [sio_open], , AC_MSG_ERROR(SNDIO support requires the sndio library -- sndio suggested for FreeBSD and libsndio-dev for Debian))
fi
AM_CONDITIONAL([USE_SNDIO], [test "x$with_sndio" = "xyes"])
AM_CONDITIONAL([USE_SOUNDIO], [test "x$with_soundio" = "xyes"])
# Look for pulseaudio flag
-AC_ARG_WITH(pa, [AS_HELP_STRING([--with-pa],[choose PulseAudio support.])])
-if test "x$with_pa" = "xyes" ; then
- AC_DEFINE([CONFIG_PA], 1, [Include PulseAudio support.])
+AC_ARG_WITH(pulseaudio, [AS_HELP_STRING([--with-pulseaudio],[build the PulseAudio backend into Shairport Sync])])
+if test "x$with_pulseaudio" = "xyes" ; then
+ AC_DEFINE([CONFIG_PULSEAUDIO], 1, [Include PulseAudio support.])
if test "x${with_pkg_config}" = xyes ; then
PKG_CHECK_MODULES(
[PULSEAUDIO], [libpulse >= 0.9.2],
- [LIBS="${PULSEAUDIO_LIBS} ${LIBS}"],[AC_MSG_ERROR(PulseAudio support requires the libpulse-dev library!)])
+ [LIBS="${PULSEAUDIO_LIBS} ${LIBS}"],[AC_MSG_ERROR(PulseAudio support requires the libpulse library -- libpulse-dev suggested!)])
else
- AC_CHECK_LIB([pulse-simple], [pa_simple_new], , AC_MSG_ERROR(PulseAudio support requires the libpulse library!))
- AC_CHECK_LIB([pulse], [pa_stream_peek], , AC_MSG_ERROR(PulseAudio support requires the libpulse-dev library.))
+ AC_CHECK_LIB([pulse-simple], [pa_simple_new], , AC_MSG_ERROR(PulseAudio support requires the libpulse library -- libpulse-dev suggested!))
+ AC_CHECK_LIB([pulse], [pa_stream_peek], , AC_MSG_ERROR(PulseAudio support requires the libpulse library -- libpulse-dev suggested!))
fi
fi
-AM_CONDITIONAL([USE_PA], [test "x$with_pa" = "xyes"])
+AM_CONDITIONAL([USE_PULSEAUDIO], [test "x$with_pulseaudio" = "xyes"])
# Look for pipewire flag
-AC_ARG_WITH(pw, [AS_HELP_STRING([--with-pw],[choose Pipewire support.])])
-if test "x$with_pw" = "xyes" ; then
- AC_DEFINE([CONFIG_PW], 1, [Include Pipewire support.])
+AC_ARG_WITH(pipewire, [AS_HELP_STRING([--with-pipewire],[build the Pipewire backend into Shairport Sync.])])
+if test "x$with_pipewire" = "xyes" ; then
+ AC_DEFINE([CONFIG_PIPEWIRE], 1, [Include Pipewire support.])
if test "x${with_pkg_config}" = xyes ; then
PKG_CHECK_MODULES(
- [PIPEWIRE], [libpipewire-0.3 >= 0.3.24],
+ PIPEWIRE, libpipewire-0.3 >= 1.0.5,
[CFLAGS="${PIPEWIRE_CFLAGS} ${CFLAGS} -Wno-missing-field-initializers" LIBS="${PIPEWIRE_LIBS} ${LIBS}"],
- [AC_MSG_ERROR(Pipewire support requires the libpipewire library -- libpipewire-0.3-dev suggested!)])
+ [AC_MSG_ERROR(Pipewire support requires libpipewire-0.3 version 1.0.5 or later -- libpipewire-0.3-dev suggested!)])
else
- AC_CHECK_LIB([pipewire], [pw_stream_queue_buffer], , AC_MSG_ERROR(Pipewire support requires the libpipewire library -- libpipewire-0.3-dev suggested!))
+ AC_CHECK_LIB([pipewire], [pw_stream_queue_buffer], , AC_MSG_ERROR(Pipewire support requires libpipewire-0.3 version 1.0.5 or later -- libpipewire-0.3-dev suggested!))
fi
fi
-AM_CONDITIONAL([USE_PW], [test "x$with_pw" = "xyes"])
+AM_CONDITIONAL([USE_PIPEWIRE], [test "x$with_pipewire" = "xyes"])
# Look for Convolution flag
AC_ARG_WITH(convolution, [AS_HELP_STRING([--with-convolution],[choose audio DSP convolution support])])
else
AC_CHECK_LIB([sndfile], [sf_open], , AC_MSG_ERROR(Convolution support requires the sndfile library -- libsndfile1-dev suggested!))
fi
+
fi
AM_CONDITIONAL([USE_CONVOLUTION], [test "x$with_convolution" = "xyes"])
if test "x$with_mqtt_client" = "xyes" ; then
AC_DEFINE([CONFIG_MQTT], 1, [Include a client for MQTT, the Message Queuing Telemetry Transport protocol])
AC_CHECK_LIB([mosquitto], [mosquitto_lib_init], , AC_MSG_ERROR(MQTT support requires the mosquitto library -- libmosquitto-dev suggested!))
+ systemd_after_args="${systemd_after_args} mosquitto.service"
+ systemd_requires_args="${systemd_requires_args} mosquitto.service"
fi
AM_CONDITIONAL([USE_MQTT], [test "x$with_mqtt_client" = "xyes"])
AC_ARG_WITH(airplay-2, [AS_HELP_STRING([--with-airplay-2],[Build for AirPlay 2])])
if test "x$with_airplay_2" = "xyes" ; then
AC_DEFINE([CONFIG_AIRPLAY_2], 1, [Build for AirPlay 2])
+ AC_CHECK_PROG(PLISTUTIL_CHECK,plistutil,yes)
+ AS_IF([test x"$PLISTUTIL_CHECK" != x"yes"], [AC_MSG_ERROR([plistutil can not be found. Please install plistutil for building for AirPlay 2. Maybe it's in the apt package libplist-utils.])])
AC_CHECK_PROG(XXD_CHECK,xxd,yes)
AS_IF([test x"$XXD_CHECK" != x"yes"], [AC_MSG_ERROR([xxd can not be found. Please install xxd for building for AirPlay 2.])])
LIBPLIST_PACKAGE=libplist
[AC_MSG_ERROR([Airplay 2 support requires libgcrypt -- libgcrypt-dev suggested])]
)
])
+ AC_CHECK_LIB([uuid],[uuid_generate], [], [AC_MSG_ERROR([AirPlay 2 support requires the uuid library -- uuid-dev suggested])])
+fi
+AM_CONDITIONAL([USE_AIRPLAY_2], [test "x$with_airplay_2" = "xyes"])
+
+# Look for ffmpeg flag
+
+AC_ARG_WITH(ffmpeg, [AS_HELP_STRING([--with-ffmpeg],[Build with FFMPEG support (recommended)])])
+if test "x$with_ffmpeg" = "xyes" -o "x$with_airplay_2" = "xyes" ; then
+ using_ffmpeg=true
+ AC_DEFINE([CONFIG_FFMPEG], 1, [Include support for FFmpeg decoders, resampler and utilities.])
PKG_CHECK_MODULES(
[libavutil], [libavutil],
[CFLAGS="${libavutil_CFLAGS} ${CFLAGS}" LIBS="${libavutil_LIBS} ${LIBS}"],
[AC_MSG_ERROR([AirPlay 2 support requires the uuid library -- uuid-dev suggested])]
)]
)
+else
+ using_ffmpeg=false
fi
-AM_CONDITIONAL([USE_AIRPLAY_2], [test "x$with_airplay_2" = "xyes"])
-if test "x${with_systemd}" = xyes ; then
+if test $using_ffmpeg = false ; then
+ AC_DEFINE([CONFIG_HAMMERTON], 1, [Include support for using the ALAC Decoder by David Hammerton (deprecated)])
+fi
+AM_CONDITIONAL([USE_HAMMERTON], [test $using_ffmpeg = false])
+
+# Look for Apple ALAC flag
+AC_ARG_WITH(apple-alac,[AS_HELP_STRING([--with-apple-alac],[include support for the Apple ALAC decoder (deprecated)])])
+if test "x${with_apple_alac}" = "xyes" ; then
+ if test $using_ffmpeg = true ; then
+ AC_MSG_RESULT([Note: the FFMPEG decoder, included in the build, supersedes the Apple ALAC decoder, which is deprecated. Consider omitting the Apple ALAC decoder.])
+ fi
+ AC_DEFINE([CONFIG_APPLE_ALAC], 1, [Include support for using the Apple ALAC Decoder (deprecated)])
+ if test "x${with_pkg_config}" = xyes ; then
+ PKG_CHECK_MODULES([alac], [alac], [LIBS="${alac_LIBS} ${LIBS}"], AC_MSG_ERROR(Apple ALAC Decoder support requires the ALAC library. See https://github.com/mikebrady/alac.))
+ else
+ AC_CHECK_LIB([alac], [BitBufferInit], , AC_MSG_ERROR(Apple ALAC Decoder support requires the ALAC library. See https://github.com/mikebrady/alac.))
+ fi
+fi
+AM_CONDITIONAL([USE_APPLE_ALAC], [test "x${with_apple_alac}" = "xyes"])
+
+if test "x${with_systemd_startup}" = xyes ; then
# Find systemd unit dir
AC_ARG_WITH([systemdsystemunitdir],
[AS_HELP_STRING([--with-systemdsystemunitdir=DIR], [Directory for systemd service files])],,
def_systemdsystemunitdir=$($PKG_CONFIG --variable=systemdsystemunitdir systemd)
AS_IF([test "x$def_systemdsystemunitdir" = "x"],
- [AS_IF([test "x$with_systemdsystemunitdir" = "xyes"],
- [AC_MSG_ERROR([systemd support requested but pkg-config unable to query systemd package])])
+ [AS_IF([test "x$with_systemdsystemunitdir" = "xyes" -o "x$with_systemdsystemunitdir" = "xauto"],
+ [AC_MSG_ERROR([pkg-config unable to query systemd package -- install systemd-dev package suggested])])
with_systemdsystemunitdir=no],
[with_systemdsystemunitdir="$def_systemdsystemunitdir"])])
AS_IF([test "x$with_systemdsystemunitdir" != "xno"],
AC_FUNC_FORK
AC_CHECK_FUNCS([atexit clock_gettime gethostname inet_ntoa memchr memmove memset mkfifo pow select socket stpcpy strcasecmp strchr strdup strerror strstr strtol strtoul])
-# Note -- there are AC_CONFIG_FILES directives further back, conditional on Avahi
-AC_CONFIG_FILES([Makefile])
+if test "x${systemd_after_args}" != x ; then
+ systemd_after_args="After=${systemd_after_args}"
+fi
+
+if test "x${systemd_requires_args}" != x ; then
+ systemd_requires_args="Requires=${systemd_requires_args}"
+fi
+
+AC_SUBST([SYSTEMD_AFTER_ARGS], [${systemd_after_args}])
+AC_SUBST([SYSTEMD_REQUIRES_ARGS], [${systemd_requires_args}])
+
+# The man directory will not be recursively made unless you set the SUBDIR in Makefile.am.
+# But leave this as is to enable a distclean to be done.
+
+AC_CONFIG_FILES([Makefile man/Makefile])
+
+AC_CONFIG_FILES([scripts/shairport-sync.service])
AC_CONFIG_FILES([scripts/shairport-sync],[chmod +x scripts/shairport-sync])
AC_OUTPUT
/*
* DACP protocol handler. This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2017 -- 2020
+ * Copyright (c) Mike Brady 2017--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
};
void *response_realloc(__attribute__((unused)) void *opaque, void *ptr, int size) {
- void *t = realloc(ptr, size);
- if ((t == NULL) && (size != 0))
- debug(1, "Response realloc of size %d failed!", size);
+ void *t = NULL;
+ if (size == 0) {
+ // debug(1, "response_realloc of size 0 to ptr %" PRIxPTR " requested, and a pointer of NULL was
+ // returned.", size, ptr);
+ free(ptr);
+ } else {
+ if (ptr == NULL) {
+ t = malloc(size);
+ // debug(1, "response_realloc of size %d to a NULL pointer was requested, and a pointer of %"
+ // PRIxPTR " was returned.", size, t);
+ } else {
+ t = realloc(ptr, size);
+ }
+ if (t == NULL)
+ debug(1, "response_realloc of size %d to ptr %" PRIxPTR " failed!", size, (uintptr_t) ptr);
+ }
return t;
}
ssize_t wresp = send(sockfd, message, strlen(message), 0);
if (wresp == -1) {
char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ getErrorText((char *)errorstring, sizeof(errorstring));
debug(2, "dacp_send_command: write error %d: \"%s\".", errno, (char *)errorstring);
struct linger so_linger;
so_linger.l_onoff = 1; // "true"
response.body = malloc(2048); // it can resize this if necessary
response.malloced_size = 2048;
- pthread_cleanup_push(malloc_cleanup, response.body);
+ pthread_cleanup_push(malloc_cleanup, &response.body);
struct http_roundtripper rt;
http_init(&rt, responseFuncs, &response);
if (ndata <= 0) {
if (ndata == -1) {
char errorstring[1024];
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wunused-result"
strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(2, "dacp_send_command: receiving error %d: \"%s\".", errno,
- (char *)errorstring);
+#pragma GCC diagnostic pop
+ debug(2, "dacp_send_command: receiving error %d: \"%s\".", errno, errorstring);
struct linger so_linger;
so_linger.l_onoff = 1; // "true"
so_linger.l_linger = 0;
void dacp_monitor_port_update_callback(char *dacp_id, uint16_t port) {
debug_mutex_lock(&dacp_server_information_lock, 500000, 2);
- debug(2,
+ debug(3,
"dacp_monitor_port_update_callback with Remote ID \"%s\", target ID \"%s\" and port "
"number %d.",
dacp_id, dacp_server.dacp_id, port);
}
void *dacp_monitor_thread_code(__attribute__((unused)) void *na) {
- int scan_index = 0;
+ // #include <syscall.h>
+ // debug(1, "dacp_monitor_thread_code PID %d", syscall(SYS_gettid));
+ // int scan_index = 0;
int always_use_revision_number_1 = 0;
// char server_reply[10000];
// debug(1, "DACP monitor thread started.");
mdns_dacp_monitor_set_id(dacp_server.dacp_id);
}
} else {
- scan_index++;
+ // scan_index++;
// debug(1,"DACP Scan Result: %d.", result);
if ((result == 200) || (result == 400)) {
memset(&dacp_server, 0, sizeof(dacp_server_record));
- pthread_create(&dacp_monitor_thread, NULL, dacp_monitor_thread_code, NULL);
+ named_pthread_create(&dacp_monitor_thread, NULL, dacp_monitor_thread_code, NULL, "dacp");
dacp_monitor_initialised = 1;
}
int32_t active_speakers = 0;
for (i = 0; i < speaker_count; i++) {
if (speaker_info[i].speaker_number == machine_number) {
- debug(2, "Our speaker number found: %ld with relative volume.", machine_number,
+ debug(2, "Our speaker number found: %" PRId64 " with relative volume %" PRId32 ".", machine_number,
speaker_info[i].volume);
}
if (speaker_info[i].active == 1) {
/*
* This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2018 -- 2022
+ * Copyright (c) Mike Brady 2018--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
ShairportSync *shairportSyncSkeleton;
+static GBusType dbus_bus_type = G_BUS_TYPE_SYSTEM; // default is the dbus system message bus
int service_is_running = 0;
ShairportSyncDiagnostics *shairportSyncDiagnosticsSkeleton = NULL;
ShairportSyncRemoteControl *shairportSyncRemoteControlSkeleton = NULL;
ShairportSyncAdvancedRemoteControl *shairportSyncAdvancedRemoteControlSkeleton = NULL;
-guint ownerID = 0;
+static guint ownerID = 0;
void dbus_metadata_watcher(struct metadata_bundle *argc, __attribute__((unused)) void *userdata) {
char response[100];
}
}
+ if (argc->output_format) {
+ // debug(1, "Check output format");
+ th = shairport_sync_get_output_format(shairportSyncSkeleton);
+ if ((th == NULL) || (strcasecmp(th, argc->output_format) != 0)) {
+ // debug(1, "Output format string should be changed");
+ shairport_sync_set_output_format(shairportSyncSkeleton, argc->output_format);
+ }
+ }
+
+ if (argc->source_format) {
+ // debug(1, "Check source format");
+ th = shairport_sync_get_source_format(shairportSyncSkeleton);
+ if ((th == NULL) || (strcasecmp(th, argc->source_format) != 0)) {
+ // debug(1, "Source format string should be changed");
+ shairport_sync_set_source_format(shairportSyncSkeleton, argc->source_format);
+ }
+ }
+
switch (argc->player_state) {
case PS_NOT_AVAILABLE:
shairport_sync_remote_control_set_player_state(shairportSyncRemoteControlSkeleton,
}
// Build the metadata array
- debug(2, "Build metadata");
+ // debug(2, "Build metadata");
GVariantBuilder *dict_builder = g_variant_builder_new(G_VARIANT_TYPE("a{sv}"));
// Add in the artwork URI if it exists.
// debug(1, "\"notify_elapsed_time_callback\" called.");
if (shairport_sync_diagnostics_get_elapsed_time(skeleton)) {
config.debugger_show_elapsed_time = 1;
- debug(1, ">> start including elapsed time in logs");
+ debug(1, ">> include elapsed time in logs");
} else {
config.debugger_show_elapsed_time = 0;
- debug(1, ">> stop including elapsed time in logs");
+ debug(1, ">> do not include elapsed time in logs");
}
return TRUE;
}
// debug(1, "\"notify_delta_time_callback\" called.");
if (shairport_sync_diagnostics_get_delta_time(skeleton)) {
config.debugger_show_relative_time = 1;
- debug(1, ">> start including delta time in logs");
+ debug(1, ">> include delta time in logs");
} else {
config.debugger_show_relative_time = 0;
- debug(1, ">> stop including delta time in logs");
+ debug(1, ">> do not include delta time in logs");
}
return TRUE;
}
// debug(1, "\"notify_file_and_line_callback\" called.");
if (shairport_sync_diagnostics_get_file_and_line(skeleton)) {
config.debugger_show_file_and_line = 1;
- debug(1, ">> start including file and line in logs");
+ debug(1, ">> include file and line in logs");
} else {
config.debugger_show_file_and_line = 0;
- debug(1, ">> stop including file and line in logs");
+ debug(1, ">> do not include file and line in logs");
}
return TRUE;
}
__attribute__((unused)) gpointer user_data) {
// debug(1, "\"notify_statistics_callback\" called.");
if (shairport_sync_diagnostics_get_statistics(skeleton)) {
- debug(1, ">> start logging statistics");
+ debug(1, ">> log statistics");
if (config.statistics_requested == 0)
statistics_row = 0; // redraw the header line
config.statistics_requested = 1;
} else {
- debug(1, ">> stop logging statistics");
+ debug(1, ">> do not log statistics");
config.statistics_requested = 0;
}
return TRUE;
gint th = shairport_sync_diagnostics_get_verbosity(skeleton);
if ((th >= 0) && (th <= 3)) {
if (th == 0)
- debug(1, ">> log verbosity set to %d.", th);
- if (((debuglev == 0) && (th != 0)) || ((debuglev != 0) && (th == 0)))
+ debug(1, ">> set log verbosity to %d.", th);
+ if (((debug_level() == 0) && (th != 0)) || ((debug_level() != 0) && (th == 0)))
statistics_row = 0; // if the debug level changes, redraw the header line
- debuglev = th;
- debug(1, ">> log verbosity set to %d.", th);
+ set_debug_level(th);
+ debug(1, ">> set log verbosity to %d.", th);
} else {
debug(1, ">> invalid log verbosity: %d. Ignored.", th);
- shairport_sync_diagnostics_set_verbosity(skeleton, debuglev);
+ shairport_sync_diagnostics_set_verbosity(skeleton, debug_level());
}
return TRUE;
}
__attribute__((unused)) gpointer user_data) {
// debug(1, "\"notify_disable_standby_callback\" called.");
if (shairport_sync_get_disable_standby(skeleton)) {
- debug(1, ">> activating disable standby");
+ debug(1, ">> disable standby mode");
config.keep_dac_busy = 1;
} else {
- debug(1, ">> deactivating disable standby");
+ debug(1, ">> do not disable standby mode");
config.keep_dac_busy = 0;
}
return TRUE;
}
#ifdef CONFIG_CONVOLUTION
-gboolean notify_convolution_callback(ShairportSync *skeleton,
- __attribute__((unused)) gpointer user_data) {
+gboolean notify_convolution_enabled_callback(ShairportSync *skeleton,
+ __attribute__((unused)) gpointer user_data) {
// debug(1, "\"notify_convolution_callback\" called.");
- if (shairport_sync_get_convolution(skeleton)) {
- debug(1, ">> activating convolution");
- config.convolution = 1;
- config.convolver_valid =
- convolver_init(config.convolution_ir_file, config.convolution_max_length);
+ if (shairport_sync_get_convolution_enabled(skeleton)) {
+ debug(1, ">> activate convolution impulse response filter");
+ config.convolution_enabled = 1;
} else {
- debug(1, ">> deactivating convolution");
- config.convolution = 0;
+ debug(1, ">> deactivate convolution impulse response filter");
+ config.convolution_enabled = 0;
+ convolver_clear_state();
}
return TRUE;
}
#else
-gboolean notify_convolution_callback(__attribute__((unused)) ShairportSync *skeleton,
- __attribute__((unused)) gpointer user_data) {
+gboolean notify_convolution_enabled_callback(__attribute__((unused)) ShairportSync *skeleton,
+ __attribute__((unused)) gpointer user_data) {
+ warn(">> Convolution support is not built in to this build of Shairport Sync.");
+ return TRUE;
+}
+#endif
+
+#ifdef CONFIG_CONVOLUTION
+gboolean notify_convolution_maximum_length_in_seconds_callback(ShairportSync *skeleton,
+ __attribute__((unused))
+ gpointer user_data) {
+
+ gdouble th = shairport_sync_get_convolution_maximum_length_in_seconds(skeleton);
+ if ((th >= 0.0) && (th <= 15.0)) {
+ debug(1, ">> set convolution maximum length in seconds to %f.", th);
+ config.convolution_max_length_in_seconds = th;
+ } else {
+ debug(1, ">> invalid convolution maximum length in seconds: %f. Ignored.", th);
+ shairport_sync_set_convolution_maximum_length_in_seconds(
+ skeleton, config.convolution_max_length_in_seconds);
+ }
+ return TRUE;
+}
+#else
+gboolean notify_convolution_maximum_length_in_seconds_callback(__attribute__((unused))
+ ShairportSync *skeleton,
+ __attribute__((unused))
+ gpointer user_data) {
warn(">> Convolution support is not built in to this build of Shairport Sync.");
return TRUE;
}
__attribute__((unused)) gpointer user_data) {
gdouble th = shairport_sync_get_convolution_gain(skeleton);
- if ((th <= 0.0) && (th >= -100.0)) {
- debug(1, ">> setting convolution gain to %f.", th);
+ if ((th <= 18.0) && (th >= -60.0)) {
+ debug(1, ">> set convolution gain to %f.", th);
config.convolution_gain = th;
} else {
debug(1, ">> invalid convolution gain: %f. Ignored.", th);
}
#endif
#ifdef CONFIG_CONVOLUTION
-gboolean notify_convolution_impulse_response_file_callback(ShairportSync *skeleton,
- __attribute__((unused))
- gpointer user_data) {
- char *th = (char *)shairport_sync_get_convolution_impulse_response_file(skeleton);
- if (config.convolution_ir_file)
- free(config.convolution_ir_file);
- config.convolution_ir_file = strdup(th);
- debug(1, ">> setting configuration impulse response filter file to \"%s\".",
- config.convolution_ir_file);
- config.convolver_valid =
- convolver_init(config.convolution_ir_file, config.convolution_max_length);
+gboolean notify_convolution_impulse_response_files_callback(ShairportSync *skeleton,
+ __attribute__((unused))
+ gpointer user_data) {
+ char *th = (char *)shairport_sync_get_convolution_impulse_response_files(skeleton);
+ if (th != NULL) {
+ debug(1, ">> freeing current configuration impulse response filter files.");
+ free_ir_filenames(config.convolution_ir_files, config.convolution_ir_file_count);
+ config.convolution_ir_files = NULL;
+ config.convolution_ir_file_count = 0;
+
+ config.convolution_ir_files = parse_ir_filenames(th, &config.convolution_ir_file_count);
+ sanity_check_ir_files(1, config.convolution_ir_files, config.convolution_ir_file_count);
+ debug(1, ">> setting %d configuration impulse response filter%s",
+ config.convolution_ir_file_count, config.convolution_ir_file_count == 1 ? "" : "s");
+ config.convolution_ir_files_updated = 1;
+ }
return TRUE;
}
#else
-gboolean notify_convolution_impulse_response_file_callback(__attribute__((unused))
- ShairportSync *skeleton,
- __attribute__((unused))
- gpointer user_data) {
+gboolean notify_convolution_impulse_response_files_callback(__attribute__((unused))
+ ShairportSync *skeleton,
+ __attribute__((unused))
+ gpointer user_data) {
__attribute__((unused)) char *th =
- (char *)shairport_sync_get_convolution_impulse_response_file(skeleton);
+ (char *)shairport_sync_get_convolution_impulse_response_files(skeleton);
return TRUE;
}
#endif
-gboolean notify_loudness_callback(ShairportSync *skeleton,
- __attribute__((unused)) gpointer user_data) {
+gboolean notify_loudness_enabled_callback(ShairportSync *skeleton,
+ __attribute__((unused)) gpointer user_data) {
// debug(1, "\"notify_loudness_callback\" called.");
- if (shairport_sync_get_loudness(skeleton)) {
- debug(1, ">> activating loudness");
- config.loudness = 1;
+ if (shairport_sync_get_loudness_enabled(skeleton)) {
+ debug(1, ">> activate loudness filter");
+ config.loudness_enabled = 1;
} else {
- debug(1, ">> deactivating loudness");
- config.loudness = 0;
+ debug(1, ">> deactivate loudness filter");
+ config.loudness_enabled = 0;
}
return TRUE;
}
__attribute__((unused)) gpointer user_data) {
gdouble th = shairport_sync_get_loudness_threshold(skeleton);
if ((th <= 0.0) && (th >= -100.0)) {
- debug(1, ">> setting loudness threshold to %f.", th);
+ debug(1, ">> set loudness threshold to %f.", th);
config.loudness_reference_volume_db = th;
} else {
debug(1, ">> invalid loudness threshold: %f. Ignored.", th);
__attribute__((unused)) gpointer user_data) {
gdouble dt = shairport_sync_get_drift_tolerance(skeleton);
if ((dt >= 0.0) && (dt <= 2.0)) {
- debug(1, ">> setting drift tolerance to %f seconds.", dt);
+ debug(1, ">> set drift tolerance to %f seconds.", dt);
config.tolerance = dt;
} else {
debug(1, ">> invalid drift tolerance: %f seconds. Ignored.", dt);
__attribute__((unused)) gpointer user_data) {
gdouble iv = shairport_sync_get_volume(skeleton);
if (((iv >= -30.0) && (iv <= 0.0)) || (iv == -144.0)) {
- debug(2, ">> setting volume to %7.4f.", iv);
+ debug(2, ">> set volume to %7.4f.", iv);
pthread_rwlock_rdlock(&principal_conn_lock); // don't let the principal_conn be changed
pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
}
pthread_cleanup_pop(1); // release the principal_conn lock
config.airplay_volume = iv;
- config.last_access_to_volume_info_time = get_absolute_time_in_ns();
} else {
debug(1, ">> invalid volume: %f. Ignored.", iv);
shairport_sync_set_volume(skeleton, config.airplay_volume);
gboolean notify_alacdecoder_callback(ShairportSync *skeleton,
__attribute__((unused)) gpointer user_data) {
char *th = (char *)shairport_sync_get_alacdecoder(skeleton);
-#ifdef CONFIG_APPLE_ALAC
- if (strcasecmp(th, "hammerton") == 0)
- config.use_apple_decoder = 0;
- else if (strcasecmp(th, "apple") == 0)
- config.use_apple_decoder = 1;
+
+ if ((strcasecmp(th, "hammerton") == 0) &&
+ ((config.decoders_supported & (1 << decoder_hammerton)) != 0))
+ config.decoder_in_use = 1 << decoder_hammerton;
+ else if ((strcasecmp(th, "apple") == 0) &&
+ ((config.decoders_supported & (1 << decoder_apple_alac)) != 0))
+ config.decoder_in_use = 1 << decoder_apple_alac;
+ else if ((strcasecmp(th, "ffmpeg") == 0) &&
+ ((config.decoders_supported & (1 << decoder_ffmpeg_alac)) != 0))
+ config.decoder_in_use = 1 << decoder_ffmpeg_alac;
else {
- warn("An unrecognised ALAC decoder: \"%s\" was requested via D-Bus interface.", th);
- if (config.use_apple_decoder == 0)
- shairport_sync_set_alacdecoder(skeleton, "hammerton");
- else
- shairport_sync_set_alacdecoder(skeleton, "apple");
- }
-// debug(1,"Using the %s ALAC decoder.", ((config.use_apple_decoder==0) ? "Hammerton" : "Apple"));
-#else
- if (strcasecmp(th, "hammerton") == 0) {
- config.use_apple_decoder = 0;
- // debug(1,"Using the Hammerton ALAC decoder.");
- } else {
- warn("An unrecognised ALAC decoder: \"%s\" was requested via D-Bus interface. (Possibly "
+ warn("An unrecognised or unsupported ALAC decoder: \"%s\" was requested via D-Bus interface. "
+ "(Possibly "
"support for this decoder was not compiled "
"into this version of Shairport Sync.)",
th);
- shairport_sync_set_alacdecoder(skeleton, "hammerton");
}
-#endif
+
return TRUE;
}
gboolean notify_interpolation_callback(ShairportSync *skeleton,
__attribute__((unused)) gpointer user_data) {
char *th = (char *)shairport_sync_get_interpolation(skeleton);
-#ifdef CONFIG_SOXR
+ // #ifdef CONFIG_SOXR
if (strcasecmp(th, "basic") == 0)
config.packet_stuffing = ST_basic;
+#ifdef CONFIG_SOXR
else if (strcasecmp(th, "soxr") == 0)
config.packet_stuffing = ST_soxr;
+#endif
else if (strcasecmp(th, "auto") == 0)
config.packet_stuffing = ST_auto;
+ else if (strcasecmp(th, "vernier") == 0)
+ config.packet_stuffing = ST_vernier;
else {
+#ifdef CONFIG_SOXR
warn("An unrecognised interpolation method: \"%s\" was requested via the D-Bus interface.", th);
+#else
+ if (strcasecmp(th, "soxr") == 0) {
+ warn("Soxr interpolation is not supported in this edition of Shairport Sync.");
+ } else {
+ warn("An unrecognised interpolation method: \"%s\" was requested via the D-Bus interface.",
+ th);
+ }
+#endif
+ // set the shairport_sync_set_interpolation on the D-Bus interface back to what it is in the
+ // setting.
switch (config.packet_stuffing) {
case ST_basic:
shairport_sync_set_interpolation(skeleton, "basic");
case ST_soxr:
shairport_sync_set_interpolation(skeleton, "soxr");
break;
+ case ST_vernier:
+ shairport_sync_set_interpolation(skeleton, "vernier");
+ break;
case ST_auto:
shairport_sync_set_interpolation(skeleton, "auto");
break;
default:
- debug(1, "This should never happen!");
- shairport_sync_set_interpolation(skeleton, "basic");
+ debug(1, "This should never happen, but defaulting to \"vernier\" interpolation!");
+ shairport_sync_set_interpolation(skeleton, "vernier");
break;
}
}
-#else
- if (strcasecmp(th, "basic") == 0)
- config.packet_stuffing = ST_basic;
- else {
- warn("An unrecognised interpolation method: \"%s\" was requested via the D-Bus interface. "
- "(Possibly support for this method was not compiled "
- "into this version of Shairport Sync.)",
- th);
- shairport_sync_set_interpolation(skeleton, "basic");
- }
-#endif
return TRUE;
}
static gboolean on_handle_quit(ShairportSync *skeleton, GDBusMethodInvocation *invocation,
__attribute__((unused)) const gchar *command,
__attribute__((unused)) gpointer user_data) {
- debug(1, ">> quit requested");
- type_of_exit_cleanup = TOE_dbus; // request an exit cleanup that is compatible with dbus
- exit(EXIT_SUCCESS);
+ debug(1, ">> quit request...");
+ config.quit_requested_from_glib_mainloop = 1;
+ g_main_loop_quit(config.glib_worker_loop);
shairport_sync_complete_quit(skeleton, invocation);
return TRUE;
}
static gboolean on_handle_drop_session(ShairportSync *skeleton, GDBusMethodInvocation *invocation,
__attribute__((unused)) gpointer user_data) {
- get_play_lock(NULL, 1); // stop any current session and don't replace it
+ release_play_lock(NULL); // stop any current session and don't replace it
shairport_sync_complete_drop_session(skeleton, invocation);
return TRUE;
}
static void on_dbus_name_acquired(GDBusConnection *connection, const gchar *name,
__attribute__((unused)) gpointer user_data) {
-
- // debug(1, "Shairport Sync native D-Bus interface \"%s\" acquired on the %s bus.", name,
- // (config.dbus_service_bus_type == DBT_session) ? "session" : "system");
+ debug(2, "Shairport Sync native D-Bus interface \"%s\" acquired on the %s bus.", name,
+ (dbus_bus_type == G_BUS_TYPE_SESSION) ? "session" : "system");
shairportSyncSkeleton = shairport_sync_skeleton_new();
g_dbus_interface_skeleton_export(G_DBUS_INTERFACE_SKELETON(shairportSyncSkeleton), connection,
G_CALLBACK(notify_volume_control_profile_callback), NULL);
g_signal_connect(shairportSyncSkeleton, "notify::disable-standby",
G_CALLBACK(notify_disable_standby_callback), NULL);
- g_signal_connect(shairportSyncSkeleton, "notify::convolution",
- G_CALLBACK(notify_convolution_callback), NULL);
+ g_signal_connect(shairportSyncSkeleton, "notify::convolution-enabled",
+ G_CALLBACK(notify_convolution_enabled_callback), NULL);
g_signal_connect(shairportSyncSkeleton, "notify::convolution-gain",
G_CALLBACK(notify_convolution_gain_callback), NULL);
- g_signal_connect(shairportSyncSkeleton, "notify::convolution-impulse-response-file",
- G_CALLBACK(notify_convolution_impulse_response_file_callback), NULL);
- g_signal_connect(shairportSyncSkeleton, "notify::loudness", G_CALLBACK(notify_loudness_callback),
- NULL);
+ g_signal_connect(shairportSyncSkeleton, "notify::convolution-maximum-length-in-seconds",
+ G_CALLBACK(notify_convolution_maximum_length_in_seconds_callback), NULL);
+ g_signal_connect(shairportSyncSkeleton, "notify::convolution-impulse-response-files",
+ G_CALLBACK(notify_convolution_impulse_response_files_callback), NULL);
+ g_signal_connect(shairportSyncSkeleton, "notify::loudness-enabled",
+ G_CALLBACK(notify_loudness_enabled_callback), NULL);
g_signal_connect(shairportSyncSkeleton, "notify::loudness-threshold",
G_CALLBACK(notify_loudness_threshold_callback), NULL);
g_signal_connect(shairportSyncSkeleton, "notify::drift-tolerance",
shairport_sync_set_drift_tolerance(SHAIRPORT_SYNC(shairportSyncSkeleton), config.tolerance);
shairport_sync_set_volume(SHAIRPORT_SYNC(shairportSyncSkeleton), config.airplay_volume);
-#ifdef CONFIG_APPLE_ALAC
- if (config.use_apple_decoder == 0) {
+ if ((config.decoder_in_use & (1 << decoder_hammerton)) != 0) {
shairport_sync_set_alacdecoder(SHAIRPORT_SYNC(shairportSyncSkeleton), "hammerton");
debug(1, ">> ALACDecoder set to \"hammerton\"");
- } else {
+ } else if ((config.decoder_in_use & (1 << decoder_apple_alac)) != 0) {
shairport_sync_set_alacdecoder(SHAIRPORT_SYNC(shairportSyncSkeleton), "apple");
debug(1, ">> ALACDecoder set to \"apple\"");
+ } else if ((config.decoder_in_use & (1 << decoder_ffmpeg_alac)) != 0) {
+ shairport_sync_set_alacdecoder(SHAIRPORT_SYNC(shairportSyncSkeleton), "ffmpeg");
+ debug(1, ">> ALACDecoder set to \"ffmpeg\"");
}
-#else
- shairport_sync_set_alacdecoder(SHAIRPORT_SYNC(shairportSyncSkeleton), "hammerton");
- debug(1, ">> ALACDecoder set to \"hammerton\"");
-
-#endif
shairport_sync_set_active(SHAIRPORT_SYNC(shairportSyncSkeleton), FALSE);
debug(1, ">> Active set to \"false\"");
debug(1, "invalid disable_standby mode!");
break;
}
-
-#ifdef CONFIG_SOXR
if (config.packet_stuffing == ST_basic) {
shairport_sync_set_interpolation(SHAIRPORT_SYNC(shairportSyncSkeleton), "basic");
- debug(1, ">> interpolation set to \"basic\" (soxr support built in)");
+ debug(1, ">> interpolation set to \"basic\"");
} else if (config.packet_stuffing == ST_auto) {
shairport_sync_set_interpolation(SHAIRPORT_SYNC(shairportSyncSkeleton), "auto");
- debug(1, ">> interpolation set to \"auto\" (soxr support built in)");
+ debug(1, ">> interpolation set to \"auto\"");
+ } else if (config.packet_stuffing == ST_vernier) {
+ shairport_sync_set_interpolation(SHAIRPORT_SYNC(shairportSyncSkeleton), "vernier");
+ debug(1, ">> interpolation set to \"vernier\"");
} else {
shairport_sync_set_interpolation(SHAIRPORT_SYNC(shairportSyncSkeleton), "soxr");
debug(1, ">> interpolation set to \"soxr\"");
}
-#else
- if (config.packet_stuffing == ST_basic) {
- shairport_sync_set_interpolation(SHAIRPORT_SYNC(shairportSyncSkeleton), "basic");
- debug(1, ">> interpolation set to \"basic\" (no soxr support)");
- } else if (config.packet_stuffing == ST_auto) {
- shairport_sync_set_interpolation(SHAIRPORT_SYNC(shairportSyncSkeleton), "auto");
- debug(1, ">> interpolation set to \"auto\" (no soxr support)");
- }
-#endif
-
if (config.volume_control_profile == VCP_standard)
shairport_sync_set_volume_control_profile(SHAIRPORT_SYNC(shairportSyncSkeleton), "standard");
else if (config.volume_control_profile == VCP_dasl_tapered)
shairport_sync_set_disable_standby(SHAIRPORT_SYNC(shairportSyncSkeleton), TRUE);
}
- if (config.loudness == 0) {
- shairport_sync_set_loudness(SHAIRPORT_SYNC(shairportSyncSkeleton), FALSE);
+ if (config.loudness_enabled == 0) {
+ debug(1, ">> loudness_enabled is false");
+ shairport_sync_set_loudness_enabled(SHAIRPORT_SYNC(shairportSyncSkeleton), FALSE);
} else {
- shairport_sync_set_loudness(SHAIRPORT_SYNC(shairportSyncSkeleton), TRUE);
+ debug(1, ">> loudness_enabled is true");
+ shairport_sync_set_loudness_enabled(SHAIRPORT_SYNC(shairportSyncSkeleton), TRUE);
}
#ifdef CONFIG_CONVOLUTION
- if (config.convolution == 0) {
- shairport_sync_set_convolution(SHAIRPORT_SYNC(shairportSyncSkeleton), FALSE);
+ if (config.convolution_enabled == 0) {
+ debug(1, ">> convolution_enabled is false");
+ shairport_sync_set_convolution_enabled(SHAIRPORT_SYNC(shairportSyncSkeleton), FALSE);
} else {
- shairport_sync_set_convolution(SHAIRPORT_SYNC(shairportSyncSkeleton), TRUE);
+ debug(1, ">> convolution_enabled is true");
+ shairport_sync_set_convolution_enabled(SHAIRPORT_SYNC(shairportSyncSkeleton), TRUE);
}
- if (config.convolution_ir_file)
- shairport_sync_set_convolution_impulse_response_file(SHAIRPORT_SYNC(shairportSyncSkeleton),
- config.convolution_ir_file);
-// else
-// shairport_sync_set_convolution_impulse_response_file(SHAIRPORT_SYNC(shairportSyncSkeleton),
-// NULL);
+
+ const char *str = NULL;
+ if ((config.cfg != NULL) &&
+ (config_lookup_non_empty_string(config.cfg, "dsp.convolution_ir_files", &str))) {
+ shairport_sync_set_convolution_impulse_response_files(SHAIRPORT_SYNC(shairportSyncSkeleton),
+ str);
+ } else {
+ shairport_sync_set_convolution_impulse_response_files(SHAIRPORT_SYNC(shairportSyncSkeleton),
+ NULL);
+ }
+ shairport_sync_set_convolution_maximum_length_in_seconds(
+ SHAIRPORT_SYNC(shairportSyncSkeleton), config.convolution_max_length_in_seconds);
#endif
shairport_sync_set_service_name(SHAIRPORT_SYNC(shairportSyncSkeleton), config.service_name);
- shairport_sync_set_output_rate(SHAIRPORT_SYNC(shairportSyncSkeleton), config.output_rate);
- shairport_sync_set_output_format(SHAIRPORT_SYNC(shairportSyncSkeleton),
- sps_format_description_string(config.output_format));
#ifdef CONFIG_AIRPLAY_2
shairport_sync_set_protocol(SHAIRPORT_SYNC(shairportSyncSkeleton), "AirPlay 2");
free(vs);
shairport_sync_diagnostics_set_verbosity(
- SHAIRPORT_SYNC_DIAGNOSTICS(shairportSyncDiagnosticsSkeleton), debuglev);
+ SHAIRPORT_SYNC_DIAGNOSTICS(shairportSyncDiagnosticsSkeleton), debug_level());
// debug(2,">> log verbosity is %d.",debuglev);
"Not Available");
debug(1, "Shairport Sync native D-Bus service started at \"%s\" on the %s bus.", name,
- (config.dbus_service_bus_type == DBT_session) ? "session" : "system");
+ (dbus_bus_type == G_BUS_TYPE_SESSION) ? "session" : "system");
service_is_running = 1;
}
-static void on_dbus_name_lost_again(__attribute__((unused)) GDBusConnection *connection,
- __attribute__((unused)) const gchar *name,
- __attribute__((unused)) gpointer user_data) {
- warn("could not acquire a Shairport Sync native D-Bus interface \"%s\" on the %s bus.", name,
- (config.dbus_service_bus_type == DBT_session) ? "session" : "system");
-}
-
static void on_dbus_name_lost(__attribute__((unused)) GDBusConnection *connection,
__attribute__((unused)) const gchar *name,
__attribute__((unused)) gpointer user_data) {
- // debug(1, "could not acquire a Shairport Sync native D-Bus interface \"%s\" on the %s bus --
- // will try adding the process "
- // "number to the end of it.",
- // name, (config.dbus_service_bus_type == DBT_session) ? "session" : "system");
- pid_t pid = getpid();
- char interface_name[256] = "";
- snprintf(interface_name, sizeof(interface_name), "org.gnome.ShairportSync.i%d", pid);
- GBusType dbus_bus_type = G_BUS_TYPE_SYSTEM;
- if (config.dbus_service_bus_type == DBT_session)
- dbus_bus_type = G_BUS_TYPE_SESSION;
- // debug(1, "Looking for a Shairport Sync native D-Bus interface \"%s\" on the %s bus.",
- // interface_name,(config.dbus_service_bus_type == DBT_session) ? "session" : "system");
- g_bus_own_name(dbus_bus_type, interface_name, G_BUS_NAME_OWNER_FLAGS_NONE, NULL,
- on_dbus_name_acquired, on_dbus_name_lost_again, NULL, NULL);
+ warn("could not acquire a Shairport Sync native D-Bus interface \"%s\" on the %s bus.", name,
+ (dbus_bus_type == G_BUS_TYPE_SESSION) ? "session" : "system");
+ ownerID = 0;
}
int start_dbus_service() {
- // shairportSyncSkeleton = NULL;
- GBusType dbus_bus_type = G_BUS_TYPE_SYSTEM;
- if (config.dbus_service_bus_type == DBT_session)
+
+ // set up default message bus
+ if (config.dbus_default_message_bus == DBT_session)
+ dbus_bus_type = G_BUS_TYPE_SESSION;
+
+ // look for explicit overrides
+ if (config.dbus_service_bus_type == DBT_system)
+ dbus_bus_type = G_BUS_TYPE_SYSTEM;
+ else if (config.dbus_service_bus_type == DBT_session)
dbus_bus_type = G_BUS_TYPE_SESSION;
- // debug(1, "Looking for a Shairport Sync native D-Bus interface \"org.gnome.ShairportSync\" on
- // the %s bus.",(config.dbus_service_bus_type == DBT_session) ? "session" : "system");
+
+ debug(1,
+ "Looking for a Shairport Sync native D-Bus interface \"org.gnome.ShairportSync\" on the %s "
+ "bus.",
+ (dbus_bus_type == G_BUS_TYPE_SESSION) ? "session" : "system");
ownerID = g_bus_own_name(dbus_bus_type, "org.gnome.ShairportSync", G_BUS_NAME_OWNER_FLAGS_NONE,
NULL, on_dbus_name_acquired, on_dbus_name_lost, NULL, NULL);
+ debug(2, "ownerID: %d.", ownerID);
return 0; // this is just to quieten a compiler warning
}
void stop_dbus_service() {
- debug(2, "stopping dbus service");
if (ownerID) {
+ debug(2, "stopping dbus service -- unowning ownerID %d.", ownerID);
g_bus_unown_name(ownerID);
} else if (service_is_running != 0) {
debug(1, "Zero OwnerID for running \"org.gnome.ShairportSync\" dbus service.");
ARG NQPTP_BRANCH=main
+ARG FFMPEG_BRANCH=release/7.1
ARG SHAIRPORT_SYNC_BRANCH=.
+ARG ALPINE_VERSION=20250108
+
+##### FFMPEGLITE #####
+FROM alpine:$ALPINE_VERSION AS ffmpeglite
+RUN apk -U add build-base git nasm pkgconf
+RUN mkdir -p /ffmpeg_sources /ffmpeg_build
+WORKDIR /ffmpeg_sources
+ARG FFMPEG_BRANCH
+RUN git clone --depth=1 -b "$FFMPEG_BRANCH" https://github.com/FFmpeg/FFmpeg
+WORKDIR /ffmpeg_sources/FFmpeg
+RUN ./configure \
+ --prefix="/ffmpeg_build" \
+ --extra-libs="-lpthread -lm" \
+ --ld="g++" \
+ --disable-static \
+ --enable-shared \
+ --enable-gpl \
+ --disable-programs \
+ --disable-everything \
+ --disable-doc \
+ --disable-iconv \
+ --disable-avdevice \
+ --disable-avfilter \
+ --disable-swscale \
+ --disable-network \
+ --disable-iamf \
+ --disable-pixelutils \
+ --disable-postproc \
+ --enable-decoder=alac \
+ --enable-decoder=aac
+RUN make -j $(nproc)
+RUN make install
+##### FFMPEGLITE END #####
-FROM alpine:3.20 AS builder
+##### NQPTP #####
+FROM alpine:$ALPINE_VERSION AS nqptp
RUN apk -U add \
+ musl-dev \
+ autoconf \
+ automake \
+ build-base \
+ git \
+ linux-headers
+
+ARG NQPTP_BRANCH
+RUN git clone --depth=1 -b "$NQPTP_BRANCH" https://github.com/mikebrady/nqptp
+WORKDIR /nqptp
+RUN autoreconf -i
+RUN ./configure
+RUN make -j $(nproc)
+WORKDIR /
+##### NQPTP END #####
+
+##### SPS #####
+FROM alpine:$ALPINE_VERSION AS shairport-sync
+RUN apk -U add \
+ pkgconf \
alsa-lib-dev \
autoconf \
automake \
avahi-dev \
build-base \
dbus \
- ffmpeg-dev \
git \
libconfig-dev \
libgcrypt-dev \
libplist-dev \
- libressl-dev \
+ openssl-dev \
libsndfile-dev \
libsodium-dev \
libtool \
popt-dev \
pulseaudio-dev \
soxr-dev \
+ libplist-util \
xxd
-##### ALAC #####
-FROM builder AS alac
-RUN git clone --depth=1 https://github.com/mikebrady/alac
-WORKDIR /alac
-RUN autoreconf -i
-RUN ./configure
-RUN make -j $(nproc)
-RUN make install
-WORKDIR /
-##### ALAC END #####
-
-##### NQPTP #####
-FROM builder AS nqptp
-ARG NQPTP_BRANCH
-RUN git clone --depth=1 -b "$NQPTP_BRANCH" https://github.com/mikebrady/nqptp
-WORKDIR /nqptp
-RUN autoreconf -i
-RUN ./configure
-RUN make -j $(nproc)
-WORKDIR /
-##### NQPTP END #####
-
-##### SPS #####
-# Note: apple-alac requires alac build first.
-FROM alac AS shairport-sync
ARG SHAIRPORT_SYNC_BRANCH
-
WORKDIR /shairport-sync
COPY . .
-RUN git checkout "$SHAIRPORT_SYNC_BRANCH"
+RUN rm -rf build
+# the above three lines or the following...
+# RUN git clone --depth=1 -b "$SHAIRPORT_SYNC_BRANCH" https://github.com/mikebrady/sps-private shairport-sync
+
+COPY --from=ffmpeglite /ffmpeg_build/lib/ /usr/local/lib
+COPY --from=ffmpeglite /ffmpeg_build/lib/pkgconfig/*.pc /usr/local/lib/pkgconfig/
+COPY --from=ffmpeglite /ffmpeg_build/include/ /usr/local/include
+
+# RUN git checkout "$SHAIRPORT_SYNC_BRANCH"
+# the following is to remove any build directory that might be there
+# not needed if the source is a clone of the repository
+# RUN rm -rf build
WORKDIR /shairport-sync/build
RUN autoreconf -i ../
-RUN CFLAGS="-O3" CXXFLAGS="-O3" ../configure --sysconfdir=/etc --with-alsa --with-pa --with-soxr --with-avahi --with-ssl=openssl \
+RUN ../configure --sysconfdir=/etc --with-alsa --with-pulseaudio --with-soxr --with-avahi --with-ssl=openssl \
--with-airplay-2 --with-metadata --with-dummy --with-pipe --with-dbus-interface \
--with-stdout --with-mpris-interface --with-mqtt-client \
- --with-apple-alac --with-convolution --with-pw
+ --with-convolution --with-pipewire
RUN make -j $(nproc)
RUN DESTDIR=install make install
WORKDIR /
# Add run script that will start SPS
COPY --chmod=755 ./docker/run.sh ./run.sh
-COPY ./docker/etc/s6-overlay/s6-rc.d /etc/s6-overlay/s6-rc.d
COPY ./docker/etc/pulse /etc/pulse
##### END STATIC FILES #####
##### BUILD FILES #####
FROM scratch AS build-files
-
+COPY --from=ffmpeglite /ffmpeg_build/lib/ /usr/local/lib
COPY --from=shairport-sync /shairport-sync/build/install/usr/local/bin/shairport-sync /usr/local/bin/shairport-sync
COPY --from=shairport-sync /shairport-sync/build/install/usr/local/share/man/man1 /usr/share/man/man1
COPY --from=nqptp /nqptp/nqptp /usr/local/bin/nqptp
-COPY --from=alac /usr/local/lib/libalac.* /usr/local/lib/
COPY --from=shairport-sync /shairport-sync/build/install/etc/shairport-sync.conf /etc/
COPY --from=shairport-sync /shairport-sync/build/install/etc/shairport-sync.conf.sample /etc/
COPY --from=shairport-sync /shairport-sync/build/install/etc/dbus-1/system.d/shairport-sync-dbus.conf /etc/dbus-1/system.d/
##### END BUILD FILES #####
# Shairport Sync Runtime System
-FROM crazymax/alpine-s6:3.20-3.2.0.2
-
-ENV S6_CMD_WAIT_FOR_SERVICES=1
-ENV S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0
-
+FROM alpine:$ALPINE_VERSION
+COPY --from=build-files / /
RUN apk -U add \
alsa-lib \
avahi \
- avahi-tools \
dbus \
- ffmpeg \
glib \
- less \
- less-doc \
libconfig \
libgcrypt \
libplist \
+ pipewire \
libpulse \
- libressl3.8-libcrypto \
- libsndfile \
+ libcrypto3 \
libsodium \
libuuid \
- pipewire \
- man-pages \
- mandoc \
mosquitto \
popt \
soxr \
curl
RUN rm -rfv /lib/apk/db/* && \
- rm -rfv /etc/avahi/services/*.service && \
- addgroup shairport-sync && \
- adduser -D shairport-sync -G shairport-sync && \
- addgroup -g 29 docker_audio && \
- addgroup shairport-sync docker_audio && \
- addgroup shairport-sync audio && \
- mkdir -p /run/dbus
-
-# Remove anything we don't need.
-# Remove any statically-defined Avahi services, e.g. SSH and SFTP
-
-# Create non-root user for running the container -- running as the user 'shairport-sync' also allows
+ rm -rfv /etc/avahi/services/*.service && \
+ rm -rf /usr/share/man && \
+ addgroup shairport-sync && \
+ adduser -D shairport-sync -G shairport-sync && \
+ addgroup -g 29 docker_audio && \
+ addgroup shairport-sync docker_audio && \
+ addgroup shairport-sync audio && \
+ mkdir -p /run/dbus
+
+# Removed anything we don't need.
+# Removed any statically-defined Avahi services, e.g. SSH and SFTP
+
+# Created non-root user for running the container -- running as the user 'shairport-sync' also allows
# Shairport Sync to provide the D-Bus and MPRIS interfaces within the container
-# Add the shairport-sync user to the pre-existing audio group, which has ID 29, for access to the ALSA stuff
+# Added the shairport-sync user to the pre-existing audio group, which has ID 29, for access to the ALSA stuff
COPY --from=files / /
-COPY --from=build-files / /
-
-ENTRYPOINT ["/init","./run.sh"]
+ENV ENABLE_AVAHI=1
+ENTRYPOINT ["./run.sh"]
To run the latest release of Shairport Sync, which provides AirPlay 2 service:
```
-$ docker run -d --restart unless-stopped --net host --device /dev/snd \
+$ docker run -d --cap-add=SYS_NICE --restart unless-stopped --net host --device /dev/snd \
mikebrady/shairport-sync:latest
```
To run the classic version:
```
-$ docker run -d --restart unless-stopped --net host --device /dev/snd \
+$ docker run -d --cap-add=SYS_NICE --restart unless-stopped --net host --device /dev/snd \
mikebrady/shairport-sync:latest-classic
```
Command line options will be passed to Shairport Sync. Here is an example:
```
-$ docker run -d --restart unless-stopped --net host --device /dev/snd \
+$ docker run -d --cap-add=SYS_NICE --restart unless-stopped --net host --device /dev/snd \
mikebrady/shairport-sync:latest \
-v --statistics -a DenSystem -- -d hw:0 -c PCM
```
This will send audio to alsa hardware device `hw:0` and make use of the that device's mixer control called `PCM`. The service will be visible as `DenSystem` on the network.
-The image is built with PulseAudio backend support. To use it, refer to [`docker-compose.yaml`](docker-compose.yaml) for required environment variables and mounts. You might need to adjust authentication on your PulseAudio server ([PA documentation](https://www.freedesktop.org/wiki/Software/PulseAudio/Documentation/User/Modules/#module-native-protocol-unixtcp)) and set default backend to `pa` via either command line option `-o` or `general.output_backend` field in config file.
+The image is built with PipeWire and PulseAudio backend support. To use it, refer to [`docker-compose.yaml`](docker-compose.yaml) for required environment variables and mounts.
+
+To use the PipeWire backend, set the backend to `pipewire` via either command line option `-o pipewire` or the `output_backend` field in the `general` section of the configuration file.
+
+Similarly, to use the PulseAudio backend, set the backend to `pulseaudio` via either command line option `-o pulseaudio` or the `output_backend` field in the `general` section of the configuration file.
+For use with PulseAudio, you might need to adjust authentication on your PulseAudio server ([PA documentation](https://www.freedesktop.org/wiki/Software/PulseAudio/Documentation/User/Modules/#module-native-protocol-unixtcp)).
## Configuration File
-ARG SHAIRPORT_SYNC_BRANCH=.
-
-FROM alpine:3.20 AS builder
+# Classic Shairport-Sync build. Note: the only decoder in FFmpeg is ALAC.
-# Classic (aka AirPlay 1) Build
+ARG SHAIRPORT_SYNC_BRANCH=.
+ARG FFMPEG_BRANCH=release/7.1
+ARG ALPINE_VERSION=20250108
+
+##### FFMPEGLITE #####
+FROM alpine:$ALPINE_VERSION AS ffmpeglite
+RUN apk -U add build-base git nasm pkgconf
+RUN mkdir -p /ffmpeg_sources /ffmpeg_build
+WORKDIR /ffmpeg_sources
+ARG FFMPEG_BRANCH
+RUN git clone --depth=1 -b "$FFMPEG_BRANCH" https://github.com/FFmpeg/FFmpeg
+WORKDIR /ffmpeg_sources/FFmpeg
+RUN ./configure \
+ --prefix="/ffmpeg_build" \
+ --extra-libs="-lpthread -lm" \
+ --ld="g++" \
+ --disable-static \
+ --enable-shared \
+ --enable-gpl \
+ --disable-programs \
+ --disable-everything \
+ --disable-doc \
+ --disable-iconv \
+ --disable-avdevice \
+ --disable-avfilter \
+ --disable-swscale \
+ --disable-network \
+ --disable-iamf \
+ --disable-pixelutils \
+ --disable-postproc \
+ --enable-decoder=alac
+RUN make
+RUN make install
+##### FFMPEGLITE END #####
-# Check required arguments exist. These will be provided by the Github Action
-# Workflow and are required to ensure the correct branches are being used.
-ARG SHAIRPORT_SYNC_BRANCH
-RUN test -n "$SHAIRPORT_SYNC_BRANCH"
+##### SPS #####
+FROM alpine:$ALPINE_VERSION AS shairport-sync
RUN apk -U add \
+ pkgconf \
alsa-lib-dev \
autoconf \
automake \
dbus \
git \
libconfig-dev \
+ openssl-dev \
libsndfile-dev \
libtool \
- openssl-dev \
+ pipewire-dev \
mosquitto-dev \
popt-dev \
pulseaudio-dev \
- soxr-dev
-
-##### ALAC #####
-RUN git clone https://github.com/mikebrady/alac
-WORKDIR /alac
-RUN autoreconf -i
-RUN ./configure
-RUN make -j $(nproc)
-RUN make install
-WORKDIR /
-##### ALAC END #####
+ soxr-dev \
+ xxd
-##### SPS #####
+ARG SHAIRPORT_SYNC_BRANCH
WORKDIR /shairport-sync
COPY . .
-RUN git checkout "$SHAIRPORT_SYNC_BRANCH"
+RUN rm -rf build
+# the above three lines or the following...
+# RUN git clone --depth=1 -b "$SHAIRPORT_SYNC_BRANCH" https://github.com/mikebrady/sps-private shairport-sync
+
+COPY --from=ffmpeglite /ffmpeg_build/lib/ /usr/local/lib
+COPY --from=ffmpeglite /ffmpeg_build/lib/pkgconfig/*.pc /usr/local/lib/pkgconfig/
+COPY --from=ffmpeglite /ffmpeg_build/include/ /usr/local/include
+
+# RUN git checkout "$SHAIRPORT_SYNC_BRANCH"
+# the following is to remove any build directory that might be there
+# not needed if the source is a clone of the repository
+# RUN rm -rf build
WORKDIR /shairport-sync/build
RUN autoreconf -i ../
-RUN CFLAGS="-O3" CXXFLAGS="-O3" ../configure --sysconfdir=/etc --with-alsa --with-pa --with-soxr --with-avahi --with-ssl=openssl \
- --with-metadata --with-dummy --with-pipe --with-dbus-interface \
+RUN ../configure --sysconfdir=/etc --with-alsa --with-pulseaudio --with-soxr --with-avahi --with-ssl=openssl \
+ --with-ffmpeg --with-metadata --with-dummy --with-pipe --with-dbus-interface \
--with-stdout --with-mpris-interface --with-mqtt-client \
- --with-apple-alac --with-convolution
-RUN make -j $(nproc)
+ --with-convolution --with-pipewire
+RUN make
RUN DESTDIR=install make install
WORKDIR /
##### SPS END #####
+##### STATIC FILES #####
+FROM scratch AS files
+
+# Add run script that will start SPS
+COPY --chmod=755 ./docker/classic/run.sh ./run.sh
+COPY ./docker/etc/pulse /etc/pulse
+##### END STATIC FILES #####
+
+##### BUILD FILES #####
+FROM scratch AS build-files
+COPY --from=ffmpeglite /ffmpeg_build/lib/ /usr/local/lib
+COPY --from=shairport-sync /shairport-sync/build/install/usr/local/bin/shairport-sync /usr/local/bin/shairport-sync
+COPY --from=shairport-sync /shairport-sync/build/install/usr/local/share/man/man1 /usr/share/man/man1
+COPY --from=shairport-sync /shairport-sync/build/install/etc/shairport-sync.conf /etc/
+COPY --from=shairport-sync /shairport-sync/build/install/etc/shairport-sync.conf.sample /etc/
+COPY --from=shairport-sync /shairport-sync/build/install/etc/dbus-1/system.d/shairport-sync-dbus.conf /etc/dbus-1/system.d/
+COPY --from=shairport-sync /shairport-sync/build/install/etc/dbus-1/system.d/shairport-sync-mpris.conf /etc/dbus-1/system.d/
+##### END BUILD FILES #####
+
# Shairport Sync Runtime System
-FROM crazymax/alpine-s6:3.20-3.2.0.2
+FROM alpine:$ALPINE_VERSION
-ENV S6_CMD_WAIT_FOR_SERVICES=1
-ENV S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0
+COPY --from=build-files / /
-RUN apk -U add \
+RUN apk --no-cache add \
alsa-lib \
avahi \
- avahi-tools \
dbus \
glib \
- less \
- less-doc \
libconfig \
- libsndfile-dev \
+ pipewire \
libpulse \
- man-pages \
- mandoc \
libcrypto3 \
mosquitto \
popt \
soxr \
- curl
-
-# Copy build files.
-COPY --from=builder /shairport-sync/build/install/usr/local/bin/shairport-sync /usr/local/bin/shairport-sync
-COPY --from=builder /shairport-sync/build/install/usr/local/share/man/man1 /usr/share/man/man1
-COPY --from=builder /usr/local/lib/libalac.* /usr/local/lib/
-COPY --from=builder /shairport-sync/build/install/etc/shairport-sync.conf /etc/
-COPY --from=builder /shairport-sync/build/install/etc/shairport-sync.conf.sample /etc/
-COPY --from=builder /shairport-sync/build/install/etc/dbus-1/system.d/shairport-sync-dbus.conf /etc/dbus-1/system.d/
-COPY --from=builder /shairport-sync/build/install/etc/dbus-1/system.d/shairport-sync-mpris.conf /etc/dbus-1/system.d/
-
-COPY ./docker/classic/etc/s6-overlay/s6-rc.d /etc/s6-overlay/s6-rc.d
-RUN chmod +x /etc/s6-overlay/s6-rc.d/01-startup/script.sh
-
-# Create non-root user for running the container -- running as the user 'shairport-sync' also allows
+ curl && \
+ rm -rfv /lib/apk/db/* && \
+ rm -rfv /etc/avahi/services/*.service && \
+ rm -rf /usr/share/man && \
+ addgroup shairport-sync && \
+ adduser -D shairport-sync -G shairport-sync && \
+ addgroup -g 29 docker_audio && \
+ addgroup shairport-sync docker_audio && \
+ addgroup shairport-sync audio && \
+ mkdir -p /run/dbus
+
+# Removed anything we don't need.
+# Removed any statically-defined Avahi services, e.g. SSH and SFTP
+
+# Created non-root user for running the container -- running as the user 'shairport-sync' also allows
# Shairport Sync to provide the D-Bus and MPRIS interfaces within the container
+# Added the shairport-sync user to the pre-existing audio group, which has ID 29, for access to the ALSA stuff
-RUN addgroup shairport-sync
-RUN adduser -D shairport-sync -G shairport-sync
-
-# Add the shairport-sync user to the pre-existing audio group, which has ID 29, for access to the ALSA stuff
-RUN addgroup -g 29 docker_audio && addgroup shairport-sync docker_audio && addgroup shairport-sync audio
-
-# Remove anything we don't need.
-RUN rm -rf /lib/apk/db/*
-
-# Remove any statically-defined Avahi services, e.g. SSH and SFTP
-RUN rm -rf /etc/avahi/services/*.service
-
-# Add run script that will start SPS
-COPY ./docker/run.sh ./run.sh
-RUN chmod +x /run.sh
-
-# D-Bus might need this directory
-RUN mkdir -p /run/dbus
-
-ENTRYPOINT ["/init","./run.sh"]
+COPY --from=files / /
+ENV ENABLE_AVAHI=1
+ENTRYPOINT ["./run.sh"]
+++ /dev/null
-#!/bin/sh
-echo "STARTING - $(date)"
+++ /dev/null
-/etc/s6-overlay/s6-rc.d/01-startup/script.sh
+++ /dev/null
-#!/bin/sh
-exec dbus-send --system / org.freedesktop.DBus.Peer.Ping > /dev/null 2> /dev/null
+++ /dev/null
-01-startup
+++ /dev/null
-#!/command/execlineb -S0
-/run/s6/basedir/bin/halt
+++ /dev/null
-#!/command/with-contenv sh
-
-# Set the limit to the same value Docker has been using in earlier version.
-ulimit -n 1048576
-
-echo "Starting dbus"
-exec s6-notifyoncheck dbus-daemon --system --nofork --nopidfile
+++ /dev/null
-#!/bin/sh
-state="$(dbus-send --system --dest=org.freedesktop.Avahi --print-reply / org.freedesktop.Avahi.Server.GetState | grep int32 | awk '{printf $2}')"
-
-# Avahi will return 'state=2' when 'Server startup complete'
-if [ "$state" = 2 ]; then
- exit 0
-else
- exit 1
-fi
-
+++ /dev/null
-#!/command/execlineb -S0
-/run/s6/basedir/bin/halt
+++ /dev/null
-#!/command/with-contenv sh
-echo "Starting avahi"
-exec s6-notifyoncheck avahi-daemon --no-chroot
#!/bin/sh
+
+# exit if any command returns a non-zero result
+set -e
+
+echo "Shairport Sync Startup ($(date))"
+
+if [ -z ${ENABLE_AVAHI+x} ] || [ $ENABLE_AVAHI -eq 1 ]; then
+ rm -rf /run/dbus/dbus.pid
+ rm -rf /run/avahi-daemon/pid
+
+ dbus-uuidgen --ensure
+ dbus-daemon --system
+
+ avahi-daemon --daemonize --no-chroot
+fi
+
while [ ! -f /var/run/avahi-daemon/pid ]; do
- echo "Warning: avahi is not running, sleeping for 1 second before trying to start shairport-sync"
- sleep 1
+ echo "Warning: avahi is not running, sleeping for 5 seconds before trying to start shairport-sync"
+ sleep 5
done
-echo "Starting shairport-sync"
-# pass all commandline options to shairport-sync
-/usr/local/bin/shairport-sync "$@"
+
+# for PipeWire
+export XDG_RUNTIME_DIR=/tmp
+
+# for PulseAudio
+export PULSE_SERVER=unix:/tmp/pulseaudio.socket
+export PULSE_COOKIE=/tmp/pulseaudio.cookie
+
+echo "Finished startup tasks ($(date)), starting Shairport Sync."
+
+exec /usr/local/bin/shairport-sync "$@"
services:
shairport-sync:
image: mikebrady/shairport-sync:latest
- network_mode: host
+ network_mode: host # Required for AirPlay 2
restart: unless-stopped
# environment:
- # S6_KEEP_ENV: 1 # Allow S6 to pass environment variables from compose file
# PULSE_SERVER: unix:/tmp/pulseaudio.socket # Path for PulseAudio socket
# PULSE_COOKIE: /tmp/pulseaudio.cookie # Path for PulseAudio cookie
# XDG_RUNTIME_DIR: /tmp # Path for pipewire
+ # ENABLE_AVAHI: 0 # Disable DBus and Avahi daemon inside the container
devices:
- "/dev/snd" # ALSA device, omit if using PulseAudio
# volumes:
# - ./volumes/shairport-sync/shairport-sync.conf:/etc/shairport-sync.conf # Customised Shairport Sync configuration file.
# - /run/user/1000/pulse/native:/tmp/pulseaudio.socket # PulseAudio socket when using that backend
# - /run/user/1000/pipewire-0:/tmp/pipewire-0 # Pipewire socket when using pipewire
+ # - /var/run/dbus:/var/run/dbus # DBus when ENABLE_AVAHI set to 0
+ # - /var/run/avahi-daemon:/var/run/avahi-daemon # Avahi socket when ENABLE_AVAHI set to 0
# command: -o pw # You can specify the desired output with command:
logging:
options:
+++ /dev/null
-#!/bin/sh
-echo "STARTING - $(date)"
+++ /dev/null
-/etc/s6-overlay/s6-rc.d/01-startup/script.sh
+++ /dev/null
-#!/bin/sh
-exec dbus-send --system / org.freedesktop.DBus.Peer.Ping > /dev/null 2> /dev/null
+++ /dev/null
-01-startup
+++ /dev/null
-#!/command/execlineb -S0
-/run/s6/basedir/bin/halt
+++ /dev/null
-#!/command/with-contenv sh
-
-# Set the limit to the same value Docker has been using in earlier version.
-ulimit -n 1048576
-
-echo "Starting dbus"
-exec s6-notifyoncheck dbus-daemon --system --nofork --nopidfile
+++ /dev/null
-#!/bin/sh
-state="$(dbus-send --system --dest=org.freedesktop.Avahi --print-reply / org.freedesktop.Avahi.Server.GetState | grep int32 | awk '{printf $2}')"
-
-# Avahi will return 'state=2' when 'Server startup complete'
-if [ "$state" = 2 ]; then
- exit 0
-else
- exit 1
-fi
-
+++ /dev/null
-#!/command/execlineb -S0
-/run/s6/basedir/bin/halt
+++ /dev/null
-#!/command/with-contenv sh
-echo "Starting avahi"
-exec s6-notifyoncheck avahi-daemon --no-chroot
+++ /dev/null
-01-startup
+++ /dev/null
-#!/command/execlineb -S0
-/run/s6/basedir/bin/halt
+++ /dev/null
-#!/command/with-contenv sh
-echo "Starting nqptp"
-exec /usr/local/bin/nqptp
#!/bin/sh
+
+# exit if any command returns a non-zero result
+set -e
+
+echo "Shairport Sync Startup ($(date))"
+
+if [ -z ${ENABLE_AVAHI+x} ] || [ $ENABLE_AVAHI -eq 1 ]; then
+ rm -rf /run/dbus/dbus.pid
+ rm -rf /run/avahi-daemon/pid
+
+ dbus-uuidgen --ensure
+ dbus-daemon --system
+
+ avahi-daemon --daemonize --no-chroot
+fi
+
+echo "Starting NQPTP ($(date))"
+
+(/usr/local/bin/nqptp > /dev/null 2>&1) &
+
while [ ! -f /var/run/avahi-daemon/pid ]; do
- echo "Warning: avahi is not running, sleeping for 1 second before trying to start shairport-sync"
- sleep 1
+ echo "Warning: avahi is not running, sleeping for 5 seconds before trying to start shairport-sync"
+ sleep 5
done
-echo "Starting shairport-sync"
-# pass all commandline options to shairport-sync
-/usr/local/bin/shairport-sync "$@"
+
+# for PipeWire
+export XDG_RUNTIME_DIR=/tmp
+
+# for PulseAudio
+export PULSE_SERVER=unix:/tmp/pulseaudio.socket
+export PULSE_COOKIE=/tmp/pulseaudio.cookie
+
+echo "Finished startup tasks ($(date)), starting Shairport Sync."
+
+exec /usr/local/bin/shairport-sync "$@"
dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:DriftTolerance variant:double:0.001
# Is Loudness Enabled:
-dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:LoudnessThreshold
+dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:LoudnessEnabled
# Enable Loudness Filter
-dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:Loudness variant:boolean:true
+dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:LoudnessEnabled variant:boolean:true
# Get Loudness Threshold
dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:LoudnessThreshold
dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:LoudnessThreshold variant:double:-15.0
# Is Convolution enabled:
-dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:Convolution
+dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:ConvolutionEnabled
# Enable Convolution
-dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:Convolution variant:boolean:true
+dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:ConvolutionEnabled variant:boolean:true
# Get Convolution Gain:
dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:ConvolutionGain
-# Set Convolution Gain -- the gain applied before convolution is applied -- to -10.0 dB
+# Set Convolution Gain -- the gain applied after convolution is applied -- to -10.0 dB
dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:ConvolutionGain variant:double:-10
-# Get Convolution Impulse Response File:
-dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:ConvolutionImpulseResponseFile
+# Get Convolution Impulse Response Files:
+dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:ConvolutionImpulseResponseFiles
+
+# Set Convolution Impulse Response Files:
+dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:ConvolutionImpulseResponseFiles variant:string:"'/home/pi/filters/Sennheiser HD 205 minimum phase 48000Hz.wav', '/home/pi/filters/Sennheiser HD 205 minimum phase 44100Hz.wav'"
+
+# Get Convolution Impulse Response File Maximum Length:
+dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:ConvolutionMaximumLengthInSeconds
-# Set Convolution Impulse Response File:
-dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:ConvolutionImpulseResponseFile variant:string:"/etc/shairport-sync/boom.wav"
+# Set Convolution Impulse Response File Maximum Length:
+dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Set string:org.gnome.ShairportSync string:ConvolutionMaximumLengthInSeconds variant:double:1
# Get the Protocol Shairport Sync was built for -- AirPlay or AirPlay 2:
dbus-send --print-reply --system --dest=org.gnome.ShairportSync /org/gnome/ShairportSync org.freedesktop.DBus.Properties.Get string:org.gnome.ShairportSync string:Protocol
#include "common.h"
#include <math.h>
-loudness_processor loudness_r;
-loudness_processor loudness_l;
+#define MAXCHANNELS 8
-void _loudness_set_volume(loudness_processor *p, float volume) {
+// loudness_processor_dynamic loudness_r;
+// loudness_processor_dynamic loudness_l;
+
+loudness_processor_dynamic loudness_filters[MAXCHANNELS];
+
+// if the rate or the loudness_reference_volume_db change, recalculate the
+// loudness volume parameters
+
+static int loudness_fix_volume_parameter = 0;
+static unsigned int loudness_rate_parameter = 0;
+static float loudness_volume_reference_parameter = 0.0;
+static loudness_processor_static lps = {0.0, 0.0, 0.0, 0.0, 0.0};
+
+void _loudness_set_volume(loudness_processor_static *p, float volume, unsigned int sample_rate) {
float gain = -(volume - config.loudness_reference_volume_db) * 0.5;
- if (gain < 0)
+ if (gain < 0) {
gain = 0;
+ }
+ debug(2, "Volume: %.1f dB - Loudness gain @10Hz: %.1f dB", volume, gain);
float Fc = 10.0;
float Q = 0.5;
// Formula from http://www.earlevel.com/main/2011/01/02/biquad-formulas/
- float Fs = 44100.0;
+ float Fs = sample_rate * 1.0;
float K = tan(M_PI * Fc / Fs);
float V = pow(10.0, gain / 20.0);
p->b2 = (1 - 1 / Q * K + K * K) * norm;
}
-float loudness_process(loudness_processor *p, float i0) {
- float o0 = p->a0 * i0 + p->a1 * p->i1 + p->a2 * p->i2 - p->b1 * p->o1 - p->b2 * p->o2;
+float loudness_process(loudness_processor_dynamic *p, float i0) {
+ float o0 = lps.a0 * i0 + lps.a1 * p->i1 + lps.a2 * p->i2 - lps.b1 * p->o1 - lps.b2 * p->o2;
p->o2 = p->o1;
p->o1 = o0;
return o0;
}
-void loudness_set_volume(float volume) {
- float gain = -(volume - config.loudness_reference_volume_db) * 0.5;
- if (gain < 0)
- gain = 0;
+void loudness_update(rtsp_conn_info *conn) {
+ // first, see if loudness can be enabled
+ int do_loudness = config.loudness_enabled;
+ if ((config.output->parameters != NULL) && (config.output->parameters()->volume_range != NULL)) {
+ do_loudness = 0; // if we are using external (usually hardware) volume controls.
+ }
- debug(2, "Volume: %.1f dB - Loudness gain @10Hz: %.1f dB", volume, gain);
- _loudness_set_volume(&loudness_l, volume);
- _loudness_set_volume(&loudness_r, volume);
+ if (do_loudness) {
+ // check the volume parameters
+ if ((conn->fix_volume != loudness_fix_volume_parameter) ||
+ (conn->input_rate != loudness_rate_parameter) ||
+ (config.loudness_reference_volume_db != loudness_volume_reference_parameter)) {
+ debug(1, "update loudness parameters");
+ float new_volume = 20 * log10((double)conn->fix_volume / 65536);
+ _loudness_set_volume(&lps, new_volume, conn->input_rate);
+ // _loudness_set_volume(&loudness_r, new_volume, conn->input_rate);
+ loudness_fix_volume_parameter = conn->fix_volume;
+ loudness_rate_parameter = conn->input_rate;
+ loudness_volume_reference_parameter = config.loudness_reference_volume_db;
+ }
+ }
+ conn->do_loudness = do_loudness;
}
+
+void loudness_process_blocks(float *fbufs, unsigned int channel_length,
+ unsigned int number_of_channels, float gain) {
+ unsigned int channel_number, sample_index;
+ float *sample_pointer = fbufs;
+ for (channel_number = 0; channel_number < number_of_channels; channel_number++) {
+ for (sample_index = 0; sample_index < channel_length; sample_index++) {
+ *sample_pointer = loudness_process(&loudness_filters[channel_number], *sample_pointer * gain);
+ sample_pointer++;
+ }
+ }
+}
+
+void loudness_reset() {
+ unsigned int i;
+ for (i = 0; i < MAXCHANNELS; i++) {
+ loudness_filters[i].i1 = 0.0;
+ loudness_filters[i].i2 = 0.0;
+ loudness_filters[i].o1 = 0.0;
+ loudness_filters[i].o2 = 0.0;
+ }
+}
\ No newline at end of file
#pragma once
+#include "player.h"
#include <stdio.h>
typedef struct {
float a0, a1, a2, b1, b2;
+} loudness_processor_static;
+
+typedef struct {
float i1, i2, o1, o2;
-} loudness_processor;
+} loudness_processor_dynamic;
-extern loudness_processor loudness_r;
-extern loudness_processor loudness_l;
+// extern loudness_processor_dynamic loudness_r;
+// extern loudness_processor_dynamic loudness_l;
void loudness_set_volume(float volume);
-float loudness_process(loudness_processor *p, float sample);
+float loudness_process(loudness_processor_dynamic *p, float sample);
+void loudness_update(rtsp_conn_info *conn);
+
+void loudness_reset();
+void loudness_process_blocks(float *fbufs, unsigned int channel_length,
+ unsigned int number_of_channels, float gain);
\ No newline at end of file
+++ /dev/null
-all: shairport-sync.1 shairport-sync.html
-
-%.1: %.1.xml
- xmltoman $*.1.xml > $*.tmp && mv $*.tmp $*.1
-
-%.html: %.1.xml
- xsltproc xmltoman.xsl $*.1.xml > $*.tmp && mv $*.tmp $*.html
-
-clean:
- rm shairport-sync.1
- rm shairport-sync.html
--- /dev/null
+man_MANS = shairport-sync.1
+
+if USE_XMLTOMAN
+shairport-sync.1: shairport-sync.1.xml
+ xmltoman $< > $@
+
+# It seems that xmlmantohtml is not working anymore (20240905)
+# all-local: shairport-sync.html
+
+
+# shairport-sync.html: shairport-sync.1.xml
+# xmlmantohtml $< > $@
+
+clean:
+ rm -f shairport-sync.1
+# rm -f shairport-sync.html
+endif
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="iso-8859-15"?>
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html xmlns="http://www.w3.org/1999/xhtml">
- <head>
- <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-15" />
- <title>shairport-sync(7)</title>
- <style type="text/css">
- body { color: black; background-color: white; }
- a:link, a:visited { color: #900000; }
- h1 { text-transform:uppercase; font-size: 18pt; }
- p { margin-left:1cm; margin-right:1cm; }
- .cmd { font-family:monospace; }
- .file { font-family:monospace; }
- .arg { font-family:monospace; font-style: italic; }
- .opt { font-family:monospace; font-weight: bold; }
- .manref { font-family:monospace; }
- .option .optdesc { margin-left:2cm; }
- </style>
- </head>
- <body><h1>Name</h1><p>shairport-sync - AirPlay and AirPlay 2 Audio Player</p>
-
- <h1>Synopsis</h1>
-
- <p class="cmd">shairport-sync <span class="opt">[-djvw]</span>
- <span class="opt">[-a </span><span class="arg">service-name</span><span class="opt"> | --name=</span><span class="arg">service-name</span><span class="opt">]</span>
- <span class="opt">[-B </span><span class="arg">command</span><span class="opt"> | --onstart=</span><span class="arg">command</span><span class="opt">]</span>
- <span class="opt">[-c </span><span class="arg">configurationfile</span><span class="opt"> | --configfile=</span><span class="arg">configurationfile</span><span class="opt">]</span>
- <span class="opt">[-d | --daemon]</span>
- <span class="opt">[-E </span><span class="arg">command</span><span class="opt"> | --onstop=</span><span class="arg">command</span><span class="opt">]</span>
- <span class="opt">[-g | --get-cover-art]</span>
- <span class="opt">[-j | --justDaemoniseNoPIDFile]</span>
- <span class="opt">[--logOutputLevel]</span>
- <span class="opt">[--log-to-syslog]</span>
- <span class="opt">[-L </span><span class="arg">latency</span><span class="opt"> | --latency=</span><span class="arg">latency</span><span class="opt">]</span>
- <span class="opt">[-m </span><span class="arg">backend</span><span class="opt"> | --mdns=</span><span class="arg">backend</span><span class="opt">]</span>
- <span class="opt">[-M | --metadata-enable]</span>
- <span class="opt">[-o </span><span class="arg">backend</span><span class="opt"> | --output=</span><span class="arg">backend</span><span class="opt">]</span>
- <span class="opt">[-p </span><span class="arg">port</span><span class="opt"> | --port=</span><span class="arg">port</span><span class="opt">]</span>
- <span class="opt">[--password=</span><span class="arg">secret</span><span class="opt">]</span>
- <span class="opt">[-r </span><span class="arg">threshold</span><span class="opt"> | --resync=</span><span class="arg">threshold</span><span class="opt">]</span>
- <span class="opt">[--statistics]</span>
- <span class="opt">[-S </span><span class="arg">mode</span><span class="opt"> | --stuffing=</span><span class="arg">mode</span><span class="opt">]</span>
- <span class="opt">[-t </span><span class="arg">timeout</span><span class="opt"> | --timeout=</span><span class="arg">timeout</span><span class="opt">]</span>
- <span class="opt">[--tolerance=</span><span class="arg">frames</span><span class="opt">]</span>
- <span class="opt">[-v | --verbose]</span>
- <span class="opt">[-w | --wait-cmd]</span>
- <span class="opt">[-- </span><span class="arg">audio_backend_options</span><span class="opt">]</span>
- </p>
- <p class="cmd">shairport-sync <span class="opt">-X | --displayConfig</span></p>
- <p class="cmd">shairport-sync <span class="opt">-h</span></p>
- <p class="cmd">shairport-sync <span class="opt">-k</span></p>
- <p class="cmd">shairport-sync <span class="opt">-V</span></p>
-
-
- <h1>Description</h1>
- <p>Shairport Sync plays AirPlay audio.
- It can be built to stream either from "classic" AirPlay (aka "AirPlay 1")
- or from AirPlay 2 devices.</p>
-
- <p>AirPlay 2 support is limited, and AirPlay 2 from iTunes for Windows is not supported.
- For AirPlay 2 operation, a companion program called <span class="opt">nqptp</span> must be installed.</p>
-
- <p>Please see <a class="url" href="https://github.com/mikebrady/shairport-sync">https://github.com/mikebrady/shairport-sync</a> for details.</p>
-
- <p>The name of the Shairport Sync executable is <span class="opt">shairport-sync</span>.</p>
-
-
-
-
- <h1>Configuration File Settings</h1>
- <p>You should use the configuration file for setting up Shairport Sync because --
- apart from a few special-purpose commands -- it has a much richer set of options
- than are available on the command line.
- This file is usually <span class="file">shairport-sync.conf</span> and is generally located in the
- System Configuration Directory, which is normally the <span class="file">/etc</span> directory in
- Linux or the <span class="file">/usr/local/etc</span> directory in BSD unixes.
- You may need to have root privileges to modify it.</p>
-
- <p>(Note: Shairport Sync may have been compiled to use a different configuration
- directory. You can determine which by performing the command <span class="file">$ shairport-sync
- -V</span>. The last item in the output string is the value of the
- <span class="opt">sysconfdir</span>, i.e. the System Configuration Directory.)</p>
-
- <p>Within the configuration file, settings are organised into groups, for
- example, there is a <span class="opt">general</span> group of
- standard settings, and there is an <span class="opt">alsa</span> group with settings
- that pertain to the <span class="opt">ALSA</span> back end.
- Here is an example of a typical configuration file:</p>
-
- <p><span class="opt">general = {</span></p>
- <p><p><span class="opt">name = "Mike's Boombox";</span></p></p>
- <p><span class="opt">};</span></p>
- <p><span class="opt"></span></p>
- <p><span class="opt">alsa = {</span></p>
- <p><p><span class="opt">output_device = "hw:0";</span></p></p>
- <p><p><span class="opt">mixer_control_name = "PCM";</span></p></p>
- <p><span class="opt">};</span></p>
-
- <p>Users generally only need to set (1) the service name and
- (2) the output device.
-
- If the <span class="opt">name</span> setting is omitted, the service name is derived from the system's hostname.
-
- By default, the <span class="opt">ALSA</span> backend will be chosen if included in the build.
-
- If the (alsa) output device has a mixer that can be used for volume
- control, then (3) the mixer name should be specified. It is important
- to do this if the mixer exists. Otherwise, the
- maximum output from the output device will be whatever setting the mixer happens to
- have, which will be a matter of chance and which could be very low or even silent.</p>
-
- <p>A sample configuration file with all possible settings, but with all of them
- commented out, is installed at <span class="file">shairport-sync.conf.sample</span>, within the
- System Configuration Directory -- <span class="file">/etc</span> in Linux,
- <span class="file">/usr/local/etc</span> in BSD unixes.</p>
-
- <p>The sample configuration file includes extensive documentation of the settings.
- and is also available at
- <a class="url" href="https://github.com/mikebrady/shairport-sync/blob/master/scripts/shairport-sync.conf">https://github.com/mikebrady/shairport-sync/blob/master/scripts/shairport-sync.conf</a>.
- Please refer to it for the most up-to-date information on configuration file settings.</p>
-
-
-
- <h1>Options</h1>
-
- <p>There are two kinds of command-line options for shairport-sync:
- regular <span class="opt">program options</span> and <span class="opt">audio backend options</span>.
- Program options are
- always listed first, followed by any audio backend options, preceded by
- a <span class="opt">--</span> symbol.</p>
-
- <p>See the EXAMPLES section for sample usages.</p>
-
- <h1>Program Options</h1>
- <p>Program Options are used by shairport-sync itself.</p>
-
-
- <div class="option">
- <p><span class="opt">-a </span><span class="arg">service name</span><span class="opt"> | --name=</span><span class="arg">service
- name</span></p>
- <div class="optdesc"><p>
- Use this <span class="arg">service name</span> to identify this player in iTunes, etc.</p>
-
- <p>The following substitutions are allowed:
- <span class="opt">%h</span> for the computer's hostname,
- <span class="opt">%H</span> for the computer's hostname with the first letter capitalised (ASCII
- only),
- <span class="opt">%v</span> for the shairport-sync version number, e.g. "3.0.1" and
- <span class="opt">%V</span> for the shairport-sync version string, e.g.
- "3.0.1-OpenSSL-Avahi-ALSA-soxr-metadata-sysconfdir:/etc".</p>
- <p>The default is "%H", which is replaced by the hostname with the first letter
- capitalised.</p>
- </div>
- </div>
-
- <div class="option">
- <p><span class="opt">-B </span><span class="arg">program</span><span class="opt"> | --on-start=</span><span class="arg">program</span></p>
- <div class="optdesc"><p>
- Execute <span class="arg">program</span> when playback is about to begin. Specify the
- full path to the program, e.g. <span class="file">/usr/bin/logger</span>.
- Executable scripts can be used, but they must have the appropriate shebang
- (<span class="file">#!/bin/sh</span>) in the headline.</p>
-
- <p>If you want shairport-sync to wait until the command has
- completed before starting to play, select the <span class="opt">-w</span> option as well.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-c </span><span class="arg">filename</span><span class="opt"> | --configfile=</span><span class="arg">filename</span></p>
- <div class="optdesc"><p>
- Read configuration settings from <span class="arg">filename</span>. The default is to read them from
- the <span class="file">shairport-sync.conf</span> in the System Configuration Directory --
- <span class="file">/etc</span> in Linux, <span class="file">/usr/local/etc</span> in BSD unixes.
- For information about configuration settings, see the "Configuration File Settings"
- section above.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-d | --daemon</span></p>
- <div class="optdesc"><p>
- Instruct shairport-sync to demonise itself. It will write its
- Process ID (PID) to a file, usually at
- <span class="file">/var/run/shairport-sync/shairport-sync.pid</span>, which is used by the
- <span class="opt">-k</span>, <span class="opt">-D</span> and <span class="opt">-R</span> options to locate
- the daemon at a later time. See also the <span class="opt">-j</span> option. Only available if
- shairport-sync has been compiled with libdaemon support.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-E </span><span class="arg">program</span><span class="opt"> | --on-stop=</span><span class="arg">program</span></p>
- <div class="optdesc"><p>
- Execute <span class="arg">program</span> when playback has ended. Specify the
- full path to the program, e.g. <span class="file">/usr/bin/logger</span>.
- Executable scripts can be used, but they must have the appropriate shebang
- (<span class="file">#!/bin/sh</span>) in the headline.</p>
- <p>If you want shairport-sync to wait until the command has
- completed before continuing, select the <span class="opt">-w</span> option as well.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-g | --get-coverart</span></p>
- <div class="optdesc"><p>
- This option requires the <span class="opt">-M | --metadata-enable</span> option to be set, and enables
- shairport-sync to request cover art from the source and to process it as metadata.</p>
- </div>
- </div>
-
- <div class="option">
- <p><span class="opt">-h | --help</span></p>
- <div class="optdesc"><p>
- Print brief help message and exit.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-j | justDaemoniseNoPIDFile</span></p>
- <div class="optdesc"><p>
- Instruct shairport-sync to demonise itself. Unlike the <span class="opt">-d</span> option, it will
- not write a Process ID (PID) to a file -- it will just (hence the "j") demonise
- itself. Only available if shairport-sync has been compiled with libdaemon support.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-k | --kill</span></p>
- <div class="optdesc"><p>
- Kill the shairport-sync daemon and exit. (Requires that the daemon has
- written its PID to an agreed file -- see the <span class="opt">-d</span> option. Only available if
- shairport-sync has been compiled with libdaemon support.)
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">--logOutputLevel</span></p>
- <div class="optdesc"><p>
- Use this to log the volume level when the volume is changed. It may be useful if you
- are trying to determine a suitable value for the maximum volume level. Not available
- as a configuration file setting.
- </p>
- </div>
- </div>
-
- <div class="option">
- <p><span class="opt">--log-to-syslog</span></p>
- <div class="optdesc"><p>
- Warnings, error messages and messages are sent, by default, to <span class="file">STDERR</span>.
- Use this option to route these messages to the <span class="opt">syslog</span> instead.
- This is intended for use when Shairport Sync is operating as a daemon.
- </p><p>See also <span class="opt">--displayConfig</span>.</p>
- </div>
- </div>
-
- <div class="option">
- <p><span class="opt">-L | --latency=</span><span class="arg">latency</span></p>
- <div class="optdesc"><p>
- Use this to set the <span class="arg">default latency</span>, in frames, for audio coming from an
- unidentified source or from an iTunes Version 9 or earlier source. The standard value
- for the <span class="arg">default latency</span> is 88,200 frames, where there are 44,100
- frames to the second.
- </p>
- <p>Please note that this feature is deprecated and will be removed in a future version
- of shairport-sync.</p>
- </div>
- </div>
-
- <div class="option">
- <p><span class="opt">-M | --metadata-enable</span></p>
- <div class="optdesc"><p>
- Ask the client to send metadata. It will be sent, along with metadata generated
- by shairport-sync itself, to a pipe and will also be
- sent as UDP packets.
- If you add the <span class="opt">-g | --get-cover-art</span>
- then cover art included, where available. See <a class="url" href="https://github.com/mikebrady/shairport-sync-metadata-reader">https://github.com/mikebrady/shairport-sync-metadata-reader</a>
- for a sample metadata reader.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">--metadata-pipename=</span><span class="arg">pathname</span></p>
- <div class="optdesc"><p>
- Specify the path name for the metadata pipe.
- Note that <span class="opt">shairport-sync</span> will need write permission on that directory and pipe.
- The default is <span class="file">/tmp/shairport-sync-metadata</span>.
- If you rename the <span class="opt">shairport-sync</span> executable, the default pipe name will change accordingly.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-m </span><span class="arg">mdnsbackend</span><span class="opt"> | --mdns=</span><span class="arg">mdnsbackend</span></p>
- <div class="optdesc"><p>
- Force the use of the specified mDNS backend to advertise the
- player on the network. The default is to try all mDNS backends in order until one
- works.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-o </span><span class="arg">outputbackend</span><span class="opt"> |
- --output=</span><span class="arg">outputbackend</span></p>
- <div class="optdesc"><p>
- Force the use of the specified output backend to play the audio.
- The default is to try the first one.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-p </span><span class="arg">port</span><span class="opt"> | --port=</span><span class="arg">port</span></p>
- <div class="optdesc"><p>
- Listen for play requests on <span class="arg">port</span>. The default is to use port
- 5000 for AirPlay and 7000 for AirPlay 2.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">--password=</span><span class="arg">secret</span></p>
- <div class="optdesc"><p>
- Require the password <span class="arg">secret</span> to be able to connect and stream to the
- service. (This only works for AirPlay and not for AirPlay 2.)
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-r </span><span class="arg">threshold</span><span class="opt"> | --resync=</span><span class="arg">threshold</span></p>
- <div class="optdesc"><p>
- Resynchronise if timings differ by more than <span class="arg">threshold</span> frames.
- If the output timing differs from the source timing by more than
- the threshold, output will be muted and a full resynchronisation
- will occur. The default threshold is 2,205 frames, i.e. 50
- milliseconds. Specify <span class="opt">0</span> to disable resynchronisation. This setting is
- deprecated and will be removed in a future version of shairport-sync.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">--statistics</span></p>
- <div class="optdesc"><p>
- Print some performance information to <span class="file">STDERR</span>, or to <span class="opt">syslog</span> if the <span class="opt">-log-to-syslog</span> command line option is also chosen.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-S </span><span class="arg">mode</span><span class="opt"> | --stuffing=</span><span class="arg">mode</span></p>
- <div class="optdesc"><p>
- Interpolate ("stuff") the audio stream using the <span class="arg">mode</span>.
- "Stuffing" refers to the
- process of adding or removing frames of audio to or from the
- stream sent to the output device in order to keep it synchronised
- with the player.
- The <span class="opt">basic</span> mode is normally almost completely inaudible.
- The alternative mode, <span class="opt">soxr</span>, is even less obtrusive but
- requires much more processing power. For this mode, support for
- <span class="opt">libsoxr</span>, the SoX Resampler Library, must be selected when
- <span class="opt">shairport-sync</span> is built.
-
- The default setting, <span class="opt">auto</span>, allows Shairport Sync to choose
- <span class="opt">soxr</span> mode if the system is powerful enough.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-t </span><span class="arg">timeout</span><span class="opt"> | --timeout=</span><span class="arg">timeout</span></p>
- <div class="optdesc"><p>
- Exit play mode if the stream disappears for more than <span class="arg">timeout</span>
- seconds.</p>
- <p>When shairport-sync plays an audio stream, it starts a play
- session and will return a busy signal to any other sources that
- attempt to use it. If the audio stream disappears for longer
- than <span class="arg">timeout</span> seconds, the play session will be terminated.
- If you specify a timeout time of <span class="opt">0</span>,
- shairport-sync will never signal that
- it is busy and will not prevent other sources from "barging in"
- on an existing play session. The default value is 120 seconds.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">--tolerance=</span><span class="arg">frames</span></p>
- <div class="optdesc"><p>
- Allow playback to be up to <span class="arg">frames</span> out of exact synchronization before
- attempting to correct it.
- The default is 88 frames, i.e. 2 ms. The smaller the tolerance, the more likely it is
- that overcorrection will occur.
- Overcorrection is when more corrections (insertions and deletions) are made than are
- strictly necessary to keep the stream in sync. Use the <span class="opt">--statistics</span> option
- to monitor correction levels. Corrections should not greatly exceed net corrections.
- This setting is deprecated and will be removed in a future version of shairport-sync.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-V | --version</span></p>
- <div class="optdesc"><p>
- Print version information and exit.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-v | --verbose</span></p>
- <div class="optdesc"><p>
- Print debug information to the <span class="file">STDERR</span>, or to <span class="opt">syslog</span> if the <span class="opt">-log-to-syslog</span> command line option is also chosen.
- Repeat up to three times (i.e. <span class="opt">-vv</span> or <span class="opt">-vvv</span>) for more detail. You should use <span class="opt">-vvv</span> very sparingly -- it is really noisy.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-w | --wait-cmd</span></p>
- <div class="optdesc"><p>
- Wait for commands specified using <span class="opt">-B</span> or <span class="opt">-E</span> to complete before
- continuing execution.
- </p></div>
- </div>
-
- <div class="option">
- <p><span class="opt">-X | --displayConfig</span></p>
- <div class="optdesc"><p>
- This logs information relating to the configuration of Shairport Sync.
- It can be very useful for debugging. The information logged is
- some host OS information, the Shairport Sync version string
- (which indicates the build options used when <span class="opt">shairport-sync</span> was built),
- the contents of the command line that invoked Shairport Sync,
- the name of the configuration file and the active settings therein.</p>
- <p>If this is the only option on the command line, <span class="opt">shairport-sync</span> will
- terminate after displaying the information.</p>
- </div>
- </div>
-
- <h1>Audio Backend Options</h1>
- <p>Audio Backend Options are command-line options that are passed to the chosen audio backend.
- They are always preceded by the <span class="opt">--</span> symbol to introduce them and to separate them from
- any preceding program options. In this way, option letters can be used as program
- options and reused as audio backend options without ambiguity.</p>
-
- <p>Audio backends are listed with their corresponding Audio Backend Options in the help text provided by the help (<span class="opt">-h</span> or <span class="opt">--help</span>) option.</p>
-
-
-
- <h1>Examples</h1>
- <p>Here is a slightly contrived example:</p>
- <p class="cmd">shairport-sync
- <span class="opt">-a "Joe's Stereo"</span>
- <span class="opt">-o alsa</span>
- <span class="opt">--</span>
- <span class="opt">-d hw:1,0</span>
- <span class="opt">-m hw:1</span>
- <span class="opt">-c PCM</span>
- </p>
- <p>The program will be visible as
- "Joe's Stereo" ( <span class="opt">-a "Joe's Stereo"</span> ).
- The program option <span class="opt">-o alsa</span> specifies that the <span class="opt">alsa</span> backend be used, thus that audio should be output into the <span class="opt">ALSA</span> audio subsystem.
- The audio backend options following the <span class="opt">--</span> separator are passed to the <span class="opt">alsa</span> backend and specify
- that the audio will be output on subdevice 0 of soundcard 1
- ( <span class="opt">-d hw:1,0</span> ) and will take advantage of the same sound card's mixer
- ( <span class="opt">-m hw:1</span> ) using the level control named "PCM" ( <span class="opt">-c "PCM"</span> ).
- </p>
- <p>The example above is slightly contrived:
- Firstly, if the <span class="opt">alsa</span> backend has been included in the build, it will be the default, so it doesn't need to be specified and the <span class="opt">-o alsa</span> option could be omitted.
- Secondly, subdevice 0 is the default for a soundcard, so the output device could simply be written <span class="opt">-d hw:1</span>.
- Thirdly, when a mixer name is given ( <span class="opt">-c "PCM"</span> ), the default is that the mixer is on the output device, so the <span class="opt">-m hw:1</span> is unnecessary here.
- Using these defaults and simplifications gives the following command:</p>
- <p class="cmd">shairport-sync
- <span class="opt">-a "Joe's Stereo"</span>
- <span class="opt">--</span>
- <span class="opt">-d hw:1</span>
- <span class="opt">-c PCM</span>
- </p>
-
-
-
- <h1>Credits</h1>
- <p>Mike Brady (<a class="url" href="https://github.com/mikebrady">https://github.com/mikebrady</a>) developed Shairport Sync from Shairport by James Wah (<a class="url" href="https://github.com/abrasive">https://github.com/abrasive</a>).</p>
-
-
- <h1>Comments</h1>
- <p>This man page was written using <a class="manref" href="http://masqmail.cx/xml2man/">xml2man(1)</a> by Oliver Kurth.</p>
-
-
- </body>
-</html>
/*
* mDNS registration handler. This file is part of Shairport.
* Copyright (c) James Laird 2013
- * Modifications, updates and additions (c) Mike Brady 2014 -- 2020
+ * Modifications, updates and additions (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
}
if (*b == NULL)
- warn("%s mDNS backend not found");
+ warn("mDNS backend not found");
} else {
// default -- pick the first back end
for (b = mdns_backends; *b; b++) {
/*
#define MDNS_RECORD_WITH_METADATA \
- "tp=UDP", "sm=false", "ek=1", "et=0,1", "cn=0,1", "ch=2", METADATA_EXPRESSION, "ss=16", \
+ "tp=UDP", "sm=false", "ek=1", "et=0,1", "cn=0,1", "ch=2", METADATA_EXPRESSION, "ss=16", \
"sr=44100", "vn=3", "txtvers=1", config.password ? "pw=true" : "pw=false"
*/
#define MDNS_RECORD_WITH_METADATA \
"sf=0x4", "fv=76400.10", "am=ShairportSync", "vs=105.1", "tp=TCP,UDP", "vn=65537", \
- METADATA_EXPRESSION, "ss=16", "sr=44100", "da=true", "sv=false", "et=0,1", "ek=1", \
- "cn=0,1", "ch=2", "txtvers=1", config.password ? "pw=true" : "pw=false"
+ METADATA_EXPRESSION, "ss=16", "sr=44100", "da=true", "sv=false", "et=0,1", "ek=1", "cn=0,1", \
+ "ch=2", "txtvers=1", config.password ? "pw=true" : "pw=false"
#endif
#define MDNS_RECORD_WITHOUT_METADATA \
"sf=0x4", "fv=76400.10", "am=ShairportSync", "vs=105.1", "tp=TCP,UDP", "vn=65537", "ss=16", \
- "sr=44100", "da=true", "sv=false", "et=0,1", "ek=1", "cn=0,1", "ch=2", "txtvers=1", \
+ "sr=44100", "da=true", "sv=false", "et=0,1", "ek=1", "cn=0,1", "ch=2", "txtvers=1", \
config.password ? "pw=true" : "pw=false"
#endif // _MDNS_H
/*
* Embedded Avahi client. This file is part of Shairport.
* Copyright (c) James Laird 2013
- * Additions for metadata and for detecting IPv6 Copyright (c) Mike Brady 2015--2019
+ * Additions for maintenance, metadata, AirPlay 2 and IPv6 Copyright (c) Mike Brady 2015--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
AVAHI_GCC_UNUSED AvahiProtocol protocol, AvahiResolverEvent event,
const char *name, const char *type, const char *domain,
__attribute__((unused)) const char *host_name,
- __attribute__((unused)) const AvahiAddress *address, uint16_t port,
+#ifdef CONFIG_METADATA
+ __attribute__((unused)) const AvahiAddress *address, uint16_t lport,
+#else
+ __attribute__((unused)) const AvahiAddress *address,
+ __attribute__((unused)) uint16_t lport,
+#endif
__attribute__((unused)) AvahiStringList *txt,
__attribute__((unused)) AvahiLookupResultFlags flags, void *userdata) {
// debug(1,"resolve_callback, event %d.", event);
/* Called whenever a service has been resolved successfully or timed out */
switch (event) {
case AVAHI_RESOLVER_FAILURE:
- debug(2, "(Resolver) Failed to resolve service '%s' of type '%s' in domain '%s': %s.", name,
+ debug(3, "(Resolver) Failed to resolve service '%s' of type '%s' in domain '%s': %s.", name,
type, domain, avahi_strerror(avahi_client_errno(avahi_service_resolver_get_client(r))));
break;
case AVAHI_RESOLVER_FOUND: {
dacp_monitor_port_update_callback(dacpid, port);
#endif
#ifdef CONFIG_METADATA
- char portstring[20];
- memset(portstring, 0, sizeof(portstring));
- snprintf(portstring, sizeof(portstring), "%u", port);
- send_ssnc_metadata('dapo', portstring, strlen(portstring), 0);
+ uint32_t nport = lport;
+ nport = htonl(nport);
+ send_ssnc_metadata('dapo', (const char *)&nport, sizeof(nport), 0);
#endif
}
} else {
/* Called whenever a new services becomes available on the LAN or is removed from the LAN */
switch (event) {
case AVAHI_BROWSER_FAILURE:
- warn("avahi: browser failure.",
+ warn("avahi: browser failure: %s.",
avahi_strerror(avahi_client_errno(avahi_service_browser_get_client(b))));
avahi_threaded_poll_quit(tpoll);
break;
avahi_strerror(avahi_client_errno(client)));
break;
case AVAHI_BROWSER_REMOVE:
- debug(2, "(Browser) REMOVE: service '%s' of type '%s' in domain '%s'.", name, type, domain);
+ debug(3, "(Browser) REMOVE: service '%s' of type '%s' in domain '%s'.", name, type, domain);
#ifdef CONFIG_DACP_CLIENT
dacp_browser_struct *dbs = (dacp_browser_struct *)userdata;
char *dacpid = strstr(name, "iTunes_Ctrl_");
/* A service name collision with a remote service
* happened. Let's pick a new name */
- debug(1, "avahi name collision -- look for another");
+ if (service_name)
+ debug(1, "avahi name collision with \"%s\" -- look for another", service_name);
+ else
+ debug(1, "avahi name collision (name not available) -- look for another");
+
n = avahi_alternative_service_name(service_name);
if (service_name)
avahi_free(service_name);
/*
* Embedded dns-sd client. This file is part of Shairport.
* Copyright (c) Paul Lietar 2013
+ * Copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
/*
* mDNS registration handler. This file is part of Shairport.
* Copyright (c) Paul Lietar 2013
- * Amendments and updates copyright (c) Mike Brady 2014 -- 2019
+ * Amendments and updates copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
/*
* mDNS registration handler. This file is part of Shairport.
* Copyright (c) Paul Lietar 2013
+ * Copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
* then you need a metadata hub,
* where everything is stored
* This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2017--2022
+ * Copyright (c) Mike Brady 2017--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
// debug(1, "locking metadata hub for writing");
if (pthread_rwlock_trywrlock(&metadata_hub_re_lock) != 0) {
if (last_metadata_hub_modify_prolog_file)
- debug(2, "Metadata_hub write lock at \"%s:%d\" is already taken at \"%s:%d\" -- must wait.",
+ debug(3, "Metadata_hub write lock at \"%s:%d\" is already taken at \"%s:%d\" -- must wait.",
filename, linenumber, last_metadata_hub_modify_prolog_file,
last_metadata_hub_modify_prolog_line);
else
debug(2, "Metadata_hub write lock is already taken by unknown -- must wait.");
metadata_hub_re_lock_access_is_delayed = 0;
pthread_rwlock_wrlock(&metadata_hub_re_lock);
- debug(2, "Okay -- acquired the metadata_hub write lock at \"%s:%d\".", filename, linenumber);
+ debug(3, "Okay -- acquired the metadata_hub write lock at \"%s:%d\".", filename, linenumber);
} else {
if (last_metadata_hub_modify_prolog_file) {
free(last_metadata_hub_modify_prolog_file);
// get the one-byte number as an unsigned number
int song_data_kind = data[0]; // one byte
song_data_kind = song_data_kind & 0xFF; // unsigned
- debug(2, "MH Song Data Kind seen: \"%d\" of length %u.", song_data_kind, length);
+ debug(3, "MH Song Data Kind seen: \"%d\" of length %u.", song_data_kind, length);
if ((song_data_kind != metadata_store.song_data_kind) ||
(metadata_store.song_data_kind_is_valid == 0)) {
metadata_store.song_data_kind = song_data_kind;
metadata_store.song_data_kind_changed = 1;
metadata_store.song_data_kind_is_valid = 1;
- debug(2, "MH Song Data Kind set to: \"%d\"", metadata_store.song_data_kind);
+ debug(3, "MH Song Data Kind set to: \"%d\"", metadata_store.song_data_kind);
metadata_packet_item_changed = 1;
}
} break;
vl = vl << 32; // shift them into the correct location
uint64_t ul = ntohl(*(uint32_t *)(data + sizeof(uint32_t))); // and the low order 32 bits
vl = vl + ul;
- debug(2, "MH Item ID seen: \"%" PRIx64 "\" of length %u.", vl, length);
+ debug(3, "MH Item ID seen: \"%" PRIx64 "\" of length %u.", vl, length);
if ((vl != metadata_store.item_id) || (metadata_store.item_id_is_valid == 0)) {
metadata_store.item_id = vl;
metadata_store.item_id_changed = 1;
metadata_store.item_id_is_valid = 1;
- debug(2, "MH Item ID set to: \"%" PRIx64 "\"", metadata_store.item_id);
+ debug(3, "MH Item ID set to: \"%" PRIx64 "\"", metadata_store.item_id);
metadata_packet_item_changed = 1;
}
} break;
case 'astm': {
uint32_t ui = ntohl(*(uint32_t *)data);
- debug(2, "MH Song Time seen: \"%u\" of length %u.", ui, length);
+ debug(3, "MH Song Time seen: \"%u\" of length %u.", ui, length);
if ((ui != metadata_store.songtime_in_milliseconds) ||
(metadata_store.songtime_in_milliseconds_is_valid == 0)) {
metadata_store.songtime_in_milliseconds = ui;
metadata_store.songtime_in_milliseconds_changed = 1;
metadata_store.songtime_in_milliseconds_is_valid = 1;
- debug(2, "MH Song Time set to: \"%u\"", metadata_store.songtime_in_milliseconds);
+ debug(3, "MH Song Time set to: \"%u\"", metadata_store.songtime_in_milliseconds);
metadata_packet_item_changed = 1;
}
} break;
case 'asal':
cs = strndup(data, length);
if (string_update(&metadata_store.album_name, &metadata_store.album_name_changed, cs)) {
- debug(2, "MH Album name set to: \"%s\"", metadata_store.album_name);
+ debug(3, "MH Album name set to: \"%s\"", metadata_store.album_name);
metadata_packet_item_changed = 1;
}
free(cs);
case 'asar':
cs = strndup(data, length);
if (string_update(&metadata_store.artist_name, &metadata_store.artist_name_changed, cs)) {
- debug(2, "MH Artist name set to: \"%s\"", metadata_store.artist_name);
+ debug(3, "MH Artist name set to: \"%s\"", metadata_store.artist_name);
metadata_packet_item_changed = 1;
}
free(cs);
cs = strndup(data, length);
if (string_update(&metadata_store.album_artist_name,
&metadata_store.album_artist_name_changed, cs)) {
- debug(2, "MH Album Artist name set to: \"%s\"", metadata_store.album_artist_name);
+ debug(3, "MH Album Artist name set to: \"%s\"", metadata_store.album_artist_name);
metadata_packet_item_changed = 1;
}
free(cs);
case 'ascm':
cs = strndup(data, length);
if (string_update(&metadata_store.comment, &metadata_store.comment_changed, cs)) {
- debug(2, "MH Comment set to: \"%s\"", metadata_store.comment);
+ debug(3, "MH Comment set to: \"%s\"", metadata_store.comment);
metadata_packet_item_changed = 1;
}
free(cs);
case 'asgn':
cs = strndup(data, length);
if (string_update(&metadata_store.genre, &metadata_store.genre_changed, cs)) {
- debug(2, "MH Genre set to: \"%s\"", metadata_store.genre);
+ debug(3, "MH Genre set to: \"%s\"", metadata_store.genre);
metadata_packet_item_changed = 1;
}
free(cs);
case 'minm':
cs = strndup(data, length);
if (string_update(&metadata_store.track_name, &metadata_store.track_name_changed, cs)) {
- debug(2, "MH Track Name set to: \"%s\"", metadata_store.track_name);
+ debug(3, "MH Track Name set to: \"%s\"", metadata_store.track_name);
metadata_packet_item_changed = 1;
}
free(cs);
case 'ascp':
cs = strndup(data, length);
if (string_update(&metadata_store.composer, &metadata_store.composer_changed, cs)) {
- debug(2, "MH Composer set to: \"%s\"", metadata_store.composer);
+ debug(3, "MH Composer set to: \"%s\"", metadata_store.composer);
metadata_packet_item_changed = 1;
}
free(cs);
cs = strndup(data, length);
if (string_update(&metadata_store.song_description, &metadata_store.song_description_changed,
cs)) {
- debug(2, "MH Song Description set to: \"%s\"", metadata_store.song_description);
+ debug(3, "MH Song Description set to: \"%s\"", metadata_store.song_description);
}
free(cs);
break;
cs = strndup(data, length);
if (string_update(&metadata_store.song_album_artist,
&metadata_store.song_album_artist_changed, cs)) {
- debug(2, "MH Song Album Artist set to: \"%s\"", metadata_store.song_album_artist);
+ debug(3, "MH Song Album Artist set to: \"%s\"", metadata_store.song_album_artist);
metadata_packet_item_changed = 1;
}
free(cs);
case 'assn':
cs = strndup(data, length);
if (string_update(&metadata_store.sort_name, &metadata_store.sort_name_changed, cs)) {
- debug(2, "MH Sort Name set to: \"%s\"", metadata_store.sort_name);
+ debug(3, "MH Sort Name set to: \"%s\"", metadata_store.sort_name);
metadata_packet_item_changed = 1;
}
free(cs);
case 'assa':
cs = strndup(data, length);
if (string_update(&metadata_store.sort_artist, &metadata_store.sort_artist_changed, cs)) {
- debug(2, "MH Sort Artist set to: \"%s\"", metadata_store.sort_artist);
+ debug(3, "MH Sort Artist set to: \"%s\"", metadata_store.sort_artist);
metadata_packet_item_changed = 1;
}
free(cs);
case 'assu':
cs = strndup(data, length);
if (string_update(&metadata_store.sort_album, &metadata_store.sort_album_changed, cs)) {
- debug(2, "MH Sort Album set to: \"%s\"", metadata_store.sort_album);
+ debug(3, "MH Sort Album set to: \"%s\"", metadata_store.sort_album);
metadata_packet_item_changed = 1;
}
free(cs);
case 'assc':
cs = strndup(data, length);
if (string_update(&metadata_store.sort_composer, &metadata_store.sort_composer_changed, cs)) {
- debug(2, "MH Sort Composer set to: \"%s\"", metadata_store.sort_composer);
+ debug(3, "MH Sort Composer set to: \"%s\"", metadata_store.sort_composer);
metadata_packet_item_changed = 1;
}
free(cs);
case 'pcen':
break;
case 'mdst':
- debug(2, "MH Metadata stream processing start.");
+ debug(3, "MH Metadata stream processing start.");
metadata_packet_item_changed = 0;
break;
case 'mden':
if (metadata_packet_item_changed != 0)
- debug(2, "MH Metadata stream processing end with changes.");
+ debug(3, "MH Metadata stream processing end with changes.");
else
- debug(2, "MH Metadata stream processing end without changes.");
+ debug(3, "MH Metadata stream processing end without changes.");
changed = metadata_packet_item_changed;
break;
case 'PICT':
- debug(2, "MH Picture received, length %u bytes.", length);
+ debug(3, "MH Picture received, length %u bytes.", length);
char uri[2048];
if ((length > 16) &&
cs = strndup(data, length);
if (string_update(&metadata_store.client_ip, &metadata_store.client_ip_changed, cs)) {
changed = 1;
- debug(2, "MH Client IP set to: \"%s\"", metadata_store.client_ip);
+ debug(3, "MH Client IP set to: \"%s\"", metadata_store.client_ip);
}
free(cs);
break;
cs = strndup(data, length);
if (string_update(&metadata_store.client_name, &metadata_store.client_name_changed, cs)) {
changed = 1;
- debug(2, "MH Client Name set to: \"%s\"", metadata_store.client_name);
+ debug(3, "MH Client Name set to: \"%s\"", metadata_store.client_name);
}
free(cs);
break;
if (string_update(&metadata_store.progress_string, &metadata_store.progress_string_changed,
cs)) {
changed = 1;
- debug(2, "MH Progress String set to: \"%s\"", metadata_store.progress_string);
+ debug(3, "MH Progress String set to: \"%s\"", metadata_store.progress_string);
}
free(cs);
break;
if (string_update(&metadata_store.frame_position_string,
&metadata_store.frame_position_string_changed, cs)) {
changed = 1;
- debug(2, "MH Frame Position String set to: \"%s\"", metadata_store.frame_position_string);
+ debug(3, "MH Frame Position String set to: \"%s\"", metadata_store.frame_position_string);
}
free(cs);
break;
if (string_update(&metadata_store.first_frame_position_string,
&metadata_store.first_frame_position_string_changed, cs)) {
changed = 1;
- debug(2, "MH First Frame Position String set to: \"%s\"",
+ debug(3, "MH First Frame Position String set to: \"%s\"",
metadata_store.first_frame_position_string);
}
free(cs);
cs = strndup(data, length);
if (string_update(&metadata_store.stream_type, &metadata_store.stream_type_changed, cs)) {
changed = 1;
- debug(2, "MH Stream Type set to: \"%s\"", metadata_store.stream_type);
+ debug(3, "MH Stream Type set to: \"%s\"", metadata_store.stream_type);
+ }
+ free(cs);
+ break;
+ case 'sdsc':
+ cs = strndup(data, length);
+ if (string_update(&metadata_store.source_format, &metadata_store.source_format_changed, cs)) {
+ changed = 1;
+ debug(3, "MH Source Format set to: \"%s\"", metadata_store.source_format);
+ }
+ free(cs);
+ break;
+ case 'odsc':
+ cs = strndup(data, length);
+ if (string_update(&metadata_store.output_format, &metadata_store.output_format_changed, cs)) {
+ changed = 1;
+ debug(3, "MH Output Format set to: \"%s\"", metadata_store.output_format);
}
free(cs);
break;
cs = strndup(data, length);
if (string_update(&metadata_store.server_ip, &metadata_store.server_ip_changed, cs)) {
changed = 1;
- debug(2, "MH Server IP set to: \"%s\"", metadata_store.server_ip);
+ debug(3, "MH Server IP set to: \"%s\"", metadata_store.server_ip);
}
free(cs);
break;
char *stream_type; // Realtime or Buffered
int stream_type_changed;
+ char *source_format; // Format of incoming audio, e.g. AAC/44100/S16_LE/2
+ int source_format_changed;
+
+ char *output_format; // Format of outgoing audio, e.g. 44100/S32_LE/2 (always PCM)
+ int output_format_changed;
+
char *progress_string; // progress string, emitted by the source from time to time
int progress_string_changed;
/*
* This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2018 -- 2020
+ * Copyright (c) Mike Brady 2018--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#include "metadata_hub.h"
#include "mpris-service.h"
+static guint ownerID = 0;
+static GBusType mpris_bus_type = G_BUS_TYPE_SYSTEM; // default is the dbus system message bus
+
MediaPlayer2 *mprisPlayerSkeleton;
MediaPlayer2Player *mprisPlayerPlayerSkeleton;
static gboolean on_handle_quit(MediaPlayer2 *skeleton, GDBusMethodInvocation *invocation,
__attribute__((unused)) gpointer user_data) {
- debug(1, "quit requested (MPRIS interface).");
- type_of_exit_cleanup = TOE_dbus; // request an exit cleanup that is compatible with dbus
- exit(EXIT_SUCCESS);
+ debug(1, ">> quit request...");
+ config.quit_requested_from_glib_mainloop = 1;
+ g_main_loop_quit(config.glib_worker_loop);
media_player2_complete_quit(skeleton, invocation);
return TRUE;
}
const char *empty_string_array[] = {NULL};
- // debug(1, "MPRIS well-known interface name \"%s\" acquired on the %s bus.", name,
- // (config.mpris_service_bus_type == DBT_session) ? "session" : "system");
+ debug(2, "MPRIS well-known interface name \"%s\" acquired on the %s bus.", name,
+ (mpris_bus_type == G_BUS_TYPE_SESSION) ? "session" : "system");
mprisPlayerSkeleton = media_player2_skeleton_new();
mprisPlayerPlayerSkeleton = media_player2_player_skeleton_new();
add_metadata_watcher(mpris_metadata_watcher, NULL);
debug(1, "MPRIS service started at \"%s\" on the %s bus.", name,
- (config.mpris_service_bus_type == DBT_session) ? "session" : "system");
-}
-
-static void on_mpris_name_lost_again(__attribute__((unused)) GDBusConnection *connection,
- const gchar *name,
- __attribute__((unused)) gpointer user_data) {
- warn("could not acquire an MPRIS interface named \"%s\" on the %s bus.", name,
- (config.mpris_service_bus_type == DBT_session) ? "session" : "system");
+ (mpris_bus_type == G_BUS_TYPE_SESSION) ? "session" : "system");
}
static void on_mpris_name_lost(__attribute__((unused)) GDBusConnection *connection,
- __attribute__((unused)) const gchar *name,
- __attribute__((unused)) gpointer user_data) {
- // debug(1, "Could not acquire MPRIS interface \"%s\" on the %s bus -- will try adding the process
- // "
- // "number to the end of it.",
- // name,(mpris_bus_type==G_BUS_TYPE_SESSION) ? "session" : "system");
- pid_t pid = getpid();
- char interface_name[256] = "";
- snprintf(interface_name, sizeof(interface_name), "org.mpris.MediaPlayer2.ShairportSync.i%d", pid);
- GBusType mpris_bus_type = G_BUS_TYPE_SYSTEM;
- if (config.mpris_service_bus_type == DBT_session)
- mpris_bus_type = G_BUS_TYPE_SESSION;
- // debug(1, "Looking for an MPRIS interface \"%s\" on the %s bus.",interface_name,
- // (mpris_bus_type==G_BUS_TYPE_SESSION) ? "session" : "system");
- g_bus_own_name(mpris_bus_type, interface_name, G_BUS_NAME_OWNER_FLAGS_NONE, NULL,
- on_mpris_name_acquired, on_mpris_name_lost_again, NULL, NULL);
+ const gchar *name, __attribute__((unused)) gpointer user_data) {
+ warn("could not acquire an MPRIS interface named \"%s\" on the %s bus.", name,
+ (mpris_bus_type == G_BUS_TYPE_SESSION) ? "session" : "system");
+ ownerID = 0;
}
int start_mpris_service() {
mprisPlayerSkeleton = NULL;
mprisPlayerPlayerSkeleton = NULL;
- GBusType mpris_bus_type = G_BUS_TYPE_SYSTEM;
- if (config.mpris_service_bus_type == DBT_session)
+
+ // set up default message bus
+
+ if (config.dbus_default_message_bus == DBT_session)
+ mpris_bus_type = G_BUS_TYPE_SESSION;
+
+ // look for explicit overrides
+ if (config.mpris_service_bus_type == DBT_system)
+ mpris_bus_type = G_BUS_TYPE_SYSTEM;
+ else if (config.mpris_service_bus_type == DBT_session)
mpris_bus_type = G_BUS_TYPE_SESSION;
- // debug(1, "Looking for an MPRIS interface \"org.mpris.MediaPlayer2.ShairportSync\" on the %s
- // bus.",(mpris_bus_type==G_BUS_TYPE_SESSION) ? "session" : "system");
- g_bus_own_name(mpris_bus_type, "org.mpris.MediaPlayer2.ShairportSync",
- G_BUS_NAME_OWNER_FLAGS_NONE, NULL, on_mpris_name_acquired, on_mpris_name_lost,
- NULL, NULL);
+
+ debug(1, "Looking for an MPRIS interface \"org.mpris.MediaPlayer2.ShairportSync\" on the %s bus.",
+ (mpris_bus_type == G_BUS_TYPE_SESSION) ? "session" : "system");
+ ownerID = g_bus_own_name(mpris_bus_type, "org.mpris.MediaPlayer2.ShairportSync",
+ G_BUS_NAME_OWNER_FLAGS_NONE, NULL, on_mpris_name_acquired,
+ on_mpris_name_lost, NULL, NULL);
return 0; // this is just to quieten a compiler warning
}
+
+void stop_mpris_service() {
+ if (ownerID) {
+ debug(2, "stopping MPRIS service -- unowning ownerID %d.", ownerID);
+ g_bus_unown_name(ownerID);
+ }
+}
extern MediaPlayer2Player *mprisPlayerPlayerSkeleton;
int start_mpris_service();
+void stop_mpris_service();
#endif /* #ifndef MPRIS_SERVICE_H */
// this holds the mosquitto client
struct mosquitto *global_mosq = NULL;
-char *topic = NULL;
int connected = 0;
// mosquitto logging
int level, const char *str) {
switch (level) {
case MOSQ_LOG_DEBUG:
- debug(3, str);
+ debug(3, "%s", str);
break;
case MOSQ_LOG_INFO:
- debug(3, str);
+ debug(3, "%s", str);
break;
case MOSQ_LOG_NOTICE:
- debug(3, str);
+ debug(3, "%s", str);
break;
case MOSQ_LOG_WARNING:
- inform(str);
+ inform("%s", str);
break;
case MOSQ_LOG_ERR: {
die("MQTT: Error: %s\n", str);
debug(2, "[MQTT]: received Message on topic %s: %s\n", msg->topic, payload);
// All recognized commands
- char *commands[] = {"command", "beginff", "beginrew", "mutetoggle", "nextitem",
- "previtem", "pause", "playpause", "play", "stop",
- "playresume", "shuffle_songs", "volumedown", "volumeup", NULL};
+ char *commands[] = {"command", "beginff", "beginrew", "mutetoggle",
+ "nextitem", "previtem", "pause", "playpause",
+ "play", "stop", "playresume", "shuffle_songs",
+ "volumedown", "volumeup", "disconnect", NULL};
int it = 0;
while (commands[it] != NULL) {
if ((size_t)msg->payloadlen >= strlen(commands[it]) &&
strncmp(msg->payload, commands[it], strlen(commands[it])) == 0) {
- debug(2, "[MQTT]: DACP Command: %s\n", commands[it]);
- send_simple_dacp_command(commands[it]);
+ debug(2, "[MQTT]: Received Recognized Command: %s\n", commands[it]);
+ if (strcmp(commands[it], "disconnect") == 0) {
+ debug(2, "[MQTT]: Disconnect Command: %s\n", commands[it]);
+ release_play_lock(NULL); // stop any current session and don't replace it
+ } else {
+ debug(2, "[MQTT]: DACP Command: %s\n", commands[it]);
+ send_simple_dacp_command(commands[it]);
+ }
break;
}
it++;
// function to send autodiscovery messages for Home Assistant
void send_autodiscovery_messages(struct mosquitto *mosq) {
- const char *device_name = config.service_name;
+ const char *device_name = config.service_name;
#ifdef CONFIG_AIRPLAY_2
- const char *device_id = config.airplay_device_id ? config.airplay_device_id : config.service_name;
+ const char *device_id = config.airplay_device_id ? config.airplay_device_id : config.service_name;
#else
- const char *device_id = config.service_name;
+ const char *device_id = config.service_name;
#endif
- const char *device_id_no_colons = str_replace(device_id, ":", "");
- const char *sw_version = get_version_string();
- const char *model = "shairport-sync";
- const char *model_friendly = "Shairport Sync";
- const char *manufacturer = "Mike Brady";
- const char *autodiscovery_prefix = (config.mqtt_autodiscovery_prefix != NULL) ?
- config.mqtt_autodiscovery_prefix : "homeassistant";
-
- char topic[512];
- char payload[1280];
- char device_payload[512];
- char id_string[128];
-
- snprintf(device_payload, sizeof(device_payload),
- "\"device\": {"
- "\"identifiers\": [\"%s\"],"
- "\"name\": \"%s\","
- "\"model\": \"%s\","
- "\"sw_version\": \"%s\","
- "\"manufacturer\": \"%s\""
+ const char *device_id_no_colons = str_replace(device_id, ":", "");
+ const char *sw_version = get_version_string();
+ const char *model = "shairport-sync";
+ const char *model_friendly = "Shairport Sync";
+ const char *manufacturer = "Mike Brady";
+ const char *autodiscovery_prefix = (config.mqtt_autodiscovery_prefix != NULL)
+ ? config.mqtt_autodiscovery_prefix
+ : "homeassistant";
+
+ char topic[512];
+ char payload[1280];
+ char device_payload[512];
+ char id_string[128];
+
+ snprintf(device_payload, sizeof(device_payload),
+ "\"device\": {"
+ "\"identifiers\": [\"%s\"],"
+ "\"name\": \"%s\","
+ "\"model\": \"%s\","
+ "\"sw_version\": \"%s\","
+ "\"manufacturer\": \"%s\""
+ "}",
+ device_id, device_name, model_friendly, sw_version, manufacturer);
+
+ // when adding sensors here, be sure to also update sensor_names and icons below!
+ const char *sensors[] = {"artist",
+ "album",
+ "title",
+ "genre",
+ "format",
+ "output_format",
+ "output_frame_rate",
+ "track_id",
+ "client_ip",
+ "client_mac_address",
+ "client_name",
+ "client_model",
+ "client_device_id",
+ "server_ip",
+ "volume",
+ "active",
+ "playing",
+ NULL};
+
+ const char *sensor_names[] = {"Artist",
+ "Album",
+ "Title",
+ "Genre",
+ "Format",
+ "Output Format",
+ "Output Frame Rate",
+ "Track ID",
+ "Client IP",
+ "Client MAC Address",
+ "Client Name",
+ "Client Model",
+ "Client Device ID",
+ "Server IP",
+ "Volume",
+ "Active Session",
+ "Playing"};
+
+ const char *icons[] = {
+ "mdi:account-music", // artist
+ "mdi:album", // album
+ "mdi:music", // title
+ "mdi:music-box-multiple", // genre
+ "mdi:file", // format
+ "mdi:file", // output format
+ "mdi:file-chart", // output frame rate
+ "mdi:identifier", // track ID
+ "mdi:ip", // client IP
+ "mdi:hexadecimal", // client MAC address
+ "mdi:cellphone-text", // client name
+ "mdi:cellphone-text", // client model
+ "mdi:hexadecimal", // client device ID
+ "mdi:ip-network", // server IP
+ "mdi:volume-high", // volume
+ "mdi:play-box-multiple", // active
+ "mdi:play-box-multiple-outline" // playing
+ };
+
+ for (int i = 0; sensors[i] != NULL; i++) {
+ bool is_binary_sensor =
+ (strcmp(sensors[i], "active") == 0 || strcmp(sensors[i], "playing") == 0);
+ bool is_volume_sensor = strcmp(sensors[i], "volume") == 0;
+
+ snprintf(topic, sizeof(topic), "%s/%ssensor/%s_%s/%s/config", autodiscovery_prefix,
+ is_binary_sensor ? "binary_" : "", model, device_id_no_colons, sensors[i]);
+
+ snprintf(id_string, sizeof(id_string), "%s_%s_%s", model, device_name, sensors[i]);
+
+ snprintf(
+ payload, sizeof(payload),
+ "{"
+ "\"name\": \"%s\","
+ "\"state_topic\": \"%s/%s\","
+ "\"icon\": \"%s\","
+ "\"unique_id\": \"%s\","
+ "\"object_id\": \"%s\","
+ "%s%s%s"
"}",
- device_id, device_name, model_friendly, sw_version, manufacturer);
-
- // when adding sensors here, be sure to also update sensor_names and icons below!
- const char *sensors[] = {
- "artist",
- "album",
- "title",
- "genre",
- "format",
- "output_format",
- "output_frame_rate",
- "track_id",
- "client_ip",
- "client_mac_address",
- "client_name",
- "client_model",
- "client_device_id",
- "server_ip",
- "volume",
- "active",
- "playing",
- NULL
- };
-
- const char *sensor_names[] = {
- "Artist",
- "Album",
- "Title",
- "Genre",
- "Format",
- "Output Format",
- "Output Frame Rate",
- "Track ID",
- "Client IP",
- "Client MAC Address",
- "Client Name",
- "Client Model",
- "Client Device ID",
- "Server IP",
- "Volume",
- "Active Session",
- "Playing"
- };
-
- const char *icons[] = {
- "mdi:account-music", // artist
- "mdi:album", // album
- "mdi:music", // title
- "mdi:music-box-multiple", // genre
- "mdi:file", // format
- "mdi:file", // output format
- "mdi:file-chart", // output frame rate
- "mdi:identifier", // track ID
- "mdi:ip", // client IP
- "mdi:hexadecimal", // client MAC address
- "mdi:cellphone-text", // client name
- "mdi:cellphone-text", // client model
- "mdi:hexadecimal", // client device ID
- "mdi:ip-network", // server IP
- "mdi:volume-high", // volume
- "mdi:play-box-multiple", // active
- "mdi:play-box-multiple-outline" // playing
- };
-
- for (int i = 0; sensors[i] != NULL; i++) {
- bool is_binary_sensor = (strcmp(sensors[i], "active") == 0 || strcmp(sensors[i], "playing") == 0);
- bool is_volume_sensor = strcmp(sensors[i], "volume") == 0;
-
- snprintf(topic, sizeof(topic), "%s/%ssensor/%s_%s/%s/config",
- autodiscovery_prefix, is_binary_sensor ? "binary_" : "",
- model, device_id_no_colons, sensors[i]);
-
- snprintf(id_string, sizeof(id_string), "%s_%s_%s", model, device_name, sensors[i]);
-
- snprintf(payload, sizeof(payload),
- "{"
- "\"name\": \"%s\","
- "\"state_topic\": \"%s/%s\","
- "\"icon\": \"%s\","
- "\"unique_id\": \"%s\","
- "\"object_id\": \"%s\","
- "%s%s%s"
- "}",
- sensor_names[i], config.mqtt_topic, sensors[i], icons[i], id_string, id_string,
- is_binary_sensor ? "\"payload_on\": \"1\",\"payload_off\": \"0\"," : "",
- is_volume_sensor ? "\"value_template\": \"{{ ((value | regex_findall_index("
- "find='^(.+?),', index=0, ignorecase=False) | float / 30 + 1) * 100) | round(0) }}\","
- "\"unit_of_measurement\": \"%\"," : "",
- device_payload);
-
- mosquitto_publish(mosq, NULL, topic, strlen(payload), payload, 0, true);
- debug(2, "[MQTT]: published autodiscovery for %s", id_string);
- }
+ sensor_names[i], config.mqtt_topic, sensors[i], icons[i], id_string, id_string,
+ is_binary_sensor ? "\"payload_on\": \"1\",\"payload_off\": \"0\"," : "",
+ is_volume_sensor
+ ? "\"value_template\": \"{{ ((value | regex_findall_index("
+ "find='^(.+?),', index=0, ignorecase=False) | float / 30 + 1) * 100) | round(0) }}\","
+ "\"unit_of_measurement\": \"%\","
+ : "",
+ device_payload);
+
+ mosquitto_publish(mosq, NULL, topic, strlen(payload), payload, 0, true);
+ debug(2, "[MQTT]: published autodiscovery for %s", id_string);
+ }
}
// helper function to publish under a topic and automatically append the main topic
debug(2, "[MQTT]: publishing under %s", fulltopic);
int rc;
- if ((rc = mosquitto_publish(global_mosq, NULL, fulltopic, length, data, 0, 0)) !=
+ if ((rc = mosquitto_publish(global_mosq, NULL, fulltopic, length, data, 0,
+ config.mqtt_publish_retain)) !=
MOSQ_ERR_SUCCESS) {
switch (rc) {
case MOSQ_ERR_NO_CONN:
int keepalive = 60;
mosquitto_lib_init();
if (!(global_mosq = mosquitto_new(config.service_name, true, NULL))) {
- die("[MQTT]: FATAL: Could not create mosquitto object! %d\n", global_mosq);
+ die("[MQTT]: FATAL: Could not create mosquitto object!");
}
if (config.mqtt_cafile != NULL || config.mqtt_capath != NULL || config.mqtt_certfile != NULL ||
/*
* This file is part of the nqptp distribution (https://github.com/mikebrady/nqptp).
- * Copyright (c) 2021--2023 Mike Brady.
+ * Copyright (c) 2021--2025 Mike Brady.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
<property name='Active' type='b' access='read'/>
<property name="DisableStandby" type="b" access="readwrite" />
<property name="DisableStandbyMode" type="s" access="readwrite" />
- <property name="Loudness" type="b" access="readwrite" />
+ <property name="LoudnessEnabled" type="b" access="readwrite" />
<property name="LoudnessThreshold" type="d" access="readwrite" />
- <property name="Convolution" type="b" access="readwrite" />
+ <property name="ConvolutionEnabled" type="b" access="readwrite" />
<property name="ConvolutionGain" type="d" access="readwrite" />
- <property name="ConvolutionImpulseResponseFile" type="s" access="readwrite" />
+ <property name="ConvolutionImpulseResponseFiles" type="s" access="readwrite" />
+ <property name="ConvolutionMaximumLengthInSeconds" type="d" access="readwrite" />
<property name="DriftTolerance" type="d" access="readwrite" />
<property name='Volume' type='d' access='readwrite'/>
<method name="DropSession"/>
<property name="Protocol" type="s" access="read" />
<property name="ServiceName" type="s" access="read" />
<property name="OutputFormat" type="s" access="read" />
- <property name="OutputRate" type="i" access="read" />
+ <property name="SourceFormat" type="s" access="read" />
<property name="FirstFramePosition" type="s" access="read" />
<property name="FramePosition" type="s" access="read" />
<method name="SetFramePositionUpdateInterval">
int (*pair_remove)(uint8_t **out, size_t *out_len, pair_cb cb, void *cb_arg, const uint8_t *in, size_t in_len);
int (*pair_list)(uint8_t **out, size_t *out_len, pair_list_cb cb, void *cb_arg, const uint8_t *in, size_t in_len);
- struct pair_cipher_context *(*pair_cipher_new)(struct pair_definition *type, int channel, const uint8_t *shared_secret, size_t shared_secret_len);
+ struct pair_cipher_context *(*pair_cipher_new)(struct pair_definition *type, int channel, const uint8_t *shared_secret, size_t shared_secret_len, const char *dynamic_salt_suffix);
void (*pair_cipher_free)(struct pair_cipher_context *cctx);
ssize_t (*pair_encrypt)(uint8_t **ciphertext, size_t *ciphertext_len, const uint8_t *plaintext, size_t plaintext_len, struct pair_cipher_context *cctx);
}
struct pair_cipher_context *
-pair_cipher_new(enum pair_type type, int channel, const uint8_t *shared_secret, size_t shared_secret_len)
+pair_cipher_new(enum pair_type type, int channel, const uint8_t *shared_secret, size_t shared_secret_len, const char *dynamic_salt_suffix)
{
if (!pair[type]->pair_cipher_new)
return NULL;
- return pair[type]->pair_cipher_new(pair[type], channel, shared_secret, shared_secret_len);
+ return pair[type]->pair_cipher_new(pair[type], channel, shared_secret, shared_secret_len, dynamic_salt_suffix);
}
void
* create a ciphering context.
*/
struct pair_cipher_context *
-pair_cipher_new(enum pair_type type, int channel, const uint8_t *shared_secret, size_t shared_secret_len);
+pair_cipher_new(enum pair_type type, int channel, const uint8_t *shared_secret, size_t shared_secret_len, const char *dynamic_salt_suffix);
void
pair_cipher_free(struct pair_cipher_context *cctx);
PAIR_CONTROL_READ,
PAIR_EVENTS_WRITE,
PAIR_EVENTS_READ,
+ PAIR_DATA_WRITE,
+ PAIR_DATA_READ,
};
struct pair_keys_map
// Encryption/decryption of event channel
{ 0, "Events-Salt", "Events-Write-Encryption-Key", "" },
{ 0, "Events-Salt", "Events-Read-Encryption-Key", "" },
+
+ // Encryption/decryption of data channel
+ // The salt is dynamic -- the 64-bit `seed` provided during SETUP, treated as an unsigned number, is appended:
+ // e.g. DataStream-Salt15014746994705656022.
+ // Sincere thanks to Pierre Stahl (LaTeX: St{\aa}hl), https://github.com/postlund.
+ // Please see https://pyatv.dev/documentation/protocols/#remote-control, AirPlay 2 / Remote Control / Data Channel / Encryption
+ { 0, "DataStream-Salt", "DataStream-Output-Encryption-Key", "" },
+ { 0, "DataStream-Salt", "DataStream-Input-Encryption-Key", "" },
};
enum pair_method {
else if (error->value[0] == TLVError_MaxPeers)
*errmsg = "Max peers trying to connect to device\n";
else if (error->value[0] == TLVError_MaxTries)
- *errmsg = "Max pairing attemps reached\n";
+ *errmsg = "Max pairing attempts reached\n";
else if (error->value[0] == TLVError_Unavailable)
*errmsg = "Device is unuavailble at this time\n";
else
hkdfExtract(SHA512, salt, salt_len, ikm, ikm_len, prk);
hkdfExpand(SHA512, prk, SHA512_LEN, info, info_len, okm, okm_len);
+
+ This variant takes explicit salt and info values, allowing a unique (dynamic) salt to be supplied at initialisation.
*/
static int
-hkdf_extract_expand(uint8_t *okm, size_t okm_len, const uint8_t *ikm, size_t ikm_len, enum pair_keys pair_key)
+hkdf_extract_expand_with_salt_and_info(uint8_t *okm, size_t okm_len, const uint8_t *ikm, size_t ikm_len, const char *salt, const char *info)
{
#ifdef CONFIG_OPENSSL
#include <openssl/kdf.h>
goto error;
if (EVP_PKEY_CTX_set_hkdf_md(pctx, EVP_sha512()) <= 0)
goto error;
- if (EVP_PKEY_CTX_set1_hkdf_salt(pctx, (const unsigned char *)pair_keys_map[pair_key].salt, strlen(pair_keys_map[pair_key].salt)) <= 0)
+ if (EVP_PKEY_CTX_set1_hkdf_salt(pctx, (const unsigned char *)salt, strlen(salt)) <= 0)
goto error;
if (EVP_PKEY_CTX_set1_hkdf_key(pctx, ikm, ikm_len) <= 0)
goto error;
- if (EVP_PKEY_CTX_add1_hkdf_info(pctx, (const unsigned char *)pair_keys_map[pair_key].info, strlen(pair_keys_map[pair_key].info)) <= 0)
+ if (EVP_PKEY_CTX_add1_hkdf_info(pctx, (const unsigned char *)info, strlen(info)) <= 0)
goto error;
if (EVP_PKEY_derive(pctx, okm, &okm_len) <= 0)
goto error;
return -1; // Below calculation not valid if output is larger than hash size
if (gcry_md_open(&hmac_handle, GCRY_MD_SHA512, GCRY_MD_FLAG_HMAC) != GPG_ERR_NO_ERROR)
return -1;
- if (gcry_md_setkey(hmac_handle, (const unsigned char *)pair_keys_map[pair_key].salt, strlen(pair_keys_map[pair_key].salt)) != GPG_ERR_NO_ERROR)
+ if (gcry_md_setkey(hmac_handle, salt, strlen(salt)) != GPG_ERR_NO_ERROR)
goto error;
gcry_md_write(hmac_handle, ikm, ikm_len);
memcpy(prk, gcry_md_read(hmac_handle, 0), sizeof(prk));
if (gcry_md_setkey(hmac_handle, prk, sizeof(prk)) != GPG_ERR_NO_ERROR)
goto error;
- gcry_md_write(hmac_handle, (const unsigned char *)pair_keys_map[pair_key].info, strlen(pair_keys_map[pair_key].info));
+ gcry_md_write(hmac_handle, info, strlen(info));
gcry_md_putc(hmac_handle, 1);
memcpy(okm, gcry_md_read(hmac_handle, 0), okm_len);
#endif
}
+/* Executes SHA512 RFC 5869 extract + expand, writing a derived key to okm
+
+ hkdfExtract(SHA512, salt, salt_len, ikm, ikm_len, prk);
+ hkdfExpand(SHA512, prk, SHA512_LEN, info, info_len, okm, okm_len);
+*/
+static int
+hkdf_extract_expand(uint8_t *okm, size_t okm_len, const uint8_t *ikm, size_t ikm_len, enum pair_keys pair_key) {
+ // pass in the salt that is in the pair_key_maps table
+ return hkdf_extract_expand_with_salt_and_info(okm, okm_len, ikm, ikm_len, pair_keys_map[pair_key].salt, pair_keys_map[pair_key].info);
+}
+
static int
encrypt_chacha(uint8_t *cipher, const uint8_t *plain, size_t plain_len, const uint8_t *key, size_t key_len, const void *ad, size_t ad_len, uint8_t *tag, size_t tag_len, const uint8_t nonce[NONCE_LENGTH])
{
}
static struct pair_cipher_context *
-cipher_new(struct pair_definition *type, int channel, const uint8_t *shared_secret, size_t shared_secret_len)
+cipher_new_with_salt_and_info(struct pair_definition *type, const uint8_t *shared_secret, size_t shared_secret_len, const char *write_salt, const char *write_info, const char *read_salt, const char *read_info)
{
struct pair_cipher_context *cctx;
- enum pair_keys write_key;
- enum pair_keys read_key;
int ret;
- // Note that events is opposite, probably because it is a reverse connection
- switch (channel)
- {
- case 0:
- write_key = PAIR_CONTROL_WRITE;
- read_key = PAIR_CONTROL_READ;
- break;
- case 1:
- write_key = PAIR_EVENTS_READ;
- read_key = PAIR_EVENTS_WRITE;
- break;
- case 2:
- write_key = PAIR_CONTROL_READ;
- read_key = PAIR_CONTROL_WRITE;
- break;
- case 3:
- write_key = PAIR_EVENTS_WRITE;
- read_key = PAIR_EVENTS_READ;
- break;
- default:
- return NULL;
- }
-
cctx = calloc(1, sizeof(struct pair_cipher_context));
if (!cctx)
goto error;
cctx->type = type;
- ret = hkdf_extract_expand(cctx->encryption_key, sizeof(cctx->encryption_key), shared_secret, shared_secret_len, write_key);
+ ret = hkdf_extract_expand_with_salt_and_info(cctx->encryption_key, sizeof(cctx->encryption_key), shared_secret, shared_secret_len, write_salt, write_info);
if (ret < 0)
goto error;
- ret = hkdf_extract_expand(cctx->decryption_key, sizeof(cctx->decryption_key), shared_secret, shared_secret_len, read_key);
+ ret = hkdf_extract_expand_with_salt_and_info(cctx->decryption_key, sizeof(cctx->decryption_key), shared_secret, shared_secret_len, read_salt, read_info);
if (ret < 0)
goto error;
return NULL;
}
+static struct pair_cipher_context *
+cipher_new(struct pair_definition *type, int channel, const uint8_t *shared_secret, size_t shared_secret_len, const char *dynamic_salt_suffix)
+{
+
+ enum pair_keys write_key;
+ enum pair_keys read_key;
+
+ // Note that events is opposite, probably because it is a reverse connection
+ switch (channel)
+ {
+ case 0: // control
+ write_key = PAIR_CONTROL_WRITE;
+ read_key = PAIR_CONTROL_READ;
+ break;
+ case 1: // events
+ write_key = PAIR_EVENTS_READ;
+ read_key = PAIR_EVENTS_WRITE;
+ break;
+ case 2: // data
+ write_key = PAIR_DATA_WRITE;
+ read_key = PAIR_DATA_READ;
+ break;
+ case 3: // control
+ write_key = PAIR_CONTROL_READ;
+ read_key = PAIR_CONTROL_WRITE;
+ break;
+ case 4: // events
+ write_key = PAIR_EVENTS_WRITE;
+ read_key = PAIR_EVENTS_READ;
+ break;
+ case 5: // data
+ write_key = PAIR_DATA_READ;
+ read_key = PAIR_DATA_WRITE;
+ break;
+ default:
+ return NULL;
+ }
+ //if ((dynamic_salt_suffix == NULL) || (dynamic_salt_suffix[0] == '\0')) {
+ // return cipher_new_with_salt_and_info(type, shared_secret, shared_secret_len, pair_keys_map[write_key].salt, pair_keys_map[write_key].info, pair_keys_map[read_key].salt, pair_keys_map[read_key].info);
+ //} else {
+ char write_salt[256] = "";
+ char read_salt[256] = "";
+ snprintf(write_salt, sizeof(write_salt), "%s%s", pair_keys_map[write_key].salt, dynamic_salt_suffix);
+ //printf("channel %d, dynamic_salt_suffix: \"%s\", write_salt: \"%s\"\n", channel, dynamic_salt_suffix, write_salt);
+ snprintf(read_salt, sizeof(read_salt), "%s%s", pair_keys_map[read_key].salt, dynamic_salt_suffix);
+ //printf("channel %d, dynamic_salt_suffix: \"%s\", read_salt: \"%s\"\n", channel, dynamic_salt_suffix, read_salt);
+ return cipher_new_with_salt_and_info(type, shared_secret, shared_secret_len, write_salt, pair_keys_map[write_key].info, read_salt, pair_keys_map[read_key].info);
+ //}
+}
+
static ssize_t
encrypt(uint8_t **ciphertext, size_t *ciphertext_len, const uint8_t *plaintext, size_t plaintext_len, struct pair_cipher_context *cctx)
{
* All rights reserved.
*
* Modifications for audio synchronisation, AirPlay 2
- * and related work, copyright (c) Mike Brady 2014 -- 2023
+ * and related work, copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#include "ptp-utilities.h"
#endif
+#ifdef CONFIG_FFMPEG
+#include <libavutil/version.h>
+#endif
+
#include "loudness.h"
#include "activity_monitor.h"
-// make the first audio packet deliberately early to bias the sync error of
-// the very first packet, making the error more likely to be too early
-// rather than too late. It it's too early,
-// a delay exactly compensating for it can be sent just before the
-// first packet. This should exactly compensate for the error.
-
-int64_t first_frame_early_bias = 8;
+const unsigned int silent_channel_index = 65;
+const unsigned int front_mono_channel_index = 66;
// default buffer size
// needs to be a power of 2 because of the way BUFIDX(seqno) works
// #define BUFFER_FRAMES 512
-#define MAX_PACKET 2048
// DAC buffer occupancy stuff
#define DAC_BUFFER_QUEUE_MINIMUM_LENGTH 2500
// static abuf_t audio_buffer[BUFFER_FRAMES];
#define BUFIDX(seqno) ((seq_t)(seqno) % BUFFER_FRAMES)
-int32_t modulo_32_offset(uint32_t from, uint32_t to) { return to - from; }
-
void do_flush(uint32_t timestamp, rtsp_conn_info *conn);
+#ifdef CONFIG_FFMPEG
+size_t avflush(rtsp_conn_info *conn);
+#endif
+
+int free_audio_buffer_payload(abuf_t *abuf) {
+ int items_freed = 0;
+ if (abuf) {
+ if (abuf->data != NULL) {
+ free(abuf->data);
+ items_freed++;
+ abuf->data = NULL;
+ }
+#ifdef CONFIG_FFMPEG
+ if (abuf->avframe != NULL) {
+ av_frame_free(&abuf->avframe);
+ items_freed++;
+ abuf->avframe = NULL;
+ abuf->ssrc = SSRC_NONE;
+ }
+#endif
+ } else {
+ debug(1, "null buffer pointer!");
+ }
+ return items_freed;
+}
+
void ab_resync(rtsp_conn_info *conn) {
int i;
for (i = 0; i < BUFFER_FRAMES; i++) {
+ free_audio_buffer_payload(&conn->audio_buffer[i]);
conn->audio_buffer[i].ready = 0;
conn->audio_buffer[i].resend_request_number = 0;
conn->audio_buffer[i].resend_time =
conn->audio_buffer[i].sequence_number = 0;
}
conn->ab_synced = 0;
- conn->last_seqno_read = -1;
+ conn->last_seqno_valid = 0;
conn->ab_buffering = 1;
}
-// the sequence numbers will wrap pretty often.
-// this returns true if the second arg is strictly after the first
-static inline int is_after(seq_t a, seq_t b) {
- int16_t d = b - a;
- return d > 0;
-}
-
void reset_input_flow_metrics(rtsp_conn_info *conn) {
conn->play_number_after_flush = 0;
conn->packet_count_since_flush = 0;
conn->initial_reference_timestamp = 0;
}
-void unencrypted_packet_decode(unsigned char *packet, int length, short *dest, int *outsize,
- int size_limit, rtsp_conn_info *conn) {
+void unencrypted_packet_decode(rtsp_conn_info *conn, unsigned char *packet, int length,
+ short *dest) {
if (conn->stream.type == ast_apple_lossless) {
#ifdef CONFIG_APPLE_ALAC
- if (config.use_apple_decoder) {
- if (conn->decoder_in_use != 1 << decoder_apple_alac) {
- debug(2, "Apple ALAC Decoder used on encrypted audio.");
- conn->decoder_in_use = 1 << decoder_apple_alac;
- }
- apple_alac_decode_frame(packet, length, (unsigned char *)dest, outsize);
- *outsize = *outsize * 4; // bring the size to bytes
+ if (config.decoder_in_use == 1 << decoder_apple_alac) {
+ int frames_decoded;
+ apple_alac_decode_frame(packet, length, (unsigned char *)dest, &frames_decoded);
+ } else
+#endif
+#ifdef CONFIG_HAMMERTON
+ if (config.decoder_in_use == 1 << decoder_hammerton) {
+ int buffer_size = conn->frames_per_packet * conn->input_bytes_per_frame;
+ alac_decode_frame(conn->decoder_info, packet, (unsigned char *)dest, &buffer_size);
} else
#endif
{
- if (conn->decoder_in_use != 1 << decoder_hammerton) {
- debug(2, "Hammerton Decoder used on encrypted audio.");
- conn->decoder_in_use = 1 << decoder_hammerton;
- }
- alac_decode_frame(conn->decoder_info, packet, (unsigned char *)dest, outsize);
+ die("No ALAC decoder included!");
}
} else if (conn->stream.type == ast_uncompressed) {
- int length_to_use = length;
- if (length_to_use > size_limit) {
- warn("unencrypted_packet_decode: uncompressed audio packet too long (size: %d bytes) to "
- "process -- truncated",
- length);
- length_to_use = size_limit;
- }
int i;
short *source = (short *)packet;
- for (i = 0; i < (length_to_use / 2); i++) {
+ for (i = 0; i < length / 2; i++) {
+ // assuming each input sample is 16 bits.
*dest = ntohs(*source);
dest++;
source++;
}
- *outsize = length_to_use;
}
}
-#ifdef CONFIG_OPENSSL
-// Thanks to
-// https://stackoverflow.com/questions/27558625/how-do-i-use-aes-cbc-encrypt-128-openssl-properly-in-ubuntu
-// for inspiration. Changed to a 128-bit key and no padding.
-
-int openssl_aes_decrypt_cbc(unsigned char *ciphertext, int ciphertext_len, unsigned char *key,
- unsigned char *iv, unsigned char *plaintext) {
- EVP_CIPHER_CTX *ctx;
- int len;
- int plaintext_len = 0;
- ctx = EVP_CIPHER_CTX_new();
- if (ctx != NULL) {
- if (EVP_DecryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv) == 1) {
- EVP_CIPHER_CTX_set_padding(ctx, 0); // no padding -- always returns 1
- // no need to allow space for padding in the output, as padding is disabled
- if (EVP_DecryptUpdate(ctx, plaintext, &len, ciphertext, ciphertext_len) == 1) {
- plaintext_len = len;
- if (EVP_DecryptFinal_ex(ctx, plaintext + len, &len) == 1) {
- plaintext_len += len;
- } else {
- debug(1, "EVP_DecryptFinal_ex error \"%s\".", ERR_error_string(ERR_get_error(), NULL));
- }
- } else {
- debug(1, "EVP_DecryptUpdate error \"%s\".", ERR_error_string(ERR_get_error(), NULL));
- }
- } else {
- debug(1, "EVP_DecryptInit_ex error \"%s\".", ERR_error_string(ERR_get_error(), NULL));
- }
- EVP_CIPHER_CTX_free(ctx);
- } else {
- debug(1, "EVP_CIPHER_CTX_new error \"%s\".", ERR_error_string(ERR_get_error(), NULL));
- }
- return plaintext_len;
-}
-#endif
-int audio_packet_decode(short *dest, int *destlen, uint8_t *buf, int len, rtsp_conn_info *conn) {
- // parameters: where the decoded stuff goes, its length in samples,
- // the incoming packet, the length of the incoming packet in bytes
- // destlen should contain the allowed max number of samples on entry
-
- if (len > MAX_PACKET) {
- warn("Incoming audio packet size is too large at %d; it should not exceed %d.", len,
- MAX_PACKET);
- return -1;
- }
- unsigned char packet[MAX_PACKET];
- // unsigned char packetp[MAX_PACKET];
- assert(len <= MAX_PACKET);
- int reply = 0; // everything okay
- int outsize = conn->input_bytes_per_frame * (*destlen); // the size the output should be, in bytes
- int maximum_possible_outsize = outsize;
-
- if (conn->stream.encrypted) {
- unsigned char iv[16];
- int aeslen = len & ~0xf;
- memcpy(iv, conn->stream.aesiv, sizeof(iv));
-#ifdef CONFIG_MBEDTLS
- mbedtls_aes_crypt_cbc(&conn->dctx, MBEDTLS_AES_DECRYPT, aeslen, iv, buf, packet);
-#endif
-#ifdef CONFIG_POLARSSL
- aes_crypt_cbc(&conn->dctx, AES_DECRYPT, aeslen, iv, buf, packet);
-#endif
-#ifdef CONFIG_OPENSSL
- openssl_aes_decrypt_cbc(buf, aeslen, conn->stream.aeskey, iv, packet);
-#endif
- memcpy(packet + aeslen, buf + aeslen, len - aeslen);
- unencrypted_packet_decode(packet, len, dest, &outsize, maximum_possible_outsize, conn);
- } else {
- // not encrypted
- unencrypted_packet_decode(buf, len, dest, &outsize, maximum_possible_outsize, conn);
- }
-
- if (outsize > maximum_possible_outsize) {
- debug(2,
- "Output from alac_decode larger (%d bytes, not frames) than expected (%d bytes) -- "
- "truncated, but buffer overflow possible! Encrypted = %d.",
- outsize, maximum_possible_outsize, conn->stream.encrypted);
- reply = -1; // output packet is the wrong size
- }
-
- if (conn->input_bytes_per_frame != 0)
- *destlen = outsize / conn->input_bytes_per_frame;
- else
- die("Unexpectedly, conn->input_bytes_per_frame is zero.");
- if ((outsize % conn->input_bytes_per_frame) != 0)
- debug(1,
- "Number of audio frames (%d) does not correspond exactly to the number of bytes (%d) "
- "and the audio frame size (%d).",
- *destlen, outsize, conn->input_bytes_per_frame);
- return reply;
-}
-
+#ifdef CONFIG_HAMMERTON
static int init_alac_decoder(int32_t fmtp[12], rtsp_conn_info *conn) {
// clang-format off
// clang-format on
- alac_file *alac;
+ alac_file *alac;
+
+ alac = alac_create(conn->input_bit_depth,
+ conn->input_num_channels); // no pthread cancellation point in here
+ if (!alac)
+ return 1;
+ conn->decoder_info = alac;
+
+ alac->setinfo_max_samples_per_frame = conn->frames_per_packet;
+ alac->setinfo_7a = fmtp[2];
+ alac->setinfo_sample_size = conn->input_bit_depth;
+ alac->setinfo_rice_historymult = fmtp[4];
+ alac->setinfo_rice_initialhistory = fmtp[5];
+ alac->setinfo_rice_kmodifier = fmtp[6];
+ alac->setinfo_7f = fmtp[7];
+ alac->setinfo_80 = fmtp[8];
+ alac->setinfo_82 = fmtp[9];
+ alac->setinfo_86 = fmtp[10];
+ alac->setinfo_8a_rate = fmtp[11];
+ alac_allocate_buffers(alac); // no pthread cancellation point in here
+ return 0;
+}
+#endif
+
+static void init_buffer(rtsp_conn_info *conn) {
+ int i;
+ for (i = 0; i < BUFFER_FRAMES; i++) {
+ conn->audio_buffer[i].data = NULL;
+#ifdef CONFIG_FFMPEG
+ conn->audio_buffer[i].avframe = NULL;
+ conn->audio_buffer[i].ssrc = SSRC_NONE;
+#endif
+ }
+}
+
+static void free_audio_buffers(rtsp_conn_info *conn) {
+ int i;
+ for (i = 0; i < BUFFER_FRAMES; i++) {
+ free_audio_buffer_payload(&conn->audio_buffer[i]);
+ }
+}
+
+int first_possibly_missing_frame = -1;
+
+void reset_buffer(rtsp_conn_info *conn) {
+ pthread_cleanup_debug_mutex_lock(&conn->ab_mutex, 30000, 0);
+ ab_resync(conn);
+ pthread_cleanup_pop(1);
+#ifdef CONFIG_FFMPEG
+ avflush(conn);
+#endif
+ if (config.output->flush) {
+ config.output->flush(); // no cancellation points
+ // debug(1, "reset_buffer: flush output device.");
+ }
+}
+
+// returns the number of audio buffer blocks currently occupied, but not their size,
+// because the size is determined by the block size sent
+
+size_t get_audio_buffer_occupancy(rtsp_conn_info *conn) {
+ size_t response = 0;
+ pthread_cleanup_debug_mutex_lock(&conn->ab_mutex, 30000, 0);
+ if (conn->ab_synced) {
+ int16_t occ =
+ conn->ab_write - conn->ab_read; // will be zero or positive if read and write are within
+ // 2^15 of each other and write is at or after read
+ response = occ;
+ }
+ pthread_cleanup_pop(1);
+ return response;
+}
+
+const char *get_category_string(airplay_stream_c cat) {
+ char *category;
+ switch (cat) {
+ case unspecified_stream_category:
+ category = "unspecified stream";
+ break;
+ case ptp_stream:
+ category = "PTP stream";
+ break;
+ case ntp_stream:
+ category = "NTP stream";
+ break;
+ case remote_control_stream:
+ category = "Remote Control stream";
+ break;
+ case classic_airplay_stream:
+ category = "Classic AirPlay stream";
+ break;
+ default:
+ category = "Unexpected stream code";
+ break;
+ }
+ return category;
+}
+
+#ifdef CONFIG_FFMPEG
+
+static void avcodec_alloc_context3_cleanup_handler(void *arg) {
+ debug(3, "avcodec_alloc_context3_cleanup_handler");
+ AVCodecContext *codec_context = arg;
+ av_free(codec_context);
+}
+
+static void avcodec_open2_cleanup_handler(__attribute__((unused)) void *arg) {
+ debug(3, "avcodec_open2_cleanup_handler");
+ // AVCodecContext *codec_context = arg;
+ // avcodec_close(codec_context);
+}
+
+static void swr_alloc_cleanup_handler(void *arg) {
+ debug(3, "swr_alloc_cleanup_handler");
+ SwrContext **swr = arg;
+ swr_free(swr);
+}
+
+static void av_packet_alloc_cleanup_handler(void *arg) {
+ debug(3, "av_packet_alloc_cleanup_handler");
+ AVPacket **pkt = arg;
+ av_packet_free(pkt);
+}
+
+/*
+static void av_frame_alloc_cleanup_handler(void *arg) {
+ debug(3, "av_frame_alloc_cleanup_handler");
+ AVFrame **frame = arg;
+ av_frame_free(frame);
+}
+*/
+
+void clear_decoding_chain(rtsp_conn_info *conn) {
+ if (conn->incoming_ssrc != SSRC_NONE) {
+ // debug_mutex_lock(&conn->ab_mutex, 30000, 0);
+ // ab_resync(conn);
+ // debug_mutex_unlock(&conn->ab_mutex, 0);
+ pthread_cleanup_push(avcodec_alloc_context3_cleanup_handler, conn->codec_context);
+ pthread_cleanup_push(malloc_cleanup, &conn->codec_context->extradata);
+ pthread_cleanup_push(avcodec_open2_cleanup_handler, conn->codec_context);
+ pthread_cleanup_pop(1); // avcodec_open2_cleanup_handler
+ pthread_cleanup_pop(1); // deallocate the malloc
+ pthread_cleanup_pop(1); // avcodec_alloc_context3_cleanup_handler
+ conn->incoming_ssrc = SSRC_NONE;
+ }
+}
+
+void clear_software_resampler(rtsp_conn_info *conn) {
+ if (conn->swr != NULL) {
+ debug(2, "clear_software_resampler");
+ pthread_cleanup_push(swr_alloc_cleanup_handler, &conn->swr);
+ pthread_cleanup_pop(1); // deallocate the swr
+ conn->swr = NULL;
+ conn->resampler_ssrc = SSRC_NONE;
+ }
+}
+
+int ssrc_is_recognised(ssrc_t ssrc) {
+ int response = 0;
+ switch (ssrc) {
+ case ALAC_44100_S16_2:
+ case ALAC_48000_S24_2:
+ case AAC_44100_F24_2:
+ case AAC_48000_F24_2:
+ case AAC_48000_F24_5P1:
+ case AAC_48000_F24_7P1:
+ response = 1;
+ break;
+ default:
+ break;
+ }
+ return response;
+}
+
+int ssrc_is_aac(ssrc_t ssrc) {
+ int response = 0;
+ switch (ssrc) {
+ case AAC_44100_F24_2:
+ case AAC_48000_F24_2:
+ case AAC_48000_F24_5P1:
+ case AAC_48000_F24_7P1:
+ response = 1;
+ break;
+ default:
+ break;
+ }
+ return response;
+}
+
+char ssrc_name[1024];
+const char *get_ssrc_name(ssrc_t ssrc) {
+ const char *response = NULL;
+ switch (ssrc) {
+ case ALAC_44100_S16_2:
+ response = "ALAC/44100/S16_LE/2";
+ break;
+ case ALAC_48000_S24_2:
+ response = "ALAC/48000/S24_LE/2";
+ break;
+ case AAC_44100_F24_2:
+ response = "AAC/44100/F24/2";
+ break;
+ case AAC_48000_F24_2:
+ response = "AAC/48000/F24/2";
+ break;
+ case AAC_48000_F24_5P1:
+ response = "AAC/48000/F24/5.1";
+ break;
+ case AAC_48000_F24_7P1:
+ response = "AAC/48000/F24/7.1";
+ break;
+ case SSRC_NONE:
+ response = "None (0)";
+ break;
+ default: {
+ snprintf(ssrc_name, sizeof(ssrc_name), "<unknown ssrc> (0x%" PRIx32 ")", ssrc);
+ response = ssrc_name;
+ } break;
+ }
+ return response;
+}
+
+uint32_t get_ssrc_rate(ssrc_t ssrc) {
+ uint32_t response = 0;
+ switch (ssrc) {
+ case ALAC_44100_S16_2:
+ case AAC_44100_F24_2:
+ response = 44100;
+ break;
+ case ALAC_48000_S24_2:
+ case AAC_48000_F24_2:
+ case AAC_48000_F24_5P1:
+ case AAC_48000_F24_7P1:
+ response = 48000;
+ break;
+ default:
+ break;
+ }
+ return response;
+}
+
+size_t get_ssrc_block_length(ssrc_t ssrc) {
+ size_t response = 0;
+ switch (ssrc) {
+ case ALAC_44100_S16_2:
+ case ALAC_48000_S24_2:
+ response = 352;
+ break;
+ case AAC_44100_F24_2:
+ case AAC_48000_F24_2:
+ case AAC_48000_F24_5P1:
+ case AAC_48000_F24_7P1:
+ response = 1024;
+ break;
+ default:
+ break;
+ }
+ return response;
+}
+
+int setup_software_resampler(rtsp_conn_info *conn, ssrc_t ssrc) {
+ int response = 0;
+
+ unsigned int channels;
+
+ // the output from the software resampler will be the input to the rest of
+ // the player chain, so we need to set those parameters according to the SSRC:
+
+ // default values...
+ conn->input_bit_depth = 16;
+ conn->input_effective_bit_depth = 16;
+ conn->input_bytes_per_frame = 4;
+ conn->frames_per_packet = 352;
+
+ // most common values first, changed in the switch statement
+ conn->input_rate = 48000;
+ channels = 2;
+ conn->frames_per_packet = 1024;
+
+ sps_format_t suggested_output_format = SPS_FORMAT_S32; // this may be ignored
+
+ switch (ssrc) {
+ case ALAC_44100_S16_2:
+ conn->input_rate = 44100;
+ conn->frames_per_packet = 352;
+ suggested_output_format = SPS_FORMAT_S16;
+ break;
+ case ALAC_48000_S24_2:
+ conn->frames_per_packet = 352;
+ suggested_output_format = SPS_FORMAT_S24;
+ break;
+ case AAC_44100_F24_2:
+ conn->input_rate = 44100;
+ break;
+ case AAC_48000_F24_2:
+ break;
+ case AAC_48000_F24_5P1:
+ channels = 6;
+ break;
+ case AAC_48000_F24_7P1:
+ channels = 8;
+ break;
+ default:
+ debug(1, "Can't set rate for %s.", get_ssrc_name(ssrc));
+ break;
+ }
+
+// Now we ask the backend for its best format, giving it the channels, rate and format
+
+// default format is S32_LE/48000/2 for AP2, S16_LE/44100/2 otherwise
+#ifdef CONFIG_AIRPLAY_2
+ uint32_t output_configuration = CHANNELS_TO_ENCODED_FORMAT(2) | RATE_TO_ENCODED_FORMAT(48000) |
+ FORMAT_TO_ENCODED_FORMAT(SPS_FORMAT_S32_LE);
+#else
+ uint32_t output_configuration = CHANNELS_TO_ENCODED_FORMAT(2) | RATE_TO_ENCODED_FORMAT(44100) |
+ FORMAT_TO_ENCODED_FORMAT(SPS_FORMAT_S16_LE);
+#endif
+
+ int output_configuration_changed = 0;
+
+ if (config.output->get_configuration) {
+ output_configuration =
+ config.output->get_configuration(channels, conn->input_rate, suggested_output_format);
+ }
+
+ // if you can set up a configuration...
+ if (output_configuration != 0) {
+ if (config.current_output_configuration != output_configuration) {
+ output_configuration_changed = 1;
+ debug(2, "Connection %d: outgoing audio switching to: %s.", conn->connection_number,
+ short_format_description(output_configuration));
+ }
+ config.current_output_configuration = output_configuration;
+ char *output_device_channel_map = NULL;
+ if (config.output->configure) {
+ config.output->configure(output_configuration, &output_device_channel_map);
+ }
+
+ // create a software resampler
+ if (conn->swr != NULL) {
+ debug(3, "software resampler already set up");
+ if (swr_is_initialized(conn->swr)) {
+ debug(3, "software resampler already initialised -- close it...");
+ swr_close(conn->swr);
+ }
+ debug(3, "software resampler free it...");
+ swr_free(&conn->swr);
+ if (conn->swr == NULL) {
+ debug(3, "software resampler released");
+ }
+ }
+
+ // input channels to the player
+ conn->input_num_channels = CHANNELS_FROM_ENCODED_FORMAT(output_configuration);
+
+ SwrContext *swr = swr_alloc();
+ conn->swr = swr;
+ if (swr == NULL) {
+ die("can not allocate an swr context");
+ }
+
+ // push a deallocator -- av_packet_free(pkt);
+ pthread_cleanup_push(swr_alloc_cleanup_handler, &conn->swr);
+
+ enum AVSampleFormat input_format = AV_SAMPLE_FMT_FLTP; // default
+ int64_t input_layout = AV_CH_LAYOUT_STEREO; // default
+ int64_t output_layout = AV_CH_LAYOUT_STEREO; // default
+
+ switch (ssrc) {
+ case ALAC_44100_S16_2:
+ case ALAC_48000_S24_2: {
+ // seems as if the codec_context is correctly set up for ALAC but not for AAC-LC
+ input_format = conn->codec_context->sample_fmt;
+ } break;
+ case AAC_44100_F24_2:
+ case AAC_48000_F24_2: {
+ // defaults are fine...
+ } break;
+ case AAC_48000_F24_5P1: {
+ input_layout = config.six_channel_layout;
+ output_layout = config.six_channel_layout; // assume no mixdown
+ } break;
+ case AAC_48000_F24_7P1: {
+ input_layout = config.eight_channel_layout;
+ output_layout = config.eight_channel_layout; // assume no mixdown
+ } break;
+ default:
+ debug(1, "unexpected SSRC: 0x%0x", ssrc);
+ break;
+ }
+
+ av_opt_set_sample_fmt(swr, "in_sample_fmt", input_format, 0);
+
+ // remember that if mixdown is enabled,
+ // the resampler's output channel layout is set either automatically (from the
+ // output channel count) or from the layout setting that has been given
+
+#if LIBAVUTIL_VERSION_MAJOR >= 57
+ {
+ AVChannelLayout input_channel_layout;
+ av_channel_layout_from_mask(&input_channel_layout, input_layout);
+ av_opt_set_chlayout(swr, "in_chlayout", &input_channel_layout, 0);
+ av_channel_layout_uninit(&input_channel_layout);
+
+ AVChannelLayout output_channel_layout;
+ if (config.mixdown_enable != 0) {
+ if (config.mixdown_channel_layout == 0) {
+ av_channel_layout_default(&output_channel_layout,
+ CHANNELS_FROM_ENCODED_FORMAT(output_configuration));
+ } else {
+ av_channel_layout_from_mask(&output_channel_layout, config.mixdown_channel_layout);
+ }
+ } else {
+ av_channel_layout_from_mask(&output_channel_layout, output_layout);
+ }
+ av_opt_set_chlayout(swr, "out_chlayout", &output_channel_layout, 0);
+ av_channel_layout_uninit(&output_channel_layout);
+ }
+#else
+ av_opt_set_int(swr, "in_channel_layout", input_layout, 0);
+ if (config.mixdown_enable != 0) {
+ if (config.mixdown_channel_layout == 0) {
+ output_layout =
+ av_get_default_channel_layout(CHANNELS_FROM_ENCODED_FORMAT(output_configuration));
+ } else {
+ output_layout = config.mixdown_channel_layout;
+ }
+ }
+ av_opt_set_int(swr, "out_channel_layout", output_layout, 0); // assume no mixdown
+#endif
+
+ av_opt_set_int(swr, "in_sample_rate", conn->input_rate, 0);
+ // now set the resampler's output rate to match the output device's rate
+ av_opt_set_int(swr, "out_sample_rate", RATE_FROM_ENCODED_FORMAT(output_configuration), 0);
+
+ // Ask for S16 output for AAC/S16 input and for S32 output from resampler for F24 and S24.
+ // This is to avoid FFmpeg unnecessarily transcoding S16 to S32.
+ // Dither will be added by Shairport Sync itself later, if needed.
+
+ if (ssrc == ALAC_44100_S16_2) {
+ av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
+ conn->input_bytes_per_frame =
+ 2 * CHANNELS_FROM_ENCODED_FORMAT(
+ output_configuration); // the output from the decoder will be input to the player
+ conn->input_bit_depth = 16;
+ conn->input_effective_bit_depth = 16;
+ } else {
+ av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_S32, 0);
+ conn->input_bytes_per_frame =
+ 4 * CHANNELS_FROM_ENCODED_FORMAT(
+ output_configuration); // the output from the decoder will be input to the player
+ conn->input_bit_depth = 32;
+ // this is important when it comes to deciding on dither
+ // AFAIK 24-bit ALAC comes out in 32-bit format but is actually 24 bit
+ // so don't dither if it is truncated from 32 to 24 bit
+ if (ssrc == ALAC_48000_S24_2)
+ conn->input_effective_bit_depth = 24;
+ else
+ conn->input_effective_bit_depth = 32;
+ }
+
+ // now, having set up the resampler, we can initialise it
+
+ // disabling this, as the soxr-based resampler seems not to give exactly the right number of
+ // frames going from 44100 to 48000 and requires stuffing to compensate.
+
+ // also, the soxr resampling engine isn't included in the Docker image.
+ // #ifdef CONFIG_SOXR
+ // av_opt_set(swr, "resampler", "soxr", 0);
+ // #endif
+ int sres = swr_init(swr);
+ if (sres != 0)
+ debug(1, "swr_init returned %d with SSRC of 0x%0x and LIBAVUTIL_VERSION_MAJOR of %u.", sres,
+ ssrc, LIBAVUTIL_VERSION_MAJOR);
+
+ typedef struct {
+ char *name;
+ int allocated;
+ } channel_info_t;
+
+ char resampler_channel_list[1024] = "";
+ unsigned int c;
+ channel_info_t resampler_channels[64]; // can't be more than 64. This will list the channel
+ // names in the order they appear in the output from
+ // the software resampler.
+ for (c = 0; c < sizeof(resampler_channels) / sizeof(channel_info_t); c++) {
+ resampler_channels[c].name = NULL;
+ resampler_channels[c].allocated = 0;
+ }
+
+ // get information about the output from the resampler
+ int64_t resampler_output_format = 0;
+ int resampler_channels_found = 0;
+
+#if LIBAVUTIL_VERSION_MAJOR >= 57
+
+ AVChannelLayout output_channel_layout = {0};
+ av_opt_get_chlayout(swr, "out_chlayout", 0, &output_channel_layout);
+ conn->resampler_output_channels = output_channel_layout.nb_channels;
+ for (c = 0; c < 64; c++) {
+ enum AVChannel channel = av_channel_layout_channel_from_index(&output_channel_layout, c);
+ if (channel != AV_CHAN_NONE) {
+ char buffer[32];
+ if (av_channel_name(buffer, 32, channel) > 0) {
+ if (resampler_channels_found == 0) {
+ strcat(resampler_channel_list, "\"");
+ } else {
+ strcat(resampler_channel_list, "\", \"");
+ }
+ strcat(resampler_channel_list, buffer);
+ resampler_channels[resampler_channels_found].name = strdup(buffer);
+ resampler_channels_found++;
+ }
+ }
+ }
+ av_channel_layout_uninit(&output_channel_layout);
+
+#else
+
+ int64_t resampler_output_channel_layout = 0;
+ {
+ int res = av_opt_get_int(swr, "out_channel_layout", 0, &resampler_output_channel_layout);
+ if (res == 0) {
+ conn->resampler_output_channels =
+ (int64_t)av_get_channel_layout_nb_channels((uint64_t)resampler_output_channel_layout);
+ } else {
+ debug(1, "Error %d getting resampler output channel layout.", res);
+ }
+ }
+ int64_t mask = 1;
+ for (c = 0; c < 64; c++) {
+ if ((resampler_output_channel_layout & mask) != 0) {
+ if (resampler_channels_found == 0) {
+ strcat(resampler_channel_list, "\"");
+ } else {
+ strcat(resampler_channel_list, "\", \"");
+ }
+ strcat(resampler_channel_list, av_get_channel_name(1 << c));
+ resampler_channels[resampler_channels_found].name = strdup(av_get_channel_name(1 << c));
+ resampler_channels_found++;
+ }
+ mask = mask << 1;
+ }
+
+#endif
+
+ if (resampler_channels_found != 0) {
+ strcat(resampler_channel_list, "\"");
+ }
+
+ if (strlen(resampler_channel_list) == 0) {
+ debug(3, "resampler output channel list is empty.");
+ } else {
+ debug(3, "resampler output channel list: %s.", resampler_channel_list);
+ }
+
+ if (output_device_channel_map != NULL) {
+ debug(3, "output device's channel map is: \"%s\".", output_device_channel_map);
+ // free(output_device_channel_map);
+ // output_device_channel_map = NULL;
+ }
+ int output_channel_map_faulty = 0;
+ if (resampler_channels_found != 0) {
+ // now we have the names of the channels produced by the resampler in the order they
+ // appear in the output from the resampler. We need to map them to the channel ordering
+ // of the output device.
+
+ // create an output channel list. It will be 64 channels long.
+ // It may not have names for all channels.
+ // In fact, it will have no names at all if mapping is disabled or set to auto
+ // with no device channel map. That will be okay, as unallocated resampler
+ // channels will be assigned to unused output channels at the end anyway
+
+ channel_info_t
+ output_channels[64]; // can't be more than 64. This will list the output channel names
+ // in the order they appear in the device channel map.
+ for (c = 0; c < sizeof(output_channels) / sizeof(channel_info_t); c++) {
+ output_channels[c].name = NULL;
+ output_channels[c].allocated = 0;
+ }
+
+ // if channel mapping is enabled
+ if (config.output_channel_mapping_enable != 0) {
+ // if a channel map is given
+ if (config.output_channel_map_size != 0) {
+ for (c = 0; c < config.output_channel_map_size; c++) {
+ output_channels[c].name = strdup(config.output_channel_map[c]);
+ }
+ } else if (output_device_channel_map != NULL) { // if there is a device channel map...
+ char *device_channels = strdup(output_device_channel_map);
+ char delim[] = " ";
+ char *ptr = strtok(device_channels, delim);
+ c = 0;
+ while (ptr != NULL) {
+ output_channels[c].name = strdup(ptr);
+ if (strcasecmp(ptr, "UNKNOWN") == 0)
+ output_channel_map_faulty = 1;
+ ptr = strtok(NULL, delim);
+ c++;
+ }
+ free(device_channels);
+ }
+ }
+
+ if (output_channel_map_faulty != 0) {
+ once(inform("The output device's %u-channel map is incomplete or faulty: \"%s\".",
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration),
+ output_device_channel_map));
+ }
+
+ // at this point, we should have two arrays
+ // the first is all the resampler channels
+ // the second is device channel map channels, which may be empty or incomplete
+
+ for (c = 0; c < 64; c++)
+ if (resampler_channels[c].name != NULL)
+ debug(3, "audio channel %u is \"%s\".", c, resampler_channels[c].name);
+ for (c = 0; c < 64; c++)
+ if (output_channels[c].name != NULL)
+ debug(3, "output device channel %u is \"%s\".", c, output_channels[c].name);
+
+ conn->output_channel_map_size =
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration);
+
+ // construct a map to match named resampler channels to named output channels
+
+ unsigned int cmi;
+ for (cmi = 0; cmi < conn->output_channel_map_size; cmi++) {
+ // debug(1,"checking output channel %u, (\"%s\").", cmi, output_channels[cmi].name);
+ conn->output_channel_to_resampler_channel_map[cmi] =
+ silent_channel_index; // by default the channel is silent
+ if ((output_channels[cmi].name != NULL) && (strcmp(output_channels[cmi].name, "--") == 0)) {
+ conn->output_channel_to_resampler_channel_map[cmi] = silent_channel_index;
+ output_channels[cmi].allocated = 1;
+ debug(1, "output device channel %u (\"--\") will be silent.", cmi);
+ } else {
+ int resampler_channel_index;
+ int found = 0;
+ for (resampler_channel_index = 0;
+ ((resampler_channel_index < resampler_channels_found) && (found == 0));
+ resampler_channel_index++) {
+ if ((output_channels[cmi].name != NULL) &&
+ (resampler_channels[resampler_channel_index].name != NULL) &&
+ (strcmp(output_channels[cmi].name,
+ resampler_channels[resampler_channel_index].name) == 0)) {
+ conn->output_channel_to_resampler_channel_map[cmi] = resampler_channel_index;
+ output_channels[cmi].allocated = 1;
+ resampler_channels[resampler_channel_index].allocated = 1;
+ found = 1;
+ if ((resampler_channels_found > 2) && (output_configuration_changed != 0)) {
+ if (output_channel_map_faulty != 0)
+ debug(3, "%s -> %s/%u.", resampler_channels[resampler_channel_index].name,
+ output_channels[cmi].name, cmi);
+ else
+ debug(3, "%s -> %s/%u.", resampler_channels[resampler_channel_index].name,
+ output_channels[cmi].name, cmi);
+ }
+ }
+ }
+ }
+ }
+
+ // now there may be unmapped resampler channels and unallocated output channels
+ // allocate them on a first-come-first-served basis
+
+ for (cmi = 0; cmi < conn->output_channel_map_size; cmi++) {
+ if (output_channels[cmi].allocated == 0)
+ debug(3, "output device channel %u (\"%s\") is unallocated.", cmi,
+ output_channels[cmi].name);
+ }
+ for (c = 0; c < 64; c++) {
+ if ((resampler_channels[c].name != NULL) && (resampler_channels[c].allocated == 0))
+ debug(3, "audio channel %u (\"%s\") is unmapped.", c, resampler_channels[c].name);
+ }
+
+ c = 0; // for indexing through the unmapped resampler channels
+ for (cmi = 0; (cmi < conn->output_channel_map_size) && (c < 64); cmi++) {
+ if (output_channels[cmi].allocated == 0) {
+ do {
+ if ((resampler_channels[c].name != NULL) && (resampler_channels[c].allocated == 0)) {
+ output_channels[cmi].allocated = 1;
+ resampler_channels[c].allocated = 1;
+ conn->output_channel_to_resampler_channel_map[cmi] = c;
+ if (output_channel_map_faulty != 0)
+ debug(3, "%s -> %s/%u.", resampler_channels[c].name, output_channels[cmi].name,
+ cmi);
+ else
+ debug(3, "%s -> %s/%u.", resampler_channels[c].name, output_channels[cmi].name,
+ cmi);
+ } else {
+ c++;
+ }
+ } while ((output_channels[cmi].allocated == 0) && (c < 64));
+ }
+ }
+
+ if (output_configuration_changed != 0) {
+ char channel_mapping_list[256] = "";
+ for (c = 0; c < 8; c++) {
+ if ((output_channels[c].allocated != 0) &&
+ (conn->output_channel_to_resampler_channel_map[c] != silent_channel_index)) {
+ char channel_mapping[32] = "";
+ if (output_channels[c].name != NULL)
+ snprintf(channel_mapping, sizeof(channel_mapping) - 1, " %u (\"%s\") <- %s |", c,
+ output_channels[c].name,
+ resampler_channels[conn->output_channel_to_resampler_channel_map[c]].name);
+ else
+ snprintf(channel_mapping, sizeof(channel_mapping) - 1, " %u <- %s |", c,
+ resampler_channels[conn->output_channel_to_resampler_channel_map[c]].name);
+ strncat(channel_mapping_list, channel_mapping,
+ sizeof(channel_mapping_list) - 1 - strlen(channel_mapping_list));
+ }
+ }
+ debug(1, "Channel Mapping: |%s", channel_mapping_list);
+ }
+
+ for (c = 0; c < 64; c++) {
+ if (output_channels[c].name != NULL)
+ free(output_channels[c].name);
+ }
+ for (c = 0; c < 64; c++) {
+ if (resampler_channels[c].name != NULL)
+ free(resampler_channels[c].name);
+ }
+ }
+
+ {
+ int res = av_opt_get_int(swr, "out_sample_fmt", 0, &resampler_output_format);
+ if (res == 0) {
+ conn->resampler_output_bytes_per_sample = av_get_bytes_per_sample(resampler_output_format);
+ debug(3, "resampler output bytes per sample in swr: %d.",
+ conn->resampler_output_bytes_per_sample);
+ } else {
+ debug(1, "Error %d getting resampler output bytes per sample.", res);
+ }
+ }
+ conn->resampler_ssrc = ssrc;
+
+ pthread_cleanup_pop(0); // successful exit -- don't deallocate the swr
+ } else {
+ debug(1, "Error setting the configuration of the output backend.");
+ }
+ return response; // 0 if everything is okay
+}
+// Build (or rebuild) the FFmpeg decoding chain for this connection when the
+// incoming SSRC (i.e. the incoming stream's encoding) changes.
+// Chooses a codec (ALAC or AAC) from the SSRC, allocates and opens a codec
+// context and, for ALAC streams, synthesises the "magic cookie" extradata that
+// the FFmpeg ALAC decoder requires. Also records the input rate and the
+// frames-per-packet count for the chosen encoding.
+void prepare_decoding_chain(rtsp_conn_info *conn, ssrc_t ssrc) {
+  if ((ssrc_is_recognised(ssrc)) && (ssrc != conn->incoming_ssrc)) {
+
+    if ((config.statistics_requested != 0) && (ssrc != SSRC_NONE) &&
+        (conn->incoming_ssrc != SSRC_NONE)) {
+      debug(2, "Connection %d: incoming audio switching to \"%s\".", conn->connection_number,
+            get_ssrc_name(ssrc));
+#ifdef CONFIG_METADATA
+      send_ssnc_metadata('sdsc', get_ssrc_name(ssrc), strlen(get_ssrc_name(ssrc)), 1);
+#endif
+    }
+
+    // the ssrc of the incoming packet is different to the ssrc of the decoding chain
+    // so the decoding chain must be rebuilt
+
+    clear_decoding_chain(conn);
+    // create the new decoding chain
+    conn->incoming_ssrc = ssrc;
+    if (conn->incoming_ssrc != SSRC_NONE) {
+
+      // get a codec
+      // ideas and some code from https://rodic.fr/blog/libavcodec-tutorial-decode-audio-file/
+      // with thanks
+
+      // Set up the decoder depending on the ssrc code.
+      switch (ssrc) {
+      case ALAC_44100_S16_2:
+      case ALAC_48000_S24_2:
+        conn->codec = avcodec_find_decoder(AV_CODEC_ID_ALAC);
+        break;
+      case AAC_44100_F24_2:
+      case AAC_48000_F24_2:
+      case AAC_48000_F24_5P1:
+      case AAC_48000_F24_7P1:
+        conn->codec = avcodec_find_decoder(AV_CODEC_ID_AAC);
+        break;
+      default:
+        die("Can't find a suitable codec for SSRC: %s", get_ssrc_name(ssrc));
+        break;
+      }
+
+      // Get a decoder-dependent codec context
+      // NOTE(review): on allocation failure this only logs and carries on --
+      // subsequent dereferences of conn->codec_context would crash; confirm intended.
+      conn->codec_context = avcodec_alloc_context3(conn->codec);
+      if (conn->codec_context == NULL) {
+        debug(1, "Could not allocate a codec context!");
+      }
+      // push a deallocator -- av_free(codec_context)
+      pthread_cleanup_push(avcodec_alloc_context3_cleanup_handler, conn->codec_context);
+
+      // prepare to open the codec context with that codec
+      // but first, if it's the ALAC decoder, prepare a magic cookie
+      if ((ssrc == ALAC_48000_S24_2) || (ssrc == ALAC_44100_S16_2)) {
+        alac_ffmpeg_magic_cookie *extradata =
+            malloc(sizeof(alac_ffmpeg_magic_cookie)); // might not use it
+        if (extradata == NULL)
+          die("Could not allocate memory for a magic cookie.");
+        // create a magic cookie preceded by the 12-byte "atom" (?) expected by FFMPEG (?)
+        memset(extradata, 0, sizeof(alac_ffmpeg_magic_cookie));
+        extradata->cookie_size = htonl(sizeof(alac_ffmpeg_magic_cookie));
+        extradata->cookie_tag = htonl('alac');
+        extradata->alac_config.frameLength = htonl(352);
+        if (ssrc == ALAC_48000_S24_2) {
+          extradata->alac_config.bitDepth = 24; // Seems to be S24
+          extradata->alac_config.sampleRate = htonl(48000);
+        } else {
+          extradata->alac_config.bitDepth = 16; // Seems to be S16
+          extradata->alac_config.sampleRate = htonl(44100);
+        }
+        extradata->alac_config.pb = 40;
+        extradata->alac_config.mb = 10;
+        extradata->alac_config.kb = 14;
+        extradata->alac_config.numChannels = 2;
+        extradata->alac_config.maxRun = htons(255);
+        conn->codec_context->extradata = (uint8_t *)extradata;
+        conn->codec_context->extradata_size = sizeof(alac_ffmpeg_magic_cookie);
+      } else {
+        conn->codec_context->extradata = NULL;
+      }
+      pthread_cleanup_push(malloc_cleanup, &conn->codec_context->extradata);
+
+      if (avcodec_open2(conn->codec_context, conn->codec, NULL) < 0) {
+        die("Could not initialise the codec context");
+      }
-  alac = alac_create(conn->input_bit_depth,
-                     conn->input_num_channels); // no pthread cancellation point in here
-  if (!alac)
-    return 1;
-  conn->decoder_info = alac;
+      // push a closer -- avcodec_close(codec_context);
+      pthread_cleanup_push(avcodec_open2_cleanup_handler, conn->codec_context);
-  alac->setinfo_max_samples_per_frame = conn->max_frames_per_packet;
-  alac->setinfo_7a = fmtp[2];
-  alac->setinfo_sample_size = conn->input_bit_depth;
-  alac->setinfo_rice_historymult = fmtp[4];
-  alac->setinfo_rice_initialhistory = fmtp[5];
-  alac->setinfo_rice_kmodifier = fmtp[6];
-  alac->setinfo_7f = fmtp[7];
-  alac->setinfo_80 = fmtp[8];
-  alac->setinfo_82 = fmtp[9];
-  alac->setinfo_86 = fmtp[10];
-  alac->setinfo_8a_rate = fmtp[11];
-  alac_allocate_buffers(alac); // no pthread cancellation point in here
+      conn->input_rate = get_ssrc_rate(ssrc);
+      // ALAC packets carry 352 frames per packet; AAC packets carry 1024
+      if ((ssrc == ALAC_48000_S24_2) || (ssrc == ALAC_44100_S16_2)) {
+        conn->frames_per_packet = 352;
+      } else {
+        conn->frames_per_packet = 1024;
+      }
-#ifdef CONFIG_APPLE_ALAC
-  apple_alac_init(fmtp); // no pthread cancellation point in here
-#endif
+      conn->codec_context->sample_rate = conn->input_rate;
-  return 0;
+      conn->ffmpeg_decoding_chain_initialised = 1;
+      pthread_cleanup_pop(0); // successful exit -- don't run the avcodec_open2_cleanup_handler
+      pthread_cleanup_pop(0); // successful exit -- don't deallocate the malloc
+      pthread_cleanup_pop(
+          0); // successful exit -- don't run the avcodec_alloc_context3_cleanup_handler
+    }
+  }
}
-static void terminate_decoders(rtsp_conn_info *conn) {
- alac_free(conn->decoder_info);
-#ifdef CONFIG_APPLE_ALAC
- apple_alac_terminate();
-#endif
+// take an AV Frame, run it through the swr resampler and map the output to the
+// appropriate channels for the output device
+
+// On success, *decoded_audio receives a malloc'd sample buffer (ownership passes
+// to the caller), *decoded_audio_data_length its size in bytes and
+// *decoded_audio_samples_count the number of frames it contains. On a
+// swr_convert error all three outputs are zeroed/NULLed.
+
+// Returns swr_get_delay() scaled by the output rate -- i.e. the number of frames
+// still held inside the resampler. (An earlier comment said "nanoseconds", but
+// the time base passed to swr_get_delay below is the output sample rate, so the
+// result is in output frames.)
+
+int64_t avframe_to_audio(rtsp_conn_info *conn, AVFrame *decoded_frame, uint8_t **decoded_audio,
+                         size_t *decoded_audio_data_length, size_t *decoded_audio_samples_count) {
+  uint8_t *pcm_audio = NULL;
+  int dst_linesize;
+
+  // upper bound on the number of frames the resampler could emit for this input
+  int number_of_output_samples_expected = swr_get_out_samples(conn->swr, decoded_frame->nb_samples);
+
+  debug(3, "A maximum of %d output samples expected for %d input samples.",
+        number_of_output_samples_expected, decoded_frame->nb_samples);
+  // allocate enough space for the required number of output channels
+  // and the number of samples decoded
+  // the format is always S32
+  av_samples_alloc(&pcm_audio, &dst_linesize, conn->resampler_output_channels,
+                   number_of_output_samples_expected, AV_SAMPLE_FMT_S32, 0);
+  uint64_t conversion_start_time = get_absolute_time_in_ns();
+  int samples_generated =
+      swr_convert(conn->swr, &pcm_audio, number_of_output_samples_expected,
+                  (const uint8_t **)decoded_frame->extended_data, decoded_frame->nb_samples);
+  debug(3, "conversion time for %u incoming samples: %.3f milliseconds.", decoded_frame->nb_samples,
+        (get_absolute_time_in_ns() - conversion_start_time) * 0.000001);
+  if (samples_generated > 0) {
+    debug(3, "swr generated %d frames of %" PRId64 " channels.", samples_generated,
+          conn->resampler_output_channels);
+    // samples_generated will be different from
+    // the number of samples input if the output rate is different from the input
+    // now, allocate a buffer and transfer the audio into it.
+
+    ssize_t sample_buffer_size = conn->resampler_output_bytes_per_sample *
+                                 CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) *
+                                 samples_generated;
+    void *sample_buffer = malloc(sample_buffer_size);
+
+    memset(sample_buffer, 0, sample_buffer_size); // silence
+
+    unsigned int input_stride = conn->resampler_output_channels;
+    unsigned int output_stride = CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration);
+
+    unsigned int channels_to_map =
+        CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration); // the output width
+    // if (conn->output_channel_map_size < channels_to_map)
+    //   channels_to_map = conn->output_channel_map_size; // or the channel map given
+
+    // interleave/copy resampler channels into device channel order, sample by sample
+    switch (conn->resampler_output_bytes_per_sample) {
+    case 4: {
+      int32_t *inframe = (int32_t *)pcm_audio;
+      int32_t *outframe = (int32_t *)sample_buffer;
+      int i;
+      for (i = 0; i < samples_generated; i++) {
+        unsigned int j;
+        for (j = 0; j < channels_to_map; j++) {
+          if (conn->output_channel_to_resampler_channel_map[j] == front_mono_channel_index) {
+            // asking for the "FM" channel, which is a made-up name for
+            // Front Mono
+            int32_t monoValue = (inframe[0] / 2) + (inframe[1] / 2);
+            outframe[j] = monoValue;
+          } else if (conn->output_channel_to_resampler_channel_map[j] !=
+                     silent_channel_index) // the silent channel is skipped,
+                                           // keeping the zeroed (silent) value
+            outframe[j] = inframe[conn->output_channel_to_resampler_channel_map[j]];
+        }
+        inframe += input_stride;   // address increment is scaled by the
+                                   // size of an int32_t
+        outframe += output_stride; // address increment is scaled by the
+                                   // size of an int32_t
+      }
+    } break;
+    case 2: {
+      int16_t *inframe = (int16_t *)pcm_audio;
+      int16_t *outframe = (int16_t *)sample_buffer;
+      int i;
+      for (i = 0; i < samples_generated; i++) {
+        unsigned int j;
+        for (j = 0; j < channels_to_map; j++) {
+          if (conn->output_channel_to_resampler_channel_map[j] == front_mono_channel_index) {
+            // asking for the "FM" channel, which is a made-up name for
+            // Front Mono
+            int16_t monoValue = (inframe[0] / 2) + (inframe[1] / 2);
+            outframe[j] = monoValue;
+          } else if (conn->output_channel_to_resampler_channel_map[j] !=
+                     silent_channel_index) // the silent channel is skipped,
+                                           // keeping the zeroed (silent) value
+            outframe[j] = inframe[conn->output_channel_to_resampler_channel_map[j]];
+        }
+        inframe += input_stride;   // address increment is scaled by the
+                                   // size of an int16_t
+        outframe += output_stride; // address increment is scaled by the
+                                   // size of an int16_t
+      }
+    } break;
+    default:
+      debug(1, "resampler output byte size of %u not handled.",
+            conn->resampler_output_bytes_per_sample);
+      break;
+    }
+
+    *decoded_audio = sample_buffer;
+    *decoded_audio_data_length = sample_buffer_size;
+    *decoded_audio_samples_count = samples_generated;
+  } else {
+    // samples_generated contains the negative of the error code
+    debug(1, "swr_convert error %d. No samples generated from this avframe", -samples_generated);
+    *decoded_audio = NULL;
+    *decoded_audio_data_length = 0;
+    *decoded_audio_samples_count = 0;
+  }
+  av_freep(&pcm_audio);
+  return swr_get_delay(
+      conn->swr,
+      RATE_FROM_ENCODED_FORMAT(
+          config.current_output_configuration)); // number of frames left in the resampler
}
-uint64_t buffers_allocated = 0;
-uint64_t buffers_released = 0;
-static void init_buffer(rtsp_conn_info *conn) {
- // debug(1,"input_bytes_per_frame: %d.", conn->input_bytes_per_frame);
- // debug(1,"input_bit_depth: %d.", conn->input_bit_depth);
- int i;
- for (i = 0; i < BUFFER_FRAMES; i++) {
- // conn->audio_buffer[i].data = malloc(conn->input_bytes_per_frame *
- // conn->max_frames_per_packet);
- void *allocation = malloc(8 * conn->max_frames_per_packet);
- if (allocation == NULL) {
- die("could not allocate memory for audio buffers. %" PRId64 " buffers allocated, %" PRId64
- " buffers released.",
- buffers_allocated, buffers_released);
+// take a block of incoming data and decode it.
+// it might get decoded into fltp or lpcm or something -- it'll be
+// transcoded and maybe resampled later
+// Returns a newly-allocated AVFrame (caller frees it with av_frame_free) or
+// NULL if the block is too short, allocation fails or decoding fails.
+AVFrame *block_to_avframe(rtsp_conn_info *conn, uint8_t *incoming_data,
+                          size_t incoming_data_length) {
+  AVFrame *decoded_frame = NULL;
+  // anything of 8 bytes or fewer is not treated as a decodable packet
+  if (incoming_data_length > 8) {
+    AVPacket *pkt = av_packet_alloc();
+    if (pkt) {
+      // push a deallocator -- av_packet_free(pkt);
+      pthread_cleanup_push(av_packet_alloc_cleanup_handler, &pkt);
+      // the packet refers to the caller's buffer; it is not copied here
+      pkt->data = incoming_data;
+      pkt->size = incoming_data_length;
+      int ret = avcodec_send_packet(conn->codec_context, pkt);
+      if (ret == 0) {
+        decoded_frame = av_frame_alloc();
+        if (decoded_frame == NULL) {
+          debug(1, "Can't allocate an AVFrame!");
+        } else {
+          ret = avcodec_receive_frame(conn->codec_context, decoded_frame);
+
+          if (ret < 0) {
+            // decoding failed -- release the frame and return NULL
+            av_frame_free(&decoded_frame);
+            decoded_frame = NULL;
+            debug(1, "error %d during decoding. Data size: %zd", ret, incoming_data_length);
+            /*
+            char *obf = malloc(incoming_data_length * 3);
+            char *obfp = obf;
+            unsigned int obfc;
+            for (obfc = 0; obfc < incoming_data_length; obfc++) {
+              snprintf(obfp, 3, "%02X", incoming_data[obfc]);
+              obfp += 2;
+              if ((obfc & 7) == 7) {
+                snprintf(obfp, 2, " ");
+                obfp += 1;
+              }
+            };
+            *obfp = 0;
+            debug(1, "%s...", obf);
+            free(obf);
+            */
+          }
+        }
+      } else {
+        debug(1, "error %d during decoding. Gross data size: %zd", ret, incoming_data_length);
+        /*
+        char obf[128];
+        char *obfp = obf;
+        int obfc;
+        for (obfc = 0; obfc < 32; obfc++) {
+          snprintf(obfp, 3, "%02X", incoming_data[obfc]);
+          obfp += 2;
+          if ((obfc & 7) == 7) {
+            snprintf(obfp, 2, " ");
+            obfp += 1;
+          }
+        }
+        *obfp = 0;
+        debug(1, "%s", obf);
+        */
+      }
+      pthread_cleanup_pop(1); // deallocate the AVPacket;
} else {
-    conn->audio_buffer[i].data = allocation;
-    buffers_allocated++;
+      debug(1, "Can't allocate an AVPacket!");
}
}
+  return decoded_frame;
}
-static void free_audio_buffers(rtsp_conn_info *conn) {
- int i;
- for (i = 0; i < BUFFER_FRAMES; i++) {
- free(conn->audio_buffer[i].data);
- buffers_released++;
+// Discard any audio still buffered inside the software resampler by
+// re-initialising it. Returns the number of output samples that were pending
+// (and have now been dropped), or 0 if there is no resampler.
+size_t avflush(rtsp_conn_info *conn) {
+  size_t response = 0;
+  if (conn->swr != NULL) {
+
+    // swr_get_out_samples(swr, 0) gives the number of output samples the
+    // resampler could still produce from its internal buffers
+    int number_of_output_samples_expected = swr_get_out_samples(conn->swr, 0);
+    debug(3, "avflush of %d samples.", number_of_output_samples_expected);
+    // re-initialising the context discards its buffered state
+    int ret = swr_init(conn->swr);
+    if (ret)
+      debug(1, "error %d in swr_init().", ret);
+    response = (size_t)number_of_output_samples_expected;
}
-  debug(2, "%" PRId64 " buffers allocated, %" PRId64 " buffers released.", buffers_allocated,
-        buffers_released);
+  return response;
}
-int first_possibly_missing_frame = -1;
+#endif
-void reset_buffer(rtsp_conn_info *conn) {
- debug_mutex_lock(&conn->ab_mutex, 30000, 0);
- ab_resync(conn);
- debug_mutex_unlock(&conn->ab_mutex, 0);
- if (config.output->flush) {
- config.output->flush(); // no cancellation points
- // debug(1, "reset_buffer: flush output device.");
+#ifdef CONFIG_OPENSSL
+// Thanks to
+// https://stackoverflow.com/questions/27558625/how-do-i-use-aes-cbc-encrypt-128-openssl-properly-in-ubuntu
+// for inspiration. Changed to a 128-bit key and no padding.
+
+// Decrypt ciphertext_len bytes of AES-128-CBC data with the given key and IV
+// into plaintext. Padding is disabled, so the output length equals the input
+// length (the input is assumed to be a whole number of 16-byte blocks).
+// Returns the number of plaintext bytes produced: 0 if context creation,
+// DecryptInit or DecryptUpdate fails; if only DecryptFinal fails, the bytes
+// already produced by DecryptUpdate are still counted.
+int openssl_aes_decrypt_cbc(unsigned char *ciphertext, int ciphertext_len, unsigned char *key,
+                            unsigned char *iv, unsigned char *plaintext) {
+  EVP_CIPHER_CTX *ctx;
+  int len;
+  int plaintext_len = 0;
+  ctx = EVP_CIPHER_CTX_new();
+  if (ctx != NULL) {
+    if (EVP_DecryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv) == 1) {
+      EVP_CIPHER_CTX_set_padding(ctx, 0); // no padding -- always returns 1
+      // no need to allow space for padding in the output, as padding is disabled
+      if (EVP_DecryptUpdate(ctx, plaintext, &len, ciphertext, ciphertext_len) == 1) {
+        plaintext_len = len;
+        if (EVP_DecryptFinal_ex(ctx, plaintext + len, &len) == 1) {
+          plaintext_len += len;
+        } else {
+          debug(1, "EVP_DecryptFinal_ex error \"%s\".", ERR_error_string(ERR_get_error(), NULL));
+        }
+      } else {
+        debug(1, "EVP_DecryptUpdate error \"%s\".", ERR_error_string(ERR_get_error(), NULL));
+      }
+    } else {
+      debug(1, "EVP_DecryptInit_ex error \"%s\".", ERR_error_string(ERR_get_error(), NULL));
+    }
+    EVP_CIPHER_CTX_free(ctx);
+  } else {
+    debug(1, "EVP_CIPHER_CTX_new error \"%s\".", ERR_error_string(ERR_get_error(), NULL));
+  }
+  return plaintext_len;
+}
+#endif
-void get_audio_buffer_size_and_occupancy(unsigned int *size, unsigned int *occupancy,
- rtsp_conn_info *conn) {
- debug_mutex_lock(&conn->ab_mutex, 30000, 0);
- *size = BUFFER_FRAMES;
- if (conn->ab_synced) {
- int16_t occ =
- conn->ab_write - conn->ab_read; // will be zero or positive if read and write are within
- // 2^15 of each other and write is at or after read
- *occupancy = occ;
- } else {
- *occupancy = 0;
+#ifdef CONFIG_AIRPLAY_2
+
+#ifdef CONFIG_AIRPLAY_2
+// This is a big dirty hack to try to accommodate packets that come in in sequence but are timed to
+// be earlier than what went before them. This happens when the feed is switching from AAC to ALAC.
+// So basically we will look back through the buffers in the queue until we find the last buffer
+// that predates the incoming one. We will make the subsequent buffer the revised_seqno. If we can't
+// find an older buffer, that means we can't go back far enough to find an older buffer and then the
+// ab_read buffer becomes the revised_seqno.
+seq_t get_revised_seqno(rtsp_conn_info *conn, uint32_t timestamp) {
+  // go back through the buffers to find the first buffer following a buffer that predates
+  // the given timestamp, if any.
+  seq_t revised_seqno = conn->ab_write;
+  pthread_cleanup_debug_mutex_lock(&conn->ab_mutex, 30000, 0);
+  int older_seqno_found = 0;
+  // scan backwards from the newest buffer towards ab_read
+  while ((older_seqno_found == 0) && (revised_seqno != conn->ab_read)) {
+    revised_seqno--;
+    abuf_t *tbuf = conn->audio_buffer + BUFIDX(revised_seqno);
+    if (tbuf->ready != 0) {
+      // signed 32-bit subtraction so that timestamp wraparound is handled
+      int32_t timestamp_difference = timestamp - tbuf->timestamp;
+      if (timestamp_difference >= 0) {
+        older_seqno_found = 1;
+      }
+    }
+  }
+  // step forward to the first buffer that does not predate the timestamp
+  if (older_seqno_found)
+    revised_seqno++;
+
+  pthread_cleanup_pop(1);
+  return revised_seqno;
+}
+
+// Free the payloads of every audio buffer from `from_here` up to (but not
+// including) the write pointer ab_write.
+// NOTE(review): unlike get_revised_seqno, this takes no lock on ab_mutex --
+// presumably the caller is expected to hold it; confirm at call sites.
+void clear_buffers_from(rtsp_conn_info *conn, seq_t from_here) {
+  seq_t bi = from_here;
+  while (bi != conn->ab_write) {
+    abuf_t *tbuf = conn->audio_buffer + BUFIDX(bi);
+    free_audio_buffer_payload(tbuf);
+    bi++;
+  }
-  debug_mutex_unlock(&conn->ab_mutex, 0);
}
-void player_put_packet(int original_format, seq_t seqno, uint32_t actual_timestamp, uint8_t *data,
- int len, rtsp_conn_info *conn) {
+#endif
+
+#endif
+
+#ifdef CONFIG_FFMPEG
+uint32_t player_put_packet(uint32_t ssrc, seq_t seqno, uint32_t actual_timestamp, uint8_t *data,
+ size_t len, int mute, int32_t timestamp_gap, rtsp_conn_info *conn) {
+#else
+uint32_t player_put_packet(uint32_t ssrc, seq_t seqno, uint32_t actual_timestamp, uint8_t *data,
+ size_t len, __attribute__((unused)) int mute, int32_t timestamp_gap,
+ rtsp_conn_info *conn) {
+#endif
+
+ // clang-format off
+
+ // The timestamp_gap is the difference between the timestamp and the expected timestamp.
+ // It should normally be zero.
+
+ // It can be decoded by the Hammerton or Apple ALAC decoders, or by the FFmpeg decoder.
+
+ // The SSRC signifies the encoding used for that block of audio.
+ // It is used to select the type of decoding to be done by the FFMPEG-based
+ // decoding chain
- // if it's original format, it has a valid seqno and must be decoded
- // otherwise, it can take the next seqno and doesn't need decoding.
+ // If mute is true, then decode the packet to get its length, but mute it -- i.e.
+ // replace it with the same duration of silence.
+ // This is useful because the first block of an AAC play sequence usually contains
+ // noisy transients.
+ // Not needed in Classic airPlay as there's no AAC in it.
+
+ // Function returns the number of samples in the packet so that callers can watch for
+ // anomalies in sequencing.
+ // clang-format on
+
+ uint32_t input_packets_used = 0;
// ignore a request to flush that has been made before the first packet...
if (conn->packet_count == 0) {
debug_mutex_unlock(&conn->flush_mutex, 3);
}
- debug_mutex_lock(&conn->ab_mutex, 30000, 0);
+ pthread_cleanup_debug_mutex_lock(&conn->ab_mutex, 30000, 0);
uint64_t time_now = get_absolute_time_in_ns();
conn->packet_count++;
conn->packet_count_since_flush++;
conn->ab_read = seqno;
conn->ab_synced = 1;
conn->first_packet_timestamp = 0;
- debug(2, "Connection %d: synced by first packet, seqno %u.", conn->connection_number, seqno);
- } else if (original_format == 0) {
- // if the packet is coming in original format, the sequence number is important
- // otherwise, ignore is by setting it equal to the expected sequence number in ab_write
- seqno = conn->ab_write;
+ debug(2, "Connection %d: synced by first packet, timestamp %u, seqno %u.",
+ conn->connection_number, actual_timestamp, seqno);
}
if (conn->ab_write ==
seqno) { // if this is the expected packet (which could be the first packet...)
conn->frames_inward_measurement_time = time_now;
conn->frames_inward_frames_received_at_measurement_time = actual_timestamp;
abuf = conn->audio_buffer + BUFIDX(seqno);
- conn->ab_write = seqno + 1; // move the write pointer to the next free space
- } else if (is_after(conn->ab_write, seqno)) { // newer than expected
- int32_t gap = seqno - conn->ab_write;
- if (gap <= 0)
- debug(1, "Unexpected gap size: %d.", gap);
- int i;
- for (i = 0; i < gap; i++) {
- abuf = conn->audio_buffer + BUFIDX(conn->ab_write + i);
- abuf->ready = 0; // to be sure, to be sure
- abuf->resend_request_number = 0;
- abuf->initialisation_time =
- time_now; // this represents when the packet was noticed to be missing
- abuf->status = 1 << 0; // signifying missing
- abuf->resend_time = 0;
- abuf->given_timestamp = 0;
- abuf->sequence_number = 0;
+ conn->ab_write = seqno + 1; // move the write pointer to the next free space
+ } else {
+ int16_t after_ab_write_gap = seqno - conn->ab_write;
+ if (after_ab_write_gap > 0) {
+ int i;
+ for (i = 0; i < after_ab_write_gap; i++) {
+ abuf = conn->audio_buffer + BUFIDX(conn->ab_write + i);
+ abuf->ready = 0; // to be sure, to be sure
+ abuf->resend_request_number = 0;
+ abuf->initialisation_time =
+ time_now; // this represents when the packet was noticed to be missing
+ abuf->status = 1 << 0; // signifying missing
+ abuf->resend_time = 0;
+ abuf->timestamp = 0;
+ abuf->sequence_number = 0;
+ }
+ abuf = conn->audio_buffer + BUFIDX(seqno);
+ // rtp_request_resend(ab_write, gap);
+ // resend_requests++;
+ conn->ab_write = seqno + 1;
+ } else {
+ int16_t after_ab_read_gap = seqno - conn->ab_read;
+ if (after_ab_read_gap >= 0) { // older than expected but not too late
+ debug(3, "buffer %u is older than expected but not too late", seqno);
+ conn->late_packets++;
+ abuf = conn->audio_buffer + BUFIDX(seqno);
+ } else { // too late.
+ debug(3, "buffer %u is too late", seqno);
+ conn->too_late_packets++;
+ }
}
- abuf = conn->audio_buffer + BUFIDX(seqno);
- // rtp_request_resend(ab_write, gap);
- // resend_requests++;
- conn->ab_write = seqno + 1;
- } else if (is_after(conn->ab_read, seqno)) { // older than expected but not too late
- conn->late_packets++;
- abuf = conn->audio_buffer + BUFIDX(seqno);
- } else { // too late.
- conn->too_late_packets++;
}
-
if (abuf) {
- int datalen = conn->max_frames_per_packet;
+ if (free_audio_buffer_payload(abuf)) {
+ if (seqno == abuf->sequence_number)
+ debug(3, "audio block %u received for a second (or more) time?", seqno);
+ else
+ debug(3, "audio block %u with prior sequence number %u -- payload not freed until now!",
+ seqno, abuf->sequence_number);
+ }
abuf->initialisation_time = time_now;
abuf->resend_time = 0;
- if ((original_format != 0) &&
- (audio_packet_decode(abuf->data, &datalen, data, len, conn) == 0)) {
+ abuf->length = 0; // may not be needed
+
+ if (ssrc == ALAC_44100_S16_2) {
+ // This could be a Classic AirPlay or an AirPlay 2 Realtime packet.
+ // It always has a length of 352 frames per packet.
+ // And it's always 16-bit interleaved stereo.
+ uint8_t *data_to_use = data;
+ uint8_t *intermediate_buffer = malloc(len); // encryption is not compression...
+
+ // decrypt it if necessary
+ if (conn->stream.encrypted) {
+ unsigned char iv[16];
+ int aeslen = len & ~0xf;
+ memcpy(iv, conn->stream.aesiv, sizeof(iv));
+#ifdef CONFIG_MBEDTLS
+ mbedtls_aes_crypt_cbc(&conn->dctx, MBEDTLS_AES_DECRYPT, aeslen, iv, data,
+ intermediate_buffer);
+#endif
+#ifdef CONFIG_POLARSSL
+ aes_crypt_cbc(&conn->dctx, AES_DECRYPT, aeslen, iv, data, intermediate_buffer);
+#endif
+#ifdef CONFIG_OPENSSL
+ openssl_aes_decrypt_cbc(data, aeslen, conn->stream.aeskey, iv, intermediate_buffer);
+ // AES_cbc_encrypt(data, intermediate_buffer, aeslen, &conn->aes, iv, AES_DECRYPT);
+#endif
+ memcpy(intermediate_buffer + aeslen, data + aeslen, len - aeslen);
+ data_to_use = intermediate_buffer;
+ }
+
+ // Use the selected decoder
+ if ((config.decoder_in_use == 1 << decoder_hammerton) ||
+ (config.decoder_in_use == 1 << decoder_apple_alac)) {
+ abuf->data = malloc(conn->frames_per_packet * conn->input_bytes_per_frame);
+ if (abuf->data != NULL) {
+ unencrypted_packet_decode(conn, data_to_use, len, abuf->data);
+ input_packets_used = conn->frames_per_packet; // return this to the caller
+ abuf->length = conn->frames_per_packet; // these decoders don't transcode
+ } else {
+ debug(1, "audio block not allocated!");
+ }
+ } else if (config.decoder_in_use == 1 << decoder_ffmpeg_alac) {
+#ifdef CONFIG_FFMPEG
+ prepare_decoding_chain(conn, ALAC_44100_S16_2);
+ // if (len > 8) {
+ abuf->avframe = block_to_avframe(conn, data_to_use, len);
+ abuf->ssrc = ALAC_44100_S16_2;
+ if (abuf->avframe) {
+ input_packets_used = abuf->avframe->nb_samples;
+ }
+ if (mute) {
+ // it's important to have already run it through the decoder before dropping it
+          // especially if it is an AAC decoder
+ debug(2, "ap1 muting frame %u.", actual_timestamp);
+ abuf->length = abuf->avframe->nb_samples;
+ av_frame_free(&abuf->avframe);
+ abuf->avframe = NULL;
+ }
+ // } else {
+ if (len <= 8) {
+ debug(1,
+ "Using the FFMPEG ALAC_44100_S16_2 decoder, a short audio packet %u, rtptime %u, of length %zu has been decoded but not discarded. Contents follow:", seqno,
+ actual_timestamp, len);
+ debug_print_buffer(1, data, len);
+ // abuf->length = conn->frames_per_packet;
+ // abuf->avframe = NULL;
+ }
+#else
+ debug(1, "FFMPEG support has not been built into this version Shairport Sync!");
+#endif
+ } else {
+ debug(1, "Unknown decoder!");
+ }
+
+ // may be used during decryption
+ if (intermediate_buffer != NULL) {
+ free(intermediate_buffer);
+ intermediate_buffer = NULL;
+ }
+
abuf->ready = 1;
abuf->status = 0; // signifying that it was received
- abuf->length = datalen;
- abuf->given_timestamp = actual_timestamp;
+ abuf->timestamp = actual_timestamp;
+ abuf->timestamp_gap = timestamp_gap; // needed to decide if a resync is needed
abuf->sequence_number = seqno;
- } else if (original_format == 0) {
- memcpy(abuf->data, data, len * conn->input_bytes_per_frame);
+ } else {
+ // This is AirPlay 2 -- always use FFmpeg
+#ifdef CONFIG_FFMPEG
+
+ // Use the appropriate FFMPEG decoder
+
+ // decoding is done now, transcoding to S32 and resampling is
+ // deferred to the player thread, to be sure all the blocks
+ // of data are present
+
+ prepare_decoding_chain(conn, ssrc); // dynamically set the decoding environment
+
+ // if (len > 8) {
+ abuf->avframe = block_to_avframe(conn, data, len);
+ abuf->ssrc = ssrc; // tag the avframe with its specific SSRC
+ if (abuf->avframe) {
+ input_packets_used = abuf->avframe->nb_samples;
+ }
+ if (mute) {
+ // it's important to have already run it through the decoder before dropping it
+ debug(2, "ap2 muting frame %u.", actual_timestamp);
+ abuf->length = abuf->avframe->nb_samples;
+ av_frame_free(&abuf->avframe);
+ abuf->avframe = NULL;
+ }
+ //} else {
+ if (len <= 8) {
+ debug(1,
+ "Using an FFMPEG decoder, a short audio packet %u, rtptime %u, of length %zu has been decoded but not discarded. Contents follow:", seqno,
+ actual_timestamp, len);
+ debug_print_buffer(1, data, len);
+ // abuf->length = 0;
+ // abuf->avframe = NULL;
+ }
abuf->ready = 1;
abuf->status = 0; // signifying that it was received
- abuf->length = len;
- abuf->given_timestamp = actual_timestamp;
+ abuf->timestamp = actual_timestamp;
+ abuf->timestamp_gap = timestamp_gap;
abuf->sequence_number = seqno;
- } else {
- debug(1, "Bad audio packet detected and discarded.");
+#else
+ debug(1, "FFMPEG support has not been included, so the packet is discarded.");
abuf->ready = 0;
abuf->status = 1 << 1; // bad packet, discarded
abuf->resend_request_number = 0;
- abuf->given_timestamp = 0;
+ abuf->timestamp = 0;
+ abuf->timestamp_gap = 0;
abuf->sequence_number = 0;
+#endif
}
}
-
+ /*
+ {
+ uint64_t the_time_this_frame_should_be_played;
+ frame_to_local_time(abuf->timestamp,
+ &the_time_this_frame_should_be_played, conn);
+ int64_t lead_time = the_time_this_frame_should_be_played - get_absolute_time_in_ns();
+ debug(1, "put_packet %u, lead time is %.3f ms.", abuf->timestamp, lead_time * 0.000001);
+ }
+ */
int rc = pthread_cond_signal(&conn->flowcontrol);
if (rc)
debug(1, "Error signalling flowcontrol.");
(uint64_t)(config.resend_control_first_check_time * (uint64_t)1000000000);
uint64_t resend_repeat_interval =
(uint64_t)(config.resend_control_check_interval_time * (uint64_t)1000000000);
- uint64_t minimum_remaining_time = (uint64_t)(
- (config.resend_control_last_check_time + config.audio_backend_buffer_desired_length) *
- (uint64_t)1000000000);
+ uint64_t minimum_remaining_time = (uint64_t)((config.resend_control_last_check_time +
+ config.audio_backend_buffer_desired_length) *
+ (uint64_t)1000000000);
uint64_t latency_time = (uint64_t)(conn->latency * (uint64_t)1000000000);
latency_time = latency_time / (uint64_t)conn->input_rate;
debug(3, "request resend of %d packets starting at seqno %u.", missing_frame_run_count,
start_of_missing_frame_run);
if (config.disable_resend_requests == 0) {
- debug_mutex_unlock(&conn->ab_mutex, 3);
+ // debug_mutex_unlock(&conn->ab_mutex, 3);
rtp_request_resend(start_of_missing_frame_run, missing_frame_run_count, conn);
- debug_mutex_lock(&conn->ab_mutex, 20000, 1);
+ // debug_mutex_lock(&conn->ab_mutex, 20000, 1);
conn->resend_requests++;
}
start_of_missing_frame_run = -1;
first_possibly_missing_frame = conn->ab_write;
}
}
- debug_mutex_unlock(&conn->ab_mutex, 0);
+ pthread_cleanup_pop(1);
+ // debug_mutex_unlock(&conn->ab_mutex, 0);
+ return input_packets_used;
}
int32_t rand_in_range(int32_t exclusive_range_limit) {
static inline void process_sample(int32_t sample, char **outp, sps_format_t format, int volume,
int dither, rtsp_conn_info *conn) {
- /*
- {
- static int old_volume = 0;
- if (volume != old_volume) {
- debug(1,"Volume is now %d.",volume);
- old_volume = volume;
- }
- }
- */
-
int64_t hyper_sample = sample;
int result = 0;
- if (config.loudness) {
+ if (conn->do_loudness != 0) {
hyper_sample <<=
32; // Do not apply volume as it has already been done with the Loudness DSP filter
} else {
// next, do dither, if necessary
if (dither) {
- // add a TPDF dither -- see
+ // Add a TPDF dither -- see
// http://educypedia.karadimov.info/library/DitherExplained.pdf
// and the discussion around https://www.hydrogenaud.io/forums/index.php?showtopic=16963&st=25
int64_t tpdf = (r & dither_mask) - (conn->previous_random_number & dither_mask);
conn->previous_random_number = r;
// add dither, allowing for clipping
+
if (tpdf >= 0) {
if (INT64_MAX - tpdf >= hyper_sample)
hyper_sample += tpdf;
*outp += result;
}
-void buffer_get_frame_cleanup_handler(void *arg) {
- rtsp_conn_info *conn = (rtsp_conn_info *)arg;
- debug_mutex_unlock(&conn->ab_mutex, 0);
+void buffer_get_frame_cleanup_handler(__attribute__((unused)) void *arg) {
+ // rtsp_conn_info *conn = (rtsp_conn_info *)arg;
+ // debug_mutex_unlock(&conn->ab_mutex, 0);
}
// get the next frame, when available. return 0 if underrun/stream reset.
-static abuf_t *buffer_get_frame(rtsp_conn_info *conn) {
+static abuf_t *buffer_get_frame(rtsp_conn_info *conn, int resync_requested) {
// int16_t buf_fill;
- uint64_t local_time_now;
// struct timespec tn;
+
+ /*
+ {
+ abuf_t *curframe = conn->audio_buffer + BUFIDX(conn->ab_read);
+ if (curframe != NULL) {
+ debug(1, "get seqno %u with ready: %u.", curframe->sequence_number, curframe->ready);
+ }
+ }
+ */
+
abuf_t *curframe = NULL;
int notified_buffer_empty = 0; // diagnostic only
- debug_mutex_lock(&conn->ab_mutex, 30000, 0);
+ pthread_cleanup_debug_mutex_lock(&conn->ab_mutex, 30000, 0);
int wait;
long dac_delay = 0; // long because alsa returns a long
- int have_sent_prefiller_silence =
+ int output_device_has_been_primed =
0; // set to true when we have sent at least one silent frame to the DAC
pthread_cleanup_push(buffer_get_frame_cleanup_handler,
(void *)conn); // undo what's been done so far
do {
- pthread_testcancel(); // even if no packets are coming in...
- // get the time
- local_time_now = get_absolute_time_in_ns(); // type okay
// debug(3, "buffer_get_frame is iterating");
-
// we must have timing information before we can do anything here
if (have_timestamp_timing_information(conn)) {
debug_mutex_unlock(&conn->flush_mutex, 0);
}
- debug_mutex_lock(&conn->flush_mutex, 1000, 0);
- pthread_cleanup_push(mutex_unlock, &conn->flush_mutex);
+ pthread_cleanup_debug_mutex_lock(&conn->flush_mutex, 1000, 0);
if (conn->flush_requested == 1) {
- if (conn->flush_output_flushed == 0)
+ if (conn->flush_output_flushed == 0) {
+#if CONFIG_FFMPEG
+ avflush(conn);
+#endif
if (config.output->flush) {
config.output->flush(); // no cancellation points
debug(2, "flush request: flush output device.");
}
+ }
conn->flush_output_flushed = 1;
}
// now check to see it the flush request is for frames in the buffer or not
abuf_t *firstPacket = conn->audio_buffer + BUFIDX(conn->ab_read);
abuf_t *lastPacket = conn->audio_buffer + BUFIDX(conn->ab_write - 1);
if ((firstPacket != NULL) && (firstPacket->ready)) {
- uint32_t first_frame_in_buffer = firstPacket->given_timestamp;
+ uint32_t first_frame_in_buffer = firstPacket->timestamp;
int32_t offset_from_first_frame = conn->flush_rtp_timestamp - first_frame_in_buffer;
if ((lastPacket != NULL) && (lastPacket->ready)) {
// we have enough information to check if the flush is needed or can be discarded
- uint32_t last_frame_in_buffer =
- lastPacket->given_timestamp + lastPacket->length - 1;
+ uint32_t last_frame_in_buffer = lastPacket->timestamp + lastPacket->length - 1;
// clang-format off
// Now we have to work out if the flush frame is in the buffer.
current_packet = conn->audio_buffer + BUFIDX(conn->ab_read);
if (current_packet != NULL) {
uint32_t last_frame_in_current_packet =
- current_packet->given_timestamp + current_packet->length - 1;
+ current_packet->timestamp + current_packet->length - 1;
offset_to_flush_frame =
conn->flush_rtp_timestamp - last_frame_in_current_packet;
if (offset_to_flush_frame > 0) {
debug(2,
"flush to %u request: flush buffer %u, from "
- "%u to %u. ab_write is: %u.",
- conn->flush_rtp_timestamp, conn->ab_read,
- current_packet->given_timestamp,
- current_packet->given_timestamp + current_packet->length - 1,
+ "%u to %zu. ab_write is: %u.",
+ conn->flush_rtp_timestamp, conn->ab_read, current_packet->timestamp,
+ current_packet->timestamp + current_packet->length - 1,
conn->ab_write);
conn->ab_read++;
}
} else {
debug(1, "NULL current_packet");
}
+ pthread_testcancel(); // even if no packets are coming in...
} while ((current_packet == NULL) || (offset_to_flush_frame > 0));
// now remove any frames from the buffer that are before the flush frame itself.
int32_t frames_to_remove =
- conn->flush_rtp_timestamp - current_packet->given_timestamp;
+ conn->flush_rtp_timestamp - current_packet->timestamp;
if (frames_to_remove > 0) {
debug(2, "%u frames to remove from current buffer", frames_to_remove);
void *dest = (void *)current_packet->data;
void *source = dest + conn->input_bytes_per_frame * frames_to_remove;
size_t frames_remaining = (current_packet->length - frames_to_remove);
memmove(dest, source, frames_remaining * conn->input_bytes_per_frame);
- current_packet->given_timestamp = conn->flush_rtp_timestamp;
+ current_packet->timestamp = conn->flush_rtp_timestamp;
current_packet->length = frames_remaining;
}
debug(
"flush request: flush frame %u complete -- buffer contains %u frames, from "
"%u to %u -- flushed to %u in buffer %u, with %u frames remaining.",
conn->flush_rtp_timestamp, last_frame_in_buffer - first_frame_in_buffer + 1,
- first_frame_in_buffer, last_frame_in_buffer,
- current_packet->given_timestamp, conn->ab_read,
- last_frame_in_buffer - current_packet->given_timestamp + 1);
+ first_frame_in_buffer, last_frame_in_buffer, current_packet->timestamp,
+ conn->ab_read, last_frame_in_buffer - current_packet->timestamp + 1);
drop_request = 1;
} else {
if (conn->flush_rtp_timestamp == last_frame_in_buffer + 1) {
conn->first_packet_timestamp = 0;
conn->first_packet_time_to_play = 0;
conn->time_since_play_started = 0;
- have_sent_prefiller_silence = 0;
+ output_device_has_been_primed = 0;
dac_delay = 0;
}
if (drop_request) {
int out_of_date = 1;
uint32_t should_be_frame;
- uint64_t time_to_aim_for = local_time_now;
- uint64_t desired_lead_time = 120000000;
+ uint64_t time_to_aim_for = get_absolute_time_in_ns();
+ uint64_t desired_lead_time = 0;
if (conn->first_packet_timestamp == 0)
time_to_aim_for = time_to_aim_for + desired_lead_time;
if ((thePacket != NULL) && (thePacket->ready)) {
local_time_to_frame(time_to_aim_for, &should_be_frame, conn);
// debug(1,"should_be frame is %u.",should_be_frame);
- int32_t frame_difference = thePacket->given_timestamp - should_be_frame;
+ int32_t frame_difference = thePacket->timestamp - should_be_frame;
if (frame_difference < 0) {
- debug(2, "Dropping out of date packet %u with timestamp %u. Lead time is %f seconds.",
- conn->ab_read, thePacket->given_timestamp,
- frame_difference * 1.0 / 44100.0 + desired_lead_time * 0.000000001);
+ debug(3,
+ "Connection %d: dropping-out-of-date packet %u with timestamp %u. Lead time is "
+ "%f seconds.",
+ conn->connection_number, conn->ab_read, thePacket->timestamp,
+ frame_difference * 1.0 / conn->input_rate + desired_lead_time * 0.000000001);
+ free_audio_buffer_payload(thePacket);
+ conn->last_seqno_read = conn->ab_read;
conn->ab_read++;
} else {
if (conn->first_packet_timestamp == 0)
- debug(2, "Accepting packet %u with timestamp %u. Lead time is %f seconds.",
- conn->ab_read, thePacket->given_timestamp,
- frame_difference * 1.0 / 44100.0 + desired_lead_time * 0.000000001);
+ debug(3,
+ "Connection %d: accepting packet sequence number %u, ab_read: %u with "
+ "timestamp %u. Lead time is %f seconds.",
+ conn->connection_number, thePacket->sequence_number, conn->ab_read,
+ thePacket->timestamp,
+ frame_difference * 1.0 / conn->input_rate + desired_lead_time * 0.000000001);
out_of_date = 0;
}
} else {
- debug(2, "Packet %u empty or not ready.", conn->ab_read);
+ if (thePacket == NULL)
+ debug(2, "Connection %d: packet %u is empty.", conn->connection_number, conn->ab_read);
+ else
+ debug(3, "Connection %d: packet %u not ready.", conn->connection_number, conn->ab_read);
conn->ab_read++;
+        conn->last_seqno_read++; // don't let it trigger the missing packet warning...
}
+ pthread_testcancel(); // even if no packets are coming in...
}
+ int16_t buffers_available = conn->ab_write - conn->ab_read;
- if (conn->ab_synced) {
+ if ((conn->ab_synced) && (buffers_available > 0)) {
curframe = conn->audio_buffer + BUFIDX(conn->ab_read);
- if (curframe != NULL) {
- uint64_t should_be_time;
- frame_to_local_time(curframe->given_timestamp, &should_be_time, conn);
- int64_t time_difference = should_be_time - local_time_now;
- debug(3, "Check packet from buffer %u, timestamp %u, %f seconds ahead.", conn->ab_read,
- curframe->given_timestamp, 0.000000001 * time_difference);
- } else {
- debug(3, "Check packet from buffer %u, empty.", conn->ab_read);
+ if (resync_requested != 0) {
+ /*
+ if (((curframe != NULL) && ((conn->first_packet_timestamp != curframe->timestamp) &&
+ (curframe->timestamp_gap < 0))) ||
+ (resync_requested != 0)) {
+ // ignore a timestamp gap that occurs before the first_packet_timestamp
+ if (curframe == NULL)
+ debug(1, "Connection %d: reset first_packet_timestamp because curframe is NULL.",
+ conn->connection_number);
+ if (curframe->timestamp_gap != 0)
+ debug(1,
+ "Connection %d: reset first_packet_timestamp because curframe %u's timestamp_gap
+ is negative: "
+ "%d.",
+ conn->connection_number, curframe->timestamp, curframe->timestamp_gap);
+ if (resync_requested != 0)
+ */
+ debug(2, "Connection %d: reset first_packet_timestamp resync_requested.",
+ conn->connection_number);
+ conn->ab_buffering = 1;
+ conn->first_packet_timestamp = 0;
+ conn->first_packet_time_to_play = 0;
+ output_device_has_been_primed = 1; // so that it can rely on the delay provided by it
}
- if ((conn->ab_read != conn->ab_write) &&
- (curframe->ready)) { // it could be synced and empty, under
- // exceptional circumstances, with the
- // frame unused, thus apparently ready
-
- if (curframe->sequence_number != conn->ab_read) {
- // some kind of sync problem has occurred.
- if (BUFIDX(curframe->sequence_number) == BUFIDX(conn->ab_read)) {
- // it looks like aliasing has happened
- // jump to the new incoming stuff...
- conn->ab_read = curframe->sequence_number;
- debug(1, "Aliasing of buffer index -- reset.");
- } else {
- debug(1, "Inconsistent sequence numbers detected");
- }
- }
- }
+ if (conn->ab_synced) {
- if ((curframe) && (curframe->ready)) {
- notified_buffer_empty = 0; // at least one buffer now -- diagnostic only.
- if (conn->ab_buffering) { // if we are getting packets but not yet forwarding them to the
- // player
- if (conn->first_packet_timestamp == 0) { // if this is the very first packet
-
- if (config.output->prepare_to_play) // tell the player to get ready
- config.output->prepare_to_play(); // there could be more than one of these sent
-
- conn->first_packet_timestamp =
- curframe->given_timestamp; // we will keep buffering until we are
- // supposed to start playing this
+ if (curframe != NULL) {
+ uint64_t should_be_time;
+ frame_to_local_time(curframe->timestamp, &should_be_time, conn);
+ int64_t time_difference = should_be_time - get_absolute_time_in_ns();
+ debug(3, "Check packet from buffer %u, timestamp %u, %f seconds ahead.", conn->ab_read,
+ curframe->timestamp, 0.000000001 * time_difference);
+ } else {
+ debug(3, "Check packet from buffer %u, empty.", conn->ab_read);
+ }
- // Here, calculate when we should start playing. We need to know when to allow the
- // packets to be sent to the player.
+ if ((conn->ab_read != conn->ab_write) &&
+ (curframe->ready)) { // it could be synced and empty, under
+ // exceptional circumstances, with the
+ // frame unused, thus apparently ready
+
+ if (curframe->sequence_number != conn->ab_read) {
+ // some kind of sync problem has occurred.
+ if (BUFIDX(curframe->sequence_number) == BUFIDX(conn->ab_read)) {
+ // it looks like aliasing has happened
+ // jump to the new incoming stuff...
+ conn->ab_read = curframe->sequence_number;
+ debug(1, "Connection %d: aliasing of buffer index -- reset.",
+ conn->connection_number);
+ } else {
+ debug(1, "Connection %d: inconsistent sequence numbers detected",
+ conn->connection_number);
+ }
+ }
+ }
- // every second or so, we get a reference on when a particular packet should be
- // played.
+ if ((curframe) && (curframe->ready)) {
+ notified_buffer_empty = 0; // at least one buffer now -- diagnostic only.
+ if (conn->ab_buffering) { // if we are getting packets but not yet forwarding them to
+ // the player
+ if (conn->first_packet_timestamp == 0) { // if this is the very first packet
+ conn->first_packet_timestamp =
+ curframe->timestamp; // we will keep buffering until we are
+ // supposed to start playing this
+ debug(2, "Connection %d: first packet timestamp is %u.", conn->connection_number,
+ conn->first_packet_timestamp);
+
+ // Even though it'll be some time before the first frame will be output
+ // (and thus some time before the resampling chain is needed),
+ // we need to set up the output device to correspond to
+ // the input format w.r.t. rate, depth and channels
+ // because we'll be sending silence before the first real frame.
+ debug(3, "reset loudness filters.");
+ loudness_reset();
+#ifdef CONFIG_FFMPEG
+ // Set up the output chain, including the software resampler.
+ debug(2, "set up the output chain to %s for FFmpeg.",
+ get_ssrc_name(curframe->ssrc));
+ setup_software_resampler(conn, curframe->ssrc);
+ conn->output_sample_ratio = 1; // it's always 1 if we're using FFmpeg
+#else
+ // here, in the non-FFmpeg decoder case, we have the first frame, so
+ // we should set up the output device now.
+ debug(3,
+ "set up the output chain for the non-FFmpeg case with incoming audio at %u "
+ "FPS.",
+ conn->input_rate);
+
+ // ask the backend if it can give us its best choice for a non-ffmpeg configuration:
+ if (config.output->get_configuration) {
+ config.current_output_configuration = config.output->get_configuration(
+ 2, conn->input_rate, (unsigned int)(SPS_FORMAT_S16));
+ } else {
+ // otherwise, use the standard 44100/S16_LE/2 for non-ffmpeg operation
+ config.current_output_configuration = CHANNELS_TO_ENCODED_FORMAT(2) |
+ RATE_TO_ENCODED_FORMAT(44100) |
+ FORMAT_TO_ENCODED_FORMAT(SPS_FORMAT_S16_LE);
+ }
- // It probably won't be the timestamp of our first packet, however, so we might
- // have to do some calculations.
+ // tell the output device, if possible
+ if (config.output->configure) {
+ config.output->configure(config.current_output_configuration, NULL);
+ }
- // To calculate when the first packet will be played, we figure out the exact time
- // the packet should be played according to its timestamp and the reference time.
- // The desired latency, typically 88200 frames, will be calculated for in rtp.c,
- // and any desired backend latency offset included in it there.
+ if (conn->input_rate == 0)
+ debug(1, "input rate not set!");
+ else
+ conn->output_sample_ratio =
+ RATE_FROM_ENCODED_FORMAT(config.current_output_configuration) /
+ conn->input_rate;
- uint64_t should_be_time;
+#endif
+ // calculate the output bit depth
+ conn->output_bit_depth = 16; // default;
+ switch (FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration)) {
+ case SPS_FORMAT_S8:
+ case SPS_FORMAT_U8:
+ conn->output_bit_depth = 8;
+ break;
+ case SPS_FORMAT_S16:
+ case SPS_FORMAT_S16_LE:
+ case SPS_FORMAT_S16_BE:
+ conn->output_bit_depth = 16;
+ break;
+ case SPS_FORMAT_S24:
+ case SPS_FORMAT_S24_LE:
+ case SPS_FORMAT_S24_BE:
+ case SPS_FORMAT_S24_3LE:
+ case SPS_FORMAT_S24_3BE:
+ conn->output_bit_depth = 24;
+ break;
+ case SPS_FORMAT_S32:
+ case SPS_FORMAT_S32_LE:
+ case SPS_FORMAT_S32_BE:
+ conn->output_bit_depth = 32;
+ break;
+ case SPS_FORMAT_UNKNOWN:
+ die("An unknown format was encountered while choosing output bit depth. Please "
+ "check your configuration and settings.");
+ break;
+ case SPS_FORMAT_AUTO:
+ die("Invalid format -- SPS_FORMAT_AUTO -- choosing output bit depth. Please "
+ "check your configuration and settings.");
+ break;
+ case SPS_FORMAT_INVALID:
+ die("Invalid format -- SPS_FORMAT_INVALID -- choosing output bit depth. Please "
+ "check your configuration and settings.");
+ break;
+ }
+ debug(3, "output bit depth is %u.", conn->output_bit_depth);
- frame_to_local_time(conn->first_packet_timestamp, // this will go modulo 2^32
- &should_be_time, conn);
+ uint64_t should_be_time;
+ frame_to_local_time(conn->first_packet_timestamp, // this will go modulo 2^32
+ &should_be_time, conn);
- conn->first_packet_time_to_play = should_be_time;
+ conn->first_packet_time_to_play = should_be_time;
- int64_t lt = conn->first_packet_time_to_play - local_time_now;
+ int64_t lt = conn->first_packet_time_to_play - get_absolute_time_in_ns();
- // can't be too late because we skipped late packets already, FLW.
- debug(2, "Connection %d: Lead time for first frame %" PRId64 ": %f seconds.",
- conn->connection_number, conn->first_packet_timestamp, lt * 0.000000001);
-#ifdef CONFIG_METADATA
- // say we have started receiving frames here
- send_ssnc_metadata(
- 'pffr', NULL, 0,
- 0); // "first frame received", but don't wait if the queue is locked
-#endif
- }
+ // can't be too late because we skipped late packets already, FLW.
+ debug(2, "Connection %d: lead time for first frame %u: %f seconds.",
+ conn->connection_number, conn->first_packet_timestamp, lt * 0.000000001);
+ }
- if (conn->first_packet_time_to_play != 0) {
- // Now that we know the timing of the first packet...
- if (config.output->delay) {
- // and that the output device is capable of synchronization...
+ if (conn->first_packet_time_to_play != 0) {
+ // Now that we know the timing of the first packet...
+ if (config.output->delay) {
+ // and that the output device is capable of synchronization...
- // We may send packets of
- // silence from now until the time the first audio packet should be sent
- // and then we will send the first packet, which will be followed by
- // the subsequent packets.
- // here, we figure out whether and what silence to send.
+ // We may send packets of
+ // silence from now until the time the first audio packet should be sent
+ // and then we will send the first packet, which will be followed by
+ // the subsequent packets.
+ // here, we figure out whether and what silence to send.
- uint64_t should_be_time;
+ uint64_t should_be_time;
- // readjust first packet time to play
- frame_to_local_time(conn->first_packet_timestamp, // this will go modulo 2^32
- &should_be_time, conn);
+ // readjust first packet time to play
+ frame_to_local_time(conn->first_packet_timestamp, &should_be_time, conn);
- int64_t change_in_should_be_time =
- (int64_t)(should_be_time - conn->first_packet_time_to_play);
+ int64_t change_in_should_be_time =
+ (int64_t)(should_be_time - conn->first_packet_time_to_play);
- if (fabs(0.000001 * change_in_should_be_time) >
- 0.001) // the clock drift estimation might be nudging the estimate, and we can
- // ignore this unless if's more than a microsecond
- debug(2,
+ if (fabs(0.000001 * change_in_should_be_time) >
+                        0.001) // ignore this unless it's more than a microsecond
+ debug(
+ 2,
"Change in estimated first_packet_time: %f milliseconds for first_packet.",
0.000001 * change_in_should_be_time);
- conn->first_packet_time_to_play = should_be_time;
+ conn->first_packet_time_to_play = should_be_time;
- int64_t lead_time =
- conn->first_packet_time_to_play - local_time_now; // negative means late
- if (lead_time < 0) {
- debug(1, "Gone past starting time for %u by %" PRId64 " nanoseconds.",
- conn->first_packet_timestamp, -lead_time);
- conn->ab_buffering = 0;
- } else {
- // do some calculations
- if ((config.audio_backend_silent_lead_in_time_auto == 1) ||
- (lead_time <=
- (int64_t)(config.audio_backend_silent_lead_in_time * (int64_t)1000000000))) {
- // debug(1, "Lead time: %" PRId64 " nanoseconds.", lead_time);
- int resp = 0;
- dac_delay = 0;
- if (have_sent_prefiller_silence != 0)
- resp = config.output->delay(
- &dac_delay); // we know the output device must have a delay function
- if (resp == 0) {
- int64_t gross_frame_gap =
- ((conn->first_packet_time_to_play - local_time_now) *
- config.output_rate) /
- 1000000000;
- int64_t exact_frame_gap = gross_frame_gap - dac_delay;
- int64_t frames_needed_to_maintain_desired_buffer =
- (int64_t)(config.audio_backend_buffer_desired_length *
- config.output_rate) -
- dac_delay;
- // below, remember that exact_frame_gap and
- // frames_needed_to_maintain_desired_buffer could both be negative
- int64_t fs = frames_needed_to_maintain_desired_buffer;
-
- // if there isn't enough time to have the desired buffer size
- if (exact_frame_gap <= frames_needed_to_maintain_desired_buffer) {
- fs = conn->max_frames_per_packet * 2;
- }
- // if we are very close to the end of buffering, i.e. within two
- // frame-lengths, add the remaining silence needed and end buffering
- if (exact_frame_gap <= conn->max_frames_per_packet * 2) {
- fs = exact_frame_gap;
- if (fs > first_frame_early_bias)
- fs = fs - first_frame_early_bias; // deliberately make the first packet a
- // tiny bit early so that the player may
- // compensate for it at the last minute
- conn->ab_buffering = 0;
- }
- void *silence;
- if (fs > 0) {
- silence = malloc(conn->output_bytes_per_frame * fs);
- if (silence == NULL)
- debug(1, "Failed to allocate %d byte silence buffer.", fs);
- else {
- // generate frames of silence with dither if necessary
- conn->previous_random_number = generate_zero_frames(
- silence, fs, config.output_format, conn->enable_dither,
- conn->previous_random_number);
- config.output->play(silence, fs, play_samples_are_untimed, 0, 0);
- debug(3, "Sent %" PRId64 " frames of silence", fs);
- free(silence);
- have_sent_prefiller_silence = 1;
+ int64_t lead_time = conn->first_packet_time_to_play -
+ get_absolute_time_in_ns(); // negative means late
+ if (lead_time < 0) {
+ debug(2, "Gone past starting time for %u by %" PRId64 " nanoseconds.",
+ conn->first_packet_timestamp, -lead_time);
+ conn->ab_buffering = 0;
+ } else {
+ // do some calculations
+ if ((config.audio_backend_silent_lead_in_time_auto == 1) ||
+ (lead_time <= (int64_t)(config.audio_backend_silent_lead_in_time *
+ (int64_t)1000000000))) {
+ debug(3, "Lead time: %" PRId64 " nanoseconds.", lead_time);
+ int resp = 0;
+ dac_delay = 0;
+ if (output_device_has_been_primed != 0)
+ resp = config.output->delay(
+ &dac_delay); // we know the output device must have a delay function
+ if (resp == 0) {
+ int64_t gross_frame_gap =
+ ((conn->first_packet_time_to_play - get_absolute_time_in_ns()) *
+ RATE_FROM_ENCODED_FORMAT(config.current_output_configuration)) /
+ 1000000000;
+ int64_t exact_frame_gap = gross_frame_gap - dac_delay;
+ debug(3,
+ "Exact frame gap: %" PRId64
+ ". DAC delay: %ld. First packet timestamp: %u.",
+ exact_frame_gap, dac_delay, conn->first_packet_timestamp);
+ // int64_t frames_needed_to_maintain_desired_buffer =
+ // (int64_t)(config.audio_backend_buffer_desired_length *
+ // config.current_output_configuration->rate) -
+ // dac_delay;
+ // below, remember that exact_frame_gap and
+ // frames_needed_to_maintain_desired_buffer could both be negative
+ int64_t fs =
+ (RATE_FROM_ENCODED_FORMAT(config.current_output_configuration) * 100) /
+ 1000; // 100 milliseconds
+ // if there isn't enough time to have the desired buffer size
+ // if (exact_frame_gap <= fs) {
+ // fs = conn->frames_per_packet * 2;
+ // }
+ // if we are close to the end of buffering,
+ // just add the remaining silence needed and end buffering
+ if (exact_frame_gap < fs) {
+ debug(3, "exact frame below fs of %" PRId64 " frames.", fs);
+ fs = exact_frame_gap;
+ conn->ab_buffering = 0;
}
- }
- } else {
-
- if (resp == sps_extra_code_output_stalled) {
- if (config.unfixable_error_reported == 0) {
- config.unfixable_error_reported = 1;
- if (config.cmd_unfixable) {
- command_execute(config.cmd_unfixable, "output_device_stalled", 1);
- } else {
- die("an unrecoverable error, \"output_device_stalled\", has been "
- "detected.");
+ void *silence;
+ if (fs > 0) {
+ silence = malloc(
+ sps_format_sample_size(
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration)) *
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) *
+ fs);
+ if (silence == NULL)
+ debug(1, "Failed to allocate %" PRId64 " byte silence buffer.", fs);
+ else {
+ // generate frames of silence with dither if necessary
+ pthread_cleanup_push(malloc_cleanup, &silence);
+
+ conn->previous_random_number = generate_zero_frames(
+ silence, fs, conn->enable_dither, conn->previous_random_number,
+ config.current_output_configuration);
+
+ debug(3, "Send %" PRId64 " frames of silence.", fs);
+ config.output->play(silence, fs, play_samples_are_untimed, 0, 0);
+ debug(3, "Sent %" PRId64 " frames of silence.", fs);
+ pthread_cleanup_pop(1); // deallocate silence
+ output_device_has_been_primed = 1;
}
}
} else {
- debug(3, "Unexpected response to getting dac delay: %d.", resp);
+ if ((resp == -EAGAIN) || (resp == -EIO) || (resp == -ENODEV)) {
+ debug(2, "delay() information not (yet, hopefully!) available.");
+ } else {
+ debug(1, "delay() error %d: \"%s\".", -resp, strerror(-resp));
+ }
+ if (resp == sps_extra_code_output_stalled) {
+ if (config.unfixable_error_reported == 0) {
+ config.unfixable_error_reported = 1;
+ if (config.cmd_unfixable) {
+ command_execute(config.cmd_unfixable, "output_device_stalled", 1);
+ } else {
+ die("an unrecoverable error, \"output_device_stalled\", has been "
+ "detected.");
+ }
+ }
+ } else {
+ debug(3, "Unexpected response to getting dac delay: %d.", resp);
+ }
}
}
}
- }
- } else {
- // if the output device doesn't have a delay, we simply send the lead-in
- int64_t lead_time =
- conn->first_packet_time_to_play - local_time_now; // negative if we are late
- void *silence;
- int64_t frame_gap = (lead_time * config.output_rate) / 1000000000;
- // debug(1,"%d frames needed.",frame_gap);
- while (frame_gap > 0) {
- ssize_t fs = config.output_rate / 10;
- if (fs > frame_gap)
- fs = frame_gap;
-
- silence = malloc(conn->output_bytes_per_frame * fs);
- if (silence == NULL)
- debug(1, "Failed to allocate %d frame silence buffer.", fs);
- else {
- // debug(1, "No delay function -- outputting %d frames of silence.", fs);
- conn->previous_random_number =
- generate_zero_frames(silence, fs, config.output_format, conn->enable_dither,
- conn->previous_random_number);
- config.output->play(silence, fs, play_samples_are_untimed, 0, 0);
- free(silence);
+ } else {
+ // if the output device doesn't have a delay, we simply send the lead-in
+ int64_t lead_time = conn->first_packet_time_to_play -
+ get_absolute_time_in_ns(); // negative if we are late
+ void *silence;
+ int64_t frame_gap =
+ (lead_time * RATE_FROM_ENCODED_FORMAT(config.current_output_configuration)) /
+ 1000000000;
+ // debug(1,"%d frames needed.",frame_gap);
+ while (frame_gap > 0) {
+ int64_t fs = RATE_FROM_ENCODED_FORMAT(config.current_output_configuration) / 10;
+
+ if (fs > frame_gap)
+ fs = frame_gap;
+
+ silence = malloc(
+ sps_format_sample_size(
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration)) *
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) * fs);
+ if (silence == NULL)
+ debug(1, "Failed to allocate %" PRId64 " frame silence buffer.", fs);
+ else {
+ // debug(1, "No delay function -- outputting %d frames of silence.", fs);
+ pthread_cleanup_push(malloc_cleanup, &silence);
+ conn->previous_random_number = generate_zero_frames(
+ silence, fs, conn->enable_dither, conn->previous_random_number,
+ config.current_output_configuration);
+ config.output->play(silence, fs, play_samples_are_untimed, 0, 0);
+ pthread_cleanup_pop(1); // deallocate silence
+ }
+ frame_gap -= fs;
}
- frame_gap -= fs;
+ conn->ab_buffering = 0;
}
- conn->ab_buffering = 0;
}
- }
#ifdef CONFIG_METADATA
- if (conn->ab_buffering == 0) {
- send_ssnc_metadata('prsm', NULL, 0,
- 0); // "resume", but don't wait if the queue is locked
- }
+ if (conn->ab_buffering == 0) {
+ if ((curframe) && (curframe->ready) && (curframe->timestamp))
+ debug(3, "Current frame timestamp at \"resume\" is %u.", curframe->timestamp);
+ else
+ debug(1, "Current frame at \"resume\" is not known.");
+
+ send_ssnc_metadata('prsm', NULL, 0,
+ 0); // "resume", but don't wait if the queue is locked
+ }
#endif
+ }
}
}
+ } else {
+ // if (conn->ab_synced)
+ // debug(1, "no buffers available at seqno %u.", conn->ab_read);
}
// Here, we work out whether to release a packet or wait
// Note: the last three items are expressed in frames and must be converted to time.
int do_wait = 0; // don't wait unless we can really prove we must
- if ((conn->ab_synced) && (curframe) && (curframe->ready) && (curframe->given_timestamp)) {
+ if ((conn->ab_synced) && (curframe) && (curframe->ready) && (curframe->timestamp)) {
do_wait = 1; // if the current frame exists and is ready, then wait unless it's time to let
// it go...
// we must enable packets to be released early enough for the
// audio buffer to be filled to the desired length
- uint32_t buffer_latency_offset =
+ uint32_t desired_buffer_latency =
(uint32_t)(config.audio_backend_buffer_desired_length * conn->input_rate);
- frame_to_local_time(curframe->given_timestamp -
- buffer_latency_offset, // this will go modulo 2^32
- &time_to_play, conn);
-
- if (local_time_now >= time_to_play) {
+ frame_to_local_time(curframe->timestamp - desired_buffer_latency, &time_to_play, conn);
+ uint64_t current_buffer_delay = 0;
+ int resp = -1;
+ if (config.output->delay) {
+ long l_delay;
+ resp = config.output->delay(&l_delay);
+ if (resp == 0) { // no error
+ if (l_delay >= 0)
+ current_buffer_delay = l_delay;
+ else {
+ debug(2, "Underrun of %ld frames reported, but ignored.", l_delay);
+ current_buffer_delay =
+ 0; // could get a negative value if there was underrun, but ignore it.
+ }
+ }
+ }
+ // If it's the first packet, or we don't have a working delay() function in the backend,
+ // then wait until it's time to play it
+ if ((((conn->first_packet_timestamp == curframe->timestamp) || (resp != 0)) &&
+ (get_absolute_time_in_ns() >= time_to_play)) ||
+ // Otherwise, if it isn't the first packet and we have a valid delay from the backend,
+ // ensure the buffer stays nearly full
+ ((conn->first_packet_timestamp != curframe->timestamp) && (resp == 0) &&
+ (current_buffer_delay < desired_buffer_latency))) {
do_wait = 0;
}
// here, do a sanity check. if the time_to_play is not within a few seconds of the
// The sign indicates the direction: positive means clockwise (upwards) from the
// second number to the first (i.e. the first number comes "after" the second).
- int64_t time_difference = local_time_now - time_to_play;
+ int64_t time_difference = get_absolute_time_in_ns() - time_to_play;
if ((time_difference > 10000000000) || (time_difference < -10000000000)) {
debug(2,
"crazy time interval of %f seconds between time now: 0x%" PRIx64
" and time of packet: %" PRIx64 ".",
- 0.000000001 * time_difference, local_time_now, time_to_play);
- debug(2, "packet rtptime: %u, reference_timestamp: %u", curframe->given_timestamp,
+ 0.000000001 * time_difference, get_absolute_time_in_ns(), time_to_play);
+ debug(2, "packet rtptime: %u, reference_timestamp: %u", curframe->timestamp,
conn->anchor_rtptime);
do_wait = 0; // let it go
}
}
if (do_wait == 0)
+ // wait if the buffer is empty
if ((conn->ab_synced != 0) && (conn->ab_read == conn->ab_write)) { // the buffer is empty!
if (notified_buffer_empty == 0) {
debug(2, "Connection %d: Buffer Empty", conn->connection_number);
notified_buffer_empty = 1;
// reset_input_flow_metrics(conn); // don't do a full flush parameters reset
- conn->initial_reference_time = 0;
- conn->initial_reference_timestamp = 0;
- conn->first_packet_timestamp = 0; // make sure the first packet isn't late
+ // conn->initial_reference_time = 0;
+ // conn->initial_reference_timestamp = 0;
+ // conn->first_packet_timestamp = 0; // make sure the first packet isn't late
}
do_wait = 1;
}
wait = 1; // keep waiting until the timing information becomes available
}
if (wait) {
- if (conn->input_rate == 0)
- die("input_rate is zero -- should never happen!");
- uint64_t time_to_wait_for_wakeup_ns =
- 1000000000 / conn->input_rate; // this is time period of one frame
- time_to_wait_for_wakeup_ns *= 12 * 352; // two full 352-frame packets
- time_to_wait_for_wakeup_ns /= 3; // two thirds of a packet time
+ if (conn->frames_per_packet == 0)
+ debug(1, "frames_per_packet is zero!");
+
+ uint64_t time_to_wait_for_wakeup_ns = 20000000; // default if no input rate is set
+ if (conn->input_rate != 0) {
+ time_to_wait_for_wakeup_ns =
+ 1000000000 / conn->input_rate; // this is time period of one frame
+ time_to_wait_for_wakeup_ns *= 4 * conn->frames_per_packet;
+ }
#ifdef COMPILE_FOR_LINUX_AND_FREEBSD_AND_CYGWIN_AND_OPENBSD
uint64_t time_of_wakeup_ns = get_realtime_in_ns() + time_to_wait_for_wakeup_ns;
int rc = pthread_cond_timedwait(&conn->flowcontrol, &conn->ab_mutex,
&time_of_wakeup); // this is a pthread cancellation point
if ((rc != 0) && (rc != ETIMEDOUT))
+ // if (rc)
debug(3, "pthread_cond_timedwait returned error code %d.", rc);
#endif
#ifdef COMPILE_FOR_OSX
if (!curframe->ready) {
// debug(1, "Supplying a silent frame for frame %u", read);
conn->missing_packets++;
- curframe->given_timestamp = 0; // indicate a silent frame should be substituted
+ curframe->timestamp = 0; // indicate a silent frame should be substituted
}
curframe->ready = 0;
}
conn->ab_read++;
- pthread_cleanup_pop(1);
+
+ pthread_cleanup_pop(1); // unlock the ab_mutex
+ pthread_cleanup_pop(1); // buffer_get_frame_cleanup_handler
+ // debug(1, "Release frame %u.", curframe->timestamp);
+#ifdef CONFIG_FFMPEG
+
+#ifndef CONFIG_AIRPLAY_2
+ if (config.decoder_in_use == 1 << decoder_ffmpeg_alac) {
+#endif
+ // clang-format off
+ // If we're using the Hammerton or ALAC decoder, then curframe->data will
+ // point to a malloced buffer of the stereo interleaved LPCM/44100/S16/2 audio
+ // But here, we must be using the FFMPEG decoder.
+ // With the FFmpeg decoder we have an AVFrame in curframe->avframe.
+ // The format could be anything -- it'll be transcoded here and placed in
+ // malloc memory pointed to by curframe->data and the AVFrame will be freed.
+ // If the avframe is NULL, then the length will be the number of frames of silence
+ // to be inserted into the audio stream to replace an AVFrame of the same length
+ // that is to be muted. Phew.
+ // clang-format on
+
+ if (curframe) {
+ if (conn->resampler_ssrc != curframe->ssrc) {
+ if (conn->resampler_ssrc == SSRC_NONE) {
+ debug(2, "setting up software resampler for %s for the first time.",
+ get_ssrc_name(curframe->ssrc));
+ } else {
+ debug(1, "Connection %d: queued audio buffers switching to \"%s\".", conn->connection_number,
+ get_ssrc_name(curframe->ssrc));
+ clear_software_resampler(conn);
+ // ask the backend if it can give us its best choice for an ffmpeg configuration:
+ }
+ debug(3, "setup software resampler for %s", get_ssrc_name(curframe->ssrc));
+ if (curframe->ssrc != SSRC_NONE) {
+ setup_software_resampler(conn, curframe->ssrc);
+ } else {
+ debug(1, "attempt to setup_software_resampler for SSRC_NONE");
+ }
+ }
+ size_t number_of_output_frames;
+ uint8_t *pp;
+ size_t pl;
+ if (curframe->avframe) {
+ conn->frames_retained_in_the_resampler =
+ avframe_to_audio(conn, curframe->avframe, &pp, &pl, &number_of_output_frames);
+ curframe->data = (short *)pp;
+ curframe->length = number_of_output_frames;
+ av_frame_free(&curframe->avframe);
+ curframe->avframe = NULL;
+ } else if (curframe->length != 0) {
+ // if there's no data and no avframe, then the length is
+ // the number of frames of silence requested.
+ int ret = swr_inject_silence(conn->swr, curframe->length); // hardwired, ugh!
+ if (ret)
+ debug(1, "error %d", ret);
+ // We need to get those frames of silence out of the resampler
+ // so we'll pass in an empty AVFrame to flush them through
+ AVFrame *avf = av_frame_alloc(); // empty frame
+ conn->frames_retained_in_the_resampler =
+ avframe_to_audio(conn, avf, &pp, &pl, &number_of_output_frames);
+ curframe->data = (short *)pp;
+ curframe->length = number_of_output_frames;
+ av_frame_free(&avf);
+ }
+ }
+#ifndef CONFIG_AIRPLAY_2
+ }
+#endif
+
+#endif
+
+#ifdef CONFIG_METADATA
+ if ((curframe != NULL) && (conn->first_packet_timestamp) &&
+ (conn->first_packet_timestamp == curframe->timestamp)) {
+ char buffer[32];
+ memset(buffer, 0, sizeof(buffer));
+ // if this is not a resumption after a discontinuity,
+ // say we have started receiving frames here
+ if (curframe->timestamp_gap == 0) {
+ snprintf(buffer, sizeof(buffer), "%" PRIu32 "/%" PRIu64 "", curframe->timestamp,
+ conn->first_packet_time_to_play);
+ send_ssnc_metadata('pffr', buffer, strlen(buffer),
+ 0); // "first frame received", but don't wait if the queue is locked
+ debug(3, "pffr: \"%s\"", buffer);
+ } else {
+ // otherwise, say a discontinuity occurred
+ snprintf(buffer, sizeof(buffer), "%" PRIu32 "/%" PRId32 "", curframe->timestamp,
+ curframe->timestamp_gap);
+ send_ssnc_metadata(
+ 'pdis', buffer, strlen(buffer),
+ 0); // "a discontinuity of this many frames", but don't wait if the queue is locked
+ debug(3, "pdis: \"%s\"", buffer);
+ }
+ }
+#endif
+
+ if (curframe) {
+ // check sequencing
+ if (conn->last_seqno_valid == 0) {
+ conn->last_seqno_valid = 1;
+ conn->last_seqno_read = curframe->sequence_number;
+ } else {
+ conn->last_seqno_read++;
+ if (curframe->sequence_number != conn->last_seqno_read) {
+ debug(1,
+ "Player: packets out of sequence: expected: %u, got: %u, with ab_read: %u "
+ "and ab_write: %u.",
+ conn->last_seqno_read, curframe->sequence_number, conn->ab_read, conn->ab_write);
+ conn->last_seqno_read = curframe->sequence_number; // reset warning...
+ }
+ }
+ }
+
return curframe;
}
return r;
}
-// this takes an array of signed 32-bit integers and (a) removes or inserts a frame as specified in
-// stuff,
+// this takes an array of channels of signed 32-bit integers and
+// (a) removes or inserts a frame as specified in "stuff",
// (b) multiplies each sample by the fixedvolume (a 16-bit quantity)
// (c) dithers the result to the output size 32/24/16/8 bits
// (d) outputs the result in the approprate format
-// formats accepted so far include U8, S8, S16, S24, S24_3LE, S24_3BE and S32
+// formats accepted include U8, S8, S16, S24, S24_3LE, S24_3BE and S32
+// can only accept a stuff value of plus or minus 1
// stuff: 1 means add 1; 0 means do nothing; -1 means remove 1
static int stuff_buffer_basic_32(int32_t *inptr, int length, sps_format_t l_output_format,
char *outptr, int stuff, int dither, rtsp_conn_info *conn) {
- if (length < 3)
- die("buffer length expected to be 3 or more, but is %d!", length);
- int tstuff = stuff;
- char *l_outptr = outptr;
- if ((stuff > 1) || (stuff < -1) || (length < 100)) {
- // debug(1, "Stuff argument to stuff_buffer must be from -1 to +1 and length >100.");
- tstuff = 0; // if any of these conditions hold, don't stuff anything/
+ int tstuff = 0;
+ if (length >= 3) {
+ tstuff = stuff;
+ if (tstuff)
+ debug(3, "stuff_buffer_basic_32 %+d.", tstuff);
+ char *l_outptr = outptr;
+ if (stuff > 1)
+ stuff = 1;
+ if (stuff < -1)
+ stuff = -1;
+ if ((stuff > 1) || (stuff < -1) || (length < 100)) {
+ // debug(1, "Stuff argument to stuff_buffer must be from -1 to +1 and length >100.");
+ tstuff = 0; // if any of these conditions hold, don't stuff anything/
+ }
+
+ int i;
+ int stuffsamp = length;
+ if (tstuff)
+ // stuffsamp = rand() % (length - 1);
+ stuffsamp =
+ (rand() % (length - 2)) + 1; // ensure there's always a sample before and after the item
+
+ for (i = 0; i < stuffsamp; i++) { // the whole frame, if no stuffing
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++)
+ process_sample(*inptr++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
+ };
+ if (tstuff) {
+ if (tstuff == 1) {
+ // debug(3, "+++++++++");
+ // interpolate one sample
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++)
+ process_sample(mean_32(inptr[-2], inptr[0]), &l_outptr, l_output_format, conn->fix_volume,
+ dither, conn);
+ } else if (stuff == -1) {
+ // debug(3, "---------");
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++)
+ inptr++;
+ }
+
+ // if you're removing, i.e. stuff < 0, copy that much less over. If you're adding, do all the
+ // rest.
+ int remainder = length;
+ if (tstuff < 0)
+ remainder = remainder + tstuff; // don't run over the correct end of the output buffer
+
+ for (i = stuffsamp; i < remainder; i++) {
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++)
+ process_sample(*inptr++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
+ }
+ }
}
+ return length + tstuff;
+}
- int i;
- int stuffsamp = length;
- if (tstuff)
- // stuffsamp = rand() % (length - 1);
- stuffsamp =
- (rand() % (length - 2)) + 1; // ensure there's always a sample before and after the item
-
- for (i = 0; i < stuffsamp; i++) { // the whole frame, if no stuffing
- process_sample(*inptr++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
- process_sample(*inptr++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
- };
- if (tstuff) {
- if (tstuff == 1) {
- // debug(3, "+++++++++");
- // interpolate one sample
- process_sample(mean_32(inptr[-2], inptr[0]), &l_outptr, l_output_format, conn->fix_volume,
- dither, conn);
- process_sample(mean_32(inptr[-1], inptr[1]), &l_outptr, l_output_format, conn->fix_volume,
- dither, conn);
- } else if (stuff == -1) {
- // debug(3, "---------");
- inptr++;
- inptr++;
- }
-
- // if you're removing, i.e. stuff < 0, copy that much less over. If you're adding, do all the
- // rest.
- int remainder = length;
- if (tstuff < 0)
- remainder = remainder + tstuff; // don't run over the correct end of the output buffer
-
- for (i = stuffsamp; i < remainder; i++) {
- process_sample(*inptr++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
- process_sample(*inptr++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
+// this takes an array of channels of n signed 32-bit integers and
+// (a) replaces all of them with channels of n+stuff (+/-1) signed 32-bit integers,
+// by first order interpolation.
+// (b) multiplies each sample by the fixedvolume (a 16-bit quantity)
+// (c) dithers the result to the output size 32/24/16/8 bits
+// (d) outputs the result in the appropriate format
+// formats accepted include U8, S8, S16, S24, S24_3LE, S24_3BE and S32
+
+// stuff: 1 means add 1; 0 means do nothing; -1 means remove 1
+
+static int stuff_buffer_vernier(int32_t *inptr, int length, sps_format_t l_output_format,
+ char *outptr, int stuff, int dither, rtsp_conn_info *conn) {
+ int tstuff = 0;
+ if (length >= 3) {
+ tstuff = stuff;
+ if ((stuff > INTERPOLATION_LIMIT) || (stuff < -INTERPOLATION_LIMIT) || (length < 100)) {
+ debug(2,
+ "Stuff argument %d to stuff_buffer_vernier of length %d must be from -%d to +%d and "
+ "length > 100.",
+ stuff, length, INTERPOLATION_LIMIT, INTERPOLATION_LIMIT);
+ tstuff = 0; // if any of these conditions hold, don't stuff anything/
+ }
+
+ char *l_outptr = outptr;
+ int i;
+
+ if (tstuff == 0) {
+ for (i = 0; i < length; i++) { // the whole frame, if no stuffing
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++)
+ process_sample(*inptr++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
+ }
+ } else {
+ // we are using 64 bit integers to represent fixed point numbers
+ // the high 32 bits are the integer value and the low 32 bits are the fraction.
+ int64_t one_fp = 0x100000000L;
+
+ // this result will always be less than or equal to the exact true value.
+ int64_t step_size_fp = one_fp * (length - 1);
+ step_size_fp = step_size_fp / (length + tstuff - 1);
+
+ // the interpolation is done between the previous sample, starting
+ // with the zeroth sample, and the next one.
+ // the very first and very last sample of the stuffed frame should
+ // correspond 100% to the first and last samples of the original frame.
+
+ // the first sample will be calculated as 100% of the first sample and 0% of the next sample
+ // however, the last sample can not be calculated as 100% of the last sample and
+ // 0% of the next one, because there isn't a "next" sample after the last one, duh.
+
+ // however, rather than add extra code to deal with the last sample,
+ // simply use a copy of the last sample as the "next" one, and the maths will work out.
+
+ int64_t current_input_sample_index_fp = 0;
+ for (i = 0; i < length + tstuff; i++) {
+ int64_t current_input_sample_floor_index =
+ current_input_sample_index_fp >> 32; // this is the (integer) index of the sample before
+ // where the new sample will be interpolated
+ if (current_input_sample_floor_index == length) {
+ // generate the whole and fractional parts of current_input_sample_index_fp for printing
+ // without converting to floating point, which may do rounding.
+ int64_t current_input_sample_index_int = current_input_sample_index_fp >> 32;
+ int64_t current_input_sample_index_low = current_input_sample_index_fp & 0xFFFFFFFF;
+ current_input_sample_index_low =
+ current_input_sample_index_low * 100000; // 100000 for 5 decimal places
+ current_input_sample_index_low = current_input_sample_index_low >> 32;
+ debug(1,
+ "Can't see how this could ever happen, but "
+ "current_input_sample_floor_index %" PRId64
+ " has just stepped outside the frame of %d samples, with stuff %d and current_input_sample_index_fp at %" PRId64 ".%05" PRId64
+ ".",
+ current_input_sample_floor_index, length, stuff, current_input_sample_index_int,
+ current_input_sample_index_low);
+ current_input_sample_floor_index = length - 1; // hack
+ }
+
+ // increment the ceiling index, but ensure it stays within the frame
+ int64_t current_input_sample_ceil_index = current_input_sample_floor_index + 1;
+ if (current_input_sample_ceil_index == length) {
+ if (current_input_sample_floor_index == length - 1) {
+ current_input_sample_ceil_index = length - 1;
+ } else {
+ // generate the whole and fractional parts of current_input_sample_index_fp for printing
+ // without converting to floating point, which may do rounding.
+ int64_t current_input_sample_index_int = current_input_sample_index_fp >> 32;
+ int64_t current_input_sample_index_low = current_input_sample_index_fp & 0xFFFFFFFF;
+ current_input_sample_index_low =
+ current_input_sample_index_low * 100000; // 100000 for 5 decimal places
+ current_input_sample_index_low = current_input_sample_index_low >> 32;
+ debug(1,
+ "Can't see how this could ever happen, but "
+ "current_input_sample_ceil_index %" PRId64
+ " has just stepped outside the frame of %d samples, with stuff %d and current_input_sample_index_fp at %" PRId64
+ ".%05" PRId64 ".",
+ current_input_sample_floor_index, length, stuff, current_input_sample_index_int,
+ current_input_sample_index_low);
+ }
+ }
+
+ /*
+ {
+ // generate the whole and fractional parts of current_input_sample_index_fp for printing
+ // without converting to floating point, which may do rounding.
+ int64_t current_input_sample_index_int = current_input_sample_index_fp >> 32;
+ int64_t current_input_sample_index_low = current_input_sample_index_fp & 0xFFFFFFFF;
+ current_input_sample_index_low =
+ current_input_sample_index_low * 100000; // 100000 for 5 decimal places
+ current_input_sample_index_low = current_input_sample_index_low >> 32;
+ debug(1,
+ "samples: %u, stuff: %d, output_sample: %d, current_input_sample_index_fp: %" PRId64
+ ".%05" PRId64 ", floor: %" PRId64 ", ceil: %" PRId64 ".",
+ length, stuff, i, current_input_sample_index_int, current_input_sample_index_low,
+ current_input_sample_floor_index, current_input_sample_ceil_index);
+ }
+ */
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++) {
+ int32_t current_sample =
+ inptr[current_input_sample_floor_index * conn->input_num_channels + channel];
+ int32_t next_sample =
+ inptr[current_input_sample_ceil_index * conn->input_num_channels + channel];
+ int64_t current_sample_fp = current_sample;
+ // current_sample_fp = current_sample_fp << 32;
+ int64_t next_sample_fp = next_sample;
+ // next_sample_fp = next_sample_fp << 32;
+ int64_t offset_from_floor_fp = current_input_sample_index_fp & 0xffffffff;
+ int64_t offset_to_ceil_fp = one_fp - offset_from_floor_fp;
+ int64_t interpolated_sample_value_fp =
+ current_sample_fp * offset_to_ceil_fp + next_sample_fp * offset_from_floor_fp;
+ interpolated_sample_value_fp =
+ interpolated_sample_value_fp / one_fp; // back to a 32-bit sample
+ int32_t interpolated_sample_value = interpolated_sample_value_fp;
+ process_sample(interpolated_sample_value, &l_outptr, l_output_format, conn->fix_volume,
+ dither, conn);
+ }
+ current_input_sample_index_fp = current_input_sample_index_fp + step_size_fp;
+ }
}
}
- conn->amountStuffed = tstuff;
return length + tstuff;
}
// (b) multiplies each sample by the fixedvolume (a 16-bit quantity)
// (c) dithers the result to the output size 32/24/16/8 bits
// (d) outputs the result in the approprate format
-// formats accepted so far include U8, S8, S16, S24, S24_3LE, S24_3BE and S32
+// formats accepted include U8, S8, S16, S24, S24_3LE, S24_3BE and S32
int32_t stat_n = 0;
double stat_mean = 0.0;
double longest_soxr_execution_time = 0.0;
int64_t packets_processed = 0;
-int stuff_buffer_soxr_32(int32_t *inptr, int32_t *scratchBuffer, int length,
- sps_format_t l_output_format, char *outptr, int stuff, int dither,
- rtsp_conn_info *conn) {
- if (scratchBuffer == NULL) {
- die("soxr scratchBuffer not initialised.");
- }
+int stuff_buffer_soxr_32(int32_t *inptr, int length, sps_format_t l_output_format, char *outptr,
+ int stuff, int dither, rtsp_conn_info *conn) {
+ // if (scratchBuffer == NULL) {
+ // die("soxr scratchBuffer not initialised.");
+ //}
packets_processed++;
int tstuff = stuff;
- if ((stuff > 1) || (stuff < -1) || (length < 100)) {
- // debug(1, "Stuff argument to stuff_buffer must be from -1 to +1 and length >100.");
+ if ((stuff > INTERPOLATION_LIMIT) || (stuff < -INTERPOLATION_LIMIT) || (length < 100)) {
+ debug(2,
+ "Stuff argument %d to stuff_buffer_soxr_32 of length %d must be from -%d to +%d and "
+ "length > 100.",
+ stuff, length, INTERPOLATION_LIMIT, INTERPOLATION_LIMIT);
tstuff = 0; // if any of these conditions hold, don't stuff anything/
}
if (tstuff) {
- // debug(1,"Stuff %d.",stuff);
-
- soxr_io_spec_t io_spec;
- io_spec.itype = SOXR_INT32_I;
- io_spec.otype = SOXR_INT32_I;
- io_spec.scale = 1.0; // this seems to crash if not = 1.0
- io_spec.e = NULL;
- io_spec.flags = 0;
-
- size_t odone;
-
- uint64_t soxr_start_time = get_absolute_time_in_ns();
-
- soxr_error_t error = soxr_oneshot(length, length + tstuff, 2, // Rates and # of chans.
- inptr, length, NULL, // Input.
- scratchBuffer, length + tstuff, &odone, // Output.
- &io_spec, // Input, output and transfer spec.
- NULL, NULL); // Default configuration.
-
- if (error)
- die("soxr error: %s\n", "error: %s\n", soxr_strerror(error));
-
- if (odone > (size_t)(length + 1))
- die("odone = %u!\n", odone);
-
- // mean and variance calculations from "online_variance" algorithm at
- // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm
-
- double soxr_execution_time = (get_absolute_time_in_ns() - soxr_start_time) * 0.000000001;
- // debug(1,"soxr_execution_time_us: %10.1f",soxr_execution_time_us);
- if (soxr_execution_time > longest_soxr_execution_time)
- longest_soxr_execution_time = soxr_execution_time;
- stat_n += 1;
- double stat_delta = soxr_execution_time - stat_mean;
- if (stat_n != 0)
- stat_mean += stat_delta / stat_n;
- else
- warn("calculation error for stat_n");
- stat_M2 += stat_delta * (soxr_execution_time - stat_mean);
+ // debug(1, "stuff_buffer_soxr_32 %+d.",stuff);
+ int32_t *scratchBuffer =
+ malloc(sizeof(int32_t) * CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) *
+ (length + tstuff));
+ if (scratchBuffer != NULL) {
+ soxr_io_spec_t io_spec;
+ io_spec.itype = SOXR_INT32_I;
+ io_spec.otype = SOXR_INT32_I;
+ io_spec.scale = 1.0; // this seems to crash if not = 1.0
+ io_spec.e = NULL;
+ io_spec.flags = 0;
+
+ size_t odone;
+
+ uint64_t soxr_start_time = get_absolute_time_in_ns();
+
+ soxr_error_t error =
+ soxr_oneshot(length, length + tstuff, conn->input_num_channels, // Rates and # of chans.
+ inptr, length, NULL, // Input.
+ scratchBuffer, length + tstuff, &odone, // Output.
+ &io_spec, // Input, output and transfer spec.
+ NULL, NULL); // Default configuration.
+
+ if (error)
+ die("soxr error: %s\n", soxr_strerror(error));
+
+ if (odone > (size_t)(length + INTERPOLATION_LIMIT))
+ die("odone = %zu!\n", odone);
+
+ // mean and variance calculations from "online_variance" algorithm at
+ // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm
+
+ double soxr_execution_time = (get_absolute_time_in_ns() - soxr_start_time) * 0.000000001;
+ // debug(1,"soxr_execution_time_us: %10.1f",soxr_execution_time_us);
+ if (soxr_execution_time > longest_soxr_execution_time)
+ longest_soxr_execution_time = soxr_execution_time;
+ stat_n += 1;
+ double stat_delta = soxr_execution_time - stat_mean;
+ if (stat_n != 0)
+ stat_mean += stat_delta / stat_n;
+ else
+ warn("calculation error for stat_n");
+ stat_M2 += stat_delta * (soxr_execution_time - stat_mean);
- int i;
- int32_t *ip, *op;
- ip = inptr;
- op = scratchBuffer;
+ int i;
+ int32_t *ip, *op;
+ ip = inptr;
+ op = scratchBuffer;
+
+ const int gpm = 5;
+ // keep the first (gpm) samples, to mitigate the Gibbs phenomenon
+ for (i = 0; i < gpm; i++) {
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++)
+ *op++ = *ip++;
+ }
- const int gpm = 5;
- // keep the first (dpm) samples, to mitigate the Gibbs phenomenon
- for (i = 0; i < gpm; i++) {
- *op++ = *ip++;
- *op++ = *ip++;
- }
+ // keep the last (gpm) samples, to mitigate the Gibbs phenomenon
- // keep the last (dpm) samples, to mitigate the Gibbs phenomenon
+ // pointer arithmetic, baby -- it's da bomb.
+ op = scratchBuffer + (length + tstuff - gpm) * conn->input_num_channels;
+ ip = inptr + (length - gpm) * conn->input_num_channels;
+ for (i = 0; i < gpm; i++) {
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++)
+ *op++ = *ip++;
+ }
- // pointer arithmetic, baby -- it's da bomb.
- op = scratchBuffer + (length + tstuff - gpm) * 2;
- ip = inptr + (length - gpm) * 2;
- for (i = 0; i < gpm; i++) {
- *op++ = *ip++;
- *op++ = *ip++;
+ // now, do the volume, dither and formatting processing
+ ip = scratchBuffer;
+ char *l_outptr = outptr;
+ for (i = 0; i < length + tstuff; i++) {
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++)
+ process_sample(*ip++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
+ };
+ free(scratchBuffer);
+ } else {
+ debug(1, "Cannot allocate scratchbuffer");
}
-
- // now, do the volume, dither and formatting processing
- ip = scratchBuffer;
- char *l_outptr = outptr;
- for (i = 0; i < length + tstuff; i++) {
- process_sample(*ip++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
- process_sample(*ip++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
- };
-
} else { // the whole frame, if no stuffing
// now, do the volume, dither and formatting processing
int i;
for (i = 0; i < length; i++) {
- process_sample(*ip++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
- process_sample(*ip++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
+ unsigned int channel;
+ for (channel = 0; channel < conn->input_num_channels; channel++)
+ process_sample(*ip++, &l_outptr, l_output_format, conn->fix_volume, dither, conn);
};
}
longest_soxr_execution_time = 0.0;
}
- conn->amountStuffed = tstuff;
return length + tstuff;
}
#endif
-void player_thread_initial_cleanup_handler(__attribute__((unused)) void *arg) {
- rtsp_conn_info *conn = (rtsp_conn_info *)arg;
- debug(3, "Connection %d: player thread main loop exit via player_thread_initial_cleanup_handler.",
- conn->connection_number);
-}
-
char line_of_stats[1024];
int statistics_row; // statistics_line 0 means print the headings; anything else 1 means print the
// values. Set to 0 the first time out.
// be printed -- 2 means print, 1 means print only in a debug mode, 0 means skip
// clang-format off
-int ap1_synced_statistics_print_profile[] = {2, 2, 2, 0, 2, 1, 1, 2, 1, 1, 1, 0, 1, 1, 2, 2, 1, 1};
-int ap1_nosync_statistics_print_profile[] = {2, 0, 0, 0, 2, 1, 1, 2, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0};
-int ap1_nodelay_statistics_print_profile[] = {0, 0, 0, 0, 2, 1, 1, 2, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0};
+int ap1_synced_statistics_print_profile[] = {2, 1, 2, 2, 0, 2, 1, 1, 2, 1, 1, 1, 0, 1, 1, 2, 2};
+int ap1_nosync_statistics_print_profile[] = {2, 0, 0, 0, 0, 2, 1, 1, 2, 1, 1, 1, 0, 1, 1, 0, 0};
+int ap1_nodelay_statistics_print_profile[] = {0, 0, 0, 0, 0, 2, 1, 1, 2, 0, 1, 1, 0, 1, 1, 0, 0};
-int ap2_realtime_synced_stream_statistics_print_profile[] = {2, 2, 2, 0, 2, 1, 1, 2, 1, 1, 1, 0, 0, 1, 2, 2, 0, 0};
-int ap2_realtime_nosync_stream_statistics_print_profile[] = {2, 0, 0, 0, 2, 1, 1, 2, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0};
-int ap2_realtime_nodelay_stream_statistics_print_profile[] = {0, 0, 0, 0, 2, 1, 1, 2, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0};
+int ap2_realtime_synced_stream_statistics_print_profile[] = {2, 1, 2, 2, 0, 2, 1, 1, 2, 1, 1, 1, 0, 0, 1, 2, 2};
+int ap2_realtime_nosync_stream_statistics_print_profile[] = {2, 0, 0, 0, 0, 2, 1, 1, 2, 1, 1, 1, 0, 0, 1, 0, 0};
+int ap2_realtime_nodelay_stream_statistics_print_profile[] = {0, 0, 0, 0, 0, 2, 1, 1, 2, 0, 1, 1, 0, 0, 1, 0, 0};
-int ap2_buffered_synced_stream_statistics_print_profile[] = {2, 2, 2, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 2, 2, 0, 0};
-int ap2_buffered_nosync_stream_statistics_print_profile[] = {2, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0};
-int ap2_buffered_nodelay_stream_statistics_print_profile[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0};
+int ap2_buffered_synced_stream_statistics_print_profile[] = {2, 2, 2, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 2, 2};
+int ap2_buffered_nosync_stream_statistics_print_profile[] = {2, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0};
+int ap2_buffered_nodelay_stream_statistics_print_profile[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0};
// clang-format on
void statistics_item(const char *heading, const char *format, ...) {
- if (((statistics_print_profile[statistics_column] == 1) && (debuglev != 0)) ||
+ if (((statistics_print_profile[statistics_column] == 1) && (debug_level() != 0)) ||
(statistics_print_profile[statistics_column] == 2)) { // include this column?
if (was_a_previous_column != 0) {
if (statistics_row == 0)
double response = config.airplay_volume;
if ((conn != NULL) && (conn->own_airplay_volume_set != 0)) {
response = conn->own_airplay_volume;
- } else if (config.airplay_volume > config.high_threshold_airplay_volume) {
- int64_t volume_validity_time = config.limit_to_high_volume_threshold_time_in_minutes;
- // zero means never check the volume
- if (volume_validity_time != 0) {
- // If the volume is higher than the high volume threshold
- // and enough time has gone past, suggest the default volume.
- uint64_t time_now = get_absolute_time_in_ns();
- int64_t time_since_last_access_to_volume_info =
- time_now - config.last_access_to_volume_info_time;
-
- volume_validity_time = volume_validity_time * 60; // to seconds
- volume_validity_time = volume_validity_time * 1000000000; // to nanoseconds
-
- if ((config.airplay_volume > config.high_threshold_airplay_volume) &&
- ((config.last_access_to_volume_info_time == 0) ||
- (time_since_last_access_to_volume_info > volume_validity_time))) {
-
- debug(2,
- "the current volume %.6f is higher than the high volume threshold %.6f, so the "
- "default volume %.6f is suggested.",
- config.airplay_volume, config.high_threshold_airplay_volume,
- config.default_airplay_volume);
- response = config.default_airplay_volume;
- }
- }
}
return response;
}
+#ifdef CONFIG_METADATA
+void send_ssnc_stream_description(const char *type, const char *description) {
+ send_ssnc_metadata('styp', type, strlen(type), 1);
+ send_ssnc_metadata('sdsc', description, strlen(description), 1);
+}
+#endif
+
void player_thread_cleanup_handler(void *arg) {
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
+ // debug(1, "Connection %d: player_thread_cleanup_handler start.", conn->connection_number);
+
+#ifdef CONFIG_FFMPEG
+ // debug(1, "FFmpeg clearup");
+ clear_software_resampler(conn);
+ clear_decoding_chain(conn);
+ // debug(1, "FFmpeg clearup done");
+#endif
if (config.output->stop) {
+#ifdef CONFIG_FFMPEG
+ if (avflush(conn) > 1)
+ debug(3, "ffmpeg flush at stop!");
+#endif
+ debug(2, "Connection %d: player: stop the output backend.", conn->connection_number);
config.output->stop();
}
debug(3, "Connection %d: player thread main loop exit via player_thread_cleanup_handler.",
conn->connection_number);
- if (config.statistics_requested) {
+ if ((conn->at_least_one_frame_seen_this_session != 0) && (config.statistics_requested)) {
int64_t time_playing = get_absolute_time_in_ns() - conn->playstart;
time_playing = time_playing / 1000000000;
int64_t elapsedHours = time_playing / 3600;
#ifdef CONFIG_AIRPLAY_2
if (conn->airplay_type == ap_2) {
debug(2, "Cancelling AP2 timing, control and audio threads...");
-
if (conn->airplay_stream_type == realtime_stream) {
debug(2, "Connection %d: Delete Realtime Audio Stream thread", conn->connection_number);
pthread_cancel(conn->rtp_realtime_audio_thread);
} else if (conn->airplay_stream_type == buffered_stream) {
- debug(2, "Connection %d: Delete Buffered Audio Stream thread", conn->connection_number);
+ debug(3,
+ "Connection %d: Delete Buffered Audio Stream thread by player_thread_cleanup_handler",
+ conn->connection_number);
pthread_cancel(conn->rtp_buffered_audio_thread);
pthread_join(conn->rtp_buffered_audio_thread, NULL);
-
+ debug(3,
+ "Connection %d: Deleted Buffered Audio Stream thread by player_thread_cleanup_handler",
+ conn->connection_number);
} else {
die("Unrecognised Stream Type");
}
- debug(2, "Connection %d: Delete AirPlay 2 Control thread");
+ debug(2, "Connection %d: Delete AirPlay 2 Control thread", conn->connection_number);
pthread_cancel(conn->rtp_ap2_control_thread);
pthread_join(conn->rtp_ap2_control_thread, NULL);
-
} else {
debug(2, "Cancelling AP1-compatible timing, control and audio threads...");
#else
debug(3, "Join audio thread.");
pthread_join(conn->rtp_audio_thread, NULL);
debug(3, "Audio thread terminated.");
+
#ifdef CONFIG_AIRPLAY_2
}
- // ptp_send_control_message_string("T"); // remove all timing peers to force the master to 0
- reset_anchor_info(conn);
+ ptp_send_control_message_string("E");
#endif
if (conn->outbuf) {
free(conn->outbuf);
conn->outbuf = NULL;
}
- if (conn->sbuf) {
- free(conn->sbuf);
- conn->sbuf = NULL;
- }
if (conn->tbuf) {
free(conn->tbuf);
conn->tbuf = NULL;
}
- if (conn->statistics) {
- free(conn->statistics);
- conn->statistics = NULL;
- }
free_audio_buffers(conn);
- if (conn->stream.type == ast_apple_lossless)
- terminate_decoders(conn);
+ if (conn->stream.type == ast_apple_lossless) {
+#ifdef CONFIG_APPLE_ALAC
+ if (config.decoder_in_use == 1 << decoder_apple_alac) {
+ apple_alac_terminate();
+ }
+#endif
+
+#ifdef CONFIG_HAMMERTON
+ if (config.decoder_in_use == 1 << decoder_hammerton) {
+ alac_free(conn->decoder_info);
+ }
+#endif
+ }
conn->rtp_running = 0;
+
pthread_setcancelstate(oldState, NULL);
debug(2, "Connection %d: player terminated.", conn->connection_number);
}
void *player_thread_func(void *arg) {
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
+ // if (config.output->prepare)
+ // config.output->prepare(); // give the backend its first chance to prepare itself, knowing it
+ // has access to the output device (i.e. knowing that it should not be in use by another program
+ // at this time).
#ifdef CONFIG_METADATA
uint64_t time_of_last_metadata_progress_update =
0; // the assignment is to stop a compiler warning...
#endif
+
+#ifdef CONFIG_CONVOLUTION
+ double highest_convolver_output_db = 0.0;
+#endif
+
uint64_t previous_frames_played = 0; // initialised to avoid a "possibly uninitialised" warning
uint64_t previous_raw_measurement_time =
0; // initialised to avoid a "possibly uninitialised" warning
uint64_t previous_corrected_measurement_time =
0; // initialised to avoid a "possibly uninitialised" warning
int previous_frames_played_valid = 0;
- // pthread_cleanup_push(player_thread_initial_cleanup_handler, arg);
conn->latency_warning_issued =
0; // be permitted to generate a warning each time a play is attempted
conn->packet_count = 0;
conn->flush_rtp_timestamp = 0; // it seems this number has a special significance -- it seems to
// be used as a null operand, so we'll use it like that too
conn->fix_volume = 0x10000;
+ conn->frames_per_packet = 352; // for ALAC -- will be changed if necessary
#ifdef CONFIG_AIRPLAY_2
- conn->ap2_flush_requested = 0;
- conn->ap2_flush_from_valid = 0;
conn->ap2_rate = 0;
conn->ap2_play_enabled = 0;
+
+ unsigned int f = 0;
+ for (f = 0; f < MAX_DEFERRED_FLUSH_REQUESTS; f++) {
+ conn->ap2_deferred_flush_requests[f].inUse = 0;
+ conn->ap2_deferred_flush_requests[f].active = 0;
+ }
#endif
- // reset_anchor_info(conn);
+ const unsigned int sync_history_length = 40;
+ int64_t sync_samples[sync_history_length];
+ int64_t sync_samples_highest_error = 0;
+ int64_t sync_samples_lowest_error = 0;
+ int64_t sync_samples_second_highest_error;
+ int64_t sync_samples_second_lowest_error;
+ conn->sync_samples_index = 0;
+ conn->sync_samples_count = 0;
- if (conn->stream.type == ast_apple_lossless)
- init_alac_decoder((int32_t *)&conn->stream.fmtp,
- conn); // this sets up incoming rate, bit depth, channels.
- // No pthread cancellation point in here
+ if (conn->stream.type == ast_apple_lossless) {
+#ifdef CONFIG_HAMMERTON
+ if (config.decoder_in_use == 1 << decoder_hammerton) {
+ init_alac_decoder((int32_t *)&conn->stream.fmtp,
+ conn); // this sets up incoming rate, bit depth, channels.
+ // No pthread cancellation point in here
+ }
+#endif
+#ifdef CONFIG_APPLE_ALAC
+ if (config.decoder_in_use == 1 << decoder_apple_alac) {
+ apple_alac_init(conn->stream.fmtp); // no pthread cancellation point in here
+ }
+#endif
+ }
// This must be after init_alac_decoder
init_buffer(conn); // will need a corresponding deallocation. No cancellation points in here
ab_resync(conn);
#endif
}
- conn->timestamp_epoch = 0; // indicate that the next timestamp will be the first one.
- conn->maximum_timestamp_interval = conn->input_rate * 60; // actually there shouldn't be more than
- // about 13 seconds of a gap between
- // successive rtptimes, at worst
-
- conn->output_sample_ratio = config.output_rate / conn->input_rate;
-
- // Sign extending rtptime calculations to 64 bit is needed from time to time.
-
- // The standard rtptime is unsigned 32 bits,
- // so you can do modulo 2^32 difference calculations
- // and get a signed result simply by typing the result as a signed 32-bit number.
-
- // So long as you can be sure the numbers are within 2^31 of each other,
- // the sign of the result calculated in this way indicates the order of the operands.
- // For example, if you subtract a from b and the result is positive, you can conclude
- // b is the same as or comes after a in module 2^32 order.
-
- // We want to do the same with the rtptime calculations for multiples of
- // the rtptimes (1, 2, 4 or 8 times), and we want to do this in signed 64-bit/
- // Therefore we need to sign extend these modulo 2^32, 2^33, 2^34, or 2^35 bit unsigned
- // numbers on the same basis.
-
- // That is what the output_rtptime_sign_bit, output_rtptime_mask, output_rtptime_mask_not and
- // output_rtptime_sign_mask are for -- see later, calculating the sync error.
-
- int output_rtptime_sign_bit;
- switch (conn->output_sample_ratio) {
- case 1:
- output_rtptime_sign_bit = 31;
- break;
- case 2:
- output_rtptime_sign_bit = 32;
- break;
- case 4:
- output_rtptime_sign_bit = 33;
- break;
- case 8:
- output_rtptime_sign_bit = 34;
- break;
- default:
- debug(1, "error with output ratio -- can't calculate sign bit number");
- output_rtptime_sign_bit = 31;
- break;
- }
-
- // debug(1, "Output sample ratio is %d.", conn->output_sample_ratio);
- // debug(1, "Output output_rtptime_sign_bit: %d.", output_rtptime_sign_bit);
-
- int64_t output_rtptime_mask = 1;
- output_rtptime_mask = output_rtptime_mask << (output_rtptime_sign_bit + 1);
- output_rtptime_mask = output_rtptime_mask - 1;
-
- int64_t output_rtptime_mask_not = output_rtptime_mask;
- output_rtptime_mask_not = ~output_rtptime_mask;
-
- int64_t output_rtptime_sign_mask = 1;
- output_rtptime_sign_mask = output_rtptime_sign_mask << output_rtptime_sign_bit;
-
- conn->max_frame_size_change =
- 1 * conn->output_sample_ratio; // we add or subtract one frame at the nominal
- // rate, multiply it by the frame ratio.
- // but, on some occasions, more than one frame could be added
-
- switch (config.output_format) {
- case SPS_FORMAT_S24_3LE:
- case SPS_FORMAT_S24_3BE:
- conn->output_bytes_per_frame = 6;
- break;
-
- case SPS_FORMAT_S24:
- case SPS_FORMAT_S24_LE:
- case SPS_FORMAT_S24_BE:
- conn->output_bytes_per_frame = 8;
- break;
- case SPS_FORMAT_S32:
- case SPS_FORMAT_S32_LE:
- case SPS_FORMAT_S32_BE:
- conn->output_bytes_per_frame = 8;
- break;
- default:
- conn->output_bytes_per_frame = 4;
- }
-
- debug(3, "Output frame bytes is %d.", conn->output_bytes_per_frame);
-
- conn->dac_buffer_queue_minimum_length = (uint64_t)(
- config.audio_backend_buffer_interpolation_threshold_in_seconds * config.output_rate);
- debug(3, "dac_buffer_queue_minimum_length is %" PRIu64 " frames.",
- conn->dac_buffer_queue_minimum_length);
-
conn->session_corrections = 0;
conn->connection_state_to_output = get_requested_connection_state_to_output();
-// this is about half a minute
-// #define trend_interval 3758
-
-// this is about 8 seconds
-#define trend_interval 1003
int number_of_statistics, oldest_statistic, newest_statistic;
- int frames_seen_in_this_logging_interval = 0;
- int at_least_one_frame_seen_this_session = 0;
- int64_t tsum_of_sync_errors, tsum_of_corrections, tsum_of_insertions_and_deletions,
- tsum_of_drifts;
- int64_t previous_sync_error = 0, previous_correction = 0;
- uint64_t minimum_dac_queue_size = 0;
- int32_t minimum_buffer_occupancy = 0;
- int32_t maximum_buffer_occupancy = 0;
+ uint32_t frames_since_last_stats_logged = 0;
+ int at_least_one_frame_seen = 0;
+ int64_t tsum_of_sync_errors, tsum_of_corrections, tsum_of_insertions_and_deletions;
+ size_t tsum_of_frames;
+ minimum_dac_queue_size = UINT64_MAX;
+ int64_t tsum_of_gaps;
+ int32_t minimum_buffer_occupancy = INT32_MAX;
+ int32_t maximum_buffer_occupancy = INT32_MIN;
#ifdef CONFIG_AIRPLAY_2
conn->ap2_audio_buffer_minimum_size = -1;
#endif
+ conn->at_least_one_frame_seen_this_session = 0;
conn->raw_frame_rate = 0.0;
conn->corrected_frame_rate = 0.0;
conn->frame_rate_valid = 0;
uint64_t current_delay;
int play_number = 0;
conn->play_number_after_flush = 0;
- // int last_timestamp = 0; // for debugging only
conn->time_of_last_audio_packet = 0;
// conn->shutdown_requested = 0;
number_of_statistics = oldest_statistic = newest_statistic = 0;
- tsum_of_sync_errors = tsum_of_corrections = tsum_of_insertions_and_deletions = tsum_of_drifts = 0;
+ tsum_of_sync_errors = tsum_of_corrections = tsum_of_insertions_and_deletions = 0;
+ tsum_of_frames = 0;
+ tsum_of_gaps = 0;
- const int print_interval = trend_interval; // don't ask...
// I think it's useful to keep this prime to prevent it from falling into a pattern with some
// other process.
static char rnstate[256];
initstate(time(NULL), rnstate, 256);
- signed short *inbuf;
+ // signed short *inbuf;
int inbuflength;
- unsigned int output_bit_depth = 16; // default;
-
- switch (config.output_format) {
- case SPS_FORMAT_S8:
- case SPS_FORMAT_U8:
- output_bit_depth = 8;
- break;
- case SPS_FORMAT_S16:
- case SPS_FORMAT_S16_LE:
- case SPS_FORMAT_S16_BE:
- output_bit_depth = 16;
- break;
- case SPS_FORMAT_S24:
- case SPS_FORMAT_S24_LE:
- case SPS_FORMAT_S24_BE:
- case SPS_FORMAT_S24_3LE:
- case SPS_FORMAT_S24_3BE:
- output_bit_depth = 24;
- break;
- case SPS_FORMAT_S32:
- case SPS_FORMAT_S32_LE:
- case SPS_FORMAT_S32_BE:
- output_bit_depth = 32;
- break;
- case SPS_FORMAT_UNKNOWN:
- die("Unknown format choosing output bit depth");
- break;
- case SPS_FORMAT_AUTO:
- die("Invalid format -- SPS_FORMAT_AUTO -- choosing output bit depth");
- break;
- case SPS_FORMAT_INVALID:
- die("Invalid format -- SPS_FORMAT_INVALID -- choosing output bit depth");
- break;
- }
-
- debug(3, "Output bit depth is %d.", output_bit_depth);
-
- if (conn->input_bit_depth > output_bit_depth) {
- debug(3, "Dithering will be enabled because the input bit depth is greater than the output bit "
- "depth");
- }
- if (config.output->parameters == NULL) {
- debug(3, "Dithering will be enabled because the output volume is being altered in software");
- }
+ // remember, the output device may never have been initialised prior to this call
+#ifdef CONFIG_FFMPEG
+ if (avflush(conn) > 1)
+ debug(1, "ffmpeg flush at start!");
+#endif
- if ((config.output->parameters == NULL) || (conn->input_bit_depth > output_bit_depth) ||
- (config.playback_mode == ST_mono))
- conn->enable_dither = 1;
-
- // call the backend's start() function if it exists.
+ // leave this relic -- jack and soundio still use it
if (config.output->start != NULL)
- config.output->start(config.output_rate, config.output_format);
-
- // we need an intermediate "transition" buffer
-
- conn->tbuf = malloc(
- sizeof(int32_t) * 2 *
- (conn->max_frames_per_packet * conn->output_sample_ratio + conn->max_frame_size_change));
- if (conn->tbuf == NULL)
- die("Failed to allocate memory for the transition buffer.");
-
- // initialise this, because soxr stuffing might be chosen later
-
- conn->sbuf = malloc(
- sizeof(int32_t) * 2 *
- (conn->max_frames_per_packet * conn->output_sample_ratio + conn->max_frame_size_change));
- if (conn->sbuf == NULL)
- die("Failed to allocate memory for the sbuf buffer.");
-
- // The size of these dependents on the number of frames, the size of each frame and the maximum
- // size change
- conn->outbuf = malloc(
- conn->output_bytes_per_frame *
- (conn->max_frames_per_packet * conn->output_sample_ratio + conn->max_frame_size_change));
- if (conn->outbuf == NULL)
- die("Failed to allocate memory for an output buffer.");
+ config.output->start(44100, SPS_FORMAT_S16_LE);
+
conn->first_packet_timestamp = 0;
conn->missing_packets = conn->late_packets = conn->too_late_packets = conn->resend_requests = 0;
int sync_error_out_of_bounds =
0; // number of times in a row that there's been a serious sync error
- conn->statistics = malloc(sizeof(stats_t) * trend_interval);
- if (conn->statistics == NULL)
- die("Failed to allocate a statistics buffer");
+ // conn->statistics = malloc(sizeof(stats_t) * trend_samples);
+ // if (conn->statistics == NULL)
+ // die("Failed to allocate a statistics buffer");
conn->framesProcessedInThisEpoch = 0;
conn->framesGeneratedInThisEpoch = 0;
if (conn->airplay_type == ap_2) {
if (conn->airplay_stream_type == realtime_stream) {
if (config.output->delay) {
- if (config.no_sync == 0)
- statistics_print_profile = ap2_realtime_synced_stream_statistics_print_profile;
- else
- statistics_print_profile = ap2_realtime_nosync_stream_statistics_print_profile;
+ // if (config.no_sync == 0)
+ statistics_print_profile = ap2_realtime_synced_stream_statistics_print_profile;
+ // else
+ // statistics_print_profile = ap2_realtime_nosync_stream_statistics_print_profile;
} else {
statistics_print_profile = ap2_realtime_nodelay_stream_statistics_print_profile;
}
} else {
if (config.output->delay) {
- if (config.no_sync == 0)
- statistics_print_profile = ap2_buffered_synced_stream_statistics_print_profile;
- else
- statistics_print_profile = ap2_buffered_nosync_stream_statistics_print_profile;
+ // if (config.no_sync == 0)
+ statistics_print_profile = ap2_buffered_synced_stream_statistics_print_profile;
+ // else
+ // statistics_print_profile = ap2_buffered_nosync_stream_statistics_print_profile;
} else {
statistics_print_profile = ap2_buffered_nodelay_stream_statistics_print_profile;
}
} else {
#endif
if (config.output->delay) {
- if (config.no_sync == 0)
- statistics_print_profile = ap1_synced_statistics_print_profile;
- else
- statistics_print_profile = ap1_nosync_statistics_print_profile;
+ // if (config.no_sync == 0)
+ statistics_print_profile = ap1_synced_statistics_print_profile;
+ // else
+ // statistics_print_profile = ap1_nosync_statistics_print_profile;
} else {
statistics_print_profile = ap1_nodelay_statistics_print_profile;
}
#endif
// create and start the timing, control and audio receiver threads
- pthread_create(&conn->rtp_audio_thread, NULL, &rtp_audio_receiver, (void *)conn);
- pthread_create(&conn->rtp_control_thread, NULL, &rtp_control_receiver, (void *)conn);
- pthread_create(&conn->rtp_timing_thread, NULL, &rtp_timing_receiver, (void *)conn);
+ named_pthread_create(&conn->rtp_audio_thread, NULL, &rtp_audio_receiver, (void *)conn,
+ "ap1_audio_%d", conn->connection_number);
+ named_pthread_create(&conn->rtp_control_thread, NULL, &rtp_control_receiver, (void *)conn,
+ "ap1_control_%d", conn->connection_number);
+ named_pthread_create(&conn->rtp_timing_thread, NULL, &rtp_timing_receiver, (void *)conn,
+ "ap1_tim_rcv_%d", conn->connection_number);
#ifdef CONFIG_AIRPLAY_2
}
player_volume(initial_volume, conn); // will contain a cancellation point if asked to wait
debug(2, "Play begin");
+
+#ifdef CONFIG_FFMPEG
+ int64_t frames_previously_retained_in_the_resampler = 0;
+#endif
+
+ // uint32_t flush_to_frame;
+ // int enable_flush_to_frame = 0;
+ int request_resync = 0; // will be set if a big discontinuity is detected
+ uint32_t frames_to_skip = 0; // when a discontinuity is registered
+ int skipping_frames_at_start_of_play = 0;
+ // debug(1, "player begin processing packets");
while (1) {
+
#ifdef CONFIG_METADATA
int this_is_the_first_frame = 0; // will be set if it is
#endif
- // check a few parameters to ensure they are non-zero
- if (config.output_rate == 0)
- debug(1, "config.output_rate is zero!");
- if (conn->output_sample_ratio == 0)
- debug(1, "conn->output_sample_ratio is zero!");
- if (conn->input_rate == 0)
- debug(1, "conn->input_rate is zero!");
- if (conn->input_bytes_per_frame == 0)
- debug(1, "conn->input_bytes_per_frame is zero!");
-
- abuf_t *inframe = buffer_get_frame(conn); // this has a (needed!) deliberate cancellation point in it.
- uint64_t local_time_now = get_absolute_time_in_ns(); // types okay
- config.last_access_to_volume_info_time =
- local_time_now; // ensure volume info remains seen as valid
- if (inframe) {
- inbuf = inframe->data;
- inbuflength = inframe->length;
- if (inbuf) {
- if (play_number == 0)
- conn->playstart = get_absolute_time_in_ns();
- play_number++;
- // if (play_number % 100 == 0)
- // debug(3, "Play frame %d.", play_number);
- conn->play_number_after_flush++;
- if (inframe->given_timestamp == 0) {
- debug(2,
- "Player has supplied a silent frame, (possibly frame %u) for play number %d, "
- "status 0x%X after %u resend requests.",
- conn->last_seqno_read + 1, play_number, inframe->status,
- inframe->resend_request_number);
- conn->last_seqno_read =
- ((conn->last_seqno_read + 1) & 0xffff); // manage the packet out of sequence minder
-
- void *silence = malloc(conn->output_bytes_per_frame * conn->max_frames_per_packet *
- conn->output_sample_ratio);
- if (silence == NULL) {
- debug(1, "Failed to allocate memory for a silent frame silence buffer.");
- } else {
- // the player may change the contents of the buffer, so it has to be zeroed each time;
- // might as well malloc and free it locally
- conn->previous_random_number = generate_zero_frames(
- silence, conn->max_frames_per_packet * conn->output_sample_ratio,
- config.output_format, conn->enable_dither, conn->previous_random_number);
- config.output->play(silence, conn->max_frames_per_packet * conn->output_sample_ratio,
- play_samples_are_untimed, 0, 0);
- free(silence);
- }
- } else if (conn->play_number_after_flush < 10) {
- /*
- int64_t difference = 0;
- if (last_timestamp)
- difference = inframe->timestamp - last_timestamp;
- last_timestamp = inframe->timestamp;
- debug(1, "Play number %d, monotonic timestamp %llx, difference
- %lld.",conn->play_number_after_flush,inframe->timestamp,difference);
- */
- void *silence = malloc(conn->output_bytes_per_frame * conn->max_frames_per_packet *
- conn->output_sample_ratio);
- if (silence == NULL) {
- debug(1, "Failed to allocate memory for a flush silence buffer.");
- } else {
- // the player may change the contents of the buffer, so it has to be zeroed each time;
- // might as well malloc and free it locally
- conn->previous_random_number = generate_zero_frames(
- silence, conn->max_frames_per_packet * conn->output_sample_ratio,
- config.output_format, conn->enable_dither, conn->previous_random_number);
- config.output->play(silence, conn->max_frames_per_packet * conn->output_sample_ratio,
- play_samples_are_untimed, 0, 0);
- free(silence);
- }
- } else {
+ pthread_testcancel(); // allow a pthread_cancel request to take effect.
- if (((config.output->parameters == NULL) && (config.ignore_volume_control == 0) &&
- (config.airplay_volume != 0.0)) ||
- (conn->input_bit_depth > output_bit_depth) || (config.playback_mode == ST_mono))
- conn->enable_dither = 1;
- else
- conn->enable_dither = 0;
-
- // here, let's transform the frame of data, if necessary
-
- switch (conn->input_bit_depth) {
- case 16: {
- int i, j;
- int16_t ls, rs;
- int32_t ll = 0, rl = 0;
- int16_t *inps = inbuf;
- // int16_t *outps = tbuf;
- int32_t *outpl = (int32_t *)conn->tbuf;
- for (i = 0; i < inbuflength; i++) {
- ls = *inps++;
- rs = *inps++;
-
- // here, do the mode stuff -- mono / reverse stereo / leftonly / rightonly
- // also, raise the 16-bit samples to 32 bits.
-
- switch (config.playback_mode) {
- case ST_mono: {
- int32_t lsl = ls;
- int32_t rsl = rs;
- int32_t both = lsl + rsl;
- both = both << (16 - 1); // keep all 17 bits of the sum of the 16bit left and right
- // -- the 17th bit will influence dithering later
- ll = both;
- rl = both;
- } break;
- case ST_reverse_stereo: {
- ll = rs;
- rl = ls;
- ll = ll << 16;
- rl = rl << 16;
- } break;
- case ST_left_only:
- rl = ls;
- ll = ls;
- ll = ll << 16;
- rl = rl << 16;
- break;
- case ST_right_only:
- ll = rs;
- rl = rs;
- ll = ll << 16;
- rl = rl << 16;
- break;
- case ST_stereo:
- ll = ls;
- rl = rs;
- ll = ll << 16;
- rl = rl << 16;
- break; // nothing extra to do
- }
+ // if we are using the software attenuator or downsampling or mixing to mono, enable dithering
- // here, replicate the samples if you're upsampling
+ if ((conn->fix_volume != 0x10000) || // if not 0x10000, it is attenuating...
+ ((conn->output_bit_depth > 0) &&
+ (conn->input_effective_bit_depth > conn->output_bit_depth)) ||
+ (config.playback_mode == ST_mono)) {
+ if (conn->enable_dither == 0)
+ debug(2, "enabling dither");
+ conn->enable_dither = 1;
+ } else {
+ if (conn->enable_dither != 0)
+ debug(2, "disabling dither");
+ conn->enable_dither = 0;
+ }
- for (j = 0; j < conn->output_sample_ratio; j++) {
- *outpl++ = ll;
- *outpl++ = rl;
- }
+ abuf_t *inframe = buffer_get_frame(
+ conn, request_resync); // this has a guaranteed [and needed!] cancellation point
+ request_resync = 0;
+ if (inframe) {
+ if (inframe->data != NULL) {
+ /*
+ {
+ uint64_t the_time_this_frame_should_be_played;
+ frame_to_local_time(inframe->timestamp,
+ &the_time_this_frame_should_be_played, conn);
+ int64_t lead_time = the_time_this_frame_should_be_played - get_absolute_time_in_ns();
+ debug(1, "get_packet %u, lead time is %3.f ms.", inframe->timestamp, lead_time *
+ 0.000001);
+ }
+ */
+ unsigned int last_sample_index;
+ int frames_played = 0;
+ int64_t sync_error = 0;
+ int amount_to_stuff = 0;
+ if (inframe->data) {
+ if (play_number == 0)
+ conn->playstart = get_absolute_time_in_ns();
+ play_number++;
+ // if (play_number % 100 == 0)
+ // debug(3, "Play frame %d.", play_number);
+ conn->play_number_after_flush++;
+
+ if (inframe->timestamp == 0) {
+ debug(2,
+ "Player has supplied a silent frame, (possibly frame %u) for play number %d, "
+ "status 0x%X after %u resend requests.",
+ conn->last_seqno_read + 1, play_number, inframe->status,
+ inframe->resend_request_number);
+ conn->last_seqno_read++; // manage the packet out of sequence minder
+
+ void *silence =
+ malloc(sps_format_sample_size(
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration)) *
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) *
+ conn->frames_per_packet);
+ if (silence == NULL) {
+ debug(1, "Failed to allocate memory for a silent frame silence buffer.");
+ } else {
+ // the player may change the contents of the buffer, so it has to be zeroed each
+ // time; might as well malloc and free it locally
+ conn->previous_random_number = generate_zero_frames(
+ silence, conn->frames_per_packet, conn->enable_dither,
+ conn->previous_random_number, config.current_output_configuration);
+ config.output->play(silence, conn->frames_per_packet, play_samples_are_untimed, 0, 0);
+ free(silence);
+ frames_played += conn->frames_per_packet;
}
- } break;
- case 32: {
- int i, j;
- int32_t ls, rs;
- int32_t ll = 0, rl = 0;
- int32_t *inps = (int32_t *)inbuf;
- int32_t *outpl = (int32_t *)conn->tbuf;
- for (i = 0; i < inbuflength; i++) {
- ls = *inps++;
- rs = *inps++;
-
- // here, do the mode stuff -- mono / reverse stereo / leftonly / rightonly
-
- switch (config.playback_mode) {
- case ST_mono: {
- int64_t lsl = ls;
- int64_t rsl = rs;
- int64_t both = lsl + rsl;
- both = both >> 1;
- uint32_t both32 = both;
- ll = both32;
- rl = both32;
- } break;
- case ST_reverse_stereo: {
- ll = rs;
- rl = ls;
- } break;
- case ST_left_only:
- rl = ls;
- ll = ls;
- break;
- case ST_right_only:
- ll = rs;
- rl = rs;
- break;
- case ST_stereo:
- ll = ls;
- rl = rs;
- break; // nothing extra to do
- }
-
- // here, replicate the samples if you're upsampling
+ } else {
+ // process the frame
+ // here, let's transform the frame of data, if necessary
+ // we need an intermediate "transition" buffer
- for (j = 0; j < conn->output_sample_ratio; j++) {
- *outpl++ = ll;
- *outpl++ = rl;
- }
+ if (conn->tbuf != NULL) {
+ debug(1, "conn->tbuf not free'd");
+ free(conn->tbuf);
}
- } break;
+ conn->tbuf =
+ malloc(sizeof(int32_t) *
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) *
+ ((inframe->length) * conn->output_sample_ratio + INTERPOLATION_LIMIT));
+ if (conn->tbuf == NULL)
+ die("Failed to allocate memory for the transition buffer.");
+ // size change
+ conn->outbuf =
+ malloc(sps_format_sample_size(
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration)) *
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) *
+ ((inframe->length) * conn->output_sample_ratio + INTERPOLATION_LIMIT));
+ if (conn->outbuf == NULL)
+ die("Failed to allocate memory for an output buffer.");
+
+ if (conn->input_num_channels == 2) {
+ // if (0) {
+
+ switch (conn->input_bit_depth) {
+ case 16: {
+ unsigned int i, j;
+ int16_t ls, rs;
+ int32_t ll = 0, rl = 0;
+ int16_t *inps = inframe->data;
+ // int16_t *outps = tbuf;
+ int32_t *outpl = (int32_t *)conn->tbuf;
+ for (i = 0; i < (inframe->length); i++) {
+ ls = *inps++;
+ rs = *inps++;
+
+ // here, do the mode stuff -- mono / reverse stereo / leftonly / rightonly
+ // also, raise the 16-bit samples to 32 bits.
+
+ switch (config.playback_mode) {
+ case ST_mono: {
+ int32_t lsl = ls;
+ int32_t rsl = rs;
+ int32_t both = lsl + rsl;
+ both =
+ both
+ << (16 -
+ 1); // keep all 17 bits of the sum of the 16 bit left and right channels
+ // -- the 17th bit will influence dithering later
+ ll = both;
+ rl = both;
+ } break;
+ case ST_reverse_stereo: {
+ ll = rs;
+ rl = ls;
+ ll = ll << 16;
+ rl = rl << 16;
+ } break;
+ case ST_left_only:
+ rl = ls;
+ ll = ls;
+ ll = ll << 16;
+ rl = rl << 16;
+ break;
+ case ST_right_only:
+ ll = rs;
+ rl = rs;
+ ll = ll << 16;
+ rl = rl << 16;
+ break;
+ case ST_stereo:
+ ll = ls;
+ rl = rs;
+ ll = ll << 16;
+ rl = rl << 16;
+ break; // nothing extra to do
+ }
- default:
- die("Shairport Sync only supports 16 or 32 bit input");
- }
+ // here, replicate the samples if you're upsampling
+
+ for (j = 0; j < conn->output_sample_ratio; j++) {
+ *outpl++ = ll;
+ *outpl++ = rl;
+ }
+ }
+ } break;
+ case 32: {
+ unsigned int i, j;
+ int32_t ls, rs;
+ int32_t ll = 0, rl = 0;
+ int32_t *inps = (int32_t *)inframe->data;
+ int32_t *outpl = (int32_t *)conn->tbuf;
+ for (i = 0; i < (inframe->length); i++) {
+ ls = *inps++;
+ rs = *inps++;
+
+ // here, do the mode stuff -- mono / reverse stereo / leftonly / rightonly
+
+ switch (config.playback_mode) {
+ case ST_mono: {
+ int64_t lsl = ls;
+ int64_t rsl = rs;
+ int64_t both = lsl + rsl;
+ both = both >> 1;
+ uint32_t both32 = both;
+ ll = both32;
+ rl = both32;
+ } break;
+ case ST_reverse_stereo: {
+ ll = rs;
+ rl = ls;
+ } break;
+ case ST_left_only:
+ rl = ls;
+ ll = ls;
+ break;
+ case ST_right_only:
+ ll = rs;
+ rl = rs;
+ break;
+ case ST_stereo:
+ ll = ls;
+ rl = rs;
+ break; // nothing extra to do
+ }
- inbuflength *= conn->output_sample_ratio;
+ // here, replicate the samples if you're upsampling
+
+ for (j = 0; j < conn->output_sample_ratio; j++) {
+ *outpl++ = ll;
+ *outpl++ = rl;
+ }
+ }
+ } break;
- // We have a frame of data. We need to see if we want to add or remove a frame from it to
- // keep in sync.
- // So we calculate the timing error for the first frame in the DAC.
- // If it's ahead of time, we add one audio frame to this frame to delay a subsequent frame
- // If it's late, we remove an audio frame from this frame to bring a subsequent frame
- // forward in time
+ default:
+ die("Shairport Sync only supports 16 or 32 bit input (stereo)");
+ }
+ } else {
+ // multichannel -- don't do anything odd here
+ if (conn->input_bit_depth == 16) {
+ unsigned int i;
+ int16_t ss;
+ int32_t sl;
+ int16_t *inps = inframe->data;
+ int32_t *outpl = (int32_t *)conn->tbuf;
+ for (i = 0; i < (inframe->length) * conn->input_num_channels; i++) {
+ ss = *inps++;
+ sl = ss;
+ sl = sl << 16;
+ unsigned int j;
+ for (j = 0; j < conn->output_sample_ratio; j++) {
+ *outpl++ = sl;
+ }
+ }
+ } else if (conn->input_bit_depth == 32) {
+ unsigned int i;
+ int32_t *inpl = (int32_t *)inframe->data;
+ int32_t *outpl = (int32_t *)conn->tbuf;
+ for (i = 0; i < (inframe->length) * conn->input_num_channels; i++) {
+ int32_t sl = *inpl++;
+ unsigned int j;
+ for (j = 0; j < conn->output_sample_ratio; j++) {
+ *outpl++ = sl;
+ }
+ }
+ } else {
+ die("Shairport Sync only supports 16 or 32 bit input (multichannel)");
+ }
+ }
- // now, go back as far as the total latency less, say, 100 ms, and check the presence of
- // frames from then onwards
+ inbuflength = (inframe->length) * conn->output_sample_ratio;
- frames_seen_in_this_logging_interval++;
+              // We have a frame of data. We need to see if we want to add or remove a frame from
+              // it to keep in sync. So we calculate the timing error for the first frame in the
+              // DAC. If it's ahead of time, we add one audio frame to this frame to delay a
+              // subsequent frame. If it's late, we remove an audio frame from this frame to bring
+              // a subsequent frame forward in time.
- // This is the timing error for the next audio frame in the DAC, if applicable
- int64_t sync_error = 0;
+ // now, go back as far as the total latency less, say, 100 ms, and check the
+ // presence of frames from then onwards
- int amount_to_stuff = 0;
+ at_least_one_frame_seen = 1;
- // check sequencing
- if (conn->last_seqno_read == -1)
- conn->last_seqno_read =
- inframe->sequence_number; // int32_t from seq_t, i.e. uint16_t, so okay.
- else {
- conn->last_seqno_read =
- (conn->last_seqno_read + 1) & 0xffff; // int32_t from seq_t, i.e. uint16_t, so okay.
- if (inframe->sequence_number !=
- conn->last_seqno_read) { // seq_t, ei.e. uint16_t and int32_t, so okay
- debug(2,
- "Player: packets out of sequence: expected: %u, got: %u, with ab_read: %u "
- "and ab_write: %u.",
- conn->last_seqno_read, inframe->sequence_number, conn->ab_read, conn->ab_write);
- conn->last_seqno_read = inframe->sequence_number; // reset warning...
- }
- }
+ int16_t bo = conn->ab_write - conn->ab_read; // do this in 16 bits
+ conn->buffer_occupancy = bo; // 32 bits
- int16_t bo = conn->ab_write - conn->ab_read; // do this in 16 bits
- conn->buffer_occupancy = bo; // 32 bits
+ if (conn->buffer_occupancy < minimum_buffer_occupancy)
+ minimum_buffer_occupancy = conn->buffer_occupancy;
- if ((frames_seen_in_this_logging_interval == 1) ||
- (conn->buffer_occupancy < minimum_buffer_occupancy))
- minimum_buffer_occupancy = conn->buffer_occupancy;
+ if (conn->buffer_occupancy > maximum_buffer_occupancy)
+ maximum_buffer_occupancy = conn->buffer_occupancy;
- if ((frames_seen_in_this_logging_interval == 1) ||
- (conn->buffer_occupancy > maximum_buffer_occupancy))
- maximum_buffer_occupancy = conn->buffer_occupancy;
+ // now, before outputting anything to the output device, check the stats
- // now, before outputting anything to the output device, check the stats
+ uint32_t stats_logging_interval_in_frames =
+ 8 * RATE_FROM_ENCODED_FORMAT(config.current_output_configuration);
+ if ((stats_logging_interval_in_frames != 0) &&
+ (frames_since_last_stats_logged > stats_logging_interval_in_frames)) {
- if (play_number % print_interval == 0) {
+ // here, calculate the input and output frame rates, where possible, even if
+ // statistics have not been requested this is to calculate them in case they are
+ // needed by the D-Bus interface or elsewhere.
- // here, calculate the input and output frame rates, where possible, even if statistics
- // have not been requested
- // this is to calculate them in case they are needed by the D-Bus interface or
- // elsewhere.
+ if (conn->input_frame_rate_starting_point_is_valid) {
+ uint64_t elapsed_reception_time, frames_received;
+ elapsed_reception_time = conn->frames_inward_measurement_time -
+ conn->frames_inward_measurement_start_time;
+ frames_received = conn->frames_inward_frames_received_at_measurement_time -
+ conn->frames_inward_frames_received_at_measurement_start_time;
+ conn->input_frame_rate = (1.0E9 * frames_received) /
+ elapsed_reception_time; // an IEEE double calculation
+ // with two 64-bit integers
+ } else {
+ conn->input_frame_rate = 0.0;
+ }
- if (conn->input_frame_rate_starting_point_is_valid) {
- uint64_t elapsed_reception_time, frames_received;
- elapsed_reception_time =
- conn->frames_inward_measurement_time - conn->frames_inward_measurement_start_time;
- frames_received = conn->frames_inward_frames_received_at_measurement_time -
- conn->frames_inward_frames_received_at_measurement_start_time;
- conn->input_frame_rate =
- (1.0E9 * frames_received) /
- elapsed_reception_time; // an IEEE double calculation with two 64-bit integers
- } else {
- conn->input_frame_rate = 0.0;
- }
+ int stats_status = 0;
+ if ((config.output->delay) && (config.output->stats)) {
+ uint64_t frames_sent_for_play;
+ uint64_t raw_measurement_time;
+ uint64_t corrected_measurement_time;
+ uint64_t actual_delay;
+ stats_status =
+ config.output->stats(&raw_measurement_time, &corrected_measurement_time,
+ &actual_delay, &frames_sent_for_play);
+ // debug(1,"status: %d, actual_delay: %" PRIu64 ", frames_sent_for_play: %"
+ // PRIu64
+ // ", frames_played: %" PRIu64 ".", stats_status, actual_delay,
+ // frames_sent_for_play, frames_sent_for_play - actual_delay);
+ uint64_t frames_played_by_output_device = frames_sent_for_play - actual_delay;
+ // If the status is zero, it means that there were no output problems since the
+ // last time the stats call was made. Thus, the frame rate should be valid.
+ if ((stats_status == 0) && (previous_frames_played_valid != 0)) {
+ uint64_t frames_played_in_this_interval =
+ frames_played_by_output_device - previous_frames_played;
+ int64_t raw_interval = raw_measurement_time - previous_raw_measurement_time;
+ int64_t corrected_interval =
+ corrected_measurement_time - previous_corrected_measurement_time;
+ if (raw_interval != 0) {
+ conn->raw_frame_rate = (1e9 * frames_played_in_this_interval) / raw_interval;
+ conn->corrected_frame_rate =
+ (1e9 * frames_played_in_this_interval) / corrected_interval;
+ conn->frame_rate_valid = 1;
+ // debug(1,"frames_played_in_this_interval: %" PRIu64 ", interval: %" PRId64
+ // ", rate: %f.",
+ // frames_played_in_this_interval, interval, conn->frame_rate);
+ }
+ }
- int stats_status = 0;
- if ((config.output->delay) && (config.no_sync == 0) && (config.output->stats)) {
- uint64_t frames_sent_for_play;
- uint64_t raw_measurement_time;
- uint64_t corrected_measurement_time;
- uint64_t actual_delay;
- stats_status =
- config.output->stats(&raw_measurement_time, &corrected_measurement_time,
- &actual_delay, &frames_sent_for_play);
- // debug(1,"status: %d, actual_delay: %" PRIu64 ", frames_sent_for_play: %" PRIu64 ",
- // frames_played: %" PRIu64 ".", stats_status, actual_delay, frames_sent_for_play,
- // frames_sent_for_play - actual_delay);
- uint64_t frames_played = frames_sent_for_play - actual_delay;
- // If the status is zero, it means that there were no output problems since the
- // last time the stats call was made. Thus, the frame rate should be valid.
- if ((stats_status == 0) && (previous_frames_played_valid != 0)) {
- uint64_t frames_played_in_this_interval = frames_played - previous_frames_played;
- int64_t raw_interval = raw_measurement_time - previous_raw_measurement_time;
- int64_t corrected_interval =
- corrected_measurement_time - previous_corrected_measurement_time;
- if (raw_interval != 0) {
- conn->raw_frame_rate = (1e9 * frames_played_in_this_interval) / raw_interval;
- conn->corrected_frame_rate =
- (1e9 * frames_played_in_this_interval) / corrected_interval;
- conn->frame_rate_valid = 1;
- // debug(1,"frames_played_in_this_interval: %" PRIu64 ", interval: %" PRId64 ",
- // rate: %f.",
- // frames_played_in_this_interval, interval, conn->frame_rate);
+              // uncomment the if statement if you want to get as long a period for
+              // calculating the frame rate as possible without an output break or error
+ if ((stats_status != 0) || (previous_frames_played_valid == 0)) {
+ // if we have just detected an outputting error, or if we have no
+ // starting information
+ if (stats_status != 0)
+ conn->frame_rate_valid = 0;
+ previous_frames_played = frames_played_by_output_device;
+ previous_raw_measurement_time = raw_measurement_time;
+ previous_corrected_measurement_time = corrected_measurement_time;
+ previous_frames_played_valid = 1;
}
}
- // uncomment the if statement if your want to get as long a period for
- // calculating the frame rate as possible without an output break or error
- if ((stats_status != 0) || (previous_frames_played_valid == 0)) {
- // if we have just detected an outputting error, or if we have no
- // starting information
- if (stats_status != 0)
- conn->frame_rate_valid = 0;
- previous_frames_played = frames_played;
- previous_raw_measurement_time = raw_measurement_time;
- previous_corrected_measurement_time = corrected_measurement_time;
- previous_frames_played_valid = 1;
+ // we can now calculate running averages for sync error (frames), corrections
+ // (ppm), insertions plus deletions (ppm)
+ double average_sync_error = 0.0;
+ double average_gap_ms = 0.0;
+ double corrections_ppm = 0.0;
+ double insertions_plus_deletions_ppm = 0.0;
+ if (number_of_statistics == 0) {
+ debug(1, "number_of_statistics is zero!");
+ } else {
+ average_sync_error =
+ (1000.0 * tsum_of_sync_errors) /
+ (number_of_statistics *
+ RATE_FROM_ENCODED_FORMAT(config.current_output_configuration));
+ average_gap_ms = ((1.0 * tsum_of_gaps) / number_of_statistics) * 0.000001;
+ if (tsum_of_frames != 0) {
+ corrections_ppm = (1000000.0 * tsum_of_corrections) / tsum_of_frames;
+ insertions_plus_deletions_ppm =
+ (1000000.0 * tsum_of_insertions_and_deletions) / tsum_of_frames;
+ } else {
+ debug(3, "tsum_of_frames: %zu.", tsum_of_frames);
+ }
}
- }
-
- // we can now calculate running averages for sync error (frames), corrections (ppm),
- // insertions plus deletions (ppm), drift (ppm)
- double moving_average_sync_error = 0.0;
- double moving_average_correction = 0.0;
- double moving_average_insertions_plus_deletions = 0.0;
- if (number_of_statistics == 0) {
- debug(2, "number_of_statistics is zero!");
- } else {
- moving_average_sync_error = (1.0 * tsum_of_sync_errors) / number_of_statistics;
- moving_average_correction = (1.0 * tsum_of_corrections) / number_of_statistics;
- moving_average_insertions_plus_deletions =
- (1.0 * tsum_of_insertions_and_deletions) / number_of_statistics;
- // double moving_average_drift = (1.0 * tsum_of_drifts) / number_of_statistics;
- }
- // if ((play_number/print_interval)%20==0)
- // figure out which statistics profile to use, depending on the kind of stream
-
- if (config.statistics_requested) {
-
- if (frames_seen_in_this_logging_interval) {
- do {
- line_of_stats[0] = '\0';
- statistics_column = 0;
- was_a_previous_column = 0;
- statistics_item("Sync Error ms", "%*.2f", 13,
- 1000 * moving_average_sync_error / config.output_rate);
- statistics_item("Net Sync PPM", "%*.1f", 12,
- moving_average_correction * 1000000 /
- (352 * conn->output_sample_ratio));
- statistics_item("All Sync PPM", "%*.1f", 12,
- moving_average_insertions_plus_deletions * 1000000 /
- (352 * conn->output_sample_ratio));
- statistics_item(" Packets", "%*d", 11, play_number);
- statistics_item("Missing", "%*" PRIu64 "", 7, conn->missing_packets);
- statistics_item(" Late", "%*" PRIu64 "", 6, conn->late_packets);
- statistics_item("Too Late", "%*" PRIu64 "", 8, conn->too_late_packets);
- statistics_item("Resend Reqs", "%*" PRIu64 "", 11, conn->resend_requests);
- statistics_item("Min DAC Queue", "%*" PRIu64 "", 13, minimum_dac_queue_size);
- statistics_item("Min Buffers", "%*" PRIu32 "", 11, minimum_buffer_occupancy);
- statistics_item("Max Buffers", "%*" PRIu32 "", 11, maximum_buffer_occupancy);
+ if (config.statistics_requested) {
+ if (at_least_one_frame_seen) {
+ do {
+ line_of_stats[0] = '\0';
+ statistics_column = 0;
+ was_a_previous_column = 0;
+ statistics_item("Av Sync Error (ms)", "%*.2f", 18, average_sync_error);
+ statistics_item("Net Sync PPM", "%*.1f", 12, corrections_ppm);
+ statistics_item("All Sync PPM", "%*.1f", 12, insertions_plus_deletions_ppm);
+ statistics_item("Av Sync Window (ms)", "%*.2f", 19, average_gap_ms);
+ statistics_item(" Packets", "%*d", 11, play_number);
+ statistics_item("Missing", "%*" PRIu64 "", 7, conn->missing_packets);
+ statistics_item(" Late", "%*" PRIu64 "", 6, conn->late_packets);
+ statistics_item("Too Late", "%*" PRIu64 "", 8, conn->too_late_packets);
+ statistics_item("Resend Reqs", "%*" PRIu64 "", 11, conn->resend_requests);
+ statistics_item("Min DAC Queue", "%*" PRIu64 "", 13, minimum_dac_queue_size);
+ statistics_item("Min Buffers", "%*" PRIu32 "", 11, minimum_buffer_occupancy);
+ statistics_item("Max Buffers", "%*" PRIu32 "", 11, maximum_buffer_occupancy);
#ifdef CONFIG_AIRPLAY_2
- if (conn->ap2_audio_buffer_minimum_size > 10 * 1024)
- statistics_item("Min Buffer Size", "%*" PRIu32 "k", 14,
- conn->ap2_audio_buffer_minimum_size / 1024);
- else
- statistics_item("Min Buffer Size", "%*" PRIu32 "", 15,
- conn->ap2_audio_buffer_minimum_size);
+ if (conn->ap2_audio_buffer_minimum_size > 10 * 1024)
+ statistics_item("Min Buffer Size", "%*" PRIu32 "k", 14,
+ conn->ap2_audio_buffer_minimum_size / 1024);
+ else
+ statistics_item("Min Buffer Size", "%*" PRIu32 "", 15,
+ conn->ap2_audio_buffer_minimum_size);
#else
- statistics_item("N/A", " "); // dummy -- should never be visible
-#endif
- statistics_item("Nominal FPS", "%*.2f", 11, conn->remote_frame_rate);
- statistics_item("Received FPS", "%*.2f", 12, conn->input_frame_rate);
- // only make the next two columns appear if we are getting stats information from
- // the back end
- if (config.output->stats) {
- if (conn->frame_rate_valid) {
- statistics_item("Output FPS (r)", "%*.2f", 14, conn->raw_frame_rate);
- statistics_item("Output FPS (c)", "%*.2f", 14, conn->corrected_frame_rate);
+ statistics_item("N/A", " "); // dummy -- should never be visible
+#endif
+ statistics_item("Nominal FPS", "%*.2f", 11, conn->remote_frame_rate);
+ statistics_item("Received FPS", "%*.2f", 12, conn->input_frame_rate);
+ // only make the next two columns appear if we are getting stats information
+ // from the back end
+ if (config.output->stats) {
+ if (conn->frame_rate_valid) {
+ statistics_item("Output FPS (r)", "%*.2f", 14, conn->raw_frame_rate);
+ statistics_item("Output FPS (c)", "%*.2f", 14, conn->corrected_frame_rate);
+ } else {
+ statistics_item("Output FPS (r)", " N/A");
+ statistics_item("Output FPS (c)", " N/A");
+ }
} else {
- statistics_item("Output FPS (r)", " N/A");
- statistics_item("Output FPS (c)", " N/A");
+ statistics_column = statistics_column + 2;
}
- } else {
- statistics_column = statistics_column + 2;
- }
- statistics_item("Source Drift PPM", "%*.2f", 16,
- (conn->local_to_remote_time_gradient - 1.0) * 1000000);
- statistics_item("Drift Samples", "%*d", 13,
- conn->local_to_remote_time_gradient_sample_count);
- /*
- statistics_item("estimated (unused) correction ppm", "%*.2f",
- strlen("estimated (unused) correction ppm"),
- (conn->frame_rate_valid != 0)
- ? ((conn->frame_rate -
- conn->remote_frame_rate * conn->output_sample_ratio *
- conn->local_to_remote_time_gradient) *
- 1000000) /
- conn->frame_rate
- : 0.0);
- */
- statistics_row++;
- inform(line_of_stats);
- } while (statistics_row < 2);
- } else {
- inform("No frames received in the last sampling interval.");
- }
- }
-#ifdef CONFIG_AIRPLAY_2
- conn->ap2_audio_buffer_minimum_size = -1;
-#endif
- }
-
- // here, we want to check (a) if we are meant to do synchronisation,
- // (b) if we have a delay procedure, (c) if we can get the delay.
-
- // If any of these are false, we don't do any synchronisation stuff
-
- int resp = -1; // use this as a flag -- if negative, we can't rely on a real known delay
- current_delay = -1; // use this as a failure flag
-
- if (config.output->delay) {
- long l_delay;
- resp = config.output->delay(&l_delay);
- if (resp == 0) { // no error
- current_delay = l_delay;
- if (l_delay >= 0)
- current_delay = l_delay;
- else {
- debug(2, "Underrun of %ld frames reported, but ignored.", l_delay);
- current_delay =
- 0; // could get a negative value if there was underrun, but ignore it.
- }
- if ((frames_seen_in_this_logging_interval == 1) ||
- (current_delay < minimum_dac_queue_size)) {
- minimum_dac_queue_size = current_delay; // update for display later
- }
- } else {
- current_delay = 0;
- if ((resp == sps_extra_code_output_stalled) &&
- (config.unfixable_error_reported == 0)) {
- config.unfixable_error_reported = 1;
- if (config.cmd_unfixable) {
- warn("Connection %d: An unfixable error has been detected -- output device is "
- "stalled. Executing the "
- "\"run_this_if_an_unfixable_error_is_detected\" command.",
- conn->connection_number);
- command_execute(config.cmd_unfixable, "output_device_stalled", 1);
+ statistics_row++;
+ inform("%s", line_of_stats);
+ } while (statistics_row < 2);
} else {
- warn("Connection %d: An unfixable error has been detected -- output device is "
- "stalled. \"No "
- "run_this_if_an_unfixable_error_is_detected\" command provided -- nothing "
- "done.",
- conn->connection_number);
+ inform("No frames received in the last sampling interval.");
}
- } else {
- if ((resp != -EBUSY) &&
- (resp != -ENODEV)) // delay and not-there errors can be reported if the device
- // is (hopefully temporarily) busy or unavailable
- debug(1, "Delay error %d when checking running latency.", resp);
}
+ tsum_of_sync_errors = 0;
+ tsum_of_corrections = 0;
+ tsum_of_insertions_and_deletions = 0;
+ number_of_statistics = 0;
+ tsum_of_frames = 0;
+ tsum_of_gaps = 0;
+ minimum_dac_queue_size = UINT64_MAX; // hack reset
+ maximum_buffer_occupancy = INT32_MIN; // can't be less than this
+ minimum_buffer_occupancy = INT32_MAX; // can't be more than this
+#ifdef CONFIG_AIRPLAY_2
+ conn->ap2_audio_buffer_minimum_size = -1;
+#endif
+ at_least_one_frame_seen = 0;
+ frames_since_last_stats_logged = 0;
}
- }
- if (resp == 0) {
-
- uint32_t should_be_frame_32;
- // this is denominated in the frame rate of the incoming stream
- local_time_to_frame(local_time_now, &should_be_frame_32, conn);
-
- int64_t should_be_frame = should_be_frame_32;
- should_be_frame = should_be_frame * conn->output_sample_ratio;
-
- // current_delay is denominated in the frame rate of the outgoing stream
- int64_t will_be_frame = inframe->given_timestamp;
- will_be_frame = will_be_frame * conn->output_sample_ratio;
- will_be_frame = (will_be_frame - current_delay) &
- output_rtptime_mask; // this is to make sure it's unsigned modulo 2^bits
- // for the rtptime
-
- // Now we have a tricky piece of calculation to perform.
- // We know the rtptimes are unsigned in 32 or more bits -- call it r bits. We have to
- // calculate the difference between them. on the basis that they should be within
- // 2^(r-1) of one another, so that the unsigned subtraction result, modulo 2^r, if
- // interpreted as a signed number, should yield the difference _and_ the ordering.
-
- sync_error = should_be_frame - will_be_frame; // this is done in int64_t form
-
- // int64_t t_ping = should_be_frame - conn->anchor_rtptime;
- // if (t_ping < 0)
- // debug(1, "Frame %" PRIu64 " is %" PRId64 " frames before anchor time %" PRIu64 ".",
- // should_be_frame, -t_ping, conn->anchor_rtptime);
-
- // sign-extend the r-bit unsigned int calculation by treating it as an r-bit signed
- // integer
- if ((sync_error & output_rtptime_sign_mask) !=
- 0) { // check what would be the sign bit in "r" bit unsigned arithmetic
- // result is negative
- sync_error = sync_error | output_rtptime_mask_not;
- } else {
- // result is positive
- sync_error = sync_error & output_rtptime_mask;
- }
+ if (conn->at_least_one_frame_seen_this_session == 0) {
+ conn->at_least_one_frame_seen_this_session = 1;
- if (at_least_one_frame_seen_this_session == 0) {
- at_least_one_frame_seen_this_session = 1;
#ifdef CONFIG_METADATA
this_is_the_first_frame = 1;
#endif
- // debug(2,"first frame real sync error (positive --> late): %" PRId64 " frames.",
- // sync_error);
-
- // this is a sneaky attempt to make a final adjustment to the timing of the first
- // packet
-
- // the very first packet generally has a first_frame_early_bias subtracted from its
- // timing to make it more likely that it will be early than late, making it possible
- // to compensate for it be adding a few frames of silence.
-
- // debug(2,"first frame real sync error (positive --> late): %" PRId64 " frames.",
- // sync_error);
-
- // remove the bias when reporting the error to make it the true error
- debug(2,
- "first frame sync error (positive --> late): %" PRId64
- " frames, %.3f mS at %d frames per second output.",
- sync_error + first_frame_early_bias,
- (1000.0 * (sync_error + first_frame_early_bias)) / config.output_rate,
- config.output_rate);
-
- // if the packet is early, add the frames needed to put it in sync.
- if (sync_error < 0) {
- size_t final_adjustment_length_sized = -sync_error;
- char *final_adjustment_silence =
- malloc(conn->output_bytes_per_frame * final_adjustment_length_sized);
- if (final_adjustment_silence) {
-
- conn->previous_random_number = generate_zero_frames(
- final_adjustment_silence, final_adjustment_length_sized, config.output_format,
- conn->enable_dither, conn->previous_random_number);
- int final_adjustment = -sync_error;
- final_adjustment = final_adjustment - first_frame_early_bias;
- debug(2,
- "final sync adjustment: %" PRId64
- " silent frames added with a bias of %" PRId64 " frames.",
- -sync_error, first_frame_early_bias);
- config.output->play(final_adjustment_silence, final_adjustment_length_sized,
- play_samples_are_untimed, 0, 0);
- free(final_adjustment_silence);
- } else {
- warn("Failed to allocate memory for a final_adjustment_silence buffer of %d "
- "frames for a "
- "sync error of %d frames.",
- final_adjustment_length_sized, sync_error);
- }
- sync_error = 0; // say the error was fixed!
- }
+ char short_description[256];
+ snprintf(short_description, sizeof(short_description), "%u/%s/%u",
+ RATE_FROM_ENCODED_FORMAT(config.current_output_configuration),
+ sps_format_description_string(
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration)),
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration));
// since this is the first frame of audio, inform the user if requested...
#ifdef CONFIG_AIRPLAY_2
if (conn->airplay_stream_type == realtime_stream) {
if (conn->airplay_type == ap_1) {
#ifdef CONFIG_METADATA
- send_ssnc_metadata('styp', "Classic", strlen("Classic"), 1);
+ send_ssnc_stream_description("Classic", get_ssrc_name(conn->incoming_ssrc));
#endif
if (config.statistics_requested)
- inform("Connection %d: Playback started at frame %" PRId64
- " -- Classic AirPlay (\"AirPlay 1\") Compatible.",
- conn->connection_number, inframe->given_timestamp);
+ inform("Connection %d: Classic AirPlay (\"AirPlay 1\") Compatible playback. "
+ "Input format: %s. Output format: %s.",
+ conn->connection_number, get_ssrc_name(conn->incoming_ssrc),
+ short_description);
} else {
#ifdef CONFIG_METADATA
- send_ssnc_metadata('styp', "Realtime", strlen("Realtime"), 1);
+ send_ssnc_stream_description("Realtime", get_ssrc_name(conn->incoming_ssrc));
#endif
- if (config.statistics_requested)
- inform("Connection %d: Playback started at frame %" PRId64
- " -- AirPlay 2 Realtime.",
- conn->connection_number, inframe->given_timestamp);
+ if (config.statistics_requested) {
+ if (conn->ap2_client_name == NULL)
+ inform("Connection %d: AirPlay 2 Realtime playback. "
+ "Input format: %s. Output format: %s.",
+ conn->connection_number, get_ssrc_name(conn->incoming_ssrc), "");
+ else
+ inform("Connection %d: AirPlay 2 Realtime playback. "
+ "Source: \"%s\". Input format: %s. Output format: %s.",
+ conn->connection_number, conn->ap2_client_name,
+ get_ssrc_name(conn->incoming_ssrc), short_description);
+ }
}
} else {
#ifdef CONFIG_METADATA
- send_ssnc_metadata('styp', "Buffered", strlen("Buffered"), 1);
+ send_ssnc_stream_description("Buffered", get_ssrc_name(conn->incoming_ssrc));
#endif
- if (config.statistics_requested)
- inform("Connection %d: Playback started at frame %" PRId64
- " -- AirPlay 2 Buffered.",
- conn->connection_number, inframe->given_timestamp);
+
+ if (config.statistics_requested) {
+
+ if (conn->ap2_client_name == NULL)
+ inform("Connection %d: AirPlay 2 Buffered playback. "
+ "Input format: %s. Output format: %s.",
+ conn->connection_number, get_ssrc_name(conn->incoming_ssrc),
+ short_description);
+ else
+ inform("Connection %d: AirPlay 2 Buffered playback. "
+ "Source: \"%s\". Input format: %s. Output format: %s.",
+ conn->connection_number, conn->ap2_client_name,
+ get_ssrc_name(conn->incoming_ssrc), short_description);
+ }
}
#else
#ifdef CONFIG_METADATA
- send_ssnc_metadata('styp', "Classic", strlen("Classic"), 1);
+ send_ssnc_stream_description("AirPlay", "ALAC/44100/S16/2");
#endif
if (config.statistics_requested)
- inform("Connection %d: Playback started at frame %" PRId64
- " -- Classic AirPlay (\"AirPlay 1\").",
- conn->connection_number, inframe->given_timestamp);
+ inform("Connection %d: Classic AirPlay (\"AirPlay 1\") playback. "
+ "Input format: ALAC/44100/S16/2. Output format: %s.",
+ conn->connection_number, short_description);
+#endif
+
+#ifdef CONFIG_METADATA
+ send_ssnc_metadata('odsc', short_description, strlen(short_description), 1);
#endif
- }
- // not too sure if abs() is implemented for int64_t, so we'll do it manually
- int64_t abs_sync_error = sync_error;
- if (abs_sync_error < 0)
- abs_sync_error = -abs_sync_error;
-
- if ((config.no_sync == 0) && (inframe->given_timestamp != 0) &&
- (config.resync_threshold > 0.0) &&
- (abs_sync_error > config.resync_threshold * config.output_rate)) {
- sync_error_out_of_bounds++;
- } else {
- sync_error_out_of_bounds = 0;
}
- if (sync_error_out_of_bounds > 3) {
- // debug(1, "lost sync with source for %d consecutive packets -- flushing and "
- // "resyncing. Error: %lld.",
- // sync_error_out_of_bounds, sync_error);
- sync_error_out_of_bounds = 0;
+ // here, we want to check (a) if we are meant to do synchronisation,
+ // (b) if we have a delay procedure, (c) if we can get the delay.
- uint64_t frames_sent_for_play = 0;
- uint64_t actual_delay = 0;
+ // If any of these are false, we don't do any synchronisation stuff
- if ((config.output->delay) && (config.no_sync == 0) && (config.output->stats)) {
- uint64_t raw_measurement_time;
- uint64_t corrected_measurement_time;
- config.output->stats(&raw_measurement_time, &corrected_measurement_time,
- &actual_delay, &frames_sent_for_play);
- }
+ int resp = -1; // use this as a flag -- if negative, we can't rely on a real known delay
+ current_delay = -1; // use this as a failure flag
- int64_t filler_length =
- (int64_t)(config.resync_threshold * config.output_rate); // number of samples
- if ((sync_error > 0) && (sync_error > filler_length)) {
- debug(1,
- "Large positive (i.e. late) sync error of %" PRId64
- " frames (%f seconds), at frame: %" PRIu32 ".",
- sync_error, (sync_error * 1.0) / config.output_rate,
- inframe->given_timestamp);
- // debug(1, "%" PRId64 " frames sent to DAC. DAC buffer contains %" PRId64 "
- // frames.",
- // frames_sent_for_play, actual_delay);
- // the sync error is output frames, but we have to work out how many source frames
- // to drop there may be a multiple (the conn->output_sample_ratio) of output frames
- // per input frame...
- int64_t source_frames_to_drop = sync_error;
- source_frames_to_drop = source_frames_to_drop / conn->output_sample_ratio;
-
- // drop some extra frames to give the pipeline a chance to recover
- int64_t extra_frames_to_drop =
- (int64_t)(conn->input_rate * config.resync_recovery_time);
- source_frames_to_drop += extra_frames_to_drop;
-
- uint32_t frames_to_drop = source_frames_to_drop;
- uint32_t flush_to_frame = inframe->given_timestamp + frames_to_drop;
-
- do_flush(flush_to_frame, conn);
-
- } else if ((sync_error < 0) && ((-sync_error) > filler_length)) {
- debug(1,
- "Large negative (i.e. early) sync error of %" PRId64
- " frames (%f seconds), at frame: %" PRIu32 ".",
- sync_error, (sync_error * 1.0) / config.output_rate,
- inframe->given_timestamp);
- debug(3, "%" PRId64 " frames sent to DAC. DAC buffer contains %" PRId64 " frames.",
- frames_sent_for_play, actual_delay);
- int64_t silence_length = -sync_error;
- if (silence_length > (filler_length * 5))
- silence_length = filler_length * 5;
- size_t silence_length_sized = silence_length;
- char *long_silence = malloc(conn->output_bytes_per_frame * silence_length_sized);
- if (long_silence) {
-
- conn->previous_random_number =
- generate_zero_frames(long_silence, silence_length_sized, config.output_format,
- conn->enable_dither, conn->previous_random_number);
-
- debug(2, "Play a silence of %d frames.", silence_length_sized);
- config.output->play(long_silence, silence_length_sized, play_samples_are_untimed,
- 0, 0);
- free(long_silence);
+ // if making the measurements takes too long (e.g. due to scheduling), don't use
+ // it.
+
+ uint64_t mst = get_absolute_time_in_ns(); // measurement start time
+ uint64_t delay_measurement_time = 0;
+ if (config.output->delay) {
+ long l_delay;
+ resp = config.output->delay(&l_delay);
+ delay_measurement_time =
+ get_absolute_time_in_ns(); // put this after delay() returns, as it may take an
+ // appreciable amount of time to run
+ if (resp == 0) { // no error
+ current_delay = l_delay;
+ if (l_delay >= 0)
+ current_delay = l_delay;
+ else {
+ debug(2, "Underrun of %ld frames reported, but ignored.", l_delay);
+ current_delay =
+ 0; // could get a negative value if there was underrun, but ignore it.
+ }
+ if (current_delay < minimum_dac_queue_size) {
+ minimum_dac_queue_size = current_delay; // update for display later
+ }
+ } else {
+ current_delay = 0;
+ if ((resp == sps_extra_code_output_stalled) &&
+ (config.unfixable_error_reported == 0)) {
+ config.unfixable_error_reported = 1;
+ if (config.cmd_unfixable) {
+ warn("Connection %d: An unfixable error has been detected -- output device "
+ "is "
+ "stalled. Executing the "
+ "\"run_this_if_an_unfixable_error_is_detected\" command.",
+ conn->connection_number);
+ command_execute(config.cmd_unfixable, "output_device_stalled", 1);
+ } else {
+ warn("Connection %d: An unfixable error has been detected -- output device "
+ "is "
+ "stalled. \"No "
+ "run_this_if_an_unfixable_error_is_detected\" command provided -- "
+ "nothing "
+ "done.",
+ conn->connection_number);
+ }
} else {
- warn("Failed to allocate memory for a long_silence buffer of %d frames for a "
- "sync error of %d frames.",
- silence_length_sized, sync_error);
+ if ((resp != -EBUSY) &&
+ (resp != -ENODEV)) // delay and not-there errors can be reported if the
+ // device is (hopefully temporarily) busy or unavailable
+ // Note: ENODATA (a better fit) is not available in FreeBSD.
+ debug(1, "Delay error %d when checking running latency.", resp);
}
- reset_input_flow_metrics(conn);
}
- } else {
+ // debug(1, "resp is %d, delay is %ld.", resp, l_delay);
+ }
+ if (resp == 0) {
+
+ uint64_t the_time_this_frame_should_be_played;
+ frame_to_local_time(inframe->timestamp, &the_time_this_frame_should_be_played, conn);
+
+ uint64_t output_buffer_delay_time = current_delay;
+
+#ifdef CONFIG_FFMPEG
+ // the current delay should also include the frames that were kept in swr
+ // before the current block was requested
+ output_buffer_delay_time =
+ output_buffer_delay_time + frames_previously_retained_in_the_resampler;
+ // debug(1,"Allowing for %" PRId64 " frames previously held in the resampler.",
+ // frames_previously_retained_in_the_resampler);
+ // now we'll update frames_previously_retained_in_the_resampler
+ // to the figure after the current block
+ frames_previously_retained_in_the_resampler = conn->frames_retained_in_the_resampler;
+#endif
- /*
- // before we finally commit to this frame, check its sequencing and timing
- // require a certain error before bothering to fix it...
- if (sync_error > config.tolerance * config.output_rate) { // int64_t > int, okay
- amount_to_stuff = -1;
- }
- if (sync_error < -config.tolerance * config.output_rate) {
- amount_to_stuff = 1;
- }
- */
-
- if (amount_to_stuff == 0) {
- // use a "V" shaped function to decide if stuffing should occur
- int64_t s = r64i();
- s = s >> 31;
- s = s * config.tolerance * config.output_rate;
- s = (s >> 32) + config.tolerance * config.output_rate; // should be a number from 0
- // to config.tolerance *
- // config.output_rate;
- if ((sync_error > 0) && (sync_error > s)) {
- // debug(1,"Extra stuff -1");
- amount_to_stuff = -1;
+ output_buffer_delay_time =
+ output_buffer_delay_time *
+ 1000000000; // there should be plenty of space in a uint64_t for any
+ // conceivable current_delay value
+ output_buffer_delay_time =
+ output_buffer_delay_time /
+ RATE_FROM_ENCODED_FORMAT(config.current_output_configuration);
+ debug(3,
+ "current_delay: %" PRId64 ", output_buffer_delay_time: %.3f, output rate: %u.",
+ current_delay, output_buffer_delay_time * 0.000000001,
+ RATE_FROM_ENCODED_FORMAT(config.current_output_configuration));
+
+ uint64_t the_time_this_frame_will_be_played =
+ delay_measurement_time + output_buffer_delay_time;
+
+ double centered_sync_error_time = 0.0;
+ int64_t sync_error_ns = 0;
+ int64_t measurement_time = get_absolute_time_in_ns() - mst;
+
+ // debug(1, "measurement time: %" PRId64 " ns.", measurement_time);
+
+ if (measurement_time < 2000000) {
+
+ sync_error_ns =
+ the_time_this_frame_will_be_played - the_time_this_frame_should_be_played;
+ sync_error = (sync_error_ns *
+ RATE_FROM_ENCODED_FORMAT(config.current_output_configuration)) /
+ 1000000000;
+
+ // debug(1, "measurement time: %" PRId64 " ns. Sync error: %" PRId64 " ns, %" PRId64
+ // " frames, skipping_frames_at_start_of_play is %d.", measurement_time,
+ // sync_error_ns, sync_error, skipping_frames_at_start_of_play);
+
+ // A timestamp gap is when the timestamp of the next packet of frames is not equal to
+ // the previous packet's timestamp + number of frames therein.
+
+ // But wait! If there is a timestamp gap, this isn't really an error.
+
+ // Also, if it's a sync error at the start of a play sequence, then
+ // it can be dealt with by inserting a silence or skipping frames.
+
+ // So, here we have enough information to decide what to do with the "frame" of
+ // audio.
+
+ // If we are already skipping frames because of a prior first frame,
+ // we might need to adjust the skipping count due to a better time estimate
+
+ if (skipping_frames_at_start_of_play != 0) {
+ if (sync_error <= 0) {
+ debug(3,
+ "cancel skipping at start of play -- skip estimate was: %" PRId32
+ ", but sync_error is now: %" PRId64 ".",
+ frames_to_skip, sync_error);
+ frames_to_skip = 0;
+ skipping_frames_at_start_of_play = 0;
+ } else if (frames_to_skip != sync_error) {
+ debug(3,
+ "updating skipping at start of play count from: %" PRId32 " to: %" PRId64
+ ".",
+ frames_to_skip, sync_error);
+ frames_to_skip = sync_error;
+ }
}
- if ((sync_error < 0) && (sync_error < (-s))) {
- // debug(1,"Extra stuff +1");
- amount_to_stuff = 1;
+
+ // If it's the first frame or if it's at a timestamp discontinuity, then we can
+ // deal with it straight away.
+ if ((inframe != NULL) && ((conn->first_packet_timestamp == inframe->timestamp) ||
+ (inframe->timestamp_gap != 0))) {
+
+ // By default, when there is a sync error and some kind of discontinuity,
+ // e.g. a gap between timestamps of adjacent packets or a first packet,
+ // then we try to fix the sync error, either by skipping frames or by inserting a
+ // silence. However, if it's a negative timestamp gap between packets, only try to
+ // fix the timestamp_gap. The reason for this is that we don't know the purpose of
+ // the negative gaps. For all we know, it may be that the audio frames before and
+ // after the gap are meant to be contiguous.
+
+ int64_t gap_to_fix = sync_error; // this is what we look at normally
+
+ if (conn->first_packet_timestamp == inframe->timestamp) {
+ debug(3, "first frame: %u, sync_error %" PRId64 " frames.", inframe->timestamp,
+ sync_error);
+ skipping_frames_at_start_of_play = 1;
+ } else {
+ debug(3, "timestamp_gap: %d on frame %u, sync_error %" PRId64 " frames.",
+ inframe->timestamp_gap, inframe->timestamp, sync_error);
+ if (inframe->timestamp_gap < 0) {
+ gap_to_fix = -inframe->timestamp_gap; // this is frames at the input rate
+ int64_t gap_to_fix_ns = (gap_to_fix * 1000000000) / conn->input_rate;
+ gap_to_fix = (gap_to_fix_ns *
+ RATE_FROM_ENCODED_FORMAT(config.current_output_configuration)) /
+ 1000000000; // this is frames at the output rate
+ // debug(3, "due to timestamp gap of %d frames, skip %" PRId64 " output
+ // frames.", inframe->timestamp_gap, gap_to_fix);
+ }
+ }
+
+ if (gap_to_fix > 0) {
+ // debug(1, "drop %u frames, timestamp: %u, skipping_frames_at_start_of_play is
+ // %d.", gap_to_fix, inframe->timestamp, skipping_frames_at_start_of_play);
+ frames_to_skip += gap_to_fix;
+ sync_error_ns = 0; // don't invoke any sync checking
+ } else if (gap_to_fix < 0) { // this packet is early, so insert the right number
+ // of frames to zero the error
+ frames_to_skip = 0;
+ skipping_frames_at_start_of_play = 0;
+ int64_t gap = -gap_to_fix;
+ void *silence = malloc(
+ sps_format_sample_size(
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration)) *
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) * gap);
+ if (silence == NULL) {
+ debug(1, "Failed to allocate memory for a silent gap.");
+ } else {
+ // the player may change the contents of the buffer, so it has to be zeroed
+ // each time; might as well malloc and free it locally
+ conn->previous_random_number = generate_zero_frames(
+ silence, gap, conn->enable_dither, conn->previous_random_number,
+ config.current_output_configuration);
+ config.output->play(silence, gap, play_samples_are_untimed, 0, 0);
+ free(silence);
+ frames_played += gap;
+ // debug(1,"sent %d frames of silence.", gap);
+ sync_error_ns = 0; // don't invoke any sync checking
+ sync_error = 0;
+ }
+ }
}
- }
+ // debug(1, "frames_to_skip: %u.", frames_to_skip);
+ // don't do any sync error calculations if you're skipping frames
+ if (frames_to_skip == 0) {
+ // first, make room in the array if it's full
+ if (conn->sync_samples_count == sync_history_length) {
+ conn->sync_samples_count--;
+ }
+ last_sample_index = conn->sync_samples_index;
+ sync_samples[conn->sync_samples_index] = sync_error_ns;
+ conn->sync_samples_count++;
+ conn->sync_samples_index = (conn->sync_samples_index + 1) % sync_history_length;
+
+ // now find the lowest and highest errors
+ sync_samples_highest_error = sync_samples[0];
+ sync_samples_lowest_error = sync_samples[0];
+ sync_samples_second_highest_error = sync_samples[0];
+ sync_samples_second_lowest_error = sync_samples[0];
+ unsigned int s;
+ int64_t mean = 0;
+ for (s = 0; s < conn->sync_samples_count; s++) {
+ mean += sync_samples[s];
+ if (sync_samples[s] > sync_samples_highest_error) {
+ sync_samples_second_highest_error = sync_samples_highest_error;
+ sync_samples_highest_error = sync_samples[s];
+ } else if (sync_samples[s] > sync_samples_second_highest_error) {
+ sync_samples_second_highest_error = sync_samples[s];
+ } else if (sync_samples[s] < sync_samples_lowest_error) {
+ sync_samples_second_lowest_error = sync_samples_lowest_error;
+ sync_samples_lowest_error = sync_samples[s];
+ } else if (sync_samples[s] < sync_samples_second_lowest_error) {
+ sync_samples_second_lowest_error = sync_samples[s];
+ }
+ }
- // try to keep the corrections definitely below 1 in 1000 audio frames
+ if (conn->sync_samples_count != 0)
+ mean = mean / conn->sync_samples_count;
- // calculate the time elapsed since the play session started.
+ tsum_of_gaps = tsum_of_gaps + sync_samples_second_highest_error -
+ sync_samples_second_lowest_error;
- if (amount_to_stuff) {
- if ((local_time_now) && (conn->first_packet_time_to_play) &&
- (local_time_now >= conn->first_packet_time_to_play)) {
+ int64_t centered_sync_error_ns =
+ (sync_samples_second_highest_error + sync_samples_second_lowest_error) / 2;
+ centered_sync_error_time = centered_sync_error_ns * 0.000000001;
- int64_t tp =
- (local_time_now - conn->first_packet_time_to_play) /
- 1000000000; // seconds int64_t from uint64_t which is always positive, so ok
+ // debug(1, "centered_sync_error_ns: %" PRId64 ", %.3f sec.",
+ // centered_sync_error_ns, centered_sync_error_time);
+
+ // int64_t centered_sync_error =
+ // (centered_sync_error_ns * config.current_output_configuration->rate) /
+ // 1000000000;
+
+ // decide whether to do a stuff
- if (tp < 5)
- amount_to_stuff = 0; // wait at least five seconds
/*
- else if (tp < 30) {
- if ((random() % 1000) >
- 352) // keep it to about 1:1000 for the first thirty seconds
- amount_to_stuff = 0;
+ // calculate the standard deviation
+
+ double sd = 0.0;
+ for (s = 0; s < conn->sync_samples_count; s++) {
+ sd += pow(sync_samples[s] - mean, 2);
}
+ if (conn->sync_samples_count != 0)
+ sd = sqrt(sd / conn->sync_samples_count);
+
+ // debug(1, "samples: %u, mean: %" PRId64 ", standard deviation: %f.",
+ // conn->sync_samples_count, mean, sd);
*/
+
+ // it seems (?) that the standard deviation settles down markedly after 10 samples
+ // == 1024 * 10 frames in AAC
+ if (play_number * inbuflength >= 10 * 1024) {
+ // the tolerance is on either side of the correct, thus it contributes twice
+ // to the overall gap
+ int64_t tolerance_ns = (int64_t)(config.tolerance * 1000000000L);
+ // int64_t gap = 2 * tolerance_ns + sync_samples_second_highest_error -
+ // sync_samples_second_lowest_error;
+ // int64_t gap = 2 * tolerance_ns;
+ // since the gap should be symmetrical about 0, stuff accordingly
+ if (centered_sync_error_ns > tolerance_ns) {
+ amount_to_stuff = -1 * (inbuflength / 350);
+ if (amount_to_stuff == 0)
+ amount_to_stuff = -1;
+ debug(3, "drop a frame, inbuflength is %d, amount_to_stuff is %d.",
+ inbuflength, amount_to_stuff);
+ } else if (centered_sync_error_ns < (-tolerance_ns)) {
+ amount_to_stuff = +1 * (inbuflength / 350);
+ if (amount_to_stuff == 0)
+ amount_to_stuff = 1;
+ debug(3, "add a frame, inbuflength is %d, amount_to_stuff is %d.",
+ inbuflength, amount_to_stuff);
+ } else {
+ debug(3,
+ "error is within tolerance: centered_sync_error_ns: %" PRId64
+ ", tolerance_ns: %" PRId64 " ns.",
+ centered_sync_error_ns, tolerance_ns);
+ }
+ }
}
}
- if (config.no_sync != 0)
- amount_to_stuff = 0; // no stuffing if it's been disabled
+ if (amount_to_stuff)
+ debug(3,
+ // "stuff: %+d, sync_error: %+5.3f milliseconds.",
+ // amount_to_stuff, sync_error * 1000);
+ "stuff: %+d, sync_errors actual: %+5.3f milliseconds, bufferlength: %d, "
+ "sync window : %+5.3f "
+ "milliseconds, prior second highest: %+5.3f milliseconds, prior second "
+ "lowest: %+5.3f "
+ "milliseconds.",
+ amount_to_stuff, sync_error_ns * 0.000001, inbuflength,
+ (sync_samples_second_highest_error - sync_samples_second_lowest_error) *
+ 0.000001,
+ sync_samples_second_highest_error * 0.000001,
+ sync_samples_second_lowest_error * 0.000001);
+
+ // now, deal with sync errors and anomalies
- // Apply DSP here
+ if ((config.no_sync == 0) && (inframe->timestamp != 0) &&
+ (config.resync_threshold > 0.0) &&
+ // (fabs(sync_error) > config.resync_threshold)) {
+ (fabs(centered_sync_error_time) > config.resync_threshold) &&
+ // don't count it if the error max and min values bracket (i.e. are on either
+ // size of) zero
+ !((sync_samples_highest_error >= 0) && ((sync_samples_lowest_error <= 0))) &&
+ (conn->sync_samples_count == sync_history_length)) {
+ sync_error_out_of_bounds++;
+ } else {
+ sync_error_out_of_bounds = 0;
+ }
- // check the state of loudness and convolution flags here and don't change them for
- // the frame
+ if (sync_error_out_of_bounds != 0) {
+ debug(2,
+ "sync error for frame %" PRIu32
+ " out of bounds on %d successive occasions. Error is %.3f milliseconds -- "
+ "resync requested (%u, %" PRId64 ", %" PRId64 ").",
+ inframe->timestamp, sync_error_out_of_bounds, centered_sync_error_time * 1000,
+ conn->sync_samples_count, sync_samples_highest_error,
+ sync_samples_lowest_error);
+
+ if (centered_sync_error_time < 0) {
+ request_resync = 1; // ask for a resync
+ } else {
+ int16_t occ = conn->ab_write - conn->ab_read;
+ debug(2,
+ "drop late packet, timestamp: %u, late by: %.3f ms, packets remaining in "
+ "the buffer: %u.",
+ inframe->timestamp, centered_sync_error_time * 1000, occ);
+ unsigned int s;
+ for (s = 0; s < conn->sync_samples_count; s++) {
+ debug(3, "sample: %u, value: %.3f ms", s, sync_samples[s] * 0.000001);
+ }
+ debug(3, "sync_history_length: %u, samples_count: %u, sample_index: %u",
+ sync_history_length, conn->sync_samples_count, last_sample_index);
+ }
+ sync_error_out_of_bounds = 0;
+ // conn->sync_samples_index = 0;
+ // conn->sync_samples_count = 0;
+ } else {
- int do_loudness = config.loudness;
+ if (config.no_sync != 0)
+ amount_to_stuff = 0; // no stuffing if it's been disabled
-#ifdef CONFIG_CONVOLUTION
- int do_convolution = 0;
- if ((config.convolution) && (config.convolver_valid))
- do_convolution = 1;
+ // Apply DSP here
- // we will apply the convolution gain if convolution is enabled, even if there is no
- // valid convolution happening
+ loudness_update(conn);
- int convolution_is_enabled = 0;
- if (config.convolution)
- convolution_is_enabled = 1;
+ if (conn->do_loudness
+#ifdef CONFIG_CONVOLUTION
+ || config.convolution_enabled
#endif
+ ) {
+
+ float(*fbufs)[1024] = malloc(conn->input_num_channels * sizeof(*fbufs));
+ // debug(1, "size of array allocated is %d bytes.", conn->input_num_channels *
+ // sizeof(*fbufs));
+ int32_t *tbuf32 = conn->tbuf;
+
+ // Deinterleave, and convert to float
+ unsigned int i, j;
+ for (i = 0; i < inframe->length; i++) {
+ for (j = 0; j < conn->input_num_channels; j++) {
+ fbufs[j][i] = tbuf32[conn->input_num_channels * i + j];
+ }
+ }
- if (do_loudness
#ifdef CONFIG_CONVOLUTION
- || convolution_is_enabled
-#endif
- ) {
- int32_t *tbuf32 = (int32_t *)conn->tbuf;
- float fbuf_l[inbuflength];
- float fbuf_r[inbuflength];
-
- // Deinterleave, and convert to float
- int i;
- for (i = 0; i < inbuflength; ++i) {
- fbuf_l[i] = tbuf32[2 * i];
- fbuf_r[i] = tbuf32[2 * i + 1];
- }
+ // Apply convolution
+ // First, have we got the right convolution setup?
+
+ static int convolver_is_valid = 0;
+ static size_t current_convolver_block_size = 0;
+ static unsigned int current_convolver_rate = 0;
+ static unsigned int current_convolver_channels = 0;
+ static double current_convolver_maximum_length_in_seconds = 0;
+
+ if (config.convolution_enabled) {
+ if (
+ // if any of these are true, we need to create a new convolver
+ // (conn->convolver_is_valid == 0) ||
+ (current_convolver_block_size != inframe->length) ||
+ (current_convolver_rate != conn->input_rate) ||
+ !((current_convolver_channels == 1) ||
+ (current_convolver_channels == conn->input_num_channels)) ||
+ (current_convolver_maximum_length_in_seconds !=
+ config.convolution_max_length_in_seconds) ||
+ (config.convolution_ir_files_updated == 1)) {
+
+ // look for a convolution ir file with a matching rate and channel count
+
+ convolver_is_valid = 0; // declare any current convolver as invalid
+ current_convolver_block_size = inframe->length;
+ current_convolver_rate = conn->input_rate;
+ current_convolver_channels = conn->input_num_channels;
+ current_convolver_maximum_length_in_seconds =
+ config.convolution_max_length_in_seconds;
+ config.convolution_ir_files_updated = 0;
+ debug(2, "try to initialise a %u/%u convolver.", current_convolver_rate,
+ current_convolver_channels);
+ char *convolver_file_found = NULL;
+ unsigned int ir = 0;
+ while ((ir < config.convolution_ir_file_count) &&
+ (convolver_file_found == NULL)) {
+ if ((config.convolution_ir_files[ir].samplerate ==
+ current_convolver_rate) &&
+ (config.convolution_ir_files[ir].channels ==
+ current_convolver_channels)) {
+ convolver_file_found = config.convolution_ir_files[ir].filename;
+ } else {
+ ir++;
+ }
+ }
-#ifdef CONFIG_CONVOLUTION
- // Apply convolution
- if (do_convolution) {
- convolver_process_l(fbuf_l, inbuflength);
- convolver_process_r(fbuf_r, inbuflength);
- }
- if (convolution_is_enabled) {
- float gain = pow(10.0, config.convolution_gain / 20.0);
- for (i = 0; i < inbuflength; ++i) {
- fbuf_l[i] *= gain;
- fbuf_r[i] *= gain;
+ // if no luck, try for a single-channel IR file
+ if (convolver_file_found == NULL) {
+ current_convolver_channels = 1;
+ ir = 0;
+ while ((ir < config.convolution_ir_file_count) &&
+ (convolver_file_found == NULL)) {
+ if ((config.convolution_ir_files[ir].samplerate ==
+ current_convolver_rate) &&
+ (config.convolution_ir_files[ir].channels ==
+ current_convolver_channels)) {
+ convolver_file_found = config.convolution_ir_files[ir].filename;
+ } else {
+ ir++;
+ }
+ }
+ }
+ if (convolver_file_found != NULL) {
+ // we have an apparently suitable convolution ir file, so lets initialise
+ // a convolver
+ convolver_is_valid = convolver_init(
+ convolver_file_found, conn->input_num_channels,
+ config.convolution_max_length_in_seconds, inframe->length);
+ convolver_wait_for_all();
+ // if (convolver_is_valid)
+ // debug(1, "convolver_init for %u channels was successful.",
+ // conn->input_num_channels); convolver_is_valid = convolver_init(
+ // convolver_file_found, conn->input_num_channels,
+ // config.convolution_max_length_in_seconds, inframe->length);
+ }
+
+ if (convolver_is_valid == 0)
+ debug(1, "can not initialise a %u/%u convolver.", current_convolver_rate,
+ conn->input_num_channels);
+ else
+ debug(1, "convolver: \"%s\".", convolver_file_found);
+ }
+ if (convolver_is_valid != 0) {
+ for (j = 0; j < conn->input_num_channels; j++) {
+ // convolver_process(j, fbufs[j], inframe->length);
+ convolver_process(j, fbufs[j], inframe->length);
+ }
+ convolver_wait_for_all();
+ }
+
+ // apply convolution gain even if no convolution is done...
+ float gain = pow(10.0, config.convolution_gain / 20.0);
+ for (i = 0; i < inframe->length; ++i) {
+ for (j = 0; j < conn->input_num_channels; j++) {
+ float output_level_db = 0.0;
+ if (fbufs[j][i] < 0.0)
+ output_level_db = 20 * log10(fbufs[j][i] / (float)INT32_MIN * 1.0);
+ else
+ output_level_db = 20 * log10(fbufs[j][i] / (float)INT32_MAX);
+ if (output_level_db > highest_convolver_output_db) {
+ highest_convolver_output_db = output_level_db;
+ if ((highest_convolver_output_db + config.convolution_gain) > 0.0)
+ warn("clipping %.1f dB with convolution gain set to %.1f dB!",
+ highest_convolver_output_db + config.convolution_gain,
+ config.convolution_gain);
+ }
+ fbufs[j][i] *= gain;
+ }
+ }
}
- }
-#endif
- if (do_loudness) {
- // Apply volume and loudness
- // Volume must be applied here because the loudness filter will increase the
- // signal level and it would saturate the int32_t otherwise
- float gain = conn->fix_volume / 65536.0f;
- // float gain_db = 20 * log10(gain);
- // debug(1, "Applying soft volume dB: %f k: %f", gain_db, gain);
+#endif
+ if (conn->do_loudness) {
+ loudness_process_blocks((float *)fbufs, inframe->length,
+ conn->input_num_channels,
+ (float)conn->fix_volume / 65536);
+ }
- for (i = 0; i < inbuflength; ++i) {
- fbuf_l[i] = loudness_process(&loudness_l, fbuf_l[i] * gain);
- fbuf_r[i] = loudness_process(&loudness_r, fbuf_r[i] * gain);
+ // Interleave and convert back to int32_t
+ for (i = 0; i < inframe->length; i++) {
+ for (j = 0; j < conn->input_num_channels; j++) {
+ tbuf32[conn->input_num_channels * i + j] = fbufs[j][i];
+ }
}
- }
- // Interleave and convert back to int32_t
- for (i = 0; i < inbuflength; ++i) {
- tbuf32[2 * i] = fbuf_l[i];
- tbuf32[2 * i + 1] = fbuf_r[i];
+ if (fbufs != NULL) {
+ free(fbufs);
+ fbufs = NULL;
+ }
}
- }
-
+ // }
#ifdef CONFIG_SOXR
- if ((current_delay < conn->dac_buffer_queue_minimum_length) ||
- (config.packet_stuffing == ST_basic) ||
- (config.soxr_delay_index == 0) || // not computed
- ((config.packet_stuffing == ST_auto) &&
- (config.soxr_delay_index >
- config.soxr_delay_threshold)) // if the CPU is deemed too slow
- ) {
-#endif
- play_samples =
- stuff_buffer_basic_32((int32_t *)conn->tbuf, inbuflength, config.output_format,
- conn->outbuf, amount_to_stuff, conn->enable_dither, conn);
-#ifdef CONFIG_SOXR
- } else { // soxr requested or auto requested with the index less or equal to the
- // threshold
- play_samples = stuff_buffer_soxr_32((int32_t *)conn->tbuf, (int32_t *)conn->sbuf,
- inbuflength, config.output_format, conn->outbuf,
- amount_to_stuff, conn->enable_dither, conn);
- }
+ double t = config.audio_backend_buffer_interpolation_threshold_in_seconds *
+ RATE_FROM_ENCODED_FORMAT(config.current_output_configuration);
+
+ // figure out if we're going for soxr or something else
+
+ if ((current_delay < t) || // delay is too small we definitely won't do soxr
+ (config.packet_stuffing == ST_basic) ||
+ (config.packet_stuffing == ST_vernier) ||
+ ((config.packet_stuffing == ST_auto) &&
+ ((config.soxr_delay_index == 0) || // soxr processing time unknown
+ (config.soxr_delay_index > config.soxr_delay_threshold) // too slow
+ ))) {
+ debug(3, "current_delay: %" PRIu64 ", dac buffer queue minimum length: %f.",
+ current_delay, t);
#endif
+ if (config.packet_stuffing == ST_basic)
+ play_samples = stuff_buffer_basic_32(
+ (int32_t *)conn->tbuf, inbuflength,
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration),
+ conn->outbuf, amount_to_stuff, conn->enable_dither, conn);
+ else
+ play_samples = stuff_buffer_vernier(
+ (int32_t *)conn->tbuf, inbuflength,
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration),
+ conn->outbuf, amount_to_stuff, conn->enable_dither, conn);
- /*
- {
- int co;
- int is_silent=1;
- short *p = outbuf;
- for (co=0;co<play_samples;co++) {
- if (*p!=0)
- is_silent=0;
- p++;
+#ifdef CONFIG_SOXR
+ } else { // soxr requested or auto requested with the index less or equal to the
+ // threshold
+ play_samples = stuff_buffer_soxr_32(
+ (int32_t *)conn->tbuf, inbuflength,
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration), conn->outbuf,
+ amount_to_stuff, conn->enable_dither, conn);
}
- if (is_silent)
- debug(1,"Silence!");
- }
- */
+#endif
- if (conn->outbuf == NULL)
- debug(1, "NULL outbuf to play -- skipping it.");
- else {
- if (play_samples == 0)
- debug(1, "play_samples==0 skipping it (1).");
+ if (conn->outbuf == NULL)
+ debug(1, "NULL outbuf to play -- skipping it.");
else {
- if (conn->software_mute_enabled) {
- generate_zero_frames(conn->outbuf, play_samples, config.output_format,
- conn->enable_dither, conn->previous_random_number);
- }
- uint64_t should_be_time;
- frame_to_local_time(inframe->given_timestamp, &should_be_time, conn);
+ if (play_samples == 0)
+ debug(2, "nothing to play.");
+ else {
+ if (conn->software_mute_enabled) {
+ generate_zero_frames(conn->outbuf, play_samples, conn->enable_dither,
+ conn->previous_random_number,
+ config.current_output_configuration);
+ }
+ uint64_t should_be_time;
+ frame_to_local_time(inframe->timestamp, &should_be_time, conn);
+ // debug(1, "play frame %u.", inframe->timestamp);
+
+ // now, see if we are skipping some or all of these frames
+ if (frames_to_skip == 0) {
+ config.output->play(conn->outbuf, play_samples, play_samples_are_timed,
+ inframe->timestamp, should_be_time);
+ frames_played += play_samples;
+ } else {
+ if (frames_to_skip > (unsigned int)play_samples) {
+ debug(3, "skipping a packet of %u frames.", play_samples);
+ debug_print_buffer(
+ 3, conn->outbuf,
+ play_samples *
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) *
+ sps_format_sample_size(FORMAT_FROM_ENCODED_FORMAT(
+ config.current_output_configuration)));
+ frames_to_skip -= play_samples;
+ } else {
+
+ char *offset = conn->outbuf;
+ offset +=
+ frames_to_skip *
+ CHANNELS_FROM_ENCODED_FORMAT(config.current_output_configuration) *
+ sps_format_sample_size(
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration));
+ config.output->play(offset, play_samples - frames_to_skip,
+ play_samples_are_timed, inframe->timestamp,
+ should_be_time);
+
+ debug(3, "skipping the first %u frames in a packet of %u frames.",
+ frames_to_skip, play_samples);
+ debug_print_buffer(3, conn->outbuf, offset - conn->outbuf);
+
+ frames_played += play_samples - frames_to_skip;
+ frames_to_skip = 0;
+ skipping_frames_at_start_of_play = 0;
+ }
+ }
- config.output->play(conn->outbuf, play_samples, play_samples_are_timed,
- inframe->given_timestamp, should_be_time);
#ifdef CONFIG_METADATA
- // debug(1,"config.metadata_progress_interval is %f.",
- // config.metadata_progress_interval);
- if (config.metadata_progress_interval != 0.0) {
- char hb[128];
- if (this_is_the_first_frame != 0) {
- memset(hb, 0, 128);
- snprintf(hb, 127, "%" PRIu32 "/%" PRId64 "", inframe->given_timestamp,
- should_be_time);
- send_ssnc_metadata('phb0', hb, strlen(hb), 1);
- send_ssnc_metadata('phbt', hb, strlen(hb), 1);
- time_of_last_metadata_progress_update = local_time_now;
- } else {
- uint64_t mx = 1000000000;
- uint64_t iv = config.metadata_progress_interval * mx;
- iv = iv + time_of_last_metadata_progress_update;
- int64_t delta = iv - local_time_now;
- if (delta <= 0) {
+ // debug(1,"config.metadata_progress_interval is %f.",
+ // config.metadata_progress_interval);
+ if (config.metadata_progress_interval != 0.0) {
+ char hb[128];
+ if (this_is_the_first_frame != 0) {
memset(hb, 0, 128);
- snprintf(hb, 127, "%" PRIu32 "/%" PRId64 "", inframe->given_timestamp,
+ snprintf(hb, 127, "%" PRIu32 "/%" PRId64 "", inframe->timestamp,
should_be_time);
+ send_ssnc_metadata('phb0', hb, strlen(hb), 1);
send_ssnc_metadata('phbt', hb, strlen(hb), 1);
- time_of_last_metadata_progress_update = local_time_now;
+ time_of_last_metadata_progress_update = get_absolute_time_in_ns();
+ } else {
+ uint64_t mx = 1000000000;
+ uint64_t iv = config.metadata_progress_interval * mx;
+ iv = iv + time_of_last_metadata_progress_update;
+ int64_t delta = iv - get_absolute_time_in_ns();
+ if (delta <= 0) {
+ memset(hb, 0, 128);
+ snprintf(hb, 127, "%" PRIu32 "/%" PRId64 "", inframe->timestamp,
+ should_be_time);
+ send_ssnc_metadata('phbt', hb, strlen(hb), 1);
+ time_of_last_metadata_progress_update = get_absolute_time_in_ns();
+ }
}
}
- }
#endif
+ }
}
}
+ } else {
- // check for loss of sync
- // timestamp of zero means an inserted silent frame in place of a missing frame
- /*
- if ((config.no_sync == 0) && (inframe->timestamp != 0) &&
- && (config.resync_threshold > 0.0) &&
- (abs_sync_error > config.resync_threshold * config.output_rate)) {
- sync_error_out_of_bounds++;
- // debug(1,"Sync error out of bounds: Error: %lld; previous error: %lld; DAC: %lld;
- // timestamp: %llx, time now
- //
- %llx",sync_error,previous_sync_error,current_delay,inframe->timestamp,local_time_now);
- if (sync_error_out_of_bounds > 3) {
- debug(1, "Lost sync with source for %d consecutive packets -- flushing and "
- "resyncing. Error: %lld.",
- sync_error_out_of_bounds, sync_error);
- sync_error_out_of_bounds = 0;
- player_flush(nt, conn);
- }
- } else {
- sync_error_out_of_bounds = 0;
- }
- */
- }
- } else {
-
- // if this is the first frame, see if it's close to when it's supposed to be
- // release, which will be its time plus latency and any offset_time
- if (at_least_one_frame_seen_this_session == 0) {
-#ifdef CONFIG_METADATA
- this_is_the_first_frame = 1;
-#endif
- at_least_one_frame_seen_this_session = 1;
- }
+ // if this is the first frame, see if it's close to when it's supposed to be
+ // released, which will be its time plus latency and any offset_time
- play_samples =
- stuff_buffer_basic_32((int32_t *)conn->tbuf, inbuflength, config.output_format,
- conn->outbuf, 0, conn->enable_dither, conn);
- if (conn->outbuf == NULL)
- debug(1, "NULL outbuf to play -- skipping it.");
- else {
- if (conn->software_mute_enabled) {
- generate_zero_frames(conn->outbuf, play_samples, config.output_format,
- conn->enable_dither, conn->previous_random_number);
- }
- uint64_t should_be_time;
- frame_to_local_time(inframe->given_timestamp, &should_be_time, conn);
- config.output->play(conn->outbuf, play_samples, play_samples_are_timed,
- inframe->given_timestamp, should_be_time);
+ if (config.packet_stuffing == ST_basic)
+ play_samples = stuff_buffer_basic_32(
+ (int32_t *)conn->tbuf, inbuflength,
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration), conn->outbuf,
+ 0, conn->enable_dither, conn);
+ else
+ play_samples = stuff_buffer_vernier(
+ (int32_t *)conn->tbuf, inbuflength,
+ FORMAT_FROM_ENCODED_FORMAT(config.current_output_configuration), conn->outbuf,
+ 0, conn->enable_dither, conn);
+ if (conn->outbuf == NULL)
+ debug(1, "NULL outbuf to play -- skipping it.");
+ else {
+ if (conn->software_mute_enabled) {
+ generate_zero_frames(conn->outbuf, play_samples, conn->enable_dither,
+ conn->previous_random_number,
+ config.current_output_configuration);
+ }
+ uint64_t should_be_time;
+ frame_to_local_time(inframe->timestamp, &should_be_time, conn);
+ debug(3, "play frame %u.", inframe->timestamp);
+ config.output->play(conn->outbuf, play_samples, play_samples_are_timed,
+ inframe->timestamp, should_be_time);
+ frames_played += play_samples;
#ifdef CONFIG_METADATA
- // debug(1,"config.metadata_progress_interval is %f.",
- // config.metadata_progress_interval);
- if (config.metadata_progress_interval != 0.0) {
- char hb[128];
- if (this_is_the_first_frame != 0) {
- memset(hb, 0, 128);
- snprintf(hb, 127, "%" PRIu32 "/%" PRId64 "", inframe->given_timestamp,
- should_be_time);
- send_ssnc_metadata('phb0', hb, strlen(hb), 1);
- send_ssnc_metadata('phbt', hb, strlen(hb), 1);
- time_of_last_metadata_progress_update = local_time_now;
- } else {
- uint64_t mx = 1000000000;
- uint64_t iv = config.metadata_progress_interval * mx;
- iv = iv + time_of_last_metadata_progress_update;
- int64_t delta = iv - local_time_now;
- if (delta <= 0) {
+ // debug(1,"config.metadata_progress_interval is %f.",
+ // config.metadata_progress_interval);
+ if (config.metadata_progress_interval != 0.0) {
+ char hb[128];
+ if (this_is_the_first_frame != 0) {
memset(hb, 0, 128);
- snprintf(hb, 127, "%" PRIu32 "/%" PRId64 "", inframe->given_timestamp,
+ snprintf(hb, 127, "%" PRIu32 "/%" PRId64 "", inframe->timestamp,
should_be_time);
+ send_ssnc_metadata('phb0', hb, strlen(hb), 1);
send_ssnc_metadata('phbt', hb, strlen(hb), 1);
- time_of_last_metadata_progress_update = local_time_now;
+ time_of_last_metadata_progress_update = get_absolute_time_in_ns();
+ } else {
+ uint64_t mx = 1000000000;
+ uint64_t iv = config.metadata_progress_interval * mx;
+ iv = iv + time_of_last_metadata_progress_update;
+ int64_t delta = iv - get_absolute_time_in_ns();
+ if (delta <= 0) {
+ memset(hb, 0, 128);
+ snprintf(hb, 127, "%" PRIu32 "/%" PRId64 "", inframe->timestamp,
+ should_be_time);
+ send_ssnc_metadata('phbt', hb, strlen(hb), 1);
+ time_of_last_metadata_progress_update = get_absolute_time_in_ns();
+ }
}
}
- }
#endif
+ }
}
- }
-
- // mark the frame as finished
- inframe->given_timestamp = 0;
- inframe->sequence_number = 0;
- inframe->resend_time = 0;
- inframe->initialisation_time = 0;
- // if we've just printed out statistics, note that in the next interval
- // we haven't seen any frames yet
-
- if (play_number % print_interval == 0) {
- frames_seen_in_this_logging_interval = 0;
- }
-
- // debug(1,"Sync error %lld frames. Amount to stuff %d." ,sync_error,amount_to_stuff);
-
- // new stats calculation. We want a running average of sync error, drift, adjustment,
- // number of additions+subtractions
-
- // this is a misleading hack -- the statistics should include some data on the number of
- // valid samples and the number of times sync wasn't checked due to non availability of a
- // delay figure.
- // for the present, stats are only updated when sync has been checked
- if (config.output->delay != NULL) {
- if (number_of_statistics == trend_interval) {
- // here we remove the oldest statistical data and take it from the summaries as well
- tsum_of_sync_errors -= conn->statistics[oldest_statistic].sync_error;
- tsum_of_drifts -= conn->statistics[oldest_statistic].drift;
- if (conn->statistics[oldest_statistic].correction > 0)
- tsum_of_insertions_and_deletions -= conn->statistics[oldest_statistic].correction;
- else
- tsum_of_insertions_and_deletions += conn->statistics[oldest_statistic].correction;
- tsum_of_corrections -= conn->statistics[oldest_statistic].correction;
- oldest_statistic = (oldest_statistic + 1) % trend_interval;
- number_of_statistics--;
+ if (conn->tbuf) {
+ free(conn->tbuf);
+ conn->tbuf = NULL;
}
- conn->statistics[newest_statistic].sync_error = sync_error;
- conn->statistics[newest_statistic].correction = conn->amountStuffed;
-
- if (number_of_statistics == 0)
- conn->statistics[newest_statistic].drift = 0;
- else
- conn->statistics[newest_statistic].drift =
- sync_error - previous_sync_error - previous_correction;
-
- previous_sync_error = sync_error;
- previous_correction = conn->amountStuffed;
-
- tsum_of_sync_errors += sync_error;
- tsum_of_drifts += conn->statistics[newest_statistic].drift;
- if (conn->amountStuffed > 0) {
- tsum_of_insertions_and_deletions += conn->amountStuffed;
- } else {
- tsum_of_insertions_and_deletions -= conn->amountStuffed;
+ if (conn->outbuf) {
+ free(conn->outbuf);
+ conn->outbuf = NULL;
}
- tsum_of_corrections += conn->amountStuffed;
- conn->session_corrections += conn->amountStuffed;
-
- newest_statistic = (newest_statistic + 1) % trend_interval;
+ }
+ tsum_of_frames = tsum_of_frames + frames_played;
+ if (frames_played) {
number_of_statistics++;
+ frames_since_last_stats_logged += frames_played;
+ // stats accumulation. We want an average of sync error, drift, adjustment,
+ // number of additions+subtractions
+ tsum_of_sync_errors = tsum_of_sync_errors + sync_error;
+ tsum_of_corrections = tsum_of_corrections + amount_to_stuff;
+ tsum_of_insertions_and_deletions =
+ tsum_of_insertions_and_deletions + abs(amount_to_stuff);
}
}
+ // free buffers and mark the frame as finished
+#ifdef CONFIG_FFMPEG
+ if (inframe->avframe != NULL) {
+ av_frame_free(&inframe->avframe);
+ inframe->avframe = NULL;
+ }
+#endif
+ inframe->timestamp = 0;
+ inframe->sequence_number = 0;
+ inframe->resend_time = 0;
+ inframe->initialisation_time = 0;
+ inframe->timestamp_gap = 0;
+
+ } else {
+ debug(1, "audio block sequence number %u, ready status: %u with no data!",
+ inframe->sequence_number, inframe->ready);
}
+ free_audio_buffer_payload(inframe);
}
}
pthread_exit(NULL);
}
-static void player_send_volume_metadata(uint8_t vol_mode_both, double airplay_volume, double scaled_attenuation, int32_t max_db, int32_t min_db, int32_t hw_max_db)
-{
+static void player_send_volume_metadata(uint8_t vol_mode_both, double airplay_volume,
+ double scaled_attenuation, int32_t max_db, int32_t min_db,
+ int32_t hw_max_db) {
#ifdef CONFIG_METADATA
- // here, send the 'pvol' metadata message when the airplay volume information
- // is being used by shairport sync to control the output volume
- char dv[128];
- memset(dv, 0, 128);
- if (config.ignore_volume_control == 0) {
- if (vol_mode_both == 1) {
- // normalise the maximum output to the hardware device's max output
- snprintf(dv, 127, "%.2f,%.2f,%.2f,%.2f", airplay_volume,
- (scaled_attenuation - max_db + hw_max_db) / 100.0,
- (min_db - max_db + hw_max_db) / 100.0, (max_db - max_db + hw_max_db) / 100.0);
- } else {
- snprintf(dv, 127, "%.2f,%.2f,%.2f,%.2f", airplay_volume, scaled_attenuation / 100.0,
- min_db / 100.0, max_db / 100.0);
- }
+ // here, send the 'pvol' metadata message when the airplay volume information
+ // is being used by shairport sync to control the output volume
+ char dv[128];
+ memset(dv, 0, 128);
+ if (config.ignore_volume_control == 0) {
+ if (vol_mode_both == 1) {
+ // normalise the maximum output to the hardware device's max output
+ snprintf(dv, 127, "%.2f,%.2f,%.2f,%.2f", airplay_volume,
+ (scaled_attenuation - max_db + hw_max_db) / 100.0,
+ (min_db - max_db + hw_max_db) / 100.0, (max_db - max_db + hw_max_db) / 100.0);
} else {
- snprintf(dv, 127, "%.2f,%.2f,%.2f,%.2f", airplay_volume, 0.0, 0.0, 0.0);
+ snprintf(dv, 127, "%.2f,%.2f,%.2f,%.2f", airplay_volume, scaled_attenuation / 100.0,
+ min_db / 100.0, max_db / 100.0);
}
- send_ssnc_metadata('pvol', dv, strlen(dv), 1);
+ } else {
+ snprintf(dv, 127, "%.2f,%.2f,%.2f,%.2f", airplay_volume, 0.0, 0.0, 0.0);
+ }
+ send_ssnc_metadata('pvol', dv, strlen(dv), 1);
#else
(void)vol_mode_both;
(void)airplay_volume;
int32_t hw_max_db = 0, hw_min_db = 0; // zeroed to quieten an incorrect uninitialised warning
int32_t sw_max_db = 0, sw_min_db = -9630;
- if (config.output->parameters) {
+ // if the device is giving us a decibel-denominated volume range
+ if ((config.output->parameters != NULL) && (config.output->parameters()->volume_range != NULL)) {
volume_mode = vol_hw_only;
- audio_parameters audio_information;
- config.output->parameters(&audio_information);
- hw_max_db = audio_information.maximum_volume_dB;
- hw_min_db = audio_information.minimum_volume_dB;
+ hw_max_db = config.output->parameters()->volume_range->maximum_volume_dB;
+ hw_min_db = config.output->parameters()->volume_range->minimum_volume_dB;
if (config.volume_max_db_set) {
if (((config.volume_max_db * 100) <= hw_max_db) &&
((config.volume_max_db * 100) >= hw_min_db))
volume_mode, airplay_volume);
}
}
-
uint8_t vol_mode_both = (volume_mode == vol_both) ? 1 : 0;
player_send_volume_metadata(vol_mode_both, airplay_volume, 0, 0, 0, 0);
} else {
double temp_fix_volume = 65536.0 * pow(10, software_attenuation / 2000);
if (config.ignore_volume_control == 0)
- debug(2, "Software attenuation set to %f, i.e %f out of 65,536, for airplay volume of %f",
+ debug(3, "Software attenuation set to %f, i.e %f out of 65,536, for airplay volume of %f",
software_attenuation, temp_fix_volume, airplay_volume);
else
- debug(2, "Software attenuation set to %f, i.e %f out of 65,536. Volume control is ignored.",
+ debug(3, "Software attenuation set to %f, i.e %f out of 65,536. Volume control is ignored.",
software_attenuation, temp_fix_volume);
conn->fix_volume = temp_fix_volume;
-
- // if (config.loudness)
- loudness_set_volume(software_attenuation / 100);
}
+ if (conn != NULL)
+ debug(3, "Connection %d: AirPlay Volume set to %.3f, Output Level set to: %.2f dB.",
+ conn->connection_number, airplay_volume, scaled_attenuation / 100.0);
+ else
+ debug(3, "AirPlay Volume set to %.3f, Output Level set to: %.2f dB. NULL conn.",
+ airplay_volume, scaled_attenuation / 100.0);
if (config.logOutputLevel) {
inform("Output Level set to: %.2f dB.", scaled_attenuation / 100.0);
}
uint8_t vol_mode_both = (volume_mode == vol_both) ? 1 : 0;
- player_send_volume_metadata(vol_mode_both, airplay_volume, scaled_attenuation, max_db, min_db, hw_max_db);
+ player_send_volume_metadata(vol_mode_both, airplay_volume, scaled_attenuation, max_db, min_db,
+ hw_max_db);
if (config.output->mute)
config.output->mute(0);
conn->software_mute_enabled = 0;
- debug(2,
+ debug(3,
"player_volume_without_notification: volume mode is %d, airplay volume is %.2f, "
"software_attenuation dB: %.2f, hardware_attenuation dB: %.2f, muting "
"is disabled.",
void player_flush(uint32_t timestamp, rtsp_conn_info *conn) {
debug(3, "player_flush");
do_flush(timestamp, conn);
+#ifdef CONFIG_CONVOLUTION
+ convolver_clear_state();
+#endif
#ifdef CONFIG_METADATA
// only send a flush metadata message if the first packet has been seen -- it's a bogus message
// otherwise
#endif
}
-/*
-void player_full_flush(rtsp_conn_info *conn) {
- debug(3, "player_full_flush");
- // this basically flushes everything from the player
- // here, find the rtptime of the last from in the buffer and add 1 to it
- // so as to ask to flush everything
- int flush_needed = 0;
- uint32_t rtpTimestamp;
- debug_mutex_lock(&conn->ab_mutex, 30000, 0);
- if ((conn->ab_synced != 0) && (conn->ab_write != conn->ab_read)) {
- abuf_t *abuf = NULL;
- seq_t last_seqno_written;
- do {
- last_seqno_written = conn->ab_write - 1;
- abuf = conn->audio_buffer + BUFIDX(last_seqno_written);
- } while ((abuf->ready == 0) && (last_seqno_written != conn->ab_read));
- if ((abuf != NULL) && (abuf->ready != 0)) {
- rtpTimestamp = abuf->given_timestamp + abuf->length + 1;
- debug(2, "full flush needed to %u", rtpTimestamp);
- flush_needed = 1;
- } else {
- debug(2, "full flush not needed");
- }
- } else {
- debug(2, "full flush not needed -- buffers empty or not synced");
- }
- debug_mutex_unlock(&conn->ab_mutex, 0);
- if (flush_needed)
- player_flush(rtpTimestamp, conn);
-}
-*/
-
-// perpare_to_play and play are split so that we can get the capabilities of the
-// dac etc. before initialising any decoders etc.
-// for example, if we have 32-bit DACs, we can ask for 32 bit decodes
-
-int player_prepare_to_play(rtsp_conn_info *conn) {
- // need to use conn in place of stream below. Need to put the stream as a parameter to he
- if (conn->player_thread != NULL)
- die("Trying to create a second player thread for this RTSP session");
- if (config.buffer_start_fill > BUFFER_FRAMES)
- die("specified buffer starting fill %d > buffer size %d", config.buffer_start_fill,
- BUFFER_FRAMES);
- // active, and should be before play's command hook, command_start()
- command_start();
- conn->input_bytes_per_frame = 4; // default -- may be changed later
- // call on the output device to prepare itself
- if ((config.output) && (config.output->prepare))
- config.output->prepare();
- return 0;
-}
-
int player_play(rtsp_conn_info *conn) {
debug(2, "Connection %d: player_play.", conn->connection_number);
+ command_start(); // before startup, and before the prepare() method runs
+ // give the output device as much advance warning as possible to get ready
+ // and make sure it's done before launching the player thread
+ if (config.output->prepare)
+ config.output->prepare(); // give the backend its first chance to prepare itself, knowing it has
+ // access to the output device (i.e. knowing that it should not be in
+ // use by another program at this time).
+
pthread_cleanup_debug_mutex_lock(&conn->player_create_delete_mutex, 5000, 1);
if (conn->player_thread == NULL) {
pthread_t *pt = malloc(sizeof(pthread_t));
if (pt == NULL)
die("Couldn't allocate space for pthread_t");
- int rc = pthread_create(pt, NULL, player_thread_func, (void *)conn);
+
+ int rc = named_pthread_create_with_priority(pt, 3, player_thread_func, (void *)conn,
+ "player_%d", conn->connection_number);
if (rc)
debug(1, "Connection %d: error creating player_thread: %s", conn->connection_number,
strerror(errno));
#ifdef CONFIG_METADATA
send_ssnc_metadata('pbeg', NULL, 0, 1); // contains cancellation points
#endif
+ conn->is_playing = 1;
return 0;
}
debug(2, "Connection %d: player_stop successful.", conn->connection_number);
}
free(pt);
+ // reset_anchor_info(conn); // say the clock is no longer valid
+#ifdef CONFIG_CONVOLUTION
+ convolver_clear_state();
+#endif
response = 0; // deleted
} else {
debug(2, "Connection %d: no player thread.", conn->connection_number);
}
pthread_cleanup_pop(1); // release the player_create_delete_mutex
if (response == 0) { // if the thread was just stopped and deleted...
+ conn->is_playing = 0;
+/*
+// this is done in the player cleanup handler
#ifdef CONFIG_AIRPLAY_2
ptp_send_control_message_string("E"); // signify play is "E"nding
#endif
+*/
#ifdef CONFIG_METADATA
send_ssnc_metadata('pend', NULL, 0, 1); // contains cancellation points
#endif
#include <polarssl/havege.h>
#endif
-#ifdef CONFIG_OPENSSL
-#include <openssl/aes.h>
-#endif
-
#ifdef CONFIG_AIRPLAY_2
+#define MAX_DEFERRED_FLUSH_REQUESTS 10
#include "pair_ap/pair.h"
#include <plist/plist.h>
#endif
+#ifdef CONFIG_FFMPEG
+#include <libavcodec/avcodec.h>
+#include <libavformat/avformat.h>
+#include <libavutil/channel_layout.h>
+#include <libavutil/opt.h>
+#include <libswresample/swresample.h>
+#endif
+
#include "alac.h"
#include "audio.h"
+// clang-format off
+
+/*
+__________________________________________________________________________________________________________________________________
+* ALAC Specific Info (24 bytes) (mandatory)
+__________________________________________________________________________________________________________________________________
+
+The Apple Lossless codec stores specific information about the encoded stream in the ALACSpecificConfig. This
+info is vended by the encoder and is used to setup the decoder for a given encoded bitstream.
+
+When read from and written to a file, the fields of this struct must be in big-endian order.
+When vended by the encoder (and received by the decoder) the struct values will be in big-endian order.
+
+ struct ALACSpecificConfig (defined in ALACAudioTypes.h)
+ abstract This struct is used to describe codec provided information about the encoded Apple Lossless bitstream.
+ It must accompany the encoded stream in the containing audio file and be provided to the decoder.
+
+ field frameLength uint32_t indicating the frames per packet when no explicit frames per packet setting is
+ present in the packet header. The encoder frames per packet can be explicitly set
+ but for maximum compatibility, the default encoder setting of 4096 should be used.
+
+ field compatibleVersion uint8_t indicating compatible version,
+ value must be set to 0
+
+ field bitDepth uint8_t describes the bit depth of the source PCM data (maximum value = 32)
+
+ field pb uint8_t currently unused tuning parameter.
+ value should be set to 40
+
+ field mb uint8_t currently unused tuning parameter.
+ value should be set to 10
+
+ field kb uint8_t currently unused tuning parameter.
+ value should be set to 14
+
+ field numChannels uint8_t describes the channel count (1 = mono, 2 = stereo, etc...)
+ when channel layout info is not provided in the 'magic cookie', a channel count > 2
+ describes a set of discreet channels with no specific ordering
+
+ field maxRun uint16_t currently unused.
+ value should be set to 255
+
+ field maxFrameBytes uint32_t the maximum size of an Apple Lossless packet within the encoded stream.
+ value of 0 indicates unknown
+
+ field avgBitRate uint32_t the average bit rate in bits per second of the Apple Lossless stream.
+ value of 0 indicates unknown
+
+ field sampleRate uint32_t sample rate of the encoded stream
+ */
+
+// clang-format on
+
+typedef struct __attribute__((__packed__)) ALACSpecificConfig {
+ uint32_t frameLength;
+ uint8_t compatibleVersion;
+ uint8_t bitDepth;
+ uint8_t pb;
+ uint8_t mb;
+ uint8_t kb;
+ uint8_t numChannels;
+ uint16_t maxRun;
+ uint32_t maxFrameBytes;
+ uint32_t avgBitRate;
+ uint32_t sampleRate;
+
+} ALACSpecificConfig;
+
+// everything in here is big-endian, i.e. network byte order
+typedef struct __attribute__((__packed__)) alac_ffmpeg_magic_cookie {
+ uint32_t cookie_size; // 36 bytes
+ uint32_t cookie_tag; // 'alac'
+ uint32_t cookie_version; // 0
+ ALACSpecificConfig alac_config;
+} alac_ffmpeg_magic_cookie;
+
#define time_ping_history_power_of_two 7
// this must now be zero, otherwise bad things will happen
#define time_ping_history \
typedef uint16_t seq_t;
+// these are the values coming in on a buffered audio RTP packet's SSRC field
+// the apparent significances are as indicated.
+// Dolby Atmos seems to be 7P1
+typedef enum {
+ SSRC_NONE = 0,
+ ALAC_44100_S16_2 = 0x0000FACE, // this is made up
+ ALAC_48000_S24_2 = 0x15000000,
+ AAC_44100_F24_2 = 0x16000000,
+ AAC_48000_F24_2 = 0x17000000,
+ AAC_48000_F24_5P1 = 0x27000000,
+ AAC_48000_F24_7P1 = 0x28000000,
+} ssrc_t;
+
typedef struct audio_buffer_entry { // decoded audio packets
uint8_t ready;
uint8_t status; // flags
uint64_t initialisation_time; // the time the packet was added or the time it was noticed the
// packet was missing
uint64_t resend_time; // time of last resend request or zero
- uint32_t given_timestamp; // for debugging and checking
- int length; // the length of the decoded data
+ uint32_t timestamp; // for timing
+ int32_t timestamp_gap; // the difference between the timestamp and the expected timestamp.
+ size_t length; // the length of the decoded data (or silence requested) in input frames
+#ifdef CONFIG_FFMPEG
+ ssrc_t ssrc; // this is the type of this specific frame.
+ AVFrame *avframe; // In AP2 and optionally in AP1, an AVFrame will be
+ // used to carry audio rather than just a malloced memory space.
+#endif
} abuf_t;
typedef struct stats { // statistics for running averages
+ uint32_t timestamp; // timestamp (denominated in input frames)
+ size_t frames; // number of audio frames in the block (denominated in output frames)
int64_t sync_error, correction, drift;
} stats_t;
#define BUFFER_FRAMES 1024
+// maximum number of frames that can be added or removed from a packet
+#define INTERPOLATION_LIMIT 20
+
typedef enum {
ast_unknown,
ast_uncompressed, // L16/44100/2
sized_buffer encrypted_read_buffer;
sized_buffer plaintext_read_buffer;
int is_encrypted;
+ char *description;
} pair_cipher_bundle; // cipher context and buffers
typedef struct {
struct pair_setup_context *setup_ctx;
struct pair_verify_context *verify_ctx;
pair_cipher_bundle control_cipher_bundle;
+ pair_cipher_bundle event_cipher_bundle;
+ pair_cipher_bundle data_cipher_bundle;
+ char *data_cipher_salt;
} ap2_pairing;
-// flush requests are stored in order of flushFromSeq
-// on the basis that block numbers are monotonic modulo 2^24
-typedef struct flush_request_t {
- int flushNow; // if true, the flushFrom stuff is invalid
- uint32_t flushFromSeq;
+typedef struct {
+ uint32_t inUse; // record free or contains a current flush record
+ uint32_t active; // set if blocks within the given range are being flushed.
uint32_t flushFromTS;
- uint32_t flushUntilSeq;
+ uint32_t flushFromSeq;
uint32_t flushUntilTS;
- struct flush_request_t *next;
-} flush_request_t;
+ uint32_t flushUntilSeq;
+} ap2_flush_request_t;
#endif
typedef struct {
- int connection_number; // for debug ID purposes, nothing else...
- int resend_interval; // this is really just for debugging
- int rtsp_link_is_idle; // if true, this indicates if the client asleep
- char *UserAgent; // free this on teardown
- int AirPlayVersion; // zero if not an AirPlay session. Used to help calculate latency
+ int connection_number; // for debug ID purposes, nothing else...
+ int is_playing; // set true by player_play, set false by player_stop
+ unsigned int sync_samples_index; // for estimating the gap between the highest and lowest timing
+ // error over the past n samples
+ unsigned int sync_samples_count; // the array of samples is defined locally
+ int at_least_one_frame_seen_this_session; // set when the first frame is output
+ int resend_interval; // this is really just for debugging
+ char *UserAgent; // free this on teardown
+ int AirPlayVersion; // zero if not an AirPlay session. Used to help calculate latency
int latency_warning_issued;
uint32_t latency; // the actual latency used for this play session
uint32_t minimum_latency; // set if an a=min-latency: line appears in the ANNOUNCE message; zero
// buffers to delete on exit
int32_t *tbuf;
- int32_t *sbuf;
char *outbuf;
// for generating running statistics...
- stats_t *statistics;
+ // stats_t *statistics;
// for holding the output rate information until printed out at the end of a session
double raw_frame_rate;
// other stuff...
pthread_t *player_thread;
abuf_t audio_buffer[BUFFER_FRAMES];
- unsigned int max_frames_per_packet, input_num_channels, input_bit_depth, input_rate;
- int input_bytes_per_frame, output_bytes_per_frame, output_sample_ratio;
- int max_frame_size_change;
+ unsigned int frames_per_packet, input_num_channels, input_bit_depth, input_effective_bit_depth,
+ input_rate;
+ int input_bytes_per_frame;
+ unsigned int output_sample_ratio;
+ unsigned int output_bit_depth;
int64_t previous_random_number;
alac_file *decoder_info;
uint64_t packet_count;
uint64_t missing_packets, late_packets, too_late_packets, resend_requests;
int decoder_in_use;
// debug variables
- int32_t last_seqno_read;
+ int last_seqno_valid;
+ seq_t last_seqno_read;
// mutexes and condition variables
pthread_cond_t flowcontrol;
pthread_mutex_t ab_mutex, flush_mutex, volume_control_mutex, player_create_delete_mutex;
double own_airplay_volume;
int own_airplay_volume_set;
- uint32_t timestamp_epoch, last_timestamp,
- maximum_timestamp_interval; // timestamp_epoch of zero means not initialised, could start at 2
- // or 1.
int ab_buffering, ab_synced;
- int64_t first_packet_timestamp;
+ uint32_t first_packet_timestamp;
int flush_requested;
int flush_output_flushed; // true if the output device has been flushed.
uint32_t flush_rtp_timestamp;
uint64_t time_of_last_audio_packet;
seq_t ab_read, ab_write;
+ int do_loudness; // if loudness is requested and there is no external mixer
+
#ifdef CONFIG_MBEDTLS
mbedtls_aes_context dctx;
#endif
aes_context dctx;
#endif
- int amountStuffed;
-
int32_t framesProcessedInThisEpoch;
int32_t framesGeneratedInThisEpoch;
int32_t correctionsRequestedInThisEpoch;
pthread_t rtp_realtime_audio_thread;
pthread_t rtp_buffered_audio_thread;
+ int ap2_event_receiver_exited;
+
int last_anchor_info_is_valid;
uint32_t last_anchor_rtptime;
uint64_t last_anchor_local_time;
uint64_t last_anchor_time_of_update;
uint64_t last_anchor_validity_start_time;
+ int ap2_immediate_flush_requested;
+ uint32_t ap2_immediate_flush_until_rtp_timestamp;
+ uint32_t ap2_immediate_flush_until_sequence_number;
+
+ ap2_flush_request_t ap2_deferred_flush_requests[MAX_DEFERRED_FLUSH_REQUESTS];
+
ssize_t ap2_audio_buffer_size;
ssize_t ap2_audio_buffer_minimum_size;
- flush_request_t *flush_requests; // if non-null, there are flush requests, mutex protected
- int ap2_flush_requested;
- int ap2_flush_from_valid;
- uint32_t ap2_flush_from_rtp_timestamp;
- uint32_t ap2_flush_from_sequence_number;
- uint32_t ap2_flush_until_rtp_timestamp;
- uint32_t ap2_flush_until_sequence_number;
+
int ap2_rate; // protect with flush mutex, 0 means don't play, 1 means play
int ap2_play_enabled; // protect with flush mutex
ap2_pairing ap2_pairing_context;
+ struct pair_result *pair_setup_result; // need to keep the shared secret
int event_socket;
int data_socket;
uint64_t audio_format;
uint64_t compression;
unsigned char *session_key; // needs to be free'd at the end
+ char *ap2_client_name; // needs to be free'd at teardown phase 2
uint64_t frames_packet;
- uint64_t type;
- uint64_t networkTimeTimelineID; // the clock ID used by the player
+ uint64_t type; // 96 (Realtime Audio), 103 (Buffered Audio), 130 (Remote Control)
+ uint64_t networkTimeTimelineID; // the clock ID used by the player
uint8_t groupContainsGroupLeader; // information coming from the SETUP
+ uint64_t compressionType;
+#endif
+
+#ifdef CONFIG_FFMPEG
+ ssrc_t incoming_ssrc; // The SSRC of incoming packets. In AirPlay 2, the RTP SSRC seems to encode
+ // something about the contents of the packet -- Atmos/etc. We use it also
+ // even in AP1 as a code
+ ssrc_t resampler_ssrc; // the SSRC of packets for which the software resampler has been set up.
+ // normally it's the same as that of incoming packets, but if the encoding of incoming packets
+ // changes dynamically and the decoding chain hasn't been reset, the resampler will have to deal
+ // with queued AVFrames encoded according to the previous SSRC.
+ const AVCodec *codec;
+ AVCodecContext *codec_context;
+ // the swr can't be used just after the incoming packet has been decoded as explained below
+
+ // The reasons that resampling can not occur when the packet initially arrives are twofold.
+ // Resampling requires input samples from before and after the resampling instant.
+ // So, at the end of a block, since the subsequent samples aren't in the block, resampling
+ // is deferred until the next block is loaded. From this, the two reasons follow:
+ // First, the "next" block to be provided in player_put_packet is not guaranteed to be
+ // the next block in sequence -- packets can arrive out of sequence in UDP transmission.
+ // Second, not all the frames that should be generated for a block will be generated
+ // by a call to swr_convert. The frames that can't be calculated will not be provided, and
+ // will be held back and provided to the subsequent call.
+  // That means that the first frame output by swr_convert will, in general,
+  // not correspond to the first frame provided to it, throwing
+  // timing calculations off.
+
+ // In summary, we have to wait until (1) we have all the blocks in order,
+ // and (2) we have to track the number of resampler output frames to
+ // keep the correspondence between them and the input frames.
+
+ // We can calculate the "deficit" between the number of frames that should be generated
+ // versus the number of frames actually generated.
+
+ // For example, converting 352 frames at 44,100 to 48,000 should result
+ // in 352 * 48000 / 44100, or 383.129252 frames.
+
+ // Say only 360 frames are actually produced, then the deficit is 23.129252.
+
+ // If those 360 frames are sent to the output device, then the timing of the next
+ // block of 352 frames will be ahead by 23.129252 frames at 48,000 fps -- about 0.48 ms.
+
+ // We need to add the delay corresponding to the frames that should have been sent to
+ // keep timing correct. I.e. when calculating the buffer delay at the start of the following
+  // block, those 23.129252 frames that were not actually sent should be added to it.
+
+ // The "deficit" can readily be kept up to date and can be always added to the
+  // DAC buffer delay to exactly compensate for the frames that were not sent.
+
+  SwrContext *swr; // this will do transcoding and resampling, if necessary, just prior to output
+ int ffmpeg_decoding_chain_initialised;
+ int64_t resampler_output_channels;
+ int resampler_output_bytes_per_sample;
+ int64_t frames_retained_in_the_resampler; // swr will retain frames it hasn't finished processing
+  // they'll come out before the frames corresponding to the start of the next block passed to
+  // swr_convert, so we need to compensate for their absence in sync timing
+ unsigned int output_channel_to_resampler_channel_map[8];
+ unsigned int output_channel_map_size;
#endif
// used as the initials values for calculating the rate at which the source thinks it's sending
void *dapo_private_storage; // this is used for compatibility, if dacp stuff isn't enabled.
int enable_dither; // needed for filling silences before play actually starts
- uint64_t dac_buffer_queue_minimum_length;
} rtsp_conn_info;
extern int statistics_row; // will be reset to zero when debug level changes or statistics enabled
void reset_buffer(rtsp_conn_info *conn);
-void get_audio_buffer_size_and_occupancy(unsigned int *size, unsigned int *occupancy,
- rtsp_conn_info *conn);
+size_t get_audio_buffer_occupancy(rtsp_conn_info *conn);
int32_t modulo_32_offset(uint32_t from, uint32_t to);
void ab_resync(rtsp_conn_info *conn);
-int player_prepare_to_play(rtsp_conn_info *conn);
int player_play(rtsp_conn_info *conn);
int player_stop(rtsp_conn_info *conn);
void player_volume_without_notification(double f, rtsp_conn_info *conn);
void player_flush(uint32_t timestamp, rtsp_conn_info *conn);
// void player_full_flush(rtsp_conn_info *conn);
-void player_put_packet(int original_format, seq_t seqno, uint32_t actual_timestamp, uint8_t *data,
- int len, rtsp_conn_info *conn);
+
+seq_t get_revised_seqno(rtsp_conn_info *conn, uint32_t timestamp);
+void clear_buffers_from(rtsp_conn_info *conn, seq_t from_here);
+uint32_t player_put_packet(uint32_t ssrc, seq_t seqno, uint32_t actual_timestamp, uint8_t *data,
+ size_t len, int mute, int32_t timestamp_gap, rtsp_conn_info *conn);
int64_t monotonic_timestamp(uint32_t timestamp,
rtsp_conn_info *conn); // add an epoch to the timestamp. The monotonic
// timestamp guaranteed to start between 2^32 2^33
double suggested_volume(rtsp_conn_info *conn); // volume suggested for the connection
+const char *get_ssrc_name(ssrc_t ssrc);
+size_t get_ssrc_block_length(ssrc_t ssrc);
+
+const char *get_category_string(airplay_stream_c cat);
+
+int ssrc_is_recognised(ssrc_t ssrc);
+int ssrc_is_aac(ssrc_t ssrc); // used to decide if a mute might be needed (AAC only)
+void prepare_decoding_chain(rtsp_conn_info *conn, ssrc_t ssrc); // also sets up timing stuff
+void clear_decoding_chain(rtsp_conn_info *conn); // tear down the decoding chain
+
#endif //_PLAYER_H
<?xml version="1.0" encoding="UTF-8"?>
-<!-- Comment -->
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
- <key>audioLatencies</key>
- <array>
- <dict>
- <key>inputLatencyMicros</key>
- <integer>0</integer>
- <key>outputLatencyMicros</key>
- <integer>400000</integer>
- <key>type</key>
- <integer>100</integer>
- </dict>
- <dict>
- <key>audioType</key>
- <string>default</string>
- <key>inputLatencyMicros</key>
- <integer>0</integer>
- <key>outputLatencyMicros</key>
- <integer>400000</integer>
- <key>type</key>
- <integer>100</integer>
- </dict>
- <dict>
- <key>audioType</key>
- <string>media</string>
- <key>inputLatencyMicros</key>
- <integer>0</integer>
- <key>outputLatencyMicros</key>
- <integer>400000</integer>
- <key>type</key>
- <integer>100</integer>
- </dict>
- <dict>
- <key>audioType</key>
- <string>media</string>
- <key>inputLatencyMicros</key>
- <integer>0</integer>
- <key>outputLatencyMicros</key>
- <integer>400000</integer>
- <key>type</key>
- <integer>102</integer>
- </dict>
- </array>
- <key>keepAliveLowPower</key>
- <true/>
+ <key>vv</key>
+ <integer>2</integer>
+ <key>playbackCapabilities</key>
+ <dict>
+ <key>supportsInterstitials</key>
+ <false/>
+ <key>supportsFPSSecureStop</key>
+ <false/>
+ <key>supportsUIForAudioOnlyContent</key>
+ <false/>
+ </dict>
+ <key>canRecordScreenStream</key>
+ <false/>
<key>keepAliveSendStatsAsBody</key>
- <true/>
- <key>manufacturer</key>
- <string>Shairport Sync</string>
- <key>nameIsFactoryDefault</key>
<false/>
<key>protocolVersion</key>
<string>1.1</string>
- <key>sdk</key>
- <string>AirPlay;2.0.2</string>
- <key>sourceVersion</key>
- <string>366.0</string>
- <key>statusFlags</key>
- <integer>4</integer>
- <!-- key values for deviceID, features, pi, name, model and a txtAirPlay data field are added programatically -->
+ <key>volumeControlType</key>
+ <integer>3</integer>
+ <!-- <key>senderAddress</key> --> <!-- hoping we don't need this -->
+ <!-- <string>192.168.50.63:45008</string> -->
+ <key>screenDemoMode</key>
+ <false/>
+ <!-- <key>initialVolume</key> --> <!-- added programmatically -->
+ <!-- <real>-18.612697601318359</real> -->
+ <!-- <key>featuresEx</key> -->
+ <!-- <string>AMp/StBLNbw</string> --> <!-- added programmatically -->
+ <!-- <key>supportedFormats</key> --> <!-- added programmatically -->
+ <!-- <dict> -->
+ <!-- <key>audioStream</key> -->
+ <!--<integer>21235712</integer> --> <!-- looks like: 0x1440800 -->
+ <!--<key>bufferStream</key> -->
+ <!-- This seems to tell the client what formats it supports. -->
+ <!-- See https://emanuelecozzi.net/docs/airplay2/audio for an incomplete list -->
+ <!-- <integer>4017212516896604160</integer> --> <!-- looks like: 0x37C0018000E00000, from ATV4K: lots of unknown + AAC-LC/48K/F24/7.1, AAC-LC/48K/F24/5.1, ALAC/48000/S24/2, AAC-LC/48000/F24/2 and AAC-LC/44100/F24/2 -->
+ <!-- <integer>2190448001024</integer> --> <!-- looks like: 0x1FE00E00000, lots of guesses of unknowns + AAC-LC/48K/F24/7.1, AAC-LC/48K/F24/5.1, ALAC/48000/S24/2, AAC-LC/48000/F24/2 and AAC-LC/44100/F24/2 -->
+ <!-- <integer>1649282121728</integer> --> <!-- looks like: 0x18000E00000, AAC-LC/48K/F24/7.1, AAC-LC/48K/F24/5.1, ALAC/48000/S24/2, AAC-LC/48000/F24/2 and AAC-LC/44100/F24/2 -->
+ <!-- <integer>-577021992845180928</integer> --> <!-- looks like: 0xF7FE 018E 00E0 0000, from HPm 16.3.2 -->
+ <!-- 0x10000000000 is AAC-LC/48K/F24/7.1 -->
+ <!-- 0x08000000000 is AAC-LC/48K/F24/5.1 -->
+ <!-- 0x00000800000 is AAC-LC/48K/F24/2 -->
+ <!-- 0x00000400000 is AAC-LC/44.1K/F24/2 -->
+ <!-- 0x00000200000 is ALAC/48K/F24/2 -->
+
+ <!-- </dict> -->
+ <!-- <key>sourceVersion</key> --> <!-- added programmatically -->
+ <!-- <string>620.8.2</string> -->
+ <!-- <key>macAddress</key> --> <!-- hoping we don't need this -->
+ <!-- <string>b8:27:eb:82:d0:87</string> -->
+ <key>receiverHDRCapability</key>
+ <string>4k60</string>
+ <!-- key values for initialVolume, sourceVersion, deviceID, features, pi, name, model, featuresEx and a txtAirPlay data field are added programmatically -->
<!-- key values for firmwareBuildDate (string), build (string) and PTPInfo (string)
should be added programatically -->
</dict>
-</plist>
+</plist>
\ No newline at end of file
/*
* This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2020 -- 2023
+ * Copyright (c) Mike Brady 2020--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#ifdef COMPILE_FOR_FREEBSD
#include <netinet/in.h>
#endif
+#include <netdb.h>
#include <sys/socket.h>
#include <sys/types.h>
-#include <netdb.h>
#define __STDC_FORMAT_MACROS
#include "common.h"
#include "ptp-utilities.h"
int ptp_shm_interface_open() {
int response = 0;
- debug(2, "ptp_shm_interface_open with mapped_addr = %" PRIuPTR "", mapped_addr);
+ debug(3, "ptp_shm_interface_open with mapped_addr = %" PRIuPTR "", (uintptr_t)mapped_addr);
if ((mapped_addr == NULL) || (mapped_addr == MAP_FAILED)) {
response = -1;
if (mapped_addr == NULL)
debug(1, "No config.nqptp_shared_memory_interface_name");
}
if (response == 0)
- debug(2, "ptp_shm_interface_open -- success!");
+ debug(3, "ptp_shm_interface_open -- success!");
else
- debug(2, "ptp_shm_interface_open -- fail!");
+ debug(3, "ptp_shm_interface_open -- fail!");
} else {
- debug(2, "ptp_shm_interface_open -- already open!");
+ debug(3, "ptp_shm_interface_open -- already open!");
}
return response;
}
}
/* Send the message in buf to the server */
- if (sendto(s, full_message, full_message_size, 0, info->ai_addr, info->ai_addrlen) <
- 0) {
+ if (sendto(s, full_message, full_message_size, 0, info->ai_addr, info->ai_addrlen) < 0) {
die("error sending timing_peer_list to NQPTP");
}
/* Deallocate the socket */
-/*
- * This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2020 -- 2023
- * All rights reserved.
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or
- * sell copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
#ifndef __PTP_UTILITIES_H
#define __PTP_UTILITIES_H
/*
* Apple RTP protocol handler. This file is part of Shairport.
* Copyright (c) James Laird 2013
- * Copyright (c) Mike Brady 2014 -- 2019
+ * Copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#include <netdb.h>
#include <netinet/in.h>
#include <pthread.h>
+#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <time.h>
#include <unistd.h>
+
#ifdef CONFIG_AIRPLAY_2
+// #include "plist_xml_strings.h"
#include "ptp-utilities.h"
+#include "utilities/structured_buffer.h"
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
+#include <libavutil/channel_layout.h>
#include <libavutil/opt.h>
#include <libswresample/swresample.h>
#include <sodium.h>
#endif
+#ifdef CONFIG_CONVOLUTION
+#include "FFTConvolver/convolver.h"
+#endif
+
struct Nvll {
char *name;
double value;
uint64_t local_to_remote_time_jitter;
uint64_t local_to_remote_time_jitter_count;
-typedef struct {
- int closed;
- int error_code;
- int sock_fd;
- char *buffer;
- char *toq;
- char *eoq;
- size_t buffer_max_size;
- size_t buffer_occupancy;
- pthread_mutex_t mutex;
- pthread_cond_t not_empty_cv;
- pthread_cond_t not_full_cv;
-} buffered_tcp_desc;
+/*
+ char obf[4096];
+ char *obfp = obf;
+ size_t obfc;
+ for (obfc=0; obfc < strlen(buffer); obfc++) {
+ snprintf(obfp, 3, "%02X", buffer[obfc]);
+ obfp+=2;
+ };
+ *obfp=0;
+ debug(1,"Writing: \"%s\"",obf);
+
+*/
void check64conversion(const char *prompt, const uint8_t *source, uint64_t value) {
char converted_value[128];
}
void *rtp_audio_receiver(void *arg) {
- debug(3, "rtp_audio_receiver start");
+ // #include <syscall.h>
+ // debug(1, "rtp_audio_receiver PID %d", syscall(SYS_gettid));
pthread_cleanup_push(rtp_audio_receiver_cleanup_handler, arg);
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
if (plen >= 16) {
if ((config.diagnostic_drop_packet_fraction == 0.0) ||
(drand48() > config.diagnostic_drop_packet_fraction))
- player_put_packet(1, seqno, actual_timestamp, pktp, plen,
- conn); // the '1' means is original format
+ player_put_packet(ALAC_44100_S16_2, seqno, actual_timestamp, pktp, plen, 0, 0,
+ conn); // original format, no mute, not discontinuous
else
debug(3, "Dropping audio packet %u to simulate a bad connection.", seqno);
continue;
debug(2, "resend-related request packet received, ignoring.");
continue;
}
- debug(1, "Audio receiver -- Unknown RTP packet of type 0x%02X length %d seqno %d", type,
+ debug(1, "Audio receiver -- Unknown RTP packet of type 0x%02X length %zd seqno %d", type,
nread, seqno);
}
- warn("Audio receiver -- Unknown RTP packet of type 0x%02X length %d.", type, nread);
+ warn("Audio receiver -- Unknown RTP packet of type 0x%02X length %zd.", type, nread);
} else {
char em[1024];
strerror_r(errno, em, sizeof(em));
}
void *rtp_control_receiver(void *arg) {
- debug(2, "rtp_control_receiver start");
+ // #include <syscall.h>
+ // debug(1, "rtp_control_receiver PID %d", syscall(SYS_gettid));
pthread_cleanup_push(rtp_control_handler_cleanup_handler, arg);
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
ssize_t nread;
while (1) {
nread = recv(conn->control_socket, packet, sizeof(packet), 0);
- if (conn->rtsp_link_is_idle == 0) {
- if (nread >= 0) {
- if ((config.diagnostic_drop_packet_fraction == 0.0) ||
- (drand48() > config.diagnostic_drop_packet_fraction)) {
-
- ssize_t plen = nread;
- if (packet[1] == 0xd4) { // sync data
- // clang-format off
+ if (nread >= 0) {
+ if ((config.diagnostic_drop_packet_fraction == 0.0) ||
+ (drand48() > config.diagnostic_drop_packet_fraction)) {
+
+ ssize_t plen = nread;
+ if (packet[1] == 0xd4) { // sync data
+ // clang-format off
/*
// the following stanza is for debugging only -- normally commented out.
{
respectively.",monotonic_timestamp(rt, conn),monotonic_timestamp(rtlt, conn));
}
*/
- // clang-format off
- if (conn->local_to_remote_time_difference) { // need a time packet to be interchanged
- // first...
- uint64_t ps, pn;
+ // clang-format on
+ if (conn->local_to_remote_time_difference) { // need a time packet to be interchanged
+ // first...
+ uint64_t ps, pn;
- ps = nctohl(&packet[8]);
- ps = ps * 1000000000; // this many nanoseconds from the whole seconds
- pn = nctohl(&packet[12]);
- pn = pn * 1000000000;
- pn = pn >> 32; // this many nanoseconds from the fractional part
- remote_time_of_sync = ps + pn;
+ ps = nctohl(&packet[8]);
+ ps = ps * 1000000000; // this many nanoseconds from the whole seconds
+ pn = nctohl(&packet[12]);
+ pn = pn * 1000000000;
+ pn = pn >> 32; // this many nanoseconds from the fractional part
+ remote_time_of_sync = ps + pn;
- // debug(1,"Remote Sync Time: " PRIu64 "",remote_time_of_sync);
+ // debug(1,"Remote Sync Time: " PRIu64 "",remote_time_of_sync);
- sync_rtp_timestamp = nctohl(&packet[16]);
- uint32_t rtp_timestamp_less_latency = nctohl(&packet[4]);
+ sync_rtp_timestamp = nctohl(&packet[16]);
+ uint32_t rtp_timestamp_less_latency = nctohl(&packet[4]);
- // debug(1,"Sync timestamp is %u.",ntohl(*((uint32_t *)&packet[16])));
+ // debug(1,"Sync timestamp is %u.",ntohl(*((uint32_t *)&packet[16])));
- if (config.userSuppliedLatency) {
- if (config.userSuppliedLatency != conn->latency) {
- debug(1, "Using the user-supplied latency: %" PRIu32 ".",
- config.userSuppliedLatency);
- }
- conn->latency = config.userSuppliedLatency;
- } else {
+ if (config.userSuppliedLatency) {
+ if (config.userSuppliedLatency != conn->latency) {
+ debug(1, "Using the user-supplied latency: %" PRIu32 ".",
+ config.userSuppliedLatency);
+ }
+ conn->latency = config.userSuppliedLatency;
+ } else {
- // It seems that the second pair of bytes in the packet indicate whether a fixed
- // delay of 11,025 frames should be added -- iTunes set this field to 7 and
- // AirPlay sets it to 4.
+ // It seems that the second pair of bytes in the packet indicate whether a fixed
+ // delay of 11,025 frames should be added -- iTunes set this field to 7 and
+ // AirPlay sets it to 4.
- // However, on older versions of AirPlay, the 11,025 frames seem to be necessary too
+ // However, on older versions of AirPlay, the 11,025 frames seem to be necessary too
- // The value of 11,025 (0.25 seconds) is a guess based on the "Audio-Latency"
- // parameter
- // returned by an AE.
+ // The value of 11,025 (0.25 seconds) is a guess based on the "Audio-Latency"
+ // parameter
+ // returned by an AE.
- // Sigh, it would be nice to have a published protocol...
+ // Sigh, it would be nice to have a published protocol...
- uint16_t flags = nctohs(&packet[2]);
- uint32_t la = sync_rtp_timestamp - rtp_timestamp_less_latency; // note, this might
- // loop around in
- // modulo. Not sure if
- // you'll get an error!
- // debug(1, "Latency from the sync packet is %" PRIu32 " frames.", la);
+ uint16_t flags = nctohs(&packet[2]);
+ uint32_t la = sync_rtp_timestamp - rtp_timestamp_less_latency; // note, this might
+ // loop around in
+ // modulo. Not sure if
+ // you'll get an error!
+ // debug(1, "Latency from the sync packet is %" PRIu32 " frames.", la);
- if ((flags == 7) || ((conn->AirPlayVersion > 0) && (conn->AirPlayVersion <= 353)) ||
- ((conn->AirPlayVersion > 0) && (conn->AirPlayVersion >= 371))) {
- la += config.fixedLatencyOffset;
- // debug(1, "Latency offset by %" PRIu32" frames due to the source flags and version
- // giving a latency of %" PRIu32 " frames.", config.fixedLatencyOffset, la);
- }
- if ((conn->maximum_latency) && (conn->maximum_latency < la))
- la = conn->maximum_latency;
- if ((conn->minimum_latency) && (conn->minimum_latency > la))
- la = conn->minimum_latency;
-
- const uint32_t max_frames = ((3 * BUFFER_FRAMES * 352) / 4) - 11025;
-
- if (la > max_frames) {
- warn("An out-of-range latency request of %" PRIu32
- " frames was ignored. Must be %" PRIu32
- " frames or less (44,100 frames per second). "
- "Latency remains at %" PRIu32 " frames.",
- la, max_frames, conn->latency);
- } else {
-
- // here we have the latency but it does not yet account for the
- // audio_backend_latency_offset
- int32_t latency_offset =
- (int32_t)(config.audio_backend_latency_offset * conn->input_rate);
-
- // debug(1,"latency offset is %" PRId32 ", input rate is %u", latency_offset,
- // conn->input_rate);
- int32_t adjusted_latency = latency_offset + (int32_t)la;
- if ((adjusted_latency < 0) ||
- (adjusted_latency >
- (int32_t)(conn->max_frames_per_packet *
- (BUFFER_FRAMES - config.minimum_free_buffer_headroom))))
- warn("audio_backend_latency_offset out of range -- ignored.");
- else
- la = adjusted_latency;
-
- if (la != conn->latency) {
- conn->latency = la;
- debug(2,
- "New latency: %" PRIu32 ", sync latency: %" PRIu32
- ", minimum latency: %" PRIu32 ", maximum "
- "latency: %" PRIu32 ", fixed offset: %" PRIu32
- ", audio_backend_latency_offset: %f.",
- conn->latency, sync_rtp_timestamp - rtp_timestamp_less_latency,
- conn->minimum_latency, conn->maximum_latency, config.fixedLatencyOffset,
- config.audio_backend_latency_offset);
- }
+ if ((flags == 7) || ((conn->AirPlayVersion > 0) && (conn->AirPlayVersion <= 353)) ||
+ ((conn->AirPlayVersion > 0) && (conn->AirPlayVersion >= 371))) {
+ la += config.fixedLatencyOffset;
+ // debug(1, "Latency offset by %" PRIu32" frames due to the source flags and
+ // version giving a latency of %" PRIu32 " frames.", config.fixedLatencyOffset,
+ // la);
+ }
+ if ((conn->maximum_latency) && (conn->maximum_latency < la))
+ la = conn->maximum_latency;
+ if ((conn->minimum_latency) && (conn->minimum_latency > la))
+ la = conn->minimum_latency;
+
+ const uint32_t max_frames = ((3 * BUFFER_FRAMES * 352) / 4) - 11025;
+
+ if (la > max_frames) {
+ warn("An out-of-range latency request of %" PRIu32
+ " frames was ignored. Must be %" PRIu32
+ " frames or less (44,100 frames per second). "
+ "Latency remains at %" PRIu32 " frames.",
+ la, max_frames, conn->latency);
+ } else {
+
+ // here we have the latency but it does not yet account for the
+ // audio_backend_latency_offset
+ int32_t latency_offset =
+ (int32_t)(config.audio_backend_latency_offset * conn->input_rate);
+
+ // debug(1,"latency offset is %" PRId32 ", input rate is %u", latency_offset,
+ // conn->input_rate);
+ int32_t adjusted_latency = latency_offset + (int32_t)la;
+ if ((adjusted_latency < 0) ||
+ (adjusted_latency >
+ (int32_t)(conn->frames_per_packet *
+ (BUFFER_FRAMES - config.minimum_free_buffer_headroom))))
+ warn("audio_backend_latency_offset out of range -- ignored.");
+ else
+ la = adjusted_latency;
+
+ if (la != conn->latency) {
+ conn->latency = la;
+ debug(2,
+ "New latency: %" PRIu32 ", sync latency: %" PRIu32
+ ", minimum latency: %" PRIu32 ", maximum "
+ "latency: %" PRIu32 ", fixed offset: %" PRIu32
+ ", audio_backend_latency_offset: %f.",
+ conn->latency, sync_rtp_timestamp - rtp_timestamp_less_latency,
+ conn->minimum_latency, conn->maximum_latency, config.fixedLatencyOffset,
+ config.audio_backend_latency_offset);
}
}
+ }
- // here, we apply the latency to the sync_rtp_timestamp
+ // here, we apply the latency to the sync_rtp_timestamp
- sync_rtp_timestamp = sync_rtp_timestamp - conn->latency;
+ sync_rtp_timestamp = sync_rtp_timestamp - conn->latency;
- debug_mutex_lock(&conn->reference_time_mutex, 1000, 0);
+ debug_mutex_lock(&conn->reference_time_mutex, 1000, 0);
- if (conn->initial_reference_time == 0) {
- if (conn->packet_count_since_flush > 0) {
- conn->initial_reference_time = remote_time_of_sync;
- conn->initial_reference_timestamp = sync_rtp_timestamp;
- }
- } else {
- uint64_t remote_frame_time_interval =
- conn->anchor_time -
- conn->initial_reference_time; // here, this should never be zero
- if (remote_frame_time_interval) {
- conn->remote_frame_rate =
- (1.0E9 * (conn->anchor_rtptime - conn->initial_reference_timestamp)) /
- remote_frame_time_interval;
- } else {
- conn->remote_frame_rate = 0.0; // use as a flag.
- }
+ if (conn->initial_reference_time == 0) {
+ if (conn->packet_count_since_flush > 0) {
+ conn->initial_reference_time = remote_time_of_sync;
+ conn->initial_reference_timestamp = sync_rtp_timestamp;
}
-
- // this is for debugging
- uint64_t old_remote_reference_time = conn->anchor_time;
- uint32_t old_reference_timestamp = conn->anchor_rtptime;
- // int64_t old_latency_delayed_timestamp = conn->latency_delayed_timestamp;
- if (conn->anchor_remote_info_is_valid != 0) {
- int64_t time_difference = remote_time_of_sync - conn->anchor_time;
- int32_t frame_difference = sync_rtp_timestamp - conn->anchor_rtptime;
- double time_difference_in_frames = (1.0 * time_difference * conn->input_rate) / 1000000000;
- double frame_change = frame_difference - time_difference_in_frames;
- debug(2,"AP1 control thread: set_ntp_anchor_info: rtptime: %" PRIu32 ", networktime: %" PRIx64 ", frame adjustment: %7.3f.", sync_rtp_timestamp, remote_time_of_sync, frame_change);
+ } else {
+ uint64_t remote_frame_time_interval =
+ conn->anchor_time -
+ conn->initial_reference_time; // here, this should never be zero
+ if (remote_frame_time_interval) {
+ conn->remote_frame_rate =
+ (1.0E9 * (conn->anchor_rtptime - conn->initial_reference_timestamp)) /
+ remote_frame_time_interval;
} else {
- debug(2,"AP1 control thread: set_ntp_anchor_info: rtptime: %" PRIu32 ", networktime: %" PRIx64 ".", sync_rtp_timestamp, remote_time_of_sync);
+ conn->remote_frame_rate = 0.0; // use as a flag.
}
-
- conn->anchor_time = remote_time_of_sync;
- // conn->reference_timestamp_time =
- // remote_time_of_sync - local_to_remote_time_difference_now(conn);
- conn->anchor_rtptime = sync_rtp_timestamp;
- conn->anchor_remote_info_is_valid = 1;
-
-
- conn->latency_delayed_timestamp = rtp_timestamp_less_latency;
- debug_mutex_unlock(&conn->reference_time_mutex, 0);
-
- conn->reference_to_previous_time_difference =
- remote_time_of_sync - old_remote_reference_time;
- if (old_reference_timestamp == 0)
- conn->reference_to_previous_frame_difference = 0;
- else
- conn->reference_to_previous_frame_difference =
- sync_rtp_timestamp - old_reference_timestamp;
- } else {
- debug(2, "Sync packet received before we got a timing packet back.");
}
- } else if (packet[1] == 0xd6) { // resent audio data in the control path -- whaale only?
- pktp = packet + 4;
- plen -= 4;
- seq_t seqno = ntohs(*(uint16_t *)(pktp + 2));
- debug(3, "Control Receiver -- Retransmitted Audio Data Packet %u received.", seqno);
-
- uint32_t actual_timestamp = ntohl(*(uint32_t *)(pktp + 4));
-
- pktp += 12;
- plen -= 12;
-
- // check if packet contains enough content to be reasonable
- if (plen >= 16) {
- player_put_packet(1, seqno, actual_timestamp, pktp, plen,
- conn); // the '1' means is original format
- continue;
+
+ // this is for debugging
+ uint64_t old_remote_reference_time = conn->anchor_time;
+ uint32_t old_reference_timestamp = conn->anchor_rtptime;
+ // int64_t old_latency_delayed_timestamp = conn->latency_delayed_timestamp;
+ if (conn->anchor_remote_info_is_valid != 0) {
+ int64_t time_difference = remote_time_of_sync - conn->anchor_time;
+ int32_t frame_difference = sync_rtp_timestamp - conn->anchor_rtptime;
+ double time_difference_in_frames =
+ (1.0 * time_difference * conn->input_rate) / 1000000000;
+ double frame_change = frame_difference - time_difference_in_frames;
+ debug(2,
+ "AP1 control thread: set_ntp_anchor_info: rtptime: %" PRIu32
+ ", networktime: %" PRIx64 ", frame adjustment: %7.3f.",
+ sync_rtp_timestamp, remote_time_of_sync, frame_change);
} else {
- debug(3, "Too-short retransmitted audio packet received in control port, ignored.");
+ debug(2,
+ "AP1 control thread: set_ntp_anchor_info: rtptime: %" PRIu32
+ ", networktime: %" PRIx64 ".",
+ sync_rtp_timestamp, remote_time_of_sync);
}
- } else
- debug(1, "Control Receiver -- Unknown RTP packet of type 0x%02X length %d, ignored.",
- packet[1], nread);
- } else {
- debug(3, "Control Receiver -- dropping a packet to simulate a bad network.");
- }
- } else {
- char em[1024];
- strerror_r(errno, em, sizeof(em));
- debug(1, "Control Receiver -- error %d receiving a packet: \"%s\".", errno, em);
+ conn->anchor_time = remote_time_of_sync;
+ // conn->reference_timestamp_time =
+ // remote_time_of_sync - local_to_remote_time_difference_now(conn);
+ conn->anchor_rtptime = sync_rtp_timestamp;
+ conn->anchor_remote_info_is_valid = 1;
+
+ conn->latency_delayed_timestamp = rtp_timestamp_less_latency;
+ debug_mutex_unlock(&conn->reference_time_mutex, 0);
+
+ conn->reference_to_previous_time_difference =
+ remote_time_of_sync - old_remote_reference_time;
+ if (old_reference_timestamp == 0)
+ conn->reference_to_previous_frame_difference = 0;
+ else
+ conn->reference_to_previous_frame_difference =
+ sync_rtp_timestamp - old_reference_timestamp;
+ } else {
+ debug(2, "Sync packet received before we got a timing packet back.");
+ }
+ } else if (packet[1] == 0xd6) { // resent audio data in the control path -- whaale only?
+ pktp = packet + 4;
+ plen -= 4;
+ seq_t seqno = ntohs(*(uint16_t *)(pktp + 2));
+ debug(3, "Control Receiver -- Retransmitted Audio Data Packet %u received.", seqno);
+
+ uint32_t actual_timestamp = ntohl(*(uint32_t *)(pktp + 4));
+
+ pktp += 12;
+ plen -= 12;
+
+ // check if packet contains enough content to be reasonable
+ if (plen >= 16) {
+ // i.e. ssrc, sequence number, timestamp, data, data_length_in_bytes, mute,
+ // discontinuous, conn
+ player_put_packet(ALAC_44100_S16_2, seqno, actual_timestamp, pktp, plen, 0, 0,
+ conn); // original format, no mute, not discontinuous
+ continue;
+ } else {
+ debug(3, "Too-short retransmitted audio packet received in control port, ignored.");
+ }
+ } else
+ debug(1, "Control Receiver -- Unknown RTP packet of type 0x%02X length %zd, ignored.",
+ packet[1], nread);
+ } else {
+ debug(3, "Control Receiver -- dropping a packet to simulate a bad network.");
}
+ } else {
+
+ char em[1024];
+ strerror_r(errno, em, sizeof(em));
+ debug(1, "Control Receiver -- error %d receiving a packet: \"%s\".", errno, em);
}
}
debug(1, "Control RTP thread \"normal\" exit -- this can't happen. Hah!");
}
void *rtp_timing_sender(void *arg) {
- debug(2, "rtp_timing_sender start");
+ // #include <syscall.h>
+ // debug(1, "rtp_timing_sender PID %d", syscall(SYS_gettid));
pthread_cleanup_push(rtp_timing_sender_cleanup_handler, arg);
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
struct timing_request {
conn->time_ping_count = 0;
while (1) {
- if (conn->rtsp_link_is_idle == 0) {
- if (conn->udp_clock_sender_is_initialised == 0) {
- request_number = 0;
- conn->udp_clock_sender_is_initialised = 1;
- debug(2,"AP1 clock sender thread: initialised.");
- }
- // debug(1,"Send a timing request");
+ if (conn->udp_clock_sender_is_initialised == 0) {
+ request_number = 0;
+ conn->udp_clock_sender_is_initialised = 1;
+ debug(2, "AP1 clock sender thread: initialised.");
+ }
+ // debug(1,"Send a timing request");
- if (!conn->rtp_running)
- debug(1, "rtp_timing_sender called without active stream in RTSP conversation thread %d!",
- conn->connection_number);
+ if (!conn->rtp_running)
+ debug(1, "rtp_timing_sender called without active stream in RTSP conversation thread %d!",
+ conn->connection_number);
- // debug(1, "Requesting ntp timestamp exchange.");
+ // debug(1, "Requesting ntp timestamp exchange.");
- req.filler = 0;
- req.origin = req.receive = req.transmit = 0;
+ req.filler = 0;
+ req.origin = req.receive = req.transmit = 0;
- conn->departure_time = get_absolute_time_in_ns();
- socklen_t msgsize = sizeof(struct sockaddr_in);
- #ifdef AF_INET6
- if (conn->rtp_client_timing_socket.SAFAMILY == AF_INET6) {
- msgsize = sizeof(struct sockaddr_in6);
- }
- #endif
- if ((config.diagnostic_drop_packet_fraction == 0.0) ||
- (drand48() > config.diagnostic_drop_packet_fraction)) {
- if (sendto(conn->timing_socket, &req, sizeof(req), 0,
- (struct sockaddr *)&conn->rtp_client_timing_socket, msgsize) == -1) {
- char em[1024];
- strerror_r(errno, em, sizeof(em));
- debug(1, "Error %d using send-to to the timing socket: \"%s\".", errno, em);
- }
- } else {
- debug(3, "Timing Sender Thread -- dropping outgoing packet to simulate bad network.");
+ conn->departure_time = get_absolute_time_in_ns();
+ socklen_t msgsize = sizeof(struct sockaddr_in);
+#ifdef AF_INET6
+ if (conn->rtp_client_timing_socket.SAFAMILY == AF_INET6) {
+ msgsize = sizeof(struct sockaddr_in6);
+ }
+#endif
+ if ((config.diagnostic_drop_packet_fraction == 0.0) ||
+ (drand48() > config.diagnostic_drop_packet_fraction)) {
+ if (sendto(conn->timing_socket, &req, sizeof(req), 0,
+ (struct sockaddr *)&conn->rtp_client_timing_socket, msgsize) == -1) {
+ char em[1024];
+ strerror_r(errno, em, sizeof(em));
+ debug(1, "Error %d using send-to to the timing socket: \"%s\".", errno, em);
}
-
- request_number++;
-
- if (request_number <= 3)
- usleep(300000); // these are thread cancellation points
- else
- usleep(3000000);
} else {
- usleep(100000); // wait until sleep is over
+ debug(3, "Timing Sender Thread -- dropping outgoing packet to simulate bad network.");
}
+
+ request_number++;
+
+ if (request_number <= 3)
+ usleep(300000); // these are thread cancellation points
+ else
+ usleep(3000000);
}
debug(3, "rtp_timing_sender thread interrupted. This should never happen.");
pthread_cleanup_pop(0); // don't execute anything here.
while ((gradients) && (strcasecmp((const char *)&conn->client_ip_string, gradients->name) != 0))
gradients = gradients->next;
- // if gradients comes out of this non-null, it is pointing to the DACP and it's last-known
+ // if gradients comes out of this non-null, it is pointing to the DACP and its last-known
// gradient
if (gradients) {
gradients->value = conn->local_to_remote_time_gradient;
}
void *rtp_timing_receiver(void *arg) {
- debug(3, "rtp_timing_receiver start");
+ // #include <syscall.h>
+ // debug(1, "rtp_timing_receiver PID %d", syscall(SYS_gettid));
pthread_cleanup_push(rtp_timing_receiver_cleanup_handler, arg);
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
uint8_t packet[2048];
ssize_t nread;
- pthread_create(&conn->timer_requester, NULL, &rtp_timing_sender, arg);
+ named_pthread_create(&conn->timer_requester, NULL, &rtp_timing_sender, arg, "ap1_tim_req_%d",
+ conn->connection_number);
// struct timespec att;
uint64_t distant_receive_time, distant_transmit_time, arrival_time, return_time;
local_to_remote_time_jitter = 0;
while ((gradients) && (strcasecmp((const char *)&conn->client_ip_string, gradients->name) != 0))
gradients = gradients->next;
- // if gradients comes out of this non-null, it is pointing to the IP and it's last-known gradient
+ // if gradients comes out of this non-null, it is pointing to the IP and its last-known gradient
if (gradients) {
conn->local_to_remote_time_gradient = gradients->value;
// debug(1,"Using a stored drift of %.2f ppm for \"%s\".", (conn->local_to_remote_time_gradient
while (1) {
nread = recv(conn->timing_socket, packet, sizeof(packet), 0);
- if (conn->rtsp_link_is_idle == 0) {
- if (conn->udp_clock_is_initialised == 0) {
- debug(2,"AP1 clock receiver thread: initialised.");
- local_to_remote_time_jitter = 0;
- local_to_remote_time_jitter_count = 0;
+ if (conn->udp_clock_is_initialised == 0) {
+ debug(2, "AP1 clock receiver thread: initialised.");
+ local_to_remote_time_jitter = 0;
+ local_to_remote_time_jitter_count = 0;
- first_local_to_remote_time_difference = 0;
+ first_local_to_remote_time_difference = 0;
- sequence_number = 0;
- stat_n = 0;
- stat_mean = 0.0;
- conn->udp_clock_is_initialised = 1;
- }
- if (nread >= 0) {
- if ((config.diagnostic_drop_packet_fraction == 0.0) ||
- (drand48() > config.diagnostic_drop_packet_fraction)) {
- arrival_time = get_absolute_time_in_ns();
-
- // ssize_t plen = nread;
- // debug(1,"Packet Received on Timing Port.");
- if (packet[1] == 0xd3) { // timing reply
-
- return_time = arrival_time - conn->departure_time;
- debug(2, "clock synchronisation request: return time is %8.3f milliseconds.",
- 0.000001 * return_time);
-
- if (return_time < 200000000) { // must be less than 0.2 seconds
- // distant_receive_time =
- // ((uint64_t)ntohl(*((uint32_t*)&packet[16])))<<32+ntohl(*((uint32_t*)&packet[20]));
-
- uint64_t ps, pn;
-
- ps = nctohl(&packet[16]);
- ps = ps * 1000000000; // this many nanoseconds from the whole seconds
- pn = nctohl(&packet[20]);
- pn = pn * 1000000000;
- pn = pn >> 32; // this many nanoseconds from the fractional part
- distant_receive_time = ps + pn;
-
- // distant_transmit_time =
- // ((uint64_t)ntohl(*((uint32_t*)&packet[24])))<<32+ntohl(*((uint32_t*)&packet[28]));
-
- ps = nctohl(&packet[24]);
- ps = ps * 1000000000; // this many nanoseconds from the whole seconds
- pn = nctohl(&packet[28]);
- pn = pn * 1000000000;
- pn = pn >> 32; // this many nanoseconds from the fractional part
- distant_transmit_time = ps + pn;
-
- uint64_t remote_processing_time = 0;
-
- if (distant_transmit_time >= distant_receive_time)
- remote_processing_time = distant_transmit_time - distant_receive_time;
- else {
- debug(1, "Yikes: distant_transmit_time is before distant_receive_time; remote "
- "processing time set to zero.");
- }
- // debug(1,"Return trip time: %" PRIu64 " nS, remote processing time: %" PRIu64 "
- // nS.",return_time, remote_processing_time);
-
- if (remote_processing_time < return_time)
- return_time -= remote_processing_time;
+ sequence_number = 0;
+ stat_n = 0;
+ stat_mean = 0.0;
+ conn->udp_clock_is_initialised = 1;
+ }
+ if (nread >= 0) {
+ if ((config.diagnostic_drop_packet_fraction == 0.0) ||
+ (drand48() > config.diagnostic_drop_packet_fraction)) {
+ arrival_time = get_absolute_time_in_ns();
+
+ // ssize_t plen = nread;
+ // debug(1,"Packet Received on Timing Port.");
+ if (packet[1] == 0xd3) { // timing reply
+
+ return_time = arrival_time - conn->departure_time;
+ debug(2, "clock synchronisation request: return time is %8.3f milliseconds.",
+ 0.000001 * return_time);
+
+ if (return_time < 200000000) { // must be less than 0.2 seconds
+ // distant_receive_time =
+ // ((uint64_t)ntohl(*((uint32_t*)&packet[16])))<<32+ntohl(*((uint32_t*)&packet[20]));
+
+ uint64_t ps, pn;
+
+ ps = nctohl(&packet[16]);
+ ps = ps * 1000000000; // this many nanoseconds from the whole seconds
+ pn = nctohl(&packet[20]);
+ pn = pn * 1000000000;
+ pn = pn >> 32; // this many nanoseconds from the fractional part
+ distant_receive_time = ps + pn;
+
+ // distant_transmit_time =
+ // ((uint64_t)ntohl(*((uint32_t*)&packet[24])))<<32+ntohl(*((uint32_t*)&packet[28]));
+
+ ps = nctohl(&packet[24]);
+ ps = ps * 1000000000; // this many nanoseconds from the whole seconds
+ pn = nctohl(&packet[28]);
+ pn = pn * 1000000000;
+ pn = pn >> 32; // this many nanoseconds from the fractional part
+ distant_transmit_time = ps + pn;
+
+ uint64_t remote_processing_time = 0;
+
+ if (distant_transmit_time >= distant_receive_time)
+ remote_processing_time = distant_transmit_time - distant_receive_time;
+ else {
+ debug(1, "Yikes: distant_transmit_time is before distant_receive_time; remote "
+ "processing time set to zero.");
+ }
+ // debug(1,"Return trip time: %" PRIu64 " nS, remote processing time: %" PRIu64 "
+ // nS.",return_time, remote_processing_time);
+
+ if (remote_processing_time < return_time)
+ return_time -= remote_processing_time;
+ else
+ debug(1, "Remote processing time greater than return time -- ignored.");
+
+ int cc;
+ // debug(1, "time ping history is %d entries.", time_ping_history);
+ for (cc = time_ping_history - 1; cc > 0; cc--) {
+ conn->time_pings[cc] = conn->time_pings[cc - 1];
+ // if ((conn->time_ping_count) && (conn->time_ping_count < 10))
+ // conn->time_pings[cc].dispersion =
+ // conn->time_pings[cc].dispersion * pow(2.14,
+ // 1.0/conn->time_ping_count);
+ if (conn->time_pings[cc].dispersion > UINT64_MAX / dispersion_factor)
+ debug(1, "dispersion factor is too large at %" PRIu64 ".", dispersion_factor);
else
- debug(1, "Remote processing time greater than return time -- ignored.");
-
- int cc;
- // debug(1, "time ping history is %d entries.", time_ping_history);
- for (cc = time_ping_history - 1; cc > 0; cc--) {
- conn->time_pings[cc] = conn->time_pings[cc - 1];
- // if ((conn->time_ping_count) && (conn->time_ping_count < 10))
- // conn->time_pings[cc].dispersion =
- // conn->time_pings[cc].dispersion * pow(2.14,
- // 1.0/conn->time_ping_count);
- if (conn->time_pings[cc].dispersion > UINT64_MAX / dispersion_factor)
- debug(1, "dispersion factor is too large at %" PRIu64 ".");
- else
- conn->time_pings[cc].dispersion =
- (conn->time_pings[cc].dispersion * dispersion_factor) /
- 100; // make the dispersions 'age' by this rational factor
- }
- // these are used for doing a least squares calculation to get the drift
- conn->time_pings[0].local_time = arrival_time;
- conn->time_pings[0].remote_time = distant_transmit_time + return_time / 2;
- conn->time_pings[0].sequence_number = sequence_number++;
- conn->time_pings[0].chosen = 0;
- conn->time_pings[0].dispersion = return_time;
- if (conn->time_ping_count < time_ping_history)
- conn->time_ping_count++;
-
- // here, calculate the mean and standard deviation of the return times
-
- // mean and variance calculations from "online_variance" algorithm at
- // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm
-
- stat_n += 1;
- double stat_delta = return_time - stat_mean;
- stat_mean += stat_delta / stat_n;
- // stat_M2 += stat_delta * (return_time - stat_mean);
- // debug(1, "Timing packet return time stats: current, mean and standard deviation over
- // %d packets: %.1f, %.1f, %.1f (nanoseconds).",
- // stat_n,return_time,stat_mean, sqrtf(stat_M2 / (stat_n - 1)));
-
- // here, pick the record with the least dispersion, and record that it's been chosen
-
- // uint64_t local_time_chosen = arrival_time;
- // uint64_t remote_time_chosen = distant_transmit_time;
- // now pick the timestamp with the lowest dispersion
- uint64_t rt = conn->time_pings[0].remote_time;
- uint64_t lt = conn->time_pings[0].local_time;
- uint64_t tld = conn->time_pings[0].dispersion;
- int chosen = 0;
- for (cc = 1; cc < conn->time_ping_count; cc++)
- if (conn->time_pings[cc].dispersion < tld) {
- chosen = cc;
- rt = conn->time_pings[cc].remote_time;
- lt = conn->time_pings[cc].local_time;
- tld = conn->time_pings[cc].dispersion;
- // local_time_chosen = conn->time_pings[cc].local_time;
- // remote_time_chosen = conn->time_pings[cc].remote_time;
- }
- // debug(1,"Record %d has the lowest dispersion with %0.2f us
- // dispersion.",chosen,1.0*((tld * 1000000) >> 32));
- conn->time_pings[chosen].chosen = 1; // record the fact that it has been used for timing
-
- conn->local_to_remote_time_difference =
- rt - lt; // make this the new local-to-remote-time-difference
- conn->local_to_remote_time_difference_measurement_time = lt; // done at this time.
-
- if (first_local_to_remote_time_difference == 0) {
- first_local_to_remote_time_difference = conn->local_to_remote_time_difference;
- // first_local_to_remote_time_difference_time = get_absolute_time_in_fp();
+ conn->time_pings[cc].dispersion =
+ (conn->time_pings[cc].dispersion * dispersion_factor) /
+ 100; // make the dispersions 'age' by this rational factor
+ }
+ // these are used for doing a least squares calculation to get the drift
+ conn->time_pings[0].local_time = arrival_time;
+ conn->time_pings[0].remote_time = distant_transmit_time + return_time / 2;
+ conn->time_pings[0].sequence_number = sequence_number++;
+ conn->time_pings[0].chosen = 0;
+ conn->time_pings[0].dispersion = return_time;
+ if (conn->time_ping_count < time_ping_history)
+ conn->time_ping_count++;
+
+ // here, calculate the mean and standard deviation of the return times
+
+ // mean and variance calculations from "online_variance" algorithm at
+ // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm
+
+ stat_n += 1;
+ double stat_delta = return_time - stat_mean;
+ stat_mean += stat_delta / stat_n;
+ // stat_M2 += stat_delta * (return_time - stat_mean);
+ // debug(1, "Timing packet return time stats: current, mean and standard deviation
+ // over %d packets: %.1f, %.1f, %.1f (nanoseconds).",
+ // stat_n,return_time,stat_mean, sqrtf(stat_M2 / (stat_n - 1)));
+
+ // here, pick the record with the least dispersion, and record that it's been chosen
+
+ // uint64_t local_time_chosen = arrival_time;
+ // uint64_t remote_time_chosen = distant_transmit_time;
+ // now pick the timestamp with the lowest dispersion
+ uint64_t rt = conn->time_pings[0].remote_time;
+ uint64_t lt = conn->time_pings[0].local_time;
+ uint64_t tld = conn->time_pings[0].dispersion;
+ int chosen = 0;
+ for (cc = 1; cc < conn->time_ping_count; cc++)
+ if (conn->time_pings[cc].dispersion < tld) {
+ chosen = cc;
+ rt = conn->time_pings[cc].remote_time;
+ lt = conn->time_pings[cc].local_time;
+ tld = conn->time_pings[cc].dispersion;
+ // local_time_chosen = conn->time_pings[cc].local_time;
+ // remote_time_chosen = conn->time_pings[cc].remote_time;
}
+ // debug(1,"Record %d has the lowest dispersion with %0.2f us
+ // dispersion.",chosen,1.0*((tld * 1000000) >> 32));
+ conn->time_pings[chosen].chosen = 1; // record the fact that it has been used for timing
- // here, let's try to use the timing pings that were selected because of their short
- // return times to
- // estimate a figure for drift between the local clock (x) and the remote clock (y)
-
- // if we plug in a local interval, we will get back what that is in remote time
-
- // calculate the line of best fit for relating the local time and the remote time
- // we will calculate the slope, which is the drift
- // see https://www.varsitytutors.com/hotmath/hotmath_help/topics/line-of-best-fit
+ conn->local_to_remote_time_difference =
+ rt - lt; // make this the new local-to-remote-time-difference
+ conn->local_to_remote_time_difference_measurement_time = lt; // done at this time.
- uint64_t y_bar = 0; // remote timestamp average
- uint64_t x_bar = 0; // local timestamp average
- int sample_count = 0;
+ if (first_local_to_remote_time_difference == 0) {
+ first_local_to_remote_time_difference = conn->local_to_remote_time_difference;
+ // first_local_to_remote_time_difference_time = get_absolute_time_in_fp();
+ }
- // approximate time in seconds to let the system settle down
- const int settling_time = 60;
- // number of points to have for calculating a valid drift
- const int sample_point_minimum = 8;
+ // here, let's try to use the timing pings that were selected because of their short
+ // return times to
+ // estimate a figure for drift between the local clock (x) and the remote clock (y)
+
+ // if we plug in a local interval, we will get back what that is in remote time
+
+ // calculate the line of best fit for relating the local time and the remote time
+ // we will calculate the slope, which is the drift
+ // see https://www.varsitytutors.com/hotmath/hotmath_help/topics/line-of-best-fit
+
+ uint64_t y_bar = 0; // remote timestamp average
+ uint64_t x_bar = 0; // local timestamp average
+ int sample_count = 0;
+
+ // approximate time in seconds to let the system settle down
+ const int settling_time = 60;
+ // number of points to have for calculating a valid drift
+ const int sample_point_minimum = 8;
+ for (cc = 0; cc < conn->time_ping_count; cc++)
+ if ((conn->time_pings[cc].chosen) &&
+ (conn->time_pings[cc].sequence_number >
+ (settling_time / 3))) { // wait for an approximate settling time
+ // have to scale them down so that the sum, possibly
+ // over every term in the array, doesn't overflow
+ y_bar += (conn->time_pings[cc].remote_time >> time_ping_history_power_of_two);
+ x_bar += (conn->time_pings[cc].local_time >> time_ping_history_power_of_two);
+ sample_count++;
+ }
+ conn->local_to_remote_time_gradient_sample_count = sample_count;
+ if (sample_count > sample_point_minimum) {
+ y_bar = y_bar / sample_count;
+ x_bar = x_bar / sample_count;
+
+ int64_t xid, yid;
+ double mtl, mbl;
+ mtl = 0;
+ mbl = 0;
for (cc = 0; cc < conn->time_ping_count; cc++)
if ((conn->time_pings[cc].chosen) &&
- (conn->time_pings[cc].sequence_number >
- (settling_time / 3))) { // wait for a approximate settling time
- // have to scale them down so that the sum, possibly over
- // every term in the array, doesn't overflow
- y_bar += (conn->time_pings[cc].remote_time >> time_ping_history_power_of_two);
- x_bar += (conn->time_pings[cc].local_time >> time_ping_history_power_of_two);
- sample_count++;
- }
- conn->local_to_remote_time_gradient_sample_count = sample_count;
- if (sample_count > sample_point_minimum) {
- y_bar = y_bar / sample_count;
- x_bar = x_bar / sample_count;
-
- int64_t xid, yid;
- double mtl, mbl;
- mtl = 0;
- mbl = 0;
- for (cc = 0; cc < conn->time_ping_count; cc++)
- if ((conn->time_pings[cc].chosen) &&
- (conn->time_pings[cc].sequence_number > (settling_time / 3))) {
-
- uint64_t slt = conn->time_pings[cc].local_time >> time_ping_history_power_of_two;
- if (slt > x_bar)
- xid = slt - x_bar;
- else
- xid = -(x_bar - slt);
-
- uint64_t srt = conn->time_pings[cc].remote_time >> time_ping_history_power_of_two;
- if (srt > y_bar)
- yid = srt - y_bar;
- else
- yid = -(y_bar - srt);
-
- mtl = mtl + (1.0 * xid) * yid;
- mbl = mbl + (1.0 * xid) * xid;
- }
- if (mbl)
- conn->local_to_remote_time_gradient = mtl / mbl;
- else {
- // conn->local_to_remote_time_gradient = 1.0;
- debug(1, "mbl is zero. Drift remains at %.2f ppm.",
- (conn->local_to_remote_time_gradient - 1.0) * 1000000);
- }
+ (conn->time_pings[cc].sequence_number > (settling_time / 3))) {
- // scale the numbers back up
- uint64_t ybf = y_bar << time_ping_history_power_of_two;
- uint64_t xbf = x_bar << time_ping_history_power_of_two;
+ uint64_t slt = conn->time_pings[cc].local_time >> time_ping_history_power_of_two;
+ if (slt > x_bar)
+ xid = slt - x_bar;
+ else
+ xid = -(x_bar - slt);
- conn->local_to_remote_time_difference =
- ybf - xbf; // make this the new local-to-remote-time-difference
- conn->local_to_remote_time_difference_measurement_time = xbf;
+ uint64_t srt = conn->time_pings[cc].remote_time >> time_ping_history_power_of_two;
+ if (srt > y_bar)
+ yid = srt - y_bar;
+ else
+ yid = -(y_bar - srt);
- } else {
- debug(3, "not enough samples to estimate drift -- remaining at %.2f ppm.",
- (conn->local_to_remote_time_gradient - 1.0) * 1000000);
+ mtl = mtl + (1.0 * xid) * yid;
+ mbl = mbl + (1.0 * xid) * xid;
+ }
+ if (mbl)
+ conn->local_to_remote_time_gradient = mtl / mbl;
+ else {
// conn->local_to_remote_time_gradient = 1.0;
+ debug(1, "mbl is zero. Drift remains at %.2f ppm.",
+ (conn->local_to_remote_time_gradient - 1.0) * 1000000);
}
- // debug(1,"local to remote time gradient is %12.2f ppm, based on %d
- // samples.",conn->local_to_remote_time_gradient*1000000,sample_count);
- // debug(1,"ntp set offset and measurement time"); // iin PTP terms, this is the local-to-network offset and the local measurement time
+
+ // scale the numbers back up
+ uint64_t ybf = y_bar << time_ping_history_power_of_two;
+ uint64_t xbf = x_bar << time_ping_history_power_of_two;
+
+ conn->local_to_remote_time_difference =
+ ybf - xbf; // make this the new local-to-remote-time-difference
+ conn->local_to_remote_time_difference_measurement_time = xbf;
+
} else {
- debug(1,
- "Time ping turnaround time: %" PRIu64
- " ns -- it looks like a timing ping was lost.",
- return_time);
+ debug(3, "not enough samples to estimate drift -- remaining at %.2f ppm.",
+ (conn->local_to_remote_time_gradient - 1.0) * 1000000);
+ // conn->local_to_remote_time_gradient = 1.0;
}
+ // debug(1,"local to remote time gradient is %12.2f ppm, based on %d
+ // samples.",conn->local_to_remote_time_gradient*1000000,sample_count);
+ // debug(1,"ntp set offset and measurement time"); // in PTP terms, this is the
+ // local-to-network offset and the local measurement time
} else {
- debug(1, "Timing port -- Unknown RTP packet of type 0x%02X length %d.", packet[1], nread);
+ debug(1,
+ "Time ping turnaround time: %" PRIu64
+ " ns -- it looks like a timing ping was lost.",
+ return_time);
}
} else {
- debug(3, "Timing Receiver Thread -- dropping incoming packet to simulate a bad network.");
+ debug(1, "Timing port -- Unknown RTP packet of type 0x%02X length %zd.", packet[1], nread);
}
} else {
- debug(1, "Timing receiver -- error receiving a packet.");
+ debug(3, "Timing Receiver Thread -- dropping incoming packet to simulate a bad network.");
}
+ } else {
+ debug(1, "Timing receiver -- error receiving a packet.");
}
}
int frame_to_ntp_local_time(uint32_t timestamp, uint64_t *time, rtsp_conn_info *conn) {
// a zero result is good
if (conn->anchor_remote_info_is_valid == 0)
- debug(1,"no anchor information");
+ debug(1, "no anchor information");
debug_mutex_lock(&conn->reference_time_mutex, 1000, 0);
int result = -1;
- if (conn->anchor_remote_info_is_valid != 0) {
+ if (conn->anchor_remote_info_is_valid != 0) {
uint64_t remote_time_of_timestamp;
int32_t timestamp_interval = timestamp - conn->anchor_rtptime;
int64_t timestamp_interval_time = timestamp_interval;
timestamp_interval_time = timestamp_interval_time * 1000000000;
timestamp_interval_time =
- timestamp_interval_time / 44100; // this is the nominal time, based on the
+ timestamp_interval_time / conn->input_rate; // this is the nominal time, based on the
// fps specified between current and
// previous sync frame.
remote_time_of_timestamp =
// a zero result is good
debug_mutex_lock(&conn->reference_time_mutex, 1000, 0);
int result = -1;
- if (conn->anchor_remote_info_is_valid != 0) {
+ if (conn->anchor_remote_info_is_valid != 0) {
// first, get from [local] time to remote time.
uint64_t remote_time = time + local_to_remote_time_difference_now(conn);
// next, get the remote time interval from the remote_time to the reference time
// now, convert the remote time interval into frames using the frame rate we have observed or
// which has been nominated
int64_t frame_interval = 0;
- frame_interval = (offset * 44100) / 1000000000;
+ frame_interval = (offset * conn->input_rate) / 1000000000;
int32_t frame_interval_32 = frame_interval;
uint32_t new_frame = conn->anchor_rtptime + frame_interval_32;
// debug(1,"frame is %u.", new_frame);
if (frame != NULL)
*frame = new_frame;
result = 0;
- }
+ }
debug_mutex_unlock(&conn->reference_time_mutex, 0);
return result;
}
void set_ptp_anchor_info(rtsp_conn_info *conn, uint64_t clock_id, uint32_t rtptime,
uint64_t networktime) {
- if ((conn->anchor_clock != 0) && (conn->anchor_clock == clock_id) && (conn->anchor_remote_info_is_valid != 0)) {
+ if ((conn->anchor_clock != 0) && (conn->anchor_clock == clock_id) &&
+ (conn->anchor_remote_info_is_valid != 0)) {
// check change in timing
int64_t time_difference = networktime - conn->anchor_time;
int32_t frame_difference = rtptime - conn->anchor_rtptime;
double time_difference_in_frames = (1.0 * time_difference * conn->input_rate) / 1000000000;
double frame_change = frame_difference - time_difference_in_frames;
- debug(2,"set_ptp_anchor_info: clock: %" PRIx64 ", rtptime: %" PRIu32 ", networktime: %" PRIx64 ", frame adjustment: %7.3f.", clock_id, rtptime, networktime, frame_change);
+ debug(3,
+ "Connection %d: set_ptp_anchor_info: clock: %" PRIx64 ", rtptime: %" PRIu32
+ ", networktime: %" PRIx64 ", frame adjustment: %7.3f.",
+ conn->connection_number, clock_id, rtptime, networktime, frame_change);
} else {
- debug(2,"set_ptp_anchor_info: clock: %" PRIx64 ", rtptime: %" PRIu32 ", networktime: %" PRIx64 ".", clock_id, rtptime, networktime);
+ debug(2,
+ "Connection %d: set_ptp_anchor_info: clock: %" PRIx64 ", rtptime: %" PRIu32
+ ", networktime: %" PRIx64 ".",
+ conn->connection_number, clock_id, rtptime, networktime);
}
if (conn->anchor_clock != clock_id) {
debug(2, "Connection %d: Set Anchor Clock: %" PRIx64 ".", conn->connection_number, clock_id);
int64_t last_anchor_validity_duration = time_now - conn->last_anchor_validity_start_time;
if (last_anchor_validity_duration < 5000000000) {
if (conn->airplay_stream_type == buffered_stream)
- debug(1,
+ debug(2,
"Connection %d: Note: anchor parameters have changed before clock %" PRIx64
" has stabilised.",
conn->connection_number, clock_id);
conn->anchor_remote_info_is_valid = 1;
// these can be modified if the master clock changes over time
+
conn->anchor_rtptime = rtptime;
conn->anchor_time = networktime;
conn->anchor_clock = clock_id;
+ debug(2, "set_ptp_anchor_info done.");
}
+int long_time_notifcation_done = 0;
+
+uint64_t previous_offset = 0;
+uint64_t previous_clock_id = 0;
+
void reset_ptp_anchor_info(rtsp_conn_info *conn) {
debug(2, "Connection %d: Clear anchor information.", conn->connection_number);
conn->last_anchor_info_is_valid = 0;
conn->anchor_remote_info_is_valid = 0;
+ long_time_notifcation_done = 0;
+ previous_offset = 0;
+ previous_clock_id = 0;
}
-int long_time_notifcation_done = 0;
-
int get_ptp_anchor_local_time_info(rtsp_conn_info *conn, uint32_t *anchorRTP,
uint64_t *anchorLocalTime) {
- int response = clock_not_valid;
- uint64_t actual_clock_id;
- if (conn->rtsp_link_is_idle == 0) {
+ int response = clock_no_anchor_info; // no anchor information
+ if (conn->anchor_remote_info_is_valid != 0) {
+ response = clock_not_valid;
+ uint64_t actual_clock_id;
uint64_t actual_time_of_sample, actual_offset, start_of_mastership;
response = ptp_get_clock_info(&actual_clock_id, &actual_time_of_sample, &actual_offset,
- &start_of_mastership);
+ &start_of_mastership);
if (response == clock_ok) {
uint64_t time_now = get_absolute_time_in_ns();
- int64_t time_since_sample = time_now - actual_time_of_sample;
- if (time_since_sample > 300000000000) {
- if (long_time_notifcation_done == 0) {
- debug(1, "The last PTP timing sample is pretty old: %f seconds.",
+ int64_t time_since_start_of_mastership = time_now - start_of_mastership;
+ if (time_since_start_of_mastership >= 400000000L) {
+ int64_t time_since_sample = time_now - actual_time_of_sample;
+ if (time_since_sample > 300000000000L) {
+ if (long_time_notifcation_done == 0) {
+ debug(1, "The last PTP timing sample is pretty old: %f seconds.",
+ 0.000000001 * time_since_sample);
+ long_time_notifcation_done = 1;
+ }
+ } else if ((time_since_sample < 2000000000) && (long_time_notifcation_done != 0)) {
+ debug(1, "The last PTP timing sample is no longer too old: %f seconds.",
0.000000001 * time_since_sample);
- long_time_notifcation_done = 1;
+ long_time_notifcation_done = 0;
}
- } else if ((time_since_sample < 2000000000) && (long_time_notifcation_done != 0)) {
- debug(1, "The last PTP timing sample is no longer too old: %f seconds.",
- 0.000000001 * time_since_sample);
- long_time_notifcation_done = 0;
- }
- if (conn->anchor_remote_info_is_valid !=
- 0) { // i.e. if we have anchor clock ID and anchor time / rtptime
+ int64_t jitter = actual_offset - previous_offset;
+
+ if ((previous_offset != 0) && (previous_clock_id == actual_clock_id) &&
+ ((jitter > 3000000) || (jitter < -3000000)))
+ debug(1,
+ "Clock jitter: %.3f mS. Time since sample: %.3f mS. Time since start of mastership: %.3f "
+ "seconds.",
+ jitter * 0.000001, time_since_sample * 0.000001, time_since_start_of_mastership * 0.000000001);
+
+ previous_offset = actual_offset;
+ previous_clock_id = actual_clock_id;
if (actual_clock_id == conn->anchor_clock) {
conn->last_anchor_rtptime = conn->anchor_rtptime;
// the anchor clock and the actual clock are different
if (conn->last_anchor_info_is_valid != 0) {
-
+
int64_t time_since_last_update =
get_absolute_time_in_ns() - conn->last_anchor_time_of_update;
if (time_since_last_update > 5000000000) {
"Connection %d: Master clock has changed to %" PRIx64
". History: %.3f milliseconds.",
conn->connection_number, actual_clock_id, 0.000001 * duration_of_mastership);
-
+
// Now, the thing is that while the anchor clock and master clock for a
// buffered session start off the same,
// the master clock can change without the anchor clock changing.
conn->anchor_time = conn->last_anchor_local_time + actual_offset;
conn->anchor_clock = actual_clock_id;
-
}
-
+
} else {
response = clock_not_valid; // no current clock information and no previous clock info
}
}
+
} else {
- // debug(1, "anchor_remote_info_is_valid not valid");
- response = clock_no_anchor_info; // no anchor information
+ // debug(1, "mastership time: %f s.", time_since_start_of_mastership * 0.000000001);
+ response = clock_not_valid; // hasn't been master for long enough...
}
}
- }
- // here, check and update the clock status
- if ((clock_status_t)response != conn->clock_status) {
- switch (response) {
- case clock_ok:
- debug(2, "Connection %d: NQPTP master clock %" PRIx64 ".", conn->connection_number,
- actual_clock_id);
- break;
- case clock_not_ready:
- debug(2, "Connection %d: NQPTP master clock %" PRIx64 " is available but not ready.",
- conn->connection_number, actual_clock_id);
- break;
- case clock_service_unavailable:
- debug(1, "Connection %d: NQPTP clock is not available.", conn->connection_number);
- warn("Can't access the NQPTP clock. Is NQPTP running?");
- break;
- case clock_access_error:
- debug(2, "Connection %d: Error accessing the NQPTP clock interface.",
- conn->connection_number);
- break;
- case clock_data_unavailable:
- debug(1, "Connection %d: Can not access NQPTP clock information.", conn->connection_number);
- break;
- case clock_no_master:
- debug(2, "Connection %d: No NQPTP master clock.", conn->connection_number);
- break;
- case clock_no_anchor_info:
- debug(2, "Connection %d: Awaiting clock anchor information.", conn->connection_number);
- break;
- case clock_version_mismatch:
- debug(2, "Connection %d: NQPTP clock interface mismatch.", conn->connection_number);
- warn("This version of Shairport Sync is not compatible with the installed version of NQPTP. "
- "Please update.");
- break;
- case clock_not_synchronised:
- debug(1, "Connection %d: NQPTP clock is not synchronised.", conn->connection_number);
- break;
- case clock_not_valid:
- debug(2, "Connection %d: NQPTP clock information is not valid.", conn->connection_number);
- break;
- default:
- debug(1, "Connection %d: NQPTP clock reports an unrecognised status: %u.",
- conn->connection_number, response);
- break;
+ // here, check and update the clock status
+ if ((clock_status_t)response != conn->clock_status) {
+ switch (response) {
+ case clock_ok:
+ debug(2, "Connection %d: NQPTP master clock %" PRIx64 ".", conn->connection_number,
+ actual_clock_id);
+ break;
+ case clock_not_ready:
+ debug(2, "Connection %d: NQPTP master clock %" PRIx64 " is available but not ready.",
+ conn->connection_number, actual_clock_id);
+ break;
+ case clock_service_unavailable:
+ debug(1, "Connection %d: NQPTP clock is not available.", conn->connection_number);
+ warn("Can't access the NQPTP clock. Is NQPTP running?");
+ break;
+ case clock_access_error:
+ debug(2, "Connection %d: Error accessing the NQPTP clock interface.",
+ conn->connection_number);
+ break;
+ case clock_data_unavailable:
+ debug(1, "Connection %d: Can not access NQPTP clock information.", conn->connection_number);
+ break;
+ case clock_no_master:
+ debug(2, "Connection %d: No NQPTP master clock.", conn->connection_number);
+ break;
+ case clock_no_anchor_info:
+ debug(2, "Connection %d: Awaiting clock anchor information.", conn->connection_number);
+ break;
+ case clock_version_mismatch:
+ debug(2, "Connection %d: NQPTP clock interface mismatch.", conn->connection_number);
+ warn(
+ "This version of Shairport Sync is not compatible with the installed version of NQPTP. "
+ "Please update.");
+ break;
+ case clock_not_synchronised:
+ debug(1, "Connection %d: NQPTP clock is not synchronised.", conn->connection_number);
+ break;
+ case clock_not_valid:
+ debug(2, "Connection %d: NQPTP clock information is not valid.", conn->connection_number);
+ break;
+ default:
+ debug(1, "Connection %d: NQPTP clock reports an unrecognised status: %u.",
+ conn->connection_number, response);
+ break;
+ }
+ conn->clock_status = response;
}
- conn->clock_status = response;
- }
- if (conn->last_anchor_info_is_valid != 0) {
- if (anchorRTP != NULL)
- *anchorRTP = conn->last_anchor_rtptime;
- if (anchorLocalTime != NULL)
- *anchorLocalTime = conn->last_anchor_local_time;
+ if (conn->last_anchor_info_is_valid != 0) {
+ if (anchorRTP != NULL)
+ *anchorRTP = conn->last_anchor_rtptime;
+ if (anchorLocalTime != NULL)
+ *anchorLocalTime = conn->last_anchor_local_time;
+ }
}
-
return response;
}
*time = ltime;
result = 0;
} else {
- debug(3, "frame_to_ptp_local_time can't get anchor local time information");
+ debug(2, "frame_to_ptp_local_time can't get anchor local time information");
}
return result;
}
*frame = lframe;
result = 0;
} else {
- debug(3, "local_ptp_time_to_frame can't get anchor local time information");
+ debug(2, "local_ptp_time_to_frame can't get anchor local time information");
}
return result;
}
-void rtp_data_receiver_cleanup_handler(void *arg) {
- rtsp_conn_info *conn = (rtsp_conn_info *)arg;
- debug(2, "Connection %d: AP2 Data Receiver Cleanup.", conn->connection_number);
-}
-
-void *rtp_data_receiver(void *arg) {
- rtsp_conn_info *conn = (rtsp_conn_info *)arg;
- if (conn->airplay_stream_category == remote_control_stream)
- debug(1, "Connection %d (RC): AP2 Data Receiver started", conn->connection_number);
- else
- debug(1, "Connection %d: AP2 Data Receiver started", conn->connection_number);
-
- pthread_cleanup_push(rtp_data_receiver_cleanup_handler, arg);
-
- listen(conn->data_socket, 5);
-
- uint8_t packet[4096];
- ssize_t nread;
- SOCKADDR remote_addr;
- memset(&remote_addr, 0, sizeof(remote_addr));
- socklen_t addr_size = sizeof(remote_addr);
-
- int fd = accept(conn->data_socket, (struct sockaddr *)&remote_addr, &addr_size);
- debug(1,
- "Connection %d: rtp_data_receiver accepted a connection on socket %d and moved to a new "
- "socket %d.",
- conn->connection_number, conn->data_socket, fd);
- intptr_t pfd = fd;
- pthread_cleanup_push(socket_cleanup, (void *)pfd);
- int finished = 0;
- do {
- nread = recv(fd, packet, sizeof(packet), 0);
-
- if (nread < 0) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(1, "Connection %d: error in ap2 rtp_data_receiver %d: \"%s\". Could not recv a packet.",
- conn->connection_number, errno, errorstring);
- // if ((config.diagnostic_drop_packet_fraction == 0.0) ||
- // (drand48() > config.diagnostic_drop_packet_fraction)) {
- } else if (nread > 0) {
-
- // ssize_t plen = nread;
- debug(1, "Connection %d: Packet Received on Data Port.", conn->connection_number);
- // } else {
- // debug(3, "Event Receiver Thread -- dropping incoming packet to simulate a bad network.");
- // }
- } else {
- finished = 1;
- }
- } while (finished == 0);
- pthread_cleanup_pop(1); // close the socket
- pthread_cleanup_pop(1); // do the cleanup
- debug(2, "Connection %d: AP2 Data Receiver RTP thread \"normal\" exit.", conn->connection_number);
- pthread_exit(NULL);
-}
-
-void rtp_event_receiver_cleanup_handler(void *arg) {
- rtsp_conn_info *conn = (rtsp_conn_info *)arg;
- debug(2, "Connection %d: AP2 Event Receiver Cleanup.", conn->connection_number);
-}
-
-void *rtp_event_receiver(void *arg) {
- rtsp_conn_info *conn = (rtsp_conn_info *)arg;
- if (conn->airplay_stream_category == remote_control_stream)
- debug(2, "Connection %d (RC): AP2 Event Receiver started", conn->connection_number);
- else
- debug(2, "Connection %d: AP2 Event Receiver started", conn->connection_number);
- pthread_cleanup_push(rtp_event_receiver_cleanup_handler, arg);
-
- // listen(conn->event_socket, 5); // this is now done in the handle_setup_2 code
-
- uint8_t packet[4096];
- ssize_t nread;
- SOCKADDR remote_addr;
- memset(&remote_addr, 0, sizeof(remote_addr));
- socklen_t addr_size = sizeof(remote_addr);
-
- int fd = accept(conn->event_socket, (struct sockaddr *)&remote_addr, &addr_size);
- debug(2,
- "Connection %d: rtp_event_receiver accepted a connection on socket %d and moved to a new "
- "socket %d.",
- conn->connection_number, conn->event_socket, fd);
- intptr_t pfd = fd;
- pthread_cleanup_push(socket_cleanup, (void *)pfd);
- int finished = 0;
- do {
- nread = recv(fd, packet, sizeof(packet), 0);
-
- if (nread < 0) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(1,
- "Connection %d: error in ap2 rtp_event_receiver %d: \"%s\". Could not recv a packet.",
- conn->connection_number, errno, errorstring);
- // if ((config.diagnostic_drop_packet_fraction == 0.0) ||
- // (drand48() > config.diagnostic_drop_packet_fraction)) {
- } else if (nread > 0) {
-
- // ssize_t plen = nread;
- debug(1, "Connection %d: Packet Received on Event Port.", conn->connection_number);
- if (packet[1] == 0xD7) {
- debug(1,
- "Connection %d: AP2 Event Receiver -- Time Announce RTP packet of type 0x%02X length "
- "%d received.",
- conn->connection_number, packet[1], nread);
- } else {
- debug(1,
- "Connection %d: AP2 Event Receiver -- Unknown RTP packet of type 0x%02X length %d "
- "received.",
- conn->connection_number, packet[1], nread);
- }
- // } else {
- // debug(3, "Event Receiver Thread -- dropping incoming packet to simulate a bad network.");
- // }
- } else {
- finished = 1;
- }
- } while (finished == 0);
- pthread_cleanup_pop(1); // close the socket
- pthread_cleanup_pop(1); // do the cleanup
- debug(2, "Connection %d: AP2 Event Receiver RTP thread \"normal\" exit.",
- conn->connection_number);
- pthread_exit(NULL);
-}
-
void rtp_ap2_control_handler_cleanup_handler(void *arg) {
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
debug(2, "Connection %d: AP2 Control Receiver Cleanup.", conn->connection_number);
memcpy(×tamp, ciphered_audio_alt + sizeof(uint16_t), sizeof(uint32_t));
timestamp = ntohl(timestamp);
- /*
- uint32_t ssrc;
- memcpy(&ssrc, packet+8, sizeof(uint32_t));
- ssrc = ntohl(ssrc);
- */
-
- // debug(1, "Realtime Audio Receiver Packet received. Version: %u, Padding: %u, Extension:
- // %u, Csrc Count: %u, Marker: %u, Payload Type: %u, Sequence Number: %u, Timestamp: %u,
- // SSRC: %u.", version, padding, extension, csrc_count, marker, payload_type,
- // sequence_number, timestamp, ssrc);
-
if (conn->session_key != NULL) {
unsigned char nonce[12];
memset(nonce, 0, sizeof(nonce));
&new_payload_length, // mlen_p
NULL, // nsec,
ciphered_audio_alt +
- 10, // the ciphertext starts 10 bytes in and is followed by the MAC tag,
- nread - (8 + 10), // clen -- the last 8 bytes are the nonce
+ 10, // the ciphertext starts 10 bytes in and is followed by the MAC tag,
+ nread - (8 + 10), // clen -- the last 8 bytes are the nonce
ciphered_audio_alt + 2, // authenticated additional data
8, // authenticated additional data length
nonce,
if (new_payload_length > max_int)
debug(1, "Madly long payload length!");
int plen = new_payload_length; //
- // debug(1," Write packet to buffer %d, timestamp %u.", sequence_number, timestamp);
- player_put_packet(1, sequence_number, timestamp, m, plen,
- conn); // the '1' means is original format
+ // debug(1," Write packet to buffer %d,
+ // timestamp %u.", sequence_number, timestamp);
+ player_put_packet(ALAC_44100_S16_2, sequence_number, timestamp, m, plen, 0, 0,
+ conn); // 0 = no mute, 0 = non discontinuous
} else {
debug(2, "No session key, so the audio packet can not be deciphered -- skipped.");
}
}
void *rtp_ap2_control_receiver(void *arg) {
+ // #include <syscall.h>
+ // debug(1, "rtp_ap2_control_receiver PID %d", syscall(SYS_gettid));
pthread_cleanup_push(rtp_ap2_control_handler_cleanup_handler, arg);
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
uint8_t packet[4096];
SOCKADDR from_sock_addr;
socklen_t from_sock_addr_length = sizeof(SOCKADDR);
memset(&from_sock_addr, 0, sizeof(SOCKADDR));
-
+
nread = recvfrom(conn->ap2_control_socket, packet, sizeof(packet), 0,
(struct sockaddr *)&from_sock_addr, &from_sock_addr_length);
uint64_t time_now = get_absolute_time_in_ns();
int64_t time_since_start = time_now - start_time;
-
- if (conn->rtsp_link_is_idle == 0) {
- if (conn->udp_clock_is_initialised == 0) {
- packet_number = 0;
- conn->udp_clock_is_initialised = 1;
- debug(1,"AP2 Realtime Clock receiver initialised.");
- }
-
- // debug(1,"Connection %d: AP2 Control Packet received.", conn->connection_number);
- if (nread >= 28) { // must have at least 28 bytes for the timing information
- if ((time_since_start < 2000000) && ((packet[0] & 0x10) == 0)) {
- debug(1,
- "Dropping what looks like a (non-sentinel) packet left over from a previous session "
- "at %f ms.",
- 0.000001 * time_since_start);
- } else {
- packet_number++;
- // debug(1,"AP2 Packet %" PRIu64 ".", packet_number);
+ if (conn->udp_clock_is_initialised == 0) {
+ packet_number = 0;
+ conn->udp_clock_is_initialised = 1;
+ debug(2, "AP2 Realtime Clock receiver initialised.");
+ }
- if (packet_number == 1) {
- if ((packet[0] & 0x10) != 0) {
- debug(2, "First packet is a sentinel packet.");
- } else {
- debug(2, "First packet is a not a sentinel packet!");
- }
+ // debug(1,"Connection %d: AP2 Control Packet received.", conn->connection_number);
+
+ if (nread >= 28) { // must have at least 28 bytes for the timing information
+ if ((time_since_start < 2000000) && ((packet[0] & 0x10) == 0)) {
+ debug(1,
+ "Dropping what looks like a (non-sentinel) packet left over from a previous session "
+ "at %f ms.",
+ 0.000001 * time_since_start);
+ } else {
+ packet_number++;
+ // debug(1,"AP2 Packet %" PRIu64 ".", packet_number);
+
+ if (packet_number == 1) {
+ if ((packet[0] & 0x10) != 0) {
+ debug(2, "First packet is a sentinel packet.");
+ } else {
+ debug(2, "First packet is a not a sentinel packet!");
}
- // debug(1,"rtp_ap2_control_receiver coded: %u, %u", packet[0], packet[1]);
- // you might want to set this higher to specify how many initial timings to ignore
- if (packet_number >= 1) {
+ }
+ // debug(1,"rtp_ap2_control_receiver coded: %u, %u", packet[0], packet[1]);
+ // you might want to set this higher to specify how many initial timings to ignore
+ if (packet_number >= 1) {
if ((config.diagnostic_drop_packet_fraction == 0.0) ||
(drand48() > config.diagnostic_drop_packet_fraction)) {
// store the from_sock_addr if we haven't already done so
check64conversion("clock_id", packet + 20, clock_id);
// debug(1, "we have clock_id: %" PRIx64 ".", clock_id);
- // debug(1,"remote_packet_time_ns: %" PRIx64 ", local_realtime_now_ns: %" PRIx64 ".",
- // remote_packet_time_ns, local_realtime_now);
+ // debug(1,"remote_packet_time_ns: %" PRIx64 ", local_realtime_now_ns: %" PRIx64
+ // ".", remote_packet_time_ns, local_realtime_now);
uint32_t frame_1 =
nctohl(packet + 4); // this seems to be the frame with latency of 77165 included
check32conversion("frame_1", packet + 4, frame_1);
- uint32_t frame_2 = nctohl(packet + 16); // this seems to be the frame the time refers to
+ uint32_t frame_2 =
+ nctohl(packet + 16); // this seems to be the frame the time refers to
check32conversion("frame_2", packet + 16, frame_2);
// this just updates the anchor information contained in the packet
// the frame and its remote time
debug(1, "Notified latency is %d frames.", notified_latency);
int32_t added_latency =
(int32_t)(config.audio_backend_latency_offset * conn->input_rate);
- // the actual latency is the notified latency plus the fixed latency + the added latency
+ // the actual latency is the notified latency plus the fixed latency + the added
+ // latency
int32_t net_latency =
notified_latency + 11035 +
added_latency; // this is the latency between incoming frames and the DAC
- net_latency = net_latency -
- (int32_t)(config.audio_backend_buffer_desired_length * conn->input_rate);
+ net_latency = net_latency - (int32_t)(config.audio_backend_buffer_desired_length *
+ conn->input_rate);
// debug(1, "Net latency is %d frames.", net_latency);
if (net_latency <= 0) {
if (conn->latency_warning_issued == 0) {
- warn("The stream latency (%f seconds) it too short to accommodate an offset of %f "
+ warn("The stream latency (%f seconds) it too short to accommodate an offset of "
+ "%f "
"seconds and a backend buffer of %f seconds.",
((notified_latency + 11035) * 1.0) / conn->input_rate,
config.audio_backend_latency_offset,
set_ptp_anchor_info(conn, clock_id, frame_1 - 11035 - added_latency,
remote_packet_time_ns);
if (conn->anchor_clock != clock_id) {
- debug(2, "Connection %d: Change Anchor Clock: %" PRIx64 ".", conn->connection_number,
- clock_id);
+ debug(2, "Connection %d: Change Anchor Clock: %" PRIx64 ".",
+ conn->connection_number, clock_id);
}
} break;
default: {
char *packet_in_hex_cstring =
debug_malloc_hex_cstring(packet, nread); // remember to free this afterwards
- debug(
- 1,
- "AP2 Control Receiver Packet of first byte 0x%02X, type 0x%02X length %d received: "
- "\"%s\".",
- packet[0], packet[1], nread, packet_in_hex_cstring);
+ debug(1,
+ "AP2 Control Receiver Packet of first byte 0x%02X, type 0x%02X length %zd "
+ "received: "
+ "\"%s\".",
+ packet[0], packet[1], nread, packet_in_hex_cstring);
free(packet_in_hex_cstring);
} break;
}
} else {
debug(1, "AP2 Control Receiver -- dropping a packet.");
}
- }
}
- } else {
- if (nread == -1) {
- if ((errno == EAGAIN) || (errno == EWOULDBLOCK)) {
- if (conn->airplay_stream_type == realtime_stream) {
- debug(1, "Connection %d: no control packets for the last 7 seconds -- resetting anchor info", conn->connection_number);
- reset_ptp_anchor_info(conn);
- packet_number = 0; // start over in allowing the packet to set anchor information
- }
- } else {
- debug(2, "Connection %d: AP2 Control Receiver -- error %d receiving a packet.", conn->connection_number, errno);
+ }
+ } else {
+ if (nread == -1) {
+ if ((errno == EAGAIN) || (errno == EWOULDBLOCK)) {
+ if (conn->airplay_stream_type == realtime_stream) {
+ debug(1,
+ "Connection %d: no control packets for the last 7 seconds -- resetting anchor "
+ "info",
+ conn->connection_number);
+ reset_ptp_anchor_info(conn);
+ packet_number = 0; // start over in allowing the packet to set anchor information
}
} else {
- debug(2, "Connection %d: AP2 Control Receiver -- malformed packet, %d bytes long.", conn->connection_number, nread);
+ debug(2, "Connection %d: AP2 Control Receiver -- error %d receiving a packet.",
+ conn->connection_number, errno);
}
+ } else {
+ debug(2, "Connection %d: AP2 Control Receiver -- malformed packet, %zd bytes long.",
+ conn->connection_number, nread);
}
}
}
debug(2, "Realtime Audio Receiver Cleanup Start.");
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
close(conn->realtime_audio_socket);
- debug(2, "Connection %d: closing realtime audio port %u", conn->local_realtime_audio_port);
+ debug(2, "Connection %d: closing realtime audio port %u", conn->connection_number, conn->local_realtime_audio_port);
conn->realtime_audio_socket = 0;
debug(2, "Realtime Audio Receiver Cleanup Done.");
}
void *rtp_realtime_audio_receiver(void *arg) {
+ // #include <syscall.h>
+ // debug(1, "rtp_realtime_audio_receiver PID %d", syscall(SYS_gettid));
pthread_cleanup_push(rtp_realtime_audio_cleanup_handler, arg);
rtsp_conn_info *conn = (rtsp_conn_info *)arg;
uint8_t packet[4096];
*/
// if (have_ptp_timing_information(conn)) {
if (1) {
- int32_t seqno = decipher_player_put_packet(packet + 2, nread - 2, conn);
- if (seqno >= 0) {
- if (last_seqno == -1) {
- last_seqno = seqno;
+ int32_t seqno = decipher_player_put_packet(packet + 2, nread - 2, conn);
+ if (seqno >= 0) {
+ if (last_seqno == -1) {
+ last_seqno = seqno;
+ } else {
+ last_seqno = (last_seqno + 1) & 0xffff;
+ // if (seqno != last_seqno)
+ // debug(3, "RTP: Packets out of sequence: expected: %d, got %d.", last_seqno,
+ // seqno);
+ last_seqno = seqno; // reset warning...
+ }
} else {
- last_seqno = (last_seqno + 1) & 0xffff;
- // if (seqno != last_seqno)
- // debug(3, "RTP: Packets out of sequence: expected: %d, got %d.", last_seqno, seqno);
- last_seqno = seqno; // reset warning...
+ debug(1, "Realtime Audio Receiver -- bad packet dropped.");
}
- } else {
- debug(1, "Realtime Audio Receiver -- bad packet dropped.");
- }
}
} else {
debug(3, "Realtime Audio Receiver -- dropping a packet.");
pthread_exit(NULL);
}
-ssize_t buffered_read(buffered_tcp_desc *descriptor, void *buf, size_t count,
- size_t *bytes_remaining) {
- ssize_t response = -1;
- if (pthread_mutex_lock(&descriptor->mutex) != 0)
- debug(1, "problem with mutex");
- pthread_cleanup_push(mutex_unlock, (void *)&descriptor->mutex);
- if (descriptor->closed == 0) {
- if ((descriptor->buffer_occupancy == 0) && (descriptor->error_code == 0)) {
- if (count == 2)
- debug(2, "buffered_read: waiting for %u bytes (okay at start of a track).", count);
- else
- debug(2, "buffered_read: waiting for %u bytes.", count);
- }
- while ((descriptor->buffer_occupancy == 0) && (descriptor->error_code == 0)) {
- if (pthread_cond_wait(&descriptor->not_empty_cv, &descriptor->mutex))
- debug(1, "Error waiting for buffered read");
- }
- }
- if (descriptor->buffer_occupancy != 0) {
- ssize_t bytes_to_move = count;
-
- if (descriptor->buffer_occupancy < count) {
- bytes_to_move = descriptor->buffer_occupancy;
- }
-
- ssize_t top_gap = descriptor->buffer + descriptor->buffer_max_size - descriptor->toq;
- if (top_gap < bytes_to_move)
- bytes_to_move = top_gap;
-
- memcpy(buf, descriptor->toq, bytes_to_move);
- descriptor->toq += bytes_to_move;
- if (descriptor->toq == descriptor->buffer + descriptor->buffer_max_size)
- descriptor->toq = descriptor->buffer;
- descriptor->buffer_occupancy -= bytes_to_move;
- if (bytes_remaining != NULL)
- *bytes_remaining = descriptor->buffer_occupancy;
- response = bytes_to_move;
- if (pthread_cond_signal(&descriptor->not_full_cv))
- debug(1, "Error signalling");
- } else if (descriptor->error_code) {
- errno = descriptor->error_code;
- response = -1;
- } else if (descriptor->closed != 0) {
- response = 0;
- }
-
- pthread_cleanup_pop(1); // release the mutex
- return response;
-}
-
-#define STANDARD_PACKET_SIZE 4096
-
-void buffered_tcp_reader_cleanup_handler(__attribute__((unused)) void *arg) {
- debug(2, "Buffered TCP Reader Thread Exit via Cleanup.");
-}
-
-void *buffered_tcp_reader(void *arg) {
- pthread_cleanup_push(buffered_tcp_reader_cleanup_handler, NULL);
- buffered_tcp_desc *descriptor = (buffered_tcp_desc *)arg;
-
- // listen(descriptor->sock_fd, 5); // this is done in the handle_setup_2 code to ensure it's open
- // when the client hears about it...
- ssize_t nread;
- SOCKADDR remote_addr;
- memset(&remote_addr, 0, sizeof(remote_addr));
- socklen_t addr_size = sizeof(remote_addr);
- int finished = 0;
- int fd = accept(descriptor->sock_fd, (struct sockaddr *)&remote_addr, &addr_size);
- intptr_t pfd = fd;
- pthread_cleanup_push(socket_cleanup, (void *)pfd);
-
- do {
- if (pthread_mutex_lock(&descriptor->mutex) != 0)
- debug(1, "problem with mutex");
- pthread_cleanup_push(mutex_unlock, (void *)&descriptor->mutex);
- while ((descriptor->buffer_occupancy == descriptor->buffer_max_size) ||
- (descriptor->error_code != 0) || (descriptor->closed != 0)) {
- if (pthread_cond_wait(&descriptor->not_full_cv, &descriptor->mutex))
- debug(1, "Error waiting for buffered read");
- }
- pthread_cleanup_pop(1); // release the mutex
-
- // now we know it is not full, so go ahead and try to read some more into it
-
- // wrap
- if ((size_t)(descriptor->eoq - descriptor->buffer) == descriptor->buffer_max_size)
- descriptor->eoq = descriptor->buffer;
-
- // figure out how much to ask for
- size_t bytes_to_request = STANDARD_PACKET_SIZE;
- size_t free_space = descriptor->buffer_max_size - descriptor->buffer_occupancy;
- if (bytes_to_request > free_space)
- bytes_to_request = free_space; // don't ask for more than will fit
-
- size_t gap_to_end_of_buffer =
- descriptor->buffer + descriptor->buffer_max_size - descriptor->eoq;
- if (gap_to_end_of_buffer < bytes_to_request)
- bytes_to_request =
- gap_to_end_of_buffer; // only ask for what will fill to the top of the buffer
-
- // do the read
- // debug(1, "Request buffered read of up to %d bytes.", bytes_to_request);
- nread = recv(fd, descriptor->eoq, bytes_to_request, 0);
- // debug(1, "Received %d bytes for a buffer size of %d bytes.",nread, descriptor->buffer_occupancy + nread);
- if (pthread_mutex_lock(&descriptor->mutex) != 0)
- debug(1, "problem with not empty mutex");
- pthread_cleanup_push(mutex_unlock, (void *)&descriptor->mutex);
- if (nread < 0) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(1, "error in buffered_tcp_reader %d: \"%s\". Could not recv a packet.", errno,
- errorstring);
- descriptor->error_code = errno;
- } else if (nread == 0) {
- descriptor->closed = 1;
- } else if (nread > 0) {
- descriptor->eoq += nread;
- descriptor->buffer_occupancy += nread;
- } else {
- debug(1, "buffered audio port closed!");
- }
- // signal if we got data or an error or the file closed
- if (pthread_cond_signal(&descriptor->not_empty_cv))
- debug(1, "Error signalling");
- pthread_cleanup_pop(1); // release the mutex
- } while (finished == 0);
-
- debug(1, "Buffered TCP Reader Thread Exit \"Normal\" Exit Begin.");
- pthread_cleanup_pop(1); // close the socket
- pthread_cleanup_pop(1); // cleanup
- debug(1, "Buffered TCP Reader Thread Exit \"Normal\" Exit -- Shouldn't happen!.");
- pthread_exit(NULL);
-}
-
-void avcodec_alloc_context3_cleanup_handler(void *arg) {
- debug(3, "avcodec_alloc_context3_cleanup_handler");
- AVCodecContext *codec_context = arg;
- av_free(codec_context);
-}
-
-void avcodec_open2_cleanup_handler(__attribute__((unused)) void *arg) {
- debug(3, "avcodec_open2_cleanup_handler -- does nothing right now");
- // AVCodecContext *codec_context = arg;
- // avcodec_free_context(&codec_context);
-}
-
-void av_parser_init_cleanup_handler(void *arg) {
- debug(3, "av_parser_init_cleanup_handler");
- AVCodecParserContext *codec_parser_context = arg;
- av_parser_close(codec_parser_context);
-}
-
-void swr_alloc_cleanup_handler(void *arg) {
- debug(3, "swr_alloc_cleanup_handler");
- SwrContext **swr = arg;
- swr_free(swr);
-}
-
-void av_packet_alloc_cleanup_handler(void *arg) {
- debug(3, "av_packet_alloc_cleanup_handler");
- AVPacket **pkt = arg;
- av_packet_free(pkt);
-}
-
-// this will read a block of the size specified to the buffer
-// and will return either with the block or on error
-ssize_t lread_sized_block(buffered_tcp_desc *descriptor, void *buf, size_t count,
- size_t *bytes_remaining) {
- ssize_t response, nread;
- size_t inbuf = 0; // bytes already in the buffer
- int keep_trying = 1;
-
- do {
- nread = buffered_read(descriptor, buf + inbuf, count - inbuf, bytes_remaining);
- if (nread == 0) {
- // a blocking read that returns zero means eof -- implies connection closed
- debug(3, "read_sized_block connection closed.");
- keep_trying = 0;
- } else if (nread < 0) {
- if (errno == EAGAIN) {
- debug(1, "read_sized_block getting Error 11 -- EAGAIN from a blocking read!");
- }
- if ((errno != ECONNRESET) && (errno != EAGAIN) && (errno != EINTR)) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(1, "read_sized_block read error %d: \"%s\".", errno, (char *)errorstring);
- keep_trying = 0;
- }
- } else {
- inbuf += (size_t)nread;
- }
- } while ((keep_trying != 0) && (inbuf < count));
- if (nread <= 0)
- response = nread;
- else
- response = inbuf;
- return response;
-}
-
-// From
-// https://stackoverflow.com/questions/18862715/how-to-generate-the-aac-adts-elementary-stream-with-android-mediacodec
-// with thanks!
-/**
- * Add ADTS header at the beginning of each and every AAC packet.
- * This is needed as MediaCodec encoder generates a packet of raw
- * AAC data.
- *
- * Note the packetLen must count in the ADTS header itself.
- **/
-void addADTStoPacket(uint8_t *packet, int packetLen) {
- int profile = 2; // AAC LC
- // 39=MediaCodecInfo.CodecProfileLevel.AACObjectELD;
- int freqIdx = 4; // 44.1KHz
- int chanCfg = 2; // CPE
-
- // fill in ADTS data
- packet[0] = 0xFF;
- packet[1] = 0xF9;
- packet[2] = ((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2);
- packet[3] = ((chanCfg & 3) << 6) + (packetLen >> 11);
- packet[4] = (packetLen & 0x7FF) >> 3;
- packet[5] = ((packetLen & 7) << 5) + 0x1F;
- packet[6] = 0xFC;
-}
-
-void rtp_buffered_audio_cleanup_handler(__attribute__((unused)) void *arg) {
- debug(2, "Buffered Audio Receiver Cleanup Start.");
- rtsp_conn_info *conn = (rtsp_conn_info *)arg;
- close(conn->buffered_audio_socket);
- debug(2, "Connection %d: TCP Buffered Audio port closed: %u.", conn->connection_number,
- conn->local_buffered_audio_port);
- conn->buffered_audio_socket = 0;
- debug(2, "Buffered Audio Receiver Cleanup Done.");
-}
-
-// not used right now, but potentially useful for understanding flush requests
-void display_flush_requests(int activeOnly, uint32_t currentSeq, uint32_t currentTS,
- rtsp_conn_info *conn) {
- if (conn->flush_requests == NULL) {
- if (activeOnly == 0)
- debug(1, "No flush requests.");
- } else {
- flush_request_t *t = conn->flush_requests;
- do {
- if (t->flushNow) {
- debug(1, "immediate flush to untilSeq: %u, untilTS: %u.", t->flushUntilSeq,
- t->flushUntilTS);
- } else {
- if (activeOnly == 0)
- debug(1, "fromSeq: %u, fromTS: %u, to untilSeq: %u, untilTS: %u.", t->flushFromSeq,
- t->flushFromTS, t->flushUntilSeq, t->flushUntilTS);
- else if ((activeOnly == 1) &&
- (currentSeq >=
- (t->flushFromSeq -
- 1))) // the -1 is because you might have to trim the end of the previous block
- debug(1,
- "fromSeq: %u, fromTS: %u, to untilSeq: %u, untilTS: %u, with currentSeq: %u, "
- "currentTS: %u.",
- t->flushFromSeq, t->flushFromTS, t->flushUntilSeq, t->flushUntilTS, currentSeq,
- currentTS);
- }
- t = t->next;
- } while (t != NULL);
- }
-}
-
-void *rtp_buffered_audio_processor(void *arg) {
- rtsp_conn_info *conn = (rtsp_conn_info *)arg;
- pthread_cleanup_push(rtp_buffered_audio_cleanup_handler, arg);
-
- pthread_t *buffered_reader_thread = malloc(sizeof(pthread_t));
- if (buffered_reader_thread == NULL)
- debug(1, "cannot allocate a buffered_reader_thread!");
- memset(buffered_reader_thread, 0, sizeof(pthread_t));
- pthread_cleanup_push(malloc_cleanup, buffered_reader_thread);
-
- buffered_tcp_desc *buffered_audio = malloc(sizeof(buffered_tcp_desc));
- if (buffered_audio == NULL)
- debug(1, "cannot allocate a buffered_tcp_desc!");
- // initialise the descriptor
- memset(buffered_audio, 0, sizeof(buffered_tcp_desc));
- pthread_cleanup_push(malloc_cleanup, buffered_audio);
-
- if (pthread_mutex_init(&buffered_audio->mutex, NULL))
- debug(1, "Connection %d: error %d initialising buffered_audio mutex.", conn->connection_number,
- errno);
- pthread_cleanup_push(mutex_cleanup, &buffered_audio->mutex);
-
- if (pthread_cond_init(&buffered_audio->not_empty_cv, NULL))
- die("Connection %d: error %d initialising not_empty cv.", conn->connection_number, errno);
- pthread_cleanup_push(cv_cleanup, &buffered_audio->not_empty_cv);
-
- if (pthread_cond_init(&buffered_audio->not_full_cv, NULL))
- die("Connection %d: error %d initialising not_full cv.", conn->connection_number, errno);
- pthread_cleanup_push(cv_cleanup, &buffered_audio->not_full_cv);
-
- // initialise the buffer data structure
- buffered_audio->buffer_max_size = conn->ap2_audio_buffer_size;
- buffered_audio->buffer = malloc(conn->ap2_audio_buffer_size);
- if (buffered_audio->buffer == NULL)
- debug(1, "cannot allocate an audio buffer of %u bytes!", buffered_audio->buffer_max_size);
- pthread_cleanup_push(malloc_cleanup, buffered_audio->buffer);
-
- // pthread_mutex_lock(&conn->buffered_audio_mutex);
- buffered_audio->toq = buffered_audio->buffer;
- buffered_audio->eoq = buffered_audio->buffer;
-
- buffered_audio->sock_fd = conn->buffered_audio_socket;
-
- pthread_create(buffered_reader_thread, NULL, &buffered_tcp_reader, buffered_audio);
- pthread_cleanup_push(thread_cleanup, buffered_reader_thread);
-
- // ideas and some code from https://rodic.fr/blog/libavcodec-tutorial-decode-audio-file/
- // with thanks
-
- const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_AAC);
- if (codec == NULL) {
- debug(1, "Can't find an AAC decoder!");
- }
-
- AVCodecContext *codec_context = avcodec_alloc_context3(codec);
- if (codec_context == NULL) {
- debug(1, "Could not allocate audio codec context!");
- }
- // push a deallocator -- av_free(codec_context)
- pthread_cleanup_push(avcodec_alloc_context3_cleanup_handler, codec_context);
-
- if (avcodec_open2(codec_context, codec, NULL) < 0) {
- debug(1, "Could not open a codec into the audio codec context");
- }
- // push a closer -- avcodec_close(codec_context);
- pthread_cleanup_push(avcodec_open2_cleanup_handler, codec_context);
-
- AVCodecParserContext *codec_parser_context = av_parser_init(codec->id);
- if (codec_parser_context == NULL) {
- debug(1, "Can't initialise a parser context!");
- }
- // push a closer -- av_parser_close(codec_parser_context);
- pthread_cleanup_push(av_parser_init_cleanup_handler, codec_parser_context);
-
- AVPacket *pkt = av_packet_alloc();
- if (pkt == NULL) {
- debug(1, "Can't allocate an AV packet");
- }
- // push a deallocator -- av_packet_free(pkt);
- pthread_cleanup_push(av_packet_alloc_cleanup_handler, &pkt);
-
- AVFrame *decoded_frame = NULL;
- int dst_linesize;
- int dst_bufsize;
-
- // Prepare software resampler to convert floating point (?)
- SwrContext *swr = swr_alloc();
- if (swr == NULL) {
- debug(1, "can not allocate a swr context");
- }
- // push a deallocator -- av_packet_free(pkt);
- pthread_cleanup_push(swr_alloc_cleanup_handler, &swr);
-
-
-// FFmpeg 5.1 or later...
-#if LIBAVUTIL_VERSION_MAJOR >= 57
- av_opt_set_chlayout(swr, "in_chlayout", &(AVChannelLayout)AV_CHANNEL_LAYOUT_STEREO, 0);
- av_opt_set_chlayout(swr, "out_chlayout", &(AVChannelLayout)AV_CHANNEL_LAYOUT_STEREO, 0);
-#else
- av_opt_set_int(swr, "in_channel_layout", AV_CH_LAYOUT_STEREO, 0);
- av_opt_set_int(swr, "out_channel_layout", AV_CH_LAYOUT_STEREO, 0);
-#endif
- av_opt_set_int(swr, "in_sample_rate", conn->input_rate, 0);
- av_opt_set_int(swr, "out_sample_rate", conn->input_rate,
- 0); // must match or the timing will be wrong`
- av_opt_set_sample_fmt(swr, "in_sample_fmt", AV_SAMPLE_FMT_FLTP, 0);
-
- enum AVSampleFormat av_format;
- switch (config.output_format) {
- case SPS_FORMAT_S32:
- case SPS_FORMAT_S32_LE:
- case SPS_FORMAT_S32_BE:
- case SPS_FORMAT_S24:
- case SPS_FORMAT_S24_LE:
- case SPS_FORMAT_S24_BE:
- case SPS_FORMAT_S24_3LE:
- case SPS_FORMAT_S24_3BE:
- av_format = AV_SAMPLE_FMT_S32;
- conn->input_bytes_per_frame = 8; // the output from the decoder will be input to the player
- conn->input_bit_depth = 32;
- debug(2, "32-bit output format chosen");
- break;
- case SPS_FORMAT_S16:
- case SPS_FORMAT_S16_LE:
- case SPS_FORMAT_S16_BE:
- av_format = AV_SAMPLE_FMT_S16;
- conn->input_bytes_per_frame = 4;
- conn->input_bit_depth = 16;
- break;
- case SPS_FORMAT_U8:
- av_format = AV_SAMPLE_FMT_U8;
- conn->input_bytes_per_frame = 2;
- conn->input_bit_depth = 8;
- break;
- default:
- debug(1, "Unsupported DAC output format %u. AV_SAMPLE_FMT_S16 decoding chosen. Good luck!",
- config.output_format);
- av_format = AV_SAMPLE_FMT_S16;
- conn->input_bytes_per_frame = 4; // the output from the decoder will be input to the player
- conn->input_bit_depth = 16;
- break;
- };
-
- av_opt_set_sample_fmt(swr, "out_sample_fmt", av_format, 0);
- int swr_err = swr_init(swr);
- if (swr_err !=0){
- die("FFMpeg swr_init() failed Error %d (%s)",
- swr_err, av_err2str(swr_err));
- }
-
- uint8_t packet[16 * 1024];
- unsigned char m[16 * 1024]; // leave the first 7 bytes blank to make room for the ADTS
- uint8_t *pcm_audio = NULL; // the S16 output
- unsigned char *data_to_process;
- ssize_t data_remaining;
- uint32_t seq_no = 0; // audio packet number. Initialised to avoid a "possibly uninitialised" warning.
- uint32_t previous_seq_no = 0;
- int new_buffer_needed = 0;
- ssize_t nread;
-
- int finished = 0;
- int pcm_buffer_size = (1024 + 352) * conn->input_bytes_per_frame;
- uint8_t pcm_buffer[pcm_buffer_size];
-
- int pcm_buffer_occupancy = 0;
- int pcm_buffer_read_point = 0; // offset to where the next buffer should come from
- uint32_t pcm_buffer_read_point_rtptime = 0;
- uint32_t pcm_buffer_read_point_rtptime_offset = 0; // hack
- uint32_t expected_pcm_buffer_read_point_rtptime = 0;
-
- uint64_t blocks_read = 0;
- uint64_t blocks_read_in_sequence = 0; // since the start of this sequence -- reset by start or flush
- int flush_requested = 0;
- uint32_t expected_timestamp = 0;
- int expected_timesamp_is_reasonable = 0;
- uint32_t timestamp = 0; // initialised to avoid a "possibly uninitialised" warning.
- int packets_played_in_this_sequence = 0;
- int play_enabled = 0;
- uint32_t flush_from_timestamp = 0; // initialised to avoid a "possibly uninitialised" warning.
- double requested_lead_time = 0.0; // normal lead time minimum -- maybe it should be about 0.1
-
- // wait until our timing information is valid
-
- // debug(1,"rtp_buffered_audio_processor ready.");
- while (have_ptp_timing_information(conn) == 0)
- usleep(1000);
-
- reset_buffer(conn); // in case there is any garbage in the player
- // int not_first_time_out = 0;
-
- // quick check of parameters
- if (conn->input_bytes_per_frame == 0)
- die("conn->input_bytes_per_frame is zero!");
- do {
- int flush_is_delayed = 0;
- int flush_newly_requested = 0;
- int flush_newly_complete = 0;
- int play_newly_stopped = 0;
- // are we in in flush mode, or just about to leave it?
- debug_mutex_lock(&conn->flush_mutex, 25000, 1); // 25 ms is a long time to wait!
- uint32_t flushUntilSeq = conn->ap2_flush_until_sequence_number;
- uint32_t flushUntilTS = conn->ap2_flush_until_rtp_timestamp;
-
- int flush_request_active = 0;
- if (conn->ap2_flush_requested) {
- if (conn->ap2_flush_from_valid == 0) { // i.e. a flush from right now
- flush_request_active = 1;
- flush_is_delayed = 0;
- } else {
- flush_is_delayed = 1;
- flush_from_timestamp = conn->ap2_flush_from_rtp_timestamp;
- int32_t blocks_to_start_of_flush = conn->ap2_flush_from_sequence_number - seq_no;
- if (blocks_to_start_of_flush <= 0) {
- flush_request_active = 1;
- }
- }
- }
- // if we are in flush mode
- if (flush_request_active) {
- if (flush_requested == 0) {
- // here, a flush has been newly requested
-
- debug(2, "Flush requested.");
- if (conn->ap2_flush_from_valid) {
- debug(2, " fromTS: %u", conn->ap2_flush_from_rtp_timestamp);
- debug(2, " fromSeq: %u", conn->ap2_flush_from_sequence_number);
- debug(2, "--");
- }
- debug(2, " untilTS: %u", conn->ap2_flush_until_rtp_timestamp);
- debug(2, " untilSeq: %u", conn->ap2_flush_until_sequence_number);
- debug(2, "--");
- debug(2, " currentTS_Start: %u", pcm_buffer_read_point_rtptime);
- uint32_t fib = (pcm_buffer_occupancy - pcm_buffer_read_point) / 4;
- debug(2, " framesInBuffer: %u", fib);
- uint32_t endTS = fib + pcm_buffer_read_point_rtptime;
- debug(2, " currentTS_End: %u", endTS); // a frame occupies 4 bytes
- debug(2, " currentSeq: %u", seq_no);
-
- flush_newly_requested = 1;
- }
- // blocks_read to ensure seq_no is valid
- if ((blocks_read != 0) && (seq_no >= flushUntilSeq)) {
- // we have reached or overshot the flushUntilSeq block
- if (flushUntilSeq != seq_no)
- debug(2,
- "flush request ended with flushUntilSeq %u overshot at %u, flushUntilTS: %u, "
- "incoming timestamp: %u.",
- flushUntilSeq, seq_no, flushUntilTS, timestamp);
- else
- debug(2,
- "flush request ended with seqNo = flushUntilSeq at %u, flushUntilTS: %u, incoming timestamp: %u",
- flushUntilSeq, flushUntilTS, timestamp);
- conn->ap2_flush_requested = 0;
- flush_request_active = 0;
- flush_newly_requested = 0;
- }
- }
-
- // flush_requested = conn->ap2_flush_requested;
- if ((play_enabled) && (conn->ap2_play_enabled == 0)) {
- play_newly_stopped = 1;
- debug(2,"Play stopped.");
- pcm_buffer_read_point_rtptime_offset = 0;
- blocks_read_in_sequence = 0; // This may be set to 1 by a flush, so don't zero it during start.
- packets_played_in_this_sequence = 0;
- pcm_buffer_occupancy = 0;
- pcm_buffer_read_point = 0;
- }
-
- if ((play_enabled == 0) && (conn->ap2_play_enabled != 0)) {
- // play newly started
- debug(2,"Play started.");
- }
-
-
-
- if ((flush_requested) && (flush_request_active == 0)) {
- if (play_enabled)
- debug(1,"Flush completed while play_enabled is true.");
- flush_newly_complete = 1;
- blocks_read_in_sequence = 1; // the last block always (?) becomes the first block after the flush
- }
- flush_requested = flush_request_active;
-
- play_enabled = conn->ap2_play_enabled;
-
- debug_mutex_unlock(&conn->flush_mutex, 3);
-
- // do this outside the flush mutex
- if (flush_newly_complete) {
- debug(2, "Flush Complete.");
- }
-
- if (play_newly_stopped != 0)
- reset_buffer(conn); // stop play ASAP
-
- if (flush_newly_requested) {
- reset_buffer(conn);
-
- if (flush_is_delayed == 0) {
- debug(2, "Immediate Buffered Audio Flush Started.");
- // player_full_flush(conn);
- packets_played_in_this_sequence = 0;
- pcm_buffer_occupancy = 0;
- pcm_buffer_read_point = 0;
- } else {
- debug(2, "Delayed Buffered Audio Flush Started.");
- packets_played_in_this_sequence = 0;
- pcm_buffer_occupancy = 0;
- pcm_buffer_read_point = 0;
- }
- pcm_buffer_read_point_rtptime_offset = 0;
- }
-
- // now, if a flush is not requested, we can do the normal stuff
- if (flush_requested == 0) {
- // is there space in the player thread's buffer system?
- unsigned int player_buffer_size, player_buffer_occupancy;
- get_audio_buffer_size_and_occupancy(&player_buffer_size, &player_buffer_occupancy, conn);
- // debug(1,"player buffer size and occupancy: %u and %u", player_buffer_size,
- // player_buffer_occupancy);
- if (player_buffer_occupancy > ((requested_lead_time + 0.4) * conn->input_rate /
- 352)) { // must be greater than the lead time.
- // if there is enough stuff in the player's buffer, sleep for a while and try again
- debug(3, "sleep while full");
- usleep(20000); // wait for a while
- } else {
- if ((pcm_buffer_occupancy - pcm_buffer_read_point) >= (352 * conn->input_bytes_per_frame)) {
- new_buffer_needed = 0;
- // send a frame to the player if allowed
- // it it's way too late, it probably means that a new anchor time is needed
-
- /*
- uint32_t at_rtp = conn->reference_timestamp;
- at_rtp =
- at_rtp - (44100 * 10); // allow it to start a few seconds late, but not
- madly late int rtp_diff = pcm_buffer_read_point_rtptime - at_rtp;
- */
-
- if ((play_enabled) && (have_ptp_timing_information(conn) != 0)) {
- uint64_t buffer_should_be_time;
- if (frame_to_local_time(pcm_buffer_read_point_rtptime, &buffer_should_be_time, conn) ==
- 0) {
- int64_t lead_time = buffer_should_be_time - get_absolute_time_in_ns();
-
- // it seems that some garbage blocks can be left after the flush, so
- // only accept them if they have sensible lead times
- if ((lead_time < (int64_t)30000000000L) && (lead_time >= 0)) {
- // if it's the very first block (thus no priming needed)
- //if ((blocks_read == 1) || (blocks_read_in_sequence > 3)) {
- if ((lead_time >= (int64_t)(requested_lead_time * 1000000000L)) ||
- (packets_played_in_this_sequence != 0)) {
- if (packets_played_in_this_sequence == 0)
- debug(2,
- "Connection %d: buffered audio starting frame: %u, lead time: %f "
- "seconds.",
- conn->connection_number, pcm_buffer_read_point_rtptime,
- 0.000000001 * lead_time);
- // else {
- // if (expected_rtptime != pcm_buffer_read_point_rtptime)
- // debug(1,"actual rtptime is %u, expected rtptime is %u.",
- // pcm_buffer_read_point_rtptime, expected_rtptime);
- //}
- // expected_rtptime = pcm_buffer_read_point_rtptime + 352;
-
- // this is a diagnostic for introducing a timing error that will force the
- // processing chain to resync
- // clang-format off
- /*
- if ((not_first_time_out == 0) && (blocks_read >= 20)) {
- int timing_error = 150;
- debug(1, "Connection %d: Introduce a timing error of %d milliseconds.",
- conn->connection_number, timing_error);
- if (timing_error >= 0)
- pcm_buffer_read_point_rtptime += (conn->input_rate * timing_error) / 1000;
- else
- pcm_buffer_read_point_rtptime -= (conn->input_rate * (-timing_error)) / 1000;
- not_first_time_out = 1;
- }
- */
- // clang-format on
-
- // debug(1,"block timestamp: %u, packet timestamp: %u.", timestamp,
- // pcm_buffer_read_point_rtptime);
-
- int32_t timestamp_difference =
- pcm_buffer_read_point_rtptime - expected_pcm_buffer_read_point_rtptime;
- ;
- if (packets_played_in_this_sequence != 0) {
- if (timestamp_difference != 0)
- debug(
- 2,
- "Unexpected time difference between packets -- actual: %u, expected: %u, "
- "difference: %d. Packets played: %d. Blocks played since flush: %d. ",
- pcm_buffer_read_point_rtptime, expected_pcm_buffer_read_point_rtptime,
- timestamp_difference, packets_played_in_this_sequence,
- blocks_read_in_sequence);
- }
-
- // Very specific code to get around an apparent bug in AirPlay 2 from iOS 16 /
- // Ventura 13.0 It seems that the timestamp goes backwards by 2112 frames not
- // later than the 65th packet of 352 frames (64 * 352 = 22528 frames which is
- // exactly 22 blocks) So, if that happens, we'll add 2112 to the timstamp passed
- // to the player
-
- if ((timestamp_difference == -2112) && (packets_played_in_this_sequence <= 64)) {
- debug(1,
- "iOS 16.0 discontinuity detected with %d packets played in this "
- "sequence. Nothing done.",
- packets_played_in_this_sequence);
- // pcm_buffer_read_point_rtptime_offset = 2112; // this pretends the timestamps
- // after the discontinuity are 2112 frames later, but this just delays
- // everything by 2112 frames, pushing stuff out of sync, and i think you can
- // hear it.
- }
-
- // if it's not the very first block of AAC, but is from the first few blocks of a
- // new AAC sequence, it will contain noisy transients, so replace it with silence.
- if ((blocks_read_in_sequence <= 2) && (blocks_read_in_sequence != blocks_read)) {
- // debug(1,"Muting packet %u from block %u to avoid AAC transients because it's
- // not from a true starting block. Blocks_read is %" PRIu64 ".
- // blocks_read_in_sequence is %" PRIu64 ".", pcm_buffer_read_point_rtptime,
- // timestamp, blocks_read, blocks_read_in_sequence);
- conn->previous_random_number = generate_zero_frames(
- (char *)(pcm_buffer + pcm_buffer_read_point), 352, config.output_format,
- conn->enable_dither, conn->previous_random_number);
- }
-
- player_put_packet(
- 0, 0, pcm_buffer_read_point_rtptime + pcm_buffer_read_point_rtptime_offset,
- pcm_buffer + pcm_buffer_read_point, 352, conn);
- packets_played_in_this_sequence++;
- expected_pcm_buffer_read_point_rtptime = pcm_buffer_read_point_rtptime + 352;
- }
- // }
- } else {
- debug(3,
- "Dropping packet %u from block %u with out-of-range lead_time: %.3f seconds.",
- pcm_buffer_read_point_rtptime, seq_no, 0.000000001 * lead_time);
- expected_pcm_buffer_read_point_rtptime = pcm_buffer_read_point_rtptime + 352;
- }
-
- pcm_buffer_read_point_rtptime += 352;
- pcm_buffer_read_point += 352 * conn->input_bytes_per_frame;
- } else {
- debug(1, "frame to local time error");
- }
- } else {
- debug(3, "sleep until demand");
- usleep(20000); // wait before asking if play is enabled again
- }
- } else {
- // debug(1,"new buffer needed for buffer starting at %u because pcm_buffer_read_point
- // (frames) is %u and pcm_buffer_occupancy (frames) is %u.",
- // pcm_buffer_read_point_rtptime, pcm_buffer_read_point/conn->input_bytes_per_frame,
- // pcm_buffer_occupancy/conn->input_bytes_per_frame);
- new_buffer_needed = 1;
- if (pcm_buffer_read_point != 0) {
- // debug(1,"pcm_buffer_read_point (frames): %u, pcm_buffer_occupancy (frames): %u",
- // pcm_buffer_read_point/conn->input_bytes_per_frame,
- // pcm_buffer_occupancy/conn->input_bytes_per_frame); // if there is anything to move
- // down
- // to the front of the buffer, do it now;
- if ((pcm_buffer_occupancy - pcm_buffer_read_point) > 0) {
- // move the remaining frames down to the start of the buffer
- // debug(1,"move the remaining frames down to the start of the pcm_buffer");
- memcpy(pcm_buffer, pcm_buffer + pcm_buffer_read_point,
- pcm_buffer_occupancy - pcm_buffer_read_point);
- pcm_buffer_occupancy = pcm_buffer_occupancy - pcm_buffer_read_point;
- } else {
- // debug(1,"nothing to move to the front of the buffer");
- pcm_buffer_occupancy = 0;
- }
- pcm_buffer_read_point = 0;
- }
- }
- }
- }
- if ((flush_requested) || (new_buffer_needed)) {
-
- // debug(1,"pcm_buffer_read_point (frames): %u, pcm_buffer_occupancy (frames): %u",
- // pcm_buffer_read_point/conn->input_bytes_per_frame,
- // pcm_buffer_occupancy/conn->input_bytes_per_frame); ok, so here we know we need material
- // from the sender do we will get in a packet of audio
- uint16_t data_len;
- // here we read from the buffer that our thread has been reading
- size_t bytes_remaining_in_buffer;
- nread = lread_sized_block(buffered_audio, &data_len, sizeof(data_len),
- &bytes_remaining_in_buffer);
- if ((conn->ap2_audio_buffer_minimum_size < 0) ||
- (bytes_remaining_in_buffer < (size_t)conn->ap2_audio_buffer_minimum_size))
- conn->ap2_audio_buffer_minimum_size = bytes_remaining_in_buffer;
- if (nread < 0) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(1, "error in rtp_buffered_audio_processor %d: \"%s\". Could not recv a data_len .",
- errno, errorstring);
- // if ((config.diagnostic_drop_packet_fraction == 0.0) ||
- // (drand48() > config.diagnostic_drop_packet_fraction)) {
- }
- data_len = ntohs(data_len);
- // debug(1,"buffered audio packet of size %u detected.", data_len - 2);
- nread = lread_sized_block(buffered_audio, packet, data_len - 2, &bytes_remaining_in_buffer);
- if ((conn->ap2_audio_buffer_minimum_size < 0) ||
- (bytes_remaining_in_buffer < (size_t)conn->ap2_audio_buffer_minimum_size))
- conn->ap2_audio_buffer_minimum_size = bytes_remaining_in_buffer;
- // debug(1, "buffered audio packet of size %u received.", nread);
- if (nread < 0) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(1, "error in rtp_buffered_audio_processor %d: \"%s\". Could not recv a data packet.",
- errno, errorstring);
- } else if (nread > 0) {
- blocks_read++; // note, this doesn't mean they are valid audio blocks
- blocks_read_in_sequence++;
- // debug(1, "Realtime Audio Receiver Packet of length %d received.", nread);
- // now get hold of its various bits and pieces
- /*
- uint8_t version = (packet[0] & 0b11000000) >> 6;
- uint8_t padding = (packet[0] & 0b00100000) >> 5;
- uint8_t extension = (packet[0] & 0b00010000) >> 4;
- uint8_t csrc_count = packet[0] & 0b00001111;
- */
- previous_seq_no = seq_no;
- previous_seq_no++;
- seq_no = packet[1] * (1 << 16) + packet[2] * (1 << 8) + packet[3];
- if (previous_seq_no != seq_no) {
- debug(2, "block sequence number changed from expected %u to actual %u.", previous_seq_no,
- seq_no);
- }
- timestamp = nctohl(&packet[4]);
- // debug(1,"New block timestamp: %u.", timestamp);
- int32_t timestamp_difference = timestamp - expected_timestamp;
- if ((timestamp_difference != 0) && (expected_timesamp_is_reasonable != 0))
- debug(2,
- "Block with unexpected timestamp. Expected: %u, got: %u, difference: %d, "
- "blocks_read_in_sequence: %" PRIu64 ".",
- expected_timestamp, timestamp, timestamp_difference, blocks_read_in_sequence);
- expected_timestamp = timestamp;
- expected_timesamp_is_reasonable = 0; // must be validated each time by decoding the frame
-
- // debug(1, "immediately: block %u, rtptime %u", seq_no, timestamp);
- // uint32_t ssrc = nctohl(&packet[8]);
- // uint8_t marker = 0;
- // uint8_t payload_type = 0;
-
- // previous_seq_no = seq_no;
-
- // at this point, we can check if we can to flush this packet -- we won't have
- // to decipher it first
- // debug(1,"seq_no %u, timestamp %u", seq_no, timestamp);
-
- uint64_t local_should_be_time = 0;
- int have_time_information = frame_to_local_time(timestamp, &local_should_be_time, conn);
- int64_t local_lead_time = 0;
- int64_t requested_lead_time_ns = (int64_t)(requested_lead_time * 1000000000);
- // requested_lead_time_ns = (int64_t)(-300000000);
- // debug(1,"requested_lead_time_ns is actually %f milliseconds.", requested_lead_time_ns *
- // 1E-6);
- int outdated = 0;
- int too_soon_after_connection = 0;
- if (have_time_information == 0) {
- int64_t play_time_since_connection = local_should_be_time - conn->connection_start_time;
- int64_t time_since_connection = get_absolute_time_in_ns() - conn->connection_start_time;
- too_soon_after_connection =
- ((play_time_since_connection < 2000000000) && (time_since_connection < 2000000000));
- if (too_soon_after_connection)
- debug(3,
- "time_since_connection is %f milliseconds. play_time_since_connection is %f "
- "milliseconds. lead_time is %f milliseconds. too_soon_after_connection is %d.",
- time_since_connection * 1E-6, play_time_since_connection * 1E-6,
- (play_time_since_connection - time_since_connection) * 1E-6,
- too_soon_after_connection);
- local_lead_time = local_should_be_time - get_absolute_time_in_ns();
- // debug(1,"local_lead_time is actually %f milliseconds.", local_lead_time * 1E-6);
- outdated = (local_lead_time < requested_lead_time_ns);
- // if (outdated != 0)
- // debug(1,"Frame is outdated %d if lead_time %" PRId64 " is less than requested lead time
- // %" PRId64 " ns.", outdated, local_lead_time, requested_lead_time_ns);
- } else {
- debug(3, "Timing information not valid");
- }
-
- if ((flush_requested) && (seq_no >= flushUntilSeq)) {
- if ((have_time_information == 0) && (play_enabled)) {
- // play enabled will be off when this is a full flush and the anchor information is not
- // valid
- debug(2,
- "flush completed to seq: %u, flushUntilTS; %u with rtptime: %u, lead time: "
- "0x%" PRIx64 " nanoseconds, i.e. %f sec.",
- seq_no, flushUntilTS, timestamp, local_lead_time, local_lead_time * 0.000000001);
- } else {
- debug(2, "flush completed to seq: %u with rtptime: %u.", seq_no, timestamp);
- }
- }
-
- // if we are here because of a flush request, it must be the case that
- // flushing the pcm buffer wasn't enough, as the request would have been turned off by now
- // so we better indicate that the pcm buffer is empty and its contents invalid
-
- // also, if the incoming frame is outdated, set pcm_buffer_occupancy to 0;
- if ((flush_requested) || (outdated) || (too_soon_after_connection)) {
- pcm_buffer_occupancy = 0;
- }
-
- // decode the block and add it to or put it in the pcm buffer
-
- if (pcm_buffer_occupancy == 0) {
- // they should match and the read point should be zero
- // if ((blocks_read != 0) && (pcm_buffer_read_point_rtptime != timestamp)) {
- // debug(2, "set pcm_buffer_read_point_rtptime from %u to %u.",
- // pcm_buffer_read_point_rtptime, timestamp);
- pcm_buffer_read_point_rtptime = timestamp;
- pcm_buffer_read_point = 0;
- //}
- }
-
- if ((((flush_requested != 0) && (seq_no == flushUntilSeq)) ||
- ((flush_requested == 0) && (new_buffer_needed))) &&
- (too_soon_after_connection == 0)) {
- unsigned long long new_payload_length = 0;
- int response = -1; // guess that there is a problem
- if (conn->session_key != NULL) {
- unsigned char nonce[12];
- memset(nonce, 0, sizeof(nonce));
- memcpy(nonce + 4, packet + nread - 8,
- 8); // front-pad the 8-byte nonce received to get the 12-byte nonce expected
-
- // https://libsodium.gitbook.io/doc/secret-key_cryptography/aead/chacha20-poly1305/ietf_chacha20-poly1305_construction
- // Note: the eight-byte nonce must be front-padded out to 12 bytes.
-
- response = crypto_aead_chacha20poly1305_ietf_decrypt(
- m + 7, // m
- &new_payload_length, // mlen_p
- NULL, // nsec,
- packet + 12, // the ciphertext starts 12 bytes in and is followed by the MAC tag,
- nread - (8 + 12), // clen -- the last 8 bytes are the nonce
- packet + 4, // authenticated additional data
- 8, // authenticated additional data length
- nonce,
- conn->session_key); // *k
- if (response != 0)
- debug(1, "Error decrypting audio packet %u -- packet length %d.", seq_no, nread);
- } else {
- debug(2, "No session key, so the audio packet can not be deciphered -- skipped.");
- }
- if (response == 0) {
- // now pass it in to the regular processing chain
-
- unsigned long long max_int = INT_MAX; // put in the right format
- if (new_payload_length > max_int)
- debug(1, "Madly long payload length!");
- int payload_length = new_payload_length; // change from long long to int
- int aac_packet_length = payload_length + 7;
-
- // now, fill in the 7-byte ADTS information, which seems to be needed by the decoder
- // we made room for it in the front of the buffer
-
- addADTStoPacket(m, aac_packet_length);
-
- // now we are ready to send this to the decoder
-
- data_to_process = m;
- data_remaining = aac_packet_length;
- int ret = 0;
- // there can be more than one av packet (? terminology) in a block
- int frame_within_block = 0;
- while (data_remaining > 0) {
- if (decoded_frame == NULL) {
- decoded_frame = av_frame_alloc();
- if (decoded_frame == NULL)
- debug(1, "could not allocate av_frame");
- } else {
- ret = av_parser_parse2(codec_parser_context, codec_context, &pkt->data, &pkt->size,
- data_to_process, data_remaining, AV_NOPTS_VALUE,
- AV_NOPTS_VALUE, 0);
- if (ret < 0) {
- debug(1, "error while parsing deciphered audio packet.");
- } else {
- frame_within_block++;
- data_to_process += ret;
- data_remaining -= ret;
- // debug(1, "frame found");
- // now pass each packet to be decoded
- if (pkt->size) {
- // if (0) {
- if (pkt->size <= 7) { // no idea about this...
- debug(2, "malformed AAC packet skipped.");
- } else {
- ret = avcodec_send_packet(codec_context, pkt);
-
- if (ret < 0) {
- debug(1,
- "error sending frame %d of size %d to decoder, blocks_read: %u, "
- "blocks_read_in_sequence: %u.",
- frame_within_block, pkt->size, blocks_read, blocks_read_in_sequence);
- } else {
- while (ret >= 0) {
- ret = avcodec_receive_frame(codec_context, decoded_frame);
- if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
- break;
- else if (ret < 0) {
- debug(1, "error %d during decoding", ret);
- } else {
-#if LIBAVUTIL_VERSION_MAJOR >= 57
- av_samples_alloc(&pcm_audio, &dst_linesize,
- codec_context->ch_layout.nb_channels,
- decoded_frame->nb_samples, av_format, 1);
-#else
- av_samples_alloc(&pcm_audio, &dst_linesize, codec_context->channels,
- decoded_frame->nb_samples, av_format, 1);
-#endif
- // remember to free pcm_audio
- ret = swr_convert(swr, &pcm_audio, decoded_frame->nb_samples,
- (const uint8_t **)decoded_frame->extended_data,
- decoded_frame->nb_samples);
-#if LIBAVUTIL_VERSION_MAJOR >= 57
- dst_bufsize = av_samples_get_buffer_size(
- &dst_linesize, codec_context->ch_layout.nb_channels, ret, av_format,
- 1);
-#else
- dst_bufsize = av_samples_get_buffer_size(
- &dst_linesize, codec_context->channels, ret, av_format, 1);
-#endif
-
- // debug(1,"generated %d bytes of PCM", dst_bufsize);
- // copy the PCM audio into the PCM buffer.
- // make sure it's big enough first
-
- // also, check it if needs to be truncated but to an impending delayed
- // flush_is_delayed
- if (flush_is_delayed) {
- // see if the flush_from_timestamp is in the buffer
- int32_t samples_remaining =
- (flush_from_timestamp - pcm_buffer_read_point_rtptime);
- if ((samples_remaining > 0) &&
- ((samples_remaining * conn->input_bytes_per_frame) <
- dst_bufsize)) {
- debug(2,
- "samples remaining before flush: %d, number of samples %d. "
- "flushFromTS: %u, pcm_buffer_read_point_rtptime: %u.",
- samples_remaining, dst_bufsize / conn->input_bytes_per_frame,
- flush_from_timestamp, pcm_buffer_read_point_rtptime);
- dst_bufsize = samples_remaining * conn->input_bytes_per_frame;
- }
- }
- if ((pcm_buffer_size - pcm_buffer_occupancy) < dst_bufsize) {
- debug(1,
- "pcm_buffer_read_point (frames): %u, pcm_buffer_occupancy "
- "(frames): %u",
- pcm_buffer_read_point / conn->input_bytes_per_frame,
- pcm_buffer_occupancy / conn->input_bytes_per_frame);
- pcm_buffer_size = dst_bufsize + pcm_buffer_occupancy;
- debug(1, "fatal error! pcm buffer too small at %d bytes.",
- pcm_buffer_size);
- } else {
- memcpy(pcm_buffer + pcm_buffer_occupancy, pcm_audio, dst_bufsize);
- expected_timestamp += (dst_bufsize / conn->input_bytes_per_frame);
- expected_timesamp_is_reasonable = 1;
- pcm_buffer_occupancy += dst_bufsize;
- // debug(1,"frames added: pcm_buffer_read_point (frames): %u,
- // pcm_buffer_occupancy (frames): %u",
- // pcm_buffer_read_point/conn->input_bytes_per_frame,
- // pcm_buffer_occupancy/conn->input_bytes_per_frame);
- }
- // debug(1,"decoded %d samples", decoded_frame->nb_samples);
- // memcpy(sampleBuffer,outputBuffer16,dst_bufsize);
- av_freep(&pcm_audio);
- }
- }
- }
- }
- }
- }
- if (decoded_frame == NULL)
- debug(1, "decoded_frame is NULL");
- if (decoded_frame != NULL)
- av_frame_free(&decoded_frame);
- }
- }
-
- // revert the state of cancellability
- }
- } else {
- debug(3, "Dropping block %u with timestamp %u.", seq_no, timestamp);
- }
- } else {
- // nread is 0 -- the port has been closed
- debug(2, "buffered audio port closed!");
- finished = 1;
- }
- }
-
- } while (finished == 0);
- debug(2, "Buffered Audio Receiver RTP thread \"normal\" exit.");
- pthread_cleanup_pop(1); // deallocate the swr
- pthread_cleanup_pop(1); // deallocate the av_packet
- pthread_cleanup_pop(1); // av_parser_init_cleanup_handler
- pthread_cleanup_pop(1); // avcodec_open2_cleanup_handler
- pthread_cleanup_pop(1); // avcodec_alloc_context3_cleanup_handler
- pthread_cleanup_pop(1); // thread creation
- pthread_cleanup_pop(1); // buffer malloc
- pthread_cleanup_pop(1); // not_full_cv
- pthread_cleanup_pop(1); // not_empty_cv
- pthread_cleanup_pop(1); // mutex
- pthread_cleanup_pop(1); // descriptor malloc
- pthread_cleanup_pop(1); // pthread_t malloc
- pthread_cleanup_pop(1); // do the cleanup.
- pthread_exit(NULL);
-}
-
int frame_to_local_time(uint32_t timestamp, uint64_t *time, rtsp_conn_info *conn) {
if (conn->timing_type == ts_ptp)
return frame_to_ptp_local_time(timestamp, time, conn);
int local_time_to_frame(uint64_t time, uint32_t *frame, rtsp_conn_info *conn);
#ifdef CONFIG_AIRPLAY_2
-void *rtp_data_receiver(void *arg);
-void *rtp_event_receiver(void *arg);
+int have_ptp_timing_information(rtsp_conn_info *conn);
+int get_ptp_anchor_local_time_info(rtsp_conn_info *conn, uint32_t *anchorRTP,
+ uint64_t *anchorLocalTime);
void *rtp_ap2_control_receiver(void *arg);
void *rtp_realtime_audio_receiver(void *arg);
-void *rtp_buffered_audio_processor(void *arg);
void *rtp_ap2_timing_receiver(void *arg);
void *rtp_ap2_general_message_timing_receiver(void *arg);
void set_ptp_anchor_info(rtsp_conn_info *conn, uint64_t clock_id, uint32_t rtptime,
* RTSP protocol handler. This file is part of Shairport Sync
* Copyright (c) James Laird 2013
- * Modifications associated with audio synchronization, multithreading and
- * metadata handling copyright (c) Mike Brady 2014-2024
+ * Modifications, including those associated with audio synchronization, multithreading and
+ * metadata handling copyright (c) Mike Brady 2014--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
#ifdef CONFIG_OPENSSL
#include <openssl/evp.h>
+#include <openssl/md5.h>
#endif
#ifdef CONFIG_MBEDTLS
#include <polarssl/md5.h>
#endif
+#include "bonjour_strings.h"
#include "common.h"
#include "player.h"
#include "rtp.h"
#endif
#ifdef CONFIG_AIRPLAY_2
+#include "ap2_buffered_audio_processor.h"
+#include "ap2_event_receiver.h"
+#include "ap2_rc_event_receiver.h"
#include "pair_ap/pair.h"
#include "plist/plist.h"
-#include "plist_xml_strings.h"
+#include "plists/get_info_response.h"
#include "ptp-utilities.h"
#ifdef HAVE_LIBPLIST_GE_2_3_0
#endif
#endif
+#ifdef CONFIG_CONVOLUTION
+#include "FFTConvolver/convolver.h"
+#endif
+
#ifdef CONFIG_DBUS_INTERFACE
#include "dbus-service.h"
#endif
#include "mdns.h"
-
-// mDNS advertisement strings
-
-// Create these strings and then keep them updated.
-// When necessary, update the mDNS service records, using e.g. Avahi
-// from these sources.
-
-char *txt_records[64];
-char *secondary_txt_records[64];
-
-char firmware_version[64];
-char ap1_featuresString[64];
-char pkString[128];
-#ifdef CONFIG_AIRPLAY_2
-char deviceIdString[64];
-char featuresString[64];
-char statusflagsString[32];
-char piString[128];
-char gidString[128];
-#endif
+#include "utilities/network_utilities.h"
#define METADATA_SNDBUF (4 * 1024 * 1024)
enum rtsp_read_request_response {
rtsp_read_request_response_ok,
+ rtsp_read_request_response_pending,
rtsp_read_request_response_immediate_shutdown_requested,
rtsp_read_request_response_bad_packet,
rtsp_read_request_response_channel_closed,
rtsp_read_request_response_error
};
-static int nconns = 0; // i.e. the size if the conns array
rtsp_conn_info *principal_conn;
rtsp_conn_info **conns;
return response;
}
-static void pkString_make(char *str, size_t str_size, const char *device_id) {
- uint8_t public_key[32];
- if (str_size < 2 * sizeof(public_key) + 1) {
- warn("Insufficient string size");
- str[0] = '\0';
- return;
- }
- pair_public_key_get(PAIR_SERVER_HOMEKIT, public_key, device_id);
- char *ptr = str;
- for (size_t i = 0; i < sizeof(public_key); i++)
- ptr += sprintf(ptr, "%02x", public_key[i]);
-}
-#endif
-
-#ifdef CONFIG_AIRPLAY_2
-void build_bonjour_strings(rtsp_conn_info *conn) {
-#else
-void build_bonjour_strings(__attribute((unused)) rtsp_conn_info *conn) {
-#endif
-
- int entry_number = 0;
-
- // make up a firmware version
-#ifdef CONFIG_USE_GIT_VERSION_STRING
- if (git_version_string[0] != '\0')
- snprintf(firmware_version, sizeof(firmware_version), "fv=%s", git_version_string);
- else
-#endif
- snprintf(firmware_version, sizeof(firmware_version), "fv=%s", PACKAGE_VERSION);
-
-#ifdef CONFIG_AIRPLAY_2
- uint64_t features_hi = config.airplay_features;
- features_hi = (features_hi >> 32) & 0xffffffff;
- uint64_t features_lo = config.airplay_features;
- features_lo = features_lo & 0xffffffff;
- snprintf(ap1_featuresString, sizeof(ap1_featuresString), "ft=0x%" PRIX64 ",0x%" PRIX64 "",
- features_lo, features_hi);
- snprintf(pkString, sizeof(pkString), "pk=");
- pkString_make(pkString + strlen("pk="), sizeof(pkString) - strlen("pk="),
- config.airplay_device_id);
- txt_records[entry_number++] = "cn=0,1";
- txt_records[entry_number++] = "da=true";
- txt_records[entry_number++] = "et=0,1";
- txt_records[entry_number++] = ap1_featuresString;
- txt_records[entry_number++] = firmware_version;
-#ifdef CONFIG_METADATA
- if (config.get_coverart == 0)
- txt_records[entry_number++] = "md=0,2";
- else
- txt_records[entry_number++] = "md=0,1,2";
#endif
- txt_records[entry_number++] = "am=Shairport Sync";
- txt_records[entry_number++] = "sf=0x4";
- txt_records[entry_number++] = "tp=UDP";
- txt_records[entry_number++] = "vn=65537";
- txt_records[entry_number++] = "vs=366.0";
- txt_records[entry_number++] = pkString;
- txt_records[entry_number++] = NULL;
-
-#else
- // here, just replicate what happens in mdns.h when using those #defines
- txt_records[entry_number++] = "sf=0x4";
- txt_records[entry_number++] = firmware_version;
- txt_records[entry_number++] = "am=ShairportSync";
- txt_records[entry_number++] = "vs=105.1";
- txt_records[entry_number++] = "tp=TCP,UDP";
- txt_records[entry_number++] = "vn=65537";
-#ifdef CONFIG_METADATA
- if (config.get_coverart == 0)
- txt_records[entry_number++] = "md=0,2";
- else
- txt_records[entry_number++] = "md=0,1,2";
-#endif
- txt_records[entry_number++] = "ss=16";
- txt_records[entry_number++] = "sr=44100";
- txt_records[entry_number++] = "da=true";
- txt_records[entry_number++] = "sv=false";
- txt_records[entry_number++] = "et=0,1";
- txt_records[entry_number++] = "ek=1";
- txt_records[entry_number++] = "cn=0,1";
- txt_records[entry_number++] = "ch=2";
- txt_records[entry_number++] = "txtvers=1";
- if (config.password == 0)
- txt_records[entry_number++] = "pw=false";
- else
- txt_records[entry_number++] = "pw=true";
- txt_records[entry_number++] = NULL;
-#endif
-
-#ifdef CONFIG_AIRPLAY_2
- // make up a secondary set of text records
- entry_number = 0;
-
- secondary_txt_records[entry_number++] = "srcvers=366.0";
- snprintf(deviceIdString, sizeof(deviceIdString), "deviceid=%s", config.airplay_device_id);
- secondary_txt_records[entry_number++] = deviceIdString;
- snprintf(featuresString, sizeof(featuresString), "features=0x%" PRIX64 ",0x%" PRIX64 "",
- features_lo, features_hi);
- secondary_txt_records[entry_number++] = featuresString;
- snprintf(statusflagsString, sizeof(statusflagsString), "flags=0x%" PRIX32,
- config.airplay_statusflags);
-
- secondary_txt_records[entry_number++] = statusflagsString;
- secondary_txt_records[entry_number++] = "protovers=1.1";
- secondary_txt_records[entry_number++] = "acl=0";
- secondary_txt_records[entry_number++] = "rsf=0x0";
- secondary_txt_records[entry_number++] = firmware_version;
- secondary_txt_records[entry_number++] = "model=Shairport Sync";
- snprintf(piString, sizeof(piString), "pi=%s", config.airplay_pi);
- secondary_txt_records[entry_number++] = piString;
- if ((conn != NULL) && (conn->airplay_gid != 0)) {
- snprintf(gidString, sizeof(gidString), "gid=%s", conn->airplay_gid);
- } else {
- snprintf(gidString, sizeof(gidString), "gid=%s", config.airplay_pi);
- }
- secondary_txt_records[entry_number++] = gidString;
- if ((conn != NULL) && (conn->groupContainsGroupLeader != 0))
- secondary_txt_records[entry_number++] = "gcgl=1";
- else
- secondary_txt_records[entry_number++] = "gcgl=0";
- if ((conn != NULL) && (conn->airplay_gid != 0)) // if it's in a group
- secondary_txt_records[entry_number++] = "isGroupLeader=0";
- secondary_txt_records[entry_number++] = pkString;
- secondary_txt_records[entry_number++] = NULL;
-#endif
-}
#ifdef CONFIG_METADATA
typedef struct {
// debug(2, "destroying signals and locks done");
}
-int send_metadata(uint32_t type, uint32_t code, char *data, uint32_t length, rtsp_message *carrier,
- int block);
+int send_metadata(const uint32_t type, const uint32_t code, const char *data, const uint32_t length,
+ rtsp_message *carrier, int block);
-int send_ssnc_metadata(uint32_t code, char *data, uint32_t length, int block) {
+int send_ssnc_metadata(const uint32_t code, const char *data, const uint32_t length,
+ const int block) {
return send_metadata('ssnc', code, data, length, NULL, block);
}
if (rc == EBUSY)
return EBUSY;
} else
- rc = pthread_mutex_lock(&the_queue->pc_queue_lock);
+ rc = debug_mutex_lock(&the_queue->pc_queue_lock, 50000, 1);
if (rc)
debug(1, "Error %d (\"%s\") locking for pc_queue_add_item. Block is %d.", rc, strerror(rc),
block);
int pc_queue_get_item(pc_queue *the_queue, void *the_stuff) {
int rc;
if (the_queue) {
- rc = pthread_mutex_lock(&the_queue->pc_queue_lock);
+ rc = debug_mutex_lock(&the_queue->pc_queue_lock, 50000, 1);
if (rc)
debug(1, "metadata queue \"%s\": error locking for pc_queue_get_item", the_queue->name);
pthread_cleanup_push(pc_queue_cleanup_handler, (void *)the_queue);
#endif
-// note: connection numbers start at 1, so an except_this_one value of zero means "all threads"
-void cancel_all_RTSP_threads(airplay_stream_c stream_category, int except_this_one) {
- // if the stream category is unspecified_stream_category
- // all categories are elegible for cancellation
- // otherwise just the category itself
- debug_mutex_lock(&conns_lock, 1000000, 3);
- int i;
- for (i = 0; i < nconns; i++) {
- if ((conns[i] != NULL) && (conns[i]->running != 0) &&
- (conns[i]->connection_number != except_this_one) &&
- ((stream_category == unspecified_stream_category) ||
- (stream_category == conns[i]->airplay_stream_category))) {
- pthread_cancel(conns[i]->thread);
- debug(1, "Connection %d: cancelled.", conns[i]->connection_number);
- } else if (conns[i] != NULL) {
- debug(1, "Connection %d: not cancelled.", conns[i]->connection_number);
- }
- }
- for (i = 0; i < nconns; i++) {
- if ((conns[i] != NULL) && (conns[i]->running != 0) &&
- (conns[i]->connection_number != except_this_one) &&
- ((stream_category == unspecified_stream_category) ||
- (stream_category == conns[i]->airplay_stream_category))) {
- pthread_join(conns[i]->thread, NULL);
- debug(1, "Connection %d: joined.", conns[i]->connection_number);
-
- free(conns[i]);
- conns[i] = NULL;
- }
- }
- debug_mutex_unlock(&conns_lock, 3);
-}
-
// The principal_conn variable points to the connection that
// controls the mDNS status and flags and that is potentially
// in control of the playing subsystem to output audio to a backend
void release_play_lock(rtsp_conn_info *conn) {
// no need thread cancellation points in here
pthread_rwlock_wrlock(&principal_conn_lock);
- if (principal_conn == conn) { // if we have the player
- if (conn != NULL)
- debug(2, "Connection %d: principal_conn released.", conn->connection_number);
+ if ((principal_conn == conn) || (conn == NULL)) { // if we have the player
+ if (principal_conn != NULL) {
+#ifdef CONFIG_AIRPLAY_2
+ config.airplay_statusflags &= (0xffffffff - (1 << 11)); // DeviceSupportsRelay
+ if (principal_conn->airplay_gid) {
+ free(principal_conn->airplay_gid);
+ principal_conn->airplay_gid = NULL; // stop using the client's GID as our GID.
+ }
+ build_bonjour_strings(principal_conn);
+ mdns_update(NULL, secondary_txt_records);
+#endif
+ debug(2, "Connection %d: %s released principal_conn.", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ }
principal_conn = NULL; // let it go
}
pthread_rwlock_unlock(&principal_conn_lock);
// stop the current principal_conn from playing if necessary and make conn the principal_conn.
-int get_play_lock(rtsp_conn_info *conn, int allow_session_interruption) {
- int response = 0;
- pthread_rwlock_wrlock(&principal_conn_lock);
- pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
- if (principal_conn != NULL)
- debug(2, "Connection %d: is requested to relinquish principal_conn.",
- principal_conn->connection_number);
- if (conn != NULL)
- debug(2, "Connection %d: request to acquire principal_conn.", conn->connection_number);
- // returns -1 if it failed, 0 if it succeeded and 1 if it succeeded but
- // interrupted an existing session
- if (principal_conn == NULL) {
- principal_conn = conn;
- } else if (principal_conn == conn) {
- if (conn != NULL)
- warn("Connection %d: request to re-acquire principal_conn!",
- principal_conn->connection_number);
- } else if (allow_session_interruption != 0) {
- rtsp_conn_info *previous_principal_conn = principal_conn;
- // important -- demote the principal conn before cancelling it
- principal_conn = NULL;
- pthread_cancel(previous_principal_conn->thread);
- // the previous principal thread will block on the principal conn lock when exiting
- // so it's important not to wait for it here, e.g. don't put in a pthread_join here.
- // threads are garbage-collected later
- usleep(1000000); // don't know why this delay is needed.
- principal_conn = conn; // make the conn the new principal_conn
- response = 1; // interrupted an existing session
+play_lock_r get_play_lock(rtsp_conn_info *conn, int allow_session_interruption) {
+ play_lock_r response = play_lock_aquisition_failed;
+ if (conn != NULL) {
+ debug(2, "Connection %d: %s get_play_lock with allow_session_interruption of %d.",
+ conn->connection_number, get_category_string(conn->airplay_stream_category),
+ allow_session_interruption);
+
+ pthread_rwlock_wrlock(&principal_conn_lock);
+ pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
+
+ if (principal_conn == conn) {
+ debug(2, "Connection %d: %s already has principal_conn.", principal_conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ } else {
+ if (principal_conn != NULL)
+ debug(2, "Connection %d: %s is requested to relinquish principal_conn.",
+ principal_conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ if (conn != NULL)
+ debug(2, "Connection %d: %s request to acquire principal_conn.", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ }
+
+ if (principal_conn == conn) {
+ if (conn == NULL)
+ response = play_lock_already_released;
+ else
+ response = play_lock_already_acquired;
+ } else if (principal_conn == NULL) {
+ // already unlocked, and principal conn not NULL
+ principal_conn = conn;
+#ifdef CONFIG_AIRPLAY_2
+ config.airplay_statusflags |= (1 << 11); // DeviceSupportsRelay
+#endif
+ response = play_lock_acquired_without_breaking_in;
+ } else if (allow_session_interruption != 0) { // principal conn not NULL,
+ // important -- demote the principal conn before cancelling it
+ if (principal_conn->fd > 0) {
+ debug(2,
+ "Connection %d: %s is acquiring play_lock and is forcing termination of Connection "
+ "%d %s. Closing "
+ "RTSP connection socket %d: "
+ "from %s:%u to self at "
+ "%s:%u.",
+ conn->connection_number, get_category_string(conn->airplay_stream_category),
+ principal_conn->connection_number,
+ get_category_string(principal_conn->airplay_stream_category), principal_conn->fd,
+ principal_conn->client_ip_string, principal_conn->client_rtsp_port,
+ principal_conn->self_ip_string, principal_conn->self_rtsp_port);
+ close(principal_conn->fd);
+ // principal_conn->fd = 0;
+ }
+ rtsp_conn_info *previous_principal_conn = principal_conn;
+ principal_conn = conn; // make the conn the new principal_conn
+ pthread_cancel(previous_principal_conn->thread); // cancel the previous one...
+
+ if (principal_conn == NULL) {
+#ifdef CONFIG_AIRPLAY_2
+ config.airplay_statusflags &= (0xffffffff - (1 << 11)); // DeviceSupportsRelay
+ if (conn->airplay_gid) {
+ free(conn->airplay_gid);
+ conn->airplay_gid = NULL; // stop using the client's GID as our GID.
+ }
+ build_bonjour_strings(conn);
+ mdns_update(NULL, secondary_txt_records);
+#endif
+ response = play_lock_released;
+ } else {
+#ifdef CONFIG_AIRPLAY_2
+ config.airplay_statusflags |= (1 << 11); // DeviceSupportsRelay
+#endif
+ response = play_lock_acquired_by_breaking_in;
+ }
+ // usleep(1000000); // don't know why this delay is needed.
+ }
+ if ((principal_conn != NULL) && (response != play_lock_already_acquired))
+ debug(2, "Connection %d: %s has principal_conn.", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ pthread_cleanup_pop(1); // release the principal_conn lock
+
} else {
- response = -1; // can't get it...
+ debug(1, "get_play_lock must have a non-NULL conn.");
}
- if (principal_conn != NULL)
- debug(3, "Connection %d has principal_conn.", principal_conn->connection_number);
- pthread_cleanup_pop(1); // release the principal_conn lock
return response;
}
+// keep track of the threads we have spawned so we can join() them
+static int nconns = 0;
static void track_thread(rtsp_conn_info *conn) {
debug_mutex_lock(&conns_lock, 1000000, 3);
// look for an empty slot first
debug_mutex_unlock(&conns_lock, 3);
}
+// note: connection numbers start at 1, so an except_this_one value of zero means "all threads"
+void cancel_all_RTSP_threads(airplay_stream_c stream_category, int except_this_one) {
+ // if the stream category is unspecified_stream_category
+ // all categories are eligible for cancellation
+ // otherwise just the category itself
+ debug_mutex_lock(&conns_lock, 1000000, 3);
+ int i;
+ for (i = 0; i < nconns; i++) {
+ if ((conns[i] != NULL) && (conns[i]->running != 0) &&
+ (conns[i]->connection_number != except_this_one) &&
+ ((conns[i]->airplay_stream_category == stream_category) ||
+ (stream_category == unspecified_stream_category))) {
+ pthread_cancel(conns[i]->thread);
+ debug(2, "Connection %d: %s cancelled.", conns[i]->connection_number,
+ get_category_string(conns[i]->airplay_stream_category));
+ }
+ }
+ for (i = 0; i < nconns; i++) {
+ if ((conns[i] != NULL) && (conns[i]->connection_number != except_this_one) &&
+ ((conns[i]->airplay_stream_category == stream_category) ||
+ (stream_category == unspecified_stream_category))) {
+ debug(2, "Connection %d: %s joining....", conns[i]->connection_number,
+ get_category_string(conns[i]->airplay_stream_category));
+ pthread_join(conns[i]->thread, NULL);
+ debug(2, "Connection %d: %s joined.", conns[i]->connection_number,
+ get_category_string(conns[i]->airplay_stream_category));
+ free(conns[i]);
+ conns[i] = NULL;
+ }
+ }
+ debug_mutex_unlock(&conns_lock, 3);
+}
+
int old_connection_count = -1;
void cleanup_threads(void) {
debug_mutex_lock(&conns_lock, 1000000, 3);
for (i = 0; i < nconns; i++) {
if ((conns[i] != NULL) && (conns[i]->running == 0)) {
- debug(2, "Found RTSP connection thread %d in a non-running state.",
+ debug(3, "found RTSP connection thread %d in a non-running state.",
conns[i]->connection_number);
pthread_join(conns[i]->thread, &retval);
- debug(2, "Connection %d: deleted.", conns[i]->connection_number);
+ debug(3, "Connection %d: deleted in cleanup.", conns[i]->connection_number);
free(conns[i]);
conns[i] = NULL;
}
if (old_connection_count != connection_count) {
if (connection_count == 0) {
- debug(2, "No active connections.");
+ debug(3, "No active connections.");
} else if (connection_count == 1)
- debug(2, "One active connection.");
+ debug(3, "One active connection.");
else
- debug(2, "%d active connections.", connection_count);
+ debug(3, "%d active connections.", connection_count);
old_connection_count = connection_count;
}
debug(3, "Airplay Volume for new connections is %.6f.", suggested_volume(NULL));
}
void msg_retain(rtsp_message *msg) {
- int rc = pthread_mutex_lock(&reference_counter_lock);
+ int rc = debug_mutex_lock(&reference_counter_lock, 500000, 1);
if (rc)
- debug(1, "Error %d locking reference counter lock");
+ debug(1, "Error %d locking reference counter lock", rc);
if (msg > (rtsp_message *)0x00010000) {
msg->referenceCount++;
debug(3, "msg_free increment reference counter message %d to %d.", msg->index_number,
// debug(1,"msg_retain -- item %d reference count %d.", msg->index_number, msg->referenceCount);
rc = pthread_mutex_unlock(&reference_counter_lock);
if (rc)
- debug(1, "Error %d unlocking reference counter lock");
+ debug(1, "Error %d unlocking reference counter lock", rc);
} else {
- debug(1, "invalid rtsp_message pointer 0x%x passed to retain", (uintptr_t)msg);
+ debug(1, "invalid rtsp_message pointer 0x%" PRIxPTR " passed to retain", (uintptr_t)msg);
}
}
rtsp_message *msg_init(void) {
// no thread cancellation points here
- int rc = pthread_mutex_lock(&reference_counter_lock);
+ int rc = debug_mutex_lock(&reference_counter_lock, 500000, 1);
if (rc)
debug(1, "Error %d locking reference counter lock", rc);
} else if (*msgh != NULL) {
debug(1,
"msg_free: error attempting to free an allocated but already-freed rtsp_message, number "
- "%d.",
+ "%" PRIxPTR ".",
(uintptr_t)*msgh);
}
debug_mutex_unlock(&reference_counter_lock, 0);
char *sp, *p;
sp = NULL; // this is to quieten a compiler warning
- debug(3, "RTSP Message Received: \"%s\".", line);
+ debug(3, "RTSP/HTTP Message Received: \"%s\".", line);
p = strtok_r(line, " ", &sp);
if (!p)
p = strtok_r(NULL, " ", &sp);
if (!p)
goto fail;
- if (strcmp(p, "RTSP/1.0"))
+ if ((strcmp(p, "RTSP/1.0") != 0) && (strcmp(p, "HTTP/1.1") != 0)) {
+ debug(1, "Problem with Message: \"%s\"", p);
goto fail;
+ }
return -1;
}
}
fail:
- debug(3, "msg_handle_line fail");
+ debug(1, "msg_handle_line fail");
msg_free(pmsg);
*pmsg = NULL;
return 0;
#ifdef CONFIG_AIRPLAY_2
-void add_flush_request(int flushNow, uint32_t flushFromSeq, uint32_t flushFromTS,
- uint32_t flushUntilSeq, uint32_t flushUntilTS, rtsp_conn_info *conn) {
- // immediate flush requests are added sequentially. Don't know how more than one could arise, TBH
- flush_request_t **t = &conn->flush_requests;
- int done = 0;
- do {
- flush_request_t *u = *t;
- if ((u == NULL) || ((u->flushNow == 0) && (flushNow != 0)) ||
- (flushFromSeq < u->flushFromSeq) ||
- ((flushFromSeq == u->flushFromSeq) && (flushFromTS < u->flushFromTS))) {
- flush_request_t *n = (flush_request_t *)calloc(1, sizeof(flush_request_t));
- n->flushNow = flushNow;
- n->flushFromSeq = flushFromSeq;
- n->flushFromTS = flushFromTS;
- n->flushUntilSeq = flushUntilSeq;
- n->flushUntilTS = flushUntilTS;
- n->next = u;
- *t = n;
- done = 1;
- } else {
- t = &u->next;
- }
- } while (done == 0);
-}
-
-void display_all_flush_requests(rtsp_conn_info *conn) {
- if (conn->flush_requests == NULL) {
- debug(1, "No flush requests.");
- } else {
- flush_request_t *t = conn->flush_requests;
- do {
- if (t->flushNow) {
- debug(1, "immediate flush to untilSeq: %u, untilTS: %u.", t->flushUntilSeq,
- t->flushUntilTS);
- } else {
- debug(1, "fromSeq: %u, fromTS: %u, to untilSeq: %u, untilTS: %u.", t->flushFromSeq,
- t->flushFromTS, t->flushUntilSeq, t->flushUntilTS);
- }
- t = t->next;
- } while (t != NULL);
- }
-}
-
int rtsp_message_contains_plist(rtsp_message *message) {
int reply = 0; // assume there is no plist in the message
if ((message->contentlength >= strlen("bplist00")) &&
return the_plist;
}
-char *plist_content(plist_t the_plist) {
+char *plist_as_xml_text(plist_t the_plist) {
// caller must free the returned character buffer
// convert it to xml format
uint32_t size;
memcpy(reply, plist_out, size);
reply[size] = '\0';
}
- if (the_plist)
- plist_free(the_plist);
if (plist_out)
free(plist_out);
return reply;
char *rtsp_plist_content(rtsp_message *message) {
char *reply = NULL;
// first, check if it has binary plist content
- if (rtsp_message_contains_plist(message)) {
- // get the plist from the content
-
- plist_t the_plist = plist_from_rtsp_content(message);
-
- // convert it to xml format
- uint32_t size;
- char *plist_out = NULL;
- plist_to_xml(the_plist, &plist_out, &size);
-
- // put it into a NUL-terminated string
- reply = malloc(size + 1);
- if (reply) {
- memcpy(reply, plist_out, size);
- reply[size] = '\0';
- }
- if (the_plist)
- plist_free(the_plist);
- if (plist_out)
- free(plist_out);
- }
+ if (rtsp_message_contains_plist(message))
+ reply = plist_as_xml_text(plist_from_rtsp_content(message));
return reply;
}
void _debug_log_rtsp_message(const char *filename, const int linenumber, int level, char *prompt,
rtsp_message *message) {
- if (level > debuglev)
+ if (level > debug_level())
return;
if ((prompt) && (*prompt != '\0')) // okay to pass NULL or an empty list...
- _debug(filename, linenumber, level, prompt);
+ _debug(filename, linenumber, level, "%s", prompt);
_debug_print_msg_headers(filename, linenumber, level, message);
#ifdef CONFIG_AIRPLAY_2
char *plist_content = rtsp_plist_content(message);
#ifdef CONFIG_AIRPLAY_2
static void buf_add(sized_buffer *buf, uint8_t *in, size_t in_len) {
if (buf->length + in_len > buf->size) {
- buf->size = buf->length + in_len + 2048; // Extra legroom to avoid future memcpy's
- uint8_t *new = malloc(buf->size);
- memcpy(new, buf->data, buf->length);
- free(buf->data);
- buf->data = new;
+ buf->size = buf->length + in_len + 2048; // Extra headroom to avoid future memcpy's
+ buf->data = realloc(buf->data, buf->size); // NOTE(review): realloc failure is unhandled — the memcpy below would dereference NULL
}
memcpy(buf->data + buf->length, in, in_len);
buf->length += in_len;
return bytes;
}
-static ssize_t read_encrypted(int fd, pair_cipher_bundle *ctx, void *buf, size_t count) {
- uint8_t in[4096];
- uint8_t *plain;
- size_t plain_len;
-
+ssize_t read_encrypted(int fd, pair_cipher_bundle *ctx, void *buf, size_t count) {
+ ssize_t response = 0;
// If there is leftover decoded content from the last pass just return that
if (ctx->plaintext_read_buffer.length > 0) {
- return buf_remove(&ctx->plaintext_read_buffer, buf, count);
- }
+ response = buf_remove(&ctx->plaintext_read_buffer, buf, count);
+ } else {
- do {
- ssize_t got = read(fd, in, sizeof(in));
- if (got <= 0)
- return got;
- buf_add(&ctx->encrypted_read_buffer, in, got);
+ // Otherwise read stuff in...
+ uint8_t in[4096];
+ uint8_t *plain = NULL; // may be allocated and reallocated by pair_decrypt
+ pthread_cleanup_push(malloc_cleanup, &plain);
+ size_t plain_len = 0;
+ do {
+ response = read(fd, in, sizeof(in));
+ if (response > 0) {
+ buf_add(&ctx->encrypted_read_buffer, in, response);
+ ssize_t consumed = pair_decrypt(&plain, &plain_len, ctx->encrypted_read_buffer.data,
+ ctx->encrypted_read_buffer.length, ctx->cipher_ctx);
+ if (consumed < 0) {
+ debug(1, "read_encrypted: abnormal exit from pair_decrypt: %zd.", consumed);
+ response = -1;
+ } else {
+ buf_drain(&ctx->encrypted_read_buffer, consumed);
+ }
+ }
+ } while ((plain_len == 0) && (response > 0));
- ssize_t consumed = pair_decrypt(&plain, &plain_len, ctx->encrypted_read_buffer.data,
- ctx->encrypted_read_buffer.length, ctx->cipher_ctx);
- if (consumed < 0)
- return -1;
- buf_drain(&ctx->encrypted_read_buffer, consumed);
- } while (plain_len == 0);
-
- // Fast path, avoids some memcpy + allocs in case of the normal, small message
- /* if (ctx->plaintext_read_buffer.len == 0 && plain_len < count) {
- memcpy(buf, plain, plain_len);
- free(plain);
- return plain_len;
+ if (response >= 0) {
+ buf_add(&ctx->plaintext_read_buffer, plain, plain_len);
+ response = buf_remove(&ctx->plaintext_read_buffer, buf, count);
}
- */
- buf_add(&ctx->plaintext_read_buffer, plain, plain_len);
- free(plain);
+ pthread_cleanup_pop(1);
+ }
- return buf_remove(&ctx->plaintext_read_buffer, buf, count);
+ return response;
}
-static ssize_t write_encrypted(int fd, pair_cipher_bundle *ctx, const void *buf, size_t count) {
+ssize_t write_encrypted(int fd, pair_cipher_bundle *ctx, const void *buf, size_t count) {
uint8_t *encrypted;
size_t encrypted_len;
ssize_t ret = pair_encrypt(&encrypted, &encrypted_len, buf, count, ctx->cipher_ctx);
if (ret < 0) {
- debug(1, pair_cipher_errmsg(ctx->cipher_ctx));
+ debug(1, "%s", pair_cipher_errmsg(ctx->cipher_ctx));
return -1;
}
size_t remain = encrypted_len;
+ // debug(1, "write to the \"%s\" channel", ctx->description);
+ // debug_print_buffer(1, (void *)buf, count);
+ // debug(1, "write encrypted:");
+ // debug_print_buffer(1, encrypted, encrypted_len);
while (remain > 0) {
ssize_t wrote = write(fd, encrypted + (encrypted_len - remain), remain);
if (wrote <= 0) {
return count;
}
-/*
-static ssize_t write_encrypted(rtsp_conn_info *conn, const void *buf, size_t count) {
- uint8_t *encrypted;
- size_t encrypted_len;
-
- ssize_t ret =
- pair_encrypt(&encrypted, &encrypted_len, buf, count, conn->ap2_pairing_context.cipher_ctx);
- if (ret < 0) {
- debug(1, pair_cipher_errmsg(conn->ap2_pairing_context.cipher_ctx));
- return -1;
- }
-
- size_t remain = encrypted_len;
- while (remain > 0) {
- ssize_t wrote = write(conn->fd, encrypted + (encrypted_len - remain), remain);
- if (wrote <= 0) {
- free(encrypted);
- return wrote;
- }
- remain -= wrote;
- }
- free(encrypted);
- return count;
-}
-*/
#endif
-ssize_t timed_read_from_rtsp_connection(rtsp_conn_info *conn, uint64_t wait_time, void *buf,
- size_t count) {
- // note: a wait time of zero means wait forever
+ssize_t read_from_rtsp_connection(rtsp_conn_info *conn, void *buf, size_t count) {
+ if (count == 0)
+ debug(1, "asking to read zero bytes!");
+
ssize_t result = 0; // closed
if (conn->fd > 0) {
- int64_t remaining_time = 0;
- uint64_t time_to_wait_to = get_absolute_time_in_ns();
- ;
- time_to_wait_to = time_to_wait_to + wait_time;
-
- int flags = 1;
- if (setsockopt(conn->fd, SOL_SOCKET, SO_KEEPALIVE, (void *)&flags, sizeof(flags))) {
- debug(1, "can't enable keepalive checking on the RTSP socket");
- }
+#ifdef CONFIG_AIRPLAY_2
+ if (conn->ap2_pairing_context.control_cipher_bundle.cipher_ctx) {
+ conn->ap2_pairing_context.control_cipher_bundle.is_encrypted = 1;
+ result =
+ read_encrypted(conn->fd, &conn->ap2_pairing_context.control_cipher_bundle, buf, count);
- // remaining_time will be zero if wait_time is zero
- if (wait_time != 0) {
- remaining_time = time_to_wait_to - get_absolute_time_in_ns();
+ } else {
+ result = read(conn->fd, buf, count);
}
- do {
- struct timeval tv;
- tv.tv_sec = remaining_time / 1000000000; // seconds
- tv.tv_usec = (remaining_time % 1000000000) / 1000; // microseconds
- if (setsockopt(conn->fd, SOL_SOCKET, SO_RCVTIMEO, (const char *)&tv, sizeof tv) != 0) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(1, "could not set time limit on timed_read_from_rtsp_connection -- error %d \"%s\".",
- errno, errorstring);
- }
-
-#ifdef CONFIG_AIRPLAY_2
- if (conn->ap2_pairing_context.control_cipher_bundle.cipher_ctx) {
- conn->ap2_pairing_context.control_cipher_bundle.is_encrypted = 1;
- result =
- read_encrypted(conn->fd, &conn->ap2_pairing_context.control_cipher_bundle, buf, count);
- } else {
- result = read(conn->fd, buf, count);
- if (result == 0) {
- debug(3, "AP2 read result 0, for a request count of %u.", count);
- }
- }
#else
- result = read(conn->fd, buf, count);
- if (result == 0) {
- debug(3, "AP1 read result 0, for a request count of %u.", count);
-
- }
+ result = read(conn->fd, buf, count);
+ // In AP1, the RTSP connection is closed in this way, so it's not unexpected
#endif
- if ((result == 0) && (errno != 0)) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(2, "Connection %d: read result 0, error %d: \"%s\".",
- conn->connection_number, errno, (char *)errorstring);
- }
-
- if (wait_time != 0)
- remaining_time = time_to_wait_to - get_absolute_time_in_ns();
- if ((((result == -1) || (result == 0)) && ((errno == EAGAIN) || (errno == EWOULDBLOCK))) && (remaining_time > 0))
- debug(1, "remaining time on a timed read is %" PRId64 " ns.", remaining_time);
- } while ((((result == -1) || (result == 0)) && ((errno == EAGAIN) || (errno == EWOULDBLOCK))) &&
- (remaining_time > 0));
-
+ if ((result <= 0) && (errno != 0)) {
+ char errorstring[1024];
+ strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ debug(3, "read_from_rtsp_connection error %d \"%s\" attempting to read up to %zu bytes.",
+ errno, errorstring, count);
+ }
} else {
debug(1, "Connection %d: attempt to read from a closed RTSP connection.",
conn->connection_number);
sizeof(timing_list_message) - 1 - strlen(timing_list_message));
ptp_send_control_message_string(timing_list_message);
}
-
-void clear_ptp_clock() { ptp_send_control_message_string("T"); }
#endif
-ssize_t read_from_rtsp_connection(rtsp_conn_info *conn, void *buf, size_t count) {
- // first try to read with a timeout, to see if there is any traffic...
- // ssize_t response = timed_read_from_rtsp_connection(conn, 20000000000L, buf, count);
- // actually don't use a timeout -- OwnTone doesn't supply regular traffic.
- ssize_t response = timed_read_from_rtsp_connection(conn, 0, buf, count);
- if ((response == -1) && ((errno == EAGAIN) || (errno == EWOULDBLOCK))) {
- if (conn->rtsp_link_is_idle == 0) {
- debug(1, "Connection %d: RTSP connection is idle.", conn->connection_number);
- conn->rtsp_link_is_idle = 1;
- conn->udp_clock_sender_is_initialised = 0;
- conn->udp_clock_is_initialised = 0;
- }
- response = timed_read_from_rtsp_connection(conn, 0, buf, count);
- }
- if (conn->rtsp_link_is_idle == 1) {
- conn->rtsp_link_is_idle = 0;
- debug(1, "Connection %d: RTSP connection traffic has resumed.", conn->connection_number);
-#ifdef CONFIG_AIRPLAY_2
- if (conn->airplay_stream_type == realtime_stream) {
- conn->last_anchor_info_is_valid = 0;
- conn->anchor_remote_info_is_valid = 0;
- conn->first_packet_timestamp = 0;
- conn->input_frame_rate_starting_point_is_valid = 0;
- ab_resync(conn);
- }
-#else
- conn->anchor_remote_info_is_valid = 0;
- conn->local_to_remote_time_difference_measurement_time = 0;
- conn->local_to_remote_time_difference = 0;
- conn->first_packet_timestamp = 0;
- conn->input_frame_rate_starting_point_is_valid = 0;
- ab_resync(conn);
-#endif
- }
- return response;
-}
-
enum rtsp_read_request_response rtsp_read_request(rtsp_conn_info *conn, rtsp_message **the_packet) {
-
+ enum rtsp_read_request_response reply = rtsp_read_request_response_pending;
*the_packet = NULL; // need this for error handling
-
- enum rtsp_read_request_response reply = rtsp_read_request_response_ok;
ssize_t buflen = 4096;
#ifdef CONFIG_METADATA
if ((config.metadata_enabled != 0) && (config.get_coverart != 0))
#endif
int release_buffer = 0; // on exit, don't deallocate the buffer if everything was okay
char *buf = malloc(buflen + 1); // add a NUL at the end
- if (!buf) {
- warn("Connection %d: rtsp_read_request: can't get a buffer.", conn->connection_number);
- return (rtsp_read_request_response_error);
- }
- pthread_cleanup_push(malloc_cleanup, buf);
- ssize_t nread;
- ssize_t inbuf = 0;
- int msg_size = -1;
-
- while (msg_size < 0) {
+ if (buf == NULL) {
+ debug(1, "Connection %d: rtsp_read_request: can't get a buffer.", conn->connection_number);
+ reply = rtsp_read_request_response_error;
+ } else {
+ debug(3, "buf is allocated at 0x%" PRIxPTR ".", (uintptr_t)buf);
+ pthread_cleanup_push(malloc_cleanup, &buf);
+ ssize_t nread;
+ ssize_t inbuf = 0;
+ int msg_size = -1;
- /*
+ while ((msg_size < 0) && (reply == rtsp_read_request_response_pending)) {
if (conn->stop != 0) {
- debug(3, "Connection %d: Shutdown requested by client.", conn->connection_number);
+ debug(3, "Connection %d: shutdown requested by client.", conn->connection_number);
reply = rtsp_read_request_response_immediate_shutdown_requested;
- goto shutdown;
- }
- */
-
- nread = read_from_rtsp_connection(conn, buf + inbuf, buflen - inbuf);
+ // goto shutdown;
+ } else {
- if (nread == 0) {
- // a blocking read that returns zero means eof -- implies connection closed by client
- debug(2, "Connection %d: Connection closed by client.", conn->connection_number);
- reply = rtsp_read_request_response_channel_closed;
- // Note: the socket will be closed when the thread exits
- goto shutdown;
- }
+ nread = read_from_rtsp_connection(conn, buf + inbuf, buflen - inbuf);
- // An ETIMEDOUT error usually means keepalive has failed.
+ if (nread <= 0) {
+ // ETIMEDOUT seems to be from the keepalive having failed.
+ // But it does seem as if it's not always sent, e.g. if another read() is outstanding (?)
+ // EAGAIN seems to be simply from the read() request timing out.
+ if (errno == ETIMEDOUT) {
+ debug(1,
+ "Connection %d has disappeared. As Yeats almost said, \"Too long a "
+ "silence / can make a stone "
+ "of the heart\". ETIMEOUT",
+ conn->connection_number);
+ reply = rtsp_read_request_response_immediate_shutdown_requested;
+ } else if (nread == 0) {
+ if (errno == 0) {
+ // a blocking read that returns zero means eof -- implies connection closed by client
+ debug(2, "Connection %d RTSP closed by client.", conn->connection_number);
+ } else {
+ char errorstring[1024];
+ strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ debug(2, "Connection %d RTSP port closed by client with error %d: \"%s\".",
+ conn->connection_number, errno, (char *)errorstring);
+ }
+ close(conn->fd); // close it from our end too...
+ conn->fd = 0;
+ reply = rtsp_read_request_response_channel_closed;
+ } else {
+ char errorstring[1024];
+ strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ debug(1, "Connection %d: rtsp_read_request_response_read_error %d: \"%s\".",
+ conn->connection_number, errno, (char *)errorstring);
+ reply = rtsp_read_request_response_read_error;
+ }
+ // goto shutdown;
+ } else {
- if (nread < 0) {
- if (errno == EINTR)
- continue;
- if (errno == EAGAIN) {
- debug(1, "Connection %d: getting Error 11 -- EAGAIN from a blocking read!",
- conn->connection_number);
- continue;
- }
- if (errno == ETIMEDOUT) {
- debug(1,
- "Connection %d: As Yeats almost said, \"Too long a silence / can make a stone "
- "of the heart\".",
- conn->connection_number);
- reply = rtsp_read_request_response_immediate_shutdown_requested;
- // Note: the socket will be closed when the thread exits
- goto shutdown;
- }
- if (errno != ECONNRESET) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- if (errno != 0)
- debug(2, "Connection %d: rtsp_read_request_response_read_error %d: \"%s\".",
- conn->connection_number, errno, (char *)errorstring);
+ /* // this outputs the message received
+ {
+ void *pt = malloc(nread+1);
+ memset(pt, 0, nread+1);
+ memcpy(pt, buf + inbuf, nread);
+ debug(1, "Incoming string on port: \"%s\"",pt);
+ free(pt);
+ }
+ */
+ inbuf += nread;
+
+ char *next;
+ while ((reply == rtsp_read_request_response_pending) && (msg_size < 0) &&
+ (next = nextline(buf, inbuf))) {
+ msg_size = msg_handle_line(the_packet, buf);
+ if (!(*the_packet)) {
+ debug(1, "Connection %d: rtsp_read_request can't find an RTSP header.",
+ conn->connection_number);
+ reply = rtsp_read_request_response_bad_packet;
+ // goto shutdown;
+ } else {
+ inbuf -= next - buf;
+ if (inbuf)
+ memmove(buf, next, inbuf);
+ }
+ }
+ }
}
- reply = rtsp_read_request_response_read_error;
- goto shutdown;
}
- /* // this outputs the message received
- {
- void *pt = malloc(nread+1);
- memset(pt, 0, nread+1);
- memcpy(pt, buf + inbuf, nread);
- debug(1, "Incoming string on port: \"%s\"",pt);
- free(pt);
+ if ((reply == rtsp_read_request_response_pending) && (msg_size > 0)) {
+ // more input is needed...
+ uint64_t threshold_time =
+ get_absolute_time_in_ns() + ((uint64_t)15000000000); // i.e. fifteen seconds from now
+ int warning_message_sent = 0;
+
+ if (msg_size > buflen) {
+ char *enlarged_buf = realloc(buf, msg_size + 1); // keep original buf valid if realloc fails
+ if (enlarged_buf == NULL) {
+ warn("Connection %d: too much content.", conn->connection_number);
+ reply = rtsp_read_request_response_error;
+ // goto shutdown;
+ } else {
+ buf = enlarged_buf;
+ debug(3, "buf is reallocated at 0x%" PRIxPTR ".", (uintptr_t)buf);
+ buflen = msg_size;
}
- */
-
- inbuf += nread;
-
- char *next;
- while (msg_size < 0 && (next = nextline(buf, inbuf))) {
- msg_size = msg_handle_line(the_packet, buf);
-
- if (!(*the_packet)) {
- debug(1, "Connection %d: rtsp_read_request can't find an RTSP header.",
- conn->connection_number);
- reply = rtsp_read_request_response_bad_packet;
- goto shutdown;
}
- inbuf -= next - buf;
- if (inbuf)
- memmove(buf, next, inbuf);
- }
- }
-
- if (msg_size > buflen) {
- buf = realloc(buf, msg_size + 1);
- if (!buf) {
- warn("Connection %d: too much content.", conn->connection_number);
- reply = rtsp_read_request_response_error;
- goto shutdown;
- }
- buflen = msg_size;
- }
-
- uint64_t threshold_time =
- get_absolute_time_in_ns() + ((uint64_t)15000000000); // i.e. fifteen seconds from now
- int warning_message_sent = 0;
-
- // const size_t max_read_chunk = 1024 * 1024 / 16;
- while (inbuf < msg_size) {
+ // const size_t max_read_chunk = 1024 * 1024 / 16;
+ while ((inbuf < msg_size) && (reply == rtsp_read_request_response_pending)) {
- // we are going to read the stream in chunks and time how long it takes to
- // do so.
- // If it's taking too long, (and we find out about it), we will send an
- // error message as
- // metadata
+ // we are going to read the stream in chunks and time how long it takes to
+ // do so.
+ // If it's taking too long, (and we find out about it), we will send an
+ // error message as
+ // metadata
- if (warning_message_sent == 0) {
- uint64_t time_now = get_absolute_time_in_ns();
- if (time_now > threshold_time) { // it's taking too long
- debug(1, "Error receiving metadata from source -- transmission seems "
- "to be stalled.");
+ if (warning_message_sent == 0) {
+ uint64_t time_now = get_absolute_time_in_ns();
+ if (time_now > threshold_time) { // it's taking too long
+ debug(1, "Error receiving metadata from source -- transmission seems "
+ "to be stalled.");
#ifdef CONFIG_METADATA
- send_ssnc_metadata('stal', NULL, 0, 1);
+ send_ssnc_metadata('stal', NULL, 0, 1);
#endif
- warning_message_sent = 1;
- }
- }
+ warning_message_sent = 1;
+ }
+ }
- /*
if (conn->stop != 0) {
debug(1, "RTSP shutdown requested.");
reply = rtsp_read_request_response_immediate_shutdown_requested;
- goto shutdown;
+ // goto shutdown;
+ } else {
+ size_t read_chunk = msg_size - inbuf;
+ // if (read_chunk > max_read_chunk)
+ // read_chunk = max_read_chunk;
+ // usleep(80000); // wait about 80 milliseconds between reads of up to max_read_chunk
+ nread = read_from_rtsp_connection(conn, buf + inbuf, read_chunk);
+
+ if (nread <= 0) {
+ // ETIMEDOUT seems to be from the keepalive having failed.
+ // But it does seem as it it's not always sent, e.g. if another read() is outstanding
+ // (?) EAGAIN seems to be simply from the read() request timing out.
+ if (errno == ETIMEDOUT) {
+ debug(1,
+ "Connection %d has disappeared. As Yeats almost said, \"Too long a "
+ "silence / can make a stone "
+ "of the heart\". ETIMEOUT",
+ conn->connection_number);
+ reply = rtsp_read_request_response_immediate_shutdown_requested;
+ // Note: the socket will be closed when the thread exits
+ } else if (nread == 0) {
+ if (errno == 0) {
+ // a blocking read that returns zero means eof -- implies connection closed by
+ // client
+ debug(1, "Connection %d closed by client.", conn->connection_number);
+ } else {
+ char errorstring[1024];
+ strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ debug(1, "Connection %d closed by client with error %d: \"%s\".",
+ conn->connection_number, errno, (char *)errorstring);
+ }
+ reply = rtsp_read_request_response_channel_closed;
+ // Note: the socket will be closed when the thread exits
+ } else {
+ char errorstring[1024];
+ strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ debug(1, "Connection %d: rtsp_read_request_response_read_error %d: \"%s\".",
+ conn->connection_number, errno, (char *)errorstring);
+ reply = rtsp_read_request_response_read_error;
+ }
+ // goto shutdown;
+ } else {
+ inbuf += nread;
+ }
}
- */
-
- size_t read_chunk = msg_size - inbuf;
- // if (read_chunk > max_read_chunk)
- // read_chunk = max_read_chunk;
- // usleep(80000); // wait about 80 milliseconds between reads of up to max_read_chunk
- nread = read_from_rtsp_connection(conn, buf + inbuf, read_chunk);
- if (!nread) {
- reply = rtsp_read_request_response_error;
- goto shutdown;
- }
- if (nread < 0) {
- if (errno == EINTR)
- continue;
- if (errno == EAGAIN) {
- debug(1, "Getting Error 11 -- EAGAIN from a blocking read!");
- continue;
}
- if (errno != ECONNRESET) {
- char errorstring[1024];
- strerror_r(errno, (char *)errorstring, sizeof(errorstring));
- debug(1, "Connection %d: rtsp_read_request_response_read_error %d: \"%s\".",
- conn->connection_number, errno, (char *)errorstring);
- }
- reply = rtsp_read_request_response_read_error;
- goto shutdown;
}
- inbuf += nread;
- }
+ if (reply == rtsp_read_request_response_pending) {
+ reply = rtsp_read_request_response_ok;
+ rtsp_message *msg = *the_packet;
+ msg->contentlength = inbuf;
+ msg->content = buf;
+ char *jp = inbuf + buf;
+ *jp = '\0';
+ *the_packet = msg;
+ }
- rtsp_message *msg = *the_packet;
- msg->contentlength = inbuf;
- msg->content = buf;
- char *jp = inbuf + buf;
- *jp = '\0';
- *the_packet = msg;
-shutdown:
- if (reply != rtsp_read_request_response_ok) {
- if (*the_packet != NULL) {
- debug(3, "Freeing the_packet");
- msg_free(the_packet);
+ // shutdown:
+ if (reply != rtsp_read_request_response_ok) {
+ if (*the_packet != NULL) {
+ debug(3, "Freeing the_packet");
+ msg_free(the_packet);
+ }
+ release_buffer = 1; // allow the buffer to be released
}
- release_buffer = 1; // allow the buffer to be released
+ pthread_cleanup_pop(release_buffer);
}
- pthread_cleanup_pop(release_buffer);
return reply;
}
// Here, if there's content, write the Content-Length header ...
- if (resp->contentlength) {
- debug(3, "Responding with content of length %d", resp->contentlength);
+ // if (resp->contentlength) {
+ {
+ // debug(2, "Responding with content of length %d", resp->contentlength);
n = snprintf(p, pktfree, "Content-Length: %d\r\n", resp->contentlength);
pktfree -= n;
p += n;
return -4;
}
if (reply != p - pkt) {
- debug(1, "msg_write_response error -- requested bytes: %d not fully written: %d.", p - pkt,
+ debug(1, "msg_write_response error -- requested bytes: %zd not fully written: %zd.", p - pkt,
reply);
return -5;
}
return 0;
}
-char *get_category_string(airplay_stream_c cat) {
- char *category;
- switch (cat) {
- case unspecified_stream_category:
- category = "unspecified stream";
- break;
- case ptp_stream:
- category = "PTP stream";
- break;
- case ntp_stream:
- category = "NTP stream";
- break;
- case remote_control_stream:
- category = "Remote Control stream";
- break;
- case classic_airplay_stream:
- category = "Classic AirPlay stream";
- break;
- default:
- category = "Unexpected stream code";
- break;
- }
- return category;
-}
-
+#ifdef CONFIG_AIRPLAY_2
void handle_record_2(rtsp_conn_info *conn, __attribute((unused)) rtsp_message *req,
rtsp_message *resp) {
debug(2, "Connection %d: RECORD on %s", conn->connection_number,
get_category_string(conn->airplay_stream_category));
- // debug_log_rtsp_message(1, "RECORD incoming message", req);
+ debug_log_rtsp_message(3, "RECORD incoming message", req);
+ msg_add_header(resp, "Audio-Latency", "0");
resp->respcode = 200;
}
+#endif
void handle_record(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
debug(2, "Connection %d: RECORD", conn->connection_number);
if ((conn != NULL) && (principal_conn == conn)) {
- // if (have_play_lock(conn)) {
if (conn->player_thread)
warn("Connection %d: RECORD: Duplicate RECORD message -- ignored", conn->connection_number);
else {
- debug(2, "Connection %d: Classic AirPlay connection from %s:%u to self at %s:%u.",
- conn->connection_number, conn->client_ip_string, conn->client_rtsp_port,
- conn->self_ip_string, conn->self_rtsp_port);
activity_monitor_signify_activity(1);
- player_prepare_to_play(conn);
player_play(conn); // the thread better be 0
}
#ifdef CONFIG_AIRPLAY_2
-void handle_get_info(__attribute((unused)) rtsp_conn_info *conn, rtsp_message *req,
- rtsp_message *resp) {
+void generateTxtDataValueInfo(rtsp_conn_info *conn, void **response, size_t *responseLength) {
+ void *qualifier_response_data = NULL;
+ size_t qualifier_response_data_length = 0;
+ char localString[256];
+ if (add_pstring_to_malloc("acl=0", &qualifier_response_data, &qualifier_response_data_length) ==
+ 0)
+ debug(1, "Problem");
+ if (add_pstring_to_malloc("btaddr=00:00:00:00:00:00", &qualifier_response_data,
+ &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+ if (add_pstring_to_malloc(
+ bnprintf(localString, sizeof(localString), "deviceid=%s", config.airplay_device_id),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+ if (add_pstring_to_malloc(
+ bnprintf(localString, sizeof(localString), "fex=%s", config.airplay_fex),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ uint64_t features_hi = config.airplay_features;
+ features_hi = (features_hi >> 32) & 0xffffffff;
+ uint64_t features_lo = config.airplay_features;
+ features_lo = features_lo & 0xffffffff;
+
+ if (add_pstring_to_malloc(bnprintf(localString, sizeof(localString),
+ "features=0x%" PRIX64 ",0x%" PRIX64 "", features_lo,
+ features_hi),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+ // if (add_pstring_to_malloc("rsf=0x0", &qualifier_response_data,
+ // &qualifier_response_data_length) == 0)
+ // debug(1, "Problem");
+
+ if (add_pstring_to_malloc(
+ bnprintf(localString, sizeof(localString), "flags=0x%x", config.airplay_statusflags),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if ((conn != NULL) && (conn->airplay_gid != 0)) {
+ snprintf(localString, sizeof(localString), "gid=%s", conn->airplay_gid);
+ } else {
+ snprintf(localString, sizeof(localString), "gid=%s", config.airplay_pi);
+ }
+
+ if (add_pstring_to_malloc(localString, &qualifier_response_data,
+ &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ int gcgl = 0;
+ if (conn != NULL)
+ gcgl = conn->groupContainsGroupLeader;
+
+ // should have igl here;
+ if (add_pstring_to_malloc(bnprintf(localString, sizeof(localString), "igl=%0"),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if (add_pstring_to_malloc(bnprintf(localString, sizeof(localString), "gcgl=%d", gcgl),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if (add_pstring_to_malloc(
+ bnprintf(localString, sizeof(localString), "pgid=%s", config.airplay_pgid),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+
+ if (add_pstring_to_malloc(bnprintf(localString, sizeof(localString), "pgcgl=%d", gcgl),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if (add_pstring_to_malloc(bnprintf(localString, sizeof(localString), "model=%s", config.model),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+ if (add_pstring_to_malloc("protovers=1.1", &qualifier_response_data,
+ &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if (add_pstring_to_malloc(bnprintf(localString, sizeof(localString), "pi=%s", config.airplay_pi),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if (add_pstring_to_malloc(
+ bnprintf(localString, sizeof(localString), "psi=%s", config.airplay_psi),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if (add_pstring_to_malloc(bnprintf(localString, sizeof(localString), "pk=%s", config.pk_string),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if (add_pstring_to_malloc(
+ bnprintf(localString, sizeof(localString), "srcvers=%s", config.srcvers),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if (add_pstring_to_malloc(bnprintf(localString, sizeof(localString), "osvers=%s", config.osvers),
+ &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ if (add_pstring_to_malloc("vv=2", &qualifier_response_data, &qualifier_response_data_length) == 0)
+ debug(1, "Problem");
+
+ *response = qualifier_response_data;
+ *responseLength = qualifier_response_data_length;
+}
+
+plist_t generateInfoPlist(rtsp_conn_info *conn) {
+ plist_t response_plist = NULL;
+
+ plist_from_memory((const char *)get_info_response_plist, get_info_response_plist_len,
+ &response_plist);
+
+ if (response_plist == NULL) {
+ debug(1, "generateInfoPlist plist not created!");
+ } else {
+ pthread_rwlock_rdlock(&principal_conn_lock); // don't let the principal_conn be changed
+ pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
+
+ // debug(1,"qualifier_response_data_length: %u.", qualifier_response_data_length);
+
+ plist_dict_set_item(response_plist, "featuresEx", plist_new_string(config.airplay_fex));
+
+ plist_dict_set_item(response_plist, "features", plist_new_uint(config.airplay_features));
+ plist_dict_set_item(response_plist, "statusFlags", plist_new_uint(config.airplay_statusflags));
+ plist_dict_set_item(response_plist, "deviceID", plist_new_string(config.airplay_device_id));
+ plist_dict_set_item(response_plist, "pi", plist_new_string(config.airplay_pi));
+ plist_dict_set_item(response_plist, "name", plist_new_string(config.service_name));
+ char *vs = get_version_string();
+ plist_dict_set_item(response_plist, "model", plist_new_string(config.model));
+ free(vs);
+ plist_dict_set_item(response_plist, "pk",
+ plist_new_data((const char *)config.airplay_pk, sizeof(config.airplay_pk)));
+ char senderAddress[256];
+ snprintf(senderAddress, sizeof(senderAddress), "%s:%u", conn->client_ip_string,
+ conn->client_rtsp_port);
+ plist_dict_set_item(response_plist, "senderAddress", plist_new_string(senderAddress));
+ plist_dict_set_item(response_plist, "initialVolume", plist_new_real(suggested_volume(conn)));
+ plist_dict_set_item(response_plist, "sourceVersion", plist_new_string(config.srcvers));
+ pthread_cleanup_pop(1); // release the principal_conn lock
+ // Create a dictionary of supported formats for the bufferStream
+ uint64_t bufferStreamFormats = 0L;
+ // bufferStreamFormats = 0xF7FE000E00000000; // don't know what these do (from the HPm)
+ plist_t supported_formats_plist = plist_new_dict();
+ if (supported_formats_plist != NULL) {
+ plist_dict_set_item(supported_formats_plist, "audioStream", plist_new_uint(21235712));
+ {
+ bufferStreamFormats |= 0x00000400000L; // AAC-LC/44.1K/F24/2
+ bufferStreamFormats |= 0x40000; // ALAC/44100/S16/2
+ }
+
+ {
+ bufferStreamFormats |= 0x00000200000L; // ALAC/48K/F24/2
+ bufferStreamFormats |= 0x00000800000L; // AAC-LC/48K/F24/2
+ }
+ {
+ if (config.eight_channel_layout != 0)
+ bufferStreamFormats |= 0x10000000000L; // AAC-LC/48K/F24/7.1
+ if (config.six_channel_layout != 0)
+ bufferStreamFormats |= 0x08000000000L; // AAC-LC/48K/F24/5.1
+ }
+ plist_dict_set_item(supported_formats_plist, "bufferStream",
+ plist_new_uint(bufferStreamFormats));
+ debug(3, "bufferedStream formats: 0x%" PRIX64 ".", bufferStreamFormats);
+ plist_dict_set_item(response_plist, "supportedFormats", supported_formats_plist);
+ }
+ }
+ return response_plist;
+}
+
+void handle_get_info(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
debug_log_rtsp_message(3, "GET /info:", req);
if (rtsp_message_contains_plist(req)) { // it's stage one
// get version of AirPlay -- it might be too old. Not using it yet.
}
}
- // In Stage 1, look for the DACP and Active-Remote
+ // in Stage 1, look for the DACP and Active-Remote
char *ar = msg_get_header(req, "Active-Remote");
if (ar) {
debug(3, "Connection %d: GET /info -- Active-Remote string seen: \"%s\".",
plist_free(info_plist);
free(qualifier_array_val_cstr);
- // uint8_t bt_addr[6] = {0xB8, 0x27, 0xEB, 0xB7, 0xD4, 0x0E};
- plist_t response_plist = NULL;
- plist_from_xml((const char *)plists_get_info_response_xml, plists_get_info_response_xml_len,
- &response_plist);
- if (response_plist == NULL) {
- debug(1, "GET /info Stage 1: response plist not created from XML!");
- } else {
- void *qualifier_response_data = NULL;
- size_t qualifier_response_data_length = 0;
-
- pthread_rwlock_rdlock(&principal_conn_lock); // don't let the principal_conn be changed
- pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
-
- if (add_pstring_to_malloc("acl=0", &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc(deviceIdString, &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc(featuresString, &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc("rsf=0x0", &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc("flags=0x4", &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc("model=Shairport Sync", &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc("manufacturer=", &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc("serialNumber=", &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc("protovers=1.1", &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc("srcvers=366.0", &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc(piString, &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc(gidString, &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- if (add_pstring_to_malloc("gcgl=0", &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- snprintf(pkString, sizeof(pkString), "pk=");
- pkString_make(pkString + strlen("pk="), sizeof(pkString) - strlen("pk="),
- config.airplay_device_id);
- if (add_pstring_to_malloc(pkString, &qualifier_response_data,
- &qualifier_response_data_length) == 0)
- debug(1, "Problem");
- // debug(1,"qualifier_response_data_length: %u.", qualifier_response_data_length);
-
- plist_dict_set_item(response_plist, "txtAirPlay",
- plist_new_data(qualifier_response_data, qualifier_response_data_length));
-
- plist_dict_set_item(response_plist, "features", plist_new_uint(config.airplay_features));
- plist_dict_set_item(response_plist, "statusFlags",
- plist_new_uint(config.airplay_statusflags));
- plist_dict_set_item(response_plist, "deviceID", plist_new_string(config.airplay_device_id));
- plist_dict_set_item(response_plist, "pi", plist_new_string(config.airplay_pi));
- plist_dict_set_item(response_plist, "name", plist_new_string(config.service_name));
- char *vs = get_version_string();
- // plist_dict_set_item(response_plist, "model", plist_new_string(vs));
- plist_dict_set_item(response_plist, "model", plist_new_string("Shairport Sync"));
- free(vs);
- // pkString_make(pkString, sizeof(pkString), config.airplay_device_id);
- // plist_dict_set_item(response_plist, "pk", plist_new_string(pkString));
- pthread_cleanup_pop(1); // release the principal_conn lock
- plist_to_bin(response_plist, &resp->content, &resp->contentlength);
- if (resp->contentlength == 0)
- debug(1, "GET /info Stage 1: response bplist not created!");
- plist_free(response_plist);
- free(qualifier_response_data);
- }
+ plist_t response_plist = generateInfoPlist(conn);
+
+ if (response_plist == NULL)
+ goto user_fail;
+
+ void *txtData = NULL;
+ size_t txtDataLength = 0;
+ generateTxtDataValueInfo(conn, &txtData, &txtDataLength);
+ plist_dict_set_item(response_plist, "txtAirPlay", plist_new_data(txtData, txtDataLength));
+ free(txtData);
+ plist_to_bin(response_plist, &resp->content, &resp->contentlength);
+ if (resp->contentlength == 0)
+ debug(1, "GET /info Stage 1: response bplist not created!");
+ plist_free(response_plist);
+ /*
+ free(qualifier_response_data);
+ */
+
msg_add_header(resp, "Content-Type", "application/x-apple-binary-plist");
- debug_log_rtsp_message(3, "GET /info Stage 1 Response:", resp);
resp->respcode = 200;
+ debug_log_rtsp_message(3, "GET /info Stage 1 Response:", resp);
return;
user_fail:
resp->respcode = 400;
return;
} else { // stage two
- plist_t response_plist = NULL;
- plist_from_xml((const char *)plists_get_info_response_xml, plists_get_info_response_xml_len,
- &response_plist);
- plist_dict_set_item(response_plist, "features", plist_new_uint(config.airplay_features));
- plist_dict_set_item(response_plist, "statusFlags", plist_new_uint(config.airplay_statusflags));
- plist_dict_set_item(response_plist, "deviceID", plist_new_string(config.airplay_device_id));
- plist_dict_set_item(response_plist, "pi", plist_new_string(config.airplay_pi));
- plist_dict_set_item(response_plist, "name", plist_new_string(config.service_name));
- char *vs = get_version_string();
- // plist_dict_set_item(response_plist, "model", plist_new_string(vs));
- plist_dict_set_item(response_plist, "model", plist_new_string("Shairport Sync"));
- free(vs);
- // pkString_make(pkString, sizeof(pkString), config.airplay_device_id);
- // plist_dict_set_item(response_plist, "pk", plist_new_string(pkString));
+ plist_t response_plist = generateInfoPlist(conn);
+
+ if (response_plist == NULL)
+ goto user_fail;
+
+ void *txtData = NULL;
+ size_t txtDataLength = 0;
+ generateTxtDataValueInfo(conn, &txtData, &txtDataLength);
+ plist_dict_set_item(response_plist, "txtAirPlay", plist_new_data(txtData, txtDataLength));
+ free(txtData);
plist_to_bin(response_plist, &resp->content, &resp->contentlength);
plist_free(response_plist);
msg_add_header(resp, "Content-Type", "application/x-apple-binary-plist");
- debug_log_rtsp_message(3, "GET /info Stage 2 Response", resp);
resp->respcode = 200;
+ debug_log_rtsp_message(3, "GET /info Stage 2 Response", resp);
return;
}
}
void handle_flushbuffered(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
- debug(3, "Connection %d: FLUSHBUFFERED %s : Content-Length %d", conn->connection_number,
+ debug(2, "Connection %d: FLUSHBUFFERED %s : Content-Length %d", conn->connection_number,
req->path, req->contentlength);
- debug_log_rtsp_message(2, "FLUSHBUFFERED request", req);
+ debug_log_rtsp_message(3, "FLUSHBUFFERED request", req);
uint64_t flushFromSeq = 0;
uint64_t flushFromTS = 0;
if (messagePlist != NULL) {
plist_t item = plist_dict_get_item(messagePlist, "flushFromSeq");
if (item == NULL) {
- debug(2, "Can't find a flushFromSeq");
+ debug(3, "Can't find a flushFromSeq");
} else {
flushFromValid = 1;
plist_get_uint_val(item, &flushFromSeq);
- debug(2, "flushFromSeq is %" PRId64 ".", flushFromSeq);
+ debug(3, "flushFromSeq is %" PRId64 ".", flushFromSeq);
}
item = plist_dict_get_item(messagePlist, "flushFromTS");
if (flushFromValid != 0)
debug(1, "flushFromSeq without flushFromTS!");
else
- debug(2, "Can't find a flushFromTS");
+ debug(3, "Can't find a flushFromTS");
} else {
plist_get_uint_val(item, &flushFromTS);
if (flushFromValid == 0)
debug(1, "flushFromTS without flushFromSeq!");
- debug(2, "flushFromTS is %" PRId64 ".", flushFromTS);
+ debug(3, "flushFromTS is %" PRId64 ".", flushFromTS);
}
item = plist_dict_get_item(messagePlist, "flushUntilSeq");
debug(1, "Can't find the flushUntilSeq");
} else {
plist_get_uint_val(item, &flushUntilSeq);
- debug(2, "flushUntilSeq is %" PRId64 ".", flushUntilSeq);
+ debug(3, "flushUntilSeq is %" PRId64 ".", flushUntilSeq);
}
item = plist_dict_get_item(messagePlist, "flushUntilTS");
debug(1, "Can't find the flushUntilTS");
} else {
plist_get_uint_val(item, &flushUntilTS);
- debug(2, "flushUntilTS is %" PRId64 ".", flushUntilTS);
+ debug(3, "flushUntilTS is %" PRId64 ".", flushUntilTS);
}
debug_mutex_lock(&conn->flush_mutex, 1000, 1);
- // a flush with from... components will not be followed by a setanchor (i.e. a play)
- // if it's a flush that will be followed by a setanchor (i.e. a play) then stop play now.
- if (flushFromValid == 0)
- conn->ap2_play_enabled = 0;
-
- // add the exact request as made to the linked list (not used for anything but diagnostics now)
- // int flushNow = 0;
- // if (flushFromValid == 0)
- // flushNow = 1;
- // add_flush_request(flushNow, flushFromSeq, flushFromTS, flushUntilSeq, flushUntilTS, conn);
-
- // now, if it's an immediate flush, replace the existing request, if any
- // but it if's a deferred flush and there is an existing deferred request,
- // only update the flushUntil stuff -- that seems to preserve
- // the intended semantics
-
- // so, always replace these
- conn->ap2_flush_until_sequence_number = flushUntilSeq;
- conn->ap2_flush_until_rtp_timestamp = flushUntilTS;
-
- if ((conn->ap2_flush_requested != 0) && (conn->ap2_flush_from_valid != 0) &&
- (flushFromValid != 0)) {
- // if there is a request already, and it's a deferred request, and the current request is also
- // deferred... do nothing! -- leave the starting point in place. Yeah, yeah, we know de
- // Morgan's Law, but this seems clearer
+
+ if (flushFromValid == 0) {
+ // an immediate flush is requested
+ conn->ap2_immediate_flush_requested = 1;
+ conn->ap2_immediate_flush_until_sequence_number = flushUntilSeq & 0x7fffff;
+ conn->ap2_immediate_flush_until_rtp_timestamp = flushUntilTS;
+ debug(2,
+ "Connection %d: immediate flush request created: flushUntilTS: %" PRIu64 ", flushUntilSeq: %" PRIu64 ".",
+ conn->connection_number, flushUntilTS, flushUntilSeq & 0x7fffff);
+ conn->ap2_play_enabled = 0; // stop trying to play audio
+ ptp_send_control_message_string(
+ "P"); // "P"ause signify clock no longer valid and will be restarted by a subsequent play
} else {
- conn->ap2_flush_from_sequence_number = flushFromSeq;
- conn->ap2_flush_from_rtp_timestamp = flushFromTS;
+ // look for a record slot that isn't in use
+ unsigned int i = 0;
+ unsigned int found = 0;
+ while ((i < MAX_DEFERRED_FLUSH_REQUESTS) && (found == 0)) {
+ if (conn->ap2_deferred_flush_requests[i].inUse == 0) {
+ found = 1;
+ } else {
+ i++;
+ }
+ }
+ if (found != 0) {
+ conn->ap2_deferred_flush_requests[i].inUse = 1;
+ conn->ap2_deferred_flush_requests[i].active = 0;
+ conn->ap2_deferred_flush_requests[i].flushFromSeq = flushFromSeq & 0x7fffff;
+ conn->ap2_deferred_flush_requests[i].flushFromTS = flushFromTS;
+ conn->ap2_deferred_flush_requests[i].flushUntilSeq = flushUntilSeq & 0x7fffff;
+ conn->ap2_deferred_flush_requests[i].flushUntilTS = flushUntilTS;
+ debug(2,
+ "Connection %d: deferred flush request created: flushFromSeq: %" PRIu64 ", flushUntilSeq: %" PRIu64 ".",
+ conn->connection_number, flushFromSeq, flushUntilSeq);
+ } else {
+ debug(1, "Connection %d: no more room for deferred flush request records",
+ conn->connection_number);
+ }
}
- conn->ap2_flush_from_valid = flushFromValid;
- conn->ap2_flush_requested = 1;
-
- // reflect the possibly updated flush request
- // add_flush_request(flushNow, conn->ap2_flush_from_sequence_number,
- // conn->ap2_flush_from_rtp_timestamp, conn->ap2_flush_until_sequence_number,
- // conn->ap2_flush_until_rtp_timestamp, conn);
-
debug_mutex_unlock(&conn->flush_mutex, 3);
-
- if (flushFromValid)
- debug(2, "Deferred Flush Requested");
- else
- debug(2, "Immediate Flush Requested");
-
plist_free(messagePlist);
- // display_all_flush_requests(conn);
}
resp->respcode = 200;
}
void handle_setrate(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
- debug(3, "Connection %d: SETRATE %s : Content-Length %d", conn->connection_number, req->path,
+ debug(1, "Connection %d: SETRATE %s : Content-Length %d", conn->connection_number, req->path,
req->contentlength);
- debug_log_rtsp_message(2, "SETRATE request -- unimplemented", req);
+ debug_log_rtsp_message(1, "SETRATE request -- unimplemented", req);
resp->respcode = 501; // Not Implemented
}
}
void handle_setrateanchori(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
- debug(3, "Connection %d: SETRATEANCHORI %s :: Content-Length %d", conn->connection_number,
+ debug(2, "Connection %d: SETRATEANCHORI %s :: Content-Length %d", conn->connection_number,
req->path, req->contentlength);
-
+ debug_log_rtsp_message(3, "SETRATEANCHORI", req);
plist_t messagePlist = plist_from_rtsp_content(req);
if (messagePlist != NULL) {
} else {
uint64_t nid;
plist_get_uint_val(item_2, &nid);
- debug(2, "networkTimeTimelineID \"%" PRIx64 "\".", nid);
+ debug(3, "networkTimeTimelineID \"%" PRIx64 "\".", nid);
conn->networkTimeTimelineID = nid;
}
uint64_t networkTimeSecs;
plist_get_uint_val(item, &networkTimeSecs);
- debug(2, "anchor networkTimeSecs is %" PRIu64 ".", networkTimeSecs);
+ debug(3, "anchor networkTimeSecs is %" PRIu64 ".", networkTimeSecs);
item = plist_dict_get_item(messagePlist, "networkTimeFrac");
uint64_t networkTimeFrac;
plist_get_uint_val(item, &networkTimeFrac);
- debug(2, "anchor networkTimeFrac is 0%" PRIu64 ".", networkTimeFrac);
+ debug(3, "anchor networkTimeFrac is 0%" PRIu64 ".", networkTimeFrac);
// it looks like the networkTimeFrac is a fraction where the msb is work 1/2, the
// next 1/4 and so on
// now, convert the network time and fraction into nanoseconds
networkTimeSecs = networkTimeSecs * 1000000000; // turn the whole seconds into ns
uint64_t anchorTimeNanoseconds = networkTimeSecs + networkTimeFrac;
- debug(2, "anchorTimeNanoseconds looks like %" PRIu64 ".", anchorTimeNanoseconds);
+ debug(3, "anchorTimeNanoseconds looks like %" PRIu64 ".", anchorTimeNanoseconds);
item = plist_dict_get_item(messagePlist, "rtpTime");
uint64_t rtpTime;
uint64_t rate;
plist_get_uint_val(item, &rate);
debug(3, "anchor rate 0x%016" PRIx64 ".", rate);
- debug_mutex_lock(&conn->flush_mutex, 1000, 1);
- pthread_cleanup_push(mutex_unlock, &conn->flush_mutex);
+ pthread_cleanup_debug_mutex_lock(&conn->flush_mutex, 1000, 1);
conn->ap2_rate = rate;
if ((rate & 1) != 0) {
ptp_send_control_message_string(
"B"); // signify clock dependability period is "B"eginning (or resuming)
- debug(2, "Connection %d: Start playing, with anchor clock %" PRIx64 ".",
+ debug(2, "Connection %d: SETRATEANCHORI Start playing, with anchor clock %" PRIx64 ".",
conn->connection_number, conn->networkTimeTimelineID);
activity_monitor_signify_activity(1);
conn->ap2_play_enabled = 1;
} else {
ptp_send_control_message_string("P"); // signify play is "P"ausing
- debug(2, "Connection %d: Pause playing.", conn->connection_number);
+ debug(2, "Connection %d: SETRATEANCHORI Pause playing.", conn->connection_number);
conn->ap2_play_enabled = 0;
activity_monitor_signify_activity(0);
- reset_anchor_info(conn);
+
#ifdef CONFIG_METADATA
send_ssnc_metadata('paus', NULL, 0, 1); // pause -- contains cancellation points
#endif
- if (config.output->stop) {
- debug(2, "Connection %d: Stop the output backend.", conn->connection_number);
- config.output->stop();
- }
+ // if (config.output->stop) {
+ // debug(1, "Connection %d: SETRATEANCHORI would stop the output backend.",
+ // conn->connection_number); config.output->stop();
+ // }
}
pthread_cleanup_pop(1); // unlock the conn->flush_mutex
}
__attribute((unused)) rtsp_message *resp) {
debug(1, "Connection %d: GET %s Content-Length %d", conn->connection_number, req->path,
req->contentlength);
- resp->respcode = 500;
+ resp->respcode = 501; // 501 is not implemented
}
-void handle_post(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
- resp->respcode = 500;
- if (strcmp(req->path, "/feedback") == 0) {
- resp->respcode = 501;
- } else {
- debug(1, "Connection %d: Airplay 1. Unhandled POST %s Content-Length %d",
- conn->connection_number, req->path, req->contentlength);
- debug_log_rtsp_message(2, "POST request", req);
- }
+void handle_post(rtsp_conn_info *conn, rtsp_message *req,
+ __attribute((unused)) rtsp_message *resp) {
+ debug(1, "Connection %d: AP1 POST %s Content-Length %d", conn->connection_number, req->path,
+ req->contentlength);
+ resp->respcode = 501; // 501 is not implemented
}
-
#endif
#ifdef CONFIG_AIRPLAY_2
uint8_t public_key[32];
struct pairings *next;
-} *pairings;
+} * pairings;
static struct pairings *pairing_find(const char *device_id) {
for (struct pairings *pairing = pairings; pairing; pairing = pairing->next) {
static int pairing_add_cb(uint8_t public_key[32], const char *device_id,
void *cb_arg __attribute__((unused))) {
- debug(1, "pair-add cb for %s", device_id);
+ debug(2, "pair-add cb for %s", device_id);
struct pairings *pairing = pairing_find(device_id);
if (pairing) {
static int pairing_remove_cb(uint8_t public_key[32] __attribute__((unused)), const char *device_id,
void *cb_arg __attribute__((unused))) {
- debug(1, "pair-remove cb for %s", device_id);
+ debug(2, "pair-remove cb for %s", device_id);
struct pairings *pairing = pairing_find(device_id);
if (!pairing) {
- debug(1, "pair-remove callback for unknown device");
+ debug(1, "pair-remove callback for device \"%s\".", device_id);
return -1;
}
void handle_pair_add(rtsp_conn_info *conn __attribute__((unused)), rtsp_message *req,
rtsp_message *resp) {
+ debug(2, "Connection %d: handle_pair_add", conn->connection_number);
uint8_t *body = NULL;
size_t body_len = 0;
int ret = pair_add(PAIR_SERVER_HOMEKIT, &body, &body_len, pairing_add_cb, NULL,
void handle_pair_list(rtsp_conn_info *conn __attribute__((unused)), rtsp_message *req,
rtsp_message *resp) {
+ debug(2, "Connection %d: handle_pair_list", conn->connection_number);
uint8_t *body = NULL;
size_t body_len = 0;
int ret = pair_list(PAIR_SERVER_HOMEKIT, &body, &body_len, pairing_list_cb, NULL,
void handle_pair_remove(rtsp_conn_info *conn __attribute__((unused)), rtsp_message *req,
rtsp_message *resp) {
+ debug(3, "Connection %d: handle_pair_remove", conn->connection_number);
uint8_t *body = NULL;
size_t body_len = 0;
int ret = pair_remove(PAIR_SERVER_HOMEKIT, &body, &body_len, pairing_remove_cb, NULL,
resp->content = (char *)body; // these will be freed when the data is sent
resp->contentlength = body_len;
msg_add_header(resp, "Content-Type", "application/octet-stream");
- debug_log_rtsp_message(2, "pair-remove response", resp);
+ debug_log_rtsp_message(3, "pair-remove response", resp);
}
void handle_pair_verify(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
+ debug(3, "Connection %d: handle_pair_verify Content-Length %d", conn->connection_number,
+ req->contentlength);
int ret;
uint8_t *body = NULL;
size_t body_len = 0;
- struct pair_result *result;
- debug(3, "Connection %d: pair-verify Content-Length %d", conn->connection_number,
- req->contentlength);
+ // struct pair_result *result;
if (!conn->ap2_pairing_context.verify_ctx) {
conn->ap2_pairing_context.verify_ctx =
ret = pair_verify(&body, &body_len, conn->ap2_pairing_context.verify_ctx,
(const uint8_t *)req->content, req->contentlength);
if (ret < 0) {
- debug(1, pair_verify_errmsg(conn->ap2_pairing_context.verify_ctx));
+ debug(1, "%s", pair_verify_errmsg(conn->ap2_pairing_context.verify_ctx));
resp->respcode = 470; // Connection Authorization Required
goto out;
}
- ret = pair_verify_result(&result, conn->ap2_pairing_context.verify_ctx);
- if (ret == 0 && result->shared_secret_len > 0) {
- conn->ap2_pairing_context.control_cipher_bundle.cipher_ctx =
- pair_cipher_new(PAIR_SERVER_HOMEKIT, 2, result->shared_secret, result->shared_secret_len);
- if (!conn->ap2_pairing_context.control_cipher_bundle.cipher_ctx) {
- debug(1, "Error setting up rtsp control channel ciphering\n");
- goto out;
+ /*
+ ret = pair_verify_result(&result, conn->ap2_pairing_context.verify_ctx);
+ if (ret == 0 && result->shared_secret_len > 0) {
+ conn->ap2_pairing_context.control_cipher_bundle.cipher_ctx =
+ pair_cipher_new(PAIR_SERVER_HOMEKIT, 3, result->shared_secret, result->shared_secret_len);
+ if (!conn->ap2_pairing_context.control_cipher_bundle.cipher_ctx) {
+ debug(1, "Error setting up rtsp control channel ciphering\n");
+ goto out;
+ }
+ conn->ap2_pairing_context.event_cipher_bundle.cipher_ctx =
+ pair_cipher_new(PAIR_SERVER_HOMEKIT, 4, result->shared_secret, result->shared_secret_len);
+ if (!conn->ap2_pairing_context.event_cipher_bundle.cipher_ctx) {
+ debug(1, "Error setting up rtsp event channel ciphering\n");
+ goto out;
+ }
}
- }
+ */
out:
resp->content = (char *)body; // these will be freed when the data is sent
debug_log_rtsp_message(3, "pair-verify response", resp);
}
+void handle_pair_pin_start(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
+ uint8_t *body = NULL;
+ size_t body_len = 0;
+ debug(2, "Connection %d: pair-pin-start Content-Length %d", conn->connection_number,
+ req->contentlength);
+ debug_log_rtsp_message(2, "handle_pair_pin_start", req);
+
+ resp->content = (char *)body; // these will be freed when the data is sent
+ resp->contentlength = body_len;
+ if (body != NULL)
+ msg_add_header(resp, "Content-Type", "application/octet-stream");
+ debug_log_rtsp_message(2, "pair-pin-start response", resp);
+}
+
void handle_pair_setup(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
int ret;
uint8_t *body = NULL;
size_t body_len = 0;
- struct pair_result *result;
- debug(2, "Connection %d: handle_pair-setup Content-Length %d", conn->connection_number,
+ debug(3, "Connection %d: handle_pair_setup Content-Length %d", conn->connection_number,
req->contentlength);
+ debug_log_rtsp_message(3, "handle_pair_setup", req);
if (!conn->ap2_pairing_context.setup_ctx) {
conn->ap2_pairing_context.setup_ctx = pair_setup_new(PAIR_SERVER_HOMEKIT, config.airplay_pin,
ret = pair_setup(&body, &body_len, conn->ap2_pairing_context.setup_ctx,
(const uint8_t *)req->content, req->contentlength);
if (ret < 0) {
- debug(1, pair_setup_errmsg(conn->ap2_pairing_context.setup_ctx));
+ debug(1, "%s", pair_setup_errmsg(conn->ap2_pairing_context.setup_ctx));
resp->respcode = 470; // Connection Authorization Required
goto out;
}
- ret = pair_setup_result(NULL, &result, conn->ap2_pairing_context.setup_ctx);
- if (ret == 0 && result->shared_secret_len > 0) {
+ ret = pair_setup_result(NULL, &conn->pair_setup_result, conn->ap2_pairing_context.setup_ctx);
+ if (ret == 0 && conn->pair_setup_result->shared_secret_len > 0) {
// Transient pairing completed (pair-setup step 2), prepare encryption, but
// don't activate yet, the response to this request is still plaintext
conn->ap2_pairing_context.control_cipher_bundle.cipher_ctx =
- pair_cipher_new(PAIR_SERVER_HOMEKIT, 2, result->shared_secret, result->shared_secret_len);
+ pair_cipher_new(PAIR_SERVER_HOMEKIT, 3, conn->pair_setup_result->shared_secret,
+ conn->pair_setup_result->shared_secret_len,
+ ""); // last argument is the (possible) dynamic salt suffix
if (!conn->ap2_pairing_context.control_cipher_bundle.cipher_ctx) {
debug(1, "Error setting up rtsp control channel ciphering\n");
goto out;
}
+ conn->ap2_pairing_context.control_cipher_bundle.description = strdup("Control Stream");
+ conn->ap2_pairing_context.event_cipher_bundle.cipher_ctx =
+ pair_cipher_new(PAIR_SERVER_HOMEKIT, 4, conn->pair_setup_result->shared_secret,
+ conn->pair_setup_result->shared_secret_len,
+ ""); // last argument is the (possible) dynamic salt suffix
+ if (!conn->ap2_pairing_context.event_cipher_bundle.cipher_ctx) {
+ debug(1, "Error setting up rtsp event channel ciphering\n");
+ goto out;
+ }
+ conn->ap2_pairing_context.event_cipher_bundle.description = strdup("Event Stream");
}
out:
debug(3, "Connection %d: POST %s Content-Length %d", conn->connection_number, req->path,
req->contentlength);
debug_log_rtsp_message(3, NULL, req);
- if (conn->airplay_stream_category == remote_control_stream) {
+
+ int is_playing = 0;
+ int type = 0;
+ double rate = 0.0;
+
+ // get information from the current player, if any.
+
+ pthread_rwlock_rdlock(&principal_conn_lock); // don't let the principal_conn be changed
+ pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
+ if ((principal_conn != NULL) && (principal_conn->is_playing != 0)) {
+ is_playing = 1;
+ type = principal_conn->type;
+ rate = 1.0 * principal_conn->input_rate;
+ }
+ pthread_cleanup_pop(1); // release the principal_conn lock
+
+ // debug(1, "Player is%s playing.", is_playing != 0 ? "" : " not");
+
+ if (is_playing != 0) {
+ if ((type != 96) && (type != 103))
+ debug(1, "Connection %d, feedback unexpected type: %d.", conn->connection_number, type);
+ if ((rate != 44100.0) && (rate != 48000.0))
+ debug(2, "Connection %d, feedback unexpected rate: %f.", conn->connection_number, rate);
+ plist_t payload_plist = plist_new_dict();
+ plist_dict_set_item(payload_plist, "type", plist_new_uint(type));
+ plist_dict_set_item(payload_plist, "sr", plist_new_real(rate));
+
plist_t array_plist = plist_new_array();
+ plist_array_append_item(array_plist, payload_plist);
plist_t response_plist = plist_new_dict();
plist_dict_set_item(response_plist, "streams", array_plist);
plist_to_bin(response_plist, &resp->content, &resp->contentlength);
plist_free(response_plist);
+ // plist_free(array_plist);
+ // plist_free(payload_plist);
msg_add_header(resp, "Content-Type", "application/x-apple-binary-plist");
- debug_log_rtsp_message(3, "FEEDBACK response (remote_control_stream):", resp);
+ debug_log_rtsp_message(3, "FEEDBACK response:", resp);
}
-
- /* not finished yet
- plist_t payload_plist = plist_new_dict();
- plist_dict_set_item(payload_plist, "type", plist_new_uint(103));
- plist_dict_set_item(payload_plist, "sr", plist_new_real(44100.0));
-
- plist_t array_plist = plist_new_array();
- plist_array_append_item(array_plist, payload_plist);
-
- plist_t response_plist = plist_new_dict();
- plist_dict_set_item(response_plist, "streams",array_plist);
-
- plist_to_bin(response_plist, &resp->content, &resp->contentlength);
- plist_free(response_plist);
- // plist_free(array_plist);
- // plist_free(payload_plist);
-
- msg_add_header(resp, "Content-Type", "application/x-apple-binary-plist");
- debug_log_rtsp_message(2, "FEEDBACK response:", resp);
- */
}
-void handle_command(__attribute__((unused)) rtsp_conn_info *conn, rtsp_message *req,
+void handle_command(rtsp_conn_info *conn, rtsp_message *req,
__attribute__((unused)) rtsp_message *resp) {
- debug(2, "Connection %d: POST %s Content-Length %d", conn->connection_number, req->path,
+ debug(3, "Connection %d: POST %s Content-Length %d", conn->connection_number, req->path,
req->contentlength);
debug_log_rtsp_message(3, NULL, req);
if (rtsp_message_contains_plist(req)) {
plist_t subsidiary_plist = NULL;
plist_from_memory(buff, length, &subsidiary_plist);
if (subsidiary_plist) {
- char *printable_plist = plist_content(subsidiary_plist);
+ char *printable_plist = plist_as_xml_text(subsidiary_plist);
if (printable_plist) {
debug(3, "\n%s", printable_plist);
free(printable_plist);
} else {
debug(1, "Can't print the plist!");
}
- // plist_free(subsidiary_plist);
+ plist_free(subsidiary_plist);
} else {
debug(1, "Can't access the plist!");
}
}
}
} else {
- debug(1, "POST /command no mrSupportedCommandsFromSender item.");
+ debug(1, "Connection %d: POST /command no mrSupportedCommandsFromSender item.",
+ conn->connection_number);
}
} else {
- debug(1, "POST /command no params dict.");
+ debug(1, "Connection %d: POST /command no params dict.", conn->connection_number);
}
- resp->respcode = 400; // say it's a bad request
+ resp->respcode = 200;
} else {
- debug(1,
- "POST /command plist type is \"%s\", but \"updateMRSupportedCommands\" expected.",
- typeValue);
+ debug(1, "Connection %d: POST /command plist type \"%s\" received.",
+ conn->connection_number, typeValue);
+ debug_log_rtsp_message(2, NULL, req);
}
if (typeValue != NULL)
free(typeValue);
} else {
- debug(2, "Could not find a \"type\" item.");
+ debug(2, "Connection %d: Could not find a \"type\" item.", conn->connection_number);
}
plist_free(command_dict);
} else {
- debug(1, "POST /command plist cannot be inputted.");
+ debug(1, "Connection %d: POST /command plist cannot be inputted.", conn->connection_number);
}
} else {
- debug(1, "POST /command contains no plist");
+ debug(1, "Connection %d: POST /command contains no plist", conn->connection_number);
}
}
__attribute__((unused)) rtsp_message *resp) {
debug(2, "Connection %d: POST %s Content-Length %d", conn->connection_number, req->path,
req->contentlength);
- debug_log_rtsp_message(2, NULL, req);
+ debug_log_rtsp_message(3, NULL, req);
}
void handle_post(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
handle_pair_remove(conn, req, resp);
} else if (strcmp(req->path, "/pair-list") == 0) {
handle_pair_list(conn, req, resp);
+ } else if (strcmp(req->path, "/pair-pin-start") == 0) {
+ handle_pair_pin_start(conn, req, resp);
} else if (strcmp(req->path, "/fp-setup") == 0) {
handle_fp_setup(conn, req, resp);
} else if (strcmp(req->path, "/configure") == 0) {
}
void handle_setpeers(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
- debug(3, "Connection %d: SETPEERS %s Content-Length %d", conn->connection_number, req->path,
+ debug(2, "Connection %d: SETPEERS %s Content-Length %d", conn->connection_number, req->path,
req->contentlength);
debug_log_rtsp_message(3, "SETPEERS request", req);
/*
// set_client_as_ptp_clock(conn);
resp->respcode = 200;
}
-#endif
-
-#ifndef CONFIG_AIRPLAY_2
-void handle_options(rtsp_conn_info *conn, __attribute__((unused)) rtsp_message *req,
- rtsp_message *resp) {
- debug_log_rtsp_message(2, "OPTIONS request", req);
- debug(3, "Connection %d: OPTIONS", conn->connection_number);
+void handle_setpeersx(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
+ debug(2, "Connection %d: SETPEERSX %s Content-Length %d", conn->connection_number, req->path,
+ req->contentlength);
+ debug_log_rtsp_message(2, "SETPEERSX request", req);
resp->respcode = 200;
- msg_add_header(resp, "Public",
- "ANNOUNCE, SETUP, RECORD, "
- "PAUSE, FLUSH, TEARDOWN, "
- "OPTIONS, GET_PARAMETER, SET_PARAMETER");
}
#endif
-#ifdef CONFIG_AIRPLAY_2
-
void handle_options(rtsp_conn_info *conn, __attribute__((unused)) rtsp_message *req,
rtsp_message *resp) {
debug_log_rtsp_message(2, "OPTIONS request", req);
resp->respcode = 200;
msg_add_header(resp, "Public",
"ANNOUNCE, SETUP, RECORD, "
- "PAUSE, FLUSH, FLUSHBUFFERED, TEARDOWN, "
- "OPTIONS, POST, GET, PUT");
-}
-
-void teardown_phase_one(rtsp_conn_info *conn) {
- // this can be called more than once on the same connection --
- // by the player itself but also by the play session being killed
- if (conn->player_thread) {
- player_stop(conn); // this nulls the player_thread
- activity_monitor_signify_activity(0); // inactive, and should be after command_stop()
- }
- if (conn->session_key) {
- free(conn->session_key);
- conn->session_key = NULL;
- }
-}
-
-void teardown_phase_two(rtsp_conn_info *conn) {
- // we are being asked to disconnect
- // this can be called more than once on the same connection --
- // by the player itself but also by the play seesion being killed
- debug(2, "Connection %d: TEARDOWN %s connection.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- if (conn->airplay_stream_category == remote_control_stream) {
- if (conn->rtp_data_thread) {
- debug(2, "Connection %d: TEARDOWN %s Delete Data Thread.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- pthread_cancel(*conn->rtp_data_thread);
- pthread_join(*conn->rtp_data_thread, NULL);
- free(conn->rtp_data_thread);
- conn->rtp_data_thread = NULL;
- }
- if (conn->data_socket) {
- debug(2, "Connection %d: TEARDOWN %s Close Data Socket.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- close(conn->data_socket);
- conn->data_socket = 0;
- }
- }
-
- if (conn->rtp_event_thread) {
- debug(2, "Connection %d: TEARDOWN %s Delete Event Thread.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- pthread_cancel(*conn->rtp_event_thread);
- pthread_join(*conn->rtp_event_thread, NULL);
- free(conn->rtp_event_thread);
- conn->rtp_event_thread = NULL;
- }
- if (conn->event_socket) {
- debug(2, "Connection %d: TEARDOWN %s Close Event Socket.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- close(conn->event_socket);
- conn->event_socket = 0;
- }
-
- // if we are closing a PTP stream only, do this
- if (conn->airplay_stream_category == ptp_stream) {
- if (conn->airplay_gid != NULL) {
- free(conn->airplay_gid);
- conn->airplay_gid = NULL;
-
-#ifdef CONFIG_METADATA
- // this is here to ensure it's only performed once during a teardown of a ptp stream
- send_ssnc_metadata('disc', conn->client_ip_string, strlen(conn->client_ip_string), 1);
-#endif
- }
- conn->groupContainsGroupLeader = 0;
- if (conn->dacp_active_remote != NULL) {
- free(conn->dacp_active_remote);
- conn->dacp_active_remote = NULL;
- }
- clear_ptp_clock();
- }
-
- // only update these things if you're (still) the principal conn
- pthread_rwlock_wrlock(&principal_conn_lock); // don't let the principal_conn be changed
- pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
- if (principal_conn == conn) {
- if (conn->airplay_stream_category == ptp_stream) {
- config.airplay_statusflags &= (0xffffffff - (1 << 11)); // DeviceSupportsRelay
- build_bonjour_strings(conn);
- debug(2, "Connection %d: TEARDOWN mdns_update on %s.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- mdns_update(NULL, secondary_txt_records);
- }
- principal_conn = NULL; // stop being principal_conn
- }
- pthread_cleanup_pop(1); // release the principal_conn lock
- debug(2, "Connection %d: TEARDOWN %s -- close the connection complete", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
+ "PAUSE, FLUSH, TEARDOWN, "
+ "OPTIONS, GET_PARAMETER, SET_PARAMETER");
}
-void handle_teardown_2(rtsp_conn_info *conn, __attribute__((unused)) rtsp_message *req,
- rtsp_message *resp) {
-
- debug(2, "Connection %d: TEARDOWN 2 %s.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- debug_log_rtsp_message(2, "TEARDOWN: ", req);
- resp->respcode = 200;
- msg_add_header(resp, "Connection", "close");
- plist_t messagePlist = plist_from_rtsp_content(req);
- if (messagePlist != NULL) {
- // now see if the incoming plist contains a "streams" array
- plist_t streams = plist_dict_get_item(messagePlist, "streams");
+void handle_teardown(rtsp_conn_info *conn, __attribute__((unused)) rtsp_message *req,
+ rtsp_message *resp) {
+ debug(2, "Connection %d: TEARDOWN (Classic AirPlay)", conn->connection_number);
+ debug_log_rtsp_message(2, "TEARDOWN (Classic AirPlay) request", req);
- if (streams) {
- debug(2, "Connection %d: TEARDOWN %s -- close the stream.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- // we are being asked to close a stream
- teardown_phase_one(conn);
- plist_free(streams);
- debug(2, "Connection %d: TEARDOWN %s -- close the stream complete", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- } else {
- debug(2, "Connection %d: TEARDOWN %s -- close the connection.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- teardown_phase_one(conn); // try to do phase one anyway
- teardown_phase_two(conn);
- }
- plist_free(messagePlist);
- resp->respcode = 200;
- } else {
- debug(1, "Connection %d: missing plist!", conn->connection_number);
- resp->respcode = 451; // don't know what to do here
+ // most of the cleanup here is done by the exiting player_thread, if any, and by the event
+ // receiver if and when it exits.
+
+ if (conn->player_thread) {
+ debug(2, "TEARDOWN is stopping a player thread before exiting...");
+ player_stop(conn); // this nulls the player_thread and cancels the threads...
+ activity_monitor_signify_activity(0); // inactive, and should be after command_stop()
}
+ resp->respcode = 200;
+ msg_add_header(resp, "Connection", "close");
// debug(1,"Bogus exit for valgrind -- remember to comment it out!.");
- // exit(EXIT_SUCCESS); //
+ // exit(EXIT_SUCCESS);
}
-#endif
-
-void teardown(rtsp_conn_info *conn) {
- debug(2, "Connection %d: TEARDOWN (Classic AirPlay).", conn->connection_number);
- player_stop(conn);
- activity_monitor_signify_activity(0); // inactive, and should be after command_stop()
- if (conn->dacp_active_remote != NULL) {
- free(conn->dacp_active_remote);
- conn->dacp_active_remote = NULL;
- }
- // only update these things if you're (still) the principal conn
- pthread_rwlock_wrlock(&principal_conn_lock); // don't let the principal_conn be changed
- pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
- if (principal_conn == conn) {
#ifdef CONFIG_AIRPLAY_2
- config.airplay_statusflags &= (0xffffffff - (1 << 11)); // DeviceSupportsRelay
- build_bonjour_strings(conn);
- mdns_update(NULL, secondary_txt_records);
-#endif
- principal_conn = NULL; // stop being principal_conn
- }
- pthread_cleanup_pop(1); // release the principal_conn lock
+void handle_options_2(rtsp_conn_info *conn, __attribute__((unused)) rtsp_message *req,
+ rtsp_message *resp) {
+ debug_log_rtsp_message(2, "OPTIONS request", req);
+ debug(3, "Connection %d: OPTIONS", conn->connection_number);
+ resp->respcode = 200;
+ msg_add_header(resp, "Public",
+ "ANNOUNCE, SETUP, RECORD, "
+ "PAUSE, FLUSH, FLUSHBUFFERED, TEARDOWN, "
+ "OPTIONS, POST, GET, PUT");
}
-void handle_teardown(rtsp_conn_info *conn, __attribute__((unused)) rtsp_message *req,
- rtsp_message *resp) {
- debug_log_rtsp_message(2, "TEARDOWN request", req);
- debug(2, "Connection %d: TEARDOWN", conn->connection_number);
- debug(3,
- "TEARDOWN: synchronously terminating the player thread of RTSP conversation thread %d (2).",
- conn->connection_number);
- teardown(conn);
+// TEARDOWN and TEARDOWN for AP2 look the same!
+
+void handle_teardown_2(rtsp_conn_info *conn, __attribute__((unused)) rtsp_message *req,
+ rtsp_message *resp) {
+
+ debug(2, "Connection %d: TEARDOWN 2 %s.", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ debug_log_rtsp_message(2, "TEARDOWN 2: ", req);
+
+ if (conn->player_thread) {
+ debug(2, "TEARDOWN 2 is stopping a player thread before exiting...");
+ player_stop(conn); // this nulls the player_thread and cancels the threads...
+ activity_monitor_signify_activity(0); // inactive, and should be after command_stop()
+ }
resp->respcode = 200;
msg_add_header(resp, "Connection", "close");
- debug(3, "TEARDOWN: successful termination of playing thread of RTSP conversation thread %d.",
- conn->connection_number);
+
// debug(1,"Bogus exit for valgrind -- remember to comment it out!.");
- // exit(EXIT_SUCCESS);
+ // sps_shutdown(TOE_normal); // ask for a normal exit
}
+#endif
void handle_flush(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
debug_log_rtsp_message(2, "FLUSH request", req);
}
}
debug(2, "RTSP Flush Requested: %u.", rtptime);
-
if ((conn != NULL) && (conn == principal_conn)) {
#ifdef CONFIG_METADATA
if (p)
void handle_setup_2(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
int err;
- debug(2, "Connection %d: SETUP (AirPlay 2)", conn->connection_number);
- debug_log_rtsp_message(3, "SETUP (AirPlay 2) SETUP incoming message", req);
+ debug(2, "Connection %d: SETUP (AirPlay 2) on %s", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ debug_log_rtsp_message(2, "SETUP (AirPlay 2) incoming message", req);
plist_t messagePlist = plist_from_rtsp_content(req);
plist_t setupResponsePlist = plist_new_dict();
- resp->respcode = 400;
+ resp->respcode = 501;
// see if we can get a name for the client
char *clientNameString = NULL;
+
plist_t nameItem = plist_dict_get_item(messagePlist, "name");
if (nameItem != NULL) {
- plist_get_string_val(nameItem, &clientNameString);
+ plist_get_string_val(nameItem, &conn->ap2_client_name); // generates a malloced string
+ clientNameString = strdup(conn->ap2_client_name);
} else {
clientNameString = strdup("<unknown>");
}
plist_get_string_val(timingProtocol, &timingProtocolString);
if (timingProtocolString) {
if (strcmp(timingProtocolString, "PTP") == 0) {
- debug(1, "Connection %d: AP2 PTP connection from %s:%u (\"%s\") to self at %s:%u.",
+ debug(2, "Connection %d: AP2 PTP connection from %s:%u (\"%s\") to self at %s:%u.",
conn->connection_number, conn->client_ip_string, conn->client_rtsp_port,
clientNameString, conn->self_ip_string, conn->self_rtsp_port);
conn->airplay_stream_category = ptp_stream;
conn->timing_type = ts_ptp;
+
+ do_pthread_setname(&conn->thread, "ap2_ptp_%d", conn->connection_number);
+
} else if (strcmp(timingProtocolString, "NTP") == 0) {
debug(1, "Connection %d: SETUP: NTP setup from %s:%u (\"%s\") to self at %s:%u.",
conn->connection_number, conn->client_ip_string, conn->client_rtsp_port,
clientNameString, conn->self_ip_string, conn->self_rtsp_port);
conn->airplay_stream_category = ntp_stream;
conn->timing_type = ts_ntp;
+ do_pthread_setname(&conn->thread, "ap2_ntp_%d", conn->connection_number);
} else if (strcmp(timingProtocolString, "None") == 0) {
debug(3,
"Connection %d: SETUP: a \"None\" setup detected from %s:%u (\"%s\") to self at "
uint8_t isRemoteControlOnlyBoolean = 0;
plist_get_bool_val(isRemoteControlOnly, &isRemoteControlOnlyBoolean);
if (isRemoteControlOnlyBoolean != 0) {
- debug(
- 2,
- "Connection %d: Remote Control connection from %s:%u (\"%s\") to self at %s:%u.",
- conn->connection_number, conn->client_ip_string, conn->client_rtsp_port,
- clientNameString, conn->self_ip_string, conn->self_rtsp_port);
+ debug(2,
+ "Connection %d: SETUP: Remote Control Only connection from %s:%u (\"%s\") to "
+ "self at %s:%u.",
+ conn->connection_number, conn->client_ip_string, conn->client_rtsp_port,
+ clientNameString, conn->self_ip_string, conn->self_rtsp_port);
conn->airplay_stream_category = remote_control_stream;
+ do_pthread_setname(&conn->thread, "ap2_rc_%d", conn->connection_number);
} else {
debug(1,
"Connection %d: SETUP: a \"None\" setup detected, with "
// timingPeerList
if (conn->airplay_stream_category == ptp_stream) {
- // airplay 2 always allows interruption, so should never return -1
- if (get_play_lock(conn, 1) != -1) {
+ // airplay 2 always allows interruption, so should never return
+ // play_lock_aquisition_failed
+ if (get_play_lock(conn, 1) != play_lock_aquisition_failed) {
+ debug(2, "Connection %d: %s AP2 setup -- play lock acquired.", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
#ifdef CONFIG_METADATA
send_ssnc_metadata('conn', conn->client_ip_string, strlen(conn->client_ip_string),
1); // before disconnecting an existing play
-#endif
-
-#ifdef CONFIG_METADATA
send_ssnc_metadata('clip', conn->client_ip_string, strlen(conn->client_ip_string), 1);
send_ssnc_metadata('svip', conn->self_ip_string, strlen(conn->self_ip_string), 1);
#endif
if (ptp_shm_interface_open() !=
0) // it should be open already, but just in case it isn't...
die("Can not access the NQPTP service. Has it stopped running?");
- // clear_ptp_clock();
debug_log_rtsp_message(3, "SETUP \"PTP\" message", req);
plist_t groupUUID = plist_dict_get_item(messagePlist, "groupUUID");
if (groupUUID) {
uint8_t value = 0;
plist_get_bool_val(groupContainsGroupLeader, &value);
conn->groupContainsGroupLeader = value;
- debug(2, "Updated groupContainsGroupLeader to %u", conn->groupContainsGroupLeader);
+ debug(3, "Updated groupContainsGroupLeader to %u", conn->groupContainsGroupLeader);
} else {
debug(1, "No groupContainsGroupLeader in SETUP");
}
// debug(1, "Interface index %d, name: \"%s\"",if_nametoindex(iap->ifa_name),
// iap->ifa_name);
if ((iap->ifa_addr) && (iap->ifa_netmask) && (iap->ifa_flags & IFF_UP) &&
- ((iap->ifa_flags & IFF_LOOPBACK) == 0) &&
- (config.interface == NULL || (strcmp(config.interface, iap->ifa_name) == 0))) {
+ ((iap->ifa_flags & IFF_LOOPBACK) == 0)) {
char buf[INET6_ADDRSTRLEN + 1]; // +1 for a NUL
memset(buf, 0, sizeof(buf));
if (iap->ifa_addr->sa_family == AF_INET6) {
// debug(1,"initial timing peer command: \"%s\".", timing_list_message);
// ptp_send_control_message_string(timing_list_message);
- set_client_as_ptp_clock(conn);
- ptp_send_control_message_string(
- "B"); // signify clock dependability period is "B"eginning (or continuing)
+ // deferring this until play is about to start
+ // set_client_as_ptp_clock(conn);
+ // ptp_send_control_message_string("B"); // signify clock dependability period is
+ // "B"eginning (or continuing)
plist_dict_set_item(timingPeerInfoPlist, "Addresses", addresses);
plist_dict_set_item(timingPeerInfoPlist, "ID",
plist_new_string(conn->self_ip_string));
// get a port to use as an event port
// bind a new TCP port and get a socket
conn->local_event_port = 0; // any port
- int err = bind_socket_and_port(SOCK_STREAM, conn->connection_ip_family,
- conn->self_ip_string, conn->self_scope_id,
- &conn->local_event_port, &conn->event_socket);
- if (err) {
+ int lerr = bind_socket_and_port(SOCK_STREAM, conn->connection_ip_family,
+ conn->self_ip_string, conn->self_scope_id,
+ &conn->local_event_port, &conn->event_socket);
+ if (lerr) {
die("SETUP on Connection %d: Error %d: could not find a TCP port to use as an "
"event "
"port",
- conn->connection_number, err);
+ conn->connection_number, lerr);
}
listen(conn->event_socket, 128); // ensure socket is open before telling client
if (conn->rtp_event_thread != NULL)
debug(1, "previous rtp_event_thread allocation not freed, it seems.");
+ conn->ap2_event_receiver_exited = 0;
conn->rtp_event_thread = malloc(sizeof(pthread_t));
if (conn->rtp_event_thread == NULL)
die("Couldn't allocate space for pthread_t");
- pthread_create(conn->rtp_event_thread, NULL, &rtp_event_receiver, (void *)conn);
-
+ named_pthread_create(conn->rtp_event_thread, NULL, &ap2_event_receiver, (void *)conn,
+ "ap2_ptp_evt_%d", conn->connection_number);
plist_dict_set_item(setupResponsePlist, "eventPort",
plist_new_uint(conn->local_event_port));
plist_dict_set_item(setupResponsePlist, "timingPort", plist_new_uint(0)); // dummy
-
- /*
- cancel_all_RTSP_threads(unspecified_stream_category,
- conn->connection_number); // kill all the other
- listeners
- */
- // only update these things if you're (still) the principal conn
- pthread_rwlock_wrlock(
- &principal_conn_lock); // don't let the principal_conn be changed
- pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
- if (principal_conn == conn) {
- config.airplay_statusflags |= 1 << 11; // DeviceSupportsRelay
- build_bonjour_strings(conn);
- debug(2, "Connection %d: SETUP mdns_update on %s.", conn->connection_number,
- get_category_string(conn->airplay_stream_category));
- mdns_update(NULL, secondary_txt_records);
- }
- pthread_cleanup_pop(1); // release the principal_conn lock
-
+ // cancel_all_RTSP_threads(ptp_stream,
+ // conn->connection_number); // kill all the other listeners
resp->respcode = 200;
} else {
debug(1, "SETUP on Connection %d: PTP setup -- no timingPeerInfo plist.",
conn->connection_number);
}
+ // since the GID from the client has been acquired, update the airplay bonjour strings.
+ build_bonjour_strings(conn);
+ debug(2, "Connection %d: SETUP mdns_update on %s.", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ mdns_update(NULL, secondary_txt_records);
+
#ifdef CONFIG_METADATA
check_and_send_plist_metadata(messagePlist, "name", 'snam');
check_and_send_plist_metadata(messagePlist, "deviceID", 'cdid');
}
} else if (conn->airplay_stream_category == ntp_stream) {
debug(1, "SETUP on Connection %d: ntp stream handling is not implemented!",
- conn->connection_number, req);
+ conn->connection_number);
warn("Shairport Sync can not handle NTP streams.");
} else if (conn->airplay_stream_category == remote_control_stream) {
- /*
- debug_log_rtsp_message(2, "SETUP (no stream) \"isRemoteControlOnly\" message", req);
+
+ debug_log_rtsp_message(3, "SETUP (no stream) \"isRemoteControlOnly\" message", req);
// get a port to use as an event port
// bind a new TCP port and get a socket
conn->local_event_port = 0; // any port
- int err = bind_socket_and_port(SOCK_STREAM, conn->connection_ip_family,
- conn->self_ip_string, conn->self_scope_id,
- &conn->local_event_port, &conn->event_socket);
- if (err) {
+ int lerr = bind_socket_and_port(SOCK_STREAM, conn->connection_ip_family,
+ conn->self_ip_string, conn->self_scope_id,
+ &conn->local_event_port, &conn->event_socket);
+ if (lerr) {
die("SETUP on Connection %d: Error %d: could not find a TCP port to use as an event "
"port",
- conn->connection_number, err);
+ conn->connection_number, lerr);
}
listen(conn->event_socket, 128); // ensure socket is open before telling client
- debug(1, "Connection %d SETUP (RC): TCP Remote Control event port opened: %u.",
+ debug(2, "Connection %d SETUP (RC): TCP Remote Control event port opened: %u.",
conn->connection_number, conn->local_event_port);
if (conn->rtp_event_thread != NULL)
debug(1,
"Connection %d SETUP (RC): previous rtp_event_thread allocation not freed, it "
"seems.",
conn->connection_number);
-
+ conn->ap2_event_receiver_exited = 0;
conn->rtp_event_thread = malloc(sizeof(pthread_t));
if (conn->rtp_event_thread == NULL)
die("Couldn't allocate space for pthread_t");
- pthread_create(conn->rtp_event_thread, NULL, &rtp_event_receiver, (void *)conn);
+ named_pthread_create(conn->rtp_event_thread, NULL, &ap2_rc_event_receiver, (void *)conn,
+ "ap2_rc_evt_%d", conn->connection_number);
plist_dict_set_item(setupResponsePlist, "eventPort",
plist_new_uint(conn->local_event_port));
- plist_dict_set_item(setupResponsePlist, "timingPort", plist_new_uint(0));
- cancel_all_RTSP_threads(
- remote_control_stream,
- conn->connection_number); // kill all the other remote control listeners
- */
+ debug(2, "SETUP on Connection %d: RemoteControl Only eventPort %u.",
+ conn->connection_number, conn->local_event_port);
+ // plist_dict_set_item(setupResponsePlist, "timingPort", plist_new_uint(0));
+ // cancel_all_RTSP_threads(
+ // remote_control_stream,
+ // conn->connection_number); // kill all the other remote control listeners
+
resp->respcode = 200;
} else {
debug(1, "SETUP on Connection %d: an unrecognised \"%s\" setup detected.",
"SETUP on Connection %d: Unrecognised SETUP incoming message from \"%s\": no "
"timingProtocol or streams plist found.",
conn->connection_number, (const char *)conn->client_ip_string);
- debug_log_rtsp_message(2, "Unrecognised SETUP incoming message.", req);
+ debug_log_rtsp_message(1, "Unrecognised SETUP incoming message.", req);
warn("Unrecognised SETUP incoming message -- ignored.");
}
} else {
debug(2, "Connection %d: SETUP on %s. A \"streams\" array has been found",
conn->connection_number, get_category_string(conn->airplay_stream_category));
+ debug_log_rtsp_message(3, "SETUP (AirPlay 2) SETUP with streams incoming message", req);
if (conn->airplay_stream_category == ptp_stream) {
- // get stream[0]
+
+ if (conn->player_thread) {
+ debug(1, "stopping a running player during setup phase 2");
+ player_stop(conn); // this nulls the player_thread and cancels the threads...
+ activity_monitor_signify_activity(0); // inactive, and should be after command_stop()
+ }
+
+ set_client_as_ptp_clock(conn);
ptp_send_control_message_string(
"B"); // signify clock dependability period is "B"eginning (or continuing)
plist_t stream0 = plist_array_get_item(streams, 0);
// get the session key -- it must have one
plist_t item = plist_dict_get_item(stream0, "shk"); // session key
- uint64_t item_value = 0; // the length
- plist_get_data_val(item, (char **)&conn->session_key, &item_value);
+ uint64_t item_value = 0;
+ if (item != NULL) {
+ plist_get_data_val(item, (char **)&conn->session_key,
+ &item_value); // item_value is the session key length (?)
+ } else {
+ warn("No session key (shk) property in setup! This is fatal!");
+ }
+
+ // get the compression type
+ // this seems to be static -- a stream's encoding can change dynamically, it seems
+ item = plist_dict_get_item(stream0, "ct"); // compression type
+ if (item != NULL) {
+ plist_get_uint_val(item, &item_value);
+ conn->compressionType = item_value;
+ // see https://emanuelecozzi.net/docs/airplay2/audio/ for values
+ } else {
+ debug(1, "No compression type (ct) property found in setup.");
+ }
- // more stuff
- // set up a UDP control stream and thread and a UDP or TCP audio stream and thread
+ // get the max frames per packet
+ item = plist_dict_get_item(stream0, "spf"); // samples per frame (?)
+ if (item != NULL) {
+ plist_get_uint_val(item, &item_value);
+ // see https://emanuelecozzi.net/docs/airplay2/audio/ for values
+ debug(3, "Frames per packet (aka spf (\"samples per frame\"?): %" PRId64 ".", item_value);
+ conn->frames_per_packet = item_value;
+ } else {
+ warn("No frames per packet (spf) property found in setup!");
+ }
// bind a new UDP port and get a socket
conn->local_ap2_control_port = 0; // any port
debug(2, "Connection %d: UDP control port opened: %u.", conn->connection_number,
conn->local_ap2_control_port);
- pthread_create(&conn->rtp_ap2_control_thread, NULL, &rtp_ap2_control_receiver, (void *)conn);
+ named_pthread_create(&conn->rtp_ap2_control_thread, NULL, &rtp_ap2_control_receiver,
+ (void *)conn, "ap2_cn_%d", conn->connection_number);
// get the DACP-ID and Active Remote for remote control stuff
send_metadata('ssnc', 'acre', ar, strlen(ar), req, 1);
#endif
} else {
- debug(1, "Connection %d: SETUP AP2 no Active-Remote information the SETUP Record.",
+ debug(2, "Connection %d: SETUP AP2 no Active-Remote information in the the SETUP Record.",
conn->connection_number);
if (conn->dacp_active_remote) { // this is in case SETUP was previously called
free(conn->dacp_active_remote);
send_metadata('ssnc', 'daid', ar, strlen(ar), req, 1);
#endif
} else {
- debug(1, "Connection %d: SETUP AP2 doesn't include DACP-ID string information.",
+ debug(2, "Connection %d: SETUP AP2 doesn't include DACP-ID string information.",
conn->connection_number);
if (conn->dacp_id) { // this is in case SETUP was previously called
free(conn->dacp_id);
item = plist_dict_get_item(stream0, "type");
item_value = 0;
plist_get_uint_val(item, &item_value);
-
+ conn->type = item_value;
switch (item_value) {
case 96: {
- debug(1, "Connection %d. AP2 Realtime Audio Stream.", conn->connection_number);
+ debug(1, "Connection %d. AP2 Realtime ALAC/44100/S16/2 Stream.", conn->connection_number);
debug_log_rtsp_message(2, "Realtime Audio Stream SETUP incoming message", req);
+
+ conn->stream.type = ast_apple_lossless;
conn->airplay_stream_type = realtime_stream;
+ // get the sample rate
+ item = plist_dict_get_item(stream0, "sr"); // sample rate
+ if (item != NULL) {
+ plist_get_uint_val(item, &item_value);
+ // see https://emanuelecozzi.net/docs/airplay2/audio/ for values
+ debug(2, "Sample rate: %" PRId64 ".", item_value);
+ conn->input_rate = item_value;
+ } else {
+ debug(1, "No sample rate (sr) property found in setup.");
+ }
+ conn->frames_per_packet = 352;
+
// bind a new UDP port and get a socket
conn->local_realtime_audio_port = 0; // any port
err = bind_socket_and_port(SOCK_DGRAM, conn->connection_ip_family, conn->self_ip_string,
debug(2, "Connection %d: UDP realtime audio port opened: %u.", conn->connection_number,
conn->local_realtime_audio_port);
- pthread_create(&conn->rtp_realtime_audio_thread, NULL, &rtp_realtime_audio_receiver,
- (void *)conn);
-
+ named_pthread_create(&conn->rtp_realtime_audio_thread, NULL, &rtp_realtime_audio_receiver,
+ (void *)conn, "ap2_rat_%d", conn->connection_number);
plist_dict_set_item(stream0dict, "type", plist_new_uint(96));
plist_dict_set_item(stream0dict, "dataPort",
plist_new_uint(conn->local_realtime_audio_port));
- conn->stream.type = ast_apple_lossless;
- debug(3, "An ALAC stream has been detected.");
-
- // Set reasonable connection defaults
- conn->stream.fmtp[0] = 96;
- conn->stream.fmtp[1] = 352;
- conn->stream.fmtp[2] = 0;
- conn->stream.fmtp[3] = 16;
- conn->stream.fmtp[4] = 40;
- conn->stream.fmtp[5] = 10;
- conn->stream.fmtp[6] = 14;
- conn->stream.fmtp[7] = 2;
- conn->stream.fmtp[8] = 255;
- conn->stream.fmtp[9] = 0;
- conn->stream.fmtp[10] = 0;
- conn->stream.fmtp[11] = 44100;
-
- // set the parameters of the player (as distinct from the parameters of the decoder --
- // that's done later).
- conn->max_frames_per_packet = conn->stream.fmtp[1]; // number of audio frames per packet.
- conn->input_rate = conn->stream.fmtp[11];
- conn->input_num_channels = conn->stream.fmtp[7];
- conn->input_bit_depth = conn->stream.fmtp[3];
- conn->input_bytes_per_frame = conn->input_num_channels * ((conn->input_bit_depth + 7) / 8);
debug(2, "Realtime Stream Play");
activity_monitor_signify_activity(1);
- player_prepare_to_play(conn);
player_play(conn);
conn->rtp_running = 1; // hack!
} break;
case 103: {
- debug(2, "Connection %d. AP2 Buffered Audio Stream.", conn->connection_number);
- debug_log_rtsp_message(2, "Buffered Audio Stream SETUP incoming message", req);
+ debug_log_rtsp_message(3, "Buffered Audio Stream SETUP incoming message", req);
conn->airplay_stream_type = buffered_stream;
- // get needed stuff
+
+ // get the audio format code
+ item = plist_dict_get_item(stream0, "audioFormat"); // audio format
+ if (item != NULL) {
+ plist_get_uint_val(item, &conn->audio_format);
+ // see https://emanuelecozzi.net/docs/airplay2/audio/ for values
+ // seems to be only the initial format -- it seems as if it can change dynamically
+ } else {
+ debug(1, "No audio format (audioFormat) property found in setup.");
+ }
// bind a new TCP port and get a socket
conn->local_buffered_audio_port = 0; // any port
debug(2, "Connection %d: TCP Buffered Audio port opened: %u.", conn->connection_number,
conn->local_buffered_audio_port);
- // hack.
- conn->max_frames_per_packet = 352; // number of audio frames per packet.
- conn->input_rate = 44100; // we are stuck with this for the moment.
- conn->input_num_channels = 2;
- conn->input_bit_depth = 16;
- conn->input_bytes_per_frame = conn->input_num_channels * ((conn->input_bit_depth + 7) / 8);
activity_monitor_signify_activity(1);
- player_prepare_to_play(
- conn); // get capabilities of DAC before creating the buffered audio thread
- pthread_create(&conn->rtp_buffered_audio_thread, NULL, &rtp_buffered_audio_processor,
- (void *)conn);
+ // debug(1, "Connection %d: create rtp_buffered_audio_thread", conn->connection_number);
+
+ named_pthread_create_with_priority(&conn->rtp_buffered_audio_thread, 2,
+ &rtp_buffered_audio_processor, (void *)conn,
+ "ap2_bat_%d", conn->connection_number);
plist_dict_set_item(stream0dict, "type", plist_new_uint(103));
plist_dict_set_item(stream0dict, "dataPort",
player_play(conn);
conn->rtp_running = 1; // hack!
} break;
+ case 130: {
+ debug(1, "Remote Control Setup Received on a PTP connection.");
+ debug_log_rtsp_message(2, "Incoming message", req);
+ } break;
default:
debug(1, "SETUP on Connection %d: Unhandled stream type %" PRIu64 ".",
conn->connection_number, item_value);
plist_array_append_item(streams_array, stream0dict);
plist_dict_set_item(setupResponsePlist, "streams", streams_array);
- resp->respcode = 200;
+ resp->respcode = 200;
} else if (conn->airplay_stream_category == remote_control_stream) {
- debug(2, "Connection %d (RC): SETUP: Remote Control Stream received from %s.",
+ debug(3, "Connection %d (RC): SETUP: Remote Control Only with stream received from %s.",
conn->connection_number, conn->client_ip_string);
- debug_log_rtsp_message(2, "Remote Control Stream SETUP incoming message", req);
- /*
- // get a port to use as an data port
- // bind a new TCP port and get a socket
- conn->local_data_port = 0; // any port
- int err =
- bind_socket_and_port(SOCK_STREAM, conn->connection_ip_family, conn->self_ip_string,
- conn->self_scope_id, &conn->local_data_port, &conn->data_socket);
- if (err) {
- die("SETUP on Connection %d (RC): Error %d: could not find a TCP port to use as a data "
- "port",
- conn->connection_number, err);
- }
-
- debug(1, "Connection %d SETUP (RC): TCP Remote Control data port opened: %u.",
- conn->connection_number, conn->local_data_port);
- if (conn->rtp_data_thread != NULL)
- debug(1, "Connection %d SETUP (RC): previous rtp_data_thread allocation not freed, it
- seems.", conn->connection_number); conn->rtp_data_thread = malloc(sizeof(pthread_t)); if
- (conn->rtp_data_thread == NULL) die("Couldn't allocate space for pthread_t");
-
- pthread_create(conn->rtp_data_thread, NULL, &rtp_data_receiver, (void *)conn);
+ debug_log_rtsp_message(3, "Remote Control Stream SETUP incoming message", req);
+
+ plist_t seed_item = NULL;
+ // the data port and listener thread may already have been set up
+ // if so, the local_data_port will be non-zero
+
+ if (conn->local_data_port == 0) {
+ // set up data channel ciphering
+ plist_t dict = plist_array_get_item(streams, 0);
+ if (dict != NULL) {
+ // get the seed that becomes the suffix for the salt
+ seed_item = plist_dict_get_item(dict, "seed"); // session key
+ uint64_t seed = 0;
+ if (seed_item != NULL) {
+ plist_get_uint_val(seed_item, &seed);
+ char salt_suffix[256] = "";
+ snprintf(salt_suffix, sizeof(salt_suffix), "%" PRIu64 "", seed);
+ conn->ap2_pairing_context.data_cipher_bundle.cipher_ctx =
+ pair_cipher_new(PAIR_SERVER_HOMEKIT, 5, conn->pair_setup_result->shared_secret,
+ conn->pair_setup_result->shared_secret_len,
+ salt_suffix); // last argument is the (possible) dynamic salt suffix
+ if (conn->ap2_pairing_context.data_cipher_bundle.cipher_ctx != NULL) {
+ conn->ap2_pairing_context.data_cipher_bundle.description = strdup("DataStream");
+ // get a port to use as an data port
+ // bind a new TCP port and get a socket
+ conn->local_data_port = 0; // any port
+ int lerr = bind_socket_and_port(SOCK_STREAM, conn->connection_ip_family,
+ conn->self_ip_string, conn->self_scope_id,
+ &conn->local_data_port, &conn->data_socket);
+ if (lerr) {
+ die("SETUP on Connection %d (RC): Error %d: could not find a TCP port to use as a "
+ "data "
+ "port",
+ conn->connection_number, lerr);
+ }
+ listen(conn->data_socket,
+ 128); // open port for listening before telling the client about it!
+ debug(2, "Connection %d SETUP (RC): TCP Remote Control data port opened: %u.",
+ conn->connection_number, conn->local_data_port);
+ } else {
+ debug(1, "Connection %d: SETUP: Error setting up rtsp data channel ciphering.",
+ conn->connection_number);
+ }
+ } else {
+ debug(2, "Connection %d: SETUP: No data channel encryption salt seed found.",
+ conn->connection_number);
+ }
+ } else {
+ debug(1, "Connection %d: SETUP: Could not find the streams array",
+ conn->connection_number);
+ }
+ } else {
+ debug(1, "Connection %d SETUP (RC): data port already allocated.", conn->connection_number);
+ }
plist_t coreResponseDict = plist_new_dict();
plist_dict_set_item(coreResponseDict, "streamID", plist_new_uint(1));
plist_dict_set_item(coreResponseDict, "type", plist_new_uint(130));
- plist_dict_set_item(coreResponseDict, "dataPort", plist_new_uint(conn->local_data_port));
+ if (seed_item != NULL)
+ plist_dict_set_item(coreResponseDict, "dataPort", plist_new_uint(conn->local_data_port));
plist_t coreResponseArray = plist_new_array();
plist_array_append_item(coreResponseArray, coreResponseDict);
plist_dict_set_item(setupResponsePlist, "streams", coreResponseArray);
- */
+
resp->respcode = 200;
} else {
debug(1, "Connection %d: SETUP: Stream received but no airplay category set. Nothing done.",
plist_free(messagePlist);
if (clientNameString != NULL)
free(clientNameString);
- debug_log_rtsp_message(3, " SETUP response", resp);
}
#endif
void handle_setup(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
- debug(2, "Connection %d: SETUP", conn->connection_number);
+ debug(3, "Connection %d: SETUP", conn->connection_number);
resp->respcode = 451; // invalid arguments -- expect them
// check this connection has the principal_conn, obtained during a prior ANNOUNCE
if ((conn != NULL) && (principal_conn == conn)) {
debug(2,
"Connection %d: SETUP DACP-ID \"%s\" from %s to %s with UDP ports Control: "
"%d, Timing: %d and Audio: %d.",
- conn->connection_number, conn->dacp_id, &conn->client_ip_string,
- &conn->self_ip_string, conn->local_control_port, conn->local_timing_port,
+ conn->connection_number, conn->dacp_id, (char *)&conn->client_ip_string,
+ (char *)&conn->self_ip_string, conn->local_control_port, conn->local_timing_port,
conn->local_audio_port);
} else {
conn->connection_number);
}
if (resp->respcode == 200) {
+ do_pthread_setname(&conn->thread, "rtsp_1_%d", conn->connection_number);
#ifdef CONFIG_METADATA
send_ssnc_metadata('clip', conn->client_ip_string, strlen(conn->client_ip_string), 1);
send_ssnc_metadata('svip', conn->self_ip_string, strlen(conn->self_ip_string), 1);
if (!strncmp(cp, "volume: ", strlen("volume: "))) {
float volume = atof(cp + strlen("volume: "));
- debug(2, "Connection %d: request to set AirPlay Volume to: %f.", conn->connection_number,
+ debug(3, "Connection %d: request to set AirPlay Volume to: %f.", conn->connection_number,
volume);
// if we are playing, go ahead and change the volume
#ifdef CONFIG_DBUS_INTERFACE
pthread_rwlock_rdlock(&principal_conn_lock); // don't let the principal_conn be changed
pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
if (principal_conn == conn) {
+ debug(3, "Connection %d: set player volume to %.3f.", conn->connection_number, volume);
player_volume(volume, conn);
+ debug(3, "Connection %d: set player volume to %.3f success.", conn->connection_number,
+ volume);
+ } else {
+ if (principal_conn != NULL)
+ debug(1, "Connection %d: fail to set player volume to %.3f. Principal conn is %d.",
+ conn->connection_number, volume, principal_conn->connection_number);
+ else
+ debug(1, "Connection %d: fail to set player volume to %.3f. Principal conn is NULL.",
+ conn->connection_number, volume);
}
if (conn != NULL) {
conn->own_airplay_volume = volume;
conn->own_airplay_volume_set = 1;
}
pthread_cleanup_pop(1); // release the principal_conn lock
- config.last_access_to_volume_info_time = get_absolute_time_in_ns();
#ifdef CONFIG_DBUS_INTERFACE
}
#endif
#ifdef CONFIG_METADATA
char *progress = cp + strlen("progress: ");
- // debug(2, "progress: \"%s\"",progress); // rtpstampstart/rtpstampnow/rtpstampend 44100 per
- // second
+ // debug(2, "progress: \"%s\"",progress); // rtpstampstart/rtpstampnow/rtpstampend 44100
+ // (always?) per second
send_ssnc_metadata('prgr', progress, strlen(progress), 1);
#endif
} else {
- debug(1, "Connection %d, unrecognised parameter: \"%s\" (%d)\n", conn->connection_number, cp,
- strlen(cp));
+ debug(1, "Connection %d, unrecognised parameter: \"%s\"\n", conn->connection_number, cp);
}
cp = next;
}
// 'paus' -- buffered audio stream paused. No arguments.
// 'pres' -- buffered audio stream resumed. No arguments.
// 'pffr' -- the first frame of a play session has been received and has been validly
-// timed.
+// timed. The argument is
+// "<frame_number>/<time_it_should_be_played_in_64_bit_nanoseconds>".
+// 'pdis' -- a discontinuity in the timestamps of incoming frames has been detected.
+// The argument is "<frame_number>/<discontinuity>".
+// discontinuity is the actual frame number less the expected frame number
+// positive means a gap
// 'pvol' -- play volume. The volume is sent as a string --
// "airplay_volume,volume,lowest_volume,highest_volume"
// volume, lowest_volume and highest_volume are given in dB.
// "ssnc", "chnk", packet_ix, packet_counts, packet_tag, packet_type, chunked_data.
// Notice that the number of items is different to the standard
-// including a simple base64 encoder to minimise malloc/free activity
-
-// From Stack Overflow, with thanks:
-// http://stackoverflow.com/questions/342409/how-do-i-base64-encode-decode-in-c
-// minor mods to make independent of C99.
-// more significant changes make it not malloc memory
-// needs to initialise the docoding table first
-
-// add _so to end of name to avoid confusion with polarssl's implementation
-
-static char encoding_table[] = {'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
- 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
- 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
- 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
- '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'};
-
-static size_t mod_table[] = {0, 2, 1};
-
-// pass in a pointer to the data, its length, a pointer to the output buffer and
-// a pointer to an int
-// containing its maximum length
-// the actual length will be returned.
-
-char *base64_encode_so(const unsigned char *data, size_t input_length, char *encoded_data,
- size_t *output_length) {
-
- size_t calculated_output_length = 4 * ((input_length + 2) / 3);
- if (calculated_output_length > *output_length)
- return (NULL);
- *output_length = calculated_output_length;
-
- size_t i, j;
- for (i = 0, j = 0; i < input_length;) {
-
- uint32_t octet_a = i < input_length ? (unsigned char)data[i++] : 0;
- uint32_t octet_b = i < input_length ? (unsigned char)data[i++] : 0;
- uint32_t octet_c = i < input_length ? (unsigned char)data[i++] : 0;
-
- uint32_t triple = (octet_a << 0x10) + (octet_b << 0x08) + octet_c;
-
- encoded_data[j++] = encoding_table[(triple >> 3 * 6) & 0x3F];
- encoded_data[j++] = encoding_table[(triple >> 2 * 6) & 0x3F];
- encoded_data[j++] = encoding_table[(triple >> 1 * 6) & 0x3F];
- encoded_data[j++] = encoding_table[(triple >> 0 * 6) & 0x3F];
- }
-
- for (i = 0; i < mod_table[input_length % 3]; i++)
- encoded_data[*output_length - 1 - i] = '=';
-
- return encoded_data;
-}
-
-// with thanks!
-//
-
static int fd = -1;
// static int dirty = 0;
void metadata_delete_multicast_socket(void) {
if (config.metadata_enabled == 0)
return;
- shutdown(metadata_sock, SHUT_RDWR); // we want to immediately deallocate the buffer
- close(metadata_sock);
+ if (metadata_sock != -1) {
+ shutdown(metadata_sock, SHUT_RDWR); // we want to immediately deallocate the buffer
+ close(metadata_sock);
+ }
if (metadata_sockmsg)
free(metadata_sockmsg);
}
}
void *metadata_thread_function(__attribute__((unused)) void *ignore) {
+ // #include <syscall.h>
+ // debug(1, "metadata_thread_function PID %d", syscall(SYS_gettid));
metadata_create_multicast_socket();
metadata_package pack;
pthread_cleanup_push(metadata_thread_cleanup_function, NULL);
}
void *metadata_multicast_thread_function(__attribute__((unused)) void *ignore) {
+ // #include <syscall.h>
+ // debug(1, "metadata_multicast_thread_function PID %d", syscall(SYS_gettid));
metadata_create_multicast_socket();
metadata_package pack;
pthread_cleanup_push(metadata_multicast_thread_cleanup_function, NULL);
}
void *metadata_hub_thread_function(__attribute__((unused)) void *ignore) {
+ // #include <syscall.h>
+ // debug(1, "metadata_hub_thread_function PID %d", syscall(SYS_gettid));
metadata_package pack;
pthread_cleanup_push(metadata_hub_thread_cleanup_function, NULL);
while (1) {
}
void *metadata_mqtt_thread_function(__attribute__((unused)) void *ignore) {
+ // #include <syscall.h>
+ // debug(1, "metadata_mqtt_thread_function PID %d", syscall(SYS_gettid));
metadata_package pack;
pthread_cleanup_push(metadata_mqtt_thread_cleanup_function, NULL);
while (1) {
pc_queue_init(&metadata_queue, (char *)&metadata_queue_items, sizeof(metadata_package),
metadata_queue_size, "pipe");
- if (pthread_create(&metadata_thread, NULL, metadata_thread_function, NULL) != 0)
+ if (named_pthread_create(&metadata_thread, NULL, metadata_thread_function, NULL,
+ "metadata pipe") != 0)
debug(1, "Failed to create metadata thread!");
// create a pc_queue for the metadata_multicast_queue
pc_queue_init(&metadata_multicast_queue, (char *)&metadata_multicast_queue_items,
sizeof(metadata_package), metadata_multicast_queue_size, "multicast");
- if (pthread_create(&metadata_multicast_thread, NULL, metadata_multicast_thread_function,
- NULL) != 0)
+ if (named_pthread_create(&metadata_multicast_thread, NULL, metadata_multicast_thread_function,
+ NULL, "metadata mcst") != 0)
debug(1, "Failed to create metadata multicast thread!");
}
#ifdef CONFIG_METADATA_HUB
// create a pc_queue for the metadata hub
pc_queue_init(&metadata_hub_queue, (char *)&metadata_hub_queue_items, sizeof(metadata_package),
metadata_hub_queue_size, "hub");
- if (pthread_create(&metadata_hub_thread, NULL, metadata_hub_thread_function, NULL) != 0)
+ if (named_pthread_create(&metadata_hub_thread, NULL, metadata_hub_thread_function, NULL,
+ "metadata hub") != 0)
debug(1, "Failed to create metadata hub thread!");
#endif
#ifdef CONFIG_MQTT
// create a pc_queue for the MQTT handler
pc_queue_init(&metadata_mqtt_queue, (char *)&metadata_mqtt_queue_items, sizeof(metadata_package),
metadata_mqtt_queue_size, "mqtt");
- if (pthread_create(&metadata_mqtt_thread, NULL, metadata_mqtt_thread_function, NULL) != 0)
+ if (named_pthread_create(&metadata_mqtt_thread, NULL, metadata_mqtt_thread_function, NULL,
+ "metadata mqtt") != 0)
debug(1, "Failed to create metadata mqtt thread!");
#endif
metadata_running = 1;
// debug(2, "metadata stop hub thread.");
pthread_cancel(metadata_hub_thread);
pthread_join(metadata_hub_thread, NULL);
- // debug(2, "metadata stop hub done.");
pc_queue_delete(&metadata_hub_queue);
+ // debug(2, "metadata stop hub done.");
#endif
if (config.metadata_enabled) {
// debug(2, "metadata stop multicast thread.");
if (metadata_multicast_thread) {
pthread_cancel(metadata_multicast_thread);
pthread_join(metadata_multicast_thread, NULL);
- // debug(2, "metadata stop multicast done.");
pc_queue_delete(&metadata_multicast_queue);
+ // debug(2, "metadata stop multicast done.");
}
if (metadata_thread) {
// debug(2, "metadata stop metadata_thread thread.");
pthread_cancel(metadata_thread);
pthread_join(metadata_thread, NULL);
- // debug(2, "metadata_stop finished successfully.");
pc_queue_delete(&metadata_queue);
+ // debug(2, "metadata_stop finished successfully.");
}
}
}
}
-int send_metadata_to_queue(pc_queue *queue, uint32_t type, uint32_t code, char *data,
- uint32_t length, rtsp_message *carrier, int block) {
+int send_metadata_to_queue(pc_queue *queue, const uint32_t type, const uint32_t code,
+ const char *data, const uint32_t length, rtsp_message *carrier,
+ int block) {
// clang-format off
// parameters:
pack.code = code;
pack.length = length;
pack.carrier = carrier;
- pack.data = data;
+ pack.data = (char *)data;
if (pack.carrier) {
msg_retain(pack.carrier);
} else {
if (pack.carrier) {
if (rc == EWOULDBLOCK)
debug(2,
- "metadata queue \"%s\" full, dropping message item: type %x, code %x, data %x, "
+ "metadata queue \"%s\" full, dropping message item: type %x, code %x, data %" PRIxPTR ", "
"length %u, message %d.",
- queue->name, pack.type, pack.code, pack.data, pack.length,
+ queue->name, pack.type, pack.code, (uintptr_t)pack.data, pack.length,
pack.carrier->index_number);
msg_free(&pack.carrier);
} else {
if (rc == EWOULDBLOCK)
debug(
2,
- "metadata queue \"%s\" full, dropping data item: type %x, code %x, data %x, length %u.",
- queue->name, pack.type, pack.code, pack.data, pack.length);
+ "metadata queue \"%s\" full, dropping data item: type %x, code %x, data %" PRIxPTR ", length %u.",
+ queue->name, pack.type, pack.code, (uintptr_t)pack.data, pack.length);
if (pack.data)
free(pack.data);
}
return rc;
}
-int send_metadata(uint32_t type, uint32_t code, char *data, uint32_t length, rtsp_message *carrier,
- int block) {
+int send_metadata(const uint32_t type, const uint32_t code, const char *data, const uint32_t length,
+ rtsp_message *carrier, int block) {
int rc = 0;
if (config.metadata_enabled) {
rc = send_metadata_to_queue(&metadata_queue, type, code, data, length, carrier, block);
else
send_metadata('ssnc', 'mden', NULL, 0, NULL,
0); // metadata starting, if rtptime is not available
-
} else if (!strncmp(ct, "image", 5)) {
// Some server simply ignore the md field from the TXT record. If The
// config says 'please, do not include any cover art', we are polite and
static void handle_announce(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp) {
debug(2, "Connection %d: ANNOUNCE", conn->connection_number);
+#ifdef CONFIG_AIRPLAY_2
+ conn->airplay_stream_category = classic_airplay_stream; // already set in Classic AirPlay build
+ play_lock_r get_play_status = get_play_lock(
+ conn, 1); // always allow interruption in the Classic-AirPlay-in-AirPlay-2 mode (?)
+#else
+ play_lock_r get_play_status = get_play_lock(conn, config.allow_session_interruption);
+#endif
+ if (get_play_status != play_lock_aquisition_failed) {
- int get_play_status = get_play_lock(conn, config.allow_session_interruption);
- if (get_play_status != -1) {
- debug(2, "Connection %d: ANNOUNCE has acquired play lock.", conn->connection_number);
-
- conn->airplay_stream_category = classic_airplay_stream;
-
- // now, if this new session did not break in, then it's okay to reset the next UDP ports
- // to the start of the range
-
- if (get_play_status == 0) { // will be zero if it wasn't waiting to break in
+ // this has already been checked for in Classic Airplay and would be play_lock_already_acquired
+ // here. if this new session did not break in, then it's okay to reset the next UDP ports to the
+ // start of the range
+ if (get_play_status ==
+ play_lock_acquired_without_breaking_in) { // if it's safe to re-use original UDP ports
resetFreeUDPPort();
}
}
}
*/
- // In AirPlay 2, an ANNOUNCE signifies the start of an AirPlay 1 session.
+
#ifdef CONFIG_AIRPLAY_2
+ // In AirPlay 2, an ANNOUNCE signifies the start of an AirPlay 1 session.
+ debug(1, "Connection %d: %s connection from %s:%u to self at %s:%u.", conn->connection_number,
+ get_category_string(conn->airplay_stream_category), conn->client_ip_string,
+ conn->client_rtsp_port, conn->self_ip_string, conn->self_rtsp_port);
conn->airplay_type = ap_1;
conn->timing_type = ts_ntp;
- if (conn->airplay_gid != NULL) {
- free(conn->airplay_gid);
- conn->airplay_gid = NULL;
- }
-
- // only update these things if you're (still) the principal conn
- pthread_rwlock_rdlock(&principal_conn_lock); // don't let the principal_conn be changed
- pthread_cleanup_push(rwlock_unlock, (void *)&principal_conn_lock);
- if (principal_conn == conn) {
- config.airplay_statusflags |= 1 << 11; // DeviceSupportsRelay -- should this be on?
- build_bonjour_strings(conn);
- mdns_update(NULL, secondary_txt_records);
- }
- pthread_cleanup_pop(1); // release the principal_conn lock
-
- debug(1, "Connection %d: Classic AirPlay connection from %s:%u to self at %s:%u.",
- conn->connection_number, conn->client_ip_string, conn->client_rtsp_port,
- conn->self_ip_string, conn->self_rtsp_port);
+ conn->type = 96; // this is the AirPlay 2 code for Realtime Audio -- not sure it's right
#endif
-
conn->stream.type = ast_unknown;
resp->respcode = 200; // presumed OK
char *pssid = NULL;
if (pUncompressedCDAudio) {
debug(2, "An uncompressed PCM stream has been detected.");
conn->stream.type = ast_uncompressed;
- conn->max_frames_per_packet = 352; // number of audio frames per packet.
+ conn->frames_per_packet = 352; // number of audio frames per packet.
conn->input_rate = 44100;
conn->input_num_channels = 2;
conn->input_bit_depth = 16;
// set the parameters of the player (as distinct from the parameters of the decoder -- that's
// done later).
- conn->max_frames_per_packet = conn->stream.fmtp[1]; // number of audio frames per packet.
+ conn->frames_per_packet = conn->stream.fmtp[1]; // number of audio frames per packet.
conn->input_rate = conn->stream.fmtp[11];
conn->input_num_channels = conn->stream.fmtp[7];
conn->input_bit_depth = conn->stream.fmtp[3];
// print each line of the request content
// the problem is that nextline has replace all returns, newlines, etc. by
// NULLs
- char *cp = req->content;
- int cp_left = req->contentlength;
- while (cp_left > 1) {
- if (strlen(cp) != 0)
- warn(" %s", cp);
- cp += strlen(cp) + 1;
- cp_left -= strlen(cp) + 1;
+ char *lcp = req->content;
+ int lcp_left = req->contentlength;
+ while (lcp_left > 1) {
+ if (strlen(lcp) != 0)
+ warn(" %s", lcp);
+ lcp += strlen(lcp) + 1;
+ lcp_left -= strlen(lcp) + 1;
}
}
debug(2, "Connection %d: ANNOUNCE has completed.", conn->connection_number);
char *method;
void (*ap1_handler)(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp); // for AirPlay 1
void (*ap2_handler)(rtsp_conn_info *conn, rtsp_message *req, rtsp_message *resp); // for AirPlay 2
-} method_handlers[] = {{"OPTIONS", handle_options, handle_options},
+} method_handlers[] = {{"OPTIONS", handle_options, handle_options_2},
{"ANNOUNCE", handle_announce, handle_announce},
{"FLUSH", handle_flush, handle_flush},
{"TEARDOWN", handle_teardown, handle_teardown_2},
{"GET", handle_get, handle_get},
{"POST", handle_post, handle_post},
{"SETPEERS", handle_unimplemented_ap1, handle_setpeers},
+ {"SETPEERSX", handle_unimplemented_ap1, handle_setpeersx},
{"SETRATEANCHORTI", handle_unimplemented_ap1, handle_setrateanchori},
{"FLUSHBUFFERED", handle_unimplemented_ap1, handle_flushbuffered},
{"SETRATE", handle_unimplemented_ap1, handle_setrate},
{NULL, NULL}};
#endif
-static void apple_challenge(int fd, rtsp_message *req, rtsp_message *resp) {
+static void apple_challenge(int lfd, rtsp_message *req, rtsp_message *resp) {
char *hdr = msg_get_header(req, "Apple-Challenge");
if (!hdr)
return;
SOCKADDR fdsa;
socklen_t sa_len = sizeof(fdsa);
- getsockname(fd, (struct sockaddr *)&fdsa, &sa_len);
+ getsockname(lfd, (struct sockaddr *)&fdsa, &sa_len);
int chall_len;
uint8_t *chall = base64_dec(hdr, &chall_len);
static char *make_nonce(void) {
uint8_t random[8];
- int fd = open("/dev/urandom", O_RDONLY);
- if (fd < 0)
+ int lfd = open("/dev/urandom", O_RDONLY);
+ if (lfd < 0)
die("could not open /dev/urandom!");
// int ignore =
- if (read(fd, random, sizeof(random)) != sizeof(random))
+ if (read(lfd, random, sizeof(random)) != sizeof(random))
debug(1, "Error reading /dev/urandom");
- close(fd);
+ close(lfd);
return base64_enc(random, 8); // returns a pointer to malloc'ed memory
}
pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
debug(3, "Connection %d: %s rtsp_conversation_thread_func_cleanup_function called.",
conn->connection_number, get_category_string(conn->airplay_stream_category));
+
+ if (conn->player_thread) {
+ player_stop(conn); // this nulls the player_thread and cancels the threads...
+ activity_monitor_signify_activity(0); // inactive, and should be after command_stop()
+ }
+
+ if (conn->fd > 0) {
+ debug(
+ 2,
+ "Connection %d: terminating -- closing RTSP connection socket %d: from %s:%u to self at "
+ "%s:%u.",
+ conn->connection_number, conn->fd, conn->client_ip_string, conn->client_rtsp_port,
+ conn->self_ip_string, conn->self_rtsp_port);
+ close(conn->fd);
+ conn->fd = 0;
+ }
+
#ifdef CONFIG_AIRPLAY_2
- // AP2
- teardown_phase_one(conn);
- teardown_phase_two(conn);
-#else
- // AP1
- teardown(conn);
+ if (conn->session_key) {
+ free(conn->session_key);
+ conn->session_key = NULL;
+ }
+
+ // give the event receiver a chance to exit normally, if it exists
+ if (conn->rtp_event_thread != NULL) {
+ uint64_t event_receiver_start_wait_time = get_absolute_time_in_ns();
+ int64_t event_receiver_wait_time = 0;
+ do {
+ if (conn->ap2_event_receiver_exited == 0)
+ usleep(50000);
+ event_receiver_wait_time = get_absolute_time_in_ns() - event_receiver_start_wait_time;
+ } while ((conn->ap2_event_receiver_exited == 0) && (event_receiver_wait_time < 2000000000L));
+
+ if (conn->ap2_event_receiver_exited == 0) {
+ debug(2, "Connection %d: %s event receiver has not exited, so cancelling it.",
+ conn->connection_number, get_category_string(conn->airplay_stream_category));
+ pthread_cancel(*conn->rtp_event_thread);
+ }
+ pthread_join(*conn->rtp_event_thread, NULL);
+ free(conn->rtp_event_thread);
+ conn->rtp_event_thread = NULL;
+ }
+ conn->ap2_event_receiver_exited = 0;
+ debug(3, "Connection %d: %s event thread deleted.", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
#endif
debug(3, "Connection %d: terminating -- closing timing, control and audio sockets...",
close(conn->audio_socket);
conn->audio_socket = 0;
}
- if (conn->fd > 0) {
- debug(
- 2,
- "Connection %d: terminating -- closing RTSP connection socket %d: from %s:%u to self at "
- "%s:%u.",
- conn->connection_number, conn->fd, conn->client_ip_string, conn->client_rtsp_port,
- conn->self_ip_string, conn->self_rtsp_port);
- close(conn->fd);
- conn->fd = 0;
- }
if (conn->auth_nonce) {
free(conn->auth_nonce);
conn->auth_nonce = NULL;
#ifdef CONFIG_AIRPLAY_2
buf_drain(&conn->ap2_pairing_context.control_cipher_bundle.plaintext_read_buffer, -1);
buf_drain(&conn->ap2_pairing_context.control_cipher_bundle.encrypted_read_buffer, -1);
+ if (conn->ap2_pairing_context.control_cipher_bundle.description != NULL)
+ free(conn->ap2_pairing_context.control_cipher_bundle.description);
pair_cipher_free(conn->ap2_pairing_context.control_cipher_bundle.cipher_ctx);
+
+ buf_drain(&conn->ap2_pairing_context.event_cipher_bundle.plaintext_read_buffer, -1);
+ buf_drain(&conn->ap2_pairing_context.event_cipher_bundle.encrypted_read_buffer, -1);
+ if (conn->ap2_pairing_context.event_cipher_bundle.description != NULL)
+ free(conn->ap2_pairing_context.event_cipher_bundle.description);
+ pair_cipher_free(conn->ap2_pairing_context.event_cipher_bundle.cipher_ctx);
+
+ buf_drain(&conn->ap2_pairing_context.data_cipher_bundle.plaintext_read_buffer, -1);
+ buf_drain(&conn->ap2_pairing_context.data_cipher_bundle.encrypted_read_buffer, -1);
+ if (conn->ap2_pairing_context.data_cipher_bundle.description != NULL)
+ free(conn->ap2_pairing_context.data_cipher_bundle.description);
+ pair_cipher_free(conn->ap2_pairing_context.data_cipher_bundle.cipher_ctx);
+
pair_setup_free(conn->ap2_pairing_context.setup_ctx);
pair_verify_free(conn->ap2_pairing_context.verify_ctx);
if (conn->airplay_gid) {
free(conn->airplay_gid);
conn->airplay_gid = NULL;
}
-
#endif
rtp_terminate(conn);
conn->UserAgent = NULL;
}
+#ifdef CONFIG_AIRPLAY_2
+ if (conn->ap2_client_name) {
+ free(conn->ap2_client_name);
+ conn->ap2_client_name = NULL;
+ }
+#endif
// remove flow control and mutexes
-
- int rc = pthread_mutex_destroy(&conn->player_create_delete_mutex);
- if (rc)
- debug(1, "Connection %d: error %d destroying player_create_delete_mutex.",
- conn->connection_number, rc);
- rc = pthread_mutex_destroy(&conn->volume_control_mutex);
+ int rc = pthread_mutex_destroy(&conn->volume_control_mutex);
if (rc)
debug(1, "Connection %d: error %d destroying volume_control_mutex.", conn->connection_number,
rc);
rc = pthread_mutex_destroy(&conn->flush_mutex);
if (rc)
debug(1, "Connection %d: error %d destroying flush_mutex.", conn->connection_number, rc);
-
debug(3, "Connection %d: Closed.", conn->connection_number);
conn->running = 0; // for the garbage collector
+ release_play_lock(conn);
pthread_setcancelstate(oldState, NULL);
}
}
void msg_cleanup_function(void *arg) {
- // debug(3, "msg_cleanup_function called.");
+ debug(3, "msg_cleanup_function called 0x%" PRIxPTR ".", (uintptr_t)arg);
msg_free((rtsp_message **)arg);
}
static void *rtsp_conversation_thread_func(void *pconn) {
+ // #include <syscall.h>
+ // debug(1, "rtsp_conversation_thread_func PID %d", syscall(SYS_gettid));
rtsp_conn_info *conn = pconn;
int rc = pthread_mutex_init(&conn->flush_mutex, NULL);
enum rtsp_read_request_response reply;
- int rtsp_read_request_attempt_count = 1; // 1 means exit immediately
- rtsp_message *req, *resp;
+ // int rtsp_read_request_attempt_count = 1; // 1 means exit immediately
+ rtsp_message *req = NULL, *resp = NULL;
#ifdef CONFIG_AIRPLAY_2
conn->ap2_audio_buffer_size = 1024 * 1024 * 8;
#endif
while (conn->stop == 0) {
- int debug_level = 3; // for printing the request and response
+ pthread_testcancel();
+ int debug_level = 2; // for printing the request and response
// check to see if a conn has been zeroed
int i;
for (i = 0; i < nconns; i++) {
if ((conns[i] != NULL) && (conns[i]->connection_number == 0)) {
- debug(1, "conns[%d] at %" PRIxPTR " has a Connection Number of 0!", i, conns[i]);
+ debug(1, "conns[%d] has a Connection Number of 0!", i);
}
}
debug_mutex_unlock(&conns_lock, 3);
// (strcmp(req->method, "POST") ==
// 0)) // the options message is very common, so don't log it until level 3
// dl = 3;
+ debug(dl,
+ "Connection %d: (%s) received an RTSP Packet of type \"%s\":", conn->connection_number,
+ get_category_string(conn->airplay_stream_category), req->method);
+ debug_log_rtsp_message(dl, NULL, req);
- if (conn->airplay_stream_category == remote_control_stream) {
- debug(dl, "Connection %d (RC): Received an RTSP Packet of type \"%s\":",
- conn->connection_number, req->method),
- debug_log_rtsp_message(dl, NULL, req);
- } else {
- debug(dl, "Connection %d: Received an RTSP Packet of type \"%s\":", conn->connection_number,
- req->method),
- debug_log_rtsp_message(dl, NULL, req);
- }
apple_challenge(conn->fd, req, resp);
hdr = msg_get_header(req, "CSeq");
if (hdr)
msg_add_header(resp, "CSeq", hdr);
// msg_add_header(resp, "Audio-Jack-Status", "connected; type=analog");
#ifdef CONFIG_AIRPLAY_2
- msg_add_header(resp, "Server", "AirTunes/366.0");
+ char server_string[128];
+ snprintf(server_string, sizeof(server_string), "AirTunes/%s", config.srcvers);
+ msg_add_header(resp, "Server", server_string);
#else
msg_add_header(resp, "Server", "AirTunes/105.1");
#endif
}
}
if (method_selected == 0) {
- debug(2,
- "Connection %d: Unrecognised and unhandled rtsp request \"%s\". HTTP Response Code "
- "501 (\"Not Implemented\") returned.",
- conn->connection_number, req->method);
+ debug(1,
+ "Connection %d: (%s) unrecognised and unhandled rtsp request \"%s\". HTTP Response "
+ "Code "
+ "%d returned.",
+ conn->connection_number, get_category_string(conn->airplay_stream_category),
+ req->method, resp->respcode);
+ debug_log_rtsp_message(dl, NULL, req);
int y = req->contentlength;
if (y > 0) {
debug(dl, "Content: \"%s\".", obf);
}
}
+ resp->respcode = 200; // OK
}
- if (conn->airplay_stream_category == remote_control_stream) {
- debug(dl, "Connection %d (RC): RTSP Response:", conn->connection_number);
- debug_log_rtsp_message(dl, NULL, resp);
- } else {
- debug(dl, "Connection %d: RTSP Response:", conn->connection_number);
- debug_log_rtsp_message(dl, NULL, resp);
- }
+ debug(dl, "Connection %d: (%s) RTSP response:", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ debug_log_rtsp_message(dl, NULL, resp);
// if (conn->stop == 0) {
int err = msg_write_response(conn, resp);
if (err) {
if (err)
debug(1, "Could not set the RTSP socket to abort due to a write error on closing.");
conn->stop = 1;
- pthread_cancel(conn->thread);
+ // if (debuglev >= 1)
+ // debuglev = 3; // see what happens next
}
// }
pthread_cleanup_pop(1);
pthread_cleanup_pop(1);
- } else {
- int tstop = 0;
- if (reply == rtsp_read_request_response_immediate_shutdown_requested)
- tstop = 1;
- else if ((reply == rtsp_read_request_response_channel_closed) ||
- (reply == rtsp_read_request_response_read_error)) {
- if (conn->player_thread) {
- rtsp_read_request_attempt_count--;
- if (rtsp_read_request_attempt_count == 0) {
- tstop = 1;
- if (reply == rtsp_read_request_response_read_error) {
- struct linger so_linger;
- so_linger.l_onoff = 1; // "true"
- so_linger.l_linger = 0;
- int err = setsockopt(conn->fd, SOL_SOCKET, SO_LINGER, &so_linger, sizeof so_linger);
- if (err)
- debug(1, "Could not set the RTSP socket to abort due to a read error on closing.");
- }
- // debuglev = 3; // see what happens next
- } else {
- if (reply == rtsp_read_request_response_channel_closed)
- debug(2,
- "Connection %d: RTSP channel unexpectedly closed -- will try again %d time(s).",
- conn->connection_number, rtsp_read_request_attempt_count);
- if (reply == rtsp_read_request_response_read_error)
- debug(2, "Connection %d: RTSP channel read error -- will try again %d time(s).",
- conn->connection_number, rtsp_read_request_attempt_count);
- usleep(20000);
- }
- } else {
- tstop = 1;
- }
+ } else { // if the response is not rtsp_read_request_response_ok
+ conn->stop = 1;
+ if (reply == rtsp_read_request_response_read_error) {
+ debug(1, "bad packet received.");
+ struct linger so_linger;
+ so_linger.l_onoff = 1; // "true"
+ so_linger.l_linger = 0;
+ int err = setsockopt(conn->fd, SOL_SOCKET, SO_LINGER, &so_linger, sizeof so_linger);
+ if (err)
+ debug(1, "Could not set the RTSP socket to abort due to a read error on closing.");
} else if (reply == rtsp_read_request_response_bad_packet) {
+ conn->stop = 0; // don't stop for a bad packet
char *response_text = "RTSP/1.0 400 Bad Request\r\nServer: AirTunes/105.1\r\n\r\n";
- ssize_t reply = write(conn->fd, response_text, strlen(response_text));
- if (reply == -1) {
+ ssize_t lreply = write(conn->fd, response_text, strlen(response_text));
+ if (lreply == -1) {
char errorstring[1024];
strerror_r(errno, (char *)errorstring, sizeof(errorstring));
debug(1, "rtsp_read_request_response_bad_packet write response error %d: \"%s\".", errno,
(char *)errorstring);
- } else if (reply != (ssize_t)strlen(response_text)) {
- debug(1, "rtsp_read_request_response_bad_packet write %d bytes requested but %d written.",
+ } else if (lreply != (ssize_t)strlen(response_text)) {
+ debug(1, "rtsp_read_request_response_bad_packet write %zd bytes requested but %d written.",
strlen(response_text), reply);
}
- } else {
- debug(1, "Connection %d: rtsp_read_request error %d, packet ignored.",
- conn->connection_number, (int)reply);
- }
- if (tstop) {
- debug(3, "Connection %d: Terminate RTSP connection.", conn->connection_number);
- conn->stop = 1;
}
}
}
pthread_cleanup_pop(1);
- debug(2, "Connection %d: RTSP thread exit.", conn->connection_number);
+ debug(2, "Connection %d: exit.", conn->connection_number);
pthread_exit(NULL);
}
free(sockfd);
}
pthread_setcancelstate(oldState, NULL);
+ int i;
+ for (i = 0; i < nconns; i++) {
+ if (conns[i] != NULL) {
+ free(conns[i]);
+ conns[i] = NULL;
+ }
+ }
}
void *rtsp_listen_loop(__attribute((unused)) void *arg) {
+ // #include <syscall.h>
+ // debug(1, "rtsp_listen_loop PID %d", syscall(SYS_gettid));
int oldState;
pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
struct addrinfo hints, *info, *p;
for (p = info; p; p = p->ai_next) {
ret = 0;
- int fd = socket(p->ai_family, p->ai_socktype, IPPROTO_TCP);
+ int lfd = socket(p->ai_family, p->ai_socktype, IPPROTO_TCP);
int yes = 1;
// Handle socket open failures if protocol unavailable (or IPV6 not handled)
- if (fd != -1) {
+ if (lfd != -1) {
// Set the RTSP socket to close on exec() of child processes
// otherwise background run_this_before_play_begins or run_this_after_play_ends commands
// that are sleeping prevent the daemon from being restarted because
// the listening RTSP port is still in use.
// See: https://github.com/mikebrady/shairport-sync/issues/329
- fcntl(fd, F_SETFD, FD_CLOEXEC);
- ret = setsockopt(fd, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(yes));
+ fcntl(lfd, F_SETFD, FD_CLOEXEC);
+ ret = setsockopt(lfd, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(yes));
- struct timeval tv;
- tv.tv_sec = 3; // three seconds write timeout
- tv.tv_usec = 0;
- if (setsockopt(fd, SOL_SOCKET, SO_SNDTIMEO, (const char *)&tv, sizeof tv) == -1)
- debug(1, "Error %d setting send timeout for rtsp writeback.", errno);
-
- if ((config.dont_check_timeout == 0) && (config.timeout != 0)) {
- tv.tv_sec = config.timeout; // 120 seconds read timeout by default.
- tv.tv_usec = 0;
- if (setsockopt(fd, SOL_SOCKET, SO_RCVTIMEO, (const char *)&tv, sizeof tv) == -1)
- debug(1, "Error %d setting read timeout for rtsp connection.", errno);
- }
#ifdef IPV6_V6ONLY
// some systems don't support v4 access on v6 sockets, but some do.
// since we need to account for two sockets we might as well
// always.
if (p->ai_family == AF_INET6) {
- ret |= setsockopt(fd, IPPROTO_IPV6, IPV6_V6ONLY, &yes, sizeof(yes));
+ ret |= setsockopt(lfd, IPPROTO_IPV6, IPV6_V6ONLY, &yes, sizeof(yes));
}
#endif
if (!ret)
- ret = bind(fd, p->ai_addr, p->ai_addrlen);
+ ret = bind(lfd, p->ai_addr, p->ai_addrlen);
// one of the address families will fail on some systems that
// report its availability. do not complain.
debug(1, "unable to listen on %s port %d. The error is: \"%s\".", family, config.port,
strerror(errno));
} else {
- listen(fd, 255);
+ listen(lfd, 255);
nsock++;
sockfd = realloc(sockfd, (nsock + 1) * sizeof(int));
- sockfd[nsock] = fd;
+ sockfd[nsock] = lfd;
sockfd[0] = nsock; // the first entry is the number of sockets in the array
}
}
/*
- listen(fd, 5);
+ listen(lfd, 5);
nsock++;
sockfd = realloc(sockfd, nsock * sizeof(int));
- sockfd[nsock - 1] = fd;
+ sockfd[nsock - 1] = lfd;
*/
}
t2 = secondary_txt_records; // second set of text records in AirPlay 2 only
#endif
build_bonjour_strings(NULL); // no conn yet
+ // if a thread is created, e.g. Avahi, it'll inherit the name from this thread
mdns_register(t1, t2); // note that the dacp thread could still be using the mdns stuff after
// all player threads have been terminated, so mdns_unregister can't be
// in the rtsp_listen_loop cleanup.
-
+ pthread_t tid = pthread_self();
+ do_pthread_setname(&tid, "listener");
pthread_setcancelstate(oldState, NULL);
int acceptfd;
struct timeval tv;
rtsp_conn_info *conn = malloc(sizeof(rtsp_conn_info));
if (conn == 0)
die("Couldn't allocate memory for an rtsp_conn_info record.");
- pthread_cleanup_push(malloc_cleanup, conn);
+ pthread_cleanup_push(malloc_cleanup, &conn);
memset(conn, 0, sizeof(rtsp_conn_info));
conn->connection_number = RTSP_connection_index++;
- // debug(2, "Connection %d is at: 0x%" PRIxPTR ".", conn->connection_number, conn);
+ debug(2, "Connection %d is at: 0x%" PRIxPTR ".", conn->connection_number, (uintptr_t)conn);
+
+ // this means that the OPTIONS string we send before getting an ANNOUNCE is for AirPlay 2
#ifdef CONFIG_AIRPLAY_2
conn->airplay_type = ap_2; // changed if an ANNOUNCE is received
conn->timing_type = ts_ptp; // changed if an ANNOUNCE is received
+#else
+ conn->airplay_stream_category =
+ classic_airplay_stream; // really just used for debug messages in Classic AirPlay builds
#endif
socklen_t size_of_reply = sizeof(SOCKADDR);
- conn->fd = accept(acceptfd, (struct sockaddr *)&conn->remote, &size_of_reply);
+ conn->fd = eintr_checked_accept(acceptfd, (struct sockaddr *)&conn->remote, &size_of_reply);
if (conn->fd < 0) {
debug(1, "Connection %d: New connection on port %d not accepted:", conn->connection_number,
config.port);
perror("failed to accept connection");
+
+#ifndef CONFIG_AIRPLAY_2
+ // in Classic AirPlay, close the connection unless idle or interruptions allowed...
+ } else if ((principal_conn != NULL) && (config.allow_session_interruption == 0)) {
+ debug(1, "Connection %d: %s session interruption not allowed", conn->connection_number,
+ get_category_string(conn->airplay_stream_category));
+ close(conn->fd);
+#endif
+
} else {
size_of_reply = sizeof(SOCKADDR);
if (getsockname(conn->fd, (struct sockaddr *)&conn->local, &size_of_reply) == 0) {
- if ((config.dont_check_timeout == 0) && (config.timeout != 0)) {
+ if ((config.dont_check_timeout == 0) && (config.timeout >= 60)) {
+ /*
+ // shouldn't need this!
+
+ struct timeval tv;
+ tv.tv_sec = config.timeout; // seconds
+ tv.tv_usec = 0; // microseconds
+ if (setsockopt(conn->fd, SOL_SOCKET, SO_RCVTIMEO, (const char *)&tv, sizeof tv) != 0)
+ { char errorstring[1024]; strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ debug(1, "could not set time limit on read_from_rtsp_connection -- error %d
+ \"%s\".", errno, errorstring);
+ }
+ */
// skip this stuff in OpenBSD
#ifndef COMPILE_FOR_OPENBSD
// Thanks to https://holmeshe.me/network-essentials-setsockopt-SO_KEEPALIVE/ for this.
#ifdef TCP_KEEPINTVL
int keepAliveIdleTime =
config.timeout -
- 5 * 5; // wait this many seconds before checking for a dropped client
- int keepAliveCount = 5; // check this many times
+ 5 * 5; // wait this many seconds before checking for a dropped client
+ // a minute seems a bit short...
+ int keepAliveCount = 5; // check this many times
int keepAliveInterval = 5; // wait this many seconds between checks
#else
int keepAliveIdleTime =
#else
#define KEEP_ALIVE_OR_IDLE_OPTION TCP_KEEPIDLE
#endif
-
+ debug(3, "Connection %d: set the keepAliveIdleTime to %d seconds.",
+ conn->connection_number, keepAliveIdleTime);
if (setsockopt(conn->fd, SOL_OPTION, KEEP_ALIVE_OR_IDLE_OPTION,
(void *)&keepAliveIdleTime, sizeof(keepAliveIdleTime))) {
debug(1, "can't set the keepAliveIdleTime wait time");
// ---
// if TCP_KEEPINTVL is defined...
#ifdef TCP_KEEPINTVL
+ debug(3, "Connection %d: set the keepAliveCount to %d.", conn->connection_number,
+ keepAliveCount);
if (setsockopt(conn->fd, SOL_OPTION, TCP_KEEPCNT, (void *)&keepAliveCount,
sizeof(keepAliveCount))) {
debug(1, "can't set the keepAliveCount count");
}
+ debug(3, "Connection %d: set the keepAliveCount interval to %d seconds.",
+ conn->connection_number, keepAliveInterval);
if (setsockopt(conn->fd, SOL_OPTION, TCP_KEEPINTVL, (void *)&keepAliveInterval,
sizeof(keepAliveInterval))) {
debug(1, "can't set the keepAliveCount count interval");
};
#endif
+ debug(3, "Connection %d: enable SO_KEEPALIVE.", conn->connection_number);
+ int flags = 1;
+ if (setsockopt(conn->fd, SOL_SOCKET, SO_KEEPALIVE, (void *)&flags, sizeof(flags))) {
+ debug(1, "can't set SO_KEEPALIVE.");
+ }
#endif
}
+
// initialise the connection info
void *client_addr = NULL, *self_addr = NULL;
conn->connection_ip_family = conn->local.SAFAMILY;
debug(1, "Error figuring out Shairport Sync's own IP number.");
}
- ret = pthread_create(&conn->thread, NULL, rtsp_conversation_thread_func,
- conn); // also acts as a memory barrier
+ ret = named_pthread_create(&conn->thread, NULL, rtsp_conversation_thread_func, conn,
+ "rtsp_conn_%d",
+ conn->connection_number); // also acts as a memory barrier
if (ret) {
char errorstring[1024];
strerror_r(ret, (char *)errorstring, sizeof(errorstring));
die("Connection %d: cannot create an RTSP conversation thread. Error %d: \"%s\".",
conn->connection_number, ret, (char *)errorstring);
}
+
+#ifndef CONFIG_AIRPLAY_2
+ // in Classic AirPlay, since we know (by getting here) that interruptions are allowed, grab
+ // the principal conn
+ if (get_play_lock(conn, config.allow_session_interruption) ==
+ play_lock_acquired_without_breaking_in) {
+ // now, if this new session did not break in, then it's okay to reset the next UDP ports
+ // to the start of the range
+ resetFreeUDPPort();
+ }
+#endif
+
debug(3, "Successfully created RTSP receiver thread %d.", conn->connection_number);
conn->running = 1; // this must happen before the thread is tracked
track_thread(conn);
"Shairport Sync running on this device?",
config.port);
}
- debug(1, "Oops -- fell out of the RTSP select loop");
+ debug(1, "Fell out of the RTSP select loop -- this should never happen!");
pthread_exit(NULL);
}
void lock_player();
void unlock_player();
-// this can be used to forcibly stop a play session
-int get_play_lock(rtsp_conn_info *conn, int allow_session_interruption);
+// result of trying to acquire or release the play lock
+typedef enum {
+ play_lock_released,
+ play_lock_already_released,
+ play_lock_already_acquired,
+ play_lock_acquired_without_breaking_in,
+ play_lock_acquired_by_breaking_in,
+ play_lock_aquisition_failed
+} play_lock_r;
+
+// this can be used to [try to] forcibly stop a play session
+play_lock_r get_play_lock(rtsp_conn_info *conn, int allow_session_interruption);
+// this will release the play lock only if the conn has it or if the conn is NULL
+void release_play_lock(rtsp_conn_info *conn);
// initialise and completely delete the metadata stuff
// e.g. if it's malloced, to free it, etc.
// nothing is done automatically
-int send_ssnc_metadata(uint32_t code, char *data, uint32_t length, int block);
+int send_ssnc_metadata(const uint32_t code, const char *data, const uint32_t length,
+ const int block);
+
+#ifdef CONFIG_AIRPLAY_2
+ssize_t read_encrypted(int fd, pair_cipher_bundle *ctx, void *buf, size_t count);
+ssize_t write_encrypted(int fd, pair_cipher_bundle *ctx, const void *buf, size_t count);
+
+void generateTxtDataValueInfo(rtsp_conn_info *conn, void **response, size_t *responseLength);
+plist_t generateInfoPlist(rtsp_conn_info *conn);
+char *plist_as_xml_text(plist_t the_plist); // caller must free the returned NUL-terminated string
+#endif
#endif // _RTSP_H
// %V for the full version string, e.g. 3.3-OpenSSL-Avahi-ALSA-soxr-metadata-sysconfdir:/etc
// Overall length can not exceed 50 characters. Example: "Shairport Sync %v on %H".
// password = "secret"; // (AirPlay 1 only) leave this commented out if you don't want to require a password
-// interpolation = "auto"; // aka "stuffing". Default is "auto". Alternatives are "basic" or "soxr". Choose "soxr" only if you have a reasonably fast processor and Shairport Sync has been built with "soxr" support.
+// The interpolation setting below controls how Shairport Sync adds or removes frames of audio to keep in sync.
+// "auto" (default) measures the processor's floating point speed and chooses "soxr" if available and it is fast enough. Otherwise, "vernier" is selected.
+// "soxr" uses the SoX library to recode a packet of frames to a new packet containing more or fewer frames. This needs a processor with fast floating point capability.
+// "vernier" recodes a packet of frames to a new packet containing more or fewer frames. This is recommended for low powered devices.
+// "basic" causes the simple removal or insertion of frames in a packet of frames. Not recommended.
+// interpolation = "auto"; // aka "stuffing". Default is "auto". Alternatives are "vernier", "basic" or "soxr". Choose "soxr" only if you have a reasonably fast processor and Shairport Sync has been built with "soxr" support.
// output_backend = "alsa"; // Run "shairport-sync -h" to get a list of all output_backends, e.g. "alsa", "pipe", "stdout". The default is the first one.
// mdns_backend = "avahi"; // Run "shairport-sync -h" to get a list of all mdns_backends. The default is the first one.
// interface = "name"; // Use this advanced setting to specify the interface on which Shairport Sync should provide its service. Leave it commented out to get the default, which is to select the interface(s) automatically.
// airplay_device_id_offset = 0; // (AirPlay 2 only) add this to the default airplay_device_id calculated from one of the device's MAC address
// airplay_device_id = 0x<six-digit_hexadecimal_number>L; // (AirPlay 2 only) use this as the airplay_device_id e.g. 0xDCA632D4E8F3L -- remember the "L" at the end as it's a 64-bit quantity!
// regtype = "<string>"; // Use this advanced setting to set the service type and transport to be advertised by Zeroconf/Bonjour. Default is "_raop._tcp" for AirPlay 1, "_airplay._tcp" for AirPlay 2.
-
// drift_tolerance_in_seconds = 0.002; // allow a timing error of this number of seconds of drift away from exact synchronisation before attempting to correct it
// resync_threshold_in_seconds = 0.050; // a synchronisation error greater than this number of seconds will cause resynchronisation; 0 disables it
-// resync_recovery_time_in_seconds = 0.100; // allow this extra time to recover after a late resync. Increase the value, possibly to 0.5, in a virtual machine.
// playback_mode = "stereo"; // This can be "stereo", "mono", "reverse stereo", "both left" or "both right". Default is "stereo".
-// alac_decoder = "hammerton"; // This can be "hammerton" or "apple". This advanced setting allows you to choose
-// the original Shairport decoder by David Hammerton or the Apple Lossless Audio Codec (ALAC) decoder written by Apple.
-// If you build Shairport Sync with the flag --with-apple-alac, the Apple ALAC decoder will be chosen by default.
-
+// For FFmpeg channel and layout names, (e.g. "7.1", "FL", "3.0(back)", etc.), please see channel_names and channel_layout_map at https://ffmpeg.org/doxygen/trunk/channel__layout_8c_source.html
+// eight_channel_mode = "on"; // Enable reception of eight channel audio. Can be "off", "on" or an eight-channel FFmpeg channel layout. If "on", the channel layout used is: "7.1".
+// six_channel_mode = "on"; // Enable reception of six channel audio. Can be "off", "on" or a six-channel FFmpeg channel layout. If "on", the channel layout used is: "5.1".
+// mixdown = "auto"; // Enable mixdown. Can be "auto", "off" or an FFmpeg channel layout, e.g. "quad". If "auto", mixdown will occur, if needed, to the default channel layout for the output channels available.
+// output_channel_mapping = "auto"; // Specify how audio channels are mapped to the output device's channels:
+// Shairport Sync uses standard FFmpeg channel names for each channel in the audio output. The names are, in order, "FL", "FR", "FC", "LFE", "BL", "BR", "SL", "SR".
+// If "auto", the audio channels are matched, where possible, to the channels in the device's channel map. Any leftover output channels are mapped, in order, to leftover device channels.
+// If "off", or if there is no device channel map, audio channels are output to the device channels in order.
+// If a list of audio channels is given, e.g. ( "FL", "FR", "LFE", "FC", "BL", "BR", "SL", "SR" ), they are mapped in the order given to the device channels from 1 upwards.
+// The audio channel list can include the same channel more than once and can include the silent channel "--".
+
+// alac_decoder = <setting>; // This advanced setting is for Classic AirPlay only. It can be "ffmpeg" (default on systems with FFmpeg support), "hammerton" (default on systems without FFmpeg support, deprecated) or "apple" (deprecated).
+// The original Shairport decoder is by David Hammerton. This is deprecated for security reasons as it is no longer maintained. However, it is compact and may be useful in storage-constrained devices. It is included by default on systems without FFmpeg support.
+// If you build Shairport Sync with the flag --with-apple-alac, the Apple ALAC decoder will be chosen by default unless FFmpeg support is included. The Apple ALAC decoder is deprecated for security reasons.
+// If you build Shairport Sync with the flag --with-ffmpeg, the FFmpeg ALAC decoder will be chosen by default. This is recommended except where there is insufficient storage on the device. With this setting, the Hammerton decoder will not be built.
+// In AirPlay 2 operation, the FFmpeg ALAC decoder is always used.
// ignore_volume_control = "no"; // set this to "yes" if you want the volume to be at 100% no matter what the source's volume control is set to.
// volume_range_db = 60 ; // use this advanced setting to set the range, in dB, you want between the maximum volume and the minimum volume. Range is 30 to 150 dB. Leave it commented out to use mixer's native range.
// volume_max_db = 0.0 ; // use this advanced setting, which must have a decimal point in it, to set the maximum volume, in dB, you wish to use.
// "standard" makes the volume change more quickly at lower volumes and slower at higher volumes.
// "flat" makes the volume change at the same rate at all volumes.
// "dasl_tapered" is similar to "standard" - it makes the volume change more quickly at lower volumes and slower at higher volumes.
-// The intention behind dasl_tapered is that a given percentage change in volume should result in the same percentage change in
+// The basic idea behind dasl_tapered is that a given percentage change in volume should result in the same percentage change in
// perceived loudness. For instance, doubling the volume level should result in doubling the perceived loudness.
// With the range of AirPlay volume being from -30 to 0, doubling the volume from -22.5 to -15 results in an increase of 10 dB.
// Similarly, doubling the volume from -15 to 0 results in an increase of 10 dB.
// For compatibility with mixers having a restricted attenuation range (e.g. 30 dB), "dasl_tapered" will switch to a flat profile at low AirPlay volumes.
-
// volume_control_combined_hardware_priority = "no"; // when extending the volume range by combining the built-in software attenuator with the hardware mixer attenuator, set this to "yes" to reduce volume by using the hardware mixer first, then the built-in software attenuator.
-
// default_airplay_volume = -24.0; // this is the suggested volume after a reset or after the high_volume_threshold has been exceed and the high_volume_idle_timeout_in_minutes has passed
-
-// The following settings are for dealing with potentially surprising high ("very loud") volume levels.
-// When a new play session starts, it usually requests a suggested volume level from Shairport Sync. This is normally the volume level of the last session.
-// This can cause unpleasant surprises if the last session was (a) very loud and (b) a long time ago.
-// Thus, the user could be unpleasantly surprised by the volume level of the new session.
-
-// To deal with this, when the last session volume is "very loud", the following two settings will lower the suggested volume after a period of idleness:
-
-// high_threshold_airplay_volume = -16.0; // airplay volume greater or equal to this is "very loud"
-// high_volume_idle_timeout_in_minutes = 0; // if the current volume is "very loud" and the device is not playing for more than this time, suggest the default volume for new connections instead of the current volume.
-// Note 1: This timeout is set to 0 by default to disable this feature. Set it to some positive number, e.g. 180 to activate the feature.
-// Note 2: Not all applications use the suggested volume: MacOS Music and Mac OS System Sounds use their own settings.
-
// run_this_when_volume_is_set = "/full/path/to/application/and/args"; // Run the specified application whenever the volume control is set or changed.
// The desired AirPlay volume is appended to the end of the command line – leave a space if you want it treated as an extra argument.
// AirPlay volume goes from 0.0 to -30.0 and -144.0 means "mute".
-
// audio_backend_latency_offset_in_seconds = 0.0; // This is added to the latency requested by the player to delay or advance the output by a fixed amount.
// Use it, for example, to compensate for a fixed delay in the audio back end.
// E.g. if the output device, e.g. a soundbar, takes 100 ms to process audio, set this to -0.1 to deliver the audio
// to the output device 100 ms early, allowing it time to process the audio and output it perfectly in sync.
-// audio_backend_buffer_desired_length_in_seconds = 0.2; // If set too small, buffer underflow occurs on low-powered machines.
+// audio_backend_buffer_desired_length_in_seconds = 0.2; // This is the desired size of the buffer to be maintained in the external output system, e.g. the DAC in ALSA. If set too small, buffer underflow occurs on low-powered machines.
// Too long and the response time to volume changes becomes annoying.
-// Default is 0.2 seconds in the alsa backend, 0.35 seconds in the pa backend and 1.0 seconds otherwise.
+// audio_decoded_buffer_desired_length_in_seconds = 1.0; // Advanced feature. This is the desired size of the buffer of fully deciphered and decoded audio maintained within Shairport Sync prior to sending it to the external output system, e.g. the DAC in ALSA.
+// Valid for AirPlay 2 Buffered Audio streams only.
// audio_backend_buffer_interpolation_threshold_in_seconds = 0.075; // Advanced feature. If the buffer size drops below this, stop using time-consuming interpolation like soxr to avoid dropouts due to underrun.
// audio_backend_silent_lead_in_time = "auto"; // This optional advanced setting, either "auto" or a positive number, sets the length of the period of silence that precedes the start of the audio.
// The default is "auto" -- the silent lead-in starts as soon as the player starts sending packets.
// Values greater than the latency are ignored. Values that are too low will affect initial synchronisation.
-
// dbus_service_bus = "system"; // The Shairport Sync dbus interface, if selected at compilation, will appear
// as "org.gnome.ShairportSync" on the whichever bus you specify here: "system" (default) or "session".
// mpris_service_bus = "system"; // The Shairport Sync mpris interface, if selected at compilation, will appear
// as "org.gnome.ShairportSync" on the whichever bus you specify here: "system" (default) or "session".
-
// resend_control_first_check_time = 0.10; // Use this optional advanced setting to set the wait time in seconds before deciding a packet is missing.
// resend_control_check_interval_time = 0.25; // Use this optional advanced setting to set the time in seconds between requests for a missing packet.
// resend_control_last_check_time = 0.10; // Use this optional advanced setting to set the latest time, in seconds, by which the last check should be done before the estimated time of a missing packet's transfer to the output buffer.
// wait_for_completion = "no"; // set to "yes" to get Shairport Sync to wait until the "run_this..." applications have terminated before continuing
// allow_session_interruption = "no"; // set to "yes" to allow another device to interrupt Shairport Sync while it's playing from an existing audio source
-// session_timeout = 120; // wait for this number of seconds after a source disappears before terminating the session and becoming available again.
+// session_timeout = 60; // wait for this number of seconds after a source disappears before terminating the session and becoming available again.
};
// Back End Settings
+// Rates, Formats and Channels
+// Shairport Sync can handle a wide range of output rates, formats and channels.
+
+// If it has been built for AirPlay 2 or with FFmpeg support, Shairport Sync can output in a wide range of settings, including 48,000 and 44,100 frames per second, 32- and 24-bit sample sizes, 1 to 8 channels.
+// Otherwise, if Shairport Sync has been built (1) for classic AirPlay only and (2) without FFmpeg support, it is restricted to output rates of 44,100 frames per second or exact multiples and two channels only.
+
+// You can check Shairport Sync's support for FFmpeg by entering the command: shairport-sync -V
+// If the resulting string contains "FFmpeg" or "AirPlay2", then it has FFmpeg support built in.
+
+// With FFmpeg support:
+// Possible output rates are: 5512, 8000, 11025, 16000, 22050, 32000, 44100, 48000, 64000, 88200, 96000, 176400, 192000, 352800 and 384000 frames per second.
+// Possible output formats are: "S8", "U8", "S16_LE", "S16_BE", "S24_LE", "S24_BE", "S24_3LE", "S24_3BE", "S32_LE" and "S32_BE".
+// Possible output channel counts are: 1 to 8.
+
+// Without FFmpeg support:
+// Possible rates are: 44100, 88200, 176400 and 352800 frames per second.
+// Possible formats are: "S8", "U8", "S16_LE", "S16_BE", "S24_LE", "S24_BE", "S24_3LE", "S24_3BE", "S32_LE" and "S32_BE".
+// Possible channel counts are: 2.
+
+// Automatic settings
+// Shairport Sync will dynamically select output formats, attempting to match input and output rates, formats and channel counts, picking the best alternatives otherwise.
+// (Settings are static by default on the STDOUT and pipe backends, as there is no obvious way to signal a downstream consumer of the data when the format has changed.)
+
+// Rate Selection:
+// Shairport Sync checks the rates the output system can accept and those that have been specified in the configuration file.
+// From that set of possibilities, Shairport Sync will attempt to match the rate at which the audio is being received and will switch the output to that rate if necessary.
+// If the exact rate is not available, an exact multiple will be selected if available. Finally, and only with FFmpeg support, a higher rate or a lower rate will be chosen.
+// To avoid output rate switching, specify just one rate in the configuration file.
+
+// Format Selection:
+// Shairport Sync checks the formats the output system can accept and those that have been specified in the configuration file.
+// From that set of possibilities, Shairport Sync will use the deepest format unless ignore_volume_control is true and maximum_volume is not used, in which case it will try to switch the output to the exact format of the incoming audio.
+// To avoid output format switching, specify just one format in the configuration file.
+
+// Channel Count Selection:
+// Shairport Sync checks the channel counts the output system can accept and those that have been specified in the configuration file.
+// From that set of possibilities, Shairport Sync will attempt to match the output channels to the number of channels in the audio and will switch the output to that number of channels if necessary.
+// If the exact number of output channels is not available, a greater output channel count will be selected if available. Failing that, a lower channel count will be chosen.
+// To avoid channel count switching, specify just one channel count in the configuration file.
+
+
// These are parameters for the "alsa" audio back end.
// For this section to be operative, Shairport Sync must be built with the following configuration flag:
// --with-alsa
// mixer_control_index = 0; // the index of the mixer to use to adjust output volume. Default is 0. The mixer is fully identified by the combination of the mixer_control_name and the mixer_control_index, e.g. "PCM",0 would be such a specification.
// mixer_device = "default"; // the mixer_device default is whatever the output_device is. Normally you wouldn't have to use this.
-// output_rate = "auto"; // can be "auto", 44100, 88200, 176400 or 352800, but the device must have the capability.
-// output_format = "auto"; // can be "auto", "U8", "S8", "S16", "S16_LE", "S16_BE", "S24", "S24_LE", "S24_BE", "S24_3LE", "S24_3BE", "S32", "S32_LE" or "S32_BE" but the device must have the capability. Except where stated using (*LE or *BE), endianness matches that of the processor.
+// Note: if you specify settings here, the output device must be capable of them. Otherwise, Shairport Sync will quit and leave a message in the system log.
+// output_rate = "auto"; // Specify "auto", or a single rate, e.g. 48000, or a bracketed comma-separated list of rates, e.g. (44100, 48000, 64000). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
+// output_format = "auto"; // Specify "auto", or a single format, e.g. "S32_LE", or a bracketed comma-separated list of formats, e.g. ("S32_LE", "S16_LE"). Default is "auto". See the "Rates, Formats and Channels" discussion above.
+// output_channels = "auto"; // Specify "auto", or a specific number of channels, e.g. 2, or a bracketed comma-separated list of numbers of channels, e.g. (2, 6). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
-// disable_synchronization = "no"; // Set to "yes" to disable synchronization. Default is "no" This is really meant for troubleshooting.
+// disable_synchronization = "no"; // Set to "yes" to disable synchronization.
// period_size = <number>; // Use this optional advanced setting to set the alsa period size near to this value
// buffer_size = <number>; // Use this optional advanced setting to set the alsa buffer size near to this value
-// use_mmap_if_available = "yes"; // Use this optional advanced setting to control whether MMAP-based output is used to communicate with the DAC. Default is "yes"
+// use_mmap_if_available = "no"; // Use this optional advanced setting to control whether MMAP-based output is used to communicate with the DAC. Default is "no".
// use_hardware_mute_if_available = "no"; // Use this optional advanced setting to control whether the hardware in the DAC is used for muting. Default is "no", for compatibility with other audio players.
// maximum_stall_time = 0.200; // Use this optional advanced setting to control how long to wait for data to be consumed by the output device before considering it an error. It should never approach 200 ms.
// use_precision_timing = "auto"; // Use this optional advanced setting to control how Shairport Sync gathers timing information. When set to "auto", if the output device is a real hardware device, precision timing will be used. Choose "no" for more compatible standard timing, choose "yes" to force the use of precision timing, which may cause problems.
// disable_standby_mode = "never"; // This setting prevents the DAC from entering the standby mode. Some DACs make small "popping" noises when they go in and out of standby mode. Settings can be: "always", "auto" or "never". Default is "never", but only for backwards compatibility. The "auto" setting prevents entry to standby mode while Shairport Sync is in the "active" mode. You can use "yes" instead of "always" and "no" instead of "never".
// disable_standby_mode_silence_threshold = 0.040; // Use this optional advanced setting to control how little audio should remain in the output buffer before the disable_standby code should start sending silence to the output device.
-// disable_standby_mode_silence_scan_interval = 0.004; // Use this optional advanced setting to control how often the amount of audio remaining in the output buffer should be checked.
+// disable_standby_mode_silence_scan_interval = 0.030; // Use this optional advanced setting to control how often the amount of audio remaining in the output buffer should be checked.
+// disable_standby_mode_default_channels = 2; // Use this optional advanced setting to set the initial channel setting when disable_standby_mode is "always" or "yes". After a track has been played, the track's output channel setting will be used.
+// disable_standby_mode_default_rate = <rate>; // Use this optional advanced setting to set the initial rate, in frames per second, when disable_standby_mode is "always" or "yes". Default is 44100 for classic AirPlay, 48000 for AirPlay 2. After a track has been played, the track's output rate setting will be used.
};
-// Parameters for the "pw" PipeWire backend.
+// Parameters for the PipeWire backend.
// For this section to be operative, Shairport Sync must be built with the following configuration flag:
-// --with-pw
-pw =
+// --with-pipewire
+pipewire =
{
// application_name = "Shairport Sync"; // Set this to the name that should appear in the Sounds "Applications" or "Volume Levels".
// node_name = "Shairport Sync"; // This appears in some PipeWire CLI tool outputs.
// sink_target = "<sink target name>"; // Leave this commented out to get the sink target already chosen by the PipeWire system.
+
+// Note: if you specify settings here, the PipeWire system must be capable of giving effect to them. Otherwise, Shairport Sync will quit and leave a message in the system log.
+// output_rate = "auto"; // Specify "auto", or a single rate, e.g. 48000, or a bracketed comma-separated list of rates, e.g. (44100, 48000, 64000). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
+// output_format = "auto"; // Specify "auto", or a single format, e.g. "S32_LE", or a bracketed comma-separated list of formats, e.g. ("S32_LE", "S16_LE"). Default is "auto". See the "Rates, Formats and Channels" discussion above.
+// output_channels = "auto"; // Specify "auto", or a specific number of channels, e.g. 2, or a bracketed comma-separated list of numbers of channels, e.g. (2, 6). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
};
// Parameters for the "sndio" audio back end. All are optional.
sndio =
{
// device = "default"; // optional setting to set the name of the output device, e.g. "rsnd/0", "rsnd/1", etc.
-// rate = 44100; // optional setting which can be 44100, 88200, 176400 or 352800, but the device must have the capability. Default is 44100.
-// format = "S16"; // optional setting which can be "U8", "S8", "S16", "S24", "S24_3LE", "S24_3BE" or "S32", but the device must have the capability. Except where stated using (*LE or *BE), endianness matches that of the processor.
// round = <number>; // advanced optional setting to set the period size near to this value
// bufsz = <number>; // advanced optional setting to set the buffer size near to this value
+
+// Note: if you specify settings here, the sndio system must be capable of giving effect to them. Otherwise, Shairport Sync will quit and leave a message in the system log.
+// output_rate = "auto"; // Specify "auto", or a single rate, e.g. 48000, or a bracketed comma-separated list of rates, e.g. (44100, 48000, 64000). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
+// output_format = "auto"; // Specify "auto", or a single format, e.g. "S32_LE", or a bracketed comma-separated list of formats, e.g. ("S32_LE", "S16_LE") from the list: "S32_LE", "S32_BE", "S16_LE" or "S16_BE". Default is "auto". See the "Rates, Formats and Channels" discussion above.
+// output_channels = "auto"; // Specify "auto", or a specific number of channels, e.g. 2, or a bracketed comma-separated list of numbers of channels, e.g. (2, 6). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
};
-// Parameters for the "pa" PulseAudio backend.
+// Parameters for the PulseAudio backend.
// For this section to be operative, Shairport Sync must be built with the following configuration flag:
-// --with-pa
-pa =
+// --with-pulseaudio
+pulseaudio =
{
// server = "host"; // Set this to override the default pulseaudio server that should be used.
// sink = "Sink Name"; // Set this to override the default pulseaudio sink that should be used. (Untested)
// application_name = "Shairport Sync"; //Set this to the name that should appear in the Sounds "Applications" tab when Shairport Sync is active.
+
+// Note: if you specify settings here, the PulseAudio system must be capable of giving effect to them. Otherwise, Shairport Sync will quit and leave a message in the system log.
+// output_rate = "auto"; // Specify "auto", or a single rate, e.g. 48000, or a bracketed comma-separated list of rates, e.g. (44100, 48000, 64000). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
+// output_format = "auto"; // Specify "auto", or a single format, e.g. "S32_LE", or a bracketed comma-separated list of formats, e.g. ("S32_LE", "S16_LE"). Default is "auto". See the "Rates, Formats and Channels" discussion above.
+// output_channels = "auto"; // Specify "auto", or a specific number of channels, e.g. 2, or a bracketed comma-separated list of numbers of channels, e.g. (2, 6). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
+// default_channel_layouts = "alsa"; // Set to "alsa" (default) for the alsa-compatible channel layouts (see "PA_CHANNEL_MAP_ALSA"), or set to "pulseaudio" if you want PulseAudio's own channel layouts ("PA_CHANNEL_MAP_DEFAULT") to be used instead.
};
-// Parameters for the "jack" JACK Audio Connection Kit backend.
+// Parameters for the JACK Audio Connection Kit backend.
// For this section to be operative, Shairport Sync must be built with the following configuration flag:
// --with-jack
jack =
// bufsz = <number>; // advanced optional setting to set the buffer size to this value
};
-// Parameters for the "pipe" audio back end, a back end that directs raw CD-format audio output to a pipe. No interpolation is done.
+// Parameters for the "pipe" audio back end, a back end that directs raw PCM audio output to a unix pipe. No interpolation is done.
// For this section to be operative, Shairport Sync must have been built with the following configuration flag:
// --with-pipe
pipe =
{
// name = "/tmp/shairport-sync-audio"; // this is the default
+
+// Note: if you specify "auto" or multiple settings here, Shairport Sync may switch between them to match the input, but there will be no notification in the pipe as changes occur. To avoid this, consider setting just one rate/format/channel count. If built with AirPlay 2 or FFmpeg support, Shairport Sync will automatically transcode and mixdown as necessary.
+// output_rate = <rate>; // Specify a single rate, e.g. 44100, or a bracketed comma-separated list of rates, e.g. (44100, 48000, 64000) or "auto" -- try to match the input. Default is 44100 for classic AirPlay, 48000 for AirPlay 2. See the "Rates, Formats and Channels" discussion above.
+// output_format = <format>; // Specify a format, e.g. "S16_LE", or a bracketed comma-separated list of formats, e.g. ("S32_LE", "S16_LE") or "auto". Default is "S16_LE" for classic AirPlay, "S32_LE" for AirPlay 2. See the "Rates, Formats and Channels" discussion above.
+// output_channels = 2; // Specify a specific number of channels, e.g. 2, or a bracketed comma-separated list of numbers of channels, e.g. (2, 6) or "auto" -- try to match the input. Default is 2. See the "Rates, Formats and Channels" discussion above.
};
-// There are no configuration file parameters for the "stdout" audio back end. No interpolation is done.
+// Parameters for the "stdout" audio back end, a back end that directs raw PCM audio output to STDOUT. No interpolation is done.
// To include support for the "stdout" backend, Shairport Sync must be built with the following configuration flag:
// --with-stdout
+stdout =
+{
+// Note: if you specify "auto" or multiple settings here, Shairport Sync may switch between them to match the input, but there will be no notification in STDOUT as changes occur. To avoid this, consider setting just one rate/format/channel count. If built with AirPlay 2 or FFmpeg support, Shairport Sync will automatically transcode and mixdown as necessary.
+// output_rate = <rate>; // Specify a single rate, e.g. 44100, or a bracketed comma-separated list of rates, e.g. (44100, 48000, 64000) or "auto" -- try to match the input. Default is 44100 for classic AirPlay, 48000 for AirPlay 2. See the "Rates, Formats and Channels" discussion above.
+// output_format = <format>; // Specify a format, e.g. "S16_LE", or a bracketed comma-separated list of formats, e.g. ("S32_LE", "S16_LE") or "auto". Default is "S16_LE" for classic AirPlay, "S32_LE" for AirPlay 2. See the "Rates, Formats and Channels" discussion above.
+// output_channels = 2; // Specify a specific number of channels, e.g. 2, or a bracketed comma-separated list of numbers of channels, e.g. (2, 6) or "auto" -- try to match the input. Default is 2. See the "Rates, Formats and Channels" discussion above.
+};
-// There are no configuration file parameters for the "ao" audio back end. No interpolation is done.
+// Parameters for the "ao" (also known as "libao") backend.
// To include support for the "ao" backend, Shairport Sync must be built with the following configuration flag:
// --with-ao
+ao =
+{
+// Note: if you specify settings here, the ao (aka "libao") system must be capable of giving effect to them. Otherwise, Shairport Sync will quit and leave a message in the system log.
+// output_rate = "auto"; // Specify "auto", or a single rate, e.g. 48000, or a bracketed comma-separated list of rates, e.g. (44100, 48000, 64000). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
+// output_format = "auto"; // Specify "auto", or a single format, e.g. "S32_LE", or a bracketed comma-separated list of formats, e.g. ("S32_LE", "S16_LE"). Default is "auto". See the "Rates, Formats and Channels" discussion above.
+// output_channels = "auto"; // Specify "auto", or a specific number of channels, e.g. 2, or a bracketed comma-separated list of numbers of channels, e.g. (2, 6). Default is "auto" -- try to match the input. See the "Rates, Formats and Channels" discussion above.
+};
+
// For this section to be operative, Shairport Sync must be built with the following configuration flag:
// --with-convolution
dsp =
{
-
//////////////////////////////////////////
-// This convolution filter can be used to apply almost any correction to the audio signal, like frequency and phase correction.
+// This convolution filter can be used with impulse response files
+// to apply almost any correction to the audio signal, like frequency and phase correction.
// For example you could measure (with a good microphone and a sweep-sine) the frequency response of your speakers + room,
// and apply a correction to get a flat response curve.
+// Long impulse response files require lots of floating-point processing!
//////////////////////////////////////////
//
-// convolution = "no"; // Set this to "yes" to activate the convolution filter.
-// convolution_ir_file = "impulse.wav"; // Impulse Response file to be convolved to the audio stream
-// convolution_gain = -4.0; // Static gain applied to prevent clipping during the convolution process
-// convolution_max_length = 44100; // Truncate the input file to this length in order to save CPU.
-
-
+// convolution_enabled = "no"; // Set this to "yes" to activate the convolution filter.
+// convolution_thread_pool_size = 1; // Number of CPU threads that can work on convolution at the same time.
+// convolution_ir_files = "<comma-separated list of full path names to impulse response files, for sample rates of 44100 and 48000 -- stereo or mono only>";
+// convolution_gain = -4.0; // Static gain applied after convolution, between -60 and +18 dB. Useful for preventing clipping or amplifying low convolution output levels
+// convolution_max_length_in_seconds = 1.0; // Truncate the input file to this length in order to save CPU.
+//
//////////////////////////////////////////
// This loudness filter is used to compensate for human ear non linearity.
-// When the volume decreases, our ears loose more sentisitivity in the low range frequencies than in the mid range ones.
-// This filter aims at compensating for this loss, applying a variable gain to low frequencies depending on the volume.
+// When the volume decreases, our ears lose more sensitivity to low frequencies than to mid range ones.
+// This filter aims at compensating for this loss, applying a small extra gain to low frequencies, depending on the volume.
// More info can be found here: https://en.wikipedia.org/wiki/Equal-loudness_contour
-// For this filter to work properly, you should disable (or set to a fix value) all other volume control and only let shairport-sync control your volume.
-// The setting "loudness_reference_volume_db" should be set at the volume reported by shairport-sync when listening to music at a normal listening volume.
+//
+// NOTE: To use this filter, Shairport Sync must _not_ be controlling a hardware mixer.
+//
+// The setting "loudness_reference_volume_db" is the highest level at which the loudness filter will take effect. Above this level the filter is off.
//////////////////////////////////////////
//
-// loudness = "no"; // Set this to "yes" to activate the loudness filter
-// loudness_reference_volume_db = -20.0; // Above this level the filter will have no effect anymore. Below this level it will gradually boost the low frequencies.
-
+// loudness_enabled = "no"; // Set this to "yes" to activate the loudness filter (only works if you are _not_ using hardware volume control)
+// loudness_reference_volume_db = -16.0; // Above this level the filter will have no effect. Below this level it will gradually boost the low frequencies.
};
// How to deal with metadata, including artwork
// pipe_timeout = 5000; // wait for this number of milliseconds for a blocked pipe to unblock before giving up
// progress_interval = 0.0; // if non-zero, progress 'phbt' messages will be sent at the interval specified in seconds. A 'phb0' message will also be sent when the first audio frame of a play session is about to be played.
// Each message consists of the RTPtime of a a frame of audio and the exact system time when it is to be played. The system time, in nanoseconds, is based the CLOCK_MONOTONIC_RAW of the machine -- if available -- or CLOCK_MONOTONIC otherwise.
-// Messages are sent when the frame is placed in the output device's buffer, thus, they will be _approximately_ 'audio_backend_buffer_desired_length_in_seconds' (default 0.2 seconds) ahead of time.
+// Messages are sent when the frame is placed in the output device's buffer, thus, they will be _approximately_ 'audio_backend_buffer_desired_length_in_seconds' ahead of time.
// socket_address = "226.0.0.1"; // if set to a host name or IP address, UDP packets containing metadata will be sent to this address. May be a multicast address. "socket-port" must be non-zero and "enabled" must be set to yes"
// socket_port = 5555; // if socket_address is set, the port to send UDP packets to
// socket_msglength = 65000; // the maximum packet size for any UDP metadata. This will be clipped to be between 500 or 65000. The default is 500.
// publish_raw = "no"; //whether to publish all available metadata under the codes given in the 'metadata' docs.
// publish_parsed = "no"; //whether to publish a small (but useful) subset of metadata under human-understandable topics
// empty_payload_substitute = "--"; // MQTT messages with empty payloads often are invisible or have special significance to MQTT brokers and readers.
-// To avoid empty payload problems, the string here is used instead of any empty payload. Set it to the empty string -- "" -- to leave the payload empty.
+// To avoid empty payload problems, this string is used instead of any empty payload. Set it to the empty string -- "" -- to leave the payload empty.
// Currently published topics:artist,album,title,genre,format,songalbum,volume,client_ip,
// Additionally, messages at the topics play_start,play_end,play_flush,play_resume are published
// publish_cover = "no"; //whether to publish the cover over mqtt in binary form. This may lead to a bit of load on the broker
-// enable_autodiscovery = "no"; //whether to publish an autodiscovery message to automatically appear in Home Assistant
-// autodiscovery_prefix = "homeassistant"; //string to prepend to autodiscovery topic
+// publish_retain = "no"; //whether to set the retain flag on published MQTT messages. When enabled, the broker stores the last message for each topic.
+// enable_autodiscovery = "no"; //whether to publish an autodiscovery message to automatically appear in Home Assistant
+// autodiscovery_prefix = "homeassistant"; //string to prepend to autodiscovery topic
// enable_remote = "no"; //whether to remote control via MQTT. RC is available under `topic`/remote.
// Available commands are "command", "beginff", "beginrew", "mutetoggle", "nextitem", "previtem", "pause", "playpause", "play", "stop", "playresume", "shuffle_songs", "volumedown", "volumeup"
};
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
-# Short-Description: Shairport Synchronous AirPlay
+# Short-Description: Shairport Sync -- AirPlay Receiver
# Description: Implements a synchronous (multi-room-capable) AirPlay receiver
### END INIT INFO
-# Author: Mike Brady <mikebrady@eircom.net>
+# Author: Mike Brady <4265913+mikebrady@users.noreply.github.com>
#
# Do NOT "set -e"
+++ /dev/null
-[Unit]
-Description=Shairport Sync - AirPlay Audio Receiver
-After=sound.target
-Requires=avahi-daemon.service
-After=avahi-daemon.service
-Wants=network-online.target
-After=network.target network-online.target
-
-[Service]
-ExecStart=@prefix@/bin/shairport-sync --log-to-syslog
-User=shairport-sync
-Group=shairport-sync
-
-[Install]
-WantedBy=multi-user.target
After=sound.target
Wants=network-online.target
After=network.target network-online.target
+@SYSTEMD_AFTER_ARGS@
+@SYSTEMD_REQUIRES_ARGS@
+StartLimitIntervalSec=300
+StartLimitBurst=5
[Service]
ExecStart=@prefix@/bin/shairport-sync --log-to-syslog
User=shairport-sync
Group=shairport-sync
+LimitRTPRIO=5
+Restart=on-failure
+RestartSec=5s
[Install]
WantedBy=multi-user.target
--- /dev/null
+[Unit]
+Description=Shairport Sync - AirPlay Audio Receiver
+After=sound.target
+
+[Service]
+ExecStart=/usr/local/bin/shairport-sync --log-to-syslog
+
+[Install]
+WantedBy=default.target
/*
* This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2019
+ * Copyright (c) Mike Brady 2019--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
/*
* This file is part of Shairport Sync.
- * Copyright (c) Mike Brady 2019
+ * Copyright (c) Mike Brady 2019--2025
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person
* Shairport, an Apple Airplay receiver
* Copyright (c) James Laird 2013
* All rights reserved.
- * Modifications and additions (c) Mike Brady 2014--2023
+ * Modifications and additions (c) Mike Brady 2014--2025
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* OTHER DEALINGS IN THE SOFTWARE.
*/
-#include <sys/socket.h>
-#include <sys/types.h>
#include <errno.h>
#include <fcntl.h>
#include <getopt.h>
#include <memory.h>
#include <net/if.h>
#include <popt.h>
+#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
+#include <sys/socket.h>
#include <sys/stat.h>
+#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
#include "config.h"
+#ifdef CONFIG_FFMPEG
+#include <libavutil/log.h>
+#endif
+
#ifdef CONFIG_AIRPLAY_2
#include "ptp-utilities.h"
#include <gcrypt.h>
#ifdef CONFIG_AIRPLAY_2
int has_fltp_capable_aac_decoder(void) {
-
// return 1 if the AAC decoder advertises fltp decoding capability, which
// is needed for decoding Buffered Audio streams
+ debug(3, "checking availability of an fltp-capable aac decoder");
int has_capability = 0;
const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_AAC);
if (codec != NULL) {
- const enum AVSampleFormat *p = codec->sample_fmts;
- if (p != NULL) {
- while ((has_capability == 0) && (*p != AV_SAMPLE_FMT_NONE)) {
- if (*p == AV_SAMPLE_FMT_FLTP)
+ const enum AVSampleFormat *formats = NULL;
+#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(61, 13, 100)
+ // New API (FFmpeg 7.1+) for getting formats
+ debug(3, "getting sample formats the new way");
+ int format_count = 0;
+ if ((avcodec_get_supported_config(NULL, codec, AV_CODEC_CONFIG_SAMPLE_FORMAT, 0,
+ (const void **)&formats, &format_count) < 0) ||
+ (format_count == 0))
+ formats = NULL; // not clear if the returned pointer is nulled on error or on zero items
+#else
+ debug(3, "getting sample formats the old way");
+ // older API
+ formats = codec->sample_fmts;
+#endif
+ if (formats != NULL) {
+ while ((has_capability == 0) && (*formats != AV_SAMPLE_FMT_NONE)) {
+ if (*formats == AV_SAMPLE_FMT_FLTP) {
has_capability = 1;
- p++;
+ }
+ formats++;
}
}
+ } else {
+ debug(3, "no AAC codec found.");
}
return has_capability;
}
pthread_t soxr_time_check_thread;
int soxr_time_check_thread_started = 0;
void *soxr_time_check(__attribute__((unused)) void *arg) {
+ // this just checks how long it takes to process adding and subtracting a frame
+ // from a buffer at 44100
+ // #include <syscall.h>
+ // debug(1, "soxr_time_check PID %d", syscall(SYS_gettid));
+
const int buffer_length = 352;
int32_t inbuffer[buffer_length * 2];
int32_t outbuffer[(buffer_length + 1) * 2];
if (number_of_iterations != 0) {
config.soxr_delay_index = soxr_execution_time_int / number_of_iterations;
} else {
- debug(1, "No soxr-timing iterations performed, so \"basic\" iteration will be used.");
+ debug(1, "No soxr-timing iterations performed, so \"vernier\" iteration will be used.");
config.soxr_delay_index = 0; // used as a flag
}
debug(2, "soxr_delay: %d nanoseconds, soxr_delay_threshold: %d milliseconds.",
if ((config.packet_stuffing == ST_soxr) &&
(config.soxr_delay_index > config.soxr_delay_threshold))
inform("Note: this device may be too slow for \"soxr\" interpolation. Consider choosing the "
- "\"basic\" or \"auto\" interpolation setting.");
+ "\"auto\", \"vernier\" or \"basic\" interpolation setting.");
if (config.packet_stuffing == ST_auto)
debug(
1, "\"%s\" interpolation has been chosen.",
((config.soxr_delay_index != 0) && (config.soxr_delay_index <= config.soxr_delay_threshold))
? "soxr"
- : "basic");
+ : "vernier");
pthread_exit(NULL);
}
-
#endif
void usage(char *progname) {
printf(" They are listed at the end of this text.\n");
printf(" If no mdns backend is specified, they are tried in order until one works.\n");
printf(" -r, --resync=THRESHOLD [Deprecated] resync if error exceeds this number of frames. Set to 0 to stop resyncing.\n");
- printf(" -t, --timeout=SECONDS Go back to idle mode from play mode after a break in communications of this many seconds (default 120). Set to 0 never to exit play mode.\n");
+ printf(" -t, --timeout=SECONDS Go back to idle mode from play mode after a break in communications of this many seconds (default 60). Set to 0 never to exit play mode.\n");
printf(" --tolerance=TOLERANCE [Deprecated] Allow a synchronization error of TOLERANCE frames (default 88) before trying to correct it.\n");
printf(" --logOutputLevel Log the output level setting -- a debugging option, useful for determining the optimum maximum volume.\n");
#ifdef CONFIG_LIBDAEMON
// strings will dangle.
char *raw_service_name = NULL; /* Used to pick up the service name before possibly expanding it */
char *stuffing = NULL; /* used for picking up the stuffing option */
- signed char c; /* used for argument parsing */
+#if defined(CONFIG_DBUS_INTERFACE) || defined(CONFIG_MPRIS_INTERFACE)
+ char *dbus_default_message_bus =
+ NULL; /* used for picking the "system" or "session" bus as the default */
+#endif
+ signed char c; /* used for argument parsing */
// int i = 0; /* used for tracking options */
int resync_threshold_in_frames = 0;
int tolerance_in_frames = 0;
poptContext optCon; /* context for parsing command-line options */
struct poptOption optionsTable[] = {
- {"verbose", 'v', POPT_ARG_NONE, NULL, 'v', NULL, NULL},
- {"kill", 'k', POPT_ARG_NONE, &killOption, 0, NULL, NULL},
- {"daemon", 'd', POPT_ARG_NONE, &daemonisewith, 0, NULL, NULL},
- {"justDaemoniseNoPIDFile", 'j', POPT_ARG_NONE, &daemonisewithout, 0, NULL, NULL},
- {"configfile", 'c', POPT_ARG_STRING, &config.configfile, 0, NULL, NULL},
- {"statistics", 0, POPT_ARG_NONE, &config.statistics_requested, 0, NULL, NULL},
- {"logOutputLevel", 0, POPT_ARG_NONE, &config.logOutputLevel, 0, NULL, NULL},
- {"version", 'V', POPT_ARG_NONE, NULL, 0, NULL, NULL},
- {"displayConfig", 'X', POPT_ARG_NONE, &display_config_selected, 0, NULL, NULL},
- {"port", 'p', POPT_ARG_INT, &config.port, 0, NULL, NULL},
- {"name", 'a', POPT_ARG_STRING, &raw_service_name, 0, NULL, NULL},
- {"output", 'o', POPT_ARG_STRING, &config.output_name, 0, NULL, NULL},
- {"on-start", 'B', POPT_ARG_STRING, &config.cmd_start, 0, NULL, NULL},
- {"on-stop", 'E', POPT_ARG_STRING, &config.cmd_stop, 0, NULL, NULL},
- {"wait-cmd", 'w', POPT_ARG_NONE, &config.cmd_blocking, 0, NULL, NULL},
- {"mdns", 'm', POPT_ARG_STRING, &config.mdns_name, 0, NULL, NULL},
- {"latency", 'L', POPT_ARG_INT, &config.userSuppliedLatency, 0, NULL, NULL},
- {"stuffing", 'S', POPT_ARG_STRING, &stuffing, 'S', NULL, NULL},
- {"resync", 'r', POPT_ARG_INT, &resync_threshold_in_frames, 'r', NULL, NULL},
- {"timeout", 't', POPT_ARG_INT, &config.timeout, 't', NULL, NULL},
- {"password", 0, POPT_ARG_STRING, &config.password, 0, NULL, NULL},
- {"tolerance", 'z', POPT_ARG_INT, &tolerance_in_frames, 'z', NULL, NULL},
- {"use-stderr", 'u', POPT_ARG_NONE, NULL, 'u', NULL, NULL},
- {"log-to-syslog", 0, POPT_ARG_NONE, &log_to_syslog_selected, 0, NULL, NULL},
+ {"verbose", 'v', POPT_ARG_NONE, NULL, 'v', NULL, NULL},
+ {"kill", 'k', POPT_ARG_NONE, &killOption, 0, NULL, NULL},
+ {"daemon", 'd', POPT_ARG_NONE, &daemonisewith, 0, NULL, NULL},
+ {"justDaemoniseNoPIDFile", 'j', POPT_ARG_NONE, &daemonisewithout, 0, NULL, NULL},
+ {"configfile", 'c', POPT_ARG_STRING, &config.configfile, 0, NULL, NULL},
+ {"statistics", 0, POPT_ARG_NONE, &config.statistics_requested, 0, NULL, NULL},
+ {"logOutputLevel", 0, POPT_ARG_NONE, &config.logOutputLevel, 0, NULL, NULL},
+ {"version", 'V', POPT_ARG_NONE, NULL, 0, NULL, NULL},
+ {"displayConfig", 'X', POPT_ARG_NONE, &display_config_selected, 0, NULL, NULL},
+ {"port", 'p', POPT_ARG_INT, &config.port, 0, NULL, NULL},
+ {"name", 'a', POPT_ARG_STRING, &raw_service_name, 0, NULL, NULL},
+ {"output", 'o', POPT_ARG_STRING, &config.output_name, 0, NULL, NULL},
+ {"on-start", 'B', POPT_ARG_STRING, &config.cmd_start, 0, NULL, NULL},
+ {"on-stop", 'E', POPT_ARG_STRING, &config.cmd_stop, 0, NULL, NULL},
+ {"wait-cmd", 'w', POPT_ARG_NONE, &config.cmd_blocking, 0, NULL, NULL},
+ {"mdns", 'm', POPT_ARG_STRING, &config.mdns_name, 0, NULL, NULL},
+ {"latency", 'L', POPT_ARG_INT, &config.userSuppliedLatency, 0, NULL, NULL},
+ {"stuffing", 'S', POPT_ARG_STRING, &stuffing, 'S', NULL, NULL},
+ {"resync", 'r', POPT_ARG_INT, &resync_threshold_in_frames, 'r', NULL, NULL},
+ {"timeout", 't', POPT_ARG_INT, &config.timeout, 't', NULL, NULL},
+ {"password", 0, POPT_ARG_STRING, &config.password, 0, NULL, NULL},
+#if defined(CONFIG_DBUS_INTERFACE) || defined(CONFIG_MPRIS_INTERFACE)
+ {"dbus-default-message-bus", 0, POPT_ARG_STRING, &dbus_default_message_bus, 0, NULL, NULL},
+#endif
+ {"tolerance", 'z', POPT_ARG_INT, &tolerance_in_frames, 'z', NULL, NULL},
+ {"use-stderr", 'u', POPT_ARG_NONE, NULL, 'u', NULL, NULL},
+ {"log-to-syslog", 0, POPT_ARG_NONE, &log_to_syslog_selected, 0, NULL, NULL},
#ifdef CONFIG_METADATA
- {"metadata-enable", 'M', POPT_ARG_NONE, &config.metadata_enabled, 'M', NULL, NULL},
- {"metadata-pipename", 0, POPT_ARG_STRING, &config.metadata_pipename, 0, NULL, NULL},
- {"get-coverart", 'g', POPT_ARG_NONE, &config.get_coverart, 'g', NULL, NULL},
+ {"metadata-enable", 'M', POPT_ARG_NONE, &config.metadata_enabled, 'M', NULL, NULL},
+ {"metadata-pipename", 0, POPT_ARG_STRING, &config.metadata_pipename, 0, NULL, NULL},
+ {"get-coverart", 'g', POPT_ARG_NONE, &config.get_coverart, 'g', NULL, NULL},
#endif
- POPT_AUTOHELP{NULL, 0, 0, NULL, 0, NULL, NULL}};
+ POPT_AUTOHELP{NULL, 0, 0, NULL, 0, NULL, NULL}
+ };
// we have to parse the command line arguments to look for a config file
int optind;
poptSetOtherOptionHelp(optCon, "[OPTIONS]* ");
/* Now do options processing just to get a debug log destination and level */
- debuglev = 0;
while ((c = poptGetNextOpt(optCon)) >= 0) {
switch (c) {
case 'v':
- debuglev++;
+ increase_debug_level();
break;
case 'u':
inform("Warning: the option -u is no longer needed and is deprecated. Debug and statistics "
break;
case 'r':
config.resync_threshold = (resync_threshold_in_frames * 1.0) / 44100;
- inform("Warning: the option -r or --resync is deprecated. Please use the "
+ inform("Warning: the option -r or --resync is deprecated and ignored!\nPlease use the "
"\"resync_threshold_in_seconds\" setting in the config file instead.");
break;
case 'z':
config.tolerance = (tolerance_in_frames * 1.0) / 44100;
- inform("Warning: the option --tolerance is deprecated. Please use the "
+    inform("Warning: the option --tolerance is deprecated and ignored!\nPlease use the "
"\"drift_tolerance_in_seconds\" setting in the config file instead.");
break;
}
poptFreeContext(optCon);
+ if (config.timeout != 0) {
+ if (config.timeout < 60) {
+      inform("Note: the timeout value is invalid -- it must be 0 (i.e. no timeout) or at least 60. "
+ "Set to the default value of 60 seconds instead.");
+ config.timeout = 60;
+ }
+ }
+
if (log_to_syslog_selected) {
// if this was the first command line argument, it'll already have been chosen
if (log_to_syslog_select_is_first_command_line_argument == 0) {
config.audio_backend_silent_lead_in_time_auto =
1; // start outputting silence as soon as packets start arriving
config.default_airplay_volume = -24.0;
- config.high_threshold_airplay_volume =
- -16.0; // if the volume exceeds this, reset to the default volume if idle for the
- // limit_to_high_volume_threshold_time_in_minutes time
- config.limit_to_high_volume_threshold_time_in_minutes =
- 0; // after this time in minutes, if the volume is higher, use the default_airplay_volume
- // volume for new play sessions.
config.fixedLatencyOffset = 11025; // this sounds like it works properly.
config.diagnostic_drop_packet_fraction = 0.0;
config.active_state_timeout = 10.0;
// seconds then we can add an offset of 5.17 seconds and still leave a second's worth of buffers
// for unexpected circumstances
+ config.model = strdup("ShairportSync");
+ // config.model = strdup("AudioAccessory5,1");
+
+ // config.srcvers = strdup(PACKAGE_VERSION);
+ // config.srcvers = strdup("760.13.1");
+
+ config.srcvers = strdup("366.0");
+
+ // config.osvers = strdup(VERSION);
+ config.osvers = strdup("15.0");
+
+ // make up a firmware version
+#ifdef CONFIG_USE_GIT_VERSION_STRING
+ if (git_version_string[0] != '\0')
+ config.firmware_version = strdup(git_version_string);
+ else
+#endif
+ config.firmware_version = strdup(PACKAGE_VERSION);
+
#ifdef CONFIG_METADATA
/* Get the metadata setting. */
config.metadata_enabled = 1; // if metadata support is included, then enable it by default
#endif
#ifdef CONFIG_CONVOLUTION
- config.convolution_max_length = 8192;
+ config.convolution_max_length_in_seconds = 1.0;
+ config.convolution_gain = -4.0;
+ config.convolution_threads = 1; // This is to merely to minimise potential power supply noise some
+ // CPUs make switching cores on and off. E.g. Pi 3.
#endif
- config.loudness_reference_volume_db = -20;
+ config.loudness_reference_volume_db = -16;
#ifdef CONFIG_METADATA_HUB
config.cover_art_cache_dir = "/tmp/shairport-sync/.cache/coverart";
uint64_t mask =
((uint64_t)1 << 17) | ((uint64_t)1 << 16) | ((uint64_t)1 << 15) | ((uint64_t)1 << 50);
config.airplay_features =
+ // 0x114BD04A5FCA00 & (~mask);
0x1C340405D4A00 & (~mask); // APX + Authentication4 (b14) with no metadata (see below)
+
+ /*
+ config.airplay_features |= (uint64_t)1 << 21; // Audio Format 4
+
+ config.airplay_features |= (uint64_t)1 << 25; // Unknown
+ config.airplay_features |= (uint64_t)1 << 36; // Unknown
+ config.airplay_features |= (uint64_t)1 << 39; // Unknown
+ config.airplay_features |= (uint64_t)1 << 43; // Supports System Pairing
+ config.airplay_features |= (uint64_t)1 << 47; // Unknown
+
+ // 0xB
+ config.airplay_features |= (uint64_t)1 << 63; // Unknown
+ config.airplay_features |= (uint64_t)1 << 61; // Unknown
+ config.airplay_features |= (uint64_t)1 << 60; // Unknown
+ // ...0xC
+ // // config.airplay_features |= (uint64_t)1 << 59; // Unknown
+ config.airplay_features |= (uint64_t)1 << 58; // Unknown
+
+ // ...0x3
+ config.airplay_features |= (uint64_t)1 << 53; // Unknown
+ config.airplay_features |= (uint64_t)1 << 52; // Unknown
+ */
+
+ // config.airplay_features |= ((uint64_t)1 << 58) | ((uint64_t)1 << 60) | ((uint64_t)1 << 58);
+
+ // 60 seems to interfere with disconnecting from a group
+
// Advertised with mDNS and returned with GET /info, see
// https://openairplay.github.io/airplay-spec/status_flags.html 0x4: Audio cable attached, no PIN
// required (transient pairing), 0x204: Audio cable attached, OneTimePairingRequired 0x604: Audio
// cable attached, OneTimePairingRequired, device was setup for Homekit access control
- config.airplay_statusflags = 0x04;
+ config.airplay_statusflags = 0;
+ config.airplay_statusflags |= 1 << 2; // Audio cable is attached
+ // config.airplay_statusflags |= 1 << 10; // DeviceWasSetupForHKAccessControl
+ // config.airplay_statusflags |= 1 << 11; // DeviceSupportsRelay
+ // config.airplay_statusflags |= 1 << 19; // Unknown. Seems to control whether individual volume
+ // controls are shown and whether the SPS devices shows when its active.
+
// Set to NULL to work with transient pairing
config.airplay_pin = NULL;
uuid_unparse_lower(binuuid, uuid);
config.airplay_pi = uuid;
+ char *pgid_uuid = malloc(UUID_STR_LEN + 1); // leave space for the NUL at the end
+ uuid_generate_random(binuuid);
+ uuid_unparse_lower(binuuid, pgid_uuid);
+ config.airplay_pgid = pgid_uuid;
+
#endif
// config_setting_t *setting;
- const char *str = 0;
+ const char *str = NULL;
int value = 0;
double dvalue = 0.0;
if (config_file_real_path == NULL) {
debug(2, "can't resolve the configuration file \"%s\".", config.configfile);
} else {
- debug(2, "looking for configuration file at full path \"%s\"", config_file_real_path);
+ debug(1, "looking for configuration file at full path \"%s\"", config_file_real_path);
/* Read the file. If there is an error, report it and exit. */
if (config_read_file(&config_file_stuff, config_file_real_path)) {
config_set_auto_convert(&config_file_stuff,
config.cfg = &config_file_stuff;
/* Get the Service Name. */
- if (config_lookup_string(config.cfg, "general.name", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "general.name", &str)) {
raw_service_name = (char *)str;
}
#ifdef CONFIG_LIBDAEMON
&daemonisewithout);
/* Get the directory path for the pid file created when the program is daemonised. */
- if (config_lookup_string(config.cfg, "sessioncontrol.daemon_pid_dir", &str))
+ if (config_lookup_non_empty_string(config.cfg, "sessioncontrol.daemon_pid_dir", &str))
config.piddir = (char *)str;
#endif
/* Get the mdns_backend setting. */
- if (config_lookup_string(config.cfg, "general.mdns_backend", &str))
+ if (config_lookup_non_empty_string(config.cfg, "general.mdns_backend", &str))
config.mdns_name = (char *)str;
/* Get the output_backend setting. */
- if (config_lookup_string(config.cfg, "general.output_backend", &str))
+ if (config_lookup_non_empty_string(config.cfg, "general.output_backend", &str))
config.output_name = (char *)str;
/* Get the port setting. */
if (config_lookup_int(config.cfg, "general.port", &value)) {
if ((value < 0) || (value > 65535))
#ifdef CONFIG_AIRPLAY_2
- die("Invalid port number \"%sd\". It should be between 0 and 65535, default is 7000",
+ die("Invalid port number \"%d\". It should be between 0 and 65535, default is 7000",
value);
#else
- die("Invalid port number \"%sd\". It should be between 0 and 65535, default is 5000",
+ die("Invalid port number \"%d\". It should be between 0 and 65535, default is 5000",
value);
#endif
else
/* Get the udp port base setting. */
if (config_lookup_int(config.cfg, "general.udp_port_base", &value)) {
if ((value < 0) || (value > 65535))
- die("Invalid port number \"%sd\". It should be between 0 and 65535, default is 6001",
+ die("Invalid port number \"%d\". It should be between 0 and 65535, default is 6001",
value);
else
config.udp_port_base = value;
* starting at the port base. Only three ports are needed. */
if (config_lookup_int(config.cfg, "general.udp_port_range", &value)) {
if ((value < 3) || (value > 65535))
- die("Invalid port range \"%sd\". It should be between 3 and 65535, default is 10",
+ die("Invalid port range \"%d\". It should be between 3 and 65535, default is 10",
value);
else
config.udp_port_range = value;
}
/* Get the password setting. */
- if (config_lookup_string(config.cfg, "general.password", &str))
+ if (config_lookup_non_empty_string(config.cfg, "general.password", &str))
config.password = (char *)str;
if (config_lookup_string(config.cfg, "general.interpolation", &str)) {
if (strcasecmp(str, "basic") == 0)
config.packet_stuffing = ST_basic;
+ else if (strcasecmp(str, "vernier") == 0)
+ config.packet_stuffing = ST_vernier;
else if (strcasecmp(str, "auto") == 0)
config.packet_stuffing = ST_auto;
else if (strcasecmp(str, "soxr") == 0)
"support. Change the \"general/interpolation\" setting in the configuration file.");
#endif
else
- die("Invalid interpolation option choice \"%s\". It should be \"auto\", \"basic\" or "
+ die("Invalid interpolation option choice \"%s\". It should be \"auto\", \"basic\", "
+ "\"vernier\" or "
"\"soxr\"",
str);
}
/* The old drift tolerance setting. */
if (config_lookup_int(config.cfg, "general.drift", &value)) {
- inform("The drift setting is deprecated. Use "
+ inform("The drift setting is deprecated and ignored. Please use "
"drift_tolerance_in_seconds instead");
- config.tolerance = 1.0 * value / 44100;
}
/* The old resync setting. */
if (config_lookup_int(config.cfg, "general.resync_threshold", &value)) {
- inform("The resync_threshold setting is deprecated. Use "
+ inform("The resync_threshold setting is deprecated and ignored. Please use "
"resync_threshold_in_seconds instead");
- config.resync_threshold = 1.0 * value / 44100;
}
/* Get the drift tolerance setting. */
if (config_lookup_float(config.cfg, "general.resync_threshold_in_seconds", &dvalue))
config.resync_threshold = dvalue;
- /* Get the resync recovery time setting. */
- if (config_lookup_float(config.cfg, "general.resync_recovery_time_in_seconds", &dvalue))
- config.resync_recovery_time = dvalue;
-
/* Get the verbosity setting. */
if (config_lookup_int(config.cfg, "general.log_verbosity", &value)) {
warn("The \"general\" \"log_verbosity\" setting is deprecated. Please use the "
"\"diagnostics\" \"log_verbosity\" setting instead.");
if ((value >= 0) && (value <= 3))
- debuglev = value;
+ set_debug_level(value);
else
die("Invalid log verbosity setting option choice \"%d\". It should be between 0 and 3, "
"inclusive.",
/* Get the verbosity setting. */
if (config_lookup_int(config.cfg, "diagnostics.log_verbosity", &value)) {
if ((value >= 0) && (value <= 3))
- debuglev = value;
+ set_debug_level(value);
else
die("Invalid diagnostics log_verbosity setting option choice \"%d\". It should be "
"between 0 and 3, "
if ((dvalue >= 0.0) && (dvalue <= 3.0))
config.diagnostic_drop_packet_fraction = dvalue;
else
- die("Invalid diagnostics drop_this_fraction_of_audio_packets setting \"%d\". It should "
+ die("Invalid diagnostics drop_this_fraction_of_audio_packets setting \"%f\". It should "
"be "
"between 0.0 and 1.0, "
"inclusive.",
}
}
- /* Get the optional high_volume_threshold setting. */
- if (config_lookup_float(config.cfg, "general.high_threshold_airplay_volume", &dvalue)) {
- // debug(1, "High threshold airplay volume setting of %f on the -30.0 to 0 scale", dvalue);
- if ((dvalue >= -30.0) && (dvalue <= 0.0)) {
- config.high_threshold_airplay_volume = dvalue;
- } else {
- warn("The high threshold airplay volume setting must be between -30.0 and 0.0.");
- }
- }
-
- /* Get the optional high volume idle tiomeout setting. */
- if (config_lookup_float(config.cfg, "general.high_volume_idle_timeout_in_minutes", &dvalue)) {
- // debug(1, "High high_volume_idle_timeout_in_minutes setting of %f", dvalue);
- if (dvalue >= 0.0) {
- config.limit_to_high_volume_threshold_time_in_minutes = dvalue;
- } else {
- warn("The high volume idle timeout in minutes setting must be 0.0 or greater. A setting "
- "of 0.0 disables the high volume check.");
- }
- }
-
- if (config_lookup_string(config.cfg, "general.run_this_when_volume_is_set", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "general.run_this_when_volume_is_set", &str)) {
config.cmd_set_volume = (char *)str;
}
/* Get the interface to listen on, if specified Default is all interfaces */
/* we keep the interface name and the index */
- if (config_lookup_string(config.cfg, "general.interface", &str))
- config.interface = strdup(str);
-
if (config_lookup_string(config.cfg, "general.interface", &str)) {
+ config.interface = strdup(str);
config.interface_index = if_nametoindex(config.interface);
if (config.interface_index == 0) {
/* Get the regtype -- the service type and protocol, separated by a dot. Default is
* "_raop._tcp" */
- if (config_lookup_string(config.cfg, "general.regtype", &str))
+ if (config_lookup_non_empty_string(config.cfg, "general.regtype", &str))
config.regtype = strdup(str);
/* Get the volume range, in dB, that should be used If not set, it means you just use the
/* Get the alac_decoder setting. */
if (config_lookup_string(config.cfg, "general.alac_decoder", &str)) {
- if (strcasecmp(str, "hammerton") == 0)
- config.use_apple_decoder = 0;
- else if (strcasecmp(str, "apple") == 0) {
+ if (strcasecmp(str, "hammerton") == 0) {
+ if ((config.decoders_supported & 1 << decoder_hammerton) != 0)
+ config.decoder_in_use = 1 << decoder_hammerton; // use David Hammerton's ALAC decoder
+ else
+ inform(
+ "Support for the Hammerton ALAC decoder has not been compiled into this version of "
+ "Shairport Sync. The default decoder will be used.");
+ } else if (strcasecmp(str, "apple") == 0) {
if ((config.decoders_supported & 1 << decoder_apple_alac) != 0)
- config.use_apple_decoder = 1;
+ config.decoder_in_use = 1 << decoder_apple_alac; // use the Apple ALAC decoder
else
inform("Support for the Apple ALAC decoder has not been compiled into this version of "
"Shairport Sync. The default decoder will be used.");
+ } else if (strcasecmp(str, "ffmpeg") == 0) {
+ if ((config.decoders_supported & 1 << decoder_ffmpeg_alac) != 0)
+ config.decoder_in_use = 1 << decoder_ffmpeg_alac; // use the FFMPEG ALAC decoder
+ else
+ inform("Support for the FFMPEG ALAC decoder has not been compiled into this version of "
+ "Shairport Sync. The default decoder will be used.");
} else
- die("Invalid alac_decoder option choice \"%s\". It should be \"hammerton\" or \"apple\"",
+ die("Invalid alac_decoder option choice \"%s\". It should be \"ffmpeg\", \"hammerton\" "
+ "or \"apple\"",
str);
}
str);
}
- if (config_lookup_string(config.cfg, "metadata.pipe_name", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "metadata.pipe_name", &str)) {
config.metadata_pipename = (char *)str;
}
config.metadata_progress_interval = dvalue;
}
- if (config_lookup_string(config.cfg, "metadata.socket_address", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "metadata.socket_address", &str)) {
config.metadata_sockaddr = (char *)str;
}
if (config_lookup_int(config.cfg, "metadata.socket_port", &value)) {
#endif
#ifdef CONFIG_METADATA_HUB
- if (config_lookup_string(config.cfg, "metadata.cover_art_cache_directory", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "metadata.cover_art_cache_directory", &str)) {
config.cover_art_cache_dir = (char *)str;
}
}
#endif
- if (config_lookup_string(config.cfg, "sessioncontrol.run_this_before_play_begins", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "sessioncontrol.run_this_before_play_begins",
+ &str)) {
config.cmd_start = (char *)str;
}
- if (config_lookup_string(config.cfg, "sessioncontrol.run_this_after_play_ends", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "sessioncontrol.run_this_after_play_ends",
+ &str)) {
config.cmd_stop = (char *)str;
}
- if (config_lookup_string(config.cfg, "sessioncontrol.run_this_before_entering_active_state",
- &str)) {
+ if (config_lookup_non_empty_string(
+ config.cfg, "sessioncontrol.run_this_before_entering_active_state", &str)) {
config.cmd_active_start = (char *)str;
}
- if (config_lookup_string(config.cfg, "sessioncontrol.run_this_after_exiting_active_state",
- &str)) {
+ if (config_lookup_non_empty_string(
+ config.cfg, "sessioncontrol.run_this_after_exiting_active_state", &str)) {
config.cmd_active_stop = (char *)str;
}
config.active_state_timeout = dvalue;
}
- if (config_lookup_string(config.cfg,
- "sessioncontrol.run_this_if_an_unfixable_error_is_detected", &str)) {
+ if (config_lookup_non_empty_string(
+ config.cfg, "sessioncontrol.run_this_if_an_unfixable_error_is_detected", &str)) {
config.cmd_unfixable = (char *)str;
}
}
if (config_lookup_int(config.cfg, "sessioncontrol.session_timeout", &value)) {
- config.timeout = value;
- config.dont_check_timeout = 0; // this is for legacy -- only set by -t 0
+ if (value == 0) {
+ config.dont_check_timeout = 1;
+ } else if (value < 60) {
+ warn("Invalid value \"%d\" for \"session_timeout\". It must be 0 (i.e. no timeout) or at "
+ "least 60. "
+ "The default of %d will be used instead.",
+ value, config.timeout);
+ config.dont_check_timeout = 0;
+ } else {
+ config.timeout = value;
+ config.dont_check_timeout = 0;
+ }
}
#ifdef CONFIG_CONVOLUTION
+
if (config_lookup_string(config.cfg, "dsp.convolution", &str)) {
if (strcasecmp(str, "no") == 0)
- config.convolution = 0;
- else if (strcasecmp(str, "yes") == 0)
- config.convolution = 1;
- else
- die("Invalid dsp.convolution setting \"%s\". It should be \"yes\" or \"no\"", str);
+ config.convolution_enabled = 0;
+ else if (strcasecmp(str, "yes") == 0) {
+ config.convolution_enabled = 1;
+ }
+ warn("the \"dsp\" \"convolution\" setting is deprecated and will be removed due to its "
+ "potential ambiguity. Please use \"convolution_enabled\" instead.");
+ }
+
+ if (config_lookup_string(config.cfg, "dsp.convolution_enabled", &str)) {
+ if (strcasecmp(str, "no") == 0)
+ config.convolution_enabled = 0;
+ else if (strcasecmp(str, "yes") == 0) {
+ config.convolution_enabled = 1;
+ } else
+ die("Invalid dsp.convolution_enabled setting \"%s\". It should be \"yes\" or \"no\"",
+ str);
+ }
+
+ if (config_lookup_int(config.cfg, "dsp.convolution_thread_pool_size", &value)) {
+ if ((value >= 1) && (value <= 64)) {
+ config.convolution_threads = value;
+ } else {
+        warn("Invalid value \"%d\" for \"convolution_thread_pool_size\". It must be between 1 "
+             "and 64. "
+             "The default of %d will be used instead.",
+             value, config.convolution_threads);
+ }
}
if (config_lookup_float(config.cfg, "dsp.convolution_gain", &dvalue)) {
config.convolution_gain = dvalue;
- if (dvalue > 10 || dvalue < -50)
- die("Invalid value \"%f\" for dsp.convolution_gain. It should be between -50 and +10 dB",
+ if (dvalue > 18 || dvalue < -60)
+ die("Invalid value \"%f\" for dsp.convolution_gain. It should be between -60 and +18 dB",
dvalue);
}
if (config_lookup_int(config.cfg, "dsp.convolution_max_length", &value)) {
- config.convolution_max_length = value;
-
+ config.convolution_max_length_in_seconds = (double)value / 44100;
+ warn("the \"dsp\" \"convolution_max_length\" setting is deprecated, as it assumes a fixed "
+ "sample rate of 44,100. It will be removed. "
+ "Please use convolution_max_length_in_seconds instead.");
if (value < 1 || value > 200000)
die("dsp.convolution_max_length must be within 1 and 200000");
}
- if (config_lookup_string(config.cfg, "dsp.convolution_ir_file", &str)) {
- config.convolution_ir_file = strdup(str);
- config.convolver_valid =
- convolver_init(config.convolution_ir_file, config.convolution_max_length);
+ if (config_lookup_float(config.cfg, "dsp.convolution_max_length_in_seconds", &dvalue)) {
+
+ if (dvalue > 20 || dvalue < 0) {
+ warn("Invalid value \"%f\" for dsp.convolution_max_length_in_seconds -- ignored. It "
+             "should be between 0 and 20. It is set to %.1f.",
+ dvalue, config.convolution_max_length_in_seconds);
+ } else {
+ config.convolution_max_length_in_seconds = dvalue;
+ }
+ }
+
+ if (config_lookup_non_empty_string(config.cfg, "dsp.convolution_ir_file", &str)) {
+ warn(
+ "the \"dsp\" \"convolution_ir_file\" setting is deprecated and will be removed. Please "
+ "use \"convolution_ir_files\" instead, which allows multiple comma-separated files.");
+ config.convolution_ir_files = parse_ir_filenames(str, &config.convolution_ir_file_count);
}
- if (config.convolution && config.convolution_ir_file == NULL) {
- warn("Convolution enabled but no convolution_ir_file provided");
+ if (config_lookup_non_empty_string(config.cfg, "dsp.convolution_ir_files", &str)) {
+ config.convolution_ir_files = parse_ir_filenames(str, &config.convolution_ir_file_count);
}
#endif
+
if (config_lookup_string(config.cfg, "dsp.loudness", &str)) {
if (strcasecmp(str, "no") == 0)
- config.loudness = 0;
- else if (strcasecmp(str, "yes") == 0)
- config.loudness = 1;
- else
- die("Invalid dsp.loudness \"%s\". It should be \"yes\" or \"no\"", str);
+ config.loudness_enabled = 0;
+ else if (strcasecmp(str, "yes") == 0) {
+ config.loudness_enabled = 1;
+ }
+ warn("the \"dsp\" \"loudness\" setting is deprecated and will be removed due to its "
+ "potential ambiguity. Please use \"loudness_enabled\" instead.");
+ }
+
+ if (config_lookup_string(config.cfg, "dsp.loudness_enabled", &str)) {
+ if (strcasecmp(str, "no") == 0)
+ config.loudness_enabled = 0;
+ else if (strcasecmp(str, "yes") == 0) {
+ config.loudness_enabled = 1;
+ } else
+ die("Invalid dsp.loudness_enabled \"%s\". It should be \"yes\" or \"no\"", str);
}
if (config_lookup_float(config.cfg, "dsp.loudness_reference_volume_db", &dvalue)) {
dvalue);
}
- if (config.loudness == 1 && config_lookup_string(config.cfg, "alsa.mixer_control_name", &str))
- die("Loudness activated but hardware volume is active. You must remove "
- "\"alsa.mixer_control_name\" to use the loudness filter.");
+ if (config.loudness_enabled == 1 &&
+ config_lookup_non_empty_string(config.cfg, "alsa.mixer_control_name", &str))
+ die("The loudness filter is activated but cannot be used because the volume is being "
+ "controlled by a hardware mixer. "
+ "You must not use a hardware mixer when using the loudness filter.");
} else {
if (config_error_type(&config_file_stuff) == CONFIG_ERR_FILE_IO)
if (config.mqtt_enabled && !config.metadata_enabled) {
die("You need to have metadata enabled in order to use mqtt");
}
- if (config_lookup_string(config.cfg, "mqtt.hostname", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.hostname", &str)) {
config.mqtt_hostname = (char *)str;
// TODO: Document that, if this is false, whole mqtt func is disabled
}
config.mqtt_port = 1883;
if (config_lookup_int(config.cfg, "mqtt.port", &value)) {
if ((value < 0) || (value > 65535))
- die("Invalid mqtt port number \"%sd\". It should be between 0 and 65535, default is 1883",
+ die("Invalid mqtt port number \"%d\". It should be between 0 and 65535, default is 1883",
value);
else
config.mqtt_port = value;
}
- if (config_lookup_string(config.cfg, "mqtt.username", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.username", &str)) {
config.mqtt_username = (char *)str;
}
- if (config_lookup_string(config.cfg, "mqtt.password", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.password", &str)) {
config.mqtt_password = (char *)str;
}
int capath = 0;
- if (config_lookup_string(config.cfg, "mqtt.capath", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.capath", &str)) {
config.mqtt_capath = (char *)str;
capath = 1;
}
- if (config_lookup_string(config.cfg, "mqtt.cafile", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.cafile", &str)) {
if (capath)
die("Supply either mqtt cafile or mqtt capath -- you have supplied both!");
config.mqtt_cafile = (char *)str;
}
int certkeynum = 0;
- if (config_lookup_string(config.cfg, "mqtt.certfile", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.certfile", &str)) {
config.mqtt_certfile = (char *)str;
certkeynum++;
}
- if (config_lookup_string(config.cfg, "mqtt.keyfile", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.keyfile", &str)) {
config.mqtt_keyfile = (char *)str;
certkeynum++;
}
"If you do not want to use TLS Client Authentication, leave both empty.");
}
- if (config_lookup_string(config.cfg, "mqtt.topic", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.topic", &str)) {
config.mqtt_topic = (char *)str;
}
config_set_lookup_bool(config.cfg, "mqtt.publish_raw", &config.mqtt_publish_raw);
config_set_lookup_bool(config.cfg, "mqtt.publish_parsed", &config.mqtt_publish_parsed);
config_set_lookup_bool(config.cfg, "mqtt.publish_cover", &config.mqtt_publish_cover);
+ config_set_lookup_bool(config.cfg, "mqtt.publish_retain", &config.mqtt_publish_retain);
if (config.mqtt_publish_cover && !config.get_coverart) {
die("You need to have metadata.include_cover_art enabled in order to use mqtt.publish_cover");
}
- config_set_lookup_bool(config.cfg, "mqtt.enable_autodiscovery", &config.mqtt_enable_autodiscovery);
- if (config_lookup_string(config.cfg, "mqtt.autodiscovery_prefix", &str)) {
+ config_set_lookup_bool(config.cfg, "mqtt.enable_autodiscovery",
+ &config.mqtt_enable_autodiscovery);
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.autodiscovery_prefix", &str)) {
config.mqtt_autodiscovery_prefix = (char *)str;
}
config_set_lookup_bool(config.cfg, "mqtt.enable_remote", &config.mqtt_enable_remote);
- if (config_lookup_string(config.cfg, "mqtt.empty_payload_substitute", &str)) {
+ if (config_lookup_non_empty_string(config.cfg, "mqtt.empty_payload_substitute", &str)) {
if (strlen(str) == 0)
config.mqtt_empty_payload_substitute = NULL;
else
case 'S':
if (strcmp(stuffing, "basic") == 0)
config.packet_stuffing = ST_basic;
+ else if (strcmp(stuffing, "vernier") == 0)
+ config.packet_stuffing = ST_vernier;
else if (strcmp(stuffing, "auto") == 0)
config.packet_stuffing = ST_auto;
else if (strcmp(stuffing, "soxr") == 0)
"support. Change the -S option setting.");
#endif
else
- die("Illegal stuffing option \"%s\" -- must be \"basic\" or \"soxr\"", stuffing);
+ die("Illegal stuffing option \"%s\" -- must be \"auto\", \"vernier\", \"basic\" or "
+ "\"soxr\"",
+ stuffing);
break;
}
}
poptFreeContext(optCon);
+#if defined(CONFIG_DBUS_INTERFACE) || defined(CONFIG_MPRIS_INTERFACE)
+ // now check to see if a dbus service bus was given
+ if (dbus_default_message_bus != NULL) {
+ if (strcasecmp(dbus_default_message_bus, "system") == 0)
+ config.dbus_default_message_bus = DBT_system;
+ else if (strcasecmp(dbus_default_message_bus, "session") == 0)
+ config.dbus_default_message_bus = DBT_session;
+ else
+ die("Invalid dbus_default_message_bus option choice \"%s\". It should be \"system\" "
+ "(default) or "
+ "\"session\".",
+ dbus_default_message_bus);
+ }
+#endif
+
// here, we are finally finished reading the options
// finish the Airplay 2 options
config.airplay_device_id = strdup(apids);
+ // Create an airplay psi UUID based on the ap1_prefix.
+
+ // a uuid_t and an md5 hash are both 128 bits, 16 bytes
+ uuid_t result;
+ memset(result, 0, sizeof(result));
+ memcpy(result, config.ap1_prefix, sizeof(result));
+
+ // OpenSSL is mandatory for AirPlay 2 anyway
+#ifdef CONFIG_OPENSSL
+ EVP_MD_CTX *mdctx = EVP_MD_CTX_new();
+ EVP_DigestInit_ex(mdctx, EVP_md5(), NULL);
+ EVP_DigestUpdate(mdctx, config.ap1_prefix, sizeof(config.ap1_prefix));
+ unsigned int md5_digest_len = EVP_MD_size(EVP_md5());
+ EVP_DigestFinal_ex(mdctx, result, &md5_digest_len);
+ EVP_MD_CTX_free(mdctx);
+#endif
+
+ // now, convert it into a type 4 UUID
+ // see https://stackoverflow.com/questions/10867405/generating-v5-uuid-what-is-name-and-namespace
+ // set the version nibble to 4 to indicate a version 4 (random-style) UUID
+
+ result[6] &= 0x0F;
+ result[6] |= 0x40;
+
+ // set upper two bits to "10"
+ result[8] &= 0x3F;
+ result[8] |= 0x80;
+
+ char *psi_uuid = malloc(UUID_STR_LEN + 1); // leave space for the NUL at the end
+ // Produces a UUID string at uuid consisting of lower-case letters
+ uuid_unparse_lower(result, psi_uuid);
+ config.airplay_psi = psi_uuid;
+ debug(3, "size of pk is %zu.", sizeof(config.airplay_pk));
+
+ pair_public_key_get(PAIR_SERVER_HOMEKIT, config.airplay_pk, config.airplay_device_id);
+ char buf[128];
+ char *ptr = buf;
+ size_t pk_index;
+ for (pk_index = 0; pk_index < sizeof(config.airplay_pk); pk_index++)
+ ptr += sprintf(ptr, "%02x", config.airplay_pk[pk_index]);
+ *ptr = '\0';
+ config.pk_string = strdup(buf);
+
#ifdef CONFIG_METADATA
// If we are asking for metadata, turn on the relevant bits
if (config.metadata_enabled != 0) {
- config.airplay_features |= (1 << 17) | (1 << 16); // 16 is progress, 17 is text
+ config.airplay_features |= (uint64_t)1 << 16; // progress, 17 is text, 50 is in a binary plist
+ config.airplay_features |= (uint64_t)1 << 17; // text, 50 is in a binary plist
+ // config.airplay_features |=
+ // (uint64_t)1 << 50; // binary plist
+
// If we are asking for artwork, turn on the relevant bit
if (config.get_coverart)
- config.airplay_features |= (1 << 15); // 15 is artwork
+ config.airplay_features |= (uint64_t)1 << 15; // artwork
}
#endif
+ // now generate the fex field
+ uint8_t fexbytes[8];
+ uint64_t temp = config.airplay_features;
+ debug(2, "airplay_features are %" PRIx64 ".", temp);
+ for (i = 0; i < 8; i++) {
+ fexbytes[i] = temp & 0xff;
+ temp = temp >> 8;
+ }
+
+ config.airplay_fex = base64_enc(fexbytes, 8);
+ if (config.airplay_fex == NULL)
+ die("could not allocate memory for \"airplay_fex\"");
+ // strip the padding.
+ char *padding = strchr(config.airplay_fex, '=');
+ if (padding)
+ *padding = 0;
+ debug(2, "airplay_fex is \"%s\"", config.airplay_fex);
#endif
#ifdef CONFIG_LIBDAEMON
#endif
if (tdebuglev != 0)
- debuglev = tdebuglev;
+ set_debug_level(tdebuglev);
// now set the initial volume to the default volume
config.airplay_volume =
}
#if defined(CONFIG_DBUS_INTERFACE) || defined(CONFIG_MPRIS_INTERFACE)
-static GMainLoop *g_main_loop = NULL;
-pthread_t dbus_thread;
-void *dbus_thread_func(__attribute__((unused)) void *arg) {
- g_main_loop = g_main_loop_new(NULL, FALSE);
- g_main_loop_run(g_main_loop);
- debug(2, "g_main_loop thread exit");
- pthread_exit(NULL);
+GThread *glib_worker_thread = NULL;
+
+gpointer glib_worker_thread_function(__attribute__((unused)) gpointer data) {
+
+ // use the default global-default main context
+ config.glib_worker_loop = g_main_loop_new(NULL, FALSE);
+
+ // debug(1, "glib worker thread started.");
+
+#ifdef CONFIG_DBUS_INTERFACE
+ debug(2, "starting up D-Bus services");
+ start_dbus_service();
+#endif
+#ifdef CONFIG_MPRIS_INTERFACE
+ debug(2, "starting up MPRIS services");
+ start_mpris_service();
+#endif
+
+ // debug(1, "g_main_loop_run start.");
+
+ g_main_loop_run(config.glib_worker_loop);
+
+ // debug(1, "g_main_loop_run exit.");
+
+#ifdef CONFIG_MPRIS_INTERFACE
+ debug(2, "stopping MPRIS service");
+ stop_mpris_service();
+ // debug(1, "stopped MPRIS service");
+#endif
+
+#ifdef CONFIG_DBUS_INTERFACE
+ debug(2, "stopping D-Bus service");
+ stop_dbus_service();
+ // debug(1, "stopped D-Bus service");
+#endif
+
+ g_main_loop_unref(config.glib_worker_loop);
+ if (config.quit_requested_from_glib_mainloop != 0) {
+ debug(2, "glib_mainloop_thread_function asking for exit");
+ exit(EXIT_SUCCESS);
+ }
+ return NULL;
}
+
#endif
#ifdef CONFIG_LIBDAEMON
pthread_cancel(rtsp_listener_thread);
pthread_join(rtsp_listener_thread, NULL); // not sure you need this
}
- debug(3, "exit_rtsp_listener ends");
+ debug(2, "exit_rtsp_listener ends");
}
void exit_function() {
- debug(3, "exit_function begins");
if (type_of_exit_cleanup != TOE_emergency) {
// the following is to ensure that if libdaemon has been included
// that most of this code will be skipped when the parent process is exiting
#endif
#if defined(CONFIG_DBUS_INTERFACE) || defined(CONFIG_MPRIS_INTERFACE)
- /*
- Actually, there is no stop_mpris_service() function.
- #ifdef CONFIG_MPRIS_INTERFACE
- stop_mpris_service();
- #endif
- */
-#ifdef CONFIG_DBUS_INTERFACE
- debug(2, "Stopping D-Bus service");
- stop_dbus_service();
- debug(2, "Stopping D-Bus service done");
-#endif
- if (g_main_loop) {
- debug(2, "Stopping D-Bus Loop Thread");
- g_main_loop_quit(g_main_loop);
-
- // If the request to exit has come from the D-Bus system,
- // the D-Bus Loop Thread will not exit until the request is completed
- // so don't wait for it
- if (type_of_exit_cleanup != TOE_dbus)
- pthread_join(dbus_thread, NULL);
- debug(2, "Stopping D-Bus Loop Thread Done");
+ if ((glib_worker_thread != NULL) && (config.quit_requested_from_glib_mainloop == 0)) {
+ g_main_loop_quit(config.glib_worker_loop);
+ debug(2, "GMainLoop stop requested");
}
#endif
#endif
#ifdef CONFIG_CONVOLUTION
- if (config.convolution_ir_file)
- free(config.convolution_ir_file);
+ if (config.convolution_ir_files) {
+ free_ir_filenames(config.convolution_ir_files, config.convolution_ir_file_count);
+ config.convolution_ir_files = NULL;
+ config.convolution_ir_file_count = 0;
+ }
+ convolver_pool_closedown();
#endif
if (config.regtype)
free(config.regtype);
+ if (config.model)
+ free(config.model);
+ if (config.srcvers)
+ free(config.srcvers);
+ if (config.osvers)
+ free(config.osvers);
+
#ifdef CONFIG_AIRPLAY_2
if (config.regtype2)
free(config.regtype2);
free(config.airplay_pin);
if (config.airplay_pi)
free(config.airplay_pi);
+ if (config.airplay_pgid)
+ free(config.airplay_pgid);
+ if (config.airplay_psi)
+ free(config.airplay_psi);
+ if (config.pk_string)
+ free(config.pk_string);
+ if (config.firmware_version)
+ free(config.firmware_version);
ptp_shm_interface_close(); // close it if it's open
#endif
void _display_config(const char *filename, const int linenumber, __attribute__((unused)) int argc,
__attribute__((unused)) char **argv) {
+
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wformat-zero-length"
+#endif
+
_inform(filename, linenumber, ">> Display Config Start.");
// see the man entry on popen
char *i1 = str_replace(i0, "sessioncontrol : \n{\n};\n", "");
char *i2 = str_replace(i1, "alsa : \n{\n};\n", "");
char *i3 = str_replace(i2, "sndio : \n{\n};\n", "");
- char *i4 = str_replace(i3, "pa : \n{\n};\n", "");
+ char *i4 = str_replace(i3, "pulseaudio : \n{\n};\n", "");
char *i5 = str_replace(i4, "jack : \n{\n};\n", "");
char *i6 = str_replace(i5, "pipe : \n{\n};\n", "");
char *i7 = str_replace(i6, "dsp : \n{\n};\n", "");
char *i8 = str_replace(i7, "metadata : \n{\n};\n", "");
char *i9 = str_replace(i8, "mqtt : \n{\n};\n", "");
char *i10 = str_replace(i9, "diagnostics : \n{\n};\n", "");
+ char *i11 = str_replace(i10, "pipewire : \n{\n};\n", "");
+ char *i12 = str_replace(i11, "stdout : \n{\n};\n", "");
+ char *i13 = str_replace(i12, "pipe : \n{\n};\n", "");
+ char *i14 = str_replace(i13, "ao : \n{\n};\n", "");
// debug(1,"i10 is \"%s\".",i10);
// free intermediate strings
+ free(i13);
+ free(i12);
+ free(i11);
+ free(i10);
free(i9);
free(i8);
free(i7);
free(i0);
// print it out
- if (strlen(i10) == 0)
+ if (strlen(i14) == 0)
_inform(filename, linenumber, "The Configuration file contains no active settings.");
else {
_inform(filename, linenumber, "Configuration File Settings:");
- char *p = i10;
+ char *p = i14;
while (*p != '\0') {
i = 0;
while ((*p != '\0') && (*p != '\n')) {
}
}
- free(i10); // free the cleaned-up configuration string
+ free(i14); // free the cleaned-up configuration string
/*
while (fgets(result, 1024, cr) != NULL) {
}
}
_inform(filename, linenumber, "");
- _inform(filename, linenumber, ">> Display Config End.");
+ _inform(filename, linenumber, ">> Display Config End.");
+
+#if defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic pop
+#endif
}
#define display_config(argc, argv) _display_config(__FILE__, __LINE__, argc, argv)
-int main(int argc, char **argv) {
-#ifdef COMPILE_FOR_OPENBSD
- /* Start with the superset of all potentially required promises. */
- if (pledge("stdio rpath wpath cpath dpath inet unix dns proc exec audio", NULL) == -1)
- die("pledge: %s", strerror(errno));
+#ifdef CONFIG_FFMPEG
+
+/*
+typedef struct { // channel layout names and equates -- see
+ // https://www.ffmpeg.org/doxygen/2.4/group__channel__mask__c.html
+ const char *name; // e.g. "AV_CH_LAYOUT_5POINT1"
+ uint64_t channel_layout; // e.g. AV_CH_LAYOUT_5POINT1
+} channel_layout_t;
+channel_layout_t channel_layouts[] = {
+ {"AV_CH_LAYOUT_MONO", AV_CH_LAYOUT_MONO},
+ {"AV_CH_LAYOUT_STEREO", AV_CH_LAYOUT_STEREO},
+ {"AV_CH_LAYOUT_2POINT1", AV_CH_LAYOUT_2POINT1},
+ {"AV_CH_LAYOUT_2_1", AV_CH_LAYOUT_2_1},
+ {"AV_CH_LAYOUT_SURROUND", AV_CH_LAYOUT_SURROUND},
+ {"AV_CH_LAYOUT_3POINT1", AV_CH_LAYOUT_3POINT1},
+ {"AV_CH_LAYOUT_4POINT0", AV_CH_LAYOUT_4POINT0},
+ {"AV_CH_LAYOUT_4POINT1", AV_CH_LAYOUT_4POINT1},
+ {"AV_CH_LAYOUT_2_2", AV_CH_LAYOUT_2_2},
+ {"AV_CH_LAYOUT_QUAD", AV_CH_LAYOUT_QUAD},
+ {"AV_CH_LAYOUT_5POINT0", AV_CH_LAYOUT_5POINT0},
+ {"AV_CH_LAYOUT_5POINT1", AV_CH_LAYOUT_5POINT1},
+ {"AV_CH_LAYOUT_5POINT0_BACK", AV_CH_LAYOUT_5POINT0_BACK},
+ {"AV_CH_LAYOUT_5POINT1_BACK", AV_CH_LAYOUT_5POINT1_BACK},
+ {"AV_CH_LAYOUT_6POINT0", AV_CH_LAYOUT_6POINT0},
+ {"AV_CH_LAYOUT_6POINT0_FRONT", AV_CH_LAYOUT_6POINT0_FRONT},
+ {"AV_CH_LAYOUT_HEXAGONAL", AV_CH_LAYOUT_HEXAGONAL},
+ {"AV_CH_LAYOUT_6POINT1", AV_CH_LAYOUT_6POINT1},
+ {"AV_CH_LAYOUT_6POINT1_BACK", AV_CH_LAYOUT_6POINT1_BACK},
+ {"AV_CH_LAYOUT_6POINT1_FRONT", AV_CH_LAYOUT_6POINT1_FRONT},
+ {"AV_CH_LAYOUT_7POINT0", AV_CH_LAYOUT_7POINT0},
+ {"AV_CH_LAYOUT_7POINT0_FRONT", AV_CH_LAYOUT_7POINT0_FRONT},
+ {"AV_CH_LAYOUT_7POINT1", AV_CH_LAYOUT_7POINT1},
+ {"AV_CH_LAYOUT_7POINT1_WIDE", AV_CH_LAYOUT_7POINT1_WIDE},
+ {"AV_CH_LAYOUT_7POINT1_WIDE_BACK", AV_CH_LAYOUT_7POINT1_WIDE_BACK},
+ {"AV_CH_LAYOUT_OCTAGONAL", AV_CH_LAYOUT_OCTAGONAL},
+ // {"AV_CH_LAYOUT_STEREO_DOWNMIX", AV_CH_LAYOUT_STEREO_DOWNMIX},
+};
+
+uint64_t av_channel_layout_lookup(const char *str) {
+ unsigned int m;
+ uint64_t response = 0;
+ for (m = 0; (m < sizeof(channel_layouts) / sizeof(channel_layout_t)) && (response == 0); m++) {
+ if (strcasecmp(str, channel_layouts[m].name) == 0) {
+ response = channel_layouts[m].channel_layout;
+ }
+ }
+ return response;
+}
+
+const char *av_channel_layout_name(uint64_t channel_layout) {
+ const char *response = NULL;
+ unsigned int m;
+ for (m = 0; (m < sizeof(channel_layouts) / sizeof(channel_layout_t)) && (response == NULL); m++) {
+ if (channel_layouts[m].channel_layout == channel_layout) {
+ response = channel_layouts[m].name;
+ }
+ }
+ return response;
+}
+*/
+
#endif
+int main(int argc, char **argv) {
+ // initialise debug messages stuff -- level 0, no elapsed time, relative time, file and line
+ // debug_init(int level, int show_elapsed_time, int show_relative_time, int show_file_and_line)
+ debug_init(0, 0, 1, 1);
memset(&config, 0, sizeof(config)); // also clears all strings, BTW
/* Check if we are called with -V or --version parameter */
if (argc >= 2 && ((strcmp(argv[1], "-V") == 0) || (strcmp(argv[1], "--version") == 0))) {
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(58, 9, 100)
avcodec_register_all();
#endif
+ if (debug_level() == 0)
+ av_log_set_level(AV_LOG_ERROR);
+ else
+ av_log_set_level(AV_LOG_VERBOSE);
#endif
/* Check if we are called with -h or --help parameter */
pid = getpid();
config.log_fd = -1;
conns = NULL; // no connections active
- ns_time_at_startup = get_absolute_time_in_ns();
- ns_time_at_last_debug_message = ns_time_at_startup;
#ifdef CONFIG_LIBDAEMON
daemon_set_verbosity(LOG_DEBUG);
type_of_exit_cleanup = TOE_normal; // what kind of exit cleanup needed
atexit(exit_function);
- // set defaults
-
// get a device id -- the first non-local MAC address
get_device_id((uint8_t *)&config.hw_addr, 6);
config.debugger_show_file_and_line =
1; // by default, log the file and line of the originating message
config.debugger_show_relative_time =
- 1; // by default, log the time back to the previous debug message
- config.timeout = 120; // wait this number of seconds to wait for a dropped RTSP connection to come back before declaring it lost.
+ 1; // by default, log the time back to the previous debug message
+ config.timeout = 60; // wait this number of seconds to wait for a dropped RTSP connection to come
+ // back before declaring it lost.
config.buffer_start_fill = 220;
- config.resync_threshold = 0.050; // default
- config.resync_recovery_time = 0.1; // drop this amount of frames following the resync delay.
+ config.resync_threshold = 0.050; // default
config.tolerance = 0.002;
#ifdef CONFIG_AIRPLAY_2
config.packet_stuffing = ST_auto; // use soxr interpolation by default if support has been
// included and if the CPU is fast enough
#else
- config.packet_stuffing = ST_basic; // simple interpolation or deletion
+ config.packet_stuffing = ST_vernier; // you need to explicitly ask for "basic" (ST_basic)
#endif
// char hostname[100];
set_requested_connection_state_to_output(
1); // we expect to be able to connect to the output device
config.audio_backend_buffer_desired_length = 0.15; // seconds
+ config.audio_decoded_buffer_desired_length = 0.75; // seconds
config.udp_port_base = 6001;
config.udp_port_range = 10;
- config.output_format = SPS_FORMAT_S16_LE; // default
- config.output_format_auto_requested = 1; // default auto select format
- config.output_rate = 44100; // default
- config.output_rate_auto_requested = 1; // default auto select format
- config.decoders_supported =
- 1 << decoder_hammerton; // David Hammerton's decoder supported by default
+
+ config.current_output_configuration = 0; // no output configuration selected...
+
+ // config.output_format = SPS_FORMAT_S16_LE; // default
+ config.output_rate_auto_requested = 1; // default auto select format
+ config.output_format_auto_requested = 1; // default auto select format
+
+#ifdef CONFIG_HAMMERTON
+ config.decoders_supported |= 1 << decoder_hammerton; // David Hammerton's decoder (deprecated)
+ config.decoder_in_use = 1 << decoder_hammerton;
+#endif
#ifdef CONFIG_APPLE_ALAC
- config.decoders_supported += 1 << decoder_apple_alac;
- config.use_apple_decoder = 1; // use the ALAC decoder by default if support has been included
+ config.decoders_supported |= 1 << decoder_apple_alac; // Apple ALAC decoder (deprecated)
+ config.decoder_in_use = 1 << decoder_apple_alac; // If present, use this in preference
+#endif
+#ifdef CONFIG_FFMPEG
+ config.decoders_supported |= 1 << decoder_ffmpeg_alac;
+ config.decoder_in_use = 1 << decoder_ffmpeg_alac; // If present, use this in preference
#endif
+ config.output_channel_mapping_enable = 1; // enabled by default
+ config.output_channel_map_size = 0; // use the device's channel map if it has one
+ config.mixdown_enable = 1; // enabled by default
+ config.mixdown_channel_layout =
+ 0; // 0 means pick a mixdown based on the number of output channels
+
// initialise random number generator
r64init(0);
// parse arguments into config -- needed to locate pid_dir
int audio_arg = parse_options(argc, argv);
-#ifdef COMPILE_FOR_OPENBSD
- /* Any command to be executed at runtime? */
- int run_cmds = config.cmd_active_start != NULL || config.cmd_active_stop != NULL ||
- config.cmd_set_volume != NULL || config.cmd_start != NULL ||
- config.cmd_stop != NULL;
-#endif
-
// mDNS supports maximum of 63-character names (we append 13).
if (strlen(config.service_name) > 50) {
warn("The service name \"%s\" is too long (max 50 characters) and has been truncated.",
#endif
-#ifdef COMPILE_FOR_OPENBSD
- /* Past daemon(3)'s double fork(2). */
-
- /* Only user-defined commands are executed. */
- if (!run_cmds)
- /* Drop "proc exec". */
- if (pledge("stdio rpath wpath cpath dpath inet unix dns audio", NULL) == -1)
- die("pledge: %s", strerror(errno));
-#endif
-
#ifdef CONFIG_AIRPLAY_2
if (has_fltp_capable_aac_decoder() == 0) {
#endif
- debug(1, "Log Verbosity is %d.", debuglev);
+ debug(2, "Log Verbosity is %d.", debug_level());
config.output = audio_get_output(config.output_name);
if (!config.output) {
- die("Invalid audio backend \"%s\" selected!",
+ die("the audio backend selected: \"%s\" is not supported. Either it is invalid, or support has "
+ "not been included in this build of Shairport Sync.",
config.output_name == NULL ? "<unspecified>" : config.output_name);
}
+ debug(1, "audio backend is \"%s\".", config.output_name);
config.output->init(argc - audio_arg, argv + audio_arg);
-#ifdef COMPILE_FOR_OPENBSD
- /* Past first and last sio_open(3), sndio(7) only needs "audio". */
+#ifdef CONFIG_FFMPEG
+ if (debug_level() <= 1) // keep FFmpeg stuff quiet unless verbosity is 2 or more
+ av_log_set_level(AV_LOG_QUIET);
-#ifdef CONFIG_METADATA
- /* Only coverart cache is created.
- * Only metadata pipe is special. */
- if (!config.metadata_enabled)
-#endif
+#if LIBAVUTIL_VERSION_MAJOR >= 57
+
+ // default multichannel on
+ {
+ AVChannelLayout default_layout =
+ AV_CHANNEL_LAYOUT_7POINT1; // big fat macro to initialise the default layout
+ config.eight_channel_layout = default_layout.u.mask;
+ }
{
- /* Drop "cpath dpath". */
- if (run_cmds) {
- if (pledge("stdio rpath wpath inet unix dns proc exec audio", NULL) == -1)
- die("pledge: %s", strerror(errno));
+ AVChannelLayout default_layout =
+ AV_CHANNEL_LAYOUT_5POINT1; // big fat macro to initialise the default layout
+ config.six_channel_layout = default_layout.u.mask;
+ }
+
+ const char *str;
+
+ if ((config.cfg != NULL) &&
+ (config_lookup_string(config.cfg, "general.eight_channel_mode", &str))) {
+ if ((strcasecmp(str, "off") == 0) || (strcasecmp(str, "no") == 0)) {
+ config.eight_channel_layout = 0; // 0 on initialisation
+ } else if ((strcasecmp(str, "on") == 0) || (strcasecmp(str, "yes") == 0)) {
+ // AVChannelLayout default_layout =
+ // AV_CHANNEL_LAYOUT_7POINT1; // big fat macro to initialise the default layout
+ // config.eight_channel_layout = default_layout.u.mask;
+ } else {
+ AVChannelLayout channel_layout;
+ if (av_channel_layout_from_string(&channel_layout, str) == 0) {
+ if (channel_layout.nb_channels == 8) {
+ config.eight_channel_layout = channel_layout.u.mask;
+ } else {
+ warn("the eight_channel_mode setting \"%s\" is a %u-channel layout. If a channel layout "
+ "is "
+ "given, it must be an 8-channel layout. eight_channel_mode is set to \"off\".",
+ str, channel_layout.nb_channels);
+ }
+ av_channel_layout_uninit(&channel_layout);
+ } else {
+ warn("the eight_channel_mode setting \"%s\" is not recognised -- it should be \"off\" or "
+ "\"on\" or an eight-channel FFmpeg channel layout, e.g. \"7.1\". "
+ "eight_channel_mode is set to \"off\".",
+ str);
+ }
+ }
+ }
+
+ if ((config.cfg != NULL) &&
+ (config_lookup_string(config.cfg, "general.six_channel_mode", &str))) {
+ if ((strcasecmp(str, "off") == 0) || (strcasecmp(str, "no") == 0)) {
+ config.six_channel_layout = 0; // 0 on initialisation
+ } else if ((strcasecmp(str, "on") == 0) || (strcasecmp(str, "yes") == 0)) {
+ // AVChannelLayout default_layout =
+ // AV_CHANNEL_LAYOUT_5POINT1; // big fat macro to initialise the default layout
+ // config.six_channel_layout = default_layout.u.mask;
+ } else {
+ AVChannelLayout channel_layout;
+ if (av_channel_layout_from_string(&channel_layout, str) == 0) {
+ if (channel_layout.nb_channels == 6) {
+ config.six_channel_layout = channel_layout.u.mask;
+ } else {
+ warn("the six_channel_mode setting \"%s\" is a %u-channel layout. If a channel layout is "
+ "given, it must be a 6-channel layout. six_channel_mode is set to \"off\".",
+ str, channel_layout.nb_channels);
+ }
+ av_channel_layout_uninit(&channel_layout);
+ } else {
+ warn("the six_channel_mode setting \"%s\" is not recognised -- it should be \"off\" or "
+ "\"on\" or a six-channel FFmpeg channel layout, e.g. \"5.1\". "
+ "six_channel_mode is set to \"off\".",
+ str);
+ }
+ }
+ }
+
+ if ((config.cfg != NULL) &&
+ (config_lookup_non_empty_string(config.cfg, "general.mixdown", &str))) {
+ if ((strcasecmp(str, "off") == 0) || (strcasecmp(str, "no") == 0)) {
+ config.mixdown_enable = 0; // 0 on initialisation
+ debug(1, "mixdown disabled.");
+ } else if (strcasecmp(str, "auto") == 0) {
+ config.mixdown_enable = 1;
+ config.mixdown_channel_layout = 0; // 0 means auto
+ debug(1, "mixdown target: auto.");
+ } else {
+ AVChannelLayout channel_layout;
+ if (av_channel_layout_from_string(&channel_layout, str) == 0) {
+ config.mixdown_enable = 1;
+ config.mixdown_channel_layout = channel_layout.u.mask;
+ av_channel_layout_uninit(&channel_layout);
+ debug(1, "mixdown target: \"%s\".", str);
+ } else {
+ warn("the mixdown setting \"%s\" is not recognised -- it should be \"off\" or \"auto\" or "
+ "an "
+ "FFmpeg channel layout, e.g. \"stereo\". the mixdown is set to \"auto\".",
+ str);
+ config.mixdown_enable = 1;
+ config.mixdown_channel_layout = 0; // 0 means auto
+ }
+ }
+ }
+#else
+
+ // default on
+ config.eight_channel_layout = AV_CH_LAYOUT_7POINT1;
+ config.six_channel_layout = AV_CH_LAYOUT_5POINT1;
+
+ const char *str;
+
+ if ((config.cfg != NULL) &&
+ (config_lookup_non_empty_string(config.cfg, "general.eight_channel_mode", &str))) {
+ if ((strcasecmp(str, "off") == 0) || (strcasecmp(str, "no") == 0)) {
+ config.eight_channel_layout = 0; // 0 on initialisation
+ } else if ((strcasecmp(str, "on") == 0) || (strcasecmp(str, "yes") == 0)) {
+ // config.eight_channel_layout = AV_CH_LAYOUT_7POINT1;
+ } else if (av_get_channel_layout(str) != 0) {
+ if (av_get_channel_layout_nb_channels(av_get_channel_layout(str)) == 8) {
+ config.eight_channel_layout = av_get_channel_layout(str);
+ } else {
+ warn("the eight_channel_mode setting \"%s\" is a %u channel layout. If a channel layout is "
+ "given, it must be an 8-channel layout. eight_channel_mode is set to \"off\".",
+ str, av_get_channel_layout_nb_channels(av_get_channel_layout(str)));
+ }
+ } else {
+ warn("the eight_channel_mode setting \"%s\" is not recognised -- it should be \"off\" or "
+ "\"on\" or an 8-channel FFmpeg channel layout, e.g. \"7.1\". "
+ "eight_channel_mode is set to \"off\".",
+ str);
+ }
+ }
+
+ if ((config.cfg != NULL) &&
+ (config_lookup_non_empty_string(config.cfg, "general.six_channel_mode", &str))) {
+ if ((strcasecmp(str, "off") == 0) || (strcasecmp(str, "no") == 0)) {
+ config.six_channel_layout = 0; // 0 on initialisation
+ } else if ((strcasecmp(str, "on") == 0) || (strcasecmp(str, "yes") == 0)) {
+ // config.six_channel_layout = AV_CH_LAYOUT_5POINT1;
+ } else if (av_get_channel_layout(str) != 0) {
+ if (av_get_channel_layout_nb_channels(av_get_channel_layout(str)) == 6) {
+ config.six_channel_layout = av_get_channel_layout(str);
+ } else {
+ warn("the six_channel_mode setting \"%s\" is a %u channel layout. If a channel layout is "
+ "given, it must be a 6-channel layout. six_channel_mode is set to \"off\".",
+ str, av_get_channel_layout_nb_channels(av_get_channel_layout(str)));
+ }
+ } else {
+ warn("the six_channel_mode setting \"%s\" is not recognised -- it should be \"off\" or "
+ "\"on\" or a 6-channel FFmpeg channel layout, e.g. \"5.1\". "
+ "six_channel_mode is set to \"off\".",
+ str);
+ }
+ }
+
+ if ((config.cfg != NULL) &&
+ (config_lookup_non_empty_string(config.cfg, "general.mixdown", &str))) {
+ if ((strcasecmp(str, "off") == 0) || (strcasecmp(str, "no") == 0)) {
+ config.mixdown_enable = 0; // 0 on initialisation
+ } else if (strcasecmp(str, "auto") == 0) {
+ config.mixdown_enable = 1;
+ config.mixdown_channel_layout = 0; // 0 means auto
+ } else if (av_get_channel_layout(str) != 0) {
+ config.mixdown_enable = 1;
+ config.mixdown_channel_layout = av_get_channel_layout(str);
} else {
- if (pledge("stdio rpath wpath inet unix dns audio", NULL) == -1)
- die("pledge: %s", strerror(errno));
+ warn("the mixdown setting \"%s\" is not recognised -- it should be \"off\" or \"auto\" or an "
+ "FFmpeg channel layout, e.g. \"stereo\". the mixdown is set to \"auto\".",
+ str);
+ config.mixdown_enable = 1;
+ config.mixdown_channel_layout = 0; // 0 means auto
}
}
#endif
- // pthread_cleanup_push(main_cleanup_handler, NULL);
+ if (config.cfg != NULL) {
+ config_setting_t *output_channel_mapping_setting =
+ config_lookup(config.cfg, "general.output_channel_mapping");
+ if (output_channel_mapping_setting != NULL) {
+ const char *sstr = config_setting_get_string(output_channel_mapping_setting);
+ if (sstr != NULL) { // definitely a string
+ if (strcasecmp(sstr, "auto") == 0) {
+ config.output_channel_mapping_enable = 1; // this is the default anyway
+ config.output_channel_map_size = 0; // use the device's channel map
+ debug(1, "device output channel map chosen");
+ } else if ((strcasecmp(sstr, "off") == 0) || (strcasecmp(sstr, "no") == 0)) {
+ config.output_channel_mapping_enable = 0; // no mapping
+ } else {
+ warn("the output_channel_mapping setting \"%s\" is not recognised -- it should be "
+ "\"auto\", \"off\" or a "
+ "bracketed comma-separated list of short channel names, e.g. (\"FL\", \"FR\", "
+ "\"LFE\");",
+ sstr);
+ }
+ } else {
+ int i = 0;
+ for (i = 0; i < config_setting_length(output_channel_mapping_setting); i++) {
+ // is a list or array, so okay
+ const char *channel_id =
+ config_setting_get_string_elem(output_channel_mapping_setting, i);
+ if (channel_id != NULL) { // definitely a string
+ int found = 0;
+ if (strcmp(channel_id, "--") == 0) {
+ found = 1;
+ } else {
+#if LIBAVUTIL_VERSION_MAJOR >= 57
+ const int buffer_size = 32;
+ char buffer[buffer_size];
+ enum AVChannel channel_index;
+ for (channel_index = AV_CHAN_NONE;
+ ((channel_index < AV_CHAN_BOTTOM_FRONT_RIGHT) && (found == 0));
+ channel_index++) {
+ found = av_channel_name(buffer, buffer_size, channel_index);
+ if (found > 0) {
+ found = ((av_channel_name(buffer, buffer_size, channel_index) > 0) &&
+ (strcmp(channel_id, buffer) == 0));
+ } else {
+ found = 0;
+ }
+ }
+#else
+ uint64_t channel_index;
+ for (channel_index = 0; ((channel_index < 64) && (found == 0)); channel_index++) {
+ found = ((av_get_channel_name(1 << channel_index) != NULL) &&
+ (strcmp(channel_id, av_get_channel_name(1 << channel_index)) == 0));
+ }
+#endif
+ }
+ if (found != 0) {
+ config.output_channel_map[i] = strdup(channel_id);
+ debug(2, "output channel %d is \"%s\".", i, config.output_channel_map[i]);
+ } else {
+ warn("channel \"%s\" is not recognised -- output channel %d will be silent.",
+ channel_id, i);
+ config.output_channel_map[i] = strdup("--");
+ }
+ config.output_channel_map_size++;
+ }
+ }
+ if (config.output_channel_map_size == 0)
+ warn("the output_channel_mapping setting was empty. No output channel mapping will be "
+ "done.");
+ else
+ config.output_channel_mapping_enable = 1;
+ }
+ }
+ }
- // daemon_log(LOG_NOTICE, "startup");
+#endif
switch (config.endianness) {
case SS_LITTLE_ENDIAN:
"44100 frames per second).",
BUFFER_FRAMES * 352 - 22050);
}
-
+ const int option_print_level = 1;
/* Print out options */
- debug(1, "disable_resend_requests is %s.", config.disable_resend_requests ? "on" : "off");
- debug(1,
+ debug(option_print_level, "disable_resend_requests is %s.",
+ config.disable_resend_requests ? "on" : "off");
+ debug(option_print_level,
"diagnostic_drop_packet_fraction is %f. A value of 0.0 means no packets will be dropped "
"deliberately.",
config.diagnostic_drop_packet_fraction);
- debug(1, "statistics_requester status is %d.", config.statistics_requested);
+ debug(option_print_level, "statistics_requester status is %d.", config.statistics_requested);
#if CONFIG_LIBDAEMON
- debug(1, "daemon status is %d.", config.daemonise);
- debug(1, "daemon pid file path is \"%s\".", pid_file_proc());
-#endif
- debug(1, "rtsp listening port is %d.", config.port);
- debug(1, "udp base port is %d.", config.udp_port_base);
- debug(1, "udp port range is %d.", config.udp_port_range);
- debug(1, "player name is \"%s\".", config.service_name);
- debug(1, "backend is \"%s\".", config.output_name);
- debug(1, "run_this_before_play_begins action is \"%s\".", strnull(config.cmd_start));
- debug(1, "run_this_after_play_ends action is \"%s\".", strnull(config.cmd_stop));
- debug(1, "wait-cmd status is %d.", config.cmd_blocking);
- debug(1, "run_this_before_play_begins may return output is %d.", config.cmd_start_returns_output);
- debug(1, "run_this_if_an_unfixable_error_is_detected action is \"%s\".",
+ debug(option_print_level, "daemon status is %d.", config.daemonise);
+ debug(option_print_level, "daemon pid file path is \"%s\".", pid_file_proc());
+#endif
+ debug(option_print_level, "rtsp listening port is %d.", config.port);
+ debug(option_print_level, "udp base port is %d.", config.udp_port_base);
+ debug(option_print_level, "udp port range is %d.", config.udp_port_range);
+ debug(option_print_level, "player name is \"%s\".", config.service_name);
+ debug(option_print_level, "run_this_before_play_begins action is \"%s\".",
+ strnull(config.cmd_start));
+ debug(option_print_level, "run_this_after_play_ends action is \"%s\".", strnull(config.cmd_stop));
+ debug(option_print_level, "wait-cmd status is %d.", config.cmd_blocking);
+ debug(option_print_level, "run_this_before_play_begins may return output is %d.",
+ config.cmd_start_returns_output);
+ debug(option_print_level, "run_this_if_an_unfixable_error_is_detected action is \"%s\".",
strnull(config.cmd_unfixable));
- debug(1, "run_this_before_entering_active_state action is \"%s\".",
+ debug(option_print_level, "run_this_before_entering_active_state action is \"%s\".",
strnull(config.cmd_active_start));
- debug(1, "run_this_after_exiting_active_state action is \"%s\".",
+ debug(option_print_level, "run_this_after_exiting_active_state action is \"%s\".",
strnull(config.cmd_active_stop));
- debug(1, "active_state_timeout is %f seconds.", config.active_state_timeout);
- debug(1, "mdns backend \"%s\".", strnull(config.mdns_name));
+ debug(option_print_level, "active_state_timeout is %f seconds.", config.active_state_timeout);
+ debug(option_print_level, "mdns backend \"%s\".", strnull(config.mdns_name));
debug(2, "userSuppliedLatency is %d.", config.userSuppliedLatency);
- debug(1, "interpolation setting is \"%s\".",
- config.packet_stuffing == ST_basic ? "basic"
- : config.packet_stuffing == ST_soxr ? "soxr"
- : "auto");
- debug(1, "interpolation soxr_delay_threshold is %d.", config.soxr_delay_threshold);
- debug(1, "resync time is %f seconds.", config.resync_threshold);
- debug(1, "resync recovery time is %f seconds.", config.resync_recovery_time);
- debug(1, "allow a session to be interrupted: %d.", config.allow_session_interruption);
- debug(1, "busy timeout time is %d.", config.timeout);
- debug(1, "drift tolerance is %f seconds.", config.tolerance);
- debug(1, "password is %s.", config.password == NULL ? "not set" : "set (omitted)");
- debug(1, "default airplay volume is: %.6f.", config.default_airplay_volume);
- debug(1, "high threshold airplay volume is: %.6f.", config.high_threshold_airplay_volume);
- if (config.limit_to_high_volume_threshold_time_in_minutes == 0)
- debug(1, "check for higher-than-threshold volume for new play session is disabled.");
- else
- debug(1,
- "suggest default airplay volume for new play sessions instead of higher-than-threshold "
- "airplay volume after: %d minutes.",
- config.limit_to_high_volume_threshold_time_in_minutes);
- debug(1, "ignore_volume_control is %d.", config.ignore_volume_control);
+ debug(option_print_level, "interpolation setting is \"%s\".",
+ config.packet_stuffing == ST_basic ? "basic"
+ : config.packet_stuffing == ST_vernier ? "vernier"
+ : config.packet_stuffing == ST_soxr ? "soxr"
+ : "auto");
+ debug(option_print_level, "interpolation soxr_delay_threshold is %d.",
+ config.soxr_delay_threshold);
+ debug(option_print_level, "resync time is %f seconds.", config.resync_threshold);
+ debug(option_print_level, "allow a classic AirPlay session to be interrupted: \"%s\".",
+ config.allow_session_interruption == 0 ? "no" : "yes");
+ debug(option_print_level, "busy timeout time is %d.", config.timeout);
+ debug(option_print_level, "drift tolerance is %f seconds.", config.tolerance);
+ debug(option_print_level, "password is %s.", config.password == NULL ? "not set" : "set (omitted)");
+ debug(option_print_level, "default airplay volume is: %.6f.", config.default_airplay_volume);
+ debug(option_print_level, "ignore_volume_control is %d.", config.ignore_volume_control);
if (config.volume_max_db_set)
- debug(1, "volume_max_db is %d.", config.volume_max_db);
+ debug(option_print_level, "volume_max_db is %d.", config.volume_max_db);
else
- debug(1, "volume_max_db is not set");
- debug(1, "volume range in dB (zero means use the range specified by the mixer): %u.",
+ debug(option_print_level, "volume_max_db is not set");
+ debug(option_print_level,
+ "volume range in dB (zero means use the range specified by the mixer): %u.",
config.volume_range_db);
- debug(1,
+ debug(option_print_level,
"volume_range_combined_hardware_priority (1 means hardware mixer attenuation is used "
"first) is %d.",
config.volume_range_hw_priority);
- debug(1, "playback_mode is %d (0-stereo, 1-mono, 1-reverse_stereo, 2-both_left, 3-both_right).",
+ debug(option_print_level,
+ "playback_mode is %d (0-stereo, 1-mono, 2-reverse_stereo, 3-both_left, 4-both_right).",
config.playback_mode);
- debug(1, "disable_synchronization is %d.", config.no_sync);
- debug(1, "use_mmap_if_available is %d.", config.no_mmap ? 0 : 1);
- debug(1, "output_format automatic selection is %sabled.",
+ debug(option_print_level, "disable_synchronization is %d.", config.no_sync);
+ debug(option_print_level, "use_mmap_if_available is %d.", config.no_mmap ? 0 : 1);
+ debug(option_print_level, "output_format automatic selection is %sabled.",
config.output_format_auto_requested ? "en" : "dis");
- if (config.output_format_auto_requested == 0)
- debug(1, "output_format is \"%s\".", sps_format_description_string(config.output_format));
- debug(1, "output_rate automatic selection is %sabled.",
+ // if (config.output_format_auto_requested == 0)
+ // debug(option_print_level, "output_format is \"%s\".",
+ // sps_format_description_string(config.current_output_configuration->format));
+ debug(option_print_level, "output_rate automatic selection is %sabled.",
config.output_rate_auto_requested ? "en" : "dis");
- if (config.output_rate_auto_requested == 0)
- debug(1, "output_rate is %d.", config.output_rate);
- debug(1, "audio backend desired buffer length is %f seconds.",
+ // if (config.output_rate_auto_requested == 0)
+ // debug(option_print_level, "output_rate is %d.", config.current_output_configuration->rate);
+ debug(option_print_level, "audio backend desired buffer length is %f seconds.",
config.audio_backend_buffer_desired_length);
- debug(1, "audio_backend_buffer_interpolation_threshold_in_seconds is %f seconds.",
+ debug(option_print_level,
+ "audio_backend_buffer_interpolation_threshold_in_seconds is %f seconds.",
config.audio_backend_buffer_interpolation_threshold_in_seconds);
- debug(1, "audio backend latency offset is %f seconds.", config.audio_backend_latency_offset);
+ debug(option_print_level, "audio backend latency offset is %f seconds.",
+ config.audio_backend_latency_offset);
if (config.audio_backend_silent_lead_in_time_auto == 1)
- debug(1, "audio backend silence lead-in time is \"auto\".");
+ debug(option_print_level, "audio backend silence lead-in time is \"auto\".");
else
- debug(1, "audio backend silence lead-in time is %f seconds.",
+ debug(option_print_level, "audio backend silence lead-in time is %f seconds.",
config.audio_backend_silent_lead_in_time);
- debug(1, "zeroconf regtype is \"%s\".", config.regtype);
- debug(1, "decoders_supported field is %d.", config.decoders_supported);
- debug(1, "use_apple_decoder is %d.", config.use_apple_decoder);
- debug(1, "alsa_use_hardware_mute is %d.", config.alsa_use_hardware_mute);
+ debug(option_print_level, "zeroconf regtype is \"%s\".", config.regtype);
+ debug(option_print_level,
+ "decoders_supported bit field is %d (1 == hammerton, 2 == apple, 4 == ffmpeg).",
+ config.decoders_supported);
+ debug(option_print_level, "decoder_in_use is %d.", config.decoder_in_use);
+ debug(option_print_level, "alsa_use_hardware_mute is %d.", config.alsa_use_hardware_mute);
if (config.interface)
- debug(1, "mdns service interface \"%s\" requested.", config.interface);
+ debug(option_print_level, "mdns service interface \"%s\" requested.", config.interface);
else
- debug(1, "no special mdns service interface was requested.");
+ debug(option_print_level, "no special mdns service interface was requested.");
char *realConfigPath = realpath(config.configfile, NULL);
if (realConfigPath) {
- debug(1, "configuration file name \"%s\" resolves to \"%s\".", config.configfile,
- realConfigPath);
+ debug(option_print_level, "configuration file name \"%s\" resolves to \"%s\".",
+ config.configfile, realConfigPath);
free(realConfigPath);
} else {
- debug(1, "configuration file name \"%s\" can not be resolved.", config.configfile);
+ debug(option_print_level, "configuration file name \"%s\" can not be resolved.",
+ config.configfile);
}
#ifdef CONFIG_METADATA
- debug(1, "metadata enabled is %d.", config.metadata_enabled);
- debug(1, "metadata pipename is \"%s\".", config.metadata_pipename);
- debug(1, "metadata socket address is \"%s\" port %d.", strnull(config.metadata_sockaddr),
+ debug(option_print_level, "metadata enabled is %d.", config.metadata_enabled);
+ debug(option_print_level, "metadata pipename is \"%s\".", config.metadata_pipename);
+ debug(option_print_level, "metadata socket address is \"%s\" port %d.", strnull(config.metadata_sockaddr),
config.metadata_sockport);
- debug(1, "metadata socket packet size is \"%d\".", config.metadata_sockmsglength);
- debug(1, "get-coverart is %d.", config.get_coverart);
+ debug(option_print_level, "metadata socket packet size is \"%zd\".",
+ config.metadata_sockmsglength);
+ debug(option_print_level, "get-coverart is %d.", config.get_coverart);
#endif
#ifdef CONFIG_MQTT
- debug(1, "mqtt is %sabled.", config.mqtt_enabled ? "en" : "dis");
- debug(1, "mqtt hostname is %s, port is %d.", config.mqtt_hostname, config.mqtt_port);
- debug(1, "mqtt topic is %s.", config.mqtt_topic);
- debug(1, "mqtt will%s publish raw metadata.", config.mqtt_publish_raw ? "" : " not");
- debug(1, "mqtt will%s publish parsed metadata.", config.mqtt_publish_parsed ? "" : " not");
- debug(1, "mqtt will%s publish cover Art.", config.mqtt_publish_cover ? "" : " not");
- debug(1, "mqtt remote control is %sabled.", config.mqtt_enable_remote ? "en" : "dis");
- debug(1, "mqtt autodiscovery is %sabled.", config.mqtt_enable_autodiscovery ? "en" : "dis");
+ debug(option_print_level, "mqtt is %sabled.", config.mqtt_enabled ? "en" : "dis");
+ debug(option_print_level, "mqtt hostname is %s, port is %d.", config.mqtt_hostname,
+ config.mqtt_port);
+ debug(option_print_level, "mqtt topic is %s.", config.mqtt_topic);
+ debug(option_print_level, "mqtt will%s publish raw metadata.",
+ config.mqtt_publish_raw ? "" : " not");
+ debug(option_print_level, "mqtt will%s publish parsed metadata.",
+ config.mqtt_publish_parsed ? "" : " not");
+ debug(option_print_level, "mqtt will%s publish cover Art.",
+ config.mqtt_publish_cover ? "" : " not");
+ debug(option_print_level, "mqtt will%s set retain flag.",
+ config.mqtt_publish_retain ? "" : " not");
+ debug(option_print_level, "mqtt remote control is %sabled.",
+ config.mqtt_enable_remote ? "en" : "dis");
+ debug(option_print_level, "mqtt autodiscovery is %sabled.",
+ config.mqtt_enable_autodiscovery ? "en" : "dis");
#endif
#ifdef CONFIG_CONVOLUTION
- debug(1, "convolution is %d.", config.convolution);
- debug(1, "convolution IR file is \"%s\"", config.convolution_ir_file);
- debug(1, "convolution max length %d", config.convolution_max_length);
- debug(1, "convolution gain is %f", config.convolution_gain);
-#endif
- debug(1, "loudness is %d.", config.loudness);
- debug(1, "loudness reference level is %f", config.loudness_reference_volume_db);
+ debug(option_print_level, "convolution_enabled is %s.",
+ config.convolution_enabled != 0 ? "true" : "false");
+ debug(option_print_level, "convolution maximum length is %f seconds.",
+ config.convolution_max_length_in_seconds);
+ debug(option_print_level, "convolution gain is %f", config.convolution_gain);
+ sanity_check_ir_files(option_print_level, config.convolution_ir_files,
+ config.convolution_ir_file_count);
+#endif
+ debug(option_print_level, "loudness_enabled is %s.",
+ config.loudness_enabled != 0 ? "true" : "false");
+ debug(option_print_level, "loudness reference level is %f", config.loudness_reference_volume_db);
#ifdef CONFIG_SOXR
- pthread_create(&soxr_time_check_thread, NULL, &soxr_time_check, NULL);
+ named_pthread_create(&soxr_time_check_thread, NULL, &soxr_time_check, NULL, "soxr_checker");
soxr_time_check_thread_started = 1;
+#endif
+
+#ifdef CONFIG_FFMPEG
+ debug(2, "LIBAVUTIL_VERSION_MAJOR is %d", LIBAVUTIL_VERSION_MAJOR);
+
+#if LIBAVUTIL_VERSION_MAJOR >= 57
+ unsigned int cc;
+ for (cc = 1; cc <= 8; cc++) {
+ AVChannelLayout output_channel_layout;
+ av_channel_layout_default(&output_channel_layout, cc);
+ char chLayoutDescription[1024];
+ int sts = av_channel_layout_describe(&output_channel_layout, chLayoutDescription,
+ sizeof(chLayoutDescription));
+ if (sts >= 0) {
+ int idx;
+ char channel_map[1024];
+ channel_map[0] = '\0';
+ for (idx = 0; idx < output_channel_layout.nb_channels; idx++) {
+ enum AVChannel av = av_channel_layout_channel_from_index(&output_channel_layout, idx);
+ char chName[64];
+ int cts = av_channel_name(chName, sizeof(chName), av);
+ if (cts >= 0) {
+ if (idx != 0)
+ strncat(channel_map, " ", sizeof(channel_map) - 1 - strlen(channel_map));
+ strncat(channel_map, chName, sizeof(channel_map) - 1 - strlen(channel_map));
+ debug(3, "Channel %d: \"%s\"", idx, chName);
+ } else {
+ debug(1, "Insufficient space for the name of channel %d.", idx);
+ }
+ }
+ if (cc == 1) {
+ debug(2, "Default layout for one channel: \"%s\", channel map: \"%s\".",
+ chLayoutDescription, channel_map);
+ } else {
+ debug(2, "Default layout for %u channels: \"%s\", channel map: \"%s\".", cc,
+ chLayoutDescription, channel_map);
+ }
+ } else {
+ debug(1, "Insufficient space for the description of the default layout for %u channels.", cc);
+ }
+ av_channel_layout_uninit(&output_channel_layout);
+ }
+#endif
+
#endif
// In AirPlay 2 mode, the AP1 prefix is the same as the device ID less the colons
unsigned int md5_digest_len = EVP_MD_size(EVP_md5());
EVP_DigestFinal_ex(mdctx, ap_md5, &md5_digest_len);
EVP_MD_CTX_free(mdctx);
-
#endif
#ifdef CONFIG_MBEDTLS
dacp_monitor_start();
#endif
-#if defined(CONFIG_DBUS_INTERFACE) || defined(CONFIG_MPRIS_INTERFACE)
- // Start up DBUS services after initial settings are all made
- // debug(1, "Starting up D-Bus services");
- pthread_create(&dbus_thread, NULL, &dbus_thread_func, NULL);
-#ifdef CONFIG_DBUS_INTERFACE
- start_dbus_service();
-#endif
-#ifdef CONFIG_MPRIS_INTERFACE
- start_mpris_service();
-#endif
-#endif
-
#ifdef CONFIG_MQTT
if (config.mqtt_enabled) {
initialise_mqtt();
}
#endif
+#if defined(CONFIG_DBUS_INTERFACE) || defined(CONFIG_MPRIS_INTERFACE)
+ glib_worker_thread = g_thread_new("glib worker", glib_worker_thread_function, NULL);
+#endif
+
#ifdef CONFIG_AIRPLAY_2
+
ptp_send_control_message_string(
"T"); // send this message to get nqptp to create the named shm interface
uint64_t nqptp_start_waiting_time = get_absolute_time_in_ns();
#ifdef CONFIG_METADATA
send_ssnc_metadata('svna', config.service_name, strlen(config.service_name), 1);
- char buffer[256] = "";
- snprintf(buffer, sizeof(buffer), "%d", config.output_rate);
- send_ssnc_metadata('ofps', buffer, strlen(buffer), 1);
- snprintf(buffer, sizeof(buffer), "%s", sps_format_description_string(config.output_format));
- send_ssnc_metadata('ofmt', buffer, strlen(buffer), 1);
#endif
- activity_monitor_start(); // not yet for AP2
- pthread_create(&rtsp_listener_thread, NULL, &rtsp_listen_loop, NULL);
+#ifdef CONFIG_CONVOLUTION
+ convolver_pool_init(config.convolution_threads, 8); // 8 channels
+#endif
+ activity_monitor_start();
+ debug(3, "create an RTSP listener");
+ named_pthread_create(&rtsp_listener_thread, NULL, &rtsp_listen_loop, NULL, "bonjour");
atexit(exit_rtsp_listener);
pthread_join(rtsp_listener_thread, NULL);
return 0;
* tinysvcmdns - a tiny MDNS implementation for publishing services
* Copyright (C) 2011 Darell Tan
* All rights reserved.
- * Updated many times by Mike Brady (c) 2014 -- 2019
+ * Updated many times by Mike Brady (c) 2014--2025
* Includes fixes for CVE-12087 and CVE-2017-12130
*
* Redistribution and use in source and binary forms, with or without
pthread_attr_init(&attr);
pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
- if (pthread_create(&tid, &attr, (void *(*)(void *)) & main_loop, (void *)server) != 0) {
+ if (named_pthread_create(&tid, &attr, (void *(*)(void *)) & main_loop, (void *)server,
+ "tinysvcmdns") != 0) {
pthread_mutex_destroy(&server->data_lock);
free(server);
return NULL;
--- /dev/null
+#!/bin/sh
+
+# user-service-installer.sh - Installs a user-level systemd service for shairport-sync
+
+SERVICE_NAME="shairport-sync.service"
+SERVICE_SOURCE="./scripts/shairport-sync.user.service"
+USER_SYSTEMD_DIR="$HOME/.config/systemd/user"
+SERVICE_DEST="$USER_SYSTEMD_DIR/$SERVICE_NAME"
+APP_NAME="shairport-sync"
+
+DRY_RUN=0
+
+show_help() {
+ echo "Usage: $0 [OPTION]"
+ echo ""
+ echo "Installs a user-level systemd service to:"
+ echo " $USER_SYSTEMD_DIR"
+ echo ""
+ echo "Options:"
+ echo " -h, --help Show this help message and exit"
+ echo " --dry-run Show what the script would do without making changes"
+}
+
+# Parse arguments
+while [ $# -gt 0 ]; do
+ case "$1" in
+ -h|--help)
+ show_help
+ exit 0
+ ;;
+ --dry-run)
+ DRY_RUN=1
+ ;;
+ *)
+ echo "Unknown option: $1" >&2
+ show_help
+ exit 1
+ ;;
+ esac
+ shift
+done
+
+echo "--------------------------------------------------"
+echo "Systemd User Service Installer for $APP_NAME"
+echo "This script will:"
+echo " - Check for system/initd conflicts"
+echo " - Verify the app is not already running"
+echo " - Install the user service"
+echo " - Enable it to run at login"
+if [ "$DRY_RUN" -eq 1 ]; then
+ echo ""
+ echo "Dry-run mode: no changes will be made."
+fi
+echo "--------------------------------------------------"
+echo ""
+
+# Check for systemd
+if command -v systemctl >/dev/null 2>&1; then
+ echo "[OK] systemd is available."
+else
+ echo "[FAIL] systemd is not available on this system." >&2
+ exit 1
+fi
+
+# Check for system-level service
+if systemctl is-enabled "$APP_NAME" >/dev/null 2>&1; then
+ echo "[FAIL] A system-level systemd service for '$APP_NAME' is currently enabled." >&2
+ echo " [ADVICE] Please disable it before installing a user-level service." >&2
+ exit 1
+else
+ echo "[OK] No conflicting system-level systemd service enabled."
+fi
+
+# Check for init.d service
+if [ -x "/etc/init.d/$APP_NAME" ]; then
+ echo "[FAIL] An init.d script for '$APP_NAME' exists at /etc/init.d/$APP_NAME." >&2
+ echo " [ADVICE] Please remove or disable the init.d version before proceeding." >&2
+ exit 1
+else
+ echo "[OK] No conflicting init.d service found."
+fi
+
+# Note if the user-level service is already running
+if systemctl --user is-enabled "$APP_NAME" >/dev/null 2>&1; then
+ echo "[NOTE] A user-level systemd service for '$APP_NAME' is currently enabled."
+fi
+
+# Check if the application is already running
+if pgrep -x "$APP_NAME" >/dev/null 2>&1; then
+ echo "[FAIL] $APP_NAME is already running." >&2
+ echo " [ADVICE] Please stop $APP_NAME before proceeding." >&2
+ exit 1
+else
+ echo "[OK] $APP_NAME is not currently running."
+fi
+
+# Create directory
+if [ ! -d "$USER_SYSTEMD_DIR" ]; then
+ if [ "$DRY_RUN" -eq 0 ]; then
+ echo "Creating directory $USER_SYSTEMD_DIR..."
+ mkdir -p "$USER_SYSTEMD_DIR"
+ if [ $? -eq 0 ]; then
+ echo "[OK] Directory $USER_SYSTEMD_DIR created."
+ else
+ echo "[FAIL] Failed to create directory $USER_SYSTEMD_DIR" >&2
+ exit 1
+ fi
+
+ fi
+else
+ echo "[OK] User systemd directory already exists."
+fi
+
+if [ -d "$USER_SYSTEMD_DIR" ] && [ ! -w "$USER_SYSTEMD_DIR" ]; then
+ echo "[FAIL] $USER_SYSTEMD_DIR is not writable." >&2
+ echo " [ADVICE] Please ensure $USER_SYSTEMD_DIR is owned and writable by user \"$(whoami)\" before proceeding." >&2
+ exit 1
+fi
+
+
+# If the service file already exists, check that we can replace it
+
+if [ -f "$SERVICE_DEST" ] && [ ! -w "$SERVICE_DEST" ]; then
+ echo "[FAIL] The existing $SERVICE_DEST can not be replaced due to its ownership or permissions." >&2
+ echo " [ADVICE] Please delete $SERVICE_DEST or ensure it is owned and writable by user \"$(whoami)\" before proceeding." >&2
+ exit 1
+fi
+
+# Create the service file
+
+if [ "$DRY_RUN" -eq 0 ]; then
+ echo "Creating service file at $SERVICE_DEST..."
+ cat > "$SERVICE_DEST" <<EOF
+[Unit]
+Description=Shairport Sync - AirPlay Audio Receiver
+After=sound.target
+
+[Service]
+ExecStart=/usr/local/bin/shairport-sync --log-to-syslog
+
+[Install]
+WantedBy=default.target
+EOF
+ if [ $? -eq 0 ]; then
+ echo "[OK] Service file created."
+ else
+ echo "[FAIL] Failed to create service file." >&2
+ exit 1
+ fi
+fi
+
+# Reload systemd
+if [ "$DRY_RUN" -eq 0 ]; then
+ echo "Reloading systemd user daemon..."
+ systemctl --user daemon-reexec
+ systemctl --user daemon-reload
+ if [ $? -eq 0 ]; then
+ echo "[OK] The systemd user daemon was reloaded."
+ else
+ echo "[FAIL] Failed to reload systemd user daemon." >&2
+ exit 1
+ fi
+fi
+
+# Enable service
+if [ "$DRY_RUN" -eq 0 ]; then
+ echo "Enabling service $SERVICE_NAME..."
+ systemctl --user enable --now "$SERVICE_NAME"
+ if [ $? -eq 0 ]; then
+ echo "[OK] Service enabled and started."
+ else
+ echo "[FAIL] Failed to enable the user service." >&2
+ exit 1
+ fi
+fi
+
+echo ""
+if [ "$DRY_RUN" -eq 1 ]; then
+ echo "Dry run completed successfully."
+else
+ echo "Installation complete."
+ echo "The user-level systemd service for $APP_NAME is now installed and enabled."
+fi
--- /dev/null
+/*
+ * Buffered Read. This file is part of Shairport Sync
+ * Copyright (c) Mike Brady 2025
+
+ * Modifications, including those associated with audio synchronization, multithreading and
+ * metadata handling copyright (c) Mike Brady 2014--2025
+ * All rights reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+ #include <sys/types.h>
+ #include "buffered_read.h"
+ #include "common.h"
+
+ssize_t buffered_read(buffered_tcp_desc *descriptor, void *buf, size_t count,
+ size_t *bytes_remaining) {
+ ssize_t response = -1;
+ if (debug_mutex_lock(&descriptor->mutex, 50000, 1) != 0)
+ debug(1, "problem with mutex");
+ pthread_cleanup_push(mutex_unlock, (void *)&descriptor->mutex);
+ // wipe the slate clean before reading...
+ descriptor->error_code = 0;
+ descriptor->closed = 0;
+
+ if (descriptor->buffer_occupancy == 0) {
+ debug(2, "buffered_read: buffer empty -- waiting for %zu bytes.", count);
+ }
+
+ while ((descriptor->buffer_occupancy == 0) && (descriptor->error_code == 0) &&
+ (descriptor->closed == 0)) {
+ if (pthread_cond_wait(&descriptor->not_empty_cv, &descriptor->mutex))
+ debug(1, "Error waiting for buffered read");
+ else
+ debug(2, "buffered_read: signalled with %zu bytes after waiting.",
+ descriptor->buffer_occupancy);
+ }
+
+ if (descriptor->error_code) {
+ errno = descriptor->error_code;
+ debug(1, "buffered_read: error %d.", errno);
+ response = -1;
+ } else if (descriptor->closed != 0) {
+ debug(2, "buffered_read: connection closed.");
+ errno = 0; // no error -- just closed
+ response = 0;
+ } else if (descriptor->buffer_occupancy != 0) {
+ ssize_t bytes_to_move = count;
+
+ if (descriptor->buffer_occupancy < count) {
+ bytes_to_move = descriptor->buffer_occupancy;
+ }
+
+ ssize_t top_gap = descriptor->buffer + descriptor->buffer_max_size - descriptor->toq;
+ if (top_gap < bytes_to_move)
+ bytes_to_move = top_gap;
+
+ memcpy(buf, descriptor->toq, bytes_to_move);
+ descriptor->toq += bytes_to_move;
+ if (descriptor->toq == descriptor->buffer + descriptor->buffer_max_size)
+ descriptor->toq = descriptor->buffer;
+ descriptor->buffer_occupancy -= bytes_to_move;
+ if (bytes_remaining != NULL)
+ *bytes_remaining = descriptor->buffer_occupancy;
+ response = bytes_to_move;
+ if (pthread_cond_signal(&descriptor->not_full_cv))
+ debug(1, "Error signalling");
+ }
+
+ pthread_cleanup_pop(1); // release the mutex
+ return response;
+}
+
+#define STANDARD_PACKET_SIZE 4096
+
+void buffered_tcp_reader_cleanup_handler(__attribute__((unused)) void *arg) {
+ debug(2, "Buffered TCP Reader Thread Exit via Cleanup.");
+}
+
+void *buffered_tcp_reader(void *arg) {
+ // #include <syscall.h>
+ // debug(1, "buffered_tcp_reader PID %d", syscall(SYS_gettid));
+ pthread_cleanup_push(buffered_tcp_reader_cleanup_handler, NULL);
+ buffered_tcp_desc *descriptor = (buffered_tcp_desc *)arg;
+
+ // listen(descriptor->sock_fd, 5); // this is done in the handle_setup_2 code to ensure it's open
+ // when the client hears about it...
+ ssize_t nread;
+ SOCKADDR remote_addr;
+ memset(&remote_addr, 0, sizeof(remote_addr));
+ socklen_t addr_size = sizeof(remote_addr);
+ int finished = 0;
+ int fd = accept(descriptor->sock_fd, (struct sockaddr *)&remote_addr, &addr_size);
+ // debug(1, "buffered_tcp_reader: the client has opened a buffered audio link.");
+ intptr_t pfd = fd;
+ pthread_cleanup_push(socket_cleanup, (void *)pfd);
+
+ do {
+ int have_time_to_sleep = 0;
+ if (debug_mutex_lock(&descriptor->mutex, 500000, 1) != 0)
+ debug(1, "problem with mutex");
+ pthread_cleanup_push(mutex_unlock, (void *)&descriptor->mutex);
+ while (descriptor->buffer_occupancy == descriptor->buffer_max_size) {
+ if (pthread_cond_wait(&descriptor->not_full_cv, &descriptor->mutex))
+ debug(1, "Error waiting for not_full_cv");
+ }
+ pthread_cleanup_pop(1); // release the mutex
+
+ // now we know it is not full, so go ahead and try to read some more into it
+
+ // wrap
+ if ((size_t)(descriptor->eoq - descriptor->buffer) == descriptor->buffer_max_size)
+ descriptor->eoq = descriptor->buffer;
+
+ // figure out how much to ask for
+ size_t bytes_to_request = STANDARD_PACKET_SIZE;
+ size_t free_space = descriptor->buffer_max_size - descriptor->buffer_occupancy;
+ if (bytes_to_request > free_space)
+ bytes_to_request = free_space; // don't ask for more than will fit
+
+ size_t gap_to_end_of_buffer =
+ descriptor->buffer + descriptor->buffer_max_size - descriptor->eoq;
+ if (gap_to_end_of_buffer < bytes_to_request)
+ bytes_to_request =
+ gap_to_end_of_buffer; // only ask for what will fill to the top of the buffer
+
+ // do the read
+ if (descriptor->buffer_occupancy == 0)
+ debug(2, "recv of up to %zu bytes with an empty buffer.", bytes_to_request);
+ nread = recv(fd, descriptor->eoq, bytes_to_request, 0);
+ // debug(1, "Received %d bytes for a buffer size of %d bytes.",nread,
+ // descriptor->buffer_occupancy + nread);
+ if (debug_mutex_lock(&descriptor->mutex, 50000, 1) != 0)
+ debug(1, "problem with not empty mutex");
+ pthread_cleanup_push(mutex_unlock, (void *)&descriptor->mutex);
+ if (nread < 0) {
+ char errorstring[1024];
+ strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ debug(1, "error in buffered_tcp_reader %d: \"%s\". Could not recv a packet.", errno,
+ errorstring);
+ descriptor->error_code = errno;
+ } else if (nread == 0) {
+ descriptor->closed = 1;
+ debug(
+ 2,
+ "buffered audio port closed by remote end. Terminating the buffered_tcp_reader thread.");
+ finished = 1;
+ } else if (nread > 0) {
+ descriptor->eoq += nread;
+ descriptor->buffer_occupancy += nread;
+ }
+
+ // signal if we got data or an error or the file closed
+ if (pthread_cond_signal(&descriptor->not_empty_cv))
+ debug(1, "Error signalling");
+ if (descriptor->buffer_occupancy > 16384)
+ have_time_to_sleep = 1;
+ pthread_cleanup_pop(1); // release the mutex
+ if (have_time_to_sleep)
+ usleep(10000); // give other threads a chance to run...
+ } while (finished == 0);
+
+ debug(2, "Buffered TCP Reader Thread Exit \"Normal\" Exit Begin.");
+ pthread_cleanup_pop(1); // close the socket
+ pthread_cleanup_pop(1); // cleanup
+ debug(2, "Buffered TCP Reader Thread Exit \"Normal\" Exit.");
+ pthread_exit(NULL);
+}
+
+// this will read a block of the size specified to the buffer
+// and will return either with the block or on error
+ssize_t read_sized_block(buffered_tcp_desc *descriptor, void *buf, size_t count,
+ size_t *bytes_remaining) {
+ ssize_t response, nread;
+ size_t inbuf = 0; // bytes already in the buffer
+ int keep_trying = 1;
+
+ do {
+ nread = buffered_read(descriptor, buf + inbuf, count - inbuf, bytes_remaining);
+ if (nread == 0) {
+ // a blocking read that returns zero means eof -- implies connection closed
+ debug(2, "read_sized_block connection closed.");
+ keep_trying = 0;
+ } else if (nread < 0) {
+ if (errno == EAGAIN) {
+ debug(1, "read_sized_block getting Error 11 -- EAGAIN from a blocking read!");
+ }
+ if ((errno != EAGAIN) && (errno != EINTR)) {
+ char errorstring[1024];
+ strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ debug(1, "read_sized_block read error %d: \"%s\".", errno, (char *)errorstring);
+ keep_trying = 0;
+ }
+ } else {
+ inbuf += (size_t)nread;
+ }
+ } while ((keep_trying != 0) && (inbuf < count));
+ if (nread <= 0)
+ response = nread;
+ else
+ response = inbuf;
+ return response;
+}
--- /dev/null
+#ifndef _BUFFERED_READ_H
+#define _BUFFERED_READ_H
+
+typedef struct {
+ int closed;
+ int error_code;
+ int sock_fd;
+ char *buffer;
+ char *toq;
+ char *eoq;
+ size_t buffer_max_size;
+ size_t buffer_occupancy;
+ pthread_mutex_t mutex;
+ pthread_cond_t not_empty_cv;
+ pthread_cond_t not_full_cv;
+} buffered_tcp_desc;
+
+void *buffered_tcp_reader(void *arg);
+
+// read the number of bytes specified by "count".
+ssize_t read_sized_block(buffered_tcp_desc *descriptor, void *buf, size_t count,
+ size_t *bytes_remaining);
+
+#endif // _BUFFERED_READ_H
--- /dev/null
+/*
+MIT License
+
+Copyright (c) 2023--2025 Mike Brady 4265913+mikebrady@users.noreply.github.com
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+*/
+
+#include "debug.h"
+#include <inttypes.h>
+#include <pthread.h>
+#include <stdarg.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <syslog.h>
+#include <time.h>
+
+static int debuglev = 0;
+int debugger_show_elapsed_time = 0;
+int debugger_show_relative_time = 0;
+int debugger_show_file_and_line = 1;
+
+static uint64_t ns_time_at_startup = 0;
+static uint64_t ns_time_at_last_debug_message;
+
+// always hold this lock when accessing ns_time_at_last_debug_message
+static pthread_mutex_t debug_timing_lock = PTHREAD_MUTEX_INITIALIZER;
+
+// Return the current CLOCK_REALTIME time as nanoseconds since the Unix epoch.
+// CLOCK_REALTIME is used (rather than a monotonic clock) because PTP uses it.
+// NOTE(review): CLOCK_REALTIME can step if the system clock is adjusted, so
+// intervals derived from successive calls may occasionally be wrong.
+uint64_t debug_get_absolute_time_in_ns() {
+  uint64_t time_now_ns;
+  struct timespec tn;
+  // CLOCK_REALTIME because PTP uses it.
+  clock_gettime(CLOCK_REALTIME, &tn);
+  // Widen the seconds field to 64 bits before scaling to avoid overflow.
+  uint64_t tnnsec = tn.tv_sec;
+  tnnsec = tnnsec * 1000000000;
+  uint64_t tnjnsec = tn.tv_nsec;
+  time_now_ns = tnnsec + tnjnsec;
+  return time_now_ns;
+}
+
+// Initialise the debug subsystem: record the startup time (the baseline for
+// the elapsed / relative timestamps) and set the verbosity level and the
+// message-preamble display options. level: 0 = quiet .. 3 = most verbose.
+void debug_init(int level, int show_elapsed_time, int show_relative_time, int show_file_and_line) {
+  ns_time_at_startup = debug_get_absolute_time_in_ns();
+  ns_time_at_last_debug_message = ns_time_at_startup;
+  debuglev = level;
+  debugger_show_elapsed_time = show_elapsed_time;
+  debugger_show_relative_time = show_relative_time;
+  debugger_show_file_and_line = show_file_and_line;
+}
+
+// Return the current verbosity level (0..3).
+// Fix: removed the stray semicolon after the closing brace -- an empty
+// top-level declaration is a constraint violation before C23 (-pedantic).
+int debug_level() {
+  return debuglev;
+}
+
+// Set the verbosity level directly. No range checking is performed here.
+void set_debug_level(int level) {
+  debuglev = level;
+}
+
+// Raise the verbosity by one, saturating at the maximum level (3).
+void increase_debug_level() {
+  if (debuglev < 3)
+    debuglev++;
+}
+
+// Lower the verbosity by one, saturating at the minimum level (0).
+void decrease_debug_level() {
+  if (debuglev > 0)
+    debuglev--;
+}
+
+// Getters / setters for the message-preamble display options.
+int get_show_elapsed_time() {
+  return debugger_show_elapsed_time;
+
+}
+void set_show_elapsed_time(int setting) {
+  debugger_show_elapsed_time = setting;
+}
+// NOTE(review): "timel" looks like a typo for "time", but the name matches
+// the declaration in debug.h, so it is kept for interface compatibility.
+int get_show_relative_timel() {
+  return debugger_show_relative_time;
+}
+
+void set_show_relative_time(int setting) {
+  debugger_show_relative_time = setting;
+}
+
+int get_show_file_and_line() {
+  return debugger_show_file_and_line;
+}
+
+void set_show_file_and_line(int setting) {
+  debugger_show_file_and_line = setting;
+}
+
+// Build the configurable message preamble (elapsed time, relative time,
+// "file:line", prefix) into buffer, returning a pointer to the byte after
+// the preamble, where the caller appends the message proper.
+// tss: seconds since startup; tsl: seconds since the previous message.
+// Fix: the remaining-space accounting previously subtracted
+// strlen(insertion_point) AFTER advancing insertion_point past the text just
+// written, i.e. it always subtracted zero. space_remaining therefore stayed
+// at buffer_length, so later snprintf calls could write past the end of
+// buffer. The length is now measured once, before advancing.
+char *generate_preliminary_string(char *buffer, size_t buffer_length, double tss, double tsl,
+                                  const char *filename, const int linenumber, const char *prefix) {
+  size_t space_remaining = buffer_length;
+  char *insertion_point = buffer;
+  size_t written;
+  if (debugger_show_elapsed_time) {
+    snprintf(insertion_point, space_remaining, "% 20.9f", tss);
+    written = strlen(insertion_point);
+    insertion_point += written;
+    space_remaining -= written;
+  }
+  if (debugger_show_relative_time) {
+    snprintf(insertion_point, space_remaining, "% 20.9f", tsl);
+    written = strlen(insertion_point);
+    insertion_point += written;
+    space_remaining -= written;
+  }
+  if (debugger_show_file_and_line) {
+    snprintf(insertion_point, space_remaining, " \"%s:%d\"", filename, linenumber);
+    written = strlen(insertion_point);
+    insertion_point += written;
+    space_remaining -= written;
+  }
+  if (prefix) {
+    snprintf(insertion_point, space_remaining, "%s", prefix);
+    written = strlen(insertion_point);
+    insertion_point += written;
+    space_remaining -= written;
+  }
+  return insertion_point;
+}
+
+// Print a fatal-error message to stderr and terminate the process with
+// EXIT_FAILURE. When the debug level is nonzero the message carries the
+// standard preamble (timestamps / file:line, as configured). Cancellation is
+// disabled while the message is assembled so the thread cannot be cancelled
+// while holding debug_timing_lock.
+void _die(const char *filename, const int linenumber, const char *format, ...) {
+  int oldState;
+  pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
+  char b[1024];
+  b[0] = 0;
+  char *s;
+  if (debuglev) {
+    // Timestamp bookkeeping is shared with the other message functions,
+    // so it is updated under debug_timing_lock.
+    pthread_mutex_lock(&debug_timing_lock);
+    uint64_t time_now = debug_get_absolute_time_in_ns();
+    uint64_t time_since_start = time_now - ns_time_at_startup;
+    uint64_t time_since_last_debug_message = time_now - ns_time_at_last_debug_message;
+    ns_time_at_last_debug_message = time_now;
+    pthread_mutex_unlock(&debug_timing_lock);
+    s = generate_preliminary_string(b, sizeof(b), 1.0 * time_since_start / 1000000000,
+                                    1.0 * time_since_last_debug_message / 1000000000, filename,
+                                    linenumber, " *fatal error: ");
+  } else {
+    strncpy(b, "fatal error: ", sizeof(b));
+    s = b + strlen(b);
+  }
+  va_list args;
+  va_start(args, format);
+  // Append the caller's message after the preamble, bounded by the space left.
+  vsnprintf(s, sizeof(b) - (s - b), format, args);
+  va_end(args);
+  // syslog(LOG_ERR, "%s", b);
+  fprintf(stderr, "%s\n", b);
+  pthread_setcancelstate(oldState, NULL);
+  exit(EXIT_FAILURE);
+}
+
+// Print a warning message to stderr, with the standard preamble when the
+// debug level is nonzero. Same structure as _die, but returns to the caller.
+void _warn(const char *filename, const int linenumber, const char *format, ...) {
+  int oldState;
+  pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
+  char b[1024];
+  b[0] = 0;
+  char *s;
+  if (debuglev) {
+    // Update the shared "time of last message" under the lock.
+    pthread_mutex_lock(&debug_timing_lock);
+    uint64_t time_now = debug_get_absolute_time_in_ns();
+    uint64_t time_since_start = time_now - ns_time_at_startup;
+    uint64_t time_since_last_debug_message = time_now - ns_time_at_last_debug_message;
+    ns_time_at_last_debug_message = time_now;
+    pthread_mutex_unlock(&debug_timing_lock);
+    s = generate_preliminary_string(b, sizeof(b), 1.0 * time_since_start / 1000000000,
+                                    1.0 * time_since_last_debug_message / 1000000000, filename,
+                                    linenumber, " *warning: ");
+  } else {
+    strncpy(b, "warning: ", sizeof(b));
+    s = b + strlen(b);
+  }
+  va_list args;
+  va_start(args, format);
+  vsnprintf(s, sizeof(b) - (s - b), format, args);
+  va_end(args);
+  // syslog(LOG_WARNING, "%s", b);
+  fprintf(stderr, "%s\n", b);
+  pthread_setcancelstate(oldState, NULL);
+}
+
+// Print a debug message to stderr if its level is at or below the current
+// verbosity; otherwise do nothing. The preamble (timestamps / file:line) is
+// always generated for emitted messages.
+void _debug(const char *filename, const int linenumber, int level, const char *format, ...) {
+  if (level > debuglev)
+    return;
+  int oldState;
+  pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
+  char b[1024];
+  b[0] = 0;
+  // Update the shared "time of last message" under the lock.
+  pthread_mutex_lock(&debug_timing_lock);
+  uint64_t time_now = debug_get_absolute_time_in_ns();
+  uint64_t time_since_start = time_now - ns_time_at_startup;
+  uint64_t time_since_last_debug_message = time_now - ns_time_at_last_debug_message;
+  ns_time_at_last_debug_message = time_now;
+  pthread_mutex_unlock(&debug_timing_lock);
+  char *s = generate_preliminary_string(b, sizeof(b), 1.0 * time_since_start / 1000000000,
+                                        1.0 * time_since_last_debug_message / 1000000000, filename,
+                                        linenumber, " ");
+  va_list args;
+  va_start(args, format);
+  vsnprintf(s, sizeof(b) - (s - b), format, args);
+  va_end(args);
+  // syslog(LOG_DEBUG, "%s", b);
+  fprintf(stderr, "%s\n", b);
+  pthread_setcancelstate(oldState, NULL);
+}
+
+// Print an informational message to stderr. The preamble is added only when
+// the debug level is nonzero; otherwise the bare message is printed.
+void _inform(const char *filename, const int linenumber, const char *format, ...) {
+  int oldState;
+  pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &oldState);
+  char b[1024];
+  b[0] = 0;
+  char *s;
+  if (debuglev) {
+    // Update the shared "time of last message" under the lock.
+    pthread_mutex_lock(&debug_timing_lock);
+    uint64_t time_now = debug_get_absolute_time_in_ns();
+    uint64_t time_since_start = time_now - ns_time_at_startup;
+    uint64_t time_since_last_debug_message = time_now - ns_time_at_last_debug_message;
+    ns_time_at_last_debug_message = time_now;
+    pthread_mutex_unlock(&debug_timing_lock);
+    s = generate_preliminary_string(b, sizeof(b), 1.0 * time_since_start / 1000000000,
+                                    1.0 * time_since_last_debug_message / 1000000000, filename,
+                                    linenumber, " ");
+  } else {
+    // No preamble: the message starts at the beginning of the buffer.
+    s = b;
+  }
+  va_list args;
+  va_start(args, format);
+  vsnprintf(s, sizeof(b) - (s - b), format, args);
+  va_end(args);
+  // syslog(LOG_INFO, "%s", b);
+  fprintf(stderr, "%s\n", b);
+  pthread_setcancelstate(oldState, NULL);
+}
+
+// Emit, at the given debug level, a hex dump of buf_len bytes starting at
+// vbuf, with a space every 4 bytes, " | " every 16 and " || " every 32.
+// Fix: the bytes are now read through an unsigned char pointer. Previously
+// a (possibly signed) char was passed to "%02X"; integer promotion
+// sign-extended bytes >= 0x80, and the 3-byte snprintf truncation then
+// printed the leading "FF" of the sign extension instead of the byte value.
+void _debug_print_buffer(const char *thefilename, const int linenumber, int level, void *vbuf,
+                         size_t buf_len) {
+  if (level > debuglev)
+    return;
+  unsigned char *buf = (unsigned char *)vbuf;
+  // Worst case is 2 hex digits plus a separator per byte, so 4 characters
+  // per byte (plus the terminating NUL) is a safe overestimate.
+  char *obf = malloc(buf_len * 4 + 1);
+  if (obf != NULL) {
+    char *obfp = obf;
+    size_t obfc;
+    for (obfc = 0; obfc < buf_len; obfc++) {
+      snprintf(obfp, 3, "%02X", buf[obfc]);
+      obfp += 2;
+      if (obfc != buf_len - 1) {
+        if (obfc % 32 == 31) {
+          snprintf(obfp, 5, " || ");
+          obfp += 4;
+        } else if (obfc % 16 == 15) {
+          snprintf(obfp, 4, " | ");
+          obfp += 3;
+        } else if (obfc % 4 == 3) {
+          snprintf(obfp, 2, " ");
+          obfp += 1;
+        }
+      }
+    }
+    *obfp = 0;
+    _debug(thefilename, linenumber, level, "%s", obf);
+    free(obf);
+  }
+}
\ No newline at end of file
--- /dev/null
+/*
+MIT License
+
+Copyright (c) 2023--2025 Mike Brady 4265913+mikebrady@users.noreply.github.com
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+*/
+
+#ifndef __DEBUG_H
+#define __DEBUG_H
+
+#include <stddef.h>
+
+#ifdef __cplusplus
+#define EXTERNC extern "C"
+#else
+#define EXTERNC
+#endif
+
+// A four-level debug-message utility (die / warn / inform / debug) that can
+// prefix each message with the source file and line, the total elapsed time
+// and the interval since the previous message.
+
+// level 0 is no messages, level 3 is most messages
+EXTERNC void debug_init(int level, int show_elapsed_time, int show_relative_time,
+                        int show_file_and_line);
+
+// Accessors for the level and display options. Declared with "(void)" so
+// they are true prototypes -- empty parentheses in C declare a function with
+// unspecified parameters, letting callers pass arguments unchecked.
+EXTERNC int debug_level(void);
+EXTERNC void set_debug_level(int level);
+EXTERNC void increase_debug_level(void);
+EXTERNC void decrease_debug_level(void);
+EXTERNC int get_show_elapsed_time(void);
+EXTERNC void set_show_elapsed_time(int setting);
+// NOTE(review): "timel" looks like a typo for "time"; the name matches the
+// definition in debug.c and is kept for interface compatibility.
+EXTERNC int get_show_relative_timel(void);
+EXTERNC void set_show_relative_time(int setting);
+EXTERNC int get_show_file_and_line(void);
+EXTERNC void set_show_file_and_line(int setting);
+
+#if defined(__GNUC__) || defined(__clang__)
+#define PRINTF_LIKE(fmt, args) __attribute__((format(printf, fmt, args)))
+#else
+#define PRINTF_LIKE(fmt, args)
+#endif
+
+// Implementation functions with printf-style format checking. Use the
+// convenience macros below, which supply __FILE__ and __LINE__.
+EXTERNC void _die(const char *filename, const int linenumber, const char *format, ...) PRINTF_LIKE(3, 4);
+EXTERNC void _warn(const char *filename, const int linenumber, const char *format, ...) PRINTF_LIKE(3, 4);
+EXTERNC void _inform(const char *filename, const int linenumber, const char *format, ...) PRINTF_LIKE(3, 4);
+EXTERNC void _debug(const char *filename, const int linenumber, int level, const char *format, ...) PRINTF_LIKE(4, 5);
+EXTERNC void _debug_print_buffer(const char *thefilename, const int linenumber, int level, void *buf,
+                                 size_t buf_len); // not printf-style, no change needed
+#define die(...) _die(__FILE__, __LINE__, __VA_ARGS__)
+#define debug(...) _debug(__FILE__, __LINE__, __VA_ARGS__)
+#define warn(...) _warn(__FILE__, __LINE__, __VA_ARGS__)
+#define inform(...) _inform(__FILE__, __LINE__, __VA_ARGS__)
+#define debug_print_buffer(...) _debug_print_buffer(__FILE__, __LINE__, __VA_ARGS__)
+
+#endif /* __DEBUG_H */
\ No newline at end of file
--- /dev/null
+/*
+ * 23-bit modular arithmetic. This file is part of Shairport Sync
+ * Copyright (c) Mike Brady 2025
+
+ * Modifications, including those associated with audio synchronization, multithreading and
+ * metadata handling copyright (c) Mike Brady 2014--2025
+ * All rights reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+ #include <stdint.h>
+ #include "mod23.h"
+
+#define MOD_23BIT 0x7FFFFF // 2^23 - 1
+
+// Assumes 'a' and 'b' are within 2^22 of each other
+// Return (a - b) interpreted in 23-bit modular arithmetic, as a signed
+// value in [-2^22, 2^22 - 1].
+// Assumes 'a' and 'b' are within 2^22 of each other
+int32_t a_minus_b_mod23(uint32_t a, uint32_t b) {
+  // Reduce both operands into the 23-bit ring, then subtract modulo 2^23.
+  uint32_t difference = ((a & MOD_23BIT) - (b & MOD_23BIT)) & MOD_23BIT;
+  // Sign-extend from 23 bits: bit 22 acts as the sign bit.
+  if (difference & 0x400000)
+    return (int32_t)(difference | 0xFF800000);
+  return (int32_t)difference;
+}
--- /dev/null
+#ifndef _MOD23_H
+#define _MOD23_H
+
+#include <stdint.h> // int32_t / uint32_t -- keeps the header self-contained
+
+// Return (a - b) interpreted in 23-bit modular arithmetic, as a signed value
+// in [-2^22, 2^22 - 1]. The operands are assumed to be within 2^22 of each
+// other.
+int32_t a_minus_b_mod23(uint32_t a, uint32_t b);
+
+#endif // _MOD23_H
--- /dev/null
+/*
+ * Network Utilities. This file is part of Shairport Sync.
+ * Copyright (c) Mike Brady 2014--2025
+ * All rights reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#include <errno.h>
+#include <string.h>
+#include "network_utilities.h"
+
+int eintr_checked_accept(int sockfd, struct sockaddr *addr,
+ socklen_t *addrlen) {
+ int response;
+ do {
+ response = accept(sockfd, addr, addrlen);
+
+ if (response == -1) {
+ char errorstring[1024];
+ strerror_r(errno, (char *)errorstring, sizeof(errorstring));
+ debug(1,
+ "error %d accept()ing a socketin ap2_event_receiver: \"%s\". (Note: error %d will be ignored.)",
+ errno, errorstring, EINTR);
+ }
+
+ } while((response == -1) && (errno == EINTR));
+ return response;
+}
\ No newline at end of file
--- /dev/null
+#ifndef _NETWORK_UTILITIES_H
+#define _NETWORK_UTILITIES_H
+
+#include <sys/socket.h>
+
+// NOTE(review): this defines away the C99 'restrict' keyword for every
+// translation unit that includes this header after this point -- presumably
+// to work around a header or compiler that mishandles 'restrict', but the
+// reason is not visible here. It is a global side effect; confirm it is
+// still needed and consider scoping or removing it.
+#define restrict
+
+// accept(2) wrapper that retries while the call fails with EINTR.
+int eintr_checked_accept(int sockfd, struct sockaddr *addr,
+                         socklen_t *addrlen);
+#endif // _NETWORK_UTILITIES_H
--- /dev/null
+/*
+ * Structured Buffer. This file is part of Shairport Sync
+ * Copyright (c) Mike Brady 2025
+
+ * Modifications, including those associated with audio synchronization, multithreading and
+ * metadata handling copyright (c) Mike Brady 2014--2025
+ * All rights reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#include <stdarg.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "debug.h"
+
+
+// NOTE(review): this typedef duplicates the one in structured_buffer.h; the
+// two must be kept in sync by hand because this file does not include its
+// own header. Consider including structured_buffer.h instead.
+typedef struct {
+  char *buf;       // storage; sbuf_new allocates buf_size + 1 bytes (spare byte for a NUL)
+  size_t buf_size; // usable capacity in bytes
+  size_t buf_pos;  // current occupancy / next write position
+} structured_buffer;
+
+// Allocate a structured_buffer with a payload capacity of size bytes.
+// One extra byte is allocated so the contents can always carry a NUL.
+// Returns NULL if either allocation fails.
+structured_buffer *sbuf_new(size_t size) {
+  structured_buffer *sb = calloc(1, sizeof(structured_buffer));
+  if (sb == NULL)
+    return NULL;
+  sb->buf = malloc(size + 1); // extra space for a possible NULL
+  if (sb->buf == NULL) {
+    free(sb);
+    return NULL;
+  }
+  sb->buf_size = size;
+  return sb;
+}
+
+// Reset the buffer to empty without releasing its storage.
+// Returns 0 on success, -1 if sbuf is NULL or unallocated.
+int sbuf_clear(structured_buffer *sbuf) {
+  if ((sbuf == NULL) || (sbuf->buf == NULL))
+    return -1;
+  sbuf->buf_pos = 0;
+  return 0;
+}
+
+// Release the buffer and its storage. A NULL sbuf is ignored.
+void sbuf_free(structured_buffer *sbuf) {
+  if (sbuf == NULL)
+    return;
+  free(sbuf->buf); // free(NULL) is a harmless no-op
+  free(sbuf);
+}
+
+// pthread_cleanup_push-compatible wrapper around sbuf_free; arg is a
+// structured_buffer *.
+void sbuf_cleanup(void *arg) {
+  structured_buffer *sbuf = (structured_buffer *)arg;
+  debug(3, "structured_buffer cleanup");
+  sbuf_free(sbuf);
+}
+
+// Append printf-formatted text at the buffer's current position; the text is
+// truncated if it does not fit. Returns the number of characters actually
+// appended, or -1 if sbuf is NULL or unallocated.
+// Fix: when the buffer was exactly full, vsnprintf was called with size 0,
+// which writes nothing (not even a NUL), and the subsequent strlen(p) read
+// whatever bytes happened to be at the end of the buffer. The full case is
+// now handled explicitly, and strlen is computed once instead of twice.
+int sbuf_printf(structured_buffer *sbuf, const char *format, ...) {
+  int response = -1;
+  if ((sbuf != NULL) && (sbuf->buf != NULL)) {
+    char *p = sbuf->buf + sbuf->buf_pos;
+    size_t space = sbuf->buf_size - sbuf->buf_pos;
+    size_t written = 0;
+    if (space > 0) {
+      va_list args;
+      va_start(args, format);
+      vsnprintf(p, space, format, args);
+      va_end(args);
+      // vsnprintf NUL-terminates within 'space', so strlen(p) is the number
+      // of characters actually placed in the buffer.
+      written = strlen(p);
+    }
+    sbuf->buf_pos = sbuf->buf_pos + written;
+    response = (int)written;
+  }
+  return response;
+}
+
+// Append plistStringLength raw bytes from plistString (which need not be
+// NUL-terminated) to the buffer. Returns 0 on success (including a zero-
+// length append) and -1 on bad arguments or when the data does not fit, in
+// which case nothing is appended and a debug message is emitted.
+// NOTE(review): the fit test uses '<' rather than '<=', so an append that
+// would exactly fill the buffer is rejected even though sbuf_new reserves a
+// spare byte -- confirm whether that is intentional.
+int sbuf_append(structured_buffer *sbuf, char *plistString, uint32_t plistStringLength) {
+  int response = -1;
+  if ((sbuf != NULL) && (sbuf->buf != NULL) && (plistString != NULL)) {
+    if (plistStringLength == 0) {
+      response = 0;
+    } else {
+      if (plistStringLength < (sbuf->buf_size - sbuf->buf_pos)) {
+        memcpy(sbuf->buf + sbuf->buf_pos, plistString, plistStringLength);
+        sbuf->buf_pos = sbuf->buf_pos + plistStringLength;
+        response = 0;
+      } else {
+        debug(1, "plist too large -- omitted");
+      }
+    }
+  }
+  return response;
+}
+
+// Report the buffer contents: *b receives a pointer to the data and *l its
+// length in bytes. Returns 0 on success; returns -1 (leaving the outputs
+// untouched) if sbuf is NULL or unallocated.
+int sbuf_buf_and_length(structured_buffer *sbuf, char **b, size_t *l) {
+  if ((sbuf == NULL) || (sbuf->buf == NULL))
+    return -1;
+  *b = sbuf->buf;
+  *l = sbuf->buf_pos;
+  return 0;
+}
--- /dev/null
+#ifndef _STRUCTURED_BUFFER_H
+#define _STRUCTURED_BUFFER_H
+
+// Make the header self-contained: size_t and uint32_t are used below.
+#include <stddef.h>
+#include <stdint.h>
+
+// A fixed-capacity byte/text buffer with an append position.
+typedef struct {
+  char *buf;       // storage (allocated with one spare byte for a NUL)
+  size_t buf_size; // usable capacity in bytes
+  size_t buf_pos;  // current occupancy / next write position
+} structured_buffer;
+
+structured_buffer *sbuf_new(size_t size);
+int sbuf_clear(structured_buffer *sbuf);
+void sbuf_free(structured_buffer *sbuf);
+// pthread cleanup handler wrapping sbuf_free; arg is a structured_buffer *.
+void sbuf_cleanup(void *arg);
+// printf-style append; returns characters appended, or -1 on bad arguments.
+int sbuf_printf(structured_buffer *sbuf, const char *format, ...);
+// Raw append of plistStringLength bytes; returns 0 on success, -1 otherwise.
+int sbuf_append(structured_buffer *sbuf, char *plistString, uint32_t plistStringLength);
+// Report the contents pointer and length; returns 0 on success, -1 otherwise.
+int sbuf_buf_and_length(structured_buffer *sbuf, char **b, size_t *l);
+
+#endif // _STRUCTURED_BUFFER_H
--- /dev/null
+{
+ ignore_unversioned_libs
+ Memcheck:Leak
+ ...
+ obj:*/lib*/lib*.so
+}
+{
+ ignore_versioned_libs
+ Memcheck:Leak
+ ...
+ obj:*/lib*/lib*.so.*
+}
+
--- /dev/null
+#! /bin/sh
+# echo "This is the git version checker"
+test -f gitversion-stamp && BGD=`cat gitversion-stamp` || :
+# echo "existing gitversion-stamp is $BGD"
+GD=`git describe --tags --dirty --broken --always 2>/dev/null`
+if [ x"$GD" = x ] ; then
+ GD="NA"
+fi
+# echo "current git description is $GD"
+printf "build: $GD\n" # displays git version
+if [ x"$GD" != x"$BGD" ] ; then
+ printf "$GD\n" > gitversion-stamp
+ printf "// Do not edit!\n" > gitversion.h
+ printf "// This file is automatically generated.\n" >> gitversion.h
+ printf " char git_version_string[] = \"" >> gitversion.h
+ ## the tr is because we need to remove the trailing newline
+ cat gitversion-stamp | tr -d '[[:space:]]' >> gitversion.h
+ printf "\";\n" >> gitversion.h
+fi
+
+# Usage. Below is what you would add to Makefile.am. When it runs, the
+# following two files are generated: 'gitversion-stamp' and 'gitversion.h'.
+
+# gitversion-stamp stores the most recently used git description.
+# gitversion.h is a C header file containing that git description as a string.
+
+# Put this script in the top level source folder and make sure it has execute permission.
+
+# These are the lines (remove the leading '#' on each) to add to the Makefile.am file:
+# ## Check if the git version information has changed and rebuild gitversion.h if so.
+# .PHONY: gitversion-check
+# gitversion-check:
+# $(top_srcdir)/check-gitversion
+
+# ## You may have to change from += to = below:
+# BUILT_SOURCES += gitversion-check
+# CLEANFILES += gitversion-stamp gitversion.h
--- /dev/null
+#! /bin/sh
+# args: first is the source (XML) file, second is the destination directory
+
+# Converts the XML file to a binary plist and embeds it as a byte array
+# defined in a generated .h / .c pair, for compilation into the program.
+# Fixes: added the missing shebang; quoted "$1" / "$2" and the generated
+# paths so names with spaces work; variables are now printf operands rather
+# than parts of the format string (a '%' in a filename would break printf).
+
+INPUT_FILE_NAME=`basename "$1"`
+BASE_FILENAME=${INPUT_FILE_NAME%.*}
+LABEL=`printf '%s' "_${BASE_FILENAME}_PLIST" | tr '[:lower:]' '[:upper:]' | tr '.' '_'`
+
+mkdir -p "$2"
+
+# Header: extern declarations for the byte array and its length.
+printf '// Please do not edit this file!\n' > "$2/$BASE_FILENAME.h"
+printf '// This file was automatically generated from %s.\n\n' "$INPUT_FILE_NAME" >> "$2/$BASE_FILENAME.h"
+printf '#ifndef %s\n' "$LABEL" >> "$2/$BASE_FILENAME.h"
+printf '#define %s\n\n' "$LABEL" >> "$2/$BASE_FILENAME.h"
+printf '#include <stddef.h>\n\n' >> "$2/$BASE_FILENAME.h"
+printf 'extern unsigned char %s_plist[];\n' "$BASE_FILENAME" >> "$2/$BASE_FILENAME.h"
+printf 'extern size_t %s_plist_len;\n\n' "$BASE_FILENAME" >> "$2/$BASE_FILENAME.h"
+printf '#endif // %s\n' "$LABEL" >> "$2/$BASE_FILENAME.h"
+
+# Source: the plist converted to a C byte array by plistutil + xxd.
+printf '// Please do not edit this file!\n' > "$2/$BASE_FILENAME.c"
+printf '// This file was automatically generated from %s.\n\n' "$INPUT_FILE_NAME" >> "$2/$BASE_FILENAME.c"
+printf '#include "%s.h"\n\n' "$BASE_FILENAME" >> "$2/$BASE_FILENAME.c"
+printf 'unsigned char %s_plist[] = {\n' "$BASE_FILENAME" >> "$2/$BASE_FILENAME.c"
+plistutil -i "$1" | xxd -i - >> "$2/$BASE_FILENAME.c"
+printf ' };\n\n' >> "$2/$BASE_FILENAME.c"
+printf 'size_t %s_plist_len = sizeof(%s_plist);\n' "$BASE_FILENAME" "$BASE_FILENAME" >> "$2/$BASE_FILENAME.c"