diff --git a/.dockerignore b/.dockerignore index 15191b977a..a6ccb41e66 100644 --- a/.dockerignore +++ b/.dockerignore @@ -4,8 +4,11 @@ # do not ignore .git, needed for versioning !/.git +# do not ignore .rstcheck.cfg, needed to test building docs +!/.rstcheck.cfg + # ignore repo directories and files -docs/ +docker/ gh-pages-template/ scripts/ tools/ @@ -13,6 +16,7 @@ crowdin.yml # ignore dev directories build/ +cmake-*/ venv/ # ignore artifacts diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 09a0aaeae6..e4a6419418 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -165,16 +165,12 @@ jobs: remove-android: 'true' remove-haskell: 'true' remove-codeql: 'true' - remove-docker-images: 'false' + remove-docker-images: 'true' - name: Checkout uses: actions/checkout@v4 - - - name: Checkout Flathub Shared Modules - uses: actions/checkout@v4 with: - repository: flathub/shared-modules - path: build/shared-modules + submodules: recursive - name: Setup Dependencies Linux Flatpak run: | @@ -185,8 +181,10 @@ jobs: cmake \ flatpak \ qemu-user-static + sudo su $(whoami) -c "flatpak --user remote-add --if-not-exists flathub \ https://flathub.org/repo/flathub.flatpakrepo" + sudo su $(whoami) -c "flatpak --user install -y flathub \ org.flatpak.Builder \ org.freedesktop.Platform/${{ matrix.arch }}/${PLATFORM_VERSION} \ @@ -291,61 +289,49 @@ jobs: remove-android: 'true' remove-haskell: 'true' remove-codeql: 'true' - remove-docker-images: 'false' + remove-docker-images: 'true' - name: Checkout uses: actions/checkout@v4 with: submodules: recursive + - name: Install wget + run: | + sudo apt-get update -y + sudo apt-get install -y \ + wget + + - name: Install CUDA + env: + CUDA_VERSION: 11.8.0 + CUDA_BUILD: 520.61.05 + timeout-minutes: 4 + run: | + url_base="https://developer.download.nvidia.com/compute/cuda/${CUDA_VERSION}/local_installers" + url="${url_base}/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux.run" + sudo wget -q -O /root/cuda.run ${url} + sudo chmod a+x /root/cuda.run + sudo /root/cuda.run --silent --toolkit --toolkitpath=/usr/local/cuda --no-opengl-libs --no-man-page --no-drm + sudo rm /root/cuda.run + - name: Setup Dependencies Linux + timeout-minutes: 5 run: | + # allow newer gcc sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y - if [[ ${{ matrix.dist }} == "18.04" ]]; then - # Ubuntu 18.04 packages - sudo add-apt-repository ppa:savoury1/boost-defaults-1.71 -y - - sudo apt-get update -y - sudo apt-get install -y \ - libboost-filesystem1.71-dev \ - libboost-locale1.71-dev \ - libboost-log1.71-dev \ - libboost-regex1.71-dev \ - libboost-program-options1.71-dev - - # Install cmake - wget https://cmake.org/files/v3.22/cmake-3.22.2-linux-x86_64.sh - chmod +x cmake-3.22.2-linux-x86_64.sh - mkdir /opt/cmake - ./cmake-3.22.2-linux-x86_64.sh --prefix=/opt/cmake --skip-license - ln --force --symbolic /opt/cmake/bin/cmake /usr/local/bin/cmake - cmake --version - - # install newer tar from focal... 
appimagelint fails on 18.04 without this - echo "original tar version" - tar --version - wget -O tar.deb http://security.ubuntu.com/ubuntu/pool/main/t/tar/tar_1.30+dfsg-7ubuntu0.20.04.3_amd64.deb - sudo apt-get -y install -f ./tar.deb - echo "new tar version" - tar --version - else - # Ubuntu 20.04+ packages - sudo apt-get update -y - sudo apt-get install -y \ - cmake \ - libboost-filesystem-dev \ - libboost-locale-dev \ - libboost-log-dev \ - libboost-program-options-dev - fi - sudo apt-get install -y \ build-essential \ + cmake \ gcc-10 \ g++-10 \ libayatana-appindicator3-dev \ libavdevice-dev \ + libboost-filesystem-dev \ + libboost-locale-dev \ + libboost-log-dev \ + libboost-program-options-dev \ libcap-dev \ libcurl4-openssl-dev \ libdrm-dev \ @@ -367,7 +353,7 @@ jobs: libxfixes-dev \ libxrandr-dev \ libxtst-dev \ - wget + python3 # clean apt cache sudo apt-get clean @@ -382,20 +368,21 @@ jobs: --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \ --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10 - # Install CUDA - sudo wget \ - https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run \ - --progress=bar:force:noscroll -q --show-progress -O /root/cuda.run - sudo chmod a+x /root/cuda.run - sudo /root/cuda.run --silent --toolkit --toolkitpath=/usr --no-opengl-libs --no-man-page --no-drm - sudo rm /root/cuda.run + - name: Setup python + id: python + uses: actions/setup-python@v5 + with: + python-version: '3.11' - name: Build Linux env: BRANCH: ${{ github.head_ref || github.ref_name }} BUILD_VERSION: ${{ needs.check_changelog.outputs.next_version_bare }} COMMIT: ${{ github.event.pull_request.head.sha || github.sha }} + timeout-minutes: 5 run: | + echo "nproc: $(nproc)" + mkdir -p build mkdir -p artifacts @@ -403,6 +390,7 @@ jobs: cmake \ -DBUILD_WERROR=ON \ -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_CUDA_COMPILER:PATH=/usr/local/cuda/bin/nvcc \ -DCMAKE_INSTALL_PREFIX=/usr \ -DSUNSHINE_ASSETS_DIR=share/sunshine \ -DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \ @@ -412,20 +400,7 @@ jobs: -DSUNSHINE_ENABLE_CUDA=ON \ ${{ matrix.EXTRA_ARGS }} \ .. 
- make -j ${nproc} - - - name: Package Linux - CPACK - # todo - this is no longer used - if: ${{ matrix.type == 'cpack' }} - working-directory: build - run: | - cpack -G DEB - mv ./cpack_artifacts/Sunshine.deb ../artifacts/sunshine-${{ matrix.dist }}.deb - - if [[ ${{ matrix.dist }} == "20.04" ]]; then - cpack -G RPM - mv ./cpack_artifacts/Sunshine.rpm ../artifacts/sunshine.rpm - fi + make -j $(expr $(nproc) - 1) # use all but one core - name: Set AppImage Version if: | @@ -452,12 +427,12 @@ jobs: # AppImage # https://docs.appimage.org/packaging-guide/index.html - wget https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage + wget -q https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage chmod +x linuxdeploy-x86_64.AppImage # https://github.com/linuxdeploy/linuxdeploy-plugin-gtk sudo apt-get install libgtk-3-dev librsvg2-dev -y - wget https://raw.githubusercontent.com/linuxdeploy/linuxdeploy-plugin-gtk/master/linuxdeploy-plugin-gtk.sh + wget -q https://raw.githubusercontent.com/linuxdeploy/linuxdeploy-plugin-gtk/master/linuxdeploy-plugin-gtk.sh chmod +x linuxdeploy-plugin-gtk.sh export DEPLOY_GTK_VERSION=3 @@ -475,14 +450,17 @@ jobs: # permissions chmod +x ../artifacts/sunshine.AppImage + - name: Delete cuda + # free up space on the runner + run: | + sudo rm -rf /usr/local/cuda + - name: Verify AppImage if: ${{ matrix.type == 'AppImage' }} run: | wget https://github.com/TheAssassin/appimagelint/releases/download/continuous/appimagelint-x86_64.AppImage chmod +x appimagelint-x86_64.AppImage - # rm -rf ~/.cache/appimagelint/ - ./appimagelint-x86_64.AppImage ./artifacts/sunshine.AppImage - name: Upload Artifacts @@ -491,6 +469,50 @@ jobs: name: sunshine-linux-${{ matrix.type }}-${{ matrix.dist }} path: artifacts/ + - name: Install test deps + run: | + sudo apt-get update -y + sudo apt-get install -y \ + doxygen \ + graphviz \ + python3-venv \ + x11-xserver-utils \ + xvfb + + # clean apt cache + sudo apt-get clean + sudo rm -rf /var/lib/apt/lists/* + + - name: Run tests + id: test + working-directory: build/tests + run: | + export DISPLAY=:1 + Xvfb ${DISPLAY} -screen 0 1024x768x24 & + + ./test_sunshine --gtest_color=yes + + - name: Generate gcov report + # any except canceled or skipped + if: always() && (steps.test.outcome == 'success' || steps.test.outcome == 'failure') + id: test_report + working-directory: build + run: | + ${{ steps.python.outputs.python-path }} -m pip install gcovr + ${{ steps.python.outputs.python-path }} -m gcovr -r .. 
\ + --exclude ../tests/ \ + --exclude ../third-party/ \ + --xml-pretty \ + -o coverage.xml + + - name: Upload coverage + # any except canceled or skipped + if: always() && (steps.test_report.outcome == 'success') + uses: codecov/codecov-action@v3 + with: + files: ./build/coverage.xml + flags: ${{ runner.os }} + - name: Create/Update GitHub Release if: ${{ needs.setup_release.outputs.create_release == 'true' }} uses: ncipollo/release-action@v1 @@ -540,10 +562,12 @@ jobs: echo "This is a PUSH event" clone_url=${{ github.event.repository.clone_url }} branch="${{ github.ref_name }}" + default_branch="${{ github.event.repository.default_branch }}" else echo "This is a PR event" clone_url=${{ github.event.pull_request.head.repo.clone_url }} branch="${{ github.event.pull_request.head.ref }}" + default_branch="${{ github.event.pull_request.head.repo.default_branch }}" fi echo "Branch: ${branch}" echo "Clone URL: ${clone_url}" @@ -553,6 +577,7 @@ jobs: cmake \ -DGITHUB_BRANCH="${branch}" \ -DGITHUB_CLONE_URL="${clone_url}" \ + -DGITHUB_DEFAULT_BRANCH="${default_branch}" \ -DSUNSHINE_CONFIGURE_HOMEBREW=ON \ -DSUNSHINE_CONFIGURE_ONLY=ON \ .. @@ -633,6 +658,12 @@ jobs: # install dependencies using homebrew brew install cmake + - name: Setup python + id: python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: Configure Portfile run: | # variables for Portfile @@ -690,6 +721,7 @@ jobs: - name: Build port env: subportlist: ${{ steps.subportlist.outputs.subportlist }} + id: build run: | subport="Sunshine" @@ -711,6 +743,13 @@ jobs: "$subport" echo "::endgroup::" + - name: Build Logs + if: always() + run: | + logfile="/opt/local/var/macports/logs/_Users_runner_work_Sunshine_Sunshine_ports_multimedia_Sunshine/Sunshine/main.log" + cat "$logfile" + sudo mv "${logfile}" "${logfile}.bak" + - name: Upload Artifacts if: ${{ matrix.release }} uses: actions/upload-artifact@v4 @@ -718,6 +757,85 @@ jobs: name: sunshine-macports path: artifacts/ + - name: Fix screen capture permissions + if: ${{ matrix.os_version != 12 }} # macOS-12 is okay + # can be removed if the following is fixed in the runner image + # https://github.com/actions/runner-images/issues/9529 + # https://github.com/actions/runner-images/pull/9530 + run: | + # https://apple.stackexchange.com/questions/362865/macos-list-apps-authorized-for-full-disk-access + + # permissions for screen capture + values="'kTCCServiceScreenCapture','/opt/off/opt/runner/provisioner/provisioner',1,2,4,1,NULL,NULL,0,'UNUSED',NULL,0,1687786159" + if [[ "${{ matrix.os_version }}" == "14" ]]; then + # TCC access table in Sonoma has extra 4 columns: pid, pid_version, boot_uuid, last_reminded + values="${values},NULL,NULL,'UNUSED',${values##*,}" + fi + + # system and user databases + dbPaths=( + "/Library/Application Support/com.apple.TCC/TCC.db" + "$HOME/Library/Application Support/com.apple.TCC/TCC.db" + ) + + sqlQuery="INSERT OR IGNORE INTO access VALUES($values);" + + for dbPath in "${dbPaths[@]}"; do + echo "Column names for $dbPath" + echo "-------------------" + sudo sqlite3 "$dbPath" "PRAGMA table_info(access);" + echo "Current permissions for $dbPath" + echo "-------------------" + sudo sqlite3 "$dbPath" "SELECT * FROM access WHERE service='kTCCServiceScreenCapture';" + sudo sqlite3 "$dbPath" "$sqlQuery" + echo "Updated permissions for $dbPath" + echo "-------------------" + sudo sqlite3 "$dbPath" "SELECT * FROM access WHERE service='kTCCServiceScreenCapture';" + done + + - name: Run tests + id: test + timeout-minutes: 10 + run: | + sudo port 
test "Sunshine" + + - name: Test Logs + if: always() + run: | + logfile="/opt/local/var/macports/logs/_Users_runner_work_Sunshine_Sunshine_ports_multimedia_Sunshine/Sunshine/main.log" + cat "$logfile" + + - name: Generate gcov report + # any except canceled or skipped + if: always() && (steps.test.outcome == 'success' || steps.test.outcome == 'failure') + id: test_report + working-directory: + /opt/local/var/macports/build/_Users_runner_work_Sunshine_Sunshine_ports_multimedia_Sunshine/Sunshine/work + run: | + base_dir=$(pwd) + build_dir=${base_dir}/build + + # get the directory name that starts with Sunshine-* + dir=$(ls -d Sunshine-*) + + cd ${build_dir} + ${{ steps.python.outputs.python-path }} -m pip install gcovr + sudo ${{ steps.python.outputs.python-path }} -m gcovr -r ../${dir} \ + --exclude ../${dir}/tests/ \ + --exclude ../${dir}/third-party/ \ + --gcov-object-directory $(pwd) \ + --verbose \ + --xml-pretty \ + -o ${{ github.workspace }}/build/coverage.xml + + - name: Upload coverage + # any except canceled or skipped + if: always() && (steps.test_report.outcome == 'success') + uses: codecov/codecov-action@v3 + with: + files: ./build/coverage.xml + flags: ${{ runner.os }}-${{ matrix.os_version }} + - name: Create/Update GitHub Release if: ${{ needs.setup_release.outputs.create_release == 'true' && matrix.release }} uses: ncipollo/release-action@v1 @@ -743,6 +861,110 @@ jobs: with: submodules: recursive + - name: Prepare tests + id: prepare-tests + if: false # todo: DirectX11 is not available, so even software encoder fails + run: | + # function to download and extract a zip file + function DownloadAndExtract { + param ( + [string]$Uri, + [string]$OutFile + ) + + $maxRetries = 5 + $retryCount = 0 + $success = $false + + while (-not $success -and $retryCount -lt $maxRetries) { + $retryCount++ + Write-Host "Downloading $Uri to $OutFile, attempt $retryCount of $maxRetries" + try { + Invoke-WebRequest -Uri $Uri -OutFile $OutFile + $success = $true + } catch { + Write-Host "Attempt $retryCount of $maxRetries failed with error: $($_.Exception.Message). Retrying..." + Start-Sleep -Seconds 5 + } + } + + if (-not $success) { + Write-Host "Failed to download the file after $maxRetries attempts." + exit 1 + } + + # use .NET to get the base name of the file + $baseName = (Get-Item $OutFile).BaseName + + # Extract the zip file + Expand-Archive -Path $OutFile -DestinationPath $baseName + } + + # virtual display driver + DownloadAndExtract ` + -Uri "https://www.amyuni.com/downloads/usbmmidd_v2.zip" ` + -OutFile "usbmmidd_v2.zip" + + # install + Set-Location -Path usbmmidd_v2/usbmmidd_v2 + ./deviceinstaller64 install usbmmidd.inf usbmmidd + + # create the virtual display + ./deviceinstaller64 enableidd 1 + + # move up a directory + Set-Location -Path ../.. 
+ + # install devcon + DownloadAndExtract ` + -Uri "https://github.com/Drawbackz/DevCon-Installer/releases/download/1.4-rc/Devcon.Installer.zip" ` + -OutFile "Devcon.Installer.zip" + Set-Location -Path Devcon.Installer + # hash needs to match OS version + # https://github.com/Drawbackz/DevCon-Installer/blob/master/devcon_sources.json + Start-Process -FilePath "./Devcon Installer.exe" -Wait -ArgumentList ` + 'install', ` + '-hash', '54004C83EE34F6A55380528A8B29F4C400E61FBB947A19E0AB9E5A193D7D961E', ` + '-addpath', ` + '-update', ` + '-dir', 'C:\Windows\System32' + + # disable Hyper-V Video + # https://stackoverflow.com/a/59490940 + C:\Windows\System32\devcon.exe disable "VMBUS\{da0a7802-e377-4aac-8e77-0558eb1073f8}" + + # move up a directory + Set-Location -Path .. + + # multi monitor tool + DownloadAndExtract ` + -Uri "http://www.nirsoft.net/utils/multimonitortool-x64.zip" ` + -OutFile "multimonitortool.zip" + + # enable the virtual display + # http://www.nirsoft.net/utils/multi_monitor_tool.html + Set-Location -Path multimonitortool + + # Original Hyper-V is \\.\DISPLAY1, it will recreate itself as \\.\DISPLAY6 (or something higher than 2) + # USB Mobile Monitor Virtual Display is \\.\DISPLAY2 + + # these don't seem to work if not using runAs + # todo: do they work if not using runAs? + Start-Process powershell -Verb runAs -ArgumentList '-Command ./MultiMonitorTool.exe /enable \\.\DISPLAY2' + Start-Process powershell -Verb runAs -ArgumentList '-Command ./MultiMonitorTool.exe /SetPrimary \\.\DISPLAY2' + + # wait a few seconds + Start-Sleep -s 5 + + # list monitors + ./MultiMonitorTool.exe /stext monitor_list.txt + + # wait a few seconds + Start-Sleep -s 5 + + # print the monitor list + Get-Content -Path monitor_list.txt + - name: Setup Dependencies Windows uses: msys2/setup-msys2@v2 with: @@ -750,12 +972,14 @@ jobs: install: >- base-devel diffutils + doxygen git make mingw-w64-x86_64-binutils mingw-w64-x86_64-boost mingw-w64-x86_64-cmake mingw-w64-x86_64-curl + mingw-w64-x86_64-graphviz mingw-w64-x86_64-miniupnpc mingw-w64-x86_64-nlohmann-json mingw-w64-x86_64-nodejs @@ -768,6 +992,24 @@ jobs: wget yasm + - name: Setup python + # use this instead of msys2 python due to known issues using wheels, https://www.msys2.org/docs/python/ + id: setup-python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Python Path + id: python-path + shell: msys2 {0} + run: | + # replace backslashes with double backslashes + python_path=$(echo "${{ steps.setup-python.outputs.python-path }}" | sed 's/\\/\\\\/g') + + # step output + echo "python-path=${python_path}" + echo "python-path=${python_path}" >> $GITHUB_OUTPUT + - name: Build Windows shell: msys2 {0} env: @@ -781,6 +1023,8 @@ jobs: -DBUILD_WERROR=ON \ -DCMAKE_BUILD_TYPE=RelWithDebInfo \ -DSUNSHINE_ASSETS_DIR=assets \ + -DTESTS_PYTHON_EXECUTABLE='${{ steps.python-path.outputs.python-path }}' \ + -DTESTS_SOFTWARE_ENCODER_UNAVAILABLE='skip' \ -G "MinGW Makefiles" \ .. 
mingw32-make -j$(nproc) @@ -799,6 +1043,35 @@ jobs: mv ./cpack_artifacts/Sunshine.exe ../artifacts/sunshine-windows-installer.exe mv ./cpack_artifacts/Sunshine.zip ../artifacts/sunshine-windows-portable.zip + - name: Run tests + id: test + shell: msys2 {0} + working-directory: build/tests + run: | + ./test_sunshine.exe --gtest_color=yes + + - name: Generate gcov report + # any except canceled or skipped + if: always() && (steps.test.outcome == 'success' || steps.test.outcome == 'failure') + id: test_report + shell: msys2 {0} + working-directory: build + run: | + ${{ steps.python-path.outputs.python-path }} -m pip install gcovr + ${{ steps.python-path.outputs.python-path }} -m gcovr -r .. \ + --exclude ../tests/ \ + --exclude ../third-party/ \ + --xml-pretty \ + -o coverage.xml + + - name: Upload coverage + # any except canceled or skipped + if: always() && (steps.test_report.outcome == 'success') + uses: codecov/codecov-action@v3 + with: + files: ./build/coverage.xml + flags: ${{ runner.os }} + - name: Package Windows Debug Info working-directory: build run: | diff --git a/.gitmodules b/.gitmodules index a4231d16f7..4efc01425b 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,7 +1,19 @@ +[submodule "packaging/linux/flatpak/deps/org.flatpak.Builder.BaseApp"] + path = packaging/linux/flatpak/deps/org.flatpak.Builder.BaseApp + url = https://github.com/flathub/org.flatpak.Builder.BaseApp + branch = branch/23.08 +[submodule "packaging/linux/flatpak/deps/shared-modules"] + path = packaging/linux/flatpak/deps/shared-modules + url = https://github.com/flathub/shared-modules + branch = master [submodule "third-party/build-deps"] path = third-party/build-deps url = https://github.com/LizardByte/build-deps.git branch = dist +[submodule "third-party/googletest"] + path = third-party/googletest + url = https://github.com/google/googletest/ + branch = v1.14.x [submodule "third-party/moonlight-common-c"] path = third-party/moonlight-common-c url = https://github.com/moonlight-stream/moonlight-common-c.git diff --git a/CHANGELOG.md b/CHANGELOG.md index d4fbff3a4f..0ca2b370d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,37 @@ # Changelog +## [0.23.0] - 2024-04-06 +Attention, this release contains critical security fixes. Please update as soon as possible. + +**Breaking** +- (Linux) Drop support for Ubuntu 20.04 +- (Linux) No longer provide arm64 rpm packages, due to extreme compile time on GitHub hosted runners + +**Fixed** +- (Network) Ensure unpairing takes effect without restart +- (Capture/Linux) Fix logical comparison of texture size +- (Service/Windows) Quote the path to sunshinesvc.exe when launching the termination helper + +**Added** +- (WebUI) Localization support +- (Capture/Linux) Populate host latency for kmx/x11 grab +- (Capture/Windows) AMF rate control improvements +- (Linux) Add support for Ubuntu 24.04 (x86_64 only) + +**Dependencies** +- Bump rstcheck from 6.2.0 to 6.2.1 +- Bump org.flatpak.Builder.BaseApp from 644487f to 6e295e6 +- Bump ffmpeg +- Bump @fortawesome/fontawesome-free from 6.5.1 to 6.5.2 + +**Misc** +- (Style) Refactored video encoder declarations +- (CI) Refactored Linux build in CI +- (CI) Added unit testing and code coverage +- (Docs/macOS) Update curl command for Portfile install +- (Style) Refactor logging initialization + + ## [0.22.2] - 2024-03-15 **Fixed** - (Tray/Windows) Fix broken system tray icon on some systems @@ -766,3 +798,4 @@ settings. 
In v0.17.0, games now run under your user account without elevated pri [0.22.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.22.0 [0.22.1]: https://github.com/LizardByte/Sunshine/releases/tag/v0.22.1 [0.22.2]: https://github.com/LizardByte/Sunshine/releases/tag/v0.22.2 +[0.23.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.23.0 diff --git a/CMakeLists.txt b/CMakeLists.txt index ebff395abb..180a9911bd 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,9 +1,10 @@ cmake_minimum_required(VERSION 3.18) # `CMAKE_CUDA_ARCHITECTURES` requires 3.18 +# set_source_files_properties requires 3.18 # todo - set this conditionally # todo - set version to 0.0.0 once confident in automated versioning -project(Sunshine VERSION 0.22.2 +project(Sunshine VERSION 0.23.0 DESCRIPTION "Self-hosted game stream host for Moonlight" HOMEPAGE_URL "https://app.lizardbyte.dev/Sunshine") @@ -28,6 +29,9 @@ include(${CMAKE_MODULE_PATH}/prep/build_version.cmake) # cmake build flags include(${CMAKE_MODULE_PATH}/prep/options.cmake) +# initial prep +include(${CMAKE_MODULE_PATH}/prep/init.cmake) + # configure special package files, such as sunshine.desktop, Flatpak manifest, Portfile , etc. include(${CMAKE_MODULE_PATH}/prep/special_package_configuration.cmake) diff --git a/README.rst b/README.rst index 11508d976e..5ea9001484 100644 --- a/README.rst +++ b/README.rst @@ -17,69 +17,48 @@ System Requirements **Minimum Requirements** -+------------+------------------------------------------------------------+ -| GPU | AMD: VCE 1.0 or higher, see `obs-amd hardware support`_ | -| +------------------------------------------------------------+ -| | Intel: VAAPI-compatible, see: `VAAPI hardware support`_ | -| +------------------------------------------------------------+ -| | Nvidia: NVENC enabled cards, see `nvenc support matrix`_ | -+------------+------------------------------------------------------------+ -| CPU | AMD: Ryzen 3 or higher | -| +------------------------------------------------------------+ -| | Intel: Core i3 or higher | -+------------+------------------------------------------------------------+ -| RAM | 4GB or more | -+------------+------------------------------------------------------------+ -| OS | Windows: 10+ (Windows Server not supported) | -| +------------------------------------------------------------+ -| | macOS: 12+ | -| +------------------------------------------------------------+ -| | Linux/Debian: 11 (bullseye) | -| +------------------------------------------------------------+ -| | Linux/Fedora: 38+ | -| +------------------------------------------------------------+ -| | Linux/Ubuntu: 20.04+ (focal) | -+------------+------------------------------------------------------------+ -| Network | Host: 5GHz, 802.11ac | -| +------------------------------------------------------------+ -| | Client: 5GHz, 802.11ac | -+------------+------------------------------------------------------------+ +.. 
csv-table:: + :widths: 15, 60 + + "GPU", "AMD: VCE 1.0 or higher, see: `obs-amd hardware support `_" + "", "Intel: VAAPI-compatible, see: `VAAPI hardware support `_" + "", "Nvidia: NVENC enabled cards, see: `nvenc support matrix `_" + "CPU", "AMD: Ryzen 3 or higher" + "", "Intel: Core i3 or higher" + "RAM", "4GB or more" + "OS", "Windows: 10+ (Windows Server does not support virtual gamepads)" + "", "macOS: 12+" + "", "Linux/Debian: 11 (bullseye)" + "", "Linux/Fedora: 38+" + "", "Linux/Ubuntu: 22.04+ (jammy)" + "Network", "Host: 5GHz, 802.11ac" + "", "Client: 5GHz, 802.11ac" **4k Suggestions** -+------------+------------------------------------------------------------+ -| GPU | AMD: Video Coding Engine 3.1 or higher | -| +------------------------------------------------------------+ -| | Intel: HD Graphics 510 or higher | -| +------------------------------------------------------------+ -| | Nvidia: GeForce GTX 1080 or higher | -+------------+------------------------------------------------------------+ -| CPU | AMD: Ryzen 5 or higher | -| +------------------------------------------------------------+ -| | Intel: Core i5 or higher | -+------------+------------------------------------------------------------+ -| Network | Host: CAT5e ethernet or better | -| +------------------------------------------------------------+ -| | Client: CAT5e ethernet or better | -+------------+------------------------------------------------------------+ +.. csv-table:: + :widths: 15, 60 + + "GPU", "AMD: Video Coding Engine 3.1 or higher" + "", "Intel: HD Graphics 510 or higher" + "", "Nvidia: GeForce GTX 1080 or higher" + "CPU", "AMD: Ryzen 5 or higher" + "", "Intel: Core i5 or higher" + "Network", "Host: CAT5e ethernet or better" + "", "Client: CAT5e ethernet or better" **HDR Suggestions** -+------------+------------------------------------------------------------+ -| GPU | AMD: Video Coding Engine 3.4 or higher | -| +------------------------------------------------------------+ -| | Intel: UHD Graphics 730 or higher | -| +------------------------------------------------------------+ -| | Nvidia: Pascal-based GPU (GTX 10-series) or higher | -+------------+------------------------------------------------------------+ -| CPU | AMD: todo | -| +------------------------------------------------------------+ -| | Intel: todo | -+------------+------------------------------------------------------------+ -| Network | Host: CAT5e ethernet or better | -| +------------------------------------------------------------+ -| | Client: CAT5e ethernet or better | -+------------+------------------------------------------------------------+ +.. csv-table:: + :widths: 15, 60 + + "GPU", "AMD: Video Coding Engine 3.4 or higher" + "", "Intel: UHD Graphics 730 or higher" + "", "Nvidia: Pascal-based GPU (GTX 10-series) or higher" + "CPU", "AMD: todo" + "", "Intel: todo" + "Network", "Host: CAT5e ethernet or better" + "", "Client: CAT5e ethernet or better" Integrations ------------ @@ -96,6 +75,10 @@ Integrations :alt: Read the Docs :target: http://sunshinestream.readthedocs.io/ +.. image:: https://img.shields.io/codecov/c/gh/LizardByte/Sunshine?token=SMGXQ5NVMJ&style=for-the-badge&logo=codecov&label=codecov + :alt: Codecov + :target: https://codecov.io/gh/LizardByte/Sunshine + Support ------- @@ -122,7 +105,3 @@ Stats .. image:: https://img.shields.io/github/stars/lizardbyte/sunshine.svg?logo=github&style=for-the-badge :alt: GitHub stars :target: https://github.com/LizardByte/Sunshine - -.. 
_nvenc support matrix: https://developer.nvidia.com/video-encode-and-decode-gpu-support-matrix-new -.. _obs-amd hardware support: https://github.com/obsproject/obs-amd-encoder/wiki/Hardware-Support -.. _VAAPI hardware support: https://www.intel.com/content/www/us/en/developer/articles/technical/linuxmedia-vaapi.html diff --git a/cmake/compile_definitions/common.cmake b/cmake/compile_definitions/common.cmake index 646e1298e4..6e03829584 100644 --- a/cmake/compile_definitions/common.cmake +++ b/cmake/compile_definitions/common.cmake @@ -110,11 +110,6 @@ set(SUNSHINE_TARGET_FILES "${CMAKE_SOURCE_DIR}/src/stat_trackers.cpp" ${PLATFORM_TARGET_FILES}) -set_source_files_properties("${CMAKE_SOURCE_DIR}/src/upnp.cpp" PROPERTIES COMPILE_FLAGS -Wno-pedantic) - -set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/nanors/rs.c" - PROPERTIES COMPILE_FLAGS "-include deps/obl/autoshim.h -ftree-vectorize") - if(NOT SUNSHINE_ASSETS_DIR_DEF) set(SUNSHINE_ASSETS_DIR_DEF "${SUNSHINE_ASSETS_DIR}") endif() @@ -134,15 +129,6 @@ include_directories( ${PLATFORM_INCLUDE_DIRS} ) -string(TOUPPER "x${CMAKE_BUILD_TYPE}" BUILD_TYPE) -if("${BUILD_TYPE}" STREQUAL "XDEBUG") - if(WIN32) - set_source_files_properties("${CMAKE_SOURCE_DIR}/src/nvhttp.cpp" PROPERTIES COMPILE_FLAGS -O2) - endif() -else() - add_definitions(-DNDEBUG) -endif() - list(APPEND SUNSHINE_EXTERNAL_LIBRARIES ${MINIUPNP_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} diff --git a/cmake/compile_definitions/linux.cmake b/cmake/compile_definitions/linux.cmake index b6d1990a24..0b54559005 100644 --- a/cmake/compile_definitions/linux.cmake +++ b/cmake/compile_definitions/linux.cmake @@ -8,10 +8,6 @@ if(${SUNSHINE_BUILD_APPIMAGE}) string(REPLACE "${CMAKE_INSTALL_PREFIX}" ".${CMAKE_INSTALL_PREFIX}" SUNSHINE_ASSETS_DIR_DEF ${SUNSHINE_ASSETS_DIR}) endif() -if(NOT DEFINED SUNSHINE_EXECUTABLE_PATH) - set(SUNSHINE_EXECUTABLE_PATH "sunshine") -endif() - # cuda set(CUDA_FOUND OFF) if(${SUNSHINE_ENABLE_CUDA}) diff --git a/cmake/compile_definitions/windows.cmake b/cmake/compile_definitions/windows.cmake index 699a84c184..26e89a20b8 100644 --- a/cmake/compile_definitions/windows.cmake +++ b/cmake/compile_definitions/windows.cmake @@ -29,16 +29,6 @@ file(GLOB NVPREFS_FILES CONFIGURE_DEPENDS # vigem include_directories(SYSTEM "${CMAKE_SOURCE_DIR}/third-party/ViGEmClient/include") -set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/ViGEmClient/src/ViGEmClient.cpp" - PROPERTIES COMPILE_DEFINITIONS "UNICODE=1;ERROR_INVALID_DEVICE_OBJECT_PARAMETER=650") -set(VIGEM_COMPILE_FLAGS "") -string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unknown-pragmas ") -string(APPEND VIGEM_COMPILE_FLAGS "-Wno-misleading-indentation ") -string(APPEND VIGEM_COMPILE_FLAGS "-Wno-class-memaccess ") -string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unused-function ") -string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unused-variable ") -set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/ViGEmClient/src/ViGEmClient.cpp" - PROPERTIES COMPILE_FLAGS ${VIGEM_COMPILE_FLAGS}) # sunshine icon if(NOT DEFINED SUNSHINE_ICON_PATH) diff --git a/cmake/packaging/common.cmake b/cmake/packaging/common.cmake index ad3f9bc068..c7c5b3a5cc 100644 --- a/cmake/packaging/common.cmake +++ b/cmake/packaging/common.cmake @@ -12,10 +12,18 @@ set(CPACK_PACKAGE_ICON ${PROJECT_SOURCE_DIR}/sunshine.png) set(CPACK_PACKAGE_FILE_NAME "${CMAKE_PROJECT_NAME}") set(CPACK_STRIP_FILES YES) -#install common assets +# install common assets install(DIRECTORY "${SUNSHINE_SOURCE_ASSETS_DIR}/common/assets/" DESTINATION "${SUNSHINE_ASSETS_DIR}" PATTERN "web" 
EXCLUDE) +# copy assets to build directory, for running without install +file(GLOB_RECURSE ALL_ASSETS + RELATIVE "${SUNSHINE_SOURCE_ASSETS_DIR}/common/assets/" "${SUNSHINE_SOURCE_ASSETS_DIR}/common/assets/*") +list(FILTER ALL_ASSETS EXCLUDE REGEX "^web/.*$") # Filter out the web directory +foreach(asset ${ALL_ASSETS}) # Copy assets to build directory, excluding the web directory + file(COPY "${SUNSHINE_SOURCE_ASSETS_DIR}/common/assets/${asset}" + DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/assets") +endforeach() # install built vite assets install(DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/assets/web" diff --git a/cmake/packaging/linux.cmake b/cmake/packaging/linux.cmake index 499f058c8b..4d9cfbcec7 100644 --- a/cmake/packaging/linux.cmake +++ b/cmake/packaging/linux.cmake @@ -2,6 +2,9 @@ install(DIRECTORY "${SUNSHINE_SOURCE_ASSETS_DIR}/linux/assets/" DESTINATION "${SUNSHINE_ASSETS_DIR}") +# copy assets to build directory, for running without install +file(COPY "${SUNSHINE_SOURCE_ASSETS_DIR}/linux/assets/" + DESTINATION "${CMAKE_BINARY_DIR}/assets") if(${SUNSHINE_BUILD_APPIMAGE} OR ${SUNSHINE_BUILD_FLATPAK}) install(FILES "${SUNSHINE_SOURCE_ASSETS_DIR}/linux/misc/60-sunshine.rules" DESTINATION "${SUNSHINE_ASSETS_DIR}/udev/rules.d") diff --git a/cmake/packaging/macos.cmake b/cmake/packaging/macos.cmake index f7c3a518c9..a16fdb66a2 100644 --- a/cmake/packaging/macos.cmake +++ b/cmake/packaging/macos.cmake @@ -10,15 +10,16 @@ if(SUNSHINE_PACKAGE_MACOS) # todo set(MAC_PREFIX "${CMAKE_PROJECT_NAME}.app/Contents") set(INSTALL_RUNTIME_DIR "${MAC_PREFIX}/MacOS") - install(DIRECTORY "${SUNSHINE_SOURCE_ASSETS_DIR}/macos/assets/" - DESTINATION "${SUNSHINE_ASSETS_DIR}") - install(TARGETS sunshine BUNDLE DESTINATION . COMPONENT Runtime RUNTIME DESTINATION ${INSTALL_RUNTIME_DIR} COMPONENT Runtime) else() - install(DIRECTORY "${SUNSHINE_SOURCE_ASSETS_DIR}/macos/assets/" - DESTINATION "${SUNSHINE_ASSETS_DIR}") install(FILES "${SUNSHINE_SOURCE_ASSETS_DIR}/macos/misc/uninstall_pkg.sh" DESTINATION "${SUNSHINE_ASSETS_DIR}") endif() + +install(DIRECTORY "${SUNSHINE_SOURCE_ASSETS_DIR}/macos/assets/" + DESTINATION "${SUNSHINE_ASSETS_DIR}") +# copy assets to build directory, for running without install +file(COPY "${SUNSHINE_SOURCE_ASSETS_DIR}/macos/assets/" + DESTINATION "${CMAKE_BINARY_DIR}/assets") diff --git a/cmake/packaging/windows.cmake b/cmake/packaging/windows.cmake index 2b512ed699..bbd497ee3a 100644 --- a/cmake/packaging/windows.cmake +++ b/cmake/packaging/windows.cmake @@ -39,6 +39,9 @@ install(DIRECTORY "${SUNSHINE_SOURCE_ASSETS_DIR}/windows/misc/gamepad/" install(DIRECTORY "${SUNSHINE_SOURCE_ASSETS_DIR}/windows/assets/" DESTINATION "${SUNSHINE_ASSETS_DIR}" COMPONENT assets) +# copy assets to build directory, for running without install +file(COPY "${SUNSHINE_SOURCE_ASSETS_DIR}/windows/assets/" + DESTINATION "${CMAKE_BINARY_DIR}/assets") # set(CPACK_NSIS_MUI_HEADERIMAGE "") # TODO: image should be 150x57 bmp set(CPACK_PACKAGE_ICON "${CMAKE_SOURCE_DIR}\\\\sunshine.ico") diff --git a/cmake/prep/init.cmake b/cmake/prep/init.cmake new file mode 100644 index 0000000000..93e8b59772 --- /dev/null +++ b/cmake/prep/init.cmake @@ -0,0 +1,9 @@ +if (WIN32) +elseif (APPLE) +elseif (UNIX) + include(GNUInstallDirs) + + if(NOT DEFINED SUNSHINE_EXECUTABLE_PATH) + set(SUNSHINE_EXECUTABLE_PATH "sunshine") + endif() +endif () diff --git a/cmake/prep/options.cmake b/cmake/prep/options.cmake index 9a7fca8e5e..1555036eeb 100644 --- a/cmake/prep/options.cmake +++ b/cmake/prep/options.cmake @@ -1,3 +1,10 @@ +option(BUILD_TESTS 
"Build tests" ON) +option(TESTS_ENABLE_PYTHON_TESTS "Enable Python tests" ON) + +# DirectX11 is not available in GitHub runners, so even software encoding fails +set(TESTS_SOFTWARE_ENCODER_UNAVAILABLE "fail" + CACHE STRING "How to handle unavailable software encoders in tests. 'fail/skip'") + option(BUILD_WERROR "Enable -Werror flag." OFF) # if this option is set, the build will exit after configuring special package configuration files diff --git a/cmake/prep/special_package_configuration.cmake b/cmake/prep/special_package_configuration.cmake index d04066cdc8..17e724c90d 100644 --- a/cmake/prep/special_package_configuration.cmake +++ b/cmake/prep/special_package_configuration.cmake @@ -6,8 +6,6 @@ if (APPLE) configure_file(packaging/macos/sunshine.rb sunshine.rb @ONLY) endif() elseif (UNIX) - include(GNUInstallDirs) # this needs to be included prior to configuring the desktop files - # configure the .desktop file if(${SUNSHINE_BUILD_APPIMAGE}) configure_file(packaging/linux/AppImage/sunshine.desktop sunshine.desktop @ONLY) @@ -35,6 +33,7 @@ elseif (UNIX) # configure the flatpak manifest if(${SUNSHINE_CONFIGURE_FLATPAK_MAN}) configure_file(packaging/linux/flatpak/dev.lizardbyte.sunshine.yml dev.lizardbyte.sunshine.yml @ONLY) + file(COPY packaging/linux/flatpak/deps/ DESTINATION ${CMAKE_BINARY_DIR}) endif() endif() diff --git a/cmake/targets/common.cmake b/cmake/targets/common.cmake index 9f2ce08240..ec7c7cbcf9 100644 --- a/cmake/targets/common.cmake +++ b/cmake/targets/common.cmake @@ -3,6 +3,18 @@ add_executable(sunshine ${SUNSHINE_TARGET_FILES}) +# Homebrew build fails the vite build if we set these environment variables +# this block must be before the platform specific code +if(${SUNSHINE_BUILD_HOMEBREW}) + set(NPM_SOURCE_ASSETS_DIR "") + set(NPM_ASSETS_DIR "") + set(NPM_BUILD_HOMEBREW "true") +else() + set(NPM_SOURCE_ASSETS_DIR ${SUNSHINE_SOURCE_ASSETS_DIR}) + set(NPM_ASSETS_DIR ${CMAKE_BINARY_DIR}) + set(NPM_BUILD_HOMEBREW "") +endif() + # platform specific target definitions if(WIN32) include(${CMAKE_MODULE_PATH}/targets/windows.cmake) @@ -37,19 +49,44 @@ endif() target_compile_options(sunshine PRIVATE $<$:${SUNSHINE_COMPILE_OPTIONS}>;$<$:${SUNSHINE_COMPILE_OPTIONS_CUDA};-std=c++17>) # cmake-lint: disable=C0301 -# Homebrew build fails the vite build if we set these environment variables -if(${SUNSHINE_BUILD_HOMEBREW}) - set(NPM_SOURCE_ASSETS_DIR "") - set(NPM_ASSETS_DIR "") - set(NPM_BUILD_HOMEBREW "true") -else() - set(NPM_SOURCE_ASSETS_DIR ${SUNSHINE_SOURCE_ASSETS_DIR}) - set(NPM_ASSETS_DIR ${CMAKE_BINARY_DIR}) - set(NPM_BUILD_HOMEBREW "") +# tests +if(BUILD_TESTS) + add_subdirectory(tests) endif() -#WebUI build -add_custom_target(web-ui ALL - WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}" - COMMENT "Installing NPM Dependencies and Building the Web UI" - COMMAND bash -c \"npm install && SUNSHINE_BUILD_HOMEBREW=${NPM_BUILD_HOMEBREW} SUNSHINE_SOURCE_ASSETS_DIR=${NPM_SOURCE_ASSETS_DIR} SUNSHINE_ASSETS_DIR=${NPM_ASSETS_DIR} npm run build\") # cmake-lint: disable=C0301 +# custom compile flags, must be after adding tests + +# src/upnp +set_source_files_properties("${CMAKE_SOURCE_DIR}/src/upnp.cpp" + DIRECTORY "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests" + PROPERTIES COMPILE_FLAGS -Wno-pedantic) + +# third-party/nanors +set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/nanors/rs.c" + DIRECTORY "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests" + PROPERTIES COMPILE_FLAGS "-include deps/obl/autoshim.h -ftree-vectorize") + +# third-party/ViGEmClient +set(VIGEM_COMPILE_FLAGS "") 
+string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unknown-pragmas ") +string(APPEND VIGEM_COMPILE_FLAGS "-Wno-misleading-indentation ") +string(APPEND VIGEM_COMPILE_FLAGS "-Wno-class-memaccess ") +string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unused-function ") +string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unused-variable ") +set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/ViGEmClient/src/ViGEmClient.cpp" + DIRECTORY "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests" + PROPERTIES + COMPILE_DEFINITIONS "UNICODE=1;ERROR_INVALID_DEVICE_OBJECT_PARAMETER=650" + COMPILE_FLAGS ${VIGEM_COMPILE_FLAGS}) + +# src/nvhttp +string(TOUPPER "x${CMAKE_BUILD_TYPE}" BUILD_TYPE) +if("${BUILD_TYPE}" STREQUAL "XDEBUG") + if(WIN32) + set_source_files_properties("${CMAKE_SOURCE_DIR}/src/nvhttp.cpp" + DIRECTORY "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests" + PROPERTIES COMPILE_FLAGS -O2) + endif() +else() + add_definitions(-DNDEBUG) +endif() diff --git a/cmake/targets/unix.cmake b/cmake/targets/unix.cmake index 047a0b3d38..2ce0378fdf 100644 --- a/cmake/targets/unix.cmake +++ b/cmake/targets/unix.cmake @@ -1,2 +1,8 @@ # unix specific target definitions # put anything here that applies to both linux and macos + +#WebUI build +add_custom_target(web-ui ALL + WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}" + COMMENT "Installing NPM Dependencies and Building the Web UI" + COMMAND bash -c \"npm install && SUNSHINE_BUILD_HOMEBREW=${NPM_BUILD_HOMEBREW} SUNSHINE_SOURCE_ASSETS_DIR=${NPM_SOURCE_ASSETS_DIR} SUNSHINE_ASSETS_DIR=${NPM_ASSETS_DIR} npm run build\") # cmake-lint: disable=C0301 diff --git a/cmake/targets/windows.cmake b/cmake/targets/windows.cmake index 341d7c2e74..e429feaa82 100644 --- a/cmake/targets/windows.cmake +++ b/cmake/targets/windows.cmake @@ -4,3 +4,9 @@ set(CMAKE_FIND_LIBRARY_SUFFIXES ".dll") find_library(ZLIB ZLIB1) list(APPEND SUNSHINE_EXTERNAL_LIBRARIES Wtsapi32.lib) + +#WebUI build +add_custom_target(web-ui ALL + WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}" + COMMENT "Installing NPM Dependencies and Building the Web UI" + COMMAND cmd /C "npm install && set \"SUNSHINE_SOURCE_ASSETS_DIR=${NPM_SOURCE_ASSETS_DIR}\" && set \"SUNSHINE_ASSETS_DIR=${NPM_ASSETS_DIR}\" && npm run build") # cmake-lint: disable=C0301 diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000000..59209e4643 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,15 @@ +--- +codecov: + branch: nightly + +coverage: + status: + project: + default: + target: auto + threshold: 10% + +comment: + layout: "diff, flags, files" + behavior: default + require_changes: false # if true: only post the comment if coverage changes diff --git a/crowdin.yml b/crowdin.yml index 0be504ba7d..3dd19366ef 100644 --- a/crowdin.yml +++ b/crowdin.yml @@ -1,7 +1,7 @@ --- "base_path": "." 
"base_url": "https://api.crowdin.com" # optional (for Crowdin Enterprise only) -"preserve_hierarchy": false # flatten tree on crowdin +"preserve_hierarchy": true # false will flatten tree on crowdin, but doesn't work with dest option "pull_request_labels": [ "crowdin", "l10n" @@ -10,6 +10,7 @@ "files": [ { "source": "/locale/*.po", + "dest": "/%original_file_name%", "translation": "/locale/%two_letters_code%/LC_MESSAGES/%original_file_name%", "languages_mapping": { "two_letters_code": { @@ -17,6 +18,13 @@ "en-GB": "en_GB", "en-US": "en_US" } - } + }, + "update_option": "update_as_unapproved" + }, + { + "source": "/src_assets/common/assets/web/public/assets/locale/en.json", + "dest": "/sunshine.json", + "translation": "/src_assets/common/assets/web/public/assets/locale/%two_letters_code%.%file_extension%", + "update_option": "update_as_unapproved" } ] diff --git a/docker/archlinux.dockerfile b/docker/archlinux.dockerfile index e8cfd93998..26c1b186d7 100644 --- a/docker/archlinux.dockerfile +++ b/docker/archlinux.dockerfile @@ -43,7 +43,8 @@ pacman -Syu --disable-download-timeout --needed --noconfirm \ cmake \ cuda \ git \ - namcap + namcap \ + xorg-server-xvfb _DEPS # Setup builder user @@ -84,6 +85,8 @@ RUN mv /build/sunshine/build/sunshine.install . RUN <<_PKGBUILD #!/bin/bash set -e +export DISPLAY=:1 +Xvfb ${DISPLAY} -screen 0 1024x768x24 & namcap -i PKGBUILD makepkg -si --noconfirm rm -f /build/sunshine/pkg/sunshine-debug*.pkg.tar.zst diff --git a/docker/clion-toolchain.dockerfile b/docker/clion-toolchain.dockerfile index bb4604f54f..3af5dd4f45 100644 --- a/docker/clion-toolchain.dockerfile +++ b/docker/clion-toolchain.dockerfile @@ -14,6 +14,8 @@ FROM toolchain-base as toolchain ARG TARGETPLATFORM RUN echo "target_platform: ${TARGETPLATFORM}" +ENV DISPLAY=:0 + SHELL ["/bin/bash", "-o", "pipefail", "-c"] # install dependencies RUN <<_DEPS @@ -24,10 +26,12 @@ apt-get install -y --no-install-recommends \ build-essential \ cmake=3.22.* \ ca-certificates \ + doxygen \ gcc=4:11.2.* \ g++=4:11.2.* \ gdb \ git \ + graphviz \ libayatana-appindicator3-dev \ libavdevice-dev \ libboost-filesystem-dev=1.74.* \ @@ -54,8 +58,12 @@ apt-get install -y --no-install-recommends \ libxfixes-dev \ libxrandr-dev \ libxtst-dev \ + python3.10 \ + python3.10-venv \ udev \ - wget + wget \ + x11-xserver-utils \ + xvfb if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then apt-get install -y --no-install-recommends \ libmfx-dev @@ -98,3 +106,28 @@ chmod a+x ./cuda.run ./cuda.run --silent --toolkit --toolkitpath=/usr/local --no-opengl-libs --no-man-page --no-drm rm ./cuda.run _INSTALL_CUDA + +WORKDIR / +# Write a shell script that starts Xvfb and then runs a shell +RUN <<_ENTRYPOINT +#!/bin/bash +set -e +cat < /entrypoint.sh +#!/bin/bash +Xvfb ${DISPLAY} -screen 0 1024x768x24 & +if [ "\$#" -eq 0 ]; then + exec "/bin/bash" +else + exec "\$@" +fi +EOF +_ENTRYPOINT + +# Make the script executable +RUN chmod +x /entrypoint.sh + +# Note about CLion +RUN echo "ATTENTION: CLion will override the entrypoint, you can disable this in the toolchain settings" + +# Use the shell script as the entrypoint +ENTRYPOINT ["/entrypoint.sh"] diff --git a/docker/debian-bookworm.dockerfile b/docker/debian-bookworm.dockerfile index d664ff8c8e..7f49bb1a49 100644 --- a/docker/debian-bookworm.dockerfile +++ b/docker/debian-bookworm.dockerfile @@ -32,7 +32,9 @@ apt-get update -y apt-get install -y --no-install-recommends \ build-essential \ cmake=3.25.* \ + doxygen \ git \ + graphviz \ libavdevice-dev \ libayatana-appindicator3-dev \ 
libboost-filesystem-dev=1.74.* \ @@ -61,8 +63,12 @@ apt-get install -y --no-install-recommends \ libxtst-dev \ nodejs \ npm \ + python3.11 \ + python3.11-venv \ udev \ - wget + wget \ + x11-xserver-utils \ + xvfb if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then apt-get install -y --no-install-recommends \ libmfx-dev @@ -120,6 +126,17 @@ make -j "$(nproc)" cpack -G DEB _MAKE +# run tests +WORKDIR /build/sunshine/build/tests +# hadolint ignore=SC1091 +RUN <<_TEST +#!/bin/bash +set -e +export DISPLAY=:1 +Xvfb ${DISPLAY} -screen 0 1024x768x24 & +./test_sunshine --gtest_color=yes +_TEST + FROM scratch AS artifacts ARG BASE ARG TAG diff --git a/docker/debian-bullseye.dockerfile b/docker/debian-bullseye.dockerfile index 5f607c2481..5e0667f018 100644 --- a/docker/debian-bullseye.dockerfile +++ b/docker/debian-bullseye.dockerfile @@ -33,7 +33,9 @@ apt-get install -y --no-install-recommends \ build-essential \ ca-certificates \ cmake=3.18.* \ + doxygen \ git \ + graphviz \ libavdevice-dev \ libayatana-appindicator3-dev \ libboost-filesystem-dev=1.74.* \ @@ -60,8 +62,12 @@ apt-get install -y --no-install-recommends \ libxfixes-dev \ libxrandr-dev \ libxtst-dev \ + python3.9 \ + python3.9-venv \ udev \ - wget + wget \ + x11-xserver-utils \ + xvfb if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then apt-get install -y --no-install-recommends \ libmfx-dev @@ -134,6 +140,17 @@ make -j "$(nproc)" cpack -G DEB _MAKE +# run tests +WORKDIR /build/sunshine/build/tests +# hadolint ignore=SC1091 +RUN <<_TEST +#!/bin/bash +set -e +export DISPLAY=:1 +Xvfb ${DISPLAY} -screen 0 1024x768x24 & +./test_sunshine --gtest_color=yes +_TEST + FROM scratch AS artifacts ARG BASE ARG TAG diff --git a/docker/fedora-38.dockerfile b/docker/fedora-38.dockerfile index 55622ab707..52796d509f 100644 --- a/docker/fedora-38.dockerfile +++ b/docker/fedora-38.dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1.4 # artifacts: true -# platforms: linux/amd64,linux/arm64/v8 +# platforms: linux/amd64 # platforms_pr: linux/amd64 # no-cache-filters: sunshine-base,artifacts,sunshine ARG BASE=fedora @@ -32,9 +32,11 @@ dnf -y group install "Development Tools" dnf -y install \ boost-devel-1.78.0* \ cmake-3.27.* \ + doxygen \ gcc-13.2.* \ gcc-c++-13.2.* \ git \ + graphviz \ libappindicator-gtk3-devel \ libcap-devel \ libcurl-devel \ @@ -58,9 +60,11 @@ dnf -y install \ openssl-devel \ opus-devel \ pulseaudio-libs-devel \ + python3.10 \ rpm-build \ wget \ - which + which \ + xorg-x11-server-Xvfb if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then dnf -y install intel-mediasdk-devel fi @@ -117,6 +121,17 @@ make -j "$(nproc)" cpack -G RPM _MAKE +# run tests +WORKDIR /build/sunshine/build/tests +# hadolint ignore=SC1091 +RUN <<_TEST +#!/bin/bash +set -e +export DISPLAY=:1 +Xvfb ${DISPLAY} -screen 0 1024x768x24 & +./test_sunshine --gtest_color=yes +_TEST + FROM scratch AS artifacts ARG BASE ARG TAG diff --git a/docker/fedora-39.dockerfile b/docker/fedora-39.dockerfile index 20dae39a79..262b40fc7a 100644 --- a/docker/fedora-39.dockerfile +++ b/docker/fedora-39.dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1.4 # artifacts: true -# platforms: linux/amd64,linux/arm64/v8 +# platforms: linux/amd64 # platforms_pr: linux/amd64 # no-cache-filters: sunshine-base,artifacts,sunshine ARG BASE=fedora @@ -32,9 +32,11 @@ dnf -y group install "Development Tools" dnf -y install \ boost-devel-1.81.0* \ cmake-3.27.* \ + doxygen \ gcc-13.2.* \ gcc-c++-13.2.* \ git \ + graphviz \ libappindicator-gtk3-devel \ libcap-devel \ libcurl-devel \ @@ -58,9 +60,11 @@ dnf 
-y install \ openssl-devel \ opus-devel \ pulseaudio-libs-devel \ + python3.11 \ rpm-build \ wget \ - which + which \ + xorg-x11-server-Xvfb if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then dnf -y install intel-mediasdk-devel fi @@ -124,6 +128,17 @@ make -j "$(nproc)" cpack -G RPM _MAKE +# run tests +WORKDIR /build/sunshine/build/tests +# hadolint ignore=SC1091 +RUN <<_TEST +#!/bin/bash +set -e +export DISPLAY=:1 +Xvfb ${DISPLAY} -screen 0 1024x768x24 & +./test_sunshine --gtest_color=yes +_TEST + FROM scratch AS artifacts ARG BASE ARG TAG diff --git a/docker/ubuntu-22.04.dockerfile b/docker/ubuntu-22.04.dockerfile index fa2d5e19d8..f75fd1ee1e 100644 --- a/docker/ubuntu-22.04.dockerfile +++ b/docker/ubuntu-22.04.dockerfile @@ -33,9 +33,11 @@ apt-get install -y --no-install-recommends \ build-essential \ cmake=3.22.* \ ca-certificates \ + doxygen \ git \ - libayatana-appindicator3-dev \ + graphviz \ libavdevice-dev \ + libayatana-appindicator3-dev \ libboost-filesystem-dev=1.74.* \ libboost-locale-dev=1.74.* \ libboost-log-dev=1.74.* \ @@ -60,8 +62,12 @@ apt-get install -y --no-install-recommends \ libxfixes-dev \ libxrandr-dev \ libxtst-dev \ + python3.10 \ + python3.10-venv \ udev \ - wget + wget \ + x11-xserver-utils \ + xvfb if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then apt-get install -y --no-install-recommends \ libmfx-dev @@ -135,6 +141,17 @@ make -j "$(nproc)" cpack -G DEB _MAKE +# run tests +WORKDIR /build/sunshine/build/tests +# hadolint ignore=SC1091 +RUN <<_TEST +#!/bin/bash +set -e +export DISPLAY=:1 +Xvfb ${DISPLAY} -screen 0 1024x768x24 & +./test_sunshine --gtest_color=yes +_TEST + FROM scratch AS artifacts ARG BASE ARG TAG diff --git a/docker/ubuntu-20.04.dockerfile b/docker/ubuntu-24.04.dockerfile similarity index 77% rename from docker/ubuntu-20.04.dockerfile rename to docker/ubuntu-24.04.dockerfile index 4a1dcf4867..97008d9597 100644 --- a/docker/ubuntu-20.04.dockerfile +++ b/docker/ubuntu-24.04.dockerfile @@ -1,10 +1,10 @@ # syntax=docker/dockerfile:1.4 # artifacts: true -# platforms: linux/amd64,linux/arm64/v8 +# platforms: linux/amd64 # platforms_pr: linux/amd64 # no-cache-filters: sunshine-base,artifacts,sunshine ARG BASE=ubuntu -ARG TAG=20.04 +ARG TAG=24.04 FROM ${BASE}:${TAG} AS sunshine-base ENV DEBIAN_FRONTEND=noninteractive @@ -31,16 +31,18 @@ set -e apt-get update -y apt-get install -y --no-install-recommends \ build-essential \ + cmake=3.28.* \ ca-certificates \ - gcc-10=10.5.* \ - g++-10=10.5.* \ + doxygen \ + gcc-11 \ + g++-11 \ git \ + graphviz \ libayatana-appindicator3-dev \ - libavdevice-dev \ - libboost-filesystem-dev=1.71.* \ - libboost-locale-dev=1.71.* \ - libboost-log-dev=1.71.* \ - libboost-program-options-dev=1.71.* \ + libboost-filesystem-dev=1.83.* \ + libboost-locale-dev=1.83.* \ + libboost-log-dev=1.83.* \ + libboost-program-options-dev=1.83.* \ libcap-dev \ libcurl4-openssl-dev \ libdrm-dev \ @@ -61,8 +63,12 @@ apt-get install -y --no-install-recommends \ libxfixes-dev \ libxrandr-dev \ libxtst-dev \ + python3.11 \ + python3.11-venv \ udev \ - wget + wget \ + x11-xserver-utils \ + xvfb if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then apt-get install -y --no-install-recommends \ libmfx-dev @@ -71,6 +77,7 @@ apt-get clean rm -rf /var/lib/apt/lists/* _DEPS + #Install Node # hadolint ignore=SC1091 RUN <<_INSTALL_NODE @@ -88,35 +95,13 @@ RUN <<_GCC_ALIAS #!/bin/bash set -e update-alternatives --install \ - /usr/bin/gcc gcc /usr/bin/gcc-10 100 \ - --slave /usr/bin/g++ g++ /usr/bin/g++-10 \ - --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \ - 
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \ - --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10 + /usr/bin/gcc gcc /usr/bin/gcc-11 100 \ + --slave /usr/bin/g++ g++ /usr/bin/g++-11 \ + --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \ + --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-11 \ + --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-11 _GCC_ALIAS -# install cmake -# sunshine requires cmake >= 3.18 -WORKDIR /build/cmake -# https://cmake.org/download/ -ENV CMAKE_VERSION="3.25.1" -# hadolint ignore=SC3010 -RUN <<_INSTALL_CMAKE -#!/bin/bash -set -e -cmake_prefix="https://github.com/Kitware/CMake/releases/download/v" -if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then - cmake_arch="x86_64" -elif [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then - cmake_arch="aarch64" -fi -url="${cmake_prefix}${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-linux-${cmake_arch}.sh" -echo "cmake url: ${url}" -wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cmake.sh -sh ./cmake.sh --prefix=/usr/local --skip-license -rm ./cmake.sh -_INSTALL_CMAKE - # install cuda WORKDIR /build/cuda # versions: https://developer.nvidia.com/cuda-toolkit-archive @@ -154,6 +139,7 @@ set -e #Set Node version source "$HOME/.nvm/nvm.sh" nvm use 20.9.0 +#Actually build cmake \ -DBUILD_WERROR=ON \ -DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \ @@ -170,6 +156,17 @@ make -j "$(nproc)" cpack -G DEB _MAKE +# run tests +WORKDIR /build/sunshine/build/tests +# hadolint ignore=SC1091 +RUN <<_TEST +#!/bin/bash +set -e +export DISPLAY=:1 +Xvfb ${DISPLAY} -screen 0 1024x768x24 & +./test_sunshine --gtest_color=yes +_TEST + FROM scratch AS artifacts ARG BASE ARG TAG @@ -197,9 +194,9 @@ EXPOSE 48010 EXPOSE 47998-48000/udp # setup user -ARG PGID=1000 +ARG PGID=1001 ENV PGID=${PGID} -ARG PUID=1000 +ARG PUID=1001 ENV PUID=${PUID} ENV TZ="UTC" ARG UNAME=lizard diff --git a/docs/Doxyfile b/docs/Doxyfile index d6e79cd257..d6aa47edd5 100644 --- a/docs/Doxyfile +++ b/docs/Doxyfile @@ -1,4 +1,4 @@ -# Doxyfile 1.9.6 +# Doxyfile 1.10.0 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. @@ -42,7 +42,7 @@ DOXYFILE_ENCODING = UTF-8 # title of most generated pages and in a few other places. # The default value is: My Project. -PROJECT_NAME = "Sunshine" +PROJECT_NAME = Sunshine # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version @@ -63,6 +63,12 @@ PROJECT_BRIEF = "Sunshine is a Gamestream host for Moonlight." PROJECT_LOGO = ../sunshine.png +# With the PROJECT_ICON tag one can specify an icon that is included in the tabs +# when the HTML document is shown. Doxygen will copy the logo to the output +# directory. + +PROJECT_ICON = + # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. If a relative path is # entered, it will be relative to the location where doxygen was started. If @@ -365,6 +371,17 @@ MARKDOWN_SUPPORT = YES TOC_INCLUDE_HEADINGS = 5 +# The MARKDOWN_ID_STYLE tag can be used to specify the algorithm used to +# generate identifiers for the Markdown headings. Note: Every identifier is +# unique. +# Possible values are: DOXYGEN use a fixed 'autotoc_md' string followed by a +# sequence number starting at 0 and GITHUB use the lower case version of title +# with any whitespace replaced by '-' and punctuation characters removed. +# The default value is: DOXYGEN. 
+# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +MARKDOWN_ID_STYLE = DOXYGEN + # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by putting a % sign in front of the word or @@ -489,6 +506,14 @@ LOOKUP_CACHE_SIZE = 0 NUM_PROC_THREADS = 0 +# If the TIMESTAMP tag is set different from NO then each generated page will +# contain the date or date and time when the page was generated. Setting this to +# NO can help when comparing the output of multiple runs. +# Possible values are: YES, NO, DATETIME and DATE. +# The default value is: NO. + +TIMESTAMP = NO + #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- @@ -874,7 +899,14 @@ WARN_IF_UNDOC_ENUM_VAL = NO # a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS # then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but # at the end of the doxygen process doxygen will return with a non-zero status. -# Possible values are: NO, YES and FAIL_ON_WARNINGS. +# If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS_PRINT then doxygen behaves +# like FAIL_ON_WARNINGS but in case no WARN_LOGFILE is defined doxygen will not +# write the warning messages in between other messages but write them at the end +# of a run, in case a WARN_LOGFILE is defined the warning messages will be +# besides being in the defined file also be shown at the end of a run, unless +# the WARN_LOGFILE is defined as - i.e. standard output (stdout) in that case +# the behavior will remain as with the setting FAIL_ON_WARNINGS. +# Possible values are: NO, YES, FAIL_ON_WARNINGS and FAIL_ON_WARNINGS_PRINT. # The default value is: NO. WARN_AS_ERROR = NO @@ -953,12 +985,12 @@ INPUT_FILE_ENCODING = # Note the list of default checked file patterns might differ from the list of # default file extension mappings. # -# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, -# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, -# *.hh, *.hxx, *.hpp, *.h++, *.l, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, -# *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C -# comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, -# *.vhdl, *.ucf, *.qsf and *.ice. +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cxxm, +# *.cpp, *.cppm, *.ccm, *.c++, *.c++m, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, +# *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, *.h++, *.ixx, *.l, *.cs, *.d, +# *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to +# be provided as doxygen C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, +# *.f18, *.f, *.for, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice. FILE_PATTERNS = *.c \ *.cc \ @@ -1043,9 +1075,6 @@ EXCLUDE_PATTERNS = # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. 
Examples: ANamespace, AClass, # ANamespace::AClass, ANamespace::*Test -# -# Note that the wildcards are matched against the file with absolute path, so to -# exclude all test directories use the pattern */test/* EXCLUDE_SYMBOLS = @@ -1159,7 +1188,8 @@ FORTRAN_COMMENT_AFTER = 72 SOURCE_BROWSER = NO # Setting the INLINE_SOURCES tag to YES will include the body of functions, -# classes and enums directly into the documentation. +# multi-line macros, enums or list initialized variables directly into the +# documentation. # The default value is: NO. INLINE_SOURCES = NO @@ -1428,15 +1458,6 @@ HTML_COLORSTYLE_SAT = 100 HTML_COLORSTYLE_GAMMA = 80 -# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML -# page will contain the date and time when the page was generated. Setting this -# to YES can help to show when doxygen was last run and thus if the -# documentation is up to date. -# The default value is: NO. -# This tag requires that the tag GENERATE_HTML is set to YES. - -HTML_TIMESTAMP = NO - # If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML # documentation will contain a main index with vertical navigation menus that # are dynamically created via JavaScript. If disabled, the navigation index will @@ -1456,6 +1477,33 @@ HTML_DYNAMIC_MENUS = YES HTML_DYNAMIC_SECTIONS = NO +# If the HTML_CODE_FOLDING tag is set to YES then classes and functions can be +# dynamically folded and expanded in the generated HTML source code. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_CODE_FOLDING = YES + +# If the HTML_COPY_CLIPBOARD tag is set to YES then doxygen will show an icon in +# the top right corner of code and text fragments that allows the user to copy +# its content to the clipboard. Note this only works if supported by the browser +# and the web page is served via a secure context (see: +# https://www.w3.org/TR/secure-contexts/), i.e. using the https: or file: +# protocol. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COPY_CLIPBOARD = YES + +# Doxygen stores a couple of settings persistently in the browser (via e.g. +# cookies). By default these settings apply to all HTML pages generated by +# doxygen across all projects. The HTML_PROJECT_COOKIE tag can be used to store +# the settings under a project specific key, such that the user preferences will +# be stored separately. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_PROJECT_COOKIE = + # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries # shown in the various tree structured indices initially; the user can expand # and collapse entries dynamically later on. Doxygen will expand the tree to @@ -1586,6 +1634,16 @@ BINARY_TOC = NO TOC_EXPAND = NO +# The SITEMAP_URL tag is used to specify the full URL of the place where the +# generated documentation will be placed on the server by the user during the +# deployment of the documentation. The generated sitemap is called sitemap.xml +# and placed on the directory specified by HTML_OUTPUT. In case no SITEMAP_URL +# is specified no sitemap is generated. For information about the sitemap +# protocol see https://www.sitemaps.org +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +SITEMAP_URL = + # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that # can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help @@ -2074,9 +2132,16 @@ PDF_HYPERLINKS = YES USE_PDFLATEX = YES -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode -# command to the generated LaTeX files. This will instruct LaTeX to keep running -# if errors occur, instead of asking the user for help. +# The LATEX_BATCHMODE tag signals the behavior of LaTeX in case of an error. +# Possible values are: NO same as ERROR_STOP, YES same as BATCH, BATCH In batch +# mode nothing is printed on the terminal, errors are scrolled as if is +# hit at every error; missing files that TeX tries to input or request from +# keyboard input (\read on a not open input stream) cause the job to abort, +# NON_STOP In nonstop mode the diagnostic message will appear on the terminal, +# but there is no possibility of user interaction just like in batch mode, +# SCROLL In scroll mode, TeX will stop only for missing files to input or if +# keyboard input is necessary and ERROR_STOP In errorstop mode, TeX will stop at +# each error, asking for user intervention. # The default value is: NO. # This tag requires that the tag GENERATE_LATEX is set to YES. @@ -2097,14 +2162,6 @@ LATEX_HIDE_INDICES = NO LATEX_BIB_STYLE = plain -# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated -# page will contain the date and time when the page was generated. Setting this -# to NO can help when comparing the output of multiple runs. -# The default value is: NO. -# This tag requires that the tag GENERATE_LATEX is set to YES. - -LATEX_TIMESTAMP = NO - # The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute) # path from which the emoji images will be read. If a relative path is entered, # it will be relative to the LATEX_OUTPUT directory. If left blank the @@ -2270,13 +2327,39 @@ DOCBOOK_OUTPUT = doxydocbook #--------------------------------------------------------------------------- # If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an -# AutoGen Definitions (see http://autogen.sourceforge.net/) file that captures +# AutoGen Definitions (see https://autogen.sourceforge.net/) file that captures # the structure of the code including all documentation. Note that this feature # is still experimental and incomplete at the moment. # The default value is: NO. GENERATE_AUTOGEN_DEF = NO +#--------------------------------------------------------------------------- +# Configuration options related to Sqlite3 output +#--------------------------------------------------------------------------- + +# If the GENERATE_SQLITE3 tag is set to YES doxygen will generate a Sqlite3 +# database with symbols found by doxygen stored in tables. +# The default value is: NO. + +GENERATE_SQLITE3 = NO + +# The SQLITE3_OUTPUT tag is used to specify where the Sqlite3 database will be +# put. If a relative path is entered the value of OUTPUT_DIRECTORY will be put +# in front of it. +# The default directory is: sqlite3. +# This tag requires that the tag GENERATE_SQLITE3 is set to YES. + +SQLITE3_OUTPUT = sqlite3 + +# The SQLITE3_RECREATE_DB tag is set to YES, the existing doxygen_sqlite3.db +# database file will be recreated with each doxygen run. If set to NO, doxygen +# will warn if a database file is already found and not modify it. +# The default value is: YES. 
+# This tag requires that the tag GENERATE_SQLITE3 is set to YES. + +SQLITE3_RECREATE_DB = YES + #--------------------------------------------------------------------------- # Configuration options related to the Perl module output #--------------------------------------------------------------------------- @@ -2419,15 +2502,15 @@ TAGFILES = GENERATE_TAGFILE = -# If the ALLEXTERNALS tag is set to YES, all external class will be listed in -# the class index. If set to NO, only the inherited external classes will be -# listed. +# If the ALLEXTERNALS tag is set to YES, all external classes and namespaces +# will be listed in the class and namespace index. If set to NO, only the +# inherited external classes will be listed. # The default value is: NO. ALLEXTERNALS = NO # If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will be +# in the topic index. If set to NO, only the current project's groups will be # listed. # The default value is: YES. @@ -2441,16 +2524,9 @@ EXTERNAL_GROUPS = YES EXTERNAL_PAGES = YES #--------------------------------------------------------------------------- -# Configuration options related to the dot tool +# Configuration options related to diagram generator tools #--------------------------------------------------------------------------- -# You can include diagrams made with dia in doxygen documentation. Doxygen will -# then run dia to produce the diagram and insert it in the documentation. The -# DIA_PATH tag allows you to specify the directory where the dia binary resides. -# If left empty dia is assumed to be found in the default search path. - -DIA_PATH = - # If set to YES the inheritance and collaboration graphs will hide inheritance # and usage relations if the target is undocumented or is not a class. # The default value is: YES. @@ -2459,7 +2535,7 @@ HIDE_UNDOC_RELATIONS = YES # If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is # available from the path. This tool is part of Graphviz (see: -# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent +# https://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent # Bell Labs. The other options in this section have no effect if this option is # set to NO # The default value is: NO. @@ -2512,13 +2588,19 @@ DOT_NODE_ATTR = "shape=box,height=0.2,width=0.4" DOT_FONTPATH = -# If the CLASS_GRAPH tag is set to YES (or GRAPH) then doxygen will generate a -# graph for each documented class showing the direct and indirect inheritance -# relations. In case HAVE_DOT is set as well dot will be used to draw the graph, -# otherwise the built-in generator will be used. If the CLASS_GRAPH tag is set -# to TEXT the direct and indirect inheritance relations will be shown as texts / -# links. -# Possible values are: NO, YES, TEXT and GRAPH. +# If the CLASS_GRAPH tag is set to YES or GRAPH or BUILTIN then doxygen will +# generate a graph for each documented class showing the direct and indirect +# inheritance relations. In case the CLASS_GRAPH tag is set to YES or GRAPH and +# HAVE_DOT is enabled as well, then dot will be used to draw the graph. In case +# the CLASS_GRAPH tag is set to YES and HAVE_DOT is disabled or if the +# CLASS_GRAPH tag is set to BUILTIN, then the built-in generator will be used. +# If the CLASS_GRAPH tag is set to TEXT the direct and indirect inheritance +# relations will be shown as texts / links. 
Explicit enabling an inheritance +# graph or choosing a different representation for an inheritance graph of a +# specific class, can be accomplished by means of the command \inheritancegraph. +# Disabling an inheritance graph can be accomplished by means of the command +# \hideinheritancegraph. +# Possible values are: NO, YES, TEXT, GRAPH and BUILTIN. # The default value is: YES. CLASS_GRAPH = YES @@ -2526,15 +2608,21 @@ CLASS_GRAPH = YES # If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a # graph for each documented class showing the direct and indirect implementation # dependencies (inheritance, containment, and class references variables) of the -# class with other documented classes. +# class with other documented classes. Explicit enabling a collaboration graph, +# when COLLABORATION_GRAPH is set to NO, can be accomplished by means of the +# command \collaborationgraph. Disabling a collaboration graph can be +# accomplished by means of the command \hidecollaborationgraph. # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. COLLABORATION_GRAPH = YES # If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for -# groups, showing the direct groups dependencies. See also the chapter Grouping -# in the manual. +# groups, showing the direct groups dependencies. Explicit enabling a group +# dependency graph, when GROUP_GRAPHS is set to NO, can be accomplished by means +# of the command \groupgraph. Disabling a directory graph can be accomplished by +# means of the command \hidegroupgraph. See also the chapter Grouping in the +# manual. # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. @@ -2576,8 +2664,8 @@ DOT_UML_DETAILS = NO # The DOT_WRAP_THRESHOLD tag can be used to set the maximum number of characters # to display on a single line. If the actual line length exceeds this threshold -# significantly it will wrapped across multiple lines. Some heuristics are apply -# to avoid ugly line breaks. +# significantly it will be wrapped across multiple lines. Some heuristics are +# applied to avoid ugly line breaks. # Minimum value: 0, maximum value: 1000, default value: 17. # This tag requires that the tag HAVE_DOT is set to YES. @@ -2594,7 +2682,9 @@ TEMPLATE_RELATIONS = NO # If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to # YES then doxygen will generate a graph for each documented file showing the # direct and indirect include dependencies of the file with other documented -# files. +# files. Explicit enabling an include graph, when INCLUDE_GRAPH is is set to NO, +# can be accomplished by means of the command \includegraph. Disabling an +# include graph can be accomplished by means of the command \hideincludegraph. # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. @@ -2603,7 +2693,10 @@ INCLUDE_GRAPH = YES # If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are # set to YES then doxygen will generate a graph for each documented file showing # the direct and indirect include dependencies of the file with other documented -# files. +# files. Explicit enabling an included by graph, when INCLUDED_BY_GRAPH is set +# to NO, can be accomplished by means of the command \includedbygraph. Disabling +# an included by graph can be accomplished by means of the command +# \hideincludedbygraph. # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. 
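For readers unfamiliar with the per-graph override commands referenced in the comments above (\inheritancegraph, \hideinheritancegraph, \includegraph and friends): they are placed inside ordinary Doxygen comments on the entity whose graph should be toggled. A minimal illustrative sketch, using a made-up class rather than anything from this patch:

.. code-block:: cpp

   /**
    * @brief Example class; the overrides below disable graphs for this entity only.
    * \hideinheritancegraph
    * \hidecollaborationgraph
    */
   class example_t: public base_t {};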
@@ -2643,7 +2736,10 @@ GRAPHICAL_HIERARCHY = YES # If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the # dependencies a directory has on other directories in a graphical way. The # dependency relations are determined by the #include relations between the -# files in the directories. +# files in the directories. Explicit enabling a directory graph, when +# DIRECTORY_GRAPH is set to NO, can be accomplished by means of the command +# \directorygraph. Disabling a directory graph can be accomplished by means of +# the command \hidedirectorygraph. # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. @@ -2659,7 +2755,7 @@ DIR_GRAPH_MAX_DEPTH = 1 # The DOT_IMAGE_FORMAT tag can be used to set the image format of the images # generated by dot. For an explanation of the image formats see the section # output formats in the documentation of the dot tool (Graphviz (see: -# http://www.graphviz.org/)). +# https://www.graphviz.org/)). # Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order # to make the SVG files visible in IE 9+ (other browsers do not have this # requirement). @@ -2696,11 +2792,12 @@ DOT_PATH = DOTFILE_DIRS = -# The MSCFILE_DIRS tag can be used to specify one or more directories that -# contain msc files that are included in the documentation (see the \mscfile -# command). +# You can include diagrams made with dia in doxygen documentation. Doxygen will +# then run dia to produce the diagram and insert it in the documentation. The +# DIA_PATH tag allows you to specify the directory where the dia binary resides. +# If left empty dia is assumed to be found in the default search path. -MSCFILE_DIRS = +DIA_PATH = # The DIAFILE_DIRS tag can be used to specify one or more directories that # contain dia files that are included in the documentation (see the \diafile @@ -2777,3 +2874,19 @@ GENERATE_LEGEND = YES # The default value is: YES. DOT_CLEANUP = YES + +# You can define message sequence charts within doxygen comments using the \msc +# command. If the MSCGEN_TOOL tag is left empty (the default), then doxygen will +# use a built-in version of mscgen tool to produce the charts. Alternatively, +# the MSCGEN_TOOL tag can also specify the name an external tool. For instance, +# specifying prog as the value, doxygen will call the tool as prog -T +# -o . The external tool should support +# output file formats "png", "eps", "svg", and "ismap". + +MSCGEN_TOOL = + +# The MSCFILE_DIRS tag can be used to specify one or more directories that +# contain msc files that are included in the documentation (see the \mscfile +# command). + +MSCFILE_DIRS = diff --git a/docs/Makefile b/docs/Makefile index d0c3cbf102..8b6275ab8c 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -3,7 +3,7 @@ # You can set these variables from the command line, and also # from the environment for the first two. 
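For context on the SPHINXOPTS change just below: ``-W`` turns Sphinx warnings into errors and ``--keep-going`` still reports every warning before failing, so the docs build becomes strict without hiding later warnings behind the first one. A hedged manual invocation, assuming the repo's ``docs/source`` and ``docs/build`` layout:

.. code-block:: bash

   # run from the docs/ directory; fails on any warning but reports them all
   sphinx-build -M html source build -W --keep-going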
-SPHINXOPTS ?= +SPHINXOPTS ?= -W --keep-going SPHINXBUILD ?= sphinx-build SOURCEDIR = source BUILDDIR = build diff --git a/docs/make.bat b/docs/make.bat index dc1312ab09..08ca223208 100644 --- a/docs/make.bat +++ b/docs/make.bat @@ -9,6 +9,7 @@ if "%SPHINXBUILD%" == "" ( ) set SOURCEDIR=source set BUILDDIR=build +set "SPHINXOPTS=-W --keep-going" %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( @@ -25,11 +26,11 @@ if errorlevel 9009 ( if "%1" == "" goto help -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% || exit /b %ERRORLEVEL% goto end :help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% || exit /b %ERRORLEVEL% :end popd diff --git a/docs/requirements.txt b/docs/requirements.txt index 688a0a896c..f7104d627c 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,7 @@ breathe==4.35.0 furo==2024.1.29 m2r2==0.3.3.post2 -rstcheck[sphinx]==6.2.0 +rstcheck[sphinx]==6.2.1 rstfmt==0.0.14 Sphinx==7.2.6 sphinx-copybutton==0.5.2 diff --git a/docs/source/about/advanced_usage.rst b/docs/source/about/advanced_usage.rst index f29e6d05f8..1c3a0f64c5 100644 --- a/docs/source/about/advanced_usage.rst +++ b/docs/source/about/advanced_usage.rst @@ -47,6 +47,40 @@ editing the `conf` file in a text editor. Use the examples as reference. `General `__ ----------------------------------------------------- +`locale `__ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +**Description** + The locale used for Sunshine's user interface. + +**Choices** + +.. table:: + :widths: auto + + ======= =========== + Value Description + ======= =========== + de German + en English + en_GB English (UK) + en_US English (United States) + es Spanish + fr French + it Italian + ru Russian + sv Swedish + zh Chinese (Simplified) + ======= =========== + +**Default** + ``en`` + +**Example** + .. code-block:: text + + locale = en + `sunshine_name `__ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1489,12 +1523,12 @@ keybindings =========== =========== **Default** - ``vbr_latency`` + ``cbr`` **Example** .. code-block:: text - amd_rc = vbr_latency + amd_rc = cbr `amd_usage `__ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1509,14 +1543,15 @@ keybindings .. table:: :widths: auto - =============== =========== - Value Description - =============== =========== - transcoding transcoding (slowest) - webcam webcam (slow) - lowlatency low latency (fast) - ultralowlatency ultra low latency (fastest) - =============== =========== + ======================= =========== + Value Description + ======================= =========== + transcoding transcoding (slowest) + webcam webcam (slow) + lowlatency_high_quality low latency, high quality (fast) + lowlatency low latency (faster) + ultralowlatency ultra low latency (fastest) + ======================= =========== **Default** ``ultralowlatency`` @@ -1558,6 +1593,22 @@ keybindings amd_vbaq = enabled +`amd_enforce_hrd `__ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +**Description** + Enable Hypothetical Reference Decoder (HRD) enforcement to help constrain the target bitrate. + + .. note:: This option only applies when using amdvce `encoder`_. + +**Default** + ``enabled`` + +**Example** + .. 
code-block:: text + + amd_enforce_hrd = enabled + `amd_coder `__ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/source/about/setup.rst b/docs/source/about/setup.rst index 2a2b015ce9..5c70c11a00 100644 --- a/docs/source/about/setup.rst +++ b/docs/source/about/setup.rst @@ -45,8 +45,8 @@ Install sunshine-debian-bullseye-{arch}.deb 11.8.0 450.80.02 35;50;52;60;61;62;70;75;80;86;90 sunshine-fedora-38-{arch}.rpm 12.4.0 525.60.13 50;52;60;61;62;70;75;80;86;90 sunshine-fedora-39-{arch}.rpm 12.4.0 525.60.13 50;52;60;61;62;70;75;80;86;90 - sunshine-ubuntu-20.04-{arch}.deb 11.8.0 450.80.02 35;50;52;60;61;62;70;75;80;86;90 sunshine-ubuntu-22.04-{arch}.deb 11.8.0 450.80.02 35;50;52;60;61;62;70;75;80;86;90 + sunshine-ubuntu-24.04-{arch}.deb 11.8.0 450.80.02 35;50;52;60;61;62;70;75;80;86;90 =========================================== ============== ============== ================================ .. tab:: AppImage @@ -307,7 +307,7 @@ Install mkdir -p ~/ports/multimedia/sunshine cd ~/ports/multimedia/sunshine - curl -O https://github.com/LizardByte/Sunshine/releases/latest/download/Portfile + curl -OL https://github.com/LizardByte/Sunshine/releases/latest/download/Portfile cd ~/ports portindex sudo port install sunshine diff --git a/docs/source/building/linux.rst b/docs/source/building/linux.rst index 789409e417..7bf1af490a 100644 --- a/docs/source/building/linux.rst +++ b/docs/source/building/linux.rst @@ -88,9 +88,8 @@ Install Requirements wget \ # necessary for cuda install with `run` file which # necessary for cuda install with `run` file -Ubuntu 20.04 +Ubuntu 22.04 ^^^^^^^^^^^^ -End of Life: April 2030 Install Requirements .. code-block:: bash @@ -98,8 +97,7 @@ Install Requirements sudo apt update && sudo apt install \ build-essential \ cmake \ - g++-10 \ - libayatana-appindicator3-dev \ + libappindicator3-dev \ libavdevice-dev \ libboost-filesystem-dev \ libboost-locale-dev \ @@ -117,7 +115,6 @@ Install Requirements libpulse-dev \ libssl-dev \ libva-dev \ # VA-API - libvdpau-dev \ libwayland-dev \ # Wayland libx11-dev \ # X11 libxcb-shm0-dev \ # X11 @@ -128,21 +125,11 @@ Install Requirements libxtst-dev \ # X11 nodejs \ npm \ - wget # necessary for cuda install with `run` file - -Update gcc alias - .. code-block:: bash - - update-alternatives --install \ - /usr/bin/gcc gcc /usr/bin/gcc-10 100 \ - --slave /usr/bin/g++ g++ /usr/bin/g++-10 \ - --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \ - --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \ - --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10 + nvidia-cuda-dev \ # CUDA, NvFBC + nvidia-cuda-toolkit # CUDA, NvFBC -Ubuntu 22.04 +Ubuntu 24.04 ^^^^^^^^^^^^ -End of Life: April 2027 Install Requirements .. code-block:: bash @@ -150,6 +137,8 @@ Install Requirements sudo apt update && sudo apt install \ build-essential \ cmake \ + gcc-11 \ + g++-11 \ libappindicator3-dev \ libavdevice-dev \ libboost-filesystem-dev \ @@ -181,11 +170,22 @@ Install Requirements nvidia-cuda-dev \ # CUDA, NvFBC nvidia-cuda-toolkit # CUDA, NvFBC +Update gcc alias + .. code-block:: bash + + update-alternatives --install \ + /usr/bin/gcc gcc /usr/bin/gcc-11 100 \ + --slave /usr/bin/g++ g++ /usr/bin/g++-11 \ + --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \ + --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-11 \ + --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-11 + CUDA ---- If the version of CUDA available from your distro is not adequate, manually install CUDA. .. 
tip:: The version of CUDA you use will determine compatibility with various GPU generations. + At the time of writing, the recommended version to use is CUDA ~11.8. See `CUDA compatibility `__ for more info. Select the appropriate run file based on your desired CUDA version and architecture according to diff --git a/docs/source/building/macos.rst b/docs/source/building/macos.rst index bf96fb394f..1b874d71a1 100644 --- a/docs/source/building/macos.rst +++ b/docs/source/building/macos.rst @@ -12,14 +12,14 @@ MacPorts Install Requirements .. code-block:: bash - sudo port install avahi boost180 cmake curl libopus miniupnpc npm9 pkgconfig + sudo port install avahi boost180 cmake curl doxygen graphviz libopus miniupnpc npm9 pkgconfig python311 py311-pip Homebrew """""""" Install Requirements .. code-block:: bash - brew install boost cmake miniupnpc node opus pkg-config + brew install boost cmake doxygen graphviz miniupnpc node opus pkg-config python@3.11 If there are issues with an SSL header that is not found: .. tab:: Intel @@ -45,7 +45,7 @@ Build .. code-block:: bash cmake .. - make -j ${nproc} + make -j $(sysctl -n hw.ncpu) cpack -G DragNDrop # optionally, create a macOS dmg package diff --git a/docs/source/building/windows.rst b/docs/source/building/windows.rst index f73346ffeb..90b3d9e3c3 100644 --- a/docs/source/building/windows.rst +++ b/docs/source/building/windows.rst @@ -18,6 +18,7 @@ Install dependencies: base-devel \ cmake \ diffutils \ + doxygen \ gcc \ git \ make \ @@ -25,13 +26,17 @@ Install dependencies: mingw-w64-x86_64-boost \ mingw-w64-x86_64-cmake \ mingw-w64-x86_64-curl \ + mingw-w64-x86_64-graphviz \ mingw-w64-x86_64-miniupnpc \ mingw-w64-x86_64-nlohmann-json \ mingw-w64-x86_64-nodejs \ mingw-w64-x86_64-onevpl \ mingw-w64-x86_64-openssl \ mingw-w64-x86_64-opus \ - mingw-w64-x86_64-toolchain + mingw-w64-x86_64-rust \ + mingw-w64-x86_64-toolchain \ + python \ + python-pip Build ----- diff --git a/docs/source/conf.py b/docs/source/conf.py index c2b66a5663..42fb91eb78 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -7,7 +7,6 @@ # standard imports from datetime import datetime import os -import re import subprocess @@ -27,16 +26,8 @@ author = 'ReenigneArcher' # The full version, including alpha/beta/rc tags -with open(os.path.join(root_dir, 'CMakeLists.txt'), 'r') as f: - version = re.search(r"project\(Sunshine VERSION ((\d+)\.(\d+)\.(\d+))", str(f.read())).group(1) -""" -To use cmake method for obtaining version instead of regex, -1. Within CMakeLists.txt add the following line without backticks: - ``configure_file(docs/source/conf.py.in "${CMAKE_CURRENT_SOURCE_DIR}/docs/source/conf.py" @ONLY)`` -2. Rename this file to ``conf.py.in`` -3. 
Uncomment the next line -""" -# version = '@PROJECT_VERSION@' # use this for cmake configure_file method +# https://docs.readthedocs.io/en/stable/reference/environment-variables.html#envvar-READTHEDOCS_VERSION +version = os.getenv('READTHEDOCS_VERSION', 'dirty') # -- General configuration --------------------------------------------------- @@ -105,6 +96,17 @@ doxy_version = doxy_proc.stdout.decode('utf-8').strip() print('doxygen version: ' + doxy_version) +# create build directories, as doxygen fails to create it in macports and docker +directories = [ + os.path.join(source_dir, 'build'), + os.path.join(source_dir, 'build', 'doxyxml'), +] +for d in directories: + os.makedirs( + name=d, + exist_ok=True, + ) + # run doxygen doxy_proc = subprocess.run('doxygen Doxyfile', shell=True, cwd=source_dir) if doxy_proc.returncode != 0: diff --git a/docs/source/contributing/localization.rst b/docs/source/contributing/localization.rst index 2ca912805e..21cd330e48 100644 --- a/docs/source/contributing/localization.rst +++ b/docs/source/contributing/localization.rst @@ -30,42 +30,84 @@ localization there. Extraction ---------- -There should be minimal cases where strings need to be extracted from source code; however it may be necessary in some -situations. For example if a system tray icon is added it should be localized as it is user interfacing. -- Wrap the string to be extracted in a function as shown. - .. code-block:: cpp +.. tab:: UI - #include - #include + Sunshine uses `Vue I18n `__ for localizing the UI. + The following is a simple example of how to use it. - std::string msg = boost::locale::translate("Hello world!"); + - Add the string to `src_assets/common/assets/web/public/assets/locale/en.json`, in English. + .. code-block:: json -.. tip:: More examples can be found in the documentation for - `boost locale `__. + { + "index": { + "welcome": "Hello, Sunshine!" + } + } -.. warning:: This is for information only. Contributors should never include manually updated template files, or - manually compiled language files in Pull Requests. + .. note:: The json keys should be sorted alphabetically. You can use `jsonabc `__ + to sort the keys. -Strings are automatically extracted from the code to the `locale/sunshine.po` template file. The generated file is -used by CrowdIn to generate language specific template files. The file is generated using the -`.github/workflows/localize.yml` workflow and is run on any push event into the `nightly` branch. Jobs are only run if -any of the following paths are modified. + .. attention:: Due to the integration with Crowdin, it is important to only add strings to the `en.json` file, + and to not modify any other language files. After the PR is merged, the translations can take place + on `CrowdIn `__. Once the translations are complete, a PR will be made + to merge the translations into Sunshine. -.. code-block:: yaml + - Use the string in a Vue component. + .. code-block:: html - - 'src/**' + -When testing locally it may be desirable to manually extract, initialize, update, and compile strings. Python is -required for this, along with the python dependencies in the `./scripts/requirements.txt` file. Additionally, -`xgettext `__ must be installed. + .. tip:: More formatting examples can be found in the + `Vue I18n guide `__. -**Extract, initialize, and update** - .. code-block:: bash +.. 
tab:: C++ - python ./scripts/_locale.py --extract --init --update + There should be minimal cases where strings need to be extracted from C++ source code; however it may be necessary in + some situations. For example the system tray icon could be localized as it is user interfacing. -**Compile** - .. code-block:: bash + - Wrap the string to be extracted in a function as shown. + .. code-block:: cpp - python ./scripts/_locale.py --compile + #include + #include + + std::string msg = boost::locale::translate("Hello world!"); + + .. tip:: More examples can be found in the documentation for + `boost locale `__. + + .. warning:: This is for information only. Contributors should never include manually updated template files, or + manually compiled language files in Pull Requests. + + Strings are automatically extracted from the code to the `locale/sunshine.po` template file. The generated file is + used by CrowdIn to generate language specific template files. The file is generated using the + `.github/workflows/localize.yml` workflow and is run on any push event into the `nightly` branch. Jobs are only run if + any of the following paths are modified. + + .. code-block:: yaml + + - 'src/**' + + When testing locally it may be desirable to manually extract, initialize, update, and compile strings. Python is + required for this, along with the python dependencies in the `./scripts/requirements.txt` file. Additionally, + `xgettext `__ must be installed. + + **Extract, initialize, and update** + .. code-block:: bash + + python ./scripts/_locale.py --extract --init --update + + **Compile** + .. code-block:: bash + + python ./scripts/_locale.py --compile + + .. attention:: Due to the integration with Crowdin, it is important to not include any extracted or compiled files in + Pull Requests. The files are automatically generated and updated by the workflow. Once the PR is merged, the + translations can take place on `CrowdIn `__. Once the translations are + complete, a PR will be made to merge the translations into Sunshine. diff --git a/docs/source/contributing/testing.rst b/docs/source/contributing/testing.rst index e4b3ae1050..2d9f6290d3 100644 --- a/docs/source/contributing/testing.rst +++ b/docs/source/contributing/testing.rst @@ -59,5 +59,81 @@ Format inplace with rstfmt Unit Testing ------------ -.. todo:: Sunshine does not currently have any unit tests. If you would like to help us improve please get in contact - with us, or make a PR with suggested changes. +Sunshine uses `Google Test `__ for unit testing. Google Test is included in the +repo as a submodule. The test sources are located in the `./tests` directory. + +The tests need to be compiled into an executable, and then run. The tests are built using the normal build process, but +can be disabled by setting the `BUILD_TESTS` CMake option to `OFF`. + +To run the tests, execute the following command from the build directory: + +.. tab:: Linux + + .. code-block:: bash + + pushd tests + ./test_sunshine + popd + +.. tab:: macOS + + .. code-block:: bash + + pushd tests + ./test_sunshine + popd + +.. tab:: Windows + + .. code-block:: bash + + pushd tests + test_sunshine.exe + popd + +To see all available options, run the tests with the `--help` option. + +.. tab:: Linux + + .. code-block:: bash + + pushd tests + ./test_sunshine --help + popd + +.. tab:: macOS + + .. code-block:: bash + + pushd tests + ./test_sunshine --help + popd + +.. tab:: Windows + + .. 
code-block:: bash + + pushd tests + test_sunshine.exe --help + popd + +Some tests rely on Python to run. CMake will search for Python and enable the docs tests if it is found, otherwise +cmake will fail. You can manually disable the tests by setting the `TESTS_ENABLE_PYTHON_TESTS` CMake option to +`OFF`. + +.. tip:: + + See the googletest `FAQ `__ for more information on how to use + Google Test. + +We use `gcovr `__ to generate code coverage reports, +and `Codecov `__ to analyze the reports for all PRs and commits. + +Codecov will fail a PR if the total coverage is reduced too much, or if not enough of the diff is covered by tests. +In some cases, the code cannot be covered when running the tests inside of GitHub runners. For example, any test that +needs access to the GPU will not be able to run. In these cases, the coverage can be omitted by adding comments to the +code. See the `gcovr documentation `__ for +more information. + +Even if your changes cannot be covered in the CI, we still encourage you to write the tests for them. This will allow +maintainers to run the tests locally. diff --git a/package.json b/package.json index b685e0966a..db309cb7ec 100644 --- a/package.json +++ b/package.json @@ -4,12 +4,13 @@ "dev": "vite build --watch" }, "dependencies": { - "@fortawesome/fontawesome-free": "6.5.1", + "@fortawesome/fontawesome-free": "6.5.2", "@popperjs/core": "2.11.8", "@vitejs/plugin-vue": "4.6.2", "bootstrap": "5.3.3", "vite": "4.5.2", "vite-plugin-ejs": "1.6.4", - "vue": "3.4.5" + "vue": "3.4.5", + "vue-i18n": "9.11.0" } } diff --git a/packaging/linux/Arch/PKGBUILD b/packaging/linux/Arch/PKGBUILD index 44a6beb2b2..6fde7fe5a9 100644 --- a/packaging/linux/Arch/PKGBUILD +++ b/packaging/linux/Arch/PKGBUILD @@ -7,7 +7,7 @@ pkgrel=1 pkgdesc="@PROJECT_DESCRIPTION@" arch=('x86_64' 'aarch64') url=@PROJECT_HOMEPAGE_URL@ -license=('GPL3') +license=('GPL-3.0-only') install=sunshine.install depends=('avahi' @@ -31,16 +31,21 @@ depends=('avahi' 'numactl' 'openssl' 'opus' + 'python' 'udev') +checkdepends=('doxygen' + 'graphviz') makedepends=('boost' 'cmake' + 'gcc12' 'git' 'make' 'nodejs' 'npm') optdepends=('cuda: Nvidia GPU encoding support' 'libva-mesa-driver: AMD GPU encoding support' - 'intel-media-driver: Intel GPU encoding support') + 'intel-media-driver: Intel GPU encoding support' + 'xorg-server-xvfb: Virtual X server for headless testing') provides=('sunshine') @@ -57,6 +62,9 @@ build() { export BUILD_VERSION="@GITHUB_BUILD_VERSION@" export COMMIT="@GITHUB_COMMIT@" + export CC=gcc-12 + export CXX=g++-12 + export CFLAGS="${CFLAGS/-Werror=format-security/}" export CXXFLAGS="${CXXFLAGS/-Werror=format-security/}" @@ -72,6 +80,14 @@ build() { make -C build } +check() { + export CC=gcc-12 + export CXX=g++-12 + + cd "${srcdir}/build/tests" + ./test_sunshine --gtest_color=yes +} + package() { make -C build install DESTDIR="$pkgdir" } diff --git a/packaging/linux/flatpak/deps/org.flatpak.Builder.BaseApp b/packaging/linux/flatpak/deps/org.flatpak.Builder.BaseApp new file mode 160000 index 0000000000..6e295e6307 --- /dev/null +++ b/packaging/linux/flatpak/deps/org.flatpak.Builder.BaseApp @@ -0,0 +1 @@ +Subproject commit 6e295e630740ae8ef82c6291724e709b36477042 diff --git a/packaging/linux/flatpak/deps/shared-modules b/packaging/linux/flatpak/deps/shared-modules new file mode 160000 index 0000000000..d0229951ac --- /dev/null +++ b/packaging/linux/flatpak/deps/shared-modules @@ -0,0 +1 @@ +Subproject commit d0229951ac23967c4f5697bd7b5c1bd7e641b8c3 diff --git 
a/packaging/linux/flatpak/dev.lizardbyte.sunshine.yml b/packaging/linux/flatpak/dev.lizardbyte.sunshine.yml index 1da03310f7..0e891dacf0 100644 --- a/packaging/linux/flatpak/dev.lizardbyte.sunshine.yml +++ b/packaging/linux/flatpak/dev.lizardbyte.sunshine.yml @@ -34,6 +34,8 @@ build-options: prepend-ld-library-path: /usr/lib/sdk/vala/lib modules: + - "org.flatpak.Builder.BaseApp/xvfb.json" + - name: boost disabled: false buildsystem: simple @@ -341,6 +343,7 @@ modules: - -DSUNSHINE_ENABLE_DRM=ON - -DSUNSHINE_ENABLE_CUDA=ON - -DSUNSHINE_BUILD_FLATPAK=ON + - -DTESTS_ENABLE_PYTHON_TESTS=OFF sources: - type: git url: "@GITHUB_CLONE_URL@" @@ -358,3 +361,7 @@ modules: 's%/app/bin/sunshine%flatpak run dev.lizardbyte.sunshine\nExecStop=flatpak kill dev.lizardbyte.sunshine%g' /app/share/sunshine/systemd/user/sunshine.service - install -D $FLATPAK_BUILDER_BUILDDIR/packaging/linux/flatpak/scripts/* /app/bin + run-tests: true + test-rule: "" # empty to disable + test-commands: + - xvfb-run tests/test_sunshine --gtest_color=yes diff --git a/packaging/macos/Portfile b/packaging/macos/Portfile index ba4815e25d..aacc3209ff 100644 --- a/packaging/macos/Portfile +++ b/packaging/macos/Portfile @@ -31,13 +31,19 @@ post-fetch { system -W ${worksrcpath} "${git.cmd} submodule update --init --recursive" } +# https://guide.macports.org/chunked/reference.dependencies.html depends_build-append port:npm9 \ port:pkgconfig depends_lib port:avahi \ port:curl \ port:libopus \ - port:miniupnpc + port:miniupnpc \ + port:python311 \ + port:py311-pip + +depends_test port:doxygen \ + port:graphviz boost.version 1.81 @@ -62,3 +68,9 @@ notes-append "Run @PROJECT_NAME@ by executing 'sunshine ', notes-append "The config file will be created if it doesn't exist." notes-append "It is recommended to set a location for the apps file in the config." notes-append "See our documentation at 'https://docs.lizardbyte.dev/projects/sunshine/en/v@PROJECT_VERSION@/' for further info." 
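The ``test.run``/``test.cmd`` additions just below hook the gtest binary into MacPorts' test phase; assuming the port is built from this Portfile, the suite could then be exercised through the standard port action:

.. code-block:: bash

   # hypothetical local invocation once the Portfile changes are in place
   sudo port test sunshine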
+ +test.run yes +test.dir ${build.dir}/tests +test.target "" +test.cmd ./test_sunshine +test.args --gtest_color=yes diff --git a/packaging/macos/sunshine.rb b/packaging/macos/sunshine.rb index e312c99d4d..1853c0c8c7 100644 --- a/packaging/macos/sunshine.rb +++ b/packaging/macos/sunshine.rb @@ -7,30 +7,32 @@ class @PROJECT_NAME@ < Formula tag: "@GITHUB_BRANCH@" version "@PROJECT_VERSION@" license all_of: ["GPL-3.0-only"] - head "@GITHUB_CLONE_URL@", branch: "nightly" + head "@GITHUB_CLONE_URL@", branch: "@GITHUB_DEFAULT_BRANCH@" depends_on "boost" => :build depends_on "cmake" => :build + depends_on "node" => :build depends_on "pkg-config" => :build depends_on "curl" depends_on "miniupnpc" - depends_on "node" depends_on "openssl" depends_on "opus" def install args = %W[ - -DBUIld_WERROR=ON + -DBUILD_WERROR=ON -DCMAKE_INSTALL_PREFIX=#{prefix} -DOPENSSL_ROOT_DIR=#{Formula["openssl"].opt_prefix} -DSUNSHINE_ASSETS_DIR=sunshine/assets -DSUNSHINE_BUILD_HOMEBREW=ON + -DTESTS_ENABLE_PYTHON_TESTS=OFF ] system "cmake", "-S", ".", "-B", "build", *std_cmake_args, *args cd "build" do system "make", "-j" system "make", "install" + bin.install "tests/test_sunshine" end end @@ -54,9 +56,10 @@ def caveats test do # test that the binary runs at all - output = shell_output("#{bin}/sunshine --version").strip - puts output + system "#{bin}/sunshine", "--version" - # TODO: add unit tests + # run the test suite + # cannot build tests with python tests because homebrew destroys the source directory + system "#{bin}/test_sunshine", "--gtest_color=yes" end end diff --git a/scripts/_locale.py b/scripts/_locale.py index d967974e3f..884805702e 100644 --- a/scripts/_locale.py +++ b/scripts/_locale.py @@ -22,16 +22,19 @@ year = datetime.datetime.now().year -# retroarcher target locales +# target locales target_locales = [ - 'de', # Deutsch + 'de', # German 'en', # English 'en_GB', # English (United Kingdom) 'en_US', # English (United States) - 'es', # español - 'fr', # français - 'it', # italiano - 'ru', # русский + 'es', # Spanish + 'fr', # French + 'it', # Italian + 'ja', # Japanese + 'ru', # Russian + 'sv', # Swedish + 'zh', # Chinese ] diff --git a/scripts/update_clang_format.py b/scripts/update_clang_format.py index 8cf9b9f0d9..9e0dacda84 100644 --- a/scripts/update_clang_format.py +++ b/scripts/update_clang_format.py @@ -5,6 +5,7 @@ # variables directories = [ 'src', + 'tests', 'tools', os.path.join('third-party', 'glad'), os.path.join('third-party', 'nvfbc'), diff --git a/src/config.cpp b/src/config.cpp index 21562f8348..ab6dbfb5c3 100644 --- a/src/config.cpp +++ b/src/config.cpp @@ -85,14 +85,17 @@ namespace config { #define AMF_VIDEO_ENCODER_AV1_USAGE_LOW_LATENCY 1 #define AMF_VIDEO_ENCODER_AV1_USAGE_ULTRA_LOW_LATENCY 2 #define AMF_VIDEO_ENCODER_AV1_USAGE_WEBCAM 3 - #define AMF_VIDEO_ENCODER_HEVC_USAGE_TRANSCONDING 0 + #define AMF_VIDEO_ENCODER_AV1_USAGE_LOW_LATENCY_HIGH_QUALITY 5 + #define AMF_VIDEO_ENCODER_HEVC_USAGE_TRANSCODING 0 #define AMF_VIDEO_ENCODER_HEVC_USAGE_ULTRA_LOW_LATENCY 1 #define AMF_VIDEO_ENCODER_HEVC_USAGE_LOW_LATENCY 2 #define AMF_VIDEO_ENCODER_HEVC_USAGE_WEBCAM 3 - #define AMF_VIDEO_ENCODER_USAGE_TRANSCONDING 0 + #define AMF_VIDEO_ENCODER_HEVC_USAGE_LOW_LATENCY_HIGH_QUALITY 5 + #define AMF_VIDEO_ENCODER_USAGE_TRANSCODING 0 #define AMF_VIDEO_ENCODER_USAGE_ULTRA_LOW_LATENCY 1 #define AMF_VIDEO_ENCODER_USAGE_LOW_LATENCY 2 #define AMF_VIDEO_ENCODER_USAGE_WEBCAM 3 + #define AMF_VIDEO_ENCODER_USAGE_LOW_LATENCY_HIGH_QUALITY 5 #define AMF_VIDEO_ENCODER_UNDEFINED 0 #define AMF_VIDEO_ENCODER_CABAC 1 
#define AMF_VIDEO_ENCODER_CALV 2 @@ -144,20 +147,23 @@ namespace config { enum class usage_av1_e : int { transcoding = AMF_VIDEO_ENCODER_AV1_USAGE_TRANSCODING, webcam = AMF_VIDEO_ENCODER_AV1_USAGE_WEBCAM, + lowlatency_high_quality = AMF_VIDEO_ENCODER_AV1_USAGE_LOW_LATENCY_HIGH_QUALITY, lowlatency = AMF_VIDEO_ENCODER_AV1_USAGE_LOW_LATENCY, ultralowlatency = AMF_VIDEO_ENCODER_AV1_USAGE_ULTRA_LOW_LATENCY }; enum class usage_hevc_e : int { - transcoding = AMF_VIDEO_ENCODER_HEVC_USAGE_TRANSCONDING, + transcoding = AMF_VIDEO_ENCODER_HEVC_USAGE_TRANSCODING, webcam = AMF_VIDEO_ENCODER_HEVC_USAGE_WEBCAM, + lowlatency_high_quality = AMF_VIDEO_ENCODER_HEVC_USAGE_LOW_LATENCY_HIGH_QUALITY, lowlatency = AMF_VIDEO_ENCODER_HEVC_USAGE_LOW_LATENCY, ultralowlatency = AMF_VIDEO_ENCODER_HEVC_USAGE_ULTRA_LOW_LATENCY }; enum class usage_h264_e : int { - transcoding = AMF_VIDEO_ENCODER_USAGE_TRANSCONDING, + transcoding = AMF_VIDEO_ENCODER_USAGE_TRANSCODING, webcam = AMF_VIDEO_ENCODER_USAGE_WEBCAM, + lowlatency_high_quality = AMF_VIDEO_ENCODER_USAGE_LOW_LATENCY_HIGH_QUALITY, lowlatency = AMF_VIDEO_ENCODER_USAGE_LOW_LATENCY, ultralowlatency = AMF_VIDEO_ENCODER_USAGE_ULTRA_LOW_LATENCY }; @@ -195,12 +201,13 @@ namespace config { template std::optional - usage_from_view(const std::string_view &rc) { + usage_from_view(const std::string_view &usage) { #define _CONVERT_(x) \ - if (rc == #x##sv) return (int) T::x + if (usage == #x##sv) return (int) T::x _CONVERT_(transcoding); _CONVERT_(webcam); _CONVERT_(lowlatency); + _CONVERT_(lowlatency_high_quality); _CONVERT_(ultralowlatency); #undef _CONVERT_ return std::nullopt; @@ -346,14 +353,15 @@ namespace config { (int) amd::quality_h264_e::balanced, // quality (h264) (int) amd::quality_hevc_e::balanced, // quality (hevc) (int) amd::quality_av1_e::balanced, // quality (av1) - (int) amd::rc_h264_e::vbr_latency, // rate control (h264) - (int) amd::rc_hevc_e::vbr_latency, // rate control (hevc) - (int) amd::rc_av1_e::vbr_latency, // rate control (av1) + (int) amd::rc_h264_e::cbr, // rate control (h264) + (int) amd::rc_hevc_e::cbr, // rate control (hevc) + (int) amd::rc_av1_e::cbr, // rate control (av1) (int) amd::usage_h264_e::ultralowlatency, // usage (h264) (int) amd::usage_hevc_e::ultralowlatency, // usage (hevc) (int) amd::usage_av1_e::ultralowlatency, // usage (av1) 0, // preanalysis 1, // vbaq + 1, // enforce_hrd (int) amd::coder_e::_auto, // coder }, // amd @@ -441,6 +449,7 @@ namespace config { }; sunshine_t sunshine { + "en", // locale 2, // min_log_level 0, // flags {}, // User file @@ -990,13 +999,14 @@ namespace config { std::string usage; string_f(vars, "amd_usage", usage); if (!usage.empty()) { - video.amd.amd_usage_h264 = amd::usage_from_view(rc); - video.amd.amd_usage_hevc = amd::usage_from_view(rc); - video.amd.amd_usage_av1 = amd::usage_from_view(rc); + video.amd.amd_usage_h264 = amd::usage_from_view(usage); + video.amd.amd_usage_hevc = amd::usage_from_view(usage); + video.amd.amd_usage_av1 = amd::usage_from_view(usage); } bool_f(vars, "amd_preanalysis", (bool &) video.amd.amd_preanalysis); bool_f(vars, "amd_vbaq", (bool &) video.amd.amd_vbaq); + bool_f(vars, "amd_enforce_hrd", (bool &) video.amd.amd_enforce_hrd); int_f(vars, "vt_coder", video.vt.vt_coder, vt::coder_from_view); int_f(vars, "vt_software", video.vt.vt_allow_sw, vt::allow_software_from_view); @@ -1101,6 +1111,20 @@ namespace config { config::sunshine.flags[config::flag::UPNP].flip(); } + string_restricted_f(vars, "locale", config::sunshine.locale, { + "de"sv, // German + "en"sv, // English + 
"en_GB"sv, // English (UK) + "en_US"sv, // English (US) + "es"sv, // Spanish + "fr"sv, // French + "it"sv, // Italian + "ja"sv, // Japanese + "ru"sv, // Russian + "sv"sv, // Swedish + "zh"sv, // Chinese + }); + std::string log_level_string; string_f(vars, "min_log_level", log_level_string); @@ -1161,7 +1185,7 @@ namespace config { auto line = argv[x]; if (line == "--help"sv) { - print_help(*argv); + logging::print_help(*argv); return 1; } #ifdef _WIN32 @@ -1181,7 +1205,7 @@ namespace config { break; } if (apply_flags(line + 1)) { - print_help(*argv); + logging::print_help(*argv); return -1; } } @@ -1195,7 +1219,7 @@ namespace config { else { TUPLE_EL(var, 1, parse_option(line, line_end)); if (!var) { - print_help(*argv); + logging::print_help(*argv); return -1; } diff --git a/src/config.h b/src/config.h index 6c48f466b8..2c85096bac 100644 --- a/src/config.h +++ b/src/config.h @@ -59,6 +59,7 @@ namespace config { std::optional amd_usage_av1; std::optional amd_preanalysis; std::optional amd_vbaq; + std::optional amd_enforce_hrd; int amd_coder; } amd; @@ -160,6 +161,7 @@ namespace config { bool elevated; }; struct sunshine_t { + std::string locale; int min_log_level; std::bitset flags; std::string credentials_file; diff --git a/src/confighttp.cpp b/src/confighttp.cpp index 0657902dd1..de25bf0e7c 100644 --- a/src/confighttp.cpp +++ b/src/confighttp.cpp @@ -550,6 +550,24 @@ namespace confighttp { } } + void + getLocale(resp_https_t response, req_https_t request) { + // we need to return the locale whether authenticated or not + + print_req(request); + + pt::ptree outputTree; + auto g = util::fail_guard([&]() { + std::ostringstream data; + + pt::write_json(data, outputTree); + response->write(data.str()); + }); + + outputTree.put("status", "true"); + outputTree.put("locale", config::sunshine.locale); + } + void saveConfig(resp_https_t response, req_https_t request) { if (!authenticate(response, request)) return; @@ -743,6 +761,7 @@ namespace confighttp { server.resource["^/api/apps$"]["POST"] = saveApp; server.resource["^/api/config$"]["GET"] = getConfig; server.resource["^/api/config$"]["POST"] = saveConfig; + server.resource["^/api/configLocale$"]["GET"] = getLocale; server.resource["^/api/restart$"]["POST"] = restart; server.resource["^/api/password$"]["POST"] = savePassword; server.resource["^/api/apps/([0-9]+)$"]["DELETE"] = deleteApp; diff --git a/src/crypto.cpp b/src/crypto.cpp index e92e6e9e7d..9a5ef5a474 100644 --- a/src/crypto.cpp +++ b/src/crypto.cpp @@ -17,6 +17,10 @@ namespace crypto { X509_STORE_add_cert(x509_store.get(), cert.get()); _certs.emplace_back(std::make_pair(std::move(cert), std::move(x509_store))); } + void + cert_chain_t::clear() { + _certs.clear(); + } static int openssl_verify_cb(int ok, X509_STORE_CTX *ctx) { diff --git a/src/crypto.h b/src/crypto.h index eb355f576c..410d3c802a 100644 --- a/src/crypto.h +++ b/src/crypto.h @@ -73,6 +73,9 @@ namespace crypto { void add(x509_t &&cert); + void + clear(); + const char * verify(x509_t::element_type *cert); diff --git a/src/entry_handler.cpp b/src/entry_handler.cpp index 146a4dfb07..8d17b7d270 100644 --- a/src/entry_handler.cpp +++ b/src/entry_handler.cpp @@ -87,12 +87,12 @@ namespace args { * * EXAMPLES: * ```cpp - * print_help("sunshine", 0, nullptr); + * help("sunshine", 0, nullptr); * ``` */ int help(const char *name, int argc, char *argv[]) { - print_help(name); + logging::print_help(name); return 0; } @@ -109,7 +109,7 @@ namespace args { */ int version(const char *name, int argc, char *argv[]) { - std::cout << 
PROJECT_NAME << " version: v" << PROJECT_VER << std::endl; + // version was already logged at startup return 0; } diff --git a/src/logging.cpp b/src/logging.cpp index 70a2ae82a0..e03bcbf513 100644 --- a/src/logging.cpp +++ b/src/logging.cpp @@ -4,9 +4,11 @@ */ // standard includes +#include #include // lib includes +#include #include #include #include @@ -16,6 +18,10 @@ // local includes #include "logging.h" +extern "C" { +#include +} + using namespace std::literals; namespace bl = boost::log; @@ -29,45 +35,182 @@ bl::sources::severity_logger warning(3); // Strange events bl::sources::severity_logger error(4); // Recoverable errors bl::sources::severity_logger fatal(5); // Unrecoverable errors -/** - * @brief Flush the log. - * - * EXAMPLES: - * ```cpp - * log_flush(); - * ``` - */ -void -log_flush() { - sink->flush(); -} +BOOST_LOG_ATTRIBUTE_KEYWORD(severity, "Severity", int) -/** - * @brief Print help to stdout. - * @param name The name of the program. - * - * EXAMPLES: - * ```cpp - * print_help("sunshine"); - * ``` - */ -void -print_help(const char *name) { - std::cout - << "Usage: "sv << name << " [options] [/path/to/configuration_file] [--cmd]"sv << std::endl - << " Any configurable option can be overwritten with: \"name=value\""sv << std::endl - << std::endl - << " Note: The configuration will be created if it doesn't exist."sv << std::endl - << std::endl - << " --help | print help"sv << std::endl - << " --creds username password | set user credentials for the Web manager"sv << std::endl - << " --version | print the version of sunshine"sv << std::endl - << std::endl - << " flags"sv << std::endl - << " -0 | Read PIN from stdin"sv << std::endl - << " -1 | Do not load previously saved state and do retain any state after shutdown"sv << std::endl - << " | Effectively starting as if for the first time without overwriting any pairings with your devices"sv << std::endl - << " -2 | Force replacement of headers in video stream"sv << std::endl - << " -p | Enable/Disable UPnP"sv << std::endl - << std::endl; -} +namespace logging { + /** + * @brief A destructor that restores the initial state. + */ + deinit_t::~deinit_t() { + deinit(); + } + + /** + * @brief Deinitialize the logging system. + * + * EXAMPLES: + * ```cpp + * deinit(); + * ``` + */ + void + deinit() { + log_flush(); + bl::core::get()->remove_sink(sink); + sink.reset(); + } + + /** + * @brief Initialize the logging system. + * @param min_log_level The minimum log level to output. + * @param log_file The log file to write to. + * @returns A deinit_t object that will deinitialize the logging system when it goes out of scope. + * + * EXAMPLES: + * ```cpp + * log_init(2, "sunshine.log"); + * ``` + */ + [[nodiscard]] std::unique_ptr + init(int min_log_level, const std::string &log_file) { + if (sink) { + // Deinitialize the logging system before reinitializing it. This can probably only ever be hit in tests. 
+ deinit(); + } + + setup_av_logging(min_log_level); + + sink = boost::make_shared(); + + boost::shared_ptr stream { &std::cout, boost::null_deleter() }; + sink->locked_backend()->add_stream(stream); + sink->locked_backend()->add_stream(boost::make_shared(log_file)); + sink->set_filter(severity >= min_log_level); + + sink->set_formatter([](const bl::record_view &view, bl::formatting_ostream &os) { + constexpr const char *message = "Message"; + constexpr const char *severity = "Severity"; + constexpr int DATE_BUFFER_SIZE = 21 + 2 + 1; // Full string plus ": \0" + + auto log_level = view.attribute_values()[severity].extract().get(); + + std::string_view log_type; + switch (log_level) { + case 0: + log_type = "Verbose: "sv; + break; + case 1: + log_type = "Debug: "sv; + break; + case 2: + log_type = "Info: "sv; + break; + case 3: + log_type = "Warning: "sv; + break; + case 4: + log_type = "Error: "sv; + break; + case 5: + log_type = "Fatal: "sv; + break; + }; + + char _date[DATE_BUFFER_SIZE]; + std::time_t t = std::time(nullptr); + strftime(_date, DATE_BUFFER_SIZE, "[%Y:%m:%d:%H:%M:%S]: ", std::localtime(&t)); + + os << _date << log_type << view.attribute_values()[message].extract(); + }); + + // Flush after each log record to ensure log file contents on disk isn't stale. + // This is particularly important when running from a Windows service. + sink->locked_backend()->auto_flush(true); + + bl::core::get()->add_sink(sink); + return std::make_unique(); + } + + /** + * @brief Setup AV logging. + * @param min_log_level The log level. + */ + void + setup_av_logging(int min_log_level) { + if (min_log_level >= 1) { + av_log_set_level(AV_LOG_QUIET); + } + else { + av_log_set_level(AV_LOG_DEBUG); + } + av_log_set_callback([](void *ptr, int level, const char *fmt, va_list vl) { + static int print_prefix = 1; + char buffer[1024]; + + av_log_format_line(ptr, level, fmt, vl, buffer, sizeof(buffer), &print_prefix); + if (level <= AV_LOG_ERROR) { + // We print AV_LOG_FATAL at the error level. FFmpeg prints things as fatal that + // are expected in some cases, such as lack of codec support or similar things. + BOOST_LOG(error) << buffer; + } + else if (level <= AV_LOG_WARNING) { + BOOST_LOG(warning) << buffer; + } + else if (level <= AV_LOG_INFO) { + BOOST_LOG(info) << buffer; + } + else if (level <= AV_LOG_VERBOSE) { + // AV_LOG_VERBOSE is less verbose than AV_LOG_DEBUG + BOOST_LOG(debug) << buffer; + } + else { + BOOST_LOG(verbose) << buffer; + } + }); + } + + /** + * @brief Flush the log. + * + * EXAMPLES: + * ```cpp + * log_flush(); + * ``` + */ + void + log_flush() { + if (sink) { + sink->flush(); + } + } + + /** + * @brief Print help to stdout. + * @param name The name of the program. 
+ * + * EXAMPLES: + * ```cpp + * print_help("sunshine"); + * ``` + */ + void + print_help(const char *name) { + std::cout + << "Usage: "sv << name << " [options] [/path/to/configuration_file] [--cmd]"sv << std::endl + << " Any configurable option can be overwritten with: \"name=value\""sv << std::endl + << std::endl + << " Note: The configuration will be created if it doesn't exist."sv << std::endl + << std::endl + << " --help | print help"sv << std::endl + << " --creds username password | set user credentials for the Web manager"sv << std::endl + << " --version | print the version of sunshine"sv << std::endl + << std::endl + << " flags"sv << std::endl + << " -0 | Read PIN from stdin"sv << std::endl + << " -1 | Do not load previously saved state and do retain any state after shutdown"sv << std::endl + << " | Effectively starting as if for the first time without overwriting any pairings with your devices"sv << std::endl + << " -2 | Force replacement of headers in video stream"sv << std::endl + << " -p | Enable/Disable UPnP"sv << std::endl + << std::endl; + } +} // namespace logging diff --git a/src/logging.h b/src/logging.h index 47a08555a0..24f9d16908 100644 --- a/src/logging.h +++ b/src/logging.h @@ -10,7 +10,6 @@ #include #include -extern boost::shared_ptr> sink; using text_sink = boost::log::sinks::asynchronous_sink; extern boost::log::sources::severity_logger verbose; @@ -20,8 +19,20 @@ extern boost::log::sources::severity_logger warning; extern boost::log::sources::severity_logger error; extern boost::log::sources::severity_logger fatal; -// functions -void -log_flush(); -void -print_help(const char *name); +namespace logging { + class deinit_t { + public: + ~deinit_t(); + }; + + void + deinit(); + [[nodiscard]] std::unique_ptr + init(int min_log_level, const std::string &log_file); + void + setup_av_logging(int min_log_level); + void + log_flush(); + void + print_help(const char *name); +} // namespace logging diff --git a/src/main.cpp b/src/main.cpp index a390144868..22d9e4c4be 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -8,9 +8,6 @@ #include #include -// lib includes -#include - // local includes #include "confighttp.h" #include "entry_handler.h" @@ -26,19 +23,10 @@ #include "video.h" extern "C" { -#include #include } using namespace std::literals; -namespace bl = boost::log; - -struct NoDelete { - void - operator()(void *) {} -}; - -BOOST_LOG_ATTRIBUTE_KEYWORD(severity, "Severity", int) std::map> signal_handlers; void @@ -118,84 +106,10 @@ main(int argc, char *argv[]) { return 0; } - if (config::sunshine.min_log_level >= 1) { - av_log_set_level(AV_LOG_QUIET); - } - else { - av_log_set_level(AV_LOG_DEBUG); + auto log_deinit_guard = logging::init(config::sunshine.min_log_level, config::sunshine.log_file); + if (!log_deinit_guard) { + BOOST_LOG(error) << "Logging failed to initialize"sv; } - av_log_set_callback([](void *ptr, int level, const char *fmt, va_list vl) { - static int print_prefix = 1; - char buffer[1024]; - - av_log_format_line(ptr, level, fmt, vl, buffer, sizeof(buffer), &print_prefix); - if (level <= AV_LOG_ERROR) { - // We print AV_LOG_FATAL at the error level. FFmpeg prints things as fatal that - // are expected in some cases, such as lack of codec support or similar things. 
- BOOST_LOG(error) << buffer; - } - else if (level <= AV_LOG_WARNING) { - BOOST_LOG(warning) << buffer; - } - else if (level <= AV_LOG_INFO) { - BOOST_LOG(info) << buffer; - } - else if (level <= AV_LOG_VERBOSE) { - // AV_LOG_VERBOSE is less verbose than AV_LOG_DEBUG - BOOST_LOG(debug) << buffer; - } - else { - BOOST_LOG(verbose) << buffer; - } - }); - - sink = boost::make_shared(); - - boost::shared_ptr stream { &std::cout, NoDelete {} }; - sink->locked_backend()->add_stream(stream); - sink->locked_backend()->add_stream(boost::make_shared(config::sunshine.log_file)); - sink->set_filter(severity >= config::sunshine.min_log_level); - - sink->set_formatter([message = "Message"s, severity = "Severity"s](const bl::record_view &view, bl::formatting_ostream &os) { - constexpr int DATE_BUFFER_SIZE = 21 + 2 + 1; // Full string plus ": \0" - - auto log_level = view.attribute_values()[severity].extract().get(); - - std::string_view log_type; - switch (log_level) { - case 0: - log_type = "Verbose: "sv; - break; - case 1: - log_type = "Debug: "sv; - break; - case 2: - log_type = "Info: "sv; - break; - case 3: - log_type = "Warning: "sv; - break; - case 4: - log_type = "Error: "sv; - break; - case 5: - log_type = "Fatal: "sv; - break; - }; - - char _date[DATE_BUFFER_SIZE]; - std::time_t t = std::time(nullptr); - strftime(_date, DATE_BUFFER_SIZE, "[%Y:%m:%d:%H:%M:%S]: ", std::localtime(&t)); - - os << _date << log_type << view.attribute_values()[message].extract(); - }); - - // Flush after each log record to ensure log file contents on disk isn't stale. - // This is particularly important when running from a Windows service. - sink->locked_backend()->auto_flush(true); - - bl::core::get()->add_sink(sink); - auto fg = util::fail_guard(log_flush); // logging can begin at this point // if anything is logged prior to this point, it will appear in stdout, but not in the log viewer in the UI @@ -227,7 +141,7 @@ main(int argc, char *argv[]) { nvprefs_instance.modify_application_profile(); // Modify global settings, undo file is produced in the process to restore after improper termination nvprefs_instance.modify_global_profile(); - // Unload dynamic library to survive driver reinstallation + // Unload dynamic library to survive driver re-installation nvprefs_instance.unload(); } @@ -320,7 +234,7 @@ main(int argc, char *argv[]) { auto task = []() { BOOST_LOG(fatal) << "10 seconds passed, yet Sunshine's still running: Forcing shutdown"sv; - log_flush(); + logging::log_flush(); lifetime::debug_trap(); }; force_shutdown = task_pool.pushDelayed(task, 10s).task_id; @@ -333,7 +247,7 @@ main(int argc, char *argv[]) { auto task = []() { BOOST_LOG(fatal) << "10 seconds passed, yet Sunshine's still running: Forcing shutdown"sv; - log_flush(); + logging::log_flush(); lifetime::debug_trap(); }; force_shutdown = task_pool.pushDelayed(task, 10s).task_id; @@ -346,8 +260,8 @@ main(int argc, char *argv[]) { // If any of the following fail, we log an error and continue event though sunshine will not function correctly. // This allows access to the UI to fix configuration problems or view the logs. 
- auto deinit_guard = platf::init(); - if (!deinit_guard) { + auto platf_deinit_guard = platf::init(); + if (!platf_deinit_guard) { BOOST_LOG(error) << "Platform failed to initialize"sv; } diff --git a/src/nvhttp.cpp b/src/nvhttp.cpp index b8bddb44bb..695820f4b3 100644 --- a/src/nvhttp.cpp +++ b/src/nvhttp.cpp @@ -42,6 +42,8 @@ namespace nvhttp { namespace fs = std::filesystem; namespace pt = boost::property_tree; + crypto::cert_chain_t cert_chain; + class SunshineHttpsServer: public SimpleWeb::Server<SimpleWeb::HTTPS> { public: SunshineHttpsServer(const std::string &certification_file, const std::string &private_key_file): @@ -1017,7 +1019,6 @@ namespace nvhttp { conf_intern.pkey = file_handler::read_file(config::nvhttp.pkey.c_str()); conf_intern.servercert = file_handler::read_file(config::nvhttp.cert.c_str()); - crypto::cert_chain_t cert_chain; for (auto &[_, client] : map_id_client) { for (auto &cert : client.certs) { cert_chain.add(crypto::x509(cert)); @@ -1026,15 +1027,15 @@ auto add_cert = std::make_shared<safe::queue_t<crypto::x509_t>>(30); - // /resume doesn't always get the parameter "localAudioPlayMode" - // /launch will store it in host_audio + // resume doesn't always get the parameter "localAudioPlayMode" + // launch will store it in host_audio bool host_audio {}; https_server_t https_server { config::nvhttp.cert, config::nvhttp.pkey }; http_server_t http_server; // Verify certificates after establishing connection - https_server.verify = [&cert_chain, add_cert](SSL *ssl) { + https_server.verify = [add_cert](SSL *ssl) { crypto::x509_t x509 { SSL_get_peer_certificate(ssl) }; if (!x509) { BOOST_LOG(info) << "unknown -- denied"sv; @@ -1148,6 +1149,7 @@ void erase_all_clients() { map_id_client.clear(); + cert_chain.clear(); save_state(); } } // namespace nvhttp diff --git a/src/platform/linux/graphics.cpp b/src/platform/linux/graphics.cpp index b412988956..2cd81dd451 100644 --- a/src/platform/linux/graphics.cpp +++ b/src/platform/linux/graphics.cpp @@ -22,7 +22,9 @@ extern "C" { #define fourcc_mod_code(vendor, val) ((((uint64_t) vendor) << 56) | ((val) &0x00ffffffffffffffULL)) #define DRM_FORMAT_MOD_INVALID fourcc_mod_code(0, ((1ULL << 56) - 1)) -#define SUNSHINE_SHADERS_DIR SUNSHINE_ASSETS_DIR "/shaders/opengl" +#if !defined(SUNSHINE_SHADERS_DIR) // for testing this needs to be defined in cmake as we don't do an install + #define SUNSHINE_SHADERS_DIR SUNSHINE_ASSETS_DIR "/shaders/opengl" +#endif using namespace std::literals; namespace gl { @@ -37,7 +39,7 @@ namespace gl { } tex_t::~tex_t() { - if (!size() == 0) { + if (size() != 0) { ctx.DeleteTextures(size(), begin()); } } diff --git a/src/platform/linux/kmsgrab.cpp b/src/platform/linux/kmsgrab.cpp index d4feb3557d..b742e9edc6 100644 --- a/src/platform/linux/kmsgrab.cpp +++ b/src/platform/linux/kmsgrab.cpp @@ -1069,7 +1069,7 @@ namespace platf { } inline capture_e - refresh(file_t *file, egl::surface_descriptor_t *sd) { + refresh(file_t *file, egl::surface_descriptor_t *sd, std::optional<std::chrono::steady_clock::time_point> &frame_timestamp) { // Check for a change in HDR metadata if (connector_id) { auto connector_props = card.connector_props(*connector_id); @@ -1080,6 +1080,7 @@ } plane_t plane = drmModeGetPlane(card.fd.el, plane_id); + frame_timestamp = std::chrono::steady_clock::now(); auto fb = card.fb(plane.get()); if (!fb) { @@ -1303,7 +1304,8 @@ egl::surface_descriptor_t sd; - auto status = refresh(fb_fd, &sd); + std::optional<std::chrono::steady_clock::time_point> frame_timestamp; + auto status = refresh(fb_fd, &sd, frame_timestamp); if (status != capture_e::ok) {
return status; } @@ -1330,6 +1332,8 @@ namespace platf { gl::ctx.GetTextureSubImage(rgb->tex[0], 0, img_offset_x, img_offset_y, 0, width, height, 1, GL_BGRA, GL_UNSIGNED_BYTE, img_out->height * img_out->row_pitch, img_out->data); + img_out->frame_timestamp = frame_timestamp; + if (cursor && captured_cursor.visible) { blend_cursor(*img_out); } @@ -1456,7 +1460,7 @@ namespace platf { auto img = (egl::img_descriptor_t *) img_out.get(); img->reset(); - auto status = refresh(fb_fd, &img->sd); + auto status = refresh(fb_fd, &img->sd, img->frame_timestamp); if (status != capture_e::ok) { return status; } diff --git a/src/platform/linux/x11grab.cpp b/src/platform/linux/x11grab.cpp index 1167d3f580..0a639b9cc2 100644 --- a/src/platform/linux/x11grab.cpp +++ b/src/platform/linux/x11grab.cpp @@ -535,6 +535,7 @@ namespace platf { auto img = (x11_img_t *) img_out.get(); XImage *x_img { x11::GetImage(xdisplay.get(), xwindow, offset_x, offset_y, width, height, AllPlanes, ZPixmap) }; + img->frame_timestamp = std::chrono::steady_clock::now(); img->width = x_img->width; img->height = x_img->height; diff --git a/src/platform/windows/display_vram.cpp b/src/platform/windows/display_vram.cpp index 1baa1282bb..4aa1800ba4 100644 --- a/src/platform/windows/display_vram.cpp +++ b/src/platform/windows/display_vram.cpp @@ -25,7 +25,9 @@ extern "C" { #include -#define SUNSHINE_SHADERS_DIR SUNSHINE_ASSETS_DIR "/shaders/directx" +#if !defined(SUNSHINE_SHADERS_DIR) // for testing this needs to be defined in cmake as we don't do an install + #define SUNSHINE_SHADERS_DIR SUNSHINE_ASSETS_DIR "/shaders/directx" +#endif namespace platf { using namespace std::literals; } diff --git a/src/stream.cpp b/src/stream.cpp index 9c14680452..df5b3d9619 100644 --- a/src/stream.cpp +++ b/src/stream.cpp @@ -1814,7 +1814,7 @@ namespace stream { // The alternative is that Sunshine can never start another session until it's manually restarted. auto task = []() { BOOST_LOG(fatal) << "Hang detected! 
Session failed to terminate in 10 seconds."sv; - log_flush(); + logging::log_flush(); lifetime::debug_trap(); }; auto force_kill = task_pool.pushDelayed(task, 10s).task_id; diff --git a/src/video.cpp b/src/video.cpp index f786aeb59f..6c1938c284 100644 --- a/src/video.cpp +++ b/src/video.cpp @@ -14,7 +14,6 @@ extern "C" { #include #include #include -#include } #include "cbs.h" @@ -51,12 +50,6 @@ namespace video { av_buffer_unref(&ref); } - using avcodec_ctx_t = util::safe_ptr; - using avcodec_frame_t = util::safe_ptr; - using avcodec_buffer_t = util::safe_ptr; - using sws_t = util::safe_ptr; - using img_event_t = std::shared_ptr>>; - namespace nv { enum class profile_h264_e : int { @@ -87,11 +80,6 @@ namespace video { }; } // namespace qsv - platf::mem_type_e - map_base_dev_type(AVHWDeviceType type); - platf::pix_fmt_e - map_pix_fmt(AVPixelFormat fmt); - util::Either dxgi_init_avcodec_hardware_input_buffer(platf::avcodec_encode_device_t *); util::Either @@ -288,137 +276,6 @@ namespace video { ALWAYS_REPROBE = 1 << 9, // This is an encoder of last resort and we want to aggressively probe for a better one }; - struct encoder_platform_formats_t { - virtual ~encoder_platform_formats_t() = default; - platf::mem_type_e dev_type; - platf::pix_fmt_e pix_fmt_8bit, pix_fmt_10bit; - }; - - struct encoder_platform_formats_avcodec: encoder_platform_formats_t { - using init_buffer_function_t = std::function(platf::avcodec_encode_device_t *)>; - - encoder_platform_formats_avcodec( - const AVHWDeviceType &avcodec_base_dev_type, - const AVHWDeviceType &avcodec_derived_dev_type, - const AVPixelFormat &avcodec_dev_pix_fmt, - const AVPixelFormat &avcodec_pix_fmt_8bit, - const AVPixelFormat &avcodec_pix_fmt_10bit, - const init_buffer_function_t &init_avcodec_hardware_input_buffer_function): - avcodec_base_dev_type { avcodec_base_dev_type }, - avcodec_derived_dev_type { avcodec_derived_dev_type }, - avcodec_dev_pix_fmt { avcodec_dev_pix_fmt }, - avcodec_pix_fmt_8bit { avcodec_pix_fmt_8bit }, - avcodec_pix_fmt_10bit { avcodec_pix_fmt_10bit }, - init_avcodec_hardware_input_buffer { init_avcodec_hardware_input_buffer_function } { - dev_type = map_base_dev_type(avcodec_base_dev_type); - pix_fmt_8bit = map_pix_fmt(avcodec_pix_fmt_8bit); - pix_fmt_10bit = map_pix_fmt(avcodec_pix_fmt_10bit); - } - - AVHWDeviceType avcodec_base_dev_type, avcodec_derived_dev_type; - AVPixelFormat avcodec_dev_pix_fmt; - AVPixelFormat avcodec_pix_fmt_8bit, avcodec_pix_fmt_10bit; - - init_buffer_function_t init_avcodec_hardware_input_buffer; - }; - - struct encoder_platform_formats_nvenc: encoder_platform_formats_t { - encoder_platform_formats_nvenc( - const platf::mem_type_e &dev_type, - const platf::pix_fmt_e &pix_fmt_8bit, - const platf::pix_fmt_e &pix_fmt_10bit) { - encoder_platform_formats_t::dev_type = dev_type; - encoder_platform_formats_t::pix_fmt_8bit = pix_fmt_8bit; - encoder_platform_formats_t::pix_fmt_10bit = pix_fmt_10bit; - } - }; - - struct encoder_t { - std::string_view name; - enum flag_e { - PASSED, // Is supported - REF_FRAMES_RESTRICT, // Set maximum reference frames - CBR, // Some encoders don't support CBR, if not supported --> attempt constant quantatication parameter instead - DYNAMIC_RANGE, // hdr - VUI_PARAMETERS, // AMD encoder with VAAPI doesn't add VUI parameters to SPS - MAX_FLAGS - }; - - static std::string_view - from_flag(flag_e flag) { -#define _CONVERT(x) \ - case flag_e::x: \ - return #x##sv - switch (flag) { - _CONVERT(PASSED); - _CONVERT(REF_FRAMES_RESTRICT); - _CONVERT(CBR); - _CONVERT(DYNAMIC_RANGE); 
- _CONVERT(VUI_PARAMETERS); - _CONVERT(MAX_FLAGS); - } -#undef _CONVERT - - return "unknown"sv; - } - - struct option_t { - KITTY_DEFAULT_CONSTR_MOVE(option_t) - option_t(const option_t &) = default; - - std::string name; - std::variant *, std::function, std::string, std::string *> value; - - option_t(std::string &&name, decltype(value) &&value): - name { std::move(name) }, value { std::move(value) } {} - }; - - const std::unique_ptr platform_formats; - - struct { - std::vector common_options; - std::vector sdr_options; - std::vector hdr_options; - std::vector fallback_options; - - // QP option to set in the case that CBR/VBR is not supported - // by the encoder. If CBR/VBR is guaranteed to be supported, - // don't specify this option to avoid wasteful encoder probing. - std::optional qp; - - std::string name; - std::bitset capabilities; - - bool - operator[](flag_e flag) const { - return capabilities[(std::size_t) flag]; - } - - std::bitset::reference - operator[](flag_e flag) { - return capabilities[(std::size_t) flag]; - } - } av1, hevc, h264; - - uint32_t flags; - }; - - struct encode_session_t { - virtual ~encode_session_t() = default; - - virtual int - convert(platf::img_t &img) = 0; - - virtual void - request_idr_frame() = 0; - - virtual void - request_normal_frame() = 0; - - virtual void - invalidate_ref_frames(int64_t first_frame, int64_t last_frame) = 0; - }; - class avcodec_encode_session_t: public encode_session_t { public: avcodec_encode_session_t() = default; @@ -586,7 +443,7 @@ namespace video { auto capture_thread_sync = safe::make_shared(start_capture_sync, end_capture_sync); #ifdef _WIN32 - static encoder_t nvenc { + encoder_t nvenc { "nvenc"sv, std::make_unique( platf::mem_type_e::dxgi, @@ -630,7 +487,7 @@ namespace video { PARALLEL_ENCODING | REF_FRAMES_INVALIDATION // flags }; #elif !defined(__APPLE__) - static encoder_t nvenc { + encoder_t nvenc { "nvenc"sv, std::make_unique( #ifdef _WIN32 @@ -718,7 +575,7 @@ namespace video { #endif #ifdef _WIN32 - static encoder_t quicksync { + encoder_t quicksync { "quicksync"sv, std::make_unique( AV_HWDEVICE_TYPE_D3D11VA, AV_HWDEVICE_TYPE_QSV, @@ -799,7 +656,7 @@ namespace video { PARALLEL_ENCODING | CBR_WITH_VBR | RELAXED_COMPLIANCE | NO_RC_BUF_LIMIT }; - static encoder_t amdvce { + encoder_t amdvce { "amdvce"sv, std::make_unique( AV_HWDEVICE_TYPE_D3D11VA, AV_HWDEVICE_TYPE_NONE, @@ -815,6 +672,7 @@ namespace video { { "quality"s, &config::video.amd.amd_quality_av1 }, { "rc"s, &config::video.amd.amd_rc_av1 }, { "usage"s, &config::video.amd.amd_usage_av1 }, + { "enforce_hrd"s, &config::video.amd.amd_enforce_hrd }, }, {}, // SDR-specific options {}, // HDR-specific options @@ -836,6 +694,7 @@ namespace video { { "rc"s, &config::video.amd.amd_rc_hevc }, { "usage"s, &config::video.amd.amd_usage_hevc }, { "vbaq"s, &config::video.amd.amd_vbaq }, + { "enforce_hrd"s, &config::video.amd.amd_enforce_hrd }, }, {}, // SDR-specific options {}, // HDR-specific options @@ -855,6 +714,7 @@ namespace video { { "rc"s, &config::video.amd.amd_rc_h264 }, { "usage"s, &config::video.amd.amd_usage_h264 }, { "vbaq"s, &config::video.amd.amd_vbaq }, + { "enforce_hrd"s, &config::video.amd.amd_enforce_hrd }, }, // SDR-specific options {}, @@ -871,7 +731,7 @@ namespace video { }; #endif - static encoder_t software { + encoder_t software { "software"sv, std::make_unique( AV_HWDEVICE_TYPE_NONE, AV_HWDEVICE_TYPE_NONE, @@ -936,7 +796,7 @@ namespace video { }; #ifdef __linux__ - static encoder_t vaapi { + encoder_t vaapi { "vaapi"sv, std::make_unique( 
AV_HWDEVICE_TYPE_VAAPI, AV_HWDEVICE_TYPE_NONE, @@ -1004,7 +864,7 @@ #endif #ifdef __APPLE__ - static encoder_t videotoolbox { + encoder_t videotoolbox { "videotoolbox"sv, std::make_unique<encoder_platform_formats_avcodec>( AV_HWDEVICE_TYPE_VIDEOTOOLBOX, AV_HWDEVICE_TYPE_NONE, diff --git a/src/video.h b/src/video.h index fec5c38b34..eb8eabc358 100644 --- a/src/video.h +++ b/src/video.h @@ -11,11 +11,181 @@ extern "C" { #include +#include } struct AVPacket; namespace video { + platf::mem_type_e + map_base_dev_type(AVHWDeviceType type); + platf::pix_fmt_e + map_pix_fmt(AVPixelFormat fmt); + + void + free_ctx(AVCodecContext *ctx); + void + free_frame(AVFrame *frame); + void + free_buffer(AVBufferRef *ref); + + using avcodec_ctx_t = util::safe_ptr<AVCodecContext, free_ctx>; + using avcodec_frame_t = util::safe_ptr<AVFrame, free_frame>; + using avcodec_buffer_t = util::safe_ptr<AVBufferRef, free_buffer>; + using sws_t = util::safe_ptr<SwsContext, sws_freeContext>; + using img_event_t = std::shared_ptr<safe::event_t<std::shared_ptr<platf::img_t>>>; + + struct encoder_platform_formats_t { + virtual ~encoder_platform_formats_t() = default; + platf::mem_type_e dev_type; + platf::pix_fmt_e pix_fmt_8bit, pix_fmt_10bit; + }; + + struct encoder_platform_formats_avcodec: encoder_platform_formats_t { + using init_buffer_function_t = std::function<util::Either<avcodec_buffer_t, int>(platf::avcodec_encode_device_t *)>; + + encoder_platform_formats_avcodec( + const AVHWDeviceType &avcodec_base_dev_type, + const AVHWDeviceType &avcodec_derived_dev_type, + const AVPixelFormat &avcodec_dev_pix_fmt, + const AVPixelFormat &avcodec_pix_fmt_8bit, + const AVPixelFormat &avcodec_pix_fmt_10bit, + const init_buffer_function_t &init_avcodec_hardware_input_buffer_function): + avcodec_base_dev_type { avcodec_base_dev_type }, + avcodec_derived_dev_type { avcodec_derived_dev_type }, + avcodec_dev_pix_fmt { avcodec_dev_pix_fmt }, + avcodec_pix_fmt_8bit { avcodec_pix_fmt_8bit }, + avcodec_pix_fmt_10bit { avcodec_pix_fmt_10bit }, + init_avcodec_hardware_input_buffer { init_avcodec_hardware_input_buffer_function } { + dev_type = map_base_dev_type(avcodec_base_dev_type); + pix_fmt_8bit = map_pix_fmt(avcodec_pix_fmt_8bit); + pix_fmt_10bit = map_pix_fmt(avcodec_pix_fmt_10bit); + } + + AVHWDeviceType avcodec_base_dev_type, avcodec_derived_dev_type; + AVPixelFormat avcodec_dev_pix_fmt; + AVPixelFormat avcodec_pix_fmt_8bit, avcodec_pix_fmt_10bit; + + init_buffer_function_t init_avcodec_hardware_input_buffer; + }; + + struct encoder_platform_formats_nvenc: encoder_platform_formats_t { + encoder_platform_formats_nvenc( + const platf::mem_type_e &dev_type, + const platf::pix_fmt_e &pix_fmt_8bit, + const platf::pix_fmt_e &pix_fmt_10bit) { + encoder_platform_formats_t::dev_type = dev_type; + encoder_platform_formats_t::pix_fmt_8bit = pix_fmt_8bit; + encoder_platform_formats_t::pix_fmt_10bit = pix_fmt_10bit; + } + }; + + struct encoder_t { + std::string_view name; + enum flag_e { + PASSED, // Is supported + REF_FRAMES_RESTRICT, // Set maximum reference frames + CBR, // Some encoders don't support CBR, if not supported --> attempt constant quantization parameter instead + DYNAMIC_RANGE, // hdr + VUI_PARAMETERS, // AMD encoder with VAAPI doesn't add VUI parameters to SPS + MAX_FLAGS + }; + + static std::string_view + from_flag(flag_e flag) { +#define _CONVERT(x) \ + case flag_e::x: \ + return std::string_view(#x) + switch (flag) { + _CONVERT(PASSED); + _CONVERT(REF_FRAMES_RESTRICT); + _CONVERT(CBR); + _CONVERT(DYNAMIC_RANGE); + _CONVERT(VUI_PARAMETERS); + _CONVERT(MAX_FLAGS); + } +#undef _CONVERT + + return { "unknown" }; + } + + struct option_t { + KITTY_DEFAULT_CONSTR_MOVE(option_t) + option_t(const option_t &) = default; + + std::string
name; + std::variant<int, int *, std::optional<int> *, std::function<int(const config_t &)>, std::string, std::string *> value; + + option_t(std::string &&name, decltype(value) &&value): + name { std::move(name) }, value { std::move(value) } {} + }; + + const std::unique_ptr<const encoder_platform_formats_t> platform_formats; + + struct codec_t { + std::vector<option_t> common_options; + std::vector<option_t> sdr_options; + std::vector<option_t> hdr_options; + std::vector<option_t> fallback_options; + + // QP option to set in the case that CBR/VBR is not supported + // by the encoder. If CBR/VBR is guaranteed to be supported, + // don't specify this option to avoid wasteful encoder probing. + std::optional<int> qp; + + std::string name; + std::bitset<MAX_FLAGS> capabilities; + + bool + operator[](flag_e flag) const { + return capabilities[(std::size_t) flag]; + } + + std::bitset<MAX_FLAGS>::reference + operator[](flag_e flag) { + return capabilities[(std::size_t) flag]; + } + } av1, hevc, h264; + + uint32_t flags; + }; + + struct encode_session_t { + virtual ~encode_session_t() = default; + + virtual int + convert(platf::img_t &img) = 0; + + virtual void + request_idr_frame() = 0; + + virtual void + request_normal_frame() = 0; + + virtual void + invalidate_ref_frames(int64_t first_frame, int64_t last_frame) = 0; + }; + + // encoders + extern encoder_t software; + +#if !defined(__APPLE__) + extern encoder_t nvenc; // available for windows and linux +#endif + +#ifdef _WIN32 + extern encoder_t amdvce; + extern encoder_t quicksync; +#endif + +#ifdef __linux__ + extern encoder_t vaapi; +#endif + +#ifdef __APPLE__ + extern encoder_t videotoolbox; +#endif + struct packet_raw_t { virtual ~packet_raw_t() = default; @@ -154,6 +324,8 @@ config_t config, void *channel_data); + bool + validate_encoder(encoder_t &encoder, bool expect_failure); int probe_encoders(); } // namespace video diff --git a/src_assets/common/assets/web/Navbar.vue b/src_assets/common/assets/web/Navbar.vue index 948fb4d724..9e4e1be64f 100644 --- a/src_assets/common/assets/web/Navbar.vue +++ b/src_assets/common/assets/web/Navbar.vue @@ -11,22 +11,22 @@ diff --git a/src_assets/common/assets/web/ResourceCard.vue b/src_assets/common/assets/web/ResourceCard.vue index e2481ca475..aee837689c 100644 --- a/src_assets/common/assets/web/ResourceCard.vue +++ b/src_assets/common/assets/web/ResourceCard.vue @@ -1,35 +1,32 @@