CI: Add new repository actions for GitHub

PatTheMav 2023-07-17 20:09:52 +02:00
parent ae897c2764
commit d6b98def22
26 changed files with 2285 additions and 50 deletions

.github/actions/build-obs/action.yaml vendored Normal file

@@ -0,0 +1,118 @@
name: Set Up and Build obs-studio
description: Builds obs-studio for specified architecture and build config
inputs:
target:
description: Build target for obs-studio
required: true
config:
description: Build configuration
required: false
default: RelWithDebInfo
codesign:
description: Enable codesigning (macOS only)
required: false
default: 'false'
codesignIdent:
description: Developer ID for application codesigning (macOS only)
required: false
default: '-'
codesignTeam:
description: Team ID for application codesigning (macOS only)
required: false
default: ''
workingDirectory:
description: Working directory for packaging
required: false
default: ${{ github.workspace }}
runs:
using: composite
steps:
- name: Run macOS Build
if: runner.os == 'macOS'
shell: zsh --no-rcs --errexit --pipefail {0}
working-directory: ${{ inputs.workingDirectory }}
env:
CODESIGN_IDENT: ${{ inputs.codesignIdent }}
CODESIGN_TEAM: ${{ inputs.codesignTeam }}
run: |
: Run macOS Build
local -a build_args=(
--config ${{ inputs.config }}
--target macos-${{ inputs.target }}
)
if (( ${+RUNNER_DEBUG} )) build_args+=(--debug)
if [[ '${{ inputs.codesign }}' == true ]] build_args+=(--codesign)
git fetch origin --no-tags --no-recurse-submodules -q
.github/scripts/build-macos ${build_args}
- name: Install Dependencies 🛍️
if: runner.os == 'Linux'
shell: bash
run: |
: Install Dependencies 🛍️
echo ::group::Install Dependencies
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
brew install --quiet zsh
echo ::endgroup::
- name: Run Ubuntu Build
if: runner.os == 'Linux'
shell: zsh --no-rcs --errexit --pipefail {0}
working-directory: ${{ inputs.workingDirectory }}
run: |
: Run Ubuntu Build
local -a build_args=(
--config ${{ inputs.config }}
--target linux-${{ inputs.target }}
--generator Ninja
)
if (( ${+RUNNER_DEBUG} )) build_args+=(--debug)
git fetch origin --no-tags --no-recurse-submodules -q
.github/scripts/build-linux ${build_args}
- name: Run Windows Build
if: runner.os == 'Windows'
shell: pwsh
working-directory: ${{ inputs.workingDirectory }}
run: |
# Run Windows Build
$BuildArgs = @{
Target = '${{ inputs.target }}'
Configuration = '${{ inputs.config }}'
}
if ( $Env:RUNNER_DEBUG -ne $null ) {
$BuildArgs += @{ Debug = $true }
}
git fetch origin --no-tags --no-recurse-submodules -q
.github/scripts/Build-Windows.ps1 @BuildArgs
- name: Create Summary 📊
if: contains(fromJSON('["Linux", "macOS"]'), runner.os)
shell: zsh --no-rcs --errexit --pipefail {0}
env:
CCACHE_CONFIGPATH: ${{ inputs.workingDirectory }}/.ccache.conf
run: |
: Create Summary 📊
local -a ccache_data
if (( ${+RUNNER_DEBUG} )) {
setopt XTRACE
ccache_data=("${(fA)$(ccache -s -vv)}")
} else {
ccache_data=("${(fA)$(ccache -s)}")
}
print '### ${{ runner.os }} Ccache Stats (${{ inputs.target }})' >> $GITHUB_STEP_SUMMARY
print '```' >> $GITHUB_STEP_SUMMARY
for line (${ccache_data}) {
print ${line} >> $GITHUB_STEP_SUMMARY
}
print '```' >> $GITHUB_STEP_SUMMARY
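For reference, a minimal sketch of a workflow job invoking this action; the runner label, checkout options, target value, and secret names are illustrative assumptions, not part of this commit:

jobs:
  macos-build:
    runs-on: macos-13
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: recursive
      - name: Build OBS Studio 🧱
        uses: ./.github/actions/build-obs
        with:
          target: arm64
          config: RelWithDebInfo
          codesign: 'true'
          codesignIdent: ${{ secrets.MACOS_SIGNING_IDENTITY }}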


@@ -0,0 +1,57 @@
name: Check For Changed Files
description: Checks for changed files compared to specific git reference and glob expression
inputs:
baseRef:
description: Git reference to check against
required: true
ref:
description: Git reference to check with
required: false
default: HEAD
checkGlob:
description: Glob expression to limit check to specific files
required: false
useFallback:
description: Use fallback compare against prior commit
required: false
default: 'true'
outputs:
hasChangedFiles:
value: ${{ steps.checks.outputs.hasChangedFiles }}
description: True if specified files were changed in comparison to specified git reference
changedFiles:
value: ${{ toJSON(steps.checks.outputs.changedFiles) }}
description: List of changed files
runs:
using: composite
steps:
- name: Check For Changed Files ✅
shell: bash
id: checks
env:
GIT_BASE_REF: ${{ inputs.baseRef }}
GIT_REF: ${{ inputs.ref }}
USE_FALLBACK: ${{ inputs.useFallback }}
run: |
: Check for Changed Files ✅
if [[ "${RUNNER_DEBUG}" ]]; then set -x; fi
shopt -s extglob
shopt -s dotglob
if ! git cat-file -e ${GIT_BASE_REF}; then
echo "::warning::Provided base reference ${GIT_BASE_REF} is invalid"
if [[ "${USE_FALLBACK}" == 'true' ]]; then
GIT_BASE_REF='HEAD~1'
fi
fi
changes=($(git diff --name-only ${GIT_BASE_REF} ${GIT_REF} -- ${{ inputs.checkGlob }}))
if (( ${#changes[@]} )); then
file_string="${changes[*]}"
echo "hasChangedFiles=true" >> $GITHUB_OUTPUT
echo "changedFiles=[${file_string// /,}]" >> GITHUB_OUTPUT
else
echo "hasChangedFiles=false" >> $GITHUB_OUTPUT
echo "changedFiles=[]" >> GITHUB_OUTPUT
fi
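A minimal usage sketch, assuming the action is checked in as .github/actions/check-changes (the glob and follow-up step are illustrative):

- name: Check for Changed Files ✅
  id: changes
  uses: ./.github/actions/check-changes
  with:
    baseRef: origin/master
    checkGlob: plugins/rtmp-services/data/*.json
- name: React to Changes
  if: fromJSON(steps.changes.outputs.hasChangedFiles)
  run: echo "Changed files: ${{ steps.changes.outputs.changedFiles }}"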


@@ -0,0 +1,38 @@
name: Flatpak Manifest Validator
description: Checks order of Flatpak modules in manifest file
inputs:
manifestFile:
description: Flatpak manifest file to check
failCondition:
description: Controls whether failed checks also fail the workflow run
required: false
default: never
workingDirectory:
description: Working directory for checks
required: false
default: ${{ github.workspace }}
runs:
using: composite
steps:
- name: Check Runner Operating System 🏃‍♂️
if: runner.os == 'Windows'
shell: bash
run: |
: Check Runner Operating System 🏃‍♂️
echo "services-validation action requires a macOS-based or Linux-based runner."
exit 2
- name: Validate Flatpak Manifest 🕵️
shell: bash
working-directory: ${{ inputs.workingDirectory }}
run: |
: Validate Flatpak Manifest 🕵️
echo ::group::Run Validation
if [[ '${{ inputs.failCondition }}' == 'never' ]]; then set +e; fi
python3 -u \
build-aux/format-manifest.py \
build-aux/com.obsproject.Studio.json \
--check \
--loglevel INFO
echo ::endgroup::
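A minimal usage sketch (the action path is an assumption); any failCondition other than the default 'never' skips the set +e guard, so a failed validation fails the run:

- name: Validate Flatpak Manifest 🕵️
  uses: ./.github/actions/flatpak-manifest-validator  # assumed path
  with:
    failCondition: error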


@@ -0,0 +1,62 @@
name: Generate Documentation
description: Updates Sphinx-based documentation
inputs:
sourceDirectory:
description: Path to repository checkout
required: false
default: ${{ github.workspace }}
disableLinkExtensions:
description: Disable Sphinx link extensions
required: false
default: 'false'
runs:
using: composite
steps:
- name: Update Version Number and Copyright ↗️
id: setup
shell: bash
run: |
: Update Version Number and Copyright ↗️
if [[ "${RUNNER_DEBUG}" ]]; then set -x; fi
: "${major:=}"
: "${minor:=}"
: "${patch:=}"
read -r _ major _ minor _ patch _ <<< \
"$(grep -E -e "#define LIBOBS_API_(MAJOR|MINOR|PATCH)_VER *" libobs/obs-config.h \
| sed 's/#define //g' \
| tr -s ' ' \
| tr '\n' ' ')"
sed -i -E \
-e "s/version = '([0-9]+\.[0-9]+\.[0-9]+)'/version = '${major}.${minor}.${patch}'/g" \
-e "s/release = '([0-9]+\.[0-9]+\.[0-9]+)'/release = '${major}.${minor}.${patch}'/g" \
-e "s/copyright = '(2017-[0-9]+, Lain Bailey)'/copyright = '2017-$(date +"%Y"), Lain Bailey'/g" \
${{ inputs.sourceDirectory }}/docs/sphinx/conf.py
if [[ '${{ inputs.disableLinkExtensions }}' == 'true' ]]; then
sed -i -e "s/html_link_suffix = None/html_link_suffix = ''/g" \
${{ inputs.sourceDirectory }}/docs/sphinx/conf.py
echo "artifactName=OBS Studio Docs (No Extensions)" >> $GITHUB_OUTPUT
else
echo "artifactName=OBS Studio Docs" >> $GITHUB_OUTPUT
fi
echo "commitHash=${GITHUB_SHA:0:9}" >> $GITHUB_OUTPUT
- name: Install Sphinx 📜
uses: totaldebug/sphinx-publish-action@1.2.0
with:
sphinx_src: ${{ inputs.sourceDirectory }}/docs/sphinx
build_only: true
target_branch: master
target_path: '../home/_build'
pre_build_commands: 'pip install -Iv sphinx==5.1.1'
- uses: actions/upload-artifact@v3
with:
name: ${{ steps.setup.outputs.artifactName }} ${{ steps.setup.outputs.commitHash }}
path: |
${{ runner.temp }}/_github_home/_build
!${{ runner.temp }}/_github_home/_build/.doctrees
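A minimal usage sketch, assuming the action lives at .github/actions/generate-docs; the checkout step is illustrative:

- uses: actions/checkout@v3
- name: Generate Documentation 📖
  uses: ./.github/actions/generate-docs
  with:
    disableLinkExtensions: 'true'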

.github/actions/package-obs/action.yaml vendored Normal file

@@ -0,0 +1,114 @@
name: Package obs-studio
description: Packages obs-studio for specified architecture and build config
inputs:
target:
description: Build target for dependencies
required: true
config:
description: Build configuration
required: false
default: Release
codesign:
description: Enable codesigning (macOS only)
required: false
default: 'false'
notarize:
description: Enable notarization (macOS only)
required: false
default: 'false'
codesignIdent:
description: Developer ID for application codesigning (macOS only)
required: false
default: '-'
codesignUser:
description: Apple ID username for notarization (macOS only)
required: false
default: ''
codesignPass:
description: Apple ID password for notarization (macOS only)
required: false
default: ''
package:
description: Create platform-specific packages instead of archives
required: false
default: 'false'
workingDirectory:
description: Working directory for packaging
required: false
default: ${{ github.workspace }}
runs:
using: composite
steps:
- name: Run macOS packaging
if: runner.os == 'macOS'
shell: zsh --no-rcs --errexit --pipefail {0}
working-directory: ${{ inputs.workingDirectory }}
env:
CODESIGN_IDENT: ${{ inputs.codesignIdent }}
CODESIGN_IDENT_USER: ${{ inputs.codesignUser }}
CODESIGN_IDENT_PASS: ${{ inputs.codesignPass }}
run: |
: Run macOS Packaging
local -a package_args=(
--target macos-${{ inputs.target }}
--config ${{ inputs.config }}
)
if (( ${+RUNNER_DEBUG} )) package_args+=(--debug)
if [[ '${{ inputs.codesign }}' == true ]] package_args+=(--codesign)
if [[ '${{ inputs.notarize }}' == true ]] package_args+=(--notarize)
if [[ '${{ inputs.package }}' == true ]] package_args+=(--package)
.github/scripts/package-macos ${package_args}
- name: Install Dependencies 🛍️
if: runner.os == 'Linux'
shell: bash
run: |
: Install Dependencies 🛍️
echo ::group::Install Dependencies
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
brew install --quiet zsh
echo ::endgroup::
- name: Run Ubuntu packaging
if: runner.os == 'Linux'
shell: zsh --no-rcs --errexit --pipefail {0}
working-directory: ${{ inputs.workingDirectory }}
run: |
: Run Ubuntu Packaging
local -a package_args=(
--target linux-${{ inputs.target }}
--config ${{ inputs.config }}
)
if (( ${+RUNNER_DEBUG} )) package_args+=(--debug)
if [[ '${{ inputs.package }}' == true ]] package_args+=(--package)
${{ inputs.workingDirectory }}/.github/scripts/package-linux ${package_args}
- name: Run Windows packaging
if: runner.os == 'Windows'
shell: pwsh
working-directory: ${{ inputs.workingDirectory }}
run: |
# Run Windows Packaging
$PackageArgs = @{
Target = '${{ inputs.target }}'
Configuration = '${{ inputs.config }}'
}
if ( $Env:RUNNER_DEBUG -ne $null ) {
    $PackageArgs += @{ Debug = $true }
}
.github/scripts/Package-Windows.ps1 @PackageArgs
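A minimal usage sketch for a packaging step following a build; the secret names are illustrative assumptions:

- name: Package OBS Studio 📀
  uses: ./.github/actions/package-obs
  with:
    target: arm64
    config: Release
    codesign: 'true'
    notarize: 'true'
    codesignIdent: ${{ secrets.MACOS_SIGNING_IDENTITY }}
    codesignUser: ${{ secrets.MACOS_NOTARIZATION_USERNAME }}
    codesignPass: ${{ secrets.MACOS_NOTARIZATION_PASSWORD }}
    package: 'true'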

.github/actions/qt-xml-validator/action.yaml vendored Normal file

@@ -0,0 +1,64 @@
name: Validate UI XML
description: Validates Qt UI XML files
inputs:
failCondition:
description: Controls whether failed checks also fail the workflow run
required: false
default: never
workingDirectory:
description: Working directory for checks
required: false
default: ${{ github.workspace }}
runs:
using: composite
steps:
- name: Check Runner Operating System 🏃‍♂️
if: runner.os == 'Windows'
shell: bash
run: |
: Check Runner Operating System 🏃‍♂️
echo "::notice::qt-xml-validator action requires an Linux-based or macOS-based runner."
exit 2
- name: Install xmllint 🕵️
if: runner.os == 'Linux'
shell: bash
run: |
: Install xmllint 🕵️
if [[ "${RUNNER_DEBUG}" ]]; then set -x; fi
echo ::group::Installing libxml2-utils
sudo apt-get -qq update
sudo apt-get install --no-install-recommends -y libxml2-utils
echo ::endgroup::
- name: Register Annotations 📝
uses: korelstar/xmllint-problem-matcher@v1
- name: Validate XML 💯
shell: bash
env:
GITHUB_EVENT_FORCED: ${{ github.event.forced }}
GITHUB_REF_BEFORE: ${{ github.event.before }}
run: |
: Validate XML 💯
if [[ "${RUNNER_DEBUG}" ]]; then set -x; fi
shopt -s extglob
changes=($(git diff --name-only HEAD~1 HEAD -- UI/forms))
case "${GITHUB_EVENT_NAME}" in
pull_request) changes=($(git diff --name-only origin/"${GITHUB_BASE_REF}" HEAD -- UI/forms)) ;;
push)
if [[ "${GITHUB_EVENT_FORCED}" == false ]]; then
changes=($(git diff --name-only ${GITHUB_REF_BEFORE} HEAD -- UI/forms))
fi
;;
*) ;;
esac
if (( ${#changes[@]} )); then
if [[ '${{ inputs.failCondition }}' == never ]]; then set +e; fi
xmllint \
--schema ${{ github.workspace }}/UI/forms/XML-Schema-Qt5.15.xsd \
--noout "${changes[@]}"
fi
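A minimal usage sketch; because failCondition is compared against the literal 'never', any other value (here 'error', illustrative) lets xmllint failures fail the run:

- name: Validate UI XML 💯
  uses: ./.github/actions/qt-xml-validator
  with:
    failCondition: error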

.github/actions/run-clang-format/action.yaml vendored Normal file

@@ -0,0 +1,61 @@
name: Run clang-format
description: Runs clang-format and checks for any changes introduced by it
inputs:
failCondition:
description: Controls whether failed checks also fail the workflow run
required: false
default: never
workingDirectory:
description: Working directory for checks
required: false
default: ${{ github.workspace }}
runs:
using: composite
steps:
- name: Check Runner Operating System 🏃‍♂️
if: runner.os == 'Windows'
shell: bash
run: |
: Check Runner Operating System 🏃‍♂️
echo "::notice::run-clang-format action requires a macOS-based or Linux-based runner."
exit 2
- name: Install Dependencies 🛍️
if: runner.os == 'Linux'
shell: bash
run: |
: Install Dependencies 🛍️
echo ::group::Install Dependencies
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
echo "/home/linuxbrew/.linuxbrew/opt/clang-format@13/bin" >> $GITHUB_PATH
brew install --quiet zsh
echo ::endgroup::
- name: Run clang-format 🐉
id: result
shell: zsh --no-rcs --errexit --pipefail {0}
working-directory: ${{ inputs.workingDirectory }}
env:
GITHUB_EVENT_FORCED: ${{ github.event.forced }}
GITHUB_REF_BEFORE: ${{ github.event.before }}
run: |
: Run clang-format 🐉
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local -a changes=($(git diff --name-only HEAD~1 HEAD))
case ${GITHUB_EVENT_NAME} {
pull_request) changes=($(git diff --name-only origin/${GITHUB_BASE_REF} HEAD)) ;;
push) if [[ ${GITHUB_EVENT_FORCED} != true ]] changes=($(git diff --name-only ${GITHUB_REF_BEFORE} HEAD)) ;;
*) ;;
}
if (( ${changes[(I)(*.c|*.h|*.cpp|*.hpp|*.m|*.mm)]} )) {
print ::group::Install clang-format-13
brew install --quiet obsproject/tools/clang-format@13
print ::endgroup::
print ::group::Run clang-format-13
./build-aux/run-clang-format --fail-${{ inputs.failCondition }} --check
print ::endgroup::
}
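A minimal usage sketch; the failCondition value is spliced into a --fail-<value> flag for build-aux/run-clang-format, so the 'error' value here assumes the helper script accepts --fail-error:

- name: Run clang-format 🐉
  uses: ./.github/actions/run-clang-format
  with:
    failCondition: error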

.github/actions/run-cmake-format/action.yaml vendored Normal file

@@ -0,0 +1,60 @@
name: Run cmake-format
description: Runs cmake-format and checks for any changes introduced by it
inputs:
failCondition:
description: Controls whether failed checks also fail the workflow run
required: false
default: never
workingDirectory:
description: Working directory for checks
required: false
default: ${{ github.workspace }}
runs:
using: composite
steps:
- name: Check Runner Operating System 🏃‍♂️
if: runner.os == 'Windows'
shell: bash
run: |
: Check Runner Operating System 🏃‍♂️
echo "::notice::run-cmake-format action requires a macOS-based or Linux-based runner."
exit 2
- name: Install Dependencies 🛍️
if: runner.os == 'Linux'
shell: bash
run: |
: Install Dependencies 🛍️
echo ::group::Install Dependencies
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
brew install --quiet zsh
echo ::endgroup::
- name: Run cmake-format 🎛️
id: result
shell: zsh --no-rcs --errexit --pipefail {0}
working-directory: ${{ inputs.workingDirectory }}
env:
GITHUB_EVENT_FORCED: ${{ github.event.forced }}
GITHUB_REF_BEFORE: ${{ github.event.before }}
run: |
: Run cmake-format 🎛️
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local -a changes=($(git diff --name-only HEAD~1 HEAD))
case ${GITHUB_EVENT_NAME} {
pull_request) changes=($(git diff --name-only origin/${GITHUB_BASE_REF} HEAD)) ;;
push) if [[ ${GITHUB_EVENT_FORCED} != true ]] changes=($(git diff --name-only ${GITHUB_REF_BEFORE} HEAD)) ;;
*) ;;
}
if (( ${changes[(I)(*.cmake|*CMakeLists.txt)]} )) {
print ::group::Install cmakelang
pip3 install cmakelang
print ::endgroup::
print ::group::Run cmake-format
./build-aux/run-cmake-format --fail-${{ inputs.failCondition }} --check
print ::endgroup::
}
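The cmake-format action exposes the same interface as the clang-format action above; an analogous illustrative step:

- name: Run cmake-format 🎛️
  uses: ./.github/actions/run-cmake-format
  with:
    failCondition: error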

.github/actions/run-swift-format/action.yaml vendored Normal file

@@ -0,0 +1,60 @@
name: Run swift-format
description: Runs swift-format and checks for any changes introduced by it
inputs:
failCondition:
description: Controls whether failed checks also fail the workflow run
required: false
default: never
workingDirectory:
description: Working directory for checks
required: false
default: ${{ github.workspace }}
runs:
using: composite
steps:
- name: Check Runner Operating System 🏃‍♂️
if: runner.os == 'Windows'
shell: bash
run: |
: Check Runner Operating System 🏃‍♂️
echo "::notice::run-swift-format action requires a macOS-based or Linux-based runner."
exit 2
- name: Install Dependencies 🛍️
if: runner.os == 'Linux'
shell: bash
run: |
: Install Dependencies 🛍️
echo ::group::Install Dependencies
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
brew install --quiet zsh
echo ::endgroup::
- name: Run swift-format 🔥
id: result
shell: zsh --no-rcs --errexit --pipefail {0}
working-directory: ${{ inputs.workingDirectory }}
env:
GITHUB_EVENT_FORCED: ${{ github.event.forced }}
GITHUB_REF_BEFORE: ${{ github.event.before }}
run: |
: Run swift-format 🔥
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local -a changes=($(git diff --name-only HEAD~1 HEAD))
case ${GITHUB_EVENT_NAME} {
pull_request) changes=($(git diff --name-only origin/${GITHUB_BASE_REF} HEAD)) ;;
push) if [[ ${GITHUB_EVENT_FORCED} != true ]] changes=($(git diff --name-only ${GITHUB_REF_BEFORE} HEAD)) ;;
*) ;;
}
if (( ${changes[(I)*.swift]} )) {
print ::group::Install swift-format
brew install --quiet swift-format
print ::endgroup::
print ::group::Run swift-format
./build-aux/run-swift-format --fail-${{ inputs.failCondition }} --check
print ::endgroup::
}
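Likewise for the swift-format action, with the same illustrative failCondition value:

- name: Run swift-format 🔥
  uses: ./.github/actions/run-swift-format
  with:
    failCondition: error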

.github/actions/services-validation/action.yaml vendored Normal file

@@ -0,0 +1,113 @@
name: Services Validation
description: Checks services configuration file and checks for defunct services
inputs:
repositorySecret:
description: GitHub token for API access
required: true
runSchemaChecks:
description: Enable schema checking
required: false
default: 'true'
runServiceChecks:
description: Enable defunct service checking
required: false
default: 'false'
createPullRequest:
description: Enable pull request creation after service checks
required: false
default: 'false'
workingDirectory:
description: Working directory for checks
required: false
default: ${{ github.workspace }}
outputs:
hasDefunctServices:
description: True if defunct services were found in configuration
value: ${{ steps.services-check.outputs.make_pr }}
runs:
using: composite
steps:
- name: Check Runner Operating System 🏃‍♂️
if: runner.os == 'Windows'
shell: bash
run: |
: Check Runner Operating System 🏃‍♂️
echo "::notice::services-validation action requires a macOS-based or Linux-based runner."
exit 2
- name: Install and Configure Python 🐍
shell: bash
run: |
: Install and Configure Python 🐍
if [[ "${RUNNER_DEBUG}" ]]; then set -x; fi
echo ::group::Python Set Up
if [[ "${RUNNER_OS}" == Linux ]]; then
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH
fi
brew install --quiet python3
python3 -m pip install jsonschema json_source_map requests
echo ::endgroup::
- name: Validate Services File JSON Schema 🕵️
if: fromJSON(inputs.runSchemaChecks)
shell: bash
working-directory: ${{ inputs.workingDirectory }}
run: |
: Validate Services File JSON Schema 🕵️
if [[ "${RUNNER_DEBUG}" ]]; then set -x; fi
shopt -s extglob
echo ::group::Run Validation
python3 -u \
.github/scripts/utils.py/check-jsonschema.py \
plugins/rtmp-services/data/@(services|package).json \
--loglevel INFO
echo ::endgroup::
- name: Annotate schema validation errors 🏷️
if: fromJSON(inputs.runSchemaChecks) && failure()
uses: yuzutech/annotations-action@v0.4.0
with:
repo-token: ${{ inputs.repositorySecret }}
title: Service JSON Errors
input: ${{ inputs.workingDirectory }}/validation_errors.json
- name: Restore Timestamp Cache ⏳
if: fromJSON(inputs.runServiceChecks)
uses: actions/cache@v3
with:
path: ${{ github.workspace }}/other
key: service-check-${{ github.run_id }}
restore-keys: service-check-
- name: Check for defunct services 📉
id: services-check
if: fromJSON(inputs.runServiceChecks)
shell: bash
working-directory: ${{ inputs.workingDirectory }}
env:
GITHUB_TOKEN: ${{ inputs.repositorySecret }}
WORKFLOW_RUN_ID: ${{ github.run_id }}
REPOSITORY: ${{ github.repository }}
run: |
: Check for defunct services 📉
python3 -u .github/scripts/utils.py/check-services.py
- uses: actions/upload-artifact@v3
if: fromJSON(inputs.runServiceChecks)
with:
name: timestamps
path: ${{ inputs.workingDirectory }}/other/*
- name: Create pull request 🔧
uses: peter-evans/create-pull-request@f094b77505fb89581e68a1163fbd2fffece39da1
if: fromJSON(inputs.createPullRequest) && fromJSON(inputs.runServiceChecks) && fromJSON(steps.services-check.outputs.make_pr)
with:
author: 'Service Checker <commits@obsproject.com>'
commit-message: 'rtmp-services: Remove defunct servers/services'
title: 'rtmp-services: Remove defunct servers/services'
branch: 'automated/clean-services'
body: ${{ fromJSON(steps.services-check.outputs.pr_message) }}
delete-branch: true
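A minimal usage sketch for a scheduled run; createPullRequest only takes effect when the service checks report defunct services:

- name: Run Services Validation 🔍
  uses: ./.github/actions/services-validation
  with:
    repositorySecret: ${{ secrets.GITHUB_TOKEN }}
    runSchemaChecks: 'true'
    runServiceChecks: 'true'
    createPullRequest: 'true'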

.github/actions/setup-macos-codesigning/action.yaml vendored Normal file

@@ -0,0 +1,146 @@
name: Set up macOS Code Signing
description: Sets up code signing certificates, provisioning profiles, and notarization information
inputs:
codesignIdentity:
description: Code signing identity
required: true
codesignCertificate:
description: PKCS12 certificate in base64 format
required: true
certificatePassword:
description: Password required to install PKCS12 certificate
required: true
keychainPassword:
description: Password to use for temporary keychain
required: false
notarizationUser:
description: Apple ID to use for notarization
required: false
notarizationPassword:
  description: Application password for notarization
  required: false
provisioningProfile:
description: Provisioning profile in base64 format
required: false
outputs:
haveCodesignIdent:
description: True if necessary code signing credentials were found
value: ${{ steps.codesign.outputs.haveCodesignIdent }}
haveProvisioningProfile:
description: True if necessary provisioning profile credentials were found
value: ${{ steps.provisioning.outputs.haveProvisioningProfile }}
haveNotarizationUser:
description: True if necessary notarization credentials were found
value: ${{ steps.notarization.outputs.haveNotarizationUser }}
codesignIdent:
description: Code signing identity
value: ${{ steps.codesign.outputs.codesignIdent }}
codesignTeam:
description: Code signing team
value: ${{ steps.codesign.outputs.codesignTeam }}
runs:
using: composite
steps:
- name: Check Runner Operating System 🏃‍♂️
if: runner.os != 'macOS'
shell: bash
run: |
: Check Runner Operating System 🏃‍♂️
echo "setup-macos-codesigning action requires a macOS-based runner."
exit 2
- name: macOS Code Signing ✍️
id: codesign
shell: zsh --no-rcs --errexit --pipefail {0}
env:
MACOS_SIGNING_IDENTITY: ${{ inputs.codesignIdentity }}
MACOS_SIGNING_CERT: ${{ inputs.codesignCertificate }}
MACOS_SIGNING_CERT_PASSWORD: ${{ inputs.certificatePassword }}
MACOS_KEYCHAIN_PASSWORD: ${{ inputs.keychainPassword }}
run: |
: macOS Code Signing ✍️
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
if [[ ${MACOS_SIGNING_IDENTITY} && ${MACOS_SIGNING_CERT} ]] {
print 'haveCodesignIdent=true' >> $GITHUB_OUTPUT
local -r certificate_path="${RUNNER_TEMP}/build_certificate.p12"
local -r keychain_path="${RUNNER_TEMP}/app-signing.keychain-db"
print -n "${MACOS_SIGNING_CERT}" | base64 --decode --output=${certificate_path}
: "${MACOS_KEYCHAIN_PASSWORD:="$(print ${RANDOM} | sha1sum | head -c 32)"}"
print '::group::Keychain setup'
security create-keychain -p "${MACOS_KEYCHAIN_PASSWORD}" ${keychain_path}
security set-keychain-settings -lut 21600 ${keychain_path}
security unlock-keychain -p "${MACOS_KEYCHAIN_PASSWORD}" ${keychain_path}
security import "${certificate_path}" -P "${MACOS_SIGNING_CERT_PASSWORD}" -A \
-t cert -f pkcs12 -k ${keychain_path} \
-T /usr/bin/codesign -T /usr/bin/security -T /usr/bin/xcrun
security set-key-partition-list -S 'apple-tool:,apple:' -k "${MACOS_KEYCHAIN_PASSWORD}" \
${keychain_path} &> /dev/null
security list-keychain -d user -s ${keychain_path} 'login.keychain'
print '::endgroup::'
local -r team_id="${${MACOS_SIGNING_IDENTITY##* }//(\(|\))/}"
print "codesignIdent=${MACOS_SIGNING_IDENTITY}" >> $GITHUB_OUTPUT
print "MACOS_KEYCHAIN_PASSWORD=${MACOS_KEYCHAIN_PASSWORD}" >> $GITHUB_ENV
print "codesignTeam=${team_id}" >> $GITHUB_OUTPUT
} else {
print 'haveCodesignIdent=false' >> $GITHUB_OUTPUT
}
- name: Provisioning Profile 👤
id: provisioning
if: fromJSON(steps.codesign.outputs.haveCodesignIdent)
shell: zsh --no-rcs --errexit --pipefail {0}
env:
MACOS_SIGNING_PROVISIONING_PROFILE: ${{ inputs.provisioningProfile }}
run: |
: Provisioning Profile 👤
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
if [[ "${MACOS_SIGNING_PROVISIONING_PROFILE}" ]] {
print 'haveProvisioningProfile=true' >> $GITHUB_OUTPUT
local -r profile_path="${RUNNER_TEMP}/build_profile.provisionprofile"
print -n "${MACOS_SIGNING_PROVISIONING_PROFILE}" \
| base64 --decode --output="${profile_path}"
print '::group::Provisioning Profile Setup'
mkdir -p ~/Library/MobileDevice/Provisioning\ Profiles
security cms -D -i ${profile_path} -o ${RUNNER_TEMP}/build_profile.plist
local -r uuid="$(plutil -extract UUID raw ${RUNNER_TEMP}/build_profile.plist)"
local -r team_id="$(plutil -extract TeamIdentifier.0 raw -expect string ${RUNNER_TEMP}/build_profile.plist)"
if [[ ${team_id} != '${{ steps.codesign.outputs.codesignTeam }}' ]] {
print '::notice::Code Signing team in provisioning profile does not match certificate.'
}
cp ${profile_path} ~/Library/MobileDevice/Provisioning\ Profiles/${uuid}.provisionprofile
print "provisioningProfileUUID=${uuid}" >> $GITHUB_OUTPUT
print '::endgroup::'
} else {
print 'haveProvisioningProfile=false' >> $GITHUB_OUTPUT
}
- name: Notarization 🧑‍💼
id: notarization
if: fromJSON(steps.codesign.outputs.haveCodesignIdent)
shell: zsh --no-rcs --errexit --pipefail {0}
env:
MACOS_NOTARIZATION_USERNAME: ${{ inputs.notarizationUser }}
MACOS_NOTARIZATION_PASSWORD: ${{ inputs.notarizationPassword }}
run: |
: Notarization 🧑‍💼
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
if [[ ${MACOS_NOTARIZATION_USERNAME} && ${MACOS_NOTARIZATION_PASSWORD} ]] {
print 'haveNotarizationUser=true' >> $GITHUB_OUTPUT
} else {
print 'haveNotarizationUser=false' >> $GITHUB_OUTPUT
}
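A minimal sketch of chaining this action's outputs into build-obs; the step id and secret names are illustrative:

- name: Set Up Code Signing 🔑
  id: codesign
  uses: ./.github/actions/setup-macos-codesigning
  with:
    codesignIdentity: ${{ secrets.MACOS_SIGNING_IDENTITY }}
    codesignCertificate: ${{ secrets.MACOS_SIGNING_CERT }}
    certificatePassword: ${{ secrets.MACOS_SIGNING_CERT_PASSWORD }}
- name: Build OBS Studio 🧱
  uses: ./.github/actions/build-obs
  with:
    target: arm64
    codesign: ${{ steps.codesign.outputs.haveCodesignIdent }}
    codesignIdent: ${{ steps.codesign.outputs.codesignIdent }}
    codesignTeam: ${{ steps.codesign.outputs.codesignTeam }}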

.github/actions/sparkle-appcast/action.yaml vendored Normal file

@@ -0,0 +1,213 @@
name: Generate Sparkle Appcast
description: Creates Sparkle Appcast for a new release and generates delta patch files
inputs:
sparklePrivateKey:
description: Private key used for Sparkle signing
required: true
baseImage:
description: Disk image to base the Sparkle Appcast on
required: true
channel:
description: Sparkle Appcast channel to use
required: false
default: stable
count:
description: Number of old versions to generate deltas for
required: false
default: '1'
urlPrefix:
description: URL prefix to use for Sparkle downloads
required: true
customTitle:
description: Custom title to use for Appcast
required: false
customLink:
description: Custom link to use for Appcast
required: false
runs:
using: composite
steps:
- name: Check Runner Operating System 🏃‍♂️
if: runner.os != 'macOS'
shell: bash
run: |
: Check Runner Operating System 🏃‍♂️
echo '::notice::sparkle-appcast action requires a macOS-based runner.'
exit 2
- name: Install Dependencies
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Install Dependencies
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
print ::group::Install Dependencies
brew install --quiet coreutils pandoc
print ::endgroup::
- name: Set Up Sparkle ✨
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Set Up Sparkle ✨
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local version
local base_url
local hash
IFS=';' read -r version base_url hash <<< \
"$(jq -r '.tools.sparkle | {version, baseUrl, hash} | join(";")' buildspec.json)"
mkdir -p Sparkle && pushd Sparkle
curl -s -L -O "${base_url}/${version}/Sparkle-${version}.tar.xz"
local checksum="$(sha256sum Sparkle-${version}.tar.xz | cut -d " " -f 1)"
if [[ ${hash} != ${checksum} ]] {
print "::error::Sparkle-${version}.tar.xz checksum mismatch: ${checksum} (expected: ${hash})"
exit 2
}
tar -xJf "Sparkle-${version}.tar.xz"
popd
mkdir builds
mkdir -p output/appcasts/stable
mkdir -p output/sparkle_deltas
- name: Download Builds 📥
id: builds
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Download Builds 📥
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
pushd builds
local image_location=(${{ inputs.baseImage }})
hdiutil attach -readonly -noverify -noautoopen -plist ${image_location} > result.plist
local -i num_entities=$(( $(plutil -extract system-entities raw -- result.plist) - 1 ))
local keys
local mount_point
for i ({0..${num_entities}}) {
keys=($(plutil -extract system-entities.${i} raw -- result.plist))
if [[ ${keys} == *mount-point* ]] {
mount_point=$(plutil -extract system-entities.${i}.mount-point raw -- result.plist)
break
}
}
local feed_url
local info_plist=(${mount_point}/*.app/Contents/Info.plist)
if [[ -f ${info_plist} ]] {
feed_url=$(plutil -extract SUFeedURL raw -- ${info_plist})
} else {
print '::error:: No Info.plist file found in specified disk image.'
hdiutil detach ${mount_point}
exit 2
}
print "feedUrl=${feed_url}" >> $GITHUB_OUTPUT
hdiutil detach ${mount_point}
curl -s -L -O ${feed_url}
local -a artifacts=($(\
xmllint \
-xpath "//rss/channel/item[*[local-name()='channel'][text()='${{ inputs.channel }}']]/enclosure/@url" \
${feed_url:t} \
| sed -n 's/.*url="\(.*\)"/\1/p')
)
local url
local file_name
for i ({1..${{ inputs.count }}}) {
url="${artifacts[${i}]}"
file_name="${artifacts[${i}]:t}"
curl -s -L -O ${url}
}
mv ${{ inputs.baseImage }} ${PWD}
rm -rf -- result.plist
popd
- name: Prepare Release Notes 📝
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Prepare Release Notes 📝
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
git tag -l --format='%(contents)' ${GITHUB_REF_NAME} \
| tr '\n' '\\n' \
| sed 's/-----BEGIN SSH SIGNATURE-----.*-----END SSH SIGNATURE-----//g' \
| tr '\\n' '\n' > notes.rst
sed -i '' '2i\'$'\n''###################################################' notes.rst
pandoc -f rst -t html notes.rst -o output/appcasts/notes_${{ inputs.channel }}.html
- name: Generate Appcast 🎙️
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Generate Appcast 🎙️
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
print -n '${{ inputs.sparklePrivateKey }}' >> eddsa_private.key
local feed_url='${{ steps.builds.outputs.feedUrl }}'
Sparkle/bin/generate_appcast \
--verbose \
--ed-key-file eddsa_private.key \
--download-url-prefix '${{ inputs.urlPrefix }}/' \
--full-release-notes-url "${feed_url//updates_*/notes_${{ inputs.channel }}.html}" \
--maximum-versions 0 \
--maximum-deltas ${{ inputs.count }} \
--channel '${{ inputs.channel }}' \
builds
local -a deltas=(builds/*.delta(N))
if (( ${#deltas} )) {
mv ${deltas} output/sparkle_deltas
}
mv builds/*.xml output/appcasts
- name: Adjust Appcast 🎙️
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Adjust Appcast 🎙️
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local feed_url='${{ steps.builds.outputs.feedUrl }}'
local arch=${${${(s:_:)feed_url:t}[2]}//x86/x86_64}
local -a appcasts=(output/appcasts/*_v2.xml)
local adjusted
for appcast (${appcasts}) {
adjusted="${appcast//.xml/-adjusted.xml}"
xsltproc \
--stringparam pDeltaUrl "${{ inputs.urlPrefix }}/sparkle_deltas/${arch}/" \
--stringparam pSparkleUrl '${{ inputs.urlPrefix }}/' \
--stringparam pCustomTitle '${{ inputs.customTitle }}' \
--stringparam pCustomLink '${{ inputs.customLink }}' \
-o ${adjusted} ${GITHUB_ACTION_PATH}/appcast_adjust.xslt ${appcast}
xmllint --format ${adjusted} >! ${appcast}
rm ${adjusted}
}
- name: Create Legacy Appcast 📟
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Create Legacy Appcast 📟
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local -a appcasts=(output/appcasts/*_v2.xml)
local legacy
for appcast (${appcasts}) {
legacy="${appcast//.xml/-legacy.xml}"
xsltproc \
-o ${legacy} ${GITHUB_ACTION_PATH}/appcast_legacy.xslt ${appcast}
xmllint --format ${legacy} >! output/appcasts/stable/${${appcast:t}//-v2.xml/.xml}
rm ${legacy}
}
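A minimal usage sketch; the disk-image name and URL prefix are illustrative placeholders:

- name: Generate Sparkle Appcast 🎙️
  uses: ./.github/actions/sparkle-appcast
  with:
    sparklePrivateKey: ${{ secrets.SPARKLE_PRIVATE_KEY }}
    baseImage: ${{ github.workspace }}/obs-studio-x.y.z-macos-apple.dmg
    channel: stable
    count: '2'
    urlPrefix: https://example.com/downloads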

.github/actions/sparkle-appcast/appcast_adjust.xslt vendored Normal file

@@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:sparkle="http://www.andymatuschak.org/xml-namespaces/sparkle">
<xsl:output method="xml" encoding="UTF-8" omit-xml-declaration="no"/>
<xsl:strip-space elements="*"/>
<xsl:param name="pCustomTitle" select="/rss/channel/title" />
<xsl:param name="pCustomLink" select="/rss/channel/link" />
<xsl:param name="pSparkleUrl" select="''" />
<xsl:param name="pDeltaUrl" select="''" />
<xsl:template match="@* | node()">
<xsl:copy>
<xsl:apply-templates select="@* | node()" />
</xsl:copy>
</xsl:template>
<xsl:template match="/rss/channel/title" />
<xsl:template match="/rss/channel/link" />
<xsl:template match="/rss/channel">
<xsl:copy>
<xsl:element name="title"><xsl:value-of select="$pCustomTitle" /></xsl:element>
<xsl:element name="link"><xsl:value-of select="$pCustomLink" /></xsl:element>
<xsl:apply-templates select="@* | node()" />
</xsl:copy>
</xsl:template>
<xsl:template match="/rss/channel/item/sparkle:deltas/enclosure/@url">
<xsl:attribute name="url">
<xsl:choose>
<xsl:when test="starts-with(., $pDeltaUrl)">
<xsl:value-of select="." />
</xsl:when>
<xsl:otherwise>
<xsl:value-of select="$pDeltaUrl" />
<xsl:value-of select="substring-after(., $pSparkleUrl)" />
</xsl:otherwise>
</xsl:choose>
</xsl:attribute>
</xsl:template>
<xsl:template match="/rss/channel/item/sparkle:fullReleaseNotesLink">
<xsl:element name="sparkle:releaseNotesLink"><xsl:apply-templates select="@* | node()" /></xsl:element>
</xsl:template>
</xsl:stylesheet>

.github/actions/sparkle-appcast/appcast_legacy.xslt vendored Normal file

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:sparkle="http://www.andymatuschak.org/xml-namespaces/sparkle">
<xsl:output method="xml" encoding="UTF-8" omit-xml-declaration="no"/>
<xsl:strip-space elements="*"/>
<xsl:template match="@* | node()">
<xsl:copy>
<xsl:apply-templates select="@* | node()" />
</xsl:copy>
</xsl:template>
<xsl:template match="/rss/channel/item[sparkle:channel[text()!='stable']]" />
<xsl:template match="/rss/channel/item/sparkle:channel" />
<xsl:template match="/rss/channel/item/sparkle:deltas" />
</xsl:stylesheet>

.github/actions/steam-upload/action.yaml vendored Normal file

@@ -0,0 +1,286 @@
name: Steam Upload
description: Creates and uploads stable and nightly builds of obs-studio and beta builds (if available)
inputs:
steamSecret:
description: Steam auth code
required: true
steamUser:
description: Steam user name
required: true
steamPassword:
description: Steam user password
required: true
workflowSecret:
description: GitHub API token to use for API calls
required: true
tagName:
description: Tag name to use for packaging
required: false
default: ''
stableBranch:
description: Name of the stable branch to use
required: false
default: staging
betaBranch:
description: Name of the beta branch to use
required: false
default: beta_staging
nightlyBranch:
description: Name of the nightly branch to use
required: false
default: nightly
playtestBranch:
description: Name of the playtest branch to use
required: false
default: staging
customAssetWindows:
description: Custom asset for Windows
required: false
default: ''
customAssetMacOSApple:
description: Custom asset for macOS Apple Silicon
required: false
default: ''
customAssetMacOSIntel:
description: Custom asset for macOS Intel
required: false
default: ''
preview:
description: Enable preview mode (no uploads done)
required: false
default: 'false'
runs:
using: composite
steps:
- name: Check Runner Operating System 🏃‍♂️
if: runner.os != 'macOS'
shell: bash
run: |
: Check Runner Operating System 🏃‍♂️
echo '::error::steam-upload action requires a macOS-based runner.'
exit 2
- name: Check GitHub Event 🔬
if: contains(fromJSON('["release", "workflow_dispatch", "schedule"]'), github.event_name) != true
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Check GitHub Event 🔬
print "::error:steam-upload action can only be used with 'release', 'workflow-dispatch', or 'schedule' events."
exit 2
- name: Download Assets 📥
id: asset-info
shell: zsh --no-rcs --errexit --pipefail {0}
env:
GH_TOKEN: ${{ inputs.workflowSecret }}
windows_custom_asset: ${{ inputs.customAssetWindows }}
macos_apple_custom_asset: ${{ inputs.customAssetMacOSApple }}
macos_intel_custom_asset: ${{ inputs.customAssetMacOSIntel }}
run: |
: Download Assets 📥
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local root_dir="${PWD}"
local description
local is_prerelease
case ${GITHUB_EVENT_NAME} {
release)
gh release download \
--pattern '*macOS*.dmg' \
--pattern '*Windows*' \
--pattern '*.zip' \
--clobber
IFS=';' read -r description is_prerelease <<< \
"$(gh release view --json tagName,isPrerelease --jq 'join(";")')"
;;
workflow_dispatch)
if [[ '${{ inputs.tagName }}' =~ [0-9]+\.[0-9]+\.[0-9]+(-(rc|beta)[0-9]+)*$ ]] {
gh release download ${{ inputs.tagName }} \
--pattern '*macOS*.dmg' \
--pattern '*Windows*' \
--pattern '*.zip' \
--clobber
description='${{ inputs.tagName }}'
read -r is_prerelease <<< \
"$(gh release view ${{ inputs.tagName }} --json isPrerelease --jq '.isPrerelease')"
asset_names=($(gh release view ${{ inputs.tagName }} --json assets \
  --jq '.assets[] | select(.name|test(".*(macos|Full-x64|windows).*")) | .name'))
local -A custom_assets=(
windows "Windows x64;${windows_custom_asset}"
macos_apple "macOS Apple;${macos_apple_custom_asset}"
macos_intel "macOS Intel;${macos_intel_custom_asset}"
)
local display_name
local url
mkdir -p custom_assets && pushd custom_assets
for platform (windows macos_apple macos_intel) {
IFS=';' read -r display_name url <<< "${custom_assets[${platform}]}"
if [[ ${url} ]] {
print "::group::Download of ${display_name} custom asset"
curl --location --silent --remote-name ${url}
if [[ ! -f ${root_dir}/${url:t} ]] {
print "::warning::Custom asset for ${display_name} does not replace an existing release asset"
} else {
rm -rf -- ${root_dir}/${url:t}
}
mv ${url:t} ${root_dir}
print '::endgroup::'
}
}
popd
} else {
print "::error::Invalid tag name for non-release workflow run: '${{ inputs.tagName }}'."
exit 2
}
;;
schedule)
gh run download ${GITHUB_RUN_ID} \
--pattern '*macos*' \
--pattern '*windows*'
local short_hash="${GITHUB_SHA:0:9}"
mv obs-studio-windows-x64-${short_hash}/obs-studio-*-windows-x64.zip \
${root_dir}
mv obs-studio-macos-arm64-${short_hash}/obs-studio-*-macos-apple.dmg \
${root_dir}
mv obs-studio-macos-intel-${short_hash}/obs-studio-*-macos-intel.dmg \
${root_dir}
description="g${GITHUB_SHA}"
is_prerelease='false'
;;
}
print "description=${description}" >> $GITHUB_OUTPUT
print "is_prerelease=${is_prerelease}" >> $GITHUB_OUTPUT
- name: Prepare Builds for Steam 🍜
shell: zsh --no-rcs --errexit --pipefail --extendedglob {0}
run: |
: Prepare Builds for Steam 🍜
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local root_dir="${PWD}"
mkdir -p steam && pushd steam
print '::group::Prepare Windows x64 assets'
mkdir -p steam-windows && pushd steam-windows
unzip ${root_dir}/(#i)obs-studio-*.zip
rm ${root_dir}/(#i)obs-studio-*.zip
cp -r ${root_dir}/build-aux/steam/scripts_windows scripts
touch disable_updater
popd
print '::endgroup::'
print '::group::Prepare macOS Apple assets'
mkdir -p steam-macos/arm64/OBS.app
hdiutil attach -noverify -readonly -noautoopen -mountpoint /Volumes/obs-studio-arm64 ${root_dir}/(#i)obs-studio-*-macos-apple.dmg
ditto /Volumes/obs-studio-arm64/OBS.app steam-macos/arm64/OBS.app
hdiutil unmount /Volumes/obs-studio-arm64
rm ${root_dir}/(#i)obs-studio-*-macos-apple.dmg
print '::endgroup::'
print '::group::Prepare macOS Intel assets'
mkdir -p steam-macos/x86_64/OBS.app
hdiutil attach -noverify -readonly -noautoopen -mountpoint /Volumes/obs-studio-x86_64 ${root_dir}/(#i)obs-studio-*-macos-intel.dmg
ditto /Volumes/obs-studio-x86_64/OBS.app steam-macos/x86_64/OBS.app
hdiutil unmount /Volumes/obs-studio-x86_64
rm ${root_dir}/(#i)obs-studio-*-macos-intel.dmg
print '::endgroup::'
cp ${root_dir}/build-aux/steam/scripts_macos/launch.sh steam-macos/launch.sh
popd
- name: Set Up steamcmd 🚂
uses: CyberAndrii/setup-steamcmd@b786e0da44db3d817e66fa3910a9560cb28c9323
- name: Generate Steam auth code 🔐
id: steam-totp
uses: CyberAndrii/steam-totp@c7f636bc64e77f1b901e0420b7890813141508ee
if: ${{ ! fromJSON(inputs.preview) }}
with:
shared_secret: ${{ inputs.steamSecret }}
- name: Upload to Steam 📤
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Upload to Steam 📤
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local root_dir="${PWD}"
local build_file='build.vdf'
local branch_name
pushd steam
print '::group::Prepare Steam Build Script'
case ${GITHUB_EVENT_NAME} {
schedule) branch_name='${{ inputs.nightlyBranch }}' ;;
release|workflow_dispatch)
if [[ '${{ steps.asset-info.outputs.is_prerelease }}' == 'true' ]] {
branch_name='${{ inputs.betaBranch }}'
} else {
branch_name='${{ inputs.stableBranch }}'
}
;;
}
sed "s/@@DESC@@/${branch_name}-${{ steps.asset-info.outputs.description }}/;s/@@BRANCH@@/${branch_name}/" \
${root_dir}/build-aux/steam/obs_build.vdf > ${build_file}
print "Generated ${build_file}:\n$(<${build_file})"
print '::endgroup::'
print '::group::Upload to Steam'
local preview
if [[ '${{ inputs.preview }}' == 'true' ]] preview='-preview'
steamcmd \
    +login '${{ inputs.steamUser }}' '${{ inputs.steamPassword }}' '${{ steps.steam-totp.outputs.code }}' \
    +run_app_build ${preview} ${build_file} \
+quit
print '::endgroup::'
popd
- name: Upload to Steam (Playtest) 📤
if: fromJSON(steps.asset-info.outputs.is_prerelease)
shell: zsh --no-rcs --errexit --pipefail {0}
run: |
: Upload to Steam (Playtest) 📤
if (( ${+RUNNER_DEBUG} )) setopt XTRACE
local root_dir="${PWD}"
local build_file='build_playtest.vdf'
local branch_name='${{ inputs.playtestBranch }}'
pushd steam
print '::group::Prepare Steam Build Script'
sed "s/@@DESC@@/${branch_name}-${{ steps.asset-info.outputs.description }}/;s/@@BRANCH@@/${branch_name}/" \
    ${root_dir}/build-aux/steam/obs_playtest_build.vdf > ${build_file}
print "Generated ${build_file}:\n$(<${build_file})"
print '::endgroup::'
print '::group::Upload to Steam'
local preview
if [[ '${{ inputs.preview }}' == 'true' ]] preview='-preview'
steamcmd \
+login '${{ inputs.steamUser }}' '${{ inputs.steamPassword }}' '${{ steps.steam-totp.outputs.code }}' \
+run_app_build ${preview} ${build_file} \
+quit
print '::endgroup::'
popd
- name: Upload Steam build logs
uses: actions/upload-artifact@v3
with:
name: steam-build-logs
path: ${{ github.workspace }}/steam/build/*.log
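A minimal usage sketch for a release-triggered upload; the secret names are illustrative:

- name: Upload to Steam 🚂
  uses: ./.github/actions/steam-upload
  with:
    steamSecret: ${{ secrets.STEAM_SHARED_SECRET }}
    steamUser: ${{ secrets.STEAM_USER }}
    steamPassword: ${{ secrets.STEAM_PASSWORD }}
    workflowSecret: ${{ secrets.GITHUB_TOKEN }}
    preview: 'false'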

.github/scripts/utils.py/check-jsonschema.py vendored Normal file

@@ -0,0 +1,131 @@
import argparse
import json
import logging
import os
import sys
from typing import Any
from json_source_map import calculate
from json_source_map.errors import InvalidInputError
from jsonschema import Draft7Validator
def discover_schema_file(filename: str) -> tuple[str | None, Any]:
logger = logging.getLogger()
with open(filename) as json_file:
json_data = json.load(json_file)
schema_filename = json_data.get("$schema", None)
if not schema_filename:
logger.info(f" ${filename} has no schema definition")
return (None, None)
schema_file = os.path.join(os.path.dirname(filename), schema_filename)
with open(schema_file) as schema_json:
    schema_data = json.load(schema_json)
return (schema_file, schema_data)
def validate_json_files(
schema_data: dict[Any, Any], json_file_name: str
) -> list[dict[str, str]]:
logger = logging.getLogger()
with open(json_file_name) as json_file:
text_data = json_file.read()
json_data = json.loads(text_data)
source_map = calculate(text_data)
validator = Draft7Validator(schema_data)
violations = []
for violation in sorted(validator.iter_errors(json_data), key=str):
logger.info(
f"⚠️ Schema violation in file '{json_file_name}':\n{violation}\n----\n"
)
if len(violation.absolute_path):
error_path = "/".join(
str(path_element) for path_element in violation.absolute_path
)
error_entry = source_map["/{}".format(error_path)]
violation_data = {
"file": json_file_name,
"title": "Validation Error",
"message": violation.message,
"annotation_level": "failure",
"start_line": error_entry.value_start.line + 1,
"end_line": error_entry.value_end.line + 1,
}
violations.append(violation_data)
return violations
def main() -> int:
parser = argparse.ArgumentParser(
description="Validate JSON files by schema definition"
)
parser.add_argument(
"json_files", metavar="FILE", type=str, nargs="+", help="JSON file to validate"
)
parser.add_argument(
"--loglevel", type=str, help="Set log level", default="WARNING", required=False
)
arguments = parser.parse_args()
logging.basicConfig(level=arguments.loglevel, format="%(levelname)s - %(message)s")
logger = logging.getLogger()
schema_mappings = {}
for json_file in arguments.json_files:
try:
(schema_file, schema_data) = discover_schema_file(json_file)
except OSError as e:
logger.error(f"❌ Failed to discover schema for file '{json_file}': {e}")
return 2
if schema_file and schema_file not in schema_mappings.keys():
schema_mappings.update(
{schema_file: {"schema_data": schema_data, "files": set()}}
)
schema_mappings[schema_file]["files"].add(json_file)
validation_errors = []
for schema_entry in schema_mappings.values():
for json_file in schema_entry["files"]:
try:
new_errors = validate_json_files(schema_entry["schema_data"], json_file)
except (InvalidInputError, OSError) as e:
logger.error(
f"❌ Failed to create JSON source map for file '{json_file}': {e}"
)
return 2
validation_errors.extend(new_errors)
if validation_errors:
try:
with open("validation_errors.json", "w") as results_file:
json.dump(validation_errors, results_file)
except OSError as e:
logger.error(f"❌ Failed to write validation results file: {e}")
return 2
return 1
return 0
if __name__ == "__main__":
sys.exit(main())

.github/scripts/utils.py/check-services.py vendored Normal file

@@ -0,0 +1,381 @@
import json
import socket
import ssl
import os
import time
import requests
import sys
import zipfile
from io import BytesIO
from random import randbytes
from urllib.parse import urlparse
from collections import defaultdict
MINIMUM_PURGE_AGE = 9.75 * 24 * 60 * 60 # slightly less than 10 days
TIMEOUT = 10
SKIPPED_SERVICES = {"YouNow", "SHOWROOM", "Dacast"}
SERVICES_FILE = "plugins/rtmp-services/data/services.json"
PACKAGE_FILE = "plugins/rtmp-services/data/package.json"
CACHE_FILE = "other/timestamps.json"
GITHUB_OUTPUT_FILE = os.environ.get("GITHUB_OUTPUT", None)
DO_NOT_PING = {"jp9000"}
PR_MESSAGE = """This is an automatically created pull request to remove unresponsive servers and services.
| Service | Action Taken | Author(s) |
| ------- | ------------ | --------- |
{table}
If you are not responsible for an affected service and want to be excluded from future pings please let us know.
Created by workflow run: https://github.com/{repository}/actions/runs/{run_id}"""
# GQL is great isn't it
GQL_QUERY = """{
repositoryOwner(login: "obsproject") {
repository(name: "obs-studio") {
object(expression: "master") {
... on Commit {
blame(path: "plugins/rtmp-services/data/services.json") {
ranges {
startingLine
endingLine
commit {
author {
user {
login
}
}
}
}
}
}
}
}
}
}"""
context = ssl.create_default_context()
def check_ftl_server(hostname) -> bool:
"""Check if hostname resolves to a valid address - FTL handshake not implemented"""
try:
socket.getaddrinfo(hostname, 8084, proto=socket.IPPROTO_UDP)
except socket.gaierror as e:
print(f"⚠️ Could not resolve hostname for server: {hostname} (Exception: {e})")
return False
else:
return True
def check_hls_server(uri) -> bool:
"""Check if URL responds with status code < 500 and not 404, indicating that at least there's *something* there"""
try:
r = requests.post(uri, timeout=TIMEOUT)
if r.status_code >= 500 or r.status_code == 404:
raise Exception(f"Server responded with {r.status_code}")
except Exception as e:
print(f"⚠️ Could not connect to HLS server: {uri} (Exception: {e})")
return False
else:
return True
def check_rtmp_server(uri) -> bool:
"""Try connecting and sending a RTMP handshake (with SSL if necessary)"""
parsed = urlparse(uri)
hostname, port = parsed.netloc.partition(":")[::2]
if port:
port = int(port)
elif parsed.scheme == "rtmps":
port = 443
else:
port = 1935
try:
recv = b""
with socket.create_connection((hostname, port), timeout=TIMEOUT) as sock:
# RTMP handshake is \x03 + 4 bytes time (can be 0) + 4 zero bytes + 1528 bytes random
handshake = b"\x03\x00\x00\x00\x00\x00\x00\x00\x00" + randbytes(1528)
if parsed.scheme == "rtmps":
with context.wrap_socket(sock, server_hostname=hostname) as ssock:
ssock.sendall(handshake)
while True:
_tmp = ssock.recv(4096)
recv += _tmp
if len(recv) >= 1536 or not _tmp:
break
else:
sock.sendall(handshake)
while True:
_tmp = sock.recv(4096)
recv += _tmp
if len(recv) >= 1536 or not _tmp:
break
if len(recv) < 1536 or recv[0] != 3:
raise ValueError("Invalid RTMP handshake received from server")
except Exception as e:
print(f"⚠️ Connection to server failed: {uri} (Exception: {e})")
return False
else:
return True
def get_last_artifact():
s = requests.session()
s.headers["Authorization"] = f'Bearer {os.environ["GITHUB_TOKEN"]}'
run_id = os.environ["WORKFLOW_RUN_ID"]
repo = os.environ["REPOSITORY"]
# fetch run first, get workflow id from there to get workflow runs
r = s.get(f"https://api.github.com/repos/{repo}/actions/runs/{run_id}")
r.raise_for_status()
workflow_id = r.json()["workflow_id"]
r = s.get(
f"https://api.github.com/repos/{repo}/actions/workflows/{workflow_id}/runs",
params=dict(
per_page=1,
status="completed",
branch="master",
conclusion="success",
event="schedule",
),
)
r.raise_for_status()
runs = r.json()
if not runs["workflow_runs"]:
raise ValueError("No completed workflow runs found")
r = s.get(runs["workflow_runs"][0]["artifacts_url"])
r.raise_for_status()
for artifact in r.json()["artifacts"]:
if artifact["name"] == "timestamps":
artifact_url = artifact["archive_download_url"]
break
else:
raise ValueError("No previous artifact found.")
r = s.get(artifact_url)
r.raise_for_status()
zip_data = BytesIO()
zip_data.write(r.content)
with zipfile.ZipFile(zip_data) as zip_ref:
for info in zip_ref.infolist():
if info.filename == "timestamps.json":
return json.loads(zip_ref.read(info.filename))
def find_people_to_blame(raw_services: str, servers: list[tuple[str, str]]) -> dict:
if not servers:
return dict()
# Fetch Blame data from github
s = requests.session()
s.headers["Authorization"] = f'Bearer {os.environ["GITHUB_TOKEN"]}'
r = s.post(
"https://api.github.com/graphql", json=dict(query=GQL_QUERY, variables=dict())
)
r.raise_for_status()
j = r.json()
# The file is only ~2600 lines so this isn't too crazy and makes the lookup very easy
line_author = dict()
for blame in j["data"]["repositoryOwner"]["repository"]["object"]["blame"][
"ranges"
]:
for i in range(blame["startingLine"] - 1, blame["endingLine"]):
if user := blame["commit"]["author"]["user"]:
line_author[i] = user["login"]
service_authors = defaultdict(set)
for i, line in enumerate(raw_services.splitlines()):
if '"url":' not in line:
continue
for server, service in servers:
if server in line and (author := line_author.get(i)):
if author not in DO_NOT_PING:
service_authors[service].add(author)
return service_authors
def set_output(name, value):
if not GITHUB_OUTPUT_FILE:
return
try:
with open(GITHUB_OUTPUT_FILE, "a", encoding="utf-8", newline="\n") as f:
f.write(f"{name}={value}\n")
except Exception as e:
print(f"Writing to github output files failed: {e!r}")
def main():
try:
with open(SERVICES_FILE, encoding="utf-8") as services_file:
raw_services = services_file.read()
services = json.loads(raw_services)
with open(PACKAGE_FILE, encoding="utf-8") as package_file:
package = json.load(package_file)
except OSError as e:
print(f"❌ Could not open services/package file: {e}")
return 1
# attempt to load last check result cache
try:
with open(CACHE_FILE, encoding="utf-8") as check_file:
fail_timestamps = json.load(check_file)
except OSError as e:
# cache might be evicted or not exist yet, so this is non-fatal
print(
f"⚠️ Could not read cache file, trying to get last artifact (Exception: {e})"
)
try:
fail_timestamps = get_last_artifact()
except Exception as e:
print(f"⚠️ Could not fetch cache file, starting fresh. (Exception: {e})")
fail_timestamps = dict()
else:
print("Fetched cache file from last run artifact.")
else:
print("Successfully loaded cache file:", CACHE_FILE)
start_time = int(time.time())
affected_services = dict()
removed_servers = list()
# create temporary new list
new_services = services.copy()
new_services["services"] = []
for service in services["services"]:
# skip services that do custom stuff that we can't easily check
if service["name"] in SKIPPED_SERVICES:
new_services["services"].append(service)
continue
service_type = service.get("recommended", {}).get("output", "rtmp_output")
if service_type not in {"rtmp_output", "ffmpeg_hls_muxer", "ftl_output"}:
print("Unknown service type:", service_type)
new_services["services"].append(service)
continue
# create a copy to mess with
new_service = service.copy()
new_service["servers"] = []
# run checks for all the servers, and store results in timestamp cache
for server in service["servers"]:
if service_type == "ftl_output":
is_ok = check_ftl_server(server["url"])
elif service_type == "ffmpeg_hls_muxer":
is_ok = check_hls_server(server["url"])
else: # rtmp
is_ok = check_rtmp_server(server["url"])
if not is_ok:
if ts := fail_timestamps.get(server["url"], None):
if (delta := start_time - ts) >= MINIMUM_PURGE_AGE:
print(
f'🗑️ Purging server "{server["url"]}", it has been '
f"unresponsive for {round(delta/60/60/24)} days."
)
removed_servers.append((server["url"], service["name"]))
# continuing here means not adding it to the new list, thus dropping it
continue
else:
fail_timestamps[server["url"]] = start_time
elif is_ok and server["url"] in fail_timestamps:
# remove timestamp of failed check if server is back
delta = start_time - fail_timestamps[server["url"]]
print(
f'💡 Server "{server["url"]}" is back after {round(delta/60/60/24)} days!'
)
del fail_timestamps[server["url"]]
new_service["servers"].append(server)
if (diff := len(service["servers"]) - len(new_service["servers"])) > 0:
print(f' Removed {diff} server(s) from {service["name"]}')
affected_services[service["name"]] = f"{diff} servers removed"
# remove services with no valid servers
if not new_service["servers"]:
print(f'💀 Service "{service["name"]}" has no valid servers left, removing!')
affected_services[service["name"]] = f"Service removed"
continue
new_services["services"].append(new_service)
# write cache file
try:
os.makedirs("other", exist_ok=True)
with open(CACHE_FILE, "w", encoding="utf-8") as cache_file:
json.dump(fail_timestamps, cache_file)
except OSError as e:
print(f"❌ Could not write cache file: {e}")
return 1
else:
print("Successfully wrote cache file:", CACHE_FILE)
if removed_servers:
# increment package version and save that as well
package["version"] += 1
package["files"][0]["version"] += 1
try:
with open(SERVICES_FILE, "w", encoding="utf-8") as services_file:
json.dump(new_services, services_file, indent=4, ensure_ascii=False)
services_file.write("\n")
with open(PACKAGE_FILE, "w", encoding="utf-8") as package_file:
json.dump(package, package_file, indent=4)
package_file.write("\n")
except OSError as e:
print(f"❌ Could not write services/package file: {e}")
return 1
else:
print(
f"Successfully wrote services/package files:\n- {SERVICES_FILE}\n- {PACKAGE_FILE}"
)
# try to find authors to ping, this is optional and is allowed to fail
try:
service_authors = find_people_to_blame(raw_services, removed_servers)
except Exception as e:
print(f"⚠ Could not fetch blame for some reason: {e}")
service_authors = dict()
# set GitHub outputs
set_output("make_pr", "true")
msg = PR_MESSAGE.format(
repository=os.environ["REPOSITORY"],
run_id=os.environ["WORKFLOW_RUN_ID"],
table="\n".join(
"| {name} | {action} | {authors} |".format(
name=name.replace("|", "\\|"),
action=action,
authors=", ".join(
f"@{author}" for author in sorted(service_authors.get(name, []))
),
)
for name, action in sorted(affected_services.items())
),
)
set_output("pr_message", json.dumps(msg))
else:
set_output("make_pr", "false")
if __name__ == "__main__":
sys.exit(main())

build-aux/steam/scripts_macos/launch.sh

@@ -1,27 +1,14 @@
 #!/bin/zsh
-arch_name="$(uname -m)"
+arch_name="${CPUTYPE}"
+is_translated="$(sysctl -in sysctl.proc_translated)"
 # When the script is launched from Steam, it'll be run through Rosetta.
 # Manually override arch to arm64 in that case.
-if [ "$(sysctl -in sysctl.proc_translated)" = "1" ]; then
-    arch_name="arm64"
-fi
+if (( is_translated )) arch_name="arm64"
+
+if [[ ${@} == *'--intel'* ]] arch_name="x86_64"
+
+if [[ -d OBS.app ]] exec open OBS.app -W --args "${@}"
-# Allow users to force Rosetta
-if [[ "$@" =~ \-\-intel ]]; then
-    arch_name="x86_64"
-fi
-# legacy app installation
-if [ -d OBS.app ]; then
-    exec open OBS.app -W --args "$@"
-fi
-if [ "${arch_name}" = "x86_64" ]; then
-    exec open x86/OBS.app -W --args "$@"
-elif [ "${arch_name}" = "arm64" ]; then
-    exec open arm64/OBS.app -W --args "$@"
-else
-    echo "Unknown architecture: ${arch_name}"
-fi
+case ${arch_name} {
+  x86_64) exec open x86_64/OBS.app -W --args "${@}" ;;
+  arm64) exec open arm64/OBS.app -W --args "${@}" ;;
+  *) echo "Unknown architecture: ${arch_name}"; exit 2 ;;
+}

View file

@ -1,40 +1,96 @@
+import argparse
-import glob
 import json
+import logging
 import os
 import sys
+from typing import Any
-MAIN_MANIFEST_FILENAME = "com.obsproject.Studio.json"
-def main():
-    dir_path = os.path.dirname(os.path.realpath(__file__))
-    if not os.path.isfile(os.path.join(dir_path, MAIN_MANIFEST_FILENAME)):
-        print("The script is not ran in the same folder as the manifest")
-        return 1
+def main() -> int:
+    parser = argparse.ArgumentParser(description="Format Flatpak manifest")
+    parser.add_argument(
+        "manifest_file",
+        metavar="FILE",
+        type=str,
+        help="Manifest file to adjust format for",
+    )
+    parser.add_argument(
+        "--check",
+        action="store_true",
+        help="Check for necessary changes only",
+        default=False,
+        required=False,
+    )
+    parser.add_argument(
+        "--loglevel", type=str, help="Set log level", default="WARNING", required=False
+    )
-    for root, dirs, files in os.walk(dir_path):
-        for file in files:
-            if not file.endswith(".json"):
-                continue
+    arguments = parser.parse_args()
-            print(f"Formatting {file}")
-            # Load JSON file
-            with open(os.path.join(root, file), "r") as f:
-                j = json.load(f)
+    logging.basicConfig(level=arguments.loglevel, format="%(message)s")
+    logger = logging.getLogger()
-            if file == MAIN_MANIFEST_FILENAME:
-                # Sort module files order in the manifest
-                # Assumption: All modules except the last are strings
-                file_modules = j["modules"][0:-1]
-                last_module = j["modules"][-1]
-                file_modules.sort(key=lambda file_name: file_name)
-                j["modules"] = file_modules
-                j["modules"].append(last_module)
+    manifest_file = arguments.manifest_file
-            # Overwrite JSON file
-            with open(os.path.join(root, file), "w") as f:
-                json.dump(j, f, indent=4, ensure_ascii=False)
-                f.write("\n")
+    try:
+        with open(manifest_file, "r+") as manifest:
+            manifest_path = os.path.dirname(manifest_file)
+            manifest_string = manifest.read()
+            manifest_data = json.loads(manifest_string)
+            module_list = manifest_data.get("modules", [])
+            obs_object = module_list[-1]
+            if type(obs_object) != dict:
+                logger.error(
+                    f"❌ Last element in modules list is not the obs-studio object"
+                )
+                return 2
+            new_module_list = []
+            for module in module_list:
+                if type(module) == str:
+                    if not os.path.isfile(os.path.join(manifest_path, module)):
+                        logger.warning(
+                            f"⚠️ Specified module {os.path.basename(module)} not found."
+                        )
+                        continue
+                    new_module_list.append(module)
+            new_module_list.sort()
+            new_module_list.append(obs_object)
+            manifest_data["modules"] = new_module_list
+            new_manifest_string = (
+                f"{json.dumps(manifest_data, indent=4, ensure_ascii=False)}\n"
+            )
+            if arguments.check:
+                if new_module_list != module_list:
+                    logger.error(f"❌ Module list failed order validation")
+                    return 2
+                elif new_manifest_string != manifest_string:
+                    logger.error(f"❌ Manifest file is not correctly formatted")
+                    return 2
+                else:
+                    logger.info(f"✅ Module list passed order validation")
+                    return 0
+            manifest.seek(0)
+            manifest.truncate()
+            manifest.write(new_manifest_string)
+            logger.info(f"✅ Updated manifest file '{manifest_file}'")
+    except IOError:
+        logger.error(f"❌ Unable to read manifest file '{manifest_file}'")
+        return 2
+    return 0
-if __name__ == '__main__':
+if __name__ == "__main__":
     sys.exit(main())

View file

@ -0,0 +1,36 @@
"AppBuild"
{
"AppID" "1905180"
"Desc" "github_@@DESC@@"
"ContentRoot" "./"
"BuildOutput" "build/"
"SetLive" "@@BRANCH@@"
"Depots"
{
"1905181" // Windows
{
"ContentRoot" "./steam-windows"
"InstallScript" "scripts/installscript.vdf"
"FileMapping"
{
"LocalPath" "*"
"DepotPath" "."
"recursive" "1"
}
}
"1905182" // Mac
{
"ContentRoot" "./steam-macos"
"FileMapping"
{
"LocalPath" "*"
"DepotPath" "."
"recursive" "1"
}
}
}
}

View file

@ -0,0 +1,35 @@
"AppBuild"
{
"AppID" "1905640"
"Desc" "github_@@DESC@@"
"ContentRoot" "./"
"BuildOutput" "build/"
"SetLive" "@@BRANCH@@"
"Depots"
{
"1905642" // Windows
{
"ContentRoot" "./steam-windows"
"FileMapping"
{
"LocalPath" "*"
"DepotPath" "."
"recursive" "1"
}
}
"1905641" // Mac
{
"ContentRoot" "./steam-macos"
"FileMapping"
{
"LocalPath" "*"
"DepotPath" "."
"recursive" "1"
}
}
}
}

View file

@ -0,0 +1,14 @@
#!/bin/zsh
arch_name="${CPUTYPE}"
is_translated="$(sysctl -in sysctl.proc_translated)"
if (( is_translated )) arch_name="arm64"
if [[ ${@} == *'--intel'* ]] arch_name="x86_64"
if [[ -d OBS.app ]] exec open OBS.app -W --args "${@}"
case ${arch_name} {
    x86_64) exec open x86_64/OBS.app -W --args "${@}" ;;
    arm64) exec open arm64/OBS.app -W --args "${@}" ;;
    *) echo "Unknown architecture: ${arch_name}"; exit 2 ;;
}

View file

@ -0,0 +1,85 @@
@echo off
@cd /d "%~dp0"
goto checkAdmin
:checkAdmin
net session >nul 2>&1
if %errorLevel% == 0 (
    echo.
) else (
    echo Administrative rights are required. Please re-run this script as Administrator.
    goto end
)
:writeRegistry
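:: Store the install directory (%1, passed as "%INSTALLDIR%" by installscript.vdf) in both 32-bit and 64-bit registry views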
reg add "HKLM\SOFTWARE\OBS Studio" /f /t REG_SZ /d %1 /reg:32
reg add "HKLM\SOFTWARE\OBS Studio" /f /t REG_SZ /d %1 /reg:64
:setupProgramData
:: Required for UWP applications
mkdir "%PROGRAMDATA%\obs-studio-hook"
icacls "%PROGRAMDATA%\obs-studio-hook" /grant "ALL APPLICATION PACKAGES":(OI)(CI)(GR,GE)
:checkDLL
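:: The CLSID below identifies the Virtual Cam DirectShow filter; the 32-bit and 64-bit registry views are checked separately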
echo Checking for 32-bit Virtual Cam registration...
reg query "HKLM\SOFTWARE\Classes\CLSID\{A3FCE0F5-3493-419F-958A-ABA1250EC20B}" >nul 2>&1 /reg:32
if %errorLevel% == 0 (
    echo 32-bit Virtual Cam found, skipping install...
    echo.
) else (
    echo 32-bit Virtual Cam not found, installing...
    goto install32DLL
)
:CheckDLLContinue
echo Checking for 64-bit Virtual Cam registration...
reg query "HKLM\SOFTWARE\Classes\CLSID\{A3FCE0F5-3493-419F-958A-ABA1250EC20B}" >nul 2>&1 /reg:64
if %errorLevel% == 0 (
    echo 64-bit Virtual Cam found, skipping install...
    echo.
) else (
    echo 64-bit Virtual Cam not found, installing...
    goto install64DLL
)
goto endSuccess
:install32DLL
echo Installing 32-bit Virtual Cam...
regsvr32.exe /i /s %1\data\obs-plugins\win-dshow\obs-virtualcam-module32.dll
reg query "HKLM\SOFTWARE\Classes\CLSID\{A3FCE0F5-3493-419F-958A-ABA1250EC20B}" >nul 2>&1 /reg:32
if %errorLevel% == 0 (
    echo 32-bit Virtual Cam successfully installed
    echo.
) else (
    echo 32-bit Virtual Cam installation failed
    echo.
    goto endFail
)
goto checkDLLContinue
:install64DLL
echo Installing 64-bit Virtual Cam...
regsvr32.exe /i /s %1\data\obs-plugins\win-dshow\obs-virtualcam-module64.dll
reg query "HKLM\SOFTWARE\Classes\CLSID\{A3FCE0F5-3493-419F-958A-ABA1250EC20B}" >nul 2>&1 /reg:64
if %errorLevel% == 0 (
    echo 64-bit Virtual Cam successfully installed
    echo.
    goto endSuccess
) else (
    echo 64-bit Virtual Cam installation failed
    echo.
    goto endFail
)
:endFail
echo Something failed, please report this on the OBS Discord or Forums!
goto end
:endSuccess
echo Virtual Cam installed!
echo.
:end
exit

View file

@ -0,0 +1,20 @@
"InstallScript"
{
"Run Process"
{
"install"
{
"process 1" "scripts\\install.bat"
"command 1" "\"%INSTALLDIR%\""
}
}
"Run Process On Uninstall"
{
"uninstall"
{
"process 1" "scripts\\uninstall.bat"
"command 1" "\"%INSTALLDIR%\""
}
}
}

View file

@ -0,0 +1,33 @@
@echo off
@cd /d "%~dp0"
goto checkAdmin
:checkAdmin
net session >nul 2>&1
if %errorLevel% == 0 (
    echo.
) else (
    echo Administrative rights are required. Please re-run this script as Administrator.
    goto end
)
:clearRegistry
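:: Remove the install-path keys written by install.bat from both registry views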
reg delete "HKLM\SOFTWARE\OBS Studio" /f /reg:32
reg delete "HKLM\SOFTWARE\OBS Studio" /f /reg:64
:: Vulkan layer keys
reg delete "HKLM\SOFTWARE\Khronos\Vulkan\ImplicitLayers" /f /v "%PROGRAMDATA%\obs-studio-hook\obs-vulkan64.json" /reg:32
reg delete "HKLM\SOFTWARE\Khronos\Vulkan\ImplicitLayers" /f /v "%PROGRAMDATA%\obs-studio-hook\obs-vulkan32.json" /reg:64
:deleteProgramDataFolder
RMDIR /S /Q "%PROGRAMDATA%\obs-studio-hook"
:uninstallDLLs
regsvr32.exe /u /s %1\data\obs-plugins\win-dshow\obs-virtualcam-module32.dll
regsvr32.exe /u /s %1\data\obs-plugins\win-dshow\obs-virtualcam-module64.dll
:endSuccess
echo Virtual Cam uninstalled!
echo.
:end
exit

View file

@ -41,6 +41,12 @@
         }
     },
     "tools": {
+        "sparkle": {
+            "version": "2.3.2",
+            "baseUrl": "https://github.com/sparkle-project/Sparkle/releases/download",
+            "label": "Sparkle 2",
+            "hash": "2b3fe6918ca20a83729aad34f8f693a678b714a17d33b5f13ca2d25edfa7eed3"
+        },
         "ccache-win": {
             "version": "4.8.1",
             "baseUrl": "https://github.com/ccache/ccache/releases/download/",