name: Release

on:
  workflow_dispatch:
    inputs:
      ref:
        description: 'Ref to build (for Pull Requests, use refs/pull/NNN/head)'
        required: true
  repository_dispatch:
    # client_payload should be the same as the inputs for workflow_dispatch.
    types:
    - Build*
  release:
    types:
    - published

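# Note: for reference only, a repository_dispatch build can be started with a
# REST call of roughly this shape (illustrative; the event type has to match
# the "Build*" filter above and the token needs "repo" scope):
#
#   curl -X POST https://api.github.com/repos/<owner>/<repo>/dispatches \
#     -H "Authorization: token <TOKEN>" \
#     -d '{"event_type": "Build", "client_payload": {"ref": "refs/pull/NNN/head"}}'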
jobs:
  source:
    name: Source

    runs-on: ubuntu-20.04

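    # These outputs are consumed by the other jobs as needs.source.outputs.*.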
    outputs:
      version: ${{ steps.metadata.outputs.version }}
      is_tag: ${{ steps.metadata.outputs.is_tag }}
      trigger_type: ${{ steps.metadata.outputs.trigger_type }}
      folder: ${{ steps.metadata.outputs.folder }}

    steps:
    - name: Checkout (Release)
      if: github.event_name == 'release'
      uses: actions/checkout@v2
      with:
        # We generate a changelog; for this we need the full git log.
        fetch-depth: 0

    - name: Checkout (Manual)
      if: github.event_name == 'workflow_dispatch'
      uses: actions/checkout@v2
      with:
        ref: ${{ github.event.inputs.ref }}
        # We generate a changelog; for this we need the full git log.
        fetch-depth: 0

    - name: Checkout (Trigger)
      if: github.event_name == 'repository_dispatch'
      uses: actions/checkout@v2
      with:
        ref: ${{ github.event.client_payload.ref }}
        # We generate a changelog; for this we need the full git log.
        fetch-depth: 0

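    # Make sure the checkout ends up on a named branch or tag; the branch name
    # decides where the artifacts are stored later on (see "Generate metadata").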
    - name: Check valid branch name
      run: |
        if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
          REF="${{ github.event.inputs.ref }}"
        elif [ "${{ github.event_name }}" = "repository_dispatch" ]; then
          REF="${{ github.event.client_payload.ref }}"
        else
          REF="${{ github.ref }}"
        fi

        # Check if we are a tag.
        if [ -n "$(git name-rev --name-only --tags --no-undefined HEAD 2>/dev/null || false)" ]; then
          exit 0
        fi

        # Check if the checkout caused the branch to be named.
        if [ "$(git rev-parse --abbrev-ref HEAD)" != "HEAD" ]; then
          exit 0
        fi

        # Check if this was a pull request.
        if [ -n "$(echo ${REF} | grep '^refs/pull/[0-9]*')" ]; then
          PULL=$(echo ${REF} | cut -d/ -f3)
          git checkout -b pr${PULL}
        fi

        # Are we still in a detached state? Error out.
        if [ "$(git rev-parse --abbrev-ref HEAD)" == "HEAD" ]; then
          echo "The 'ref' given resulted in a checkout of a detached HEAD."
          echo "We cannot accurately detect the version for such a checkout."
          echo ""
          echo "If you want to build a Pull Request, make sure you use 'refs/pull/NNN/head'."
          echo ""
          echo "Cancelling build, as without a version we cannot store the artifacts."
          exit 1
        fi

    - name: Generate metadata
      id: metadata
      run: |
        echo "::group::Prepare metadata files"
        cmake -DGENERATE_OTTDREV=1 -P cmake/scripts/FindVersion.cmake
        ./.github/changelog.sh > .changelog
        TZ='UTC' date +"%Y-%m-%d %H:%M UTC" > .release_date
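        # .ottdrev, written by FindVersion.cmake, is tab-separated: field 1
        # holds the version string, field 6 is '1' when the checkout is
        # exactly on a tag.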
        cat .ottdrev | cut -f 1 -d$'\t' > .version

        if [ $(cat .ottdrev | cut -f 6 -d$'\t') = '1' ]; then
          # Assume that all tags are always releases. Why else make a tag?
          IS_TAG="true"

          FOLDER="${{ env.FOLDER_RELEASES }}"
          TRIGGER_TYPE="new-tag"
        else
          IS_TAG="false"

          BRANCH=$(git symbolic-ref -q HEAD | sed 's@.*/@@')
          if [ -z "${BRANCH}" ]; then
            echo "Internal error: branch name is empty."
            echo "An earlier step should have prevented this from happening."
            echo "Cancelling build, as without a branch name we cannot store the artifacts."
            exit 1
          fi

          if [ "${BRANCH}" = "${{ env.NIGHTLIES_BRANCH }}" ]; then
            # The "master" branch is special, and we call it a nightly.
            FOLDER="${{ env.FOLDER_NIGHTLIES }}/$(date +%Y)"
            TRIGGER_TYPE="new-master"
          else
            # All other branches, which can be builds of Pull Requests, are
            # put in their own folder.
            FOLDER="${{ env.FOLDER_BRANCHES }}/${BRANCH}"
            TRIGGER_TYPE="new-branch"
          fi
        fi

        mkdir -p build/bundles
        cp .changelog build/bundles/changelog.txt
        cp .release_date build/bundles/released.txt
        cp README.md build/bundles/README.md
        echo "::endgroup::"

        echo "Release Date: $(cat .release_date)"
        echo "Revision: $(cat .ottdrev)"
        echo "Version: $(cat .version)"
        echo "Is tag: ${IS_TAG}"
        echo "Folder on CDN: ${FOLDER}"
        echo "Workflow trigger: ${TRIGGER_TYPE}"

        echo "::set-output name=version::$(cat .version)"
        echo "::set-output name=is_tag::${IS_TAG}"
        echo "::set-output name=folder::${FOLDER}"
        echo "::set-output name=trigger_type::${TRIGGER_TYPE}"
      env:
        NIGHTLIES_BRANCH: master
        FOLDER_RELEASES: openttd-releases
        FOLDER_NIGHTLIES: openttd-nightlies
        FOLDER_BRANCHES: openttd-branches

    - name: Remove VCS information
      run: |
        rm -rf .git

    - name: Create bundles
      run: |
        FOLDER_NAME=openttd-${{ steps.metadata.outputs.version }}

        # Rename the folder to openttd-NNN
        mkdir ${FOLDER_NAME}
        find . -maxdepth 1 -not -name . -not -name build -not -name ${FOLDER_NAME} -exec mv {} ${FOLDER_NAME}/ \;

        echo "::group::Create tarball (xz) bundle"
        tar --xz -cvf build/bundles/${FOLDER_NAME}-source.tar.xz ${FOLDER_NAME}
        echo "::endgroup::"

        # This tarball is only to be used within this workflow.
        echo "::group::Create tarball (gz) bundle"
        tar --gzip -cvf source.tar.gz ${FOLDER_NAME}
        echo "::endgroup::"

        echo "::group::Create zip bundle"
        zip -9 -r build/bundles/${FOLDER_NAME}-source.zip ${FOLDER_NAME}
        echo "::endgroup::"

    - name: Store bundles
      uses: actions/upload-artifact@v2
      with:
        name: openttd-source
        path: build/bundles/*
        retention-days: 5

    - name: Store source (for other jobs)
      uses: actions/upload-artifact@v2
      with:
        name: internal-source
        path: source.tar.gz
        retention-days: 1

  docs:
    name: Docs
    needs: source

    runs-on: ubuntu-20.04

    steps:
    - name: Download source
      uses: actions/download-artifact@v2
      with:
        name: internal-source

    - name: Unpack source
      run: |
        tar -xf source.tar.gz --strip-components=1

    - name: Install dependencies
      run: |
        echo "::group::Update apt"
        sudo apt-get update
        echo "::endgroup::"

        echo "::group::Install dependencies"
        sudo apt-get install -y --no-install-recommends \
          doxygen \
          # EOF
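          # (The '# EOF' above soaks up the last line continuation, so every
          # package line can keep its trailing backslash.)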
        echo "::endgroup::"
      env:
        DEBIAN_FRONTEND: noninteractive

    - name: Build
      run: |
        mkdir -p ${GITHUB_WORKSPACE}/build
        cd ${GITHUB_WORKSPACE}/build

        echo "::group::CMake"
        cmake ${GITHUB_WORKSPACE} \
          -DCMAKE_BUILD_TYPE=RelWithDebInfo \
          -DOPTION_DOCS_ONLY=ON \
          # EOF
        echo "::endgroup::"

        echo "::group::Build"
        make docs
        echo "::endgroup::"

    - name: Create bundles
      run: |
        BASENAME=openttd-${{ needs.source.outputs.version }}

        cd ${GITHUB_WORKSPACE}/build

        mv docs/source ${BASENAME}-docs
        mv docs/ai-api ${BASENAME}-docs-ai
        mv docs/gs-api ${BASENAME}-docs-gs

        mkdir -p bundles

        echo "::group::Create docs bundle"
        tar --xz -cf bundles/${BASENAME}-docs.tar.xz ${BASENAME}-docs
        echo "::endgroup::"

        echo "::group::Create AI API docs bundle"
        tar --xz -cf bundles/${BASENAME}-docs-ai.tar.xz ${BASENAME}-docs-ai
        echo "::endgroup::"

        echo "::group::Create GameScript API docs bundle"
        tar --xz -cf bundles/${BASENAME}-docs-gs.tar.xz ${BASENAME}-docs-gs
        echo "::endgroup::"

    - name: Store bundles
      uses: actions/upload-artifact@v2
      with:
        name: openttd-docs
        path: build/bundles/*.tar.xz
        retention-days: 5

  linux:
    name: Linux (Generic)
    needs: source

    runs-on: ubuntu-20.04
    container:
      # manylinux2014 is based on CentOS 7, but already has a lot of things
      # installed and preconfigured. It makes it easier to build OpenTTD.
      image: quay.io/pypa/manylinux2014_x86_64

    steps:
    - name: Download source
      uses: actions/download-artifact@v2
      with:
        name: internal-source

    - name: Unpack source
      run: |
        tar -xf source.tar.gz --strip-components=1

    - name: Install dependencies
      run: |
        echo "::group::Install dependencies"
        yum install -y \
          fontconfig-devel \
          freetype-devel \
          libicu-devel \
          libpng-devel \
          lzo-devel \
          SDL2-devel \
          wget \
          xz-devel \
          zlib-devel \
          # EOF
        echo "::endgroup::"

        # The yum variant of fluidsynth depends on all possible audio drivers,
        # like jack, ALSA, pulseaudio, etc. This is not really useful for us,
        # as we route the output of fluidsynth back via our sound driver, and
        # as such do not use these audio driver outputs at all. So instead,
        # we compile fluidsynth ourselves, with as few dependencies as
        # possible. This currently means it picks up SDL2, but this is fine,
        # as we need SDL2 anyway.
        echo "::group::Install fluidsynth"
        wget https://github.com/FluidSynth/fluidsynth/archive/v2.1.6.tar.gz
        tar xf v2.1.6.tar.gz
        (
          cd fluidsynth-2.1.6
          mkdir build
          cd build
          cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=/usr
          make -j$(nproc)
          make install
        )
        echo "::endgroup::"

    - name: Install GCC problem matcher
      uses: ammaraskar/gcc-problem-matcher@master

    - name: Build
      run: |
        mkdir -p build
        cd build

        echo "::group::CMake"
        cmake ${GITHUB_WORKSPACE} \
          -DCMAKE_BUILD_TYPE=RelWithDebInfo \
          -DOPTION_PACKAGE_DEPENDENCIES=ON \
          # EOF
        echo "::endgroup::"

        echo "::group::Build"
        echo "Running on $(nproc) cores"
        make -j$(nproc) package
        echo "::endgroup::"

        # Remove the sha256 files CPack generates; we will do this ourselves at
        # the end of this workflow.
        rm -f bundles/*.sha256

    - name: Store bundles
      uses: actions/upload-artifact@v2
      with:
        name: openttd-linux-generic
        path: build/bundles
        retention-days: 5

  linux-distro:
    name: Linux (Distros)
    needs: source

    if: needs.source.outputs.is_tag == 'true'

    strategy:
      fail-fast: false
      matrix:
        include:
        - container_image: "ubuntu:18.04"
          bundle_name: "bionic"
        - container_image: "ubuntu:20.04"
          bundle_name: "focal"
        - container_image: "ubuntu:20.10"
          bundle_name: "groovy"
        - container_image: "debian:buster"
          bundle_name: "buster"

    runs-on: ubuntu-20.04
    container:
      image: ${{ matrix.container_image }}

    steps:
    - name: Download source
      uses: actions/download-artifact@v2
      with:
        name: internal-source

    - name: Unpack source
      run: |
        tar -xf source.tar.gz --strip-components=1

    - name: Install dependencies
      run: |
        echo "::group::Update apt"
        apt-get update
        echo "::endgroup::"

        echo "::group::Install dependencies"
        apt-get install -y --no-install-recommends \
          cmake \
          debhelper \
          g++ \
          git \
          make \
          openssl \
          libfontconfig-dev \
          libfluidsynth-dev \
          libicu-dev \
          liblzma-dev \
          liblzo2-dev \
          libsdl2-dev \
          lsb-release \
          zlib1g-dev \
          # EOF
        echo "::endgroup::"
      env:
        DEBIAN_FRONTEND: noninteractive

    - name: Install GCC problem matcher
      uses: ammaraskar/gcc-problem-matcher@master

    - name: Build
      run: |
        mkdir -p build
        cd build

        echo "::group::CMake"
        cmake ${GITHUB_WORKSPACE} \
          -DCMAKE_BUILD_TYPE=RelWithDebInfo \
          -DCMAKE_INSTALL_PREFIX=/usr \
          # EOF
        echo "::endgroup::"

        echo "::group::Build"
        echo "Running on $(nproc) cores"
        make -j$(nproc) package
        echo "::endgroup::"

        # Remove the sha256 files CPack generates; we will do this ourselves at
        # the end of this workflow.
        rm -f bundles/*.sha256

    - name: Store bundles
      uses: actions/upload-artifact@v2
      with:
        name: openttd-linux-${{ matrix.bundle_name }}
        path: build/bundles
        retention-days: 5

  macos:
    name: MacOS
    needs: source

    runs-on: macos-10.15
    env:
      MACOSX_DEPLOYMENT_TARGET: 10.9

    steps:
    - name: Download source
      uses: actions/download-artifact@v2
      with:
        name: internal-source

    - name: Unpack source
      run: |
        tar -xf source.tar.gz --strip-components=1

    - name: Install dependencies
      env:
        HOMEBREW_NO_AUTO_UPDATE: 1
        HOMEBREW_NO_INSTALL_CLEANUP: 1
      run: |
        brew install pandoc

    # The following step can be removed when the build VM is updated with a revision of
    # vcpkg dating from roughly 01/01/2021 or later. At that point, `doNotUpdateVcpkg`
    # can be set to `true` and the `vcpkgGitCommitId` can be removed.
    - name: Update vcpkg
      run: |
        cd /usr/local/share/vcpkg
        git fetch --unshallow

    - name: Prepare vcpkg (with cache)
      uses: lukka/run-vcpkg@v6
      with:
        vcpkgDirectory: '/usr/local/share/vcpkg'
        doNotUpdateVcpkg: false
        vcpkgGitCommitId: 2a42024b53ebb512fb5dd63c523338bf26c8489c
        vcpkgArguments: 'liblzma:x64-osx libpng:x64-osx lzo:x64-osx liblzma:arm64-osx libpng:arm64-osx lzo:arm64-osx'

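    # Cross-compiling below needs build tools that run natively, so build them
    # once on the host and point the arm64/x64 builds at them via HOST_BINARY_DIR.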
    - name: Build tools
      run: |
        mkdir build-host
        cd build-host

        echo "::group::CMake"
        cmake ${GITHUB_WORKSPACE} \
          -DCMAKE_BUILD_TYPE=RelWithDebInfo \
          -DOPTION_TOOLS_ONLY=ON \
          # EOF
        echo "::endgroup::"

        echo "::group::Build tools"
        echo "Running on $(sysctl -n hw.logicalcpu) cores"
        make -j$(sysctl -n hw.logicalcpu) tools
        echo "::endgroup::"

    - name: Install GCC problem matcher
      uses: ammaraskar/gcc-problem-matcher@master

    - name: Import code signing certificates
      uses: Apple-Actions/import-codesign-certs@v1
      with:
        # The certificates in a PKCS12 file encoded as a base64 string
        p12-file-base64: ${{ secrets.APPLE_DEVELOPER_CERTIFICATE_P12_BASE64 }}
        # The password used to import the PKCS12 file.
        p12-password: ${{ secrets.APPLE_DEVELOPER_CERTIFICATE_PASSWORD }}
      # If this is run on a fork, there may not be a certificate set up - continue in this case
      continue-on-error: true

    - name: Build arm64
      run: |
        mkdir build-arm64
        cd build-arm64

        echo "::group::CMake"
        cmake ${GITHUB_WORKSPACE} \
          -DCMAKE_OSX_ARCHITECTURES=arm64 \
          -DVCPKG_TARGET_TRIPLET=arm64-osx \
          -DCMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake \
          -DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
          -DCMAKE_BUILD_TYPE=RelWithDebInfo \
          # EOF
        echo "::endgroup::"

        echo "::group::Build"
        echo "Running on $(sysctl -n hw.logicalcpu) cores"
        make -j$(sysctl -n hw.logicalcpu)
        echo "::endgroup::"

    - name: Build x64
      run: |
        mkdir build-x64
        cd build-x64

        echo "::group::CMake"
        cmake ${GITHUB_WORKSPACE} \
          -DCMAKE_OSX_ARCHITECTURES=x86_64 \
          -DVCPKG_TARGET_TRIPLET=x64-osx \
          -DCMAKE_TOOLCHAIN_FILE=/usr/local/share/vcpkg/scripts/buildsystems/vcpkg.cmake \
          -DHOST_BINARY_DIR=${GITHUB_WORKSPACE}/build-host \
          -DCMAKE_BUILD_TYPE=RelWithDebInfo \
          -DCPACK_BUNDLE_APPLE_CERT_APP=${{ secrets.APPLE_DEVELOPER_CERTIFICATE_ID }} \
          "-DCPACK_BUNDLE_APPLE_CODESIGN_PARAMETER=--deep -f --options runtime" \
          -DAPPLE_UNIVERSAL_PACKAGE=1 \
          # EOF
        echo "::endgroup::"

        echo "::group::Build"
        echo "Running on $(sysctl -n hw.logicalcpu) cores"
        make -j$(sysctl -n hw.logicalcpu)
        echo "::endgroup::"

    - name: Build package
      run: |
        cd build-x64

        # Combine the `openttd` binaries from each build into a single file
        lipo -create -output openttd-universal ../build-*/openttd
        mv openttd-universal openttd

        echo "::group::Build"
        echo "Running on $(sysctl -n hw.logicalcpu) cores"
        make -j$(sysctl -n hw.logicalcpu) package
        echo "::endgroup::"

        # Remove the sha256 files CPack generates; we will do this ourselves at
        # the end of this workflow.
        rm -f bundles/*.sha256

    - name: Install gon
      env:
        HOMEBREW_NO_AUTO_UPDATE: 1
        HOMEBREW_NO_INSTALL_CLEANUP: 1
      run: |
        brew tap mitchellh/gon
        brew install mitchellh/gon/gon

    - name: Notarize
      env:
        AC_USERNAME: ${{ secrets.APPLE_DEVELOPER_APP_USERNAME }}
        AC_PASSWORD: ${{ secrets.APPLE_DEVELOPER_APP_PASSWORD }}
      run: |
        cd build-x64
        ../os/macosx/notarize.sh

    - name: Build zip
      run: |
        cd build-x64

        pushd _CPack_Packages/*/Bundle/openttd-*/

        # Remove the Applications symlink from the staging folder
        rm -f Applications

        # Remove the original dmg built by CPack to avoid a conflict when resolving
        # the zip_filename variable below
        rm -f ../*.dmg

        zip_filename=(../openttd-*)

        # Package up the existing, notarised .app into a zip file
        zip -r -9 ${zip_filename}.zip OpenTTD.app

        popd

        # Now move it into place to be uploaded
        mv _CPack_Packages/*/Bundle/openttd-*.zip bundles/

    - name: Store bundles
      uses: actions/upload-artifact@v2
      with:
        name: openttd-macos-universal
        path: build-x64/bundles
        retention-days: 5

  windows:
    name: Windows
    needs: source

    strategy:
      fail-fast: false
      matrix:
        include:
        - arch: x86
        - arch: x64
        - arch: arm64

    runs-on: windows-latest

    steps:
    - name: Download source
      uses: actions/download-artifact@v2
      with:
        name: internal-source

    - name: Unpack source
      shell: bash
      run: |
        tar -xf source.tar.gz --strip-components=1

    - name: Install dependencies
      shell: bash
      run: |
        choco install pandoc

    # "restore-cache" which is done by "run-vcpkg" uses Windows tar.
    # A git clone on windows marks a few files as read-only; when Windows tar
    # tries to extract the cache over this folder, it fails, despite the files
    # being identical. This failure shows up as a warning in the logs. We
    # avoid this by simply removing the read-only mark from the git folder.
    # In other words: this is a hack!
    # See: https://github.com/lukka/run-vcpkg/issues/61
    - name: Remove read-only flag from vcpkg git folder
      shell: powershell
      run: |
        attrib -r "c:\vcpkg\.git\*.*" /s

    - name: Prepare vcpkg (with cache)
      uses: lukka/run-vcpkg@v6
      with:
        vcpkgDirectory: 'c:/vcpkg'
        doNotUpdateVcpkg: true
        vcpkgArguments: 'liblzma libpng lzo zlib'
        vcpkgTriplet: '${{ matrix.arch }}-windows-static'

    - name: Build tools
      uses: lukka/run-cmake@v3
      with:
        cmakeListsOrSettingsJson: CMakeListsTxtAdvanced
        useVcpkgToolchainFile: false
        buildDirectory: '${{ github.workspace }}/build-host'
        buildWithCMakeArgs: '--target tools'
        cmakeAppendedArgs: ' -GNinja -DOPTION_TOOLS_ONLY=ON -DCMAKE_BUILD_TYPE=RelWithDebInfo'

    - name: Install MSVC problem matcher
      uses: ammaraskar/msvc-problem-matcher@master

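    # The NSIS installer is only produced for tag builds (is_tag == 'true');
    # all other builds only get the zip bundle created below.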
    - name: Build (with installer)
      if: needs.source.outputs.is_tag == 'true'
      uses: lukka/run-cmake@v3
      with:
        cmakeListsOrSettingsJson: CMakeListsTxtAdvanced
        useVcpkgToolchainFile: true
        buildDirectory: '${{ github.workspace }}/build'
        cmakeAppendedArgs: ' -GNinja -DOPTION_USE_NSIS=ON -DHOST_BINARY_DIR=${{ github.workspace }}/build-host -DCMAKE_BUILD_TYPE=RelWithDebInfo'

    - name: Build (without installer)
      if: needs.source.outputs.is_tag != 'true'
      uses: lukka/run-cmake@v3
      with:
        cmakeListsOrSettingsJson: CMakeListsTxtAdvanced
        useVcpkgToolchainFile: true
        buildDirectory: '${{ github.workspace }}/build'
        cmakeAppendedArgs: ' -GNinja -DHOST_BINARY_DIR=${{ github.workspace }}/build-host -DCMAKE_BUILD_TYPE=RelWithDebInfo'

    - name: Create bundles
      shell: bash
      run: |
        cd ${GITHUB_WORKSPACE}/build
        echo "::group::Run CPack"
        cpack
        echo "::endgroup::"

        echo "::group::Prepare PDB to be bundled"
        PDB=$(ls bundles/*.zip | cut -d/ -f2 | sed 's/.zip$/.pdb/')
        cp openttd.pdb bundles/${PDB}
        xz -9 bundles/${PDB}
        echo "::endgroup::"

        echo "::group::Cleanup"
        # Remove the sha256 files CPack generates; we will do this ourselves at
        # the end of this workflow.
        rm -f bundles/*.sha256
        echo "::endgroup::"

    - name: Store bundles
      uses: actions/upload-artifact@v2
      with:
        name: openttd-windows-${{ matrix.arch }}
        path: build/bundles
        retention-days: 5

  upload:
    name: Upload (AWS)
    needs:
    - source
    - docs
    - linux
    - linux-distro
    - macos
    - windows

    # The 'linux-distro' job is skipped if this is not a tag build (e.g. a
    # nightly). That normally causes this job to be skipped too, unless we have
    # this lengthy boy :)
    # "always()" is important here, it is the keyword to use to stop skipping
    # this job if any dependency is skipped. It looks a bit silly, but it is
    # how GitHub Actions work ;)
    if: always() && needs.source.result == 'success' && needs.docs.result == 'success' && needs.linux.result == 'success' && (needs.linux-distro.result == 'success' || needs.linux-distro.result == 'skipped') && needs.macos.result == 'success' && needs.windows.result == 'success'

    runs-on: ubuntu-20.04

    steps:
    - name: Download all bundles
      uses: actions/download-artifact@v2

    - name: Calculate checksums
      run: |
        echo "::group::Move bundles to a single folder"
        mkdir bundles
        mv openttd-*/* bundles/
        cd bundles
        echo "::endgroup::"

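        # Checksum every bundle; the *.sha256sum files can be checked after
        # download with, for example, `sha256sum -c <file>`.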
        for i in $(ls openttd-*); do
          echo "::group::Calculating checksums for ${i}"
          openssl dgst -r -md5 -hex $i > $i.md5sum
          openssl dgst -r -sha1 -hex $i > $i.sha1sum
          openssl dgst -r -sha256 -hex $i > $i.sha256sum
          echo "::endgroup::"
        done

    - name: Upload bundles to AWS
      run: |
        aws s3 cp --recursive --only-show-errors bundles/ s3://${{ secrets.CDN_S3_BUCKET }}/${{ needs.source.outputs.folder }}/${{ needs.source.outputs.version }}/

        # We do not invalidate the CloudFront distribution here. The trigger
        # for "New OpenTTD release" first updates the manifest files and
        # creates an index.html. We invalidate after that, so everything
        # becomes visible at once.
      env:
        AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
        AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        AWS_REGION: ${{ secrets.AWS_REGION }}

    - name: Trigger 'New OpenTTD release'
      uses: peter-evans/repository-dispatch@v1
      with:
        token: ${{ secrets.DEPLOYMENT_TOKEN }}
        repository: OpenTTD/workflows
        event-type: ${{ needs.source.outputs.trigger_type }}
        client-payload: '{"version": "${{ needs.source.outputs.version }}", "folder": "${{ needs.source.outputs.folder }}"}'

  upload-steam:
    name: Upload (Steam)
    needs:
    - source
    - linux
    - macos
    - windows

    if: needs.source.outputs.trigger_type == 'new-master' || needs.source.outputs.trigger_type == 'new-tag'

    runs-on: ubuntu-20.04

    steps:
    - name: Download all bundles
      uses: actions/download-artifact@v2

    - name: Setup steamcmd
      uses: CyberAndrii/setup-steamcmd@v1

    - name: Generate Steam auth code
      id: steam-totp
      uses: CyberAndrii/steam-totp@v1
      with:
        shared_secret: ${{ secrets.STEAM_SHARED_SECRET }}

    - name: Upload to Steam
      run: |
        echo "::group::Extracting source"
        mkdir source
        (
          cd source
          tar -xf ../internal-source/source.tar.gz --strip-components=1
        )
        echo "::endgroup::"

        mkdir steam
        (
          cd steam

          echo "::group::Prepare Win32"
          unzip ../openttd-windows-x86/openttd-*-windows-win32.zip
          mv openttd-*-windows-win32 steam-win32
          echo "::endgroup::"

          echo "::group::Prepare Win64"
          unzip ../openttd-windows-x64/openttd-*-windows-win64.zip
          mv openttd-*-windows-win64 steam-win64
          echo "::endgroup::"

          echo "::group::Prepare macOS"
          mkdir steam-macos
          (
            cd steam-macos
            unzip ../../openttd-macos-universal/openttd-*-macos-universal.zip
          )
          echo "::endgroup::"

          echo "::group::Prepare Linux"
          tar xvf ../openttd-linux-generic/openttd-*-linux-generic-amd64.tar.xz
          mv openttd-*-linux-generic-amd64 steam-linux
          echo "::endgroup::"

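          # Fill in the @@DESCRIPTION@@ and @@BRANCH@@ placeholders of the
          # release.vdf template from the source tree before handing it to steamcmd.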
echo "::group::Preparing build file"
|
|
|
|
if [ "${{ needs.source.outputs.trigger_type }}" = "new-tag" ]; then
|
|
|
|
BRANCH="testing"
|
|
|
|
else
|
|
|
|
BRANCH="nightly"
|
|
|
|
fi
|
|
|
|
cat ../source/os/steam/release.vdf | sed 's/@@DESCRIPTION@@/openttd-${{ needs.source.outputs.version }}/;s/@@BRANCH@@/'${BRANCH}'/' > release.vdf
|
|
|
|
cat release.vdf
|
|
|
|
echo "::endgroup::"
|
|
|
|
|
|
|
|
echo "::group::Upload to Steam"
|
|
|
|
steamcmd +login ${{ secrets.STEAM_USERNAME }} ${{ secrets.STEAM_PASSWORD }} ${{ steps.steam-totp.outputs.code }} +run_app_build $(pwd)/release.vdf +quit
|
|
|
|
echo "::endgroup::"
|
|
|
|
)
|