Feature/sw 396 pre arm check for cht temperature 1 #827

name: Carbonix Build
on:
push:
branches:
- CxPilot
- CxPilot-*
- master
paths-ignore:
# ignore non-ChibiOS HALs
- 'libraries/AP_HAL_Linux/**'
- 'libraries/AP_HAL_ESP32/**'
- 'libraries/AP_HAL_SITL/**'
# ignore non-STM directories
- 'Tools/CHDK-Script/**'
- 'Tools/CodeStyle/**'
- 'Tools/completion/**'
- 'Tools/debug/**'
- 'Tools/environment_install/**'
- 'Tools/FilterTestTool/**'
- 'Tools/Frame_params/**'
- 'Tools/geotag/**'
- 'Tools/GIT_Test/**'
- 'Tools/gittools/**'
- 'Tools/Hello/**'
- 'Tools/LogAnalyzer/**'
- 'Tools/mavproxy_modules/**'
- 'Tools/Pozyx/**'
- 'Tools/PrintVersion.py'
- 'Tools/Replay/**'
- 'Tools/simulink/**'
- 'Tools/UDP_Proxy/**'
- 'Tools/vagrant/**'
- 'Tools/Vicon/**'
# Ignore vehicle autotest scripts; test_build_option.py support in the Tools/autotest directory is still needed
- 'Tools/autotest/antennatracker.py'
- 'Tools/autotest/arduplane.py'
- 'Tools/autotest/ardusub.py'
- 'Tools/autotest/balancebot.py'
- 'Tools/autotest/location.txt'
- 'Tools/autotest/quadplane.py'
- 'Tools/autotest/rover.py'
- 'Tools/autotest/sailboat.py'
- 'Tools/autotest/swarminit.txt'
# Ignore markdown files; they are irrelevant to the build
- '**.md'
# Ignore dotfiles at the repository root
- './.dir-locals.el'
- './.dockerignore'
- './.editorconfig'
- './.flake8'
- './.gitattributes'
- './.github'
- './.gitignore'
- './.pre-commit-config.yaml'
- './.pydevproject'
- './.valgrind-suppressions'
- './.valgrindrc'
- 'Dockerfile'
- 'Vagrantfile'
- 'Makefile'
# Ignore some other directories
- '.vscode/**'
- '.github/ISSUE_TEMPLATE/**'
pull_request:
paths-ignore:
# ignore non-ChibiOS HALs
- 'libraries/AP_HAL_Linux/**'
- 'libraries/AP_HAL_ESP32/**'
- 'libraries/AP_HAL_SITL/**'
# ignore non-STM directories
- 'Tools/CHDK-Script/**'
- 'Tools/CodeStyle/**'
- 'Tools/completion/**'
- 'Tools/debug/**'
- 'Tools/environment_install/**'
- 'Tools/FilterTestTool/**'
- 'Tools/Frame_params/**'
- 'Tools/geotag/**'
- 'Tools/GIT_Test/**'
- 'Tools/gittools/**'
- 'Tools/Hello/**'
- 'Tools/LogAnalyzer/**'
- 'Tools/mavproxy_modules/**'
- 'Tools/Pozyx/**'
- 'Tools/PrintVersion.py'
- 'Tools/Replay/**'
- 'Tools/simulink/**'
- 'Tools/UDP_Proxy/**'
- 'Tools/vagrant/**'
- 'Tools/Vicon/**'
# Ignore vehicle autotest scripts; test_build_option.py support in the Tools/autotest directory is still needed
- 'Tools/autotest/antennatracker.py'
- 'Tools/autotest/arduplane.py'
- 'Tools/autotest/ardusub.py'
- 'Tools/autotest/autotest.py'
- 'Tools/autotest/balancebot.py'
- 'Tools/autotest/common.py'
- 'Tools/autotest/examples.py'
- 'Tools/autotest/quadplane.py'
- 'Tools/autotest/rover.py'
- 'Tools/autotest/sailboat.py'
- 'Tools/autotest/**.txt'
- 'Tools/autotest/logger_metadata/**'
- 'Tools/autotest/param_metadata/**'
# Ignore markdown files; they are irrelevant to the build
- '**.md'
# Ignore dotfiles at the repository root
- './.dir-locals.el'
- './.dockerignore'
- './.editorconfig'
- './.flake8'
- './.gitattributes'
- './.github'
- './.gitignore'
- './.pre-commit-config.yaml'
- './.pydevproject'
- './.valgrind-suppressions'
- './.valgrindrc'
- 'Dockerfile'
- 'Vagrantfile'
- 'Makefile'
# Ignore some other directories
- '.vscode/**'
- '.github/ISSUE_TEMPLATE/**'
release:
types: [published]
workflow_dispatch:
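# Cancel any in-progress run for the same ref when a new one starts.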
concurrency:
group: ci-${{github.workflow}}-${{ github.ref }}
cancel-in-progress: true
jobs:
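# setup-s3-path computes the S3 destination prefix that every later upload step reuses.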
setup-s3-path:
runs-on: ubuntu-22.04
if: ${{ !contains(github.event.pull_request.labels.*.name, 'SKIP_BUILD') }}
outputs:
s3_path: ${{ steps.set-s3-path.outputs.s3_path }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
# PR runs, by default, create a merge commit and check out that merge commit. We don't want that.
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.ref }}
- name: Extract firmware version, commit id, and branch name
id: extract_info
run: |
FIRMWARE_VERSION=$(grep -oP 'define AP_CUSTOM_FIRMWARE_STRING "\K(.*)(?=")' libraries/AP_HAL_ChibiOS/hwdef/CarbonixCommon/version.inc)
COMMIT_ID=$(git rev-parse --short HEAD)
BRANCH_NAME=${GITHUB_REF#refs/heads/}
echo "firmware_version=$FIRMWARE_VERSION" >> $GITHUB_ENV
echo "commit_id=$COMMIT_ID" >> $GITHUB_ENV
echo "branch_name=$BRANCH_NAME" >> $GITHUB_ENV
shell: bash
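# Destination layout (illustrative): release builds go under
#   s3://carbonix-firmware-release-files/Carbopilot_V2/<date>_<version>_<commit>/
# pushes to CxPilot* branches and pull requests go under the matching prefix in
#   s3://carbonix-firmware-dev-files/Carbopilot_V2/
# and manual or other triggers fall back to the Manual/ prefix.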
- name: Set S3 Path
id: set-s3-path
run: |
DATE_HR=$(date +%Y%m%d_%H%M)
if [ "${{ github.event_name }}" == "release" ]; then
PATH_TO_S3=s3://carbonix-firmware-release-files/Carbopilot_V2/${DATE_HR}_${{ env.firmware_version }}_${{ env.commit_id }}/
echo "Release to: $PATH_TO_S3"
elif [ "${{ github.event_name }}" == "push" ] && [[ "${{ env.branch_name }}" == CxPilot* ]]; then
PATH_TO_S3=s3://carbonix-firmware-dev-files/Carbopilot_V2/${{ env.branch_name }}/${DATE_HR}_${{ env.firmware_version }}_${{ env.commit_id }}/
echo "PUSH : $PATH_TO_S3"
elif [ "${{ github.event_name }}" == "pull_request" ]; then
PATH_TO_S3=s3://carbonix-firmware-dev-files/Carbopilot_V2/PR/${DATE_HR}_${{ env.firmware_version }}_${{ env.commit_id }}_${{ github.event.pull_request.number }}/
echo "PR : $PATH_TO_S3"
else
PATH_TO_S3="s3://carbonix-firmware-dev-files/Carbopilot_V2/Manual/${DATE_HR}_${{ env.firmware_version }}_${{ env.commit_id }}/"
echo "Manual trigger or other: $PATH_TO_S3"
fi
echo "s3_path=${PATH_TO_S3}" >> $GITHUB_OUTPUT
shell: bash
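# build-periph builds each CarbonixF405 hwdef configuration with the ChibiOS toolchain
# via Tools/Carbonix_scripts/carbonix_waf_build.sh and uploads the output as an artifact.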
build-periph:
runs-on: ubuntu-22.04
if: ${{ !contains(github.event.pull_request.labels.*.name, 'SKIP_BUILD') }}
needs: setup-s3-path
container: ardupilot/ardupilot-dev-${{ matrix.toolchain }}:v0.1.3
strategy:
fail-fast: false
matrix:
config: [
CarbonixF405,
CarbonixF405-no-crystal
]
toolchain: [ chibios ]
gcc: [10]
exclude:
- gcc: 10
toolchain: chibios-clang
steps:
- uses: actions/checkout@v4
with:
# PR runs, by default, create a merge commit and check out that merge commit. We don't want that.
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.ref }}
submodules: 'recursive'
- name: Prepare ccache timestamp
id: ccache_cache_timestamp
run: |
NOW=$(date -u +"%F-%T")
echo "timestamp=${NOW}" >> $GITHUB_OUTPUT
- name: ccache cache files
uses: actions/cache@v4
with:
path: ~/.ccache
key: ${{github.workflow}}-ccache-${{matrix.config}}-${{ matrix.toolchain }}-${{ matrix.gcc }}-${{steps.ccache_cache_timestamp.outputs.timestamp}}
restore-keys: ${{github.workflow}}-ccache-${{matrix.config}}-${{ matrix.toolchain }}-${{ matrix.gcc }}
- name: setup ccache
run: |
. .github/workflows/ccache.env
- name: Install bash tools
run: |
sudo apt-get update
sudo apt-get -y install xxd
- name: build ${{matrix.config}}
shell: bash
run: |
git config --global --add safe.directory ${GITHUB_WORKSPACE}
if [[ ${{ matrix.toolchain }} == "chibios-clang" ]]; then
export CC=clang
export CXX=clang++
fi
PATH="/usr/lib/ccache:/opt/gcc-arm-none-eabi-${{matrix.gcc}}/bin:$PATH"
PATH="/github/home/.local/bin:$PATH"
Tools/Carbonix_scripts/carbonix_waf_build.sh ${{ matrix.config }}
ccache -s
ccache -z
- name: Upload build artifacts
uses: actions/upload-artifact@v4
with:
name: build-periph-${{ matrix.config }}
path: output/
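# collect-aircraft-config-files lists the aircraft configuration XMLs and exposes their
# basenames as a JSON array that drives the process-ac matrix.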
collect-aircraft-config-files:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'SKIP_BUILD') }}
runs-on: ubuntu-22.04
needs: build-periph
outputs:
aircraft-config-files: ${{ steps.collect.outputs.aircraft-config-files }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
# PR runs, by default, create a merge commit and check out that merge commit. We don't want that.
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.ref }}
- name: Install jq
run: sudo apt-get update && sudo apt-get install -y jq
- name: Collect XML files
id: collect
run: |
xml_files=$(find libraries/AP_HAL_ChibiOS/hwdef/CarbonixCommon/aircraft_configuration -name "*.xml" -print0 | \
xargs -0 -n 1 basename | \
sed 's/.xml$//' | \
jq -R -s -c 'split("\n")[:-1]')
echo "aircraft-config-files=$xml_files" >> $GITHUB_OUTPUT
process-ac:
if: ${{ !contains(github.event.pull_request.labels.*.name, 'SKIP_BUILD') }}
runs-on: ubuntu-22.04
needs: [collect-aircraft-config-files, setup-s3-path]
container: ardupilot/ardupilot-dev-${{ matrix.toolchain }}:v0.1.3
strategy:
fail-fast: false
matrix:
xml_file: ${{ fromJson(needs.collect-aircraft-config-files.outputs.aircraft-config-files) }}
toolchain: [ chibios ]
gcc: [10]
exclude:
- gcc: 10
toolchain: chibios-clang
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
# PR runs, by default, create a merge commit and check out that merge commit. We don't want that.
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.ref }}
submodules: 'recursive'
- name: Install bash tools
run: |
sudo apt-get update
sudo apt-get -y install xxd
- name: Download all build artifacts
uses: actions/download-artifact@v4
with:
path: periph-build/
- name: List files
run: |
ls -la periph-build/*/
- name: Configure Git Safe Directory
run: |
git config --global --add safe.directory ${GITHUB_WORKSPACE}
- name: Get Commit ID
id: get_commit_id
run: |
COMMIT_ID=$(git rev-parse --short HEAD)
echo "commit_id=$COMMIT_ID" >> $GITHUB_ENV
shell: sh -e {0}
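# aircraft_config.py is expected to assemble the per-aircraft bundle into final-output/;
# the exists flag set below gates the S3 and artifact uploads on that directory being created.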
- name: Run aircraft_config.py
id: aircraft_config
run: |
python Tools/Carbonix_scripts/aircraft_config.py "libraries/AP_HAL_ChibiOS/hwdef/CarbonixCommon/aircraft_configuration/${{ matrix.xml_file }}.xml" "${{ env.commit_id }}"
if [ -d "final-output" ]; then
echo "exists=true" >> $GITHUB_ENV
echo "Found final-output directory"
else
echo "exists=false" >> $GITHUB_ENV
echo "No final-output directory found"
fi
- name: Install AWS CLI
if: ${{ env.exists == 'true' }}
run: |
apt-get update -y
DEBIAN_FRONTEND=noninteractive apt-get install -y curl unzip
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip -q awscliv2.zip
./aws/install --update
- name: Configure AWS credentials
if: ${{ env.exists == 'true' }}
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: Upload to S3
if: ${{ env.exists == 'true' }}
run: |
PATH_TO_S3=${{ needs.setup-s3-path.outputs.s3_path }}
echo "Uploading Artifacts to: $PATH_TO_S3"
aws s3 cp final-output/ $PATH_TO_S3 --recursive
- name: Upload build artifacts
if: ${{ env.exists == 'true' }}
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.xml_file }}
path: final-output/${{ matrix.xml_file }}/
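# build-sitl builds the Windows SITL binaries under Cygwin, publishes them as the "sitl"
# artifact, and mirrors all downloaded artifacts to the shared S3 prefix.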
build-sitl:
runs-on: 'windows-latest'
if: ${{ !contains(github.event.pull_request.labels.*.name, 'SKIP_BUILD') }}
needs: setup-s3-path
steps:
- uses: actions/checkout@v4
with:
# PR runs, by default, create a merge commit and check out that merge commit. We don't want that.
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.ref }}
submodules: 'recursive'
- name: Prepare ccache timestamp
id: ccache_cache_timestamp
shell: bash
run: |
NOW=$(date -u +"%F-%T")
echo "timestamp=${NOW}" >> $GITHUB_OUTPUT
WORKFLOWNAME="${{github.workflow}}"
NAME_DASHED=${WORKFLOWNAME//+( )/_}
echo "cache-key=${NAME_DASHED}" >> $GITHUB_OUTPUT
- uses: cygwin/cygwin-install-action@master
with:
packages: cygwin64 gcc-g++=10.2.0-1 ccache python37 python37-future python37-lxml python37-pip python37-setuptools python37-wheel git procps gettext
add-to-path: false
# Put ccache into the GitHub cache for faster builds
- name: setup ccache
env:
PATH: /usr/bin:$(cygpath ${SYSTEMROOT})/system32
shell: C:\cygwin\bin\bash.exe -eo pipefail '{0}'
run: >-
mkdir -p /cygdrive/d/a/ardupilot/ardupilot/ccache &&
mkdir -p /usr/local/etc &&
echo "export CCACHE_SLOPPINESS=file_stat_matches" >> ~/ccache.conf &&
echo "export CCACHE_DIR=/cygdrive/d/a/ardupilot/ardupilot/ccache" >> ~/ccache.conf &&
echo "export CCACHE_BASEDIR=/cygdrive/d/a/ardupilot/ardupilot" >> ~/ccache.conf &&
echo "export CCACHE_COMPRESS=1" >> ~/ccache.conf &&
echo "export CCACHE_COMPRESSLEVEL=6" >> ~/ccache.conf &&
echo "export CCACHE_MAXSIZE=400M" >> ~/ccache.conf &&
source ~/ccache.conf &&
ccache -s
- name: ccache cache files
uses: actions/cache@v4
with:
path: D:/a/ardupilot/ardupilot/ccache
key: ${{ steps.ccache_cache_timestamp.outputs.cache-key }}-ccache-${{steps.ccache_cache_timestamp.outputs.timestamp}}
restore-keys: ${{ steps.ccache_cache_timestamp.outputs.cache-key }}-ccache- # restore ccache from a previous build on this branch or the base branch
- name: Prepare Python environment
env:
PATH: /usr/bin:$(cygpath ${SYSTEMROOT})/system32
shell: C:\cygwin\bin\bash.exe -eo pipefail '{0}'
run: >-
ln -sf /usr/bin/python3.7 /usr/bin/python && ln -sf /usr/bin/pip3.7 /usr/bin/pip &&
python -m pip install --progress-bar off empy==3.3.4 pexpect &&
python -m pip install --progress-bar off dronecan --upgrade &&
cp /usr/bin/ccache /usr/local/bin/ &&
cd /usr/local/bin && ln -s ccache /usr/local/bin/gcc &&
ln -s ccache /usr/local/bin/g++ &&
ln -s ccache /usr/local/bin/x86_64-pc-cygwin-gcc &&
ln -s ccache /usr/local/bin/x86_64-pc-cygwin-g++
- name: Build SITL
env:
PATH: /usr/bin:$(cygpath ${SYSTEMROOT})/system32
shell: C:\cygwin\bin\bash.exe -eo pipefail '{0}'
run: >-
git config --global --add safe.directory /cygdrive/d/a/${GITHUB_REPOSITORY#$GITHUB_REPOSITORY_OWNER/}/${GITHUB_REPOSITORY#$GITHUB_REPOSITORY_OWNER/} &&
export PATH=/usr/local/bin:/usr/bin:$(cygpath ${SYSTEMROOT})/system32 &&
source ~/ccache.conf &&
Tools/scripts/cygwin_build.sh &&
ccache -s
- name: Check build files
id: check_files
uses: andstor/file-existence-action@v2
with:
files: "artifacts/*.exe"
fail: true
- name: Archive build
uses: actions/upload-artifact@v4
with:
name: sitl
path: artifacts
retention-days: 90
- name: Download Artifacts
uses: actions/download-artifact@v4
with:
path: temp
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: Upload artifacts to S3
shell: pwsh
run: |
$env:PATH_TO_S3 = '${{ needs.setup-s3-path.outputs.s3_path }}'
echo "Uploading to: $env:PATH_TO_S3"
aws s3 cp temp/ $env:PATH_TO_S3 --recursive
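# build-upload-zip collects the per-aircraft artifacts plus the SITL build, packs them into
# a single 7z archive named after the S3 folder, and uploads it as an artifact and to S3.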
build-upload-zip:
runs-on: ubuntu-22.04
if: ${{ !contains(github.event.pull_request.labels.*.name, 'SKIP_BUILD') }}
needs: [process-ac, build-sitl, setup-s3-path]
steps:
- name: Extract folder name from s3_path
id: extract-folder-name
run: |
s3_path="${{ needs.setup-s3-path.outputs.s3_path }}"
folder_name=$(basename "$s3_path")
echo "folder_name=$folder_name" >> $GITHUB_ENV
- name: Create working folder for aircraft artifacts
run: mkdir -p aircraft_zip
- name: List available artifacts
id: list-artifacts
uses: actions/github-script@v6
with:
script: |
const artifacts = await github.rest.actions.listArtifactsForRepo({
owner: context.repo.owner,
repo: context.repo.repo,
});
return artifacts.data.artifacts.map(artifact => artifact.name);
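# Keep only aircraft artifacts (names containing "_AC_" followed by a version number) plus
# the sitl artifact; everything else, such as the raw build-periph outputs, is skipped.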
- name: Filter artifacts
id: filter-artifacts
run: |
echo "Filtered artifacts:" > filtered_artifacts.txt
for artifact in ${{ steps.list-artifacts.outputs.result }}; do
if [[ $artifact =~ .*_AC_.*[0-9]+(\.[0-9]+)? ]]; then
echo $artifact >> filtered_artifacts.txt
fi
done
echo "sitl" >> filtered_artifacts.txt
cat filtered_artifacts.txt
- name: Download filtered artifacts
uses: actions/download-artifact@v4
with:
name: ${{ steps.filter-artifacts.outputs.result }}
path: aircraft_zip/
- name: Delete unnecessary folders
run: |
rm -rf aircraft_zip/build-periph-*
- name: List downloaded files
run: ls -R aircraft_zip/
- name: Install p7zip
run: sudo apt-get install -y p7zip-full
- name: 7zip the folder
run: |
cd aircraft_zip
7z a ../${{ env.folder_name }}.7z .
- name: Upload 7z folder to artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ env.folder_name }}
path: ${{ env.folder_name }}.7z
retention-days: 15
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: Upload 7z folder to S3
run: |
PATH_TO_S3=${{ needs.setup-s3-path.outputs.s3_path }}
# Remove trailing slash if it exists
PATH_TO_S3=${PATH_TO_S3%/}
echo "Uploading to: $PATH_TO_S3"
aws s3 cp ${{ env.folder_name }}.7z $PATH_TO_S3/${{ env.folder_name }}.7z