[ci] port Azure Pipelines to GitHub Actions
This is a targeted port since the branch has diverged significantly
from master.

Signed-off-by: Gary Guo <[email protected]>
nbdd0121 committed Nov 25, 2024
1 parent 503f6e8 commit 379d832
Showing 8 changed files with 864 additions and 570 deletions.
31 changes: 31 additions & 0 deletions .github/actions/download-partial-build-bin/action.yml
@@ -0,0 +1,31 @@
# Copyright lowRISC contributors (OpenTitan project).
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

name: Download partial build-bin artifacts
description: Download partial build-bin and merge them

inputs:
job-patterns:
description: Glob patterns of jobs to download artifacts from
required: true

runs:
using: composite
steps:
- name: Download partial build bins
uses: actions/download-artifact@v4
with:
pattern: partial-build-bin-${{ inputs.job-patterns }}
path: downloads
- name: Extract and merge bins
shell: bash
run: |
mkdir -p build-bin
find downloads -name "build-bin.tar" -exec \
tar -C build-bin --strip-components=1 -xvf {} \;
rm -rf downloads
- name: Show all downloaded files
shell: bash
run: |
find build-bin
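
For reference, a downstream aggregation job could consume this composite action roughly as in the sketch below. The job-patterns value is illustrative; it is chosen to match the partial-build-bin-chip_* artifact names uploaded by the bitstream workflow later in this commit.

- name: Download partial build bins
  uses: ./.github/actions/download-partial-build-bin
  with:
    # Illustrative glob; expands to the artifact pattern partial-build-bin-chip_*.
    job-patterns: chip_*
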
132 changes: 132 additions & 0 deletions .github/actions/prepare-env/action.yml
@@ -0,0 +1,132 @@
# Copyright lowRISC contributors (OpenTitan project).
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

name: Prepare environment
description: Install dependencies and prepare environment needed for OpenTitan

inputs:
service_account_json:
description: Service account JSON for Google Cloud access
default: ''
verilator-version:
description: Verilator version to install
required: true
default: '4.210'
verilator-path:
description: Path at which to install Verilator
required: true
default: /tools/verilator
verible-version:
description: Verible version to install
required: true
default: 'v0.0-2135-gb534c1fe'
verible-path:
description: Path at which to install Verible
required: true
default: /tools/verible
configure-bazel:
description: Configure Bazel to use remote cache
required: true
default: true

runs:
using: composite
steps:
- name: Install system dependencies
run: |
sudo apt update
grep '^[^#]' apt-requirements.txt | xargs sudo apt install -y
shell: bash

- uses: astral-sh/setup-uv@v3
with:
version: '0.4.20'
enable-cache: true
cache-dependency-glob: |
pyproject.toml
python-requirements.txt
- name: Install Python
shell: bash
run: |
uv python install 3.8
# Create a virtual environment for UV
uv venv ~/.local/share/venv
echo "$HOME/.local/share/venv/bin" >> "$GITHUB_PATH"
echo "VIRTUAL_ENV=$HOME/.local/share/venv" >> "$GITHUB_ENV"
- name: Install Python dependencies
shell: bash
run: |
uv pip install -r python-requirements.txt --require-hashes
# We installed uv from setup-uv action, so uninstall from venv to prevent conflict
uv pip uninstall uv
- name: Install Verilator
run: |
VERILATOR_TAR="verilator-v${{ inputs.verilator-version }}.tar.gz"
VERILATOR_URL="https://storage.googleapis.com/verilator-builds/${VERILATOR_TAR}"
sudo mkdir -p "${{ inputs.verilator-path }}"
curl -sSfL "$VERILATOR_URL" | sudo tar -C "${{ inputs.verilator-path }}" -xvzf -
echo "${{ inputs.verilator-path }}/v${{ inputs.verilator-version }}/bin" >> "$GITHUB_PATH"
shell: bash

- name: Install Verible
run: |
VERIBLE_TAR="verible-${{ inputs.verible-version }}-Ubuntu-22.04-jammy-x86_64.tar.gz"
VERIBLE_URL="https://github.com/chipsalliance/verible/releases/download/${{ inputs.verible-version }}/${VERIBLE_TAR}"
sudo mkdir -p "${{ inputs.verible-path }}"
curl -sSfL "$VERIBLE_URL" | sudo tar -C "${{ inputs.verible-path }}" -xvzf - --strip-components=1
# Fix up the bin directory permissions, which are broken in the tarball.
sudo chmod 755 "${{ inputs.verible-path }}/bin"
echo "${{ inputs.verible-path }}/bin" >> "$GITHUB_PATH"
shell: bash

# Log into Google Cloud using a service account JSON key.
# This can't use Workload Identity Federation because Bazel performance with WIF is terrible.
# This needs access to secrets and thus doesn't work for pull requests.
- uses: google-github-actions/auth@v2
if: github.event_name != 'pull_request'
with:
credentials_json: '${{ inputs.service_account_json }}'

- name: Configure ~/.bazelrc
if: inputs.configure-bazel == 'true'
run: |
cp ci/.bazelrc ~/.bazelrc
# Inject the OS version into a parameter used in the action key computation to
# avoid collisions between different operating systems in the caches.
# See #14695 for more information.
echo "build --remote_default_exec_properties=OSVersion=\"$(lsb_release -ds)\"" >> ~/.bazelrc
if ${{ github.event_name != 'pull_request' }}; then
echo "Will upload to the cache." >&2
echo "build --google_default_credentials" >> ~/.bazelrc
else
echo "Download from cache only." >&2
echo "build --remote_upload_local_results=false" >> ~/.bazelrc
fi
shell: bash

- name: Install merge-junit
run: |
MERGE_JUNIT_PATH="/tools/merge-junit"
MERGE_JUNIT_TAR="merge-junit-v0.2.1-x86_64-unknown-linux-musl.tar.gz"
MERGE_JUNIT_URL="https://github.com/tobni/merge-junit/releases/download/v0.2.1/${MERGE_JUNIT_TAR}"
MERGE_JUNIT_SHA256="5c6a63063f3a155ea4da912d5cae2ec4a89022df31d7942f2aba463ee4790152"
curl -fLSs -o "/tmp/${MERGE_JUNIT_TAR}" "$MERGE_JUNIT_URL"
HASH=$(sha256sum "/tmp/$MERGE_JUNIT_TAR" | awk '{print $1}')
if [[ "$HASH" != "$MERGE_JUNIT_SHA256" ]]; then
echo "The hash of merge-junit does not match" >&2
echo "$HASH != $MERGE_JUNIT_SHA256" >&2
exit 1
fi
sudo mkdir -p $MERGE_JUNIT_PATH
sudo chmod 777 $MERGE_JUNIT_PATH
tar -C $MERGE_JUNIT_PATH -xvzf "/tmp/${MERGE_JUNIT_TAR}" --strip-components=1
echo $MERGE_JUNIT_PATH >> "$GITHUB_PATH"
rm "/tmp/${MERGE_JUNIT_TAR}"
shell: bash
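
As a usage sketch, a job elsewhere in CI checks out the repository and then invokes this action; the secret name below is the same one the bitstream workflow in this commit passes through, and the remaining inputs fall back to their declared defaults.

- uses: actions/checkout@v4
- name: Prepare environment
  uses: ./.github/actions/prepare-env
  with:
    # Other inputs (Verilator/Verible versions and paths, Bazel config) keep their defaults.
    service_account_json: '${{ secrets.BAZEL_CACHE_CREDS }}'
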
44 changes: 44 additions & 0 deletions .github/actions/publish-bazel-test-results/action.yml
@@ -0,0 +1,44 @@
# Copyright lowRISC contributors (OpenTitan project).
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

name: Publish Bazel test results
description: Merge Bazel test results and publish the report

inputs:
merged-results:
description: Path to place merged JUnit report
default: test_results.xml
artifact-name:
description: Name of uploaded artifact. Leave empty to skip upload.
default: ''

runs:
using: composite
steps:
# Bazel produces one XML file per test. Merge them together.
- name: Merge JUnit reports
shell: bash
run: |
if find -L bazel-out -name "test.xml" | grep -F '' >> /tmp/test-xmls; then
cat /tmp/test-xmls | xargs merge-junit -o "${{ inputs.merged-results }}"
else
# merge-junit doesn't handle 0 inputs.
echo '<?xml version="1.0" encoding="UTF-8"?><testsuites/>' >> "${{ inputs.merged-results }}"
fi
- name: Upload report as artifact
if: inputs.artifact-name != ''
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.artifact-name }}
path: ${{ inputs.merged-results }}
# In case this is from a re-run
overwrite: true

- name: Publish job summary
uses: mikepenz/action-junit-report@ec3a351c13e080dc4fa94c49ab7ad5bf778a9668 # v5
with:
report_paths: ${{ inputs.merged-results }}
annotate_only: true
detailed_summary: true
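
A minimal calling sketch, assuming a job that has just run Bazel tests; the artifact name here is a placeholder, not one defined in this commit.

- name: Publish Bazel test results
  # Run even if the tests failed, so the report is still published.
  if: '!cancelled()'
  uses: ./.github/actions/publish-bazel-test-results
  with:
    # Placeholder artifact name; leave empty to skip the upload.
    artifact-name: example-job-test-results
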
102 changes: 102 additions & 0 deletions .github/workflows/bitstream.yml
@@ -0,0 +1,102 @@
# Copyright lowRISC contributors (OpenTitan project).
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

name: FPGA bitstream build
on:
workflow_call:
inputs:
top_name:
type: string
design_suffix:
type: string
vivado_version:
default: "2021.1"
type: string

jobs:
bitstream:
name: Build bitstream
runs-on: ubuntu-22.04-bitstream
timeout-minutes: 240
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Required by get-bitstream-strategy.sh
- name: Prepare environment
uses: ./.github/actions/prepare-env
with:
service_account_json: '${{ secrets.BAZEL_CACHE_CREDS }}'

- name: Configure bitstream strategy
id: strategy
run: |
ci/scripts/get-bitstream-strategy.sh "chip_${{ inputs.top_name }}_${{ inputs.design_suffix }}" \
':!/third_party/rust/' \
':!/sw/' \
':!/*.hjson' \
':!/*.tpl' \
':!/site/' \
':!/doc/' \
':!/COMMITTERS' \
':!/CLA' \
':!/*.md' \
':!/.github/' \
':!/hw/**/dv/*' \
':!/hw/dv/'
- name: Extract cached bitstream
if: steps.strategy.outputs.bitstreamStrategy == 'cached'
run: |
. util/build_consts.sh
bazel_package="//hw/bitstream"
design_name=chip_${{ inputs.top_name }}_${{ inputs.design_suffix }}
cached_archive="${bazel_package}:${design_name}_cached_archive"
./bazelisk.sh build "${cached_archive}"
bitstream_archive=$(./bazelisk.sh outquery "${cached_archive}")
cp -Lv ${bitstream_archive} build-bin.tar
- name: Build and splice bitstream with Vivado
if: steps.strategy.outputs.bitstreamStrategy != 'cached'
run: |
bazel_package=//hw/bitstream/vivado
bitstream_target=${bazel_package}:fpga_${{ inputs.design_suffix }}
archive_target=${bazel_package}:${{ inputs.top_name }}_${{ inputs.design_suffix }}_archive
trap 'get_logs' EXIT
get_logs() {
design_name=chip_${{ inputs.top_name }}_${{ inputs.design_suffix }}
SUB_PATH="hw/top_${{ inputs.top_name }}/${design_name}"
mkdir -p "$OBJ_DIR/$SUB_PATH" "$BIN_DIR/$SUB_PATH"
# This can fail if the build result is from Bazel cache
cp -rLvt "$OBJ_DIR/$SUB_PATH/" \
$(./bazelisk.sh outquery-all ${bitstream_target}) || true
bitstream_archive=$(./bazelisk.sh outquery ${archive_target})
cp -Lv ${bitstream_archive} build-bin.tar
}
. util/build_consts.sh
module load "xilinx/vivado/${{ inputs.vivado_version }}"
./bazelisk.sh build ${archive_target}
- name: Display synthesis & implementation logs
if: steps.strategy.outputs.bitstreamStrategy != 'cached'
run: |
. util/build_consts.sh
echo "Synthesis log"
cat $OBJ_DIR/hw/top_${{ inputs.top_name }}/build.fpga_${{ inputs.design_suffix }}/synth-vivado/lowrisc_systems_chip_${{ inputs.top_name }}_${{ inputs.design_suffix }}_0.1.runs/synth_1/runme.log || true
echo "Implementation log"
cat $OBJ_DIR/hw/top_${{ inputs.top_name }}/build.fpga_${{ inputs.design_suffix }}/synth-vivado/lowrisc_systems_chip_${{ inputs.top_name }}_${{ inputs.design_suffix }}_0.1.runs/impl_1/runme.log || true
- name: Upload step outputs
uses: actions/upload-artifact@v4
with:
name: partial-build-bin-chip_${{ inputs.top_name }}_${{ inputs.design_suffix }}
path: build-bin.tar

- name: Upload artifacts if build failed
if: failure()
uses: actions/upload-artifact@v4
with:
name: chip_${{ inputs.top_name }}_${{ inputs.design_suffix }}-build-out
path: build-out
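
Because this workflow declares a workflow_call trigger, it is meant to be invoked from a top-level CI workflow as a reusable workflow. A sketch of such a caller job follows; the job name, the top_name/design_suffix values, and the secrets: inherit line are assumptions for illustration, not part of this diff.

chip_earlgrey_cw310:
  name: Earl Grey bitstream for CW310
  uses: ./.github/workflows/bitstream.yml
  with:
    # Example values only; they form the design name chip_earlgrey_cw310.
    top_name: earlgrey
    design_suffix: cw310
  # Lets the called workflow read secrets.BAZEL_CACHE_CREDS.
  secrets: inherit
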
