diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..c403e78 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,36 @@ +--- +name: Bug report +about: Create a report to help us improve +title: "[TITLE]" +labels: 'bug-report' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS & Version: [e.g. iOS 10.2.1] + - Browser & Version: [e.g. chrome v71.0.12345] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser & Version: [e.g. stock browser v0.1.2] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..ac0792f --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,27 @@ +Description +--- + +Motivation and Context +--- + +How Has This Been Tested? +--- + +What process can a PR reviewer use to test or verify this change? +--- + + + + + +Breaking Changes +--- + +- [x] None +- [ ] Requires data directory on base node to be deleted +- [ ] Requires hard fork +- [ ] Other - Please specify + + + diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..567c3fd --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,8 @@ +--- +version: 2 +updates: + # Maintain dependencies for GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml new file mode 100644 index 0000000..1fffef0 --- /dev/null +++ b/.github/workflows/audit.yml @@ -0,0 +1,27 @@ +--- +name: Security audit - daily + +'on': + push: + paths: + # Run if workflow changes + - '.github/workflows/audit.yml' + # Run on changed dependencies + - '**/Cargo.toml' + - '**/Cargo.lock' + # Run if the configuration file changes + - '**/audit.toml' + # Rerun periodically to pick up new advisories + schedule: + - cron: '43 05 * * *' + # Run manually + workflow_dispatch: + +jobs: + security_audit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: rustsec/audit-check@v1.4.1 + with: + token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/build_binaries.json b/.github/workflows/build_binaries.json new file mode 100644 index 0000000..943e511 --- /dev/null +++ b/.github/workflows/build_binaries.json @@ -0,0 +1,63 @@ +[ + { + "name": "linux-x86_64", + "runs-on": "ubuntu-20.04", + "rust": "nightly-2024-03-01", + "target": "x86_64-unknown-linux-gnu", + "cross": false, + "build_metric": true + }, + { + "name": "linux-arm64", + "runs-on": "ubuntu-latest", + "rust": "stable", + "target": "aarch64-unknown-linux-gnu", + "cross": true, + "flags": "--features libtor --workspace --exclude minotari_mining_helper_ffi --exclude tari_integration_tests", + "build_metric": true + }, + { + "name": "linux-riscv64", + "runs-on": "ubuntu-latest", + "rust": "stable", + "target": "riscv64gc-unknown-linux-gnu", + "cross": true, + "flags": "--workspace --exclude 
minotari_mining_helper_ffi --exclude tari_integration_tests", + "build_enabled": true, + "best_effort": true + }, + { + "name": "macos-x86_64", + "runs-on": "macos-12", + "rust": "stable", + "target": "x86_64-apple-darwin", + "cross": false + }, + { + "name": "macos-arm64", + "runs-on": "macos-14", + "rust": "stable", + "target": "aarch64-apple-darwin", + "cross": false + }, + { + "name": "windows-x64", + "runs-on": "windows-2019", + "rust": "stable", + "target": "x86_64-pc-windows-msvc", + "cross": false, + "features": "safe", + "flags": "--workspace --exclude tari_libtor" + }, + { + "name": "windows-arm64", + "runs-on": "windows-latest", + "rust": "stable", + "target": "aarch64-pc-windows-msvc", + "cross": false, + "features": "safe", + "target_bins": "minotari_node, minotari_console_wallet, minotari_merge_mining_proxy, minotari_miner", + "flags": "--workspace --exclude tari_libtor", + "build_enabled": false + } +] diff --git a/.github/workflows/build_binaries.yml b/.github/workflows/build_binaries.yml new file mode 100644 index 0000000..6bd01ed --- /dev/null +++ b/.github/workflows/build_binaries.yml @@ -0,0 +1,866 @@ +--- +name: Build Matrix of Binaries + +'on': + push: + tags: + - "v[0-9]+.[0-9]+.[0-9]*" + branches: + - "build-all-*" + - "build-bins-*" + schedule: + - cron: "05 00 * * *" + workflow_dispatch: + inputs: + customTag: + description: "Development Tag" + required: true + default: "development-tag" + +env: + TS_FILENAME: "tari_suite" + TS_BUNDLE_ID_BASE: "com.tarilabs" + TS_SIG_FN: "sha256-unsigned.txt" + ## Must be a JSON string + TS_FILES: '["minotari_node","minotari_console_wallet","minotari_miner","minotari_merge_mining_proxy"]' + TS_FEATURES: "default, safe" + TS_LIBRARIES: "minotari_mining_helper_ffi" + TARI_NETWORK_DIR: testnet + toolchain: nightly-2024-03-01 + matrix-json-file: ".github/workflows/build_binaries.json" + CARGO_HTTP_MULTIPLEXING: false + CARGO_UNSTABLE_SPARSE_REGISTRY: true + CARGO: cargo + CARGO_OPTIONS: "--release" + CARGO_CACHE: true + +concurrency: + # https://docs.github.com/en/actions/examples/using-concurrency-expressions-and-a-test-matrix + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: ${{ !startsWith(github.ref, 'refs/tags/v') && github.ref != 'refs/heads/development' && github.ref != 'refs/heads/nextnet' && github.ref != 'refs/heads/stagenet' }} + +permissions: {} + +jobs: + matrix-prep: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + with: + submodules: false + + - name: Set Matrix + id: set-matrix + run: | + # + # build all target images + # matrix=$( jq -s -c .[] .github/workflows/build_binaries.json ) + # + # build only single target image + # matrix_selection=$( jq -c '.[] | select( ."name" == "windows-x64" )' ${{ env.matrix-json-file }} ) + # matrix_selection=$( jq -c '.[] | select( ."name" | contains("macos") )' ${{ env.matrix-json-file }} ) + # + # build selected target images - build_enabled + matrix_selection=$( jq -c '.[] | select( ."build_enabled" != false )' ${{ env.matrix-json-file }} ) + # + # Set up the JSON build matrix + matrix=$(echo ${matrix_selection} | jq -s -c '{"builds": .}') + echo $matrix + echo $matrix | jq . 
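+ # The jq pipe above doubles as a sanity check: the step fails here if the assembled matrix is not valid JSON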
+ echo "matrix=${matrix}" >> $GITHUB_OUTPUT + + matrix-check: + # Debug matrix + if: ${{ false }} + runs-on: ubuntu-latest + needs: matrix-prep + steps: + - name: Install json2yaml + run: | + sudo npm install -g json2yaml + + - name: Check matrix definition + run: | + matrix='${{ needs.matrix-prep.outputs.matrix }}' + echo $matrix + echo $matrix | jq . + echo $matrix | json2yaml + + builds: + name: Building ${{ matrix.builds.name }} on ${{ matrix.builds.runs-on }} + needs: matrix-prep + continue-on-error: ${{ matrix.builds.best_effort || false }} + outputs: + TARI_NETWORK_DIR: ${{ steps.set-tari-network.outputs.TARI_NETWORK_DIR }} + TARI_VERSION: ${{ steps.set-tari-vars.outputs.TARI_VERSION }} + VSHA_SHORT: ${{ steps.set-tari-vars.outputs.VSHA_SHORT }} + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.matrix-prep.outputs.matrix) }} + + runs-on: ${{ matrix.builds.runs-on }} + + steps: + - name: Checkout source code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Declare TestNet for tags + id: set-tari-network + # Don't forget to comment out the below if, when force testing with GHA_NETWORK + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + env: + GHA_NETWORK: ${{ github.ref_name }} + # GHA_NETWORK: "v1.0.0-rc.4" + shell: bash + run: | + source buildtools/multinet_envs.sh ${{ env.GHA_NETWORK }} + echo ${TARI_NETWORK} + echo ${TARI_TARGET_NETWORK} + echo ${TARI_NETWORK_DIR} + echo "TARI_NETWORK=${TARI_NETWORK}" >> $GITHUB_ENV + echo "TARI_TARGET_NETWORK=${TARI_TARGET_NETWORK}" >> $GITHUB_ENV + echo "TARI_NETWORK_DIR=${TARI_NETWORK_DIR}" >> $GITHUB_ENV + echo "TARI_NETWORK_DIR=${TARI_NETWORK_DIR}" >> $GITHUB_OUTPUT + + - name: Declare Global Variables 4 GHA ${{ github.event_name }} + id: set-tari-vars + shell: bash + run: | + echo "VBRANCH=${{ github.ref_name }}" >> $GITHUB_ENV + VSHA_SHORT=$(git rev-parse --short HEAD) + echo "VSHA_SHORT=${VSHA_SHORT}" >> $GITHUB_ENV + echo "VSHA_SHORT=${VSHA_SHORT}" >> $GITHUB_OUTPUT + TARI_VERSION=$(awk -F ' = ' '$1 ~ /^version/ \ + { gsub(/["]/, "", $2); printf("%s",$2) }' \ + "$GITHUB_WORKSPACE/applications/minotari_node/Cargo.toml") + echo "TARI_VERSION=${TARI_VERSION}" >> $GITHUB_ENV + echo "TARI_VERSION=${TARI_VERSION}" >> $GITHUB_OUTPUT + if [[ "${{ matrix.builds.features }}" == "" ]]; then + echo "BUILD_FEATURES=${{ env.TS_FEATURES }}" >> $GITHUB_ENV + else + echo "BUILD_FEATURES=${{ matrix.builds.features }}" >> $GITHUB_ENV + fi + TARGET_BINS="" + if [[ "${{ matrix.builds.target_bins }}" == "" ]]; then + ARRAY_BINS=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + else + ARRAY_BINS=( $(echo "${{ matrix.builds.target_bins }}" | tr ', ' '\n') ) + fi + for BIN_FILE in "${ARRAY_BINS[@]}"; do + echo "Adding ${BIN_FILE} to Builds" + TARGET_BINS+="--bin ${BIN_FILE} " + done + echo "TARGET_BINS=${TARGET_BINS}" >> $GITHUB_ENV + TARGET_LIBS="" + if [[ "${{ matrix.builds.target_libs }}" == "" ]]; then + ARRAY_LIBS=( $(echo ${TS_LIBRARIES} | tr ', ' '\n') ) + else + ARRAY_LIBS=( $(echo "${{ matrix.builds.target_libs }}" | tr ', ' '\n') ) + fi + for LIB_FILE in "${ARRAY_LIBS[@]}"; do + echo "Adding ${LIB_FILE} to library Builds" + TARGET_LIBS+="--package ${LIB_FILE} " + done + echo "TARGET_LIBS=${TARGET_LIBS}" >> $GITHUB_ENV + TARI_BUILD_ISA_CPU=${{ matrix.builds.target }} + # Strip unknown part + TARI_BUILD_ISA_CPU=${TARI_BUILD_ISA_CPU//-unknown-linux-gnu} + # Strip gc used by rust + TARI_BUILD_ISA_CPU=${TARI_BUILD_ISA_CPU//gc} + echo "TARI_BUILD_ISA_CPU=${TARI_BUILD_ISA_CPU}" >> $GITHUB_ENV + + - name: 
Scheduled Destination Folder Override + if: ${{ github.event_name == 'schedule' && github.event.schedule == '05 00 * * *' }} + shell: bash + run: | + echo "S3_DEST_OVERRIDE=daily/" >> $GITHUB_ENV + + - name: Setup Rust toolchain + uses: dtolnay/rust-toolchain@master + with: + components: rustfmt, clippy + toolchain: ${{ matrix.builds.rust }} + targets: ${{ matrix.builds.target }} + + - name: Install Linux dependencies - Ubuntu + if: ${{ startsWith(runner.os,'Linux') && ( ! matrix.builds.cross ) }} + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + + - name: Install Linux dependencies - Ubuntu - cross-compiled ${{ env.TARI_BUILD_ISA_CPU }} on x86-64 + if: ${{ startsWith(runner.os,'Linux') && ( ! matrix.builds.cross ) && matrix.builds.name != 'linux-x86_64' }} + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies-cross_compile.sh ${{ env.TARI_BUILD_ISA_CPU }} + rustup target add ${{ matrix.builds.target }} + echo "PKG_CONFIG_SYSROOT_DIR=/usr/${{ env.TARI_BUILD_ISA_CPU }}-linux-gnu/" >> $GITHUB_ENV + + - name: Install macOS dependencies + if: startsWith(runner.os,'macOS') + run: | + # openssl, cmake and autoconf already installed + brew install zip coreutils automake protobuf + rustup target add ${{ matrix.builds.target }} + + - name: Install Windows dependencies + if: startsWith(runner.os,'Windows') + run: | + vcpkg.exe install sqlite3:x64-windows zlib:x64-windows + # Bug in choco - need to install each package individually + choco upgrade llvm -y + # psutils is out of date + # choco upgrade psutils -y + choco upgrade openssl -y + # Should already be installed + # choco upgrade strawberryperl -y + choco upgrade protoc -y + + - name: Set environment variables - Nix + if: ${{ ! startsWith(runner.os,'Windows') }} + shell: bash + run: | + echo "SHARUN=shasum --algorithm 256" >> $GITHUB_ENV + echo "CC=gcc" >> $GITHUB_ENV + echo "TS_EXT=" >> $GITHUB_ENV + echo "SHELL_EXT=.sh" >> $GITHUB_ENV + echo "PLATFORM_SPECIFIC_DIR=linux" >> $GITHUB_ENV + echo "TS_DIST=/dist" >> $GITHUB_ENV + + - name: Set environment variables - macOS + if: startsWith(runner.os,'macOS') + shell: bash + run: | + echo "PLATFORM_SPECIFIC_DIR=osx" >> $GITHUB_ENV + echo "LIB_EXT=.dylib" >> $GITHUB_ENV + + # Hardcoded sdk for MacOSX on ARM64 + - name: Set environment variables - macOS - ARM64 (pin/sdk) + # Debug + if: ${{ false }} + # if: ${{ startsWith(runner.os,'macOS') && matrix.builds.name == 'macos-arm64' }} + run: | + xcrun --show-sdk-path + ls -alhtR "/Library/Developer/CommandLineTools/SDKs/" + echo "RANDOMX_RS_CMAKE_OSX_SYSROOT=/Library/Developer/CommandLineTools/SDKs/MacOSX12.1.sdk" >> $GITHUB_ENV + + - name: Set environment variables - Ubuntu + if: startsWith(runner.os,'Linux') + shell: bash + run: | + echo "LIB_EXT=.so" >> $GITHUB_ENV + + - name: Set environment variables - Windows + if: startsWith(runner.os,'Windows') + shell: bash + run: | + # echo "SHARUN=pwsh C:\ProgramData\chocolatey\lib\psutils\tools\psutils-master\shasum.ps1 --algorithm 256" >> $GITHUB_ENV + mkdir -p "$GITHUB_WORKSPACE\psutils" + curl -v -o "$GITHUB_WORKSPACE\psutils\getopt.ps1" "https://raw.githubusercontent.com/lukesampson/psutils/master/getopt.ps1" + curl -v -o "$GITHUB_WORKSPACE\psutils\shasum.ps1" "https://raw.githubusercontent.com/lukesampson/psutils/master/shasum.ps1" + echo "SHARUN=pwsh $GITHUB_WORKSPACE\psutils\shasum.ps1 --algorithm 256" >> $GITHUB_ENV + echo "TS_EXT=.exe" >> $GITHUB_ENV + echo "LIB_EXT=.dll" >> $GITHUB_ENV + echo "SHELL_EXT=.bat" >> $GITHUB_ENV + echo 
"TS_DIST=\dist" >> $GITHUB_ENV + echo "PLATFORM_SPECIFIC_DIR=windows" >> $GITHUB_ENV + echo "SQLITE3_LIB_DIR=C:\vcpkg\installed\x64-windows\lib" >> $GITHUB_ENV + echo "OPENSSL_DIR=C:\Program Files\OpenSSL-Win64" >> $GITHUB_ENV + echo "LIBCLANG_PATH=C:\Program Files\LLVM\bin" >> $GITHUB_ENV + echo "C:\Strawberry\perl\bin" >> $GITHUB_PATH + + - name: Cache cargo files and outputs + if: ${{ ( ! startsWith(github.ref, 'refs/tags/v') ) && ( ! matrix.builds.cross ) && ( env.CARGO_CACHE ) }} + uses: Swatinem/rust-cache@v2 + with: + key: ${{ matrix.builds.target }} + + - name: Install and setup cargo cross + if: ${{ matrix.builds.cross }} + shell: bash + run: | + #cargo install cross + cargo install cross --git https://github.com/cross-rs/cross + echo "CARGO=cross" >> $GITHUB_ENV + + - name: Install and setup cargo-auditable + if: ${{ false }} + # if: ${{ startsWith(github.ref, 'refs/tags/v') }} + shell: bash + run: | + cargo install cargo-auditable + echo "CARGO=${{ env.CARGO }} auditable" >> $GITHUB_ENV + echo "CARGO_OPTIONS=${{ env.CARGO_OPTIONS }} --release" >> $GITHUB_ENV + + - name: Show command used for Cargo + shell: bash + run: | + echo "cargo command is: ${{ env.CARGO }}" + echo "cargo options is: ${{ env.CARGO_OPTIONS }}" + echo "cross flag: ${{ matrix.builds.cross }}" + + - name: Build release binaries + shell: bash + run: | + ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ + --target ${{ matrix.builds.target }} \ + --features "${{ env.BUILD_FEATURES }}" \ + ${{ env.TARGET_BINS }} \ + ${{ matrix.builds.flags }} --locked + + - name: Build release libraries + shell: bash + run: | + ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ + --target ${{ matrix.builds.target }} \ + --lib ${{ env.TARGET_LIBS }} \ + ${{ matrix.builds.flags }} --locked + + - name: Copy binaries to folder for archiving + shell: bash + run: | + # set -xo pipefail + mkdir -p "$GITHUB_WORKSPACE${TS_DIST}" + cd "$GITHUB_WORKSPACE${TS_DIST}" + BINFILE="${TS_FILENAME}-${TARI_VERSION}-${VSHA_SHORT}-${{ matrix.builds.name }}${TS_EXT}" + echo "BINFILE=${BINFILE}" >> $GITHUB_ENV + echo "Copying files for ${BINFILE} to $(pwd)" + echo "MTS_SOURCE=$(pwd)" >> $GITHUB_ENV + ls -alht "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/" + ARRAY_FILES=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + for FILE in "${ARRAY_FILES[@]}"; do + echo "checking for file - ${FILE}${TS_EXT}" + if [ -f "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/${FILE}${TS_EXT}" ]; then + cp -vf "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/${FILE}${TS_EXT}" . + fi + done + if [[ "${{ matrix.builds.target_libs }}" == "" ]]; then + ARRAY_LIBS=( $(echo ${TS_LIBRARIES} | tr ', ' '\n') ) + else + ARRAY_LIBS=( $(echo "${{ matrix.builds.target_libs }}" | tr ', ' '\n') ) + fi + for FILE in "${ARRAY_LIBS[@]}"; do + echo "checking for file - ${FILE}${TS_EXT}" + # Check on Nix for libs + if [ -f "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/lib${FILE}${LIB_EXT}" ]; then + cp -vf "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/lib${FILE}${LIB_EXT}" . + fi + # Check on Windows libs + if [ -f "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/${FILE}${LIB_EXT}" ]; then + cp -vf "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/${FILE}${LIB_EXT}" . 
+ fi + done + if [ -f "${GITHUB_WORKSPACE}/applications/minotari_node/${PLATFORM_SPECIFIC_DIR}/runtime/start_tor${SHELL_EXT}" ]; then + cp -vf "${GITHUB_WORKSPACE}/applications/minotari_node/${PLATFORM_SPECIFIC_DIR}/runtime/start_tor${SHELL_EXT}" . + fi + ls -alhtR ${{ env.MTS_SOURCE }} + + - name: Build minotari_node with metrics too + if: ${{ matrix.builds.build_metric }} + shell: bash + run: | + ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ + --target ${{ matrix.builds.target }} \ + --features "${{ env.BUILD_FEATURES }}, metrics" \ + --bin minotari_node \ + ${{ matrix.builds.flags }} --locked + cp -vf "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/minotari_node${TS_EXT}" \ + "${{ env.MTS_SOURCE }}/minotari_node-metrics${TS_EXT}" + + - name: Build targeted miners + # if: ${{ ( startsWith(github.ref, 'refs/tags/v') ) && ( matrix.builds.miner_cpu_targets != '' ) }} + if: ${{ matrix.builds.miner_cpu_targets != '' }} + shell: bash + run: | + ARRAY_TARGETS=( $(echo "${{ matrix.builds.miner_cpu_targets }}" | tr ', ' '\n') ) + for CPU_TARGET in "${ARRAY_TARGETS[@]}"; do + echo "Target CPU ${CPU_TARGET} for miner" + export RUSTFLAGS="-C target-cpu=${CPU_TARGET}" + ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ + --target ${{ matrix.builds.target }} \ + --features "${{ env.BUILD_FEATURES }}" \ + --bin minotari_miner \ + ${{ matrix.builds.flags }} --locked + cp -vf "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/minotari_miner" \ + "${{ env.MTS_SOURCE }}/minotari_miner-${CPU_TARGET}" + done + + - name: Pre/unsigned OSX Artifact upload for Archive + # Debug + if: ${{ false }} + # if: startsWith(runner.os,'macOS') + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}_unsigned-archive-${{ matrix.builds.name }} + path: "${{ env.MTS_SOURCE }}/*" + + - name: Build the macOS pkg + if: startsWith(runner.os,'macOS') + continue-on-error: true + env: + MACOS_KEYCHAIN_PASS: ${{ secrets.MACOS_KEYCHAIN_PASS }} + MACOS_APPLICATION_ID: ${{ secrets.MACOS_APPLICATION_ID }} + MACOS_APPLICATION_CERT: ${{ secrets.MACOS_APPLICATION_CERT }} + MACOS_APPLICATION_PASS: ${{ secrets.MACOS_APPLICATION_PASS }} + MACOS_INSTALLER_ID: ${{ secrets.MACOS_INSTALLER_ID }} + MACOS_INSTALLER_CERT: ${{ secrets.MACOS_INSTALLER_CERT }} + MACOS_INSTALLER_PASS: ${{ secrets.MACOS_INSTALLER_PASS }} + MACOS_NOTARIZE_USERNAME: ${{ secrets.MACOS_NOTARIZE_USERNAME }} + MACOS_NOTARIZE_PASSWORD: ${{ secrets.MACOS_NOTARIZE_PASSWORD }} + MACOS_ASC_PROVIDER: ${{ secrets.MACOS_ASC_PROVIDER }} + run: | + echo $MACOS_APPLICATION_CERT | base64 --decode > application.p12 + echo $MACOS_INSTALLER_CERT | base64 --decode > installer.p12 + security create-keychain -p $MACOS_KEYCHAIN_PASS build.keychain + security default-keychain -s build.keychain + security unlock-keychain -p $MACOS_KEYCHAIN_PASS build.keychain + security import application.p12 -k build.keychain -P $MACOS_APPLICATION_PASS -T /usr/bin/codesign + security import installer.p12 -k build.keychain -P $MACOS_INSTALLER_PASS -T /usr/bin/pkgbuild + security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $MACOS_KEYCHAIN_PASS build.keychain + if [[ "${{ matrix.builds.name }}" == "macos-arm64" ]]; then + echo "Add codesign extra args for ${{ matrix.builds.name }}" + OSX_CODESIGN_EXTRAS="--entitlements ${GITHUB_WORKSPACE}/applications/minotari_node/osx-pkg/entitlements.xml" + fi + cd buildtools + export target_release="target/${{ matrix.builds.target }}/release" + mkdir -p "${{ runner.temp }}/osxpkg" + export 
tarball_parent="${{ runner.temp }}/osxpkg" + export tarball_source="${{ env.TARI_NETWORK_DIR }}" + ./create_osx_install_zip.sh unused nozip + ARRAY_FILES=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + find "${GITHUB_WORKSPACE}/${target_release}" \ + -name "randomx-*" -type f -perm -+x \ + -exec cp -vf {} "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/" \; + FILES_DIAG_UTILS=( \ + $(find "${GITHUB_WORKSPACE}/${target_release}" \ + -name "randomx-*" -type f -perm -+x \ + -exec sh -c 'echo "$(basename "{}")"' \; \ + ) \ + ) + ARRAY_FILES+=(${FILES_DIAG_UTILS[@]}) + for FILE in "${ARRAY_FILES[@]}"; do + codesign --options runtime --force --verify --verbose --timestamp ${OSX_CODESIGN_EXTRAS} \ + --prefix "${{ env.TS_BUNDLE_ID_BASE }}.${{ env.TS_FILENAME }}." \ + --sign "Developer ID Application: $MACOS_APPLICATION_ID" \ + "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" + codesign --verify --deep --display --verbose=4 \ + "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" + cp -vf "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" \ + "${{ env.MTS_SOURCE }}" + done + distDirPKG=$(mktemp -d -t ${{ env.TS_FILENAME }}) + echo "${distDirPKG}" + echo "distDirPKG=${distDirPKG}" >> $GITHUB_ENV + TS_Temp=${{ env.TS_FILENAME }} + TS_BUNDLE_ID_VALID_NAME=$(echo "${TS_Temp//_/-}") + # Strip apple-darwin + TS_ARCH_Temp=${{ matrix.builds.target }} + TS_ARCH=$(echo "${TS_ARCH_Temp//-apple-darwin/}") + pkgbuild --root "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}" \ + --identifier "${{ env.TS_BUNDLE_ID_BASE }}.pkg.${TS_BUNDLE_ID_VALID_NAME}" \ + --version "${TARI_VERSION}" \ + --install-location "/tmp/tari" \ + --scripts "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/scripts" \ + --sign "Developer ID Installer: ${MACOS_INSTALLER_ID}" \ + "${distDirPKG}/${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" + echo -e "Submitting to Apple...\n\n" + xcrun notarytool submit \ + "${distDirPKG}/${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" \ + --apple-id "${MACOS_NOTARIZE_USERNAME}" \ + --password ${MACOS_NOTARIZE_PASSWORD} \ + --team-id ${MACOS_ASC_PROVIDER} \ + --verbose --wait 2>&1 | tee -a notarisation.result + # Maybe use the line with "Processing complete"? 
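+ # Pull the submission id and final status out of the tail of the notarytool output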
requestUUID=$(tail -n5 notarisation.result | grep "id:" | cut -d" " -f 4) + requestSTATUS=$(tail -n5 notarisation.result | grep "\ \ status:" | cut -d" " -f 4) + if [[ ${requestUUID} == "" ]] || [[ ${requestSTATUS} != "Accepted" ]]; then + echo "## status: ${requestSTATUS} - could not notarize - ${requestUUID} - ${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" + exit 1 + else + echo "Notarization RequestUUID: ${requestUUID}" + echo -e "\nStapling package...\ + ${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg\n" + xcrun stapler staple -v \ + "${distDirPKG}/${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" + fi + cd ${distDirPKG} + echo "Compute pkg shasum" + ${SHARUN} "${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" \ + >> "${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg.sha256" + cat "${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg.sha256" + echo "Checksum verification for pkg is " + ${SHARUN} --check "${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg.sha256" + + - name: Artifact upload for macOS pkg + if: startsWith(runner.os,'macOS') + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg + path: "${{ env.distDirPKG }}/${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}*.pkg*" + + - name: Build the Windows installer + if: startsWith(runner.os,'Windows') + shell: cmd + run: | + cd buildtools + "%programfiles(x86)%\Inno Setup 6\iscc.exe" "/DMyAppVersion=${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer" "/DMinotariSuite=${{ env.TS_FILENAME }}" "/DTariSuitePath=${{ github.workspace }}${{ env.TS_DIST }}" "windows_inno_installer.iss" + cd Output + echo "Compute archive shasum" + ${{ env.SHARUN }} "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe" >> "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" + echo "Show the shasum" + cat "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" + echo "Checksum verification for archive is " + ${{ env.SHARUN }} --check "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" + + - name: Artifact upload for Windows installer + if: startsWith(runner.os,'Windows') + uses: actions/upload-artifact@v4 + with: + name: "${{ env.TS_FILENAME }}_windows_installer" + path: "${{ github.workspace }}/buildtools/Output/*" + + - name: Archive and Checksum Binaries + shell: bash + run: | + echo "Archive ${{ env.BINFILE }} into ${{ env.BINFILE }}.zip" + cd "${{ env.MTS_SOURCE }}" + echo "Compute files shasum" + ${SHARUN} * >> "${{ env.BINFILE }}.sha256" + echo "Show the shasum" + cat "${{ env.BINFILE }}.sha256" + echo "Checksum verification for files is " + ${SHARUN} --check "${{ env.BINFILE }}.sha256" + 7z a "${{ env.BINFILE }}.zip" * + echo "Compute archive shasum" + ${SHARUN} "${{ env.BINFILE }}.zip" >> "${{ env.BINFILE }}.zip.sha256" + echo "Show the shasum" + cat "${{ env.BINFILE }}.zip.sha256" + echo "Checksum verification for archive is " + ${SHARUN} --check "${{ env.BINFILE }}.zip.sha256" + + - name: Artifact upload for Archive + uses: actions/upload-artifact@v4 + with: + name: ${{ 
env.TS_FILENAME }}_archive-${{ matrix.builds.name }} + path: "${{ github.workspace }}${{ env.TS_DIST }}/${{ env.BINFILE }}.zip*" + + - name: Prep diag-utils archive for upload + continue-on-error: true + shell: bash + run: | + mkdir -p "${{ env.MTS_SOURCE }}-diag-utils" + cd "${{ env.MTS_SOURCE }}-diag-utils" + # Find RandomX built tools for testing + find "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/" \ + -name "randomx-*${{ env.TS_EXT}}" -type f -perm -+x -exec cp -vf {} . \; + echo "Compute diag utils shasum" + ${SHARUN} * \ + >> "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.sha256" + cat "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.sha256" + echo "Checksum verification for diag utils is " + ${SHARUN} --check "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.sha256" + 7z a "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip" * + echo "Compute diag utils archive shasum" + ${SHARUN} "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip" \ + >> "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip.sha256" + cat "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip.sha256" + echo "Checksum verification for diag utils archive is " + ${SHARUN} --check "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip.sha256" + + - name: Artifact upload for diag-utils + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }} + path: "${{ github.workspace }}${{ env.TS_DIST }}-diag-utils/*.zip*" + + macOS-universal-assemble: + name: macOS universal assemble + needs: builds + + env: + TARI_VERSION: ${{ needs.builds.outputs.TARI_VERSION }} + VSHA_SHORT: ${{ needs.builds.outputs.VSHA_SHORT }} + SHARUN: "shasum --algorithm 256" + + continue-on-error: true + + runs-on: macos-14 + + steps: + - name: Checkout source code + uses: actions/checkout@v4 + + - name: Download macOS binaries + uses: actions/download-artifact@v4 + with: + path: osxuni + # macos - x86_64 / arm64 + pattern: ${{ env.TS_FILENAME }}_archive-macos-* + merge-multiple: true + + - name: Set environment variables for macOS universal + shell: bash + run: | + BINFN="${TS_FILENAME}-${TARI_VERSION}-${VSHA_SHORT}" + echo "BINFN=${BINFN}" >> $GITHUB_ENV + + - name: Install macOS dependencies + shell: bash + run: | + brew install coreutils + + - name: Verify checksums and extract + shell: bash + working-directory: osxuni + run: | + ls -alhtR + ${SHARUN} --ignore-missing --check \ + "${{ env.BINFN }}-macos-x86_64.zip.sha256" + ${SHARUN} --ignore-missing --check \ + "${{ env.BINFN }}-macos-arm64.zip.sha256" + ls -alhtR + mkdir macos-universal macos-x86_64 macos-arm64 + cd macos-x86_64 + 7z e "../${{ env.BINFN }}-macos-x86_64.zip" + cd ../macos-arm64 + 7z e "../${{ env.BINFN }}-macos-arm64.zip" + + - name: Assemble macOS universal binaries + shell: bash + working-directory: osxuni + run: | + ls -alhtR + ARRAY_FILES=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + for FILE in "${ARRAY_FILES[@]}"; do + echo "processing binary file - ${FILE}" + lipo -create -output macos-universal/${FILE} \ + macos-x86_64/${FILE} \ + macos-arm64/${FILE} + done + ARRAY_LIBS=( $(echo ${TS_LIBRARIES} | tr ', ' '\n') ) + for FILE in "${ARRAY_LIBS[@]}"; do + echo "processing library file - lib${FILE}.dylib" + lipo -create -output macos-universal/lib${FILE}.dylib \ + macos-x86_64/lib${FILE}.dylib \ + 
macos-arm64/lib${FILE}.dylib + done + ls -alhtR macos-universal + + - name: Build the macOS universal pkg + continue-on-error: true + env: + MACOS_KEYCHAIN_PASS: ${{ secrets.MACOS_KEYCHAIN_PASS }} + MACOS_APPLICATION_ID: ${{ secrets.MACOS_APPLICATION_ID }} + MACOS_APPLICATION_CERT: ${{ secrets.MACOS_APPLICATION_CERT }} + MACOS_APPLICATION_PASS: ${{ secrets.MACOS_APPLICATION_PASS }} + MACOS_INSTALLER_ID: ${{ secrets.MACOS_INSTALLER_ID }} + MACOS_INSTALLER_CERT: ${{ secrets.MACOS_INSTALLER_CERT }} + MACOS_INSTALLER_PASS: ${{ secrets.MACOS_INSTALLER_PASS }} + MACOS_NOTARIZE_USERNAME: ${{ secrets.MACOS_NOTARIZE_USERNAME }} + MACOS_NOTARIZE_PASSWORD: ${{ secrets.MACOS_NOTARIZE_PASSWORD }} + MACOS_ASC_PROVIDER: ${{ secrets.MACOS_ASC_PROVIDER }} + run: | + echo $MACOS_APPLICATION_CERT | base64 --decode > application.p12 + echo $MACOS_INSTALLER_CERT | base64 --decode > installer.p12 + security create-keychain -p $MACOS_KEYCHAIN_PASS build.keychain + security default-keychain -s build.keychain + security unlock-keychain -p $MACOS_KEYCHAIN_PASS build.keychain + security import application.p12 -k build.keychain -P $MACOS_APPLICATION_PASS -T /usr/bin/codesign + security import installer.p12 -k build.keychain -P $MACOS_INSTALLER_PASS -T /usr/bin/pkgbuild + security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $MACOS_KEYCHAIN_PASS build.keychain + OSX_CODESIGN_EXTRAS="--entitlements ${GITHUB_WORKSPACE}/applications/minotari_node/osx-pkg/entitlements.xml" + cd buildtools + # export target_release="target/${{ matrix.builds.target }}/release" + # matrix.builds.target=macos-universal + # matrix.builds.name=macos-universal + export target_release="osxuni/macos-universal" + mkdir -p "${{ runner.temp }}/osxpkg" + export tarball_parent="${{ runner.temp }}/osxpkg" + export tarball_source="${{ env.TARI_NETWORK_DIR }}" + ./create_osx_install_zip.sh unused nozip + ARRAY_FILES=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + for FILE in "${ARRAY_FILES[@]}"; do + codesign --options runtime --force --verify --verbose --timestamp ${OSX_CODESIGN_EXTRAS} \ + --prefix "${{ env.TS_BUNDLE_ID_BASE }}.${{ env.TS_FILENAME }}." 
\ + --sign "Developer ID Application: $MACOS_APPLICATION_ID" \ + "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" + codesign --verify --deep --display --verbose=4 \ + "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" + cp -vf "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" \ + "${{ github.workspace }}/osxuni/macos-universal/" + done + distDirPKG=$(mktemp -d -t ${{ env.TS_FILENAME }}) + echo "${distDirPKG}" + echo "distDirPKG=${distDirPKG}" >> $GITHUB_ENV + TS_Temp=${{ env.TS_FILENAME }} + TS_BUNDLE_ID_VALID_NAME=$(echo "${TS_Temp//_/-}") + TS_ARCH=universal + pkgbuild --root "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}" \ + --identifier "${{ env.TS_BUNDLE_ID_BASE }}.pkg.${TS_BUNDLE_ID_VALID_NAME}" \ + --version "${TARI_VERSION}" \ + --install-location "/tmp/tari" \ + --scripts "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/scripts" \ + --sign "Developer ID Installer: ${MACOS_INSTALLER_ID}" \ + "${distDirPKG}/${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" + echo -e "Submitting to Apple...\n\n" + xcrun notarytool submit \ + "${distDirPKG}/${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" \ + --apple-id "${MACOS_NOTARIZE_USERNAME}" \ + --password ${MACOS_NOTARIZE_PASSWORD} \ + --team-id ${MACOS_ASC_PROVIDER} \ + --verbose --wait 2>&1 | tee -a notarisation.result + # Maybe use line from with "Processing complete"? + requestUUID=$(tail -n5 notarisation.result | grep "id:" | cut -d" " -f 4) + requestSTATUS=$(tail -n5 notarisation.result | grep "\ \ status:" | cut -d" " -f 4) + if [[ ${requestUUID} == "" ]] || [[ ${requestSTATUS} != "Accepted" ]]; then + echo "## status: ${requestSTATUS} - could not notarize - ${requestUUID} - ${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" + exit 1 + else + echo "Notarization RequestUUID: ${requestUUID}" + echo -e "\nStapling package...\ + ${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg\n" + xcrun stapler staple -v \ + "${distDirPKG}/${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" + fi + cd ${distDirPKG} + echo "Compute pkg shasum" + ${SHARUN} "${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" \ + >> "${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg.sha256" + cat "${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg.sha256" + echo "Checksum verification for pkg is " + ${SHARUN} --check "${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg.sha256" + + - name: Artifact upload for macOS universal pkg + if: startsWith(runner.os,'macOS') + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg + path: "${{ env.distDirPKG }}/${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}*.pkg*" + + - name: Archive and Checksum macOS universal Binaries + shell: bash + working-directory: osxuni/macos-universal + run: | + # set -xo pipefail + BINFILE="${BINFN}-macos-universal" + echo "BINFILE=${BINFILE}" >> $GITHUB_ENV + echo "Archive ${BINFILE} into ${BINFILE}.zip" + echo "Compute files shasum into ${BINFILE}.sha256" + ${SHARUN} * >> "${BINFILE}.sha256" + echo "Show the shasum" + cat "${BINFILE}.sha256" + echo "Checksum verification for files is " + ${SHARUN} --check "${BINFILE}.sha256" + 7z a "${BINFILE}.zip" * + echo "Compute archive shasum into ${BINFILE}.zip.sha256" + ${SHARUN} "${BINFILE}.zip" >> "${BINFILE}.zip.sha256" + echo "Show the 
shasum from ${BINFILE}.zip.sha256" + cat "${BINFILE}.zip.sha256" + echo "Checksum verification archive is " + ${SHARUN} --check "${BINFILE}.zip.sha256" + + - name: Artifact upload for Archive + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}_archive-macos-universal + path: "${{ github.workspace }}/osxuni/macos-universal/${{ env.BINFILE }}.zip*" + + create-release: + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + + runs-on: ubuntu-latest + needs: builds + + env: + TARI_NETWORK_DIR: ${{ needs.builds.outputs.TARI_NETWORK_DIR }} + TARI_VERSION: ${{ needs.builds.outputs.TARI_VERSION }} + + permissions: + contents: write + + steps: + - name: Download binaries + uses: actions/download-artifact@v4 + with: + path: ${{ env.TS_FILENAME }} + pattern: "${{ env.TS_FILENAME }}*" + merge-multiple: true + + - name: Verify checksums and Prep Uploads + shell: bash + working-directory: ${{ env.TS_FILENAME }} + run: | + # set -xo pipefail + sudo apt-get update + sudo apt-get --no-install-recommends --assume-yes install dos2unix + ls -alhtR + if [ -f "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" ] ; then + rm -fv "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + fi + # Merge all sha256 files into one + find . -name "*.sha256" -type f -print | xargs cat >> "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + dos2unix --quiet "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + cat "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + sha256sum --ignore-missing --check "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + ls -alhtR + + - name: Create release + uses: ncipollo/release-action@v1 + with: + artifacts: "${{ env.TS_FILENAME }}*/**/*" + token: ${{ secrets.GITHUB_TOKEN }} + prerelease: true + draft: true + allowUpdates: true + updateOnlyUnreleased: true + replacesArtifacts: true + + - name: Sync assets to S3 + continue-on-error: true + if: ${{ env.AWS_SECRET_ACCESS_KEY != '' && matrix.builds.runs-on != 'self-hosted' }} + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3CMD: "cp" + S3OPTIONS: '--recursive --exclude "*" --include "*.sha256*" --include "*.zip*" --include "*.pkg*" --include "*installer.exe*"' + shell: bash + working-directory: ${{ env.TS_FILENAME }} + run: | + echo "Upload processing ..." + ls -alhtR + echo "Clean up" + # Bash check if file with wildcards, does not work as expected + # if [ -f ${{ env.TS_FILENAME }}*diag-utils* ] ; then + if ls ${{ env.TS_FILENAME }}*diag-utils* > /dev/null 2>&1 ; then + rm -fv ${{ env.TS_FILENAME }}*diag-utils* + fi + echo "Folder setup" + if ls ${{ env.TS_FILENAME }}*linux* > /dev/null 2>&1 ; then + mkdir -p "linux/${{ env.TARI_NETWORK_DIR }}/" + mv -v ${{ env.TS_FILENAME }}*linux* "linux/${{ env.TARI_NETWORK_DIR }}/" + fi + if ls ${{ env.TS_FILENAME }}*macos* > /dev/null 2>&1 ; then + mkdir -p "osx/${{ env.TARI_NETWORK_DIR }}/" + mv -v ${{ env.TS_FILENAME }}*macos* "osx/${{ env.TARI_NETWORK_DIR }}/" + fi + if ls ${{ env.TS_FILENAME }}*windows* > /dev/null 2>&1 ; then + mkdir -p "windows/${{ env.TARI_NETWORK_DIR }}/" + mv -v ${{ env.TS_FILENAME }}*windows* "windows/${{ env.TARI_NETWORK_DIR }}/" + fi + ls -alhtR + aws --version + echo "ls current" + aws s3 ls --region ${{ secrets.AWS_REGION }} \ + s3://${{ secrets.AWS_S3_BUCKET }}/current/ + echo "Upload current" + aws s3 ${{ env.S3CMD }} --region ${{ secrets.AWS_REGION }} \ + . 
\ + s3://${{ secrets.AWS_S3_BUCKET }}/current/ \ + ${{ env.S3OPTIONS }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..70b3fc8 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,250 @@ +--- +name: CI + +'on': + workflow_dispatch: + pull_request: + types: + - opened + - reopened + - synchronize + merge_group: + +env: + toolchain: nightly-2024-03-01 + CARGO_HTTP_MULTIPLEXING: false + CARGO_TERM_COLOR: always + CARGO_UNSTABLE_SPARSE_REGISTRY: true + CARGO_INCREMENTAL: 0 + PROTOC: protoc + TERM: unknown + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + clippy: + name: clippy + runs-on: [ubuntu-20.04] + steps: + - name: checkout + uses: actions/checkout@v4 + - name: toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.toolchain }} + components: clippy, rustfmt + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + - name: caching (nightly) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. + # Use a "small" suffix to use the build caches where possible, but build caches won't use this + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-small + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-small + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + - name: cargo format + run: cargo fmt --all -- --check + - name: Install cargo-lints + run: cargo install cargo-lints + - name: Clippy check (with lints) + run: cargo lints clippy --all-targets --all-features + machete: + # Checks for unused dependencies. + name: machete + runs-on: [ubuntu-20.04] + steps: + - name: checkout + uses: actions/checkout@v4 + - name: toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.toolchain }} + components: clippy, rustfmt + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + - name: caching (machete) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. 
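+ # Use a "small" suffix to use the build caches where possible, but build caches won't use this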
uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-small + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-small + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + - name: cargo machete + run: | + cargo install cargo-machete + cargo machete + build-stable: + # Runs cargo check with stable toolchain to determine whether the codebase is likely to build + # on stable Rust. + name: cargo check with stable + runs-on: [self-hosted, ubuntu-high-cpu] + steps: + - name: checkout + uses: actions/checkout@v4 + - name: caching (stable) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. + # This job runs on self-hosted, so use local-cache instead. + uses: maxnowack/local-cache@v2 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-stable-${{ hashFiles('**/Cargo.lock') }}-small + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-stable-${{ hashFiles('**/Cargo.lock') }}-small + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-stable-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-stable + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }} + - name: Override rust-toolchain.toml by removing it + run: rm -f rust-toolchain.toml + - name: toolchain + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + - name: rustup show + run: rustup show + - name: cargo check + run: cargo check --release --all-targets --workspace --exclude tari_integration_tests --locked + - name: cargo check wallet ffi separately + run: cargo check --release --package minotari_wallet_ffi --locked + - name: cargo check chat ffi separately + run: cargo check --release --package minotari_chat_ffi --locked + licenses: + name: file licenses + runs-on: [ubuntu-20.04] + steps: + - name: checkout + uses: actions/checkout@v4 + - name: install ripgrep + run: | + wget https://github.com/BurntSushi/ripgrep/releases/download/13.0.0/ripgrep_13.0.0_amd64.deb + sudo dpkg -i ripgrep_13.0.0_amd64.deb + rg --version || exit 1 + - name: run the license check + run: ./scripts/file_license_check.sh + test: + name: test + runs-on: [self-hosted, ubuntu-high-cpu] + permissions: + checks: write + pull-requests: write + strategy: + matrix: + tari_target_network: [ + { target: "testnet", network: "esmeralda" }, + { target: "nextnet", network: "nextnet" }, + { target: "mainnet", network: "stagenet" }, + ] + env: + TARI_TARGET_NETWORK: ${{ matrix.tari_target_network.target }} + TARI_NETWORK: ${{ matrix.tari_target_network.network }} + RUST_LOG: debug + steps: + - name: checkout + uses: actions/checkout@v4 + - name: toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ 
env.toolchain }} + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + - name: caching (nightly) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. + # This job runs on self-hosted, so use local-cache instead. + uses: maxnowack/local-cache@v2 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-${{ matrix.tari_target_network.target }} + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-${{ matrix.tari_target_network.target }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + - name: Install cargo-nextest + run: cargo install cargo-nextest --locked --force + - name: cargo test compile + run: cargo test -vv --no-run --locked --all-features --release + - name: cargo test + run: cargo nextest run --all-features --release -E "not package(tari_integration_tests)" --profile ci + - name: upload artifact + uses: actions/upload-artifact@v4 # upload test results as artifact + if: always() + with: + name: test-results-${{ matrix.tari_target_network.target }}.${{ matrix.tari_target_network.network }} + path: ${{ github.workspace }}/target/nextest/ci/junit.xml + + # Allows other workflows to know the PR number + artifacts: + name: pr_2_artifact + runs-on: [ubuntu-20.04] + steps: + - name: Save the PR number in an artifact + shell: bash + env: + PR_NUM: ${{ github.event.number }} + run: echo $PR_NUM > pr_num.txt + + - name: Upload the PR number + uses: actions/upload-artifact@v4 + with: + name: pr_num + path: ./pr_num.txt + + # needed for test results + event_file: + name: "Upload Event File for Test Results" + runs-on: ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v4 + with: + name: Event File + path: ${{ github.event_path }} diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml new file mode 100644 index 0000000..78e4b2f --- /dev/null +++ b/.github/workflows/coverage.yml @@ -0,0 +1,48 @@ +--- +# Notes for this action: +# Restoring caches is largely useless, since the compiler flags are only useful for code coverage runs. +# This workflow runs on self-hosted runners, which have local caching solutions. 
name: Source Coverage + +'on': + push: + branches: + - development + - ci-coverage-* + +env: + toolchain: nightly-2024-03-01 + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + coverage: + name: test and generate coverage + runs-on: [ self-hosted, ubuntu-high-mem ] + steps: + - name: checkout source code + uses: actions/checkout@v4 + + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + + - uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.toolchain }} + + - name: run tests with coverage + # Prepare the coverage data, even if some tests fail + continue-on-error: true + run: bash -c ./scripts/source_coverage.sh + + - name: Coveralls upload + uses: coverallsapp/github-action@v2 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + format: lcov + file: lcov.info + diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml new file mode 100644 index 0000000..921ab95 --- /dev/null +++ b/.github/workflows/integration_tests.yml @@ -0,0 +1,209 @@ +--- +name: Integration tests + +"on": + pull_request: + types: + - opened + - reopened + - synchronize + merge_group: + schedule: + - cron: '0 2 * * *' # daily @ 02h00 (non-critical) + - cron: '0 12 * * 6' # weekly - Saturday @ noon (long-running) + workflow_dispatch: + inputs: + ci_bins: + type: boolean + default: true + description: 'run ci on binaries' + ci_ffi: + type: boolean + default: true + description: 'run ci on ffi' + ci_profile: + default: ci + description: 'ci profile to run' + type: string + +env: + toolchain: nightly-2024-03-01 + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + # cancel-in-progress: true + +jobs: + base_layer: + name: Cucumber tests / Base Layer + runs-on: [self-hosted, ubuntu-high-cpu] + steps: + - name: checkout + uses: actions/checkout@v4 + + - name: Envs setup + id: envs_setup + shell: bash + run: | + if [ "${{ github.event_name }}" == "schedule" ] ; then + echo "CI_FFI=false" >> $GITHUB_ENV + if [ "${{ github.event.schedule }}" == "0 2 * * *" ] ; then + echo "CI_PROFILE=(not @long-running)" >> $GITHUB_ENV + elif [ "${{ github.event.schedule }}" == "0 12 * * 6" ] ; then + echo "CI_PROFILE=@long-running" >> $GITHUB_ENV + fi + else + echo "CI ..." + echo "CI_PROFILE=@critical and (not @long-running)" >> $GITHUB_ENV + CI_BINS=${{ inputs.ci_bins }} + echo "Run binary - ${CI_BINS}" + echo "CI_BINS=${CI_BINS:-true}" >> $GITHUB_ENV + fi + + - name: Setup rust toolchain + uses: dtolnay/rust-toolchain@master + with: + components: rustfmt, clippy + toolchain: ${{ env.toolchain }} + + - name: Install ubuntu dependencies + shell: bash + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + + - name: caching (nightly) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. + # This job runs on self-hosted, so use local-cache instead. 
+ uses: maxnowack/local-cache@v2 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + + - name: cargo test compile + run: cargo test --no-run --locked --all-features --release ${{ env.TARGET_BINS }} + + - name: Run ${{ env.CI_PROFILE }} integration tests for binaries + if: ${{ env.CI_BINS == 'true' }} + timeout-minutes: 90 + run: | + cargo test \ + --test cucumber \ + -v \ + --all-features \ + --release \ + --package tari_integration_tests \ + -- -t "${{ env.CI_PROFILE }} and (not @wallet-ffi) and (not @chat-ffi) and (not @broken)" \ + -c 5 \ + --retry 2 + + - name: upload artifact + uses: actions/upload-artifact@v4 # upload test results as artifact + if: always() + with: + name: junit-cucumber + path: ${{ github.workspace }}/integration_tests/cucumber-output-junit.xml + + ffi: + name: Cucumber tests / FFI + runs-on: [self-hosted, ubuntu-high-cpu] + steps: + - name: checkout + uses: actions/checkout@v4 + + - name: Envs setup + id: envs_setup + shell: bash + run: | + if [ "${{ github.event_name }}" == "schedule" ] ; then + echo "CI_FFI=false" >> $GITHUB_ENV + if [ "${{ github.event.schedule }}" == "0 2 * * *" ] ; then + echo "CI_PROFILE=(not @long-running)" >> $GITHUB_ENV + elif [ "${{ github.event.schedule }}" == "0 12 * * 6" ] ; then + echo "CI_PROFILE=@long-running" >> $GITHUB_ENV + fi + else + echo "CI ..." + echo "CI_PROFILE=@critical and (not @long-running)" >> $GITHUB_ENV + CI_FFI=${{ inputs.ci_ffi }} + echo "Run FFI - ${CI_FFI}" + echo "CI_FFI=${CI_FFI:-true}" >> $GITHUB_ENV + fi + + - name: Setup rust toolchain + if: ${{ env.CI_FFI == 'true' }} + uses: dtolnay/rust-toolchain@master + with: + components: rustfmt, clippy + toolchain: ${{ env.toolchain }} + + - name: Install ubuntu dependencies + if: ${{ env.CI_FFI == 'true' }} + shell: bash + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + + - name: Cache cargo files and outputs + if: ${{ env.CI_FFI == 'true' }} + # Don't use rust-cache. + # This job runs on self-hosted, so use local-cache instead. 
uses: maxnowack/local-cache@v2 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + + - name: cargo test compile + if: ${{ env.CI_FFI == 'true' }} + run: cargo test --no-run --locked --all-features --release ${{ env.TARGET_BINS }} + + - name: Run ${{ env.CI_PROFILE }} integration tests for ffi + if: ${{ env.CI_FFI == 'true' }} + timeout-minutes: 90 + run: | + cargo test \ + --test cucumber \ + -v \ + --all-features \ + --release \ + --package tari_integration_tests \ + -- -t "(@wallet-ffi or @chat-ffi) and ${{ env.CI_PROFILE }} and (not @broken)" \ + -c 1 \ + --retry 2 + + - name: upload artifact + uses: actions/upload-artifact@v4 # upload test results as artifact + if: always() + with: + name: junit-ffi-cucumber + path: ${{ github.workspace }}/integration_tests/cucumber-output-junit.xml + + # needed for test results + event_file: + name: "Upload Event File for Test Results" + runs-on: ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v4 + with: + name: Event File + path: ${{ github.event_path }} diff --git a/.github/workflows/pr_title.yml b/.github/workflows/pr_title.yml new file mode 100644 index 0000000..ae85f6b --- /dev/null +++ b/.github/workflows/pr_title.yml @@ -0,0 +1,30 @@ +--- +# Checks that PR titles conform to Conventional Commits +# See https://www.conventionalcommits.org/en/v1.0.0/ for more information +name: PR + +'on': + pull_request: + types: + - opened + - reopened + - edited + - synchronize + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + check-title: + runs-on: ubuntu-latest + steps: + - name: install + run: | + npm install -g @commitlint/cli @commitlint/config-conventional + echo "module.exports = {extends: ['@commitlint/config-conventional']}" > commitlint.config.js + - name: lint + run: | + echo "$PR_TITLE" | commitlint + env: + PR_TITLE: ${{github.event.pull_request.title}} diff --git a/.gitignore b/.gitignore index 6985cf1..75ae65b 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,6 @@ Cargo.lock # MSVC Windows builds of rustc generate these, which store debugging information *.pdb + +# Ignore OS files +.DS_Store diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 0000000..da1acdf --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,3 @@ +# CI/CD-related files require a review by the devops team +.github/**/* @tari-project/devops +CODEOWNERS @tari-project/devops diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 0000000..8fcbb43 --- /dev/null +++ b/clippy.toml @@ -0,0 +1,4 @@ +cognitive-complexity-threshold = 15 +too-many-arguments-threshold = 12 +# Set from the default 200 to the size of a RistrettoPublicKey +enum-variant-size-threshold = 216 diff --git a/lints.toml b/lints.toml new file mode 100644 index 0000000..cb44026 --- /dev/null +++ b/lints.toml @@ -0,0 +1,73 @@ +deny = [ + # Prevent spelling mistakes in lints + 'unknown_lints', + # clippy groups: + 'clippy::correctness', + # All clippy allows must have a reason + # TODO: enable lint-reasons feature + # 'clippy::allow_attributes_without_reason', + # Docs + # 'missing_docs', + # 'clippy::missing_errors_doc', + # 
'clippy::missing_safety_doc', + # 'clippy::missing_panics_doc', + + # Common mistakes + 'clippy::await_holding_lock', + 'unused_variables', + 'unused_imports', + 'dead_code', + 'unused_extern_crates', + 'unused_must_use', + 'unreachable_patterns', + 'clippy::cloned_instead_of_copied', + 'clippy::create_dir', + 'clippy::else_if_without_else', + 'clippy::inline_always', + 'let_underscore_drop', + 'clippy::let_unit_value', + 'clippy::match_on_vec_items', + 'clippy::match_wild_err_arm', + # In crypto code, it is fairly common to have similar names e.g. `owner_pk` and `owner_k` + # 'clippy::similar_names', + 'clippy::needless_borrow', + # style + 'clippy::style', + 'clippy::explicit_into_iter_loop', + 'clippy::explicit_iter_loop', + 'clippy::if_not_else', + 'clippy::match_bool', + # Although generally good practice, this is disabled because the code becomes worse + # or needs mod-level exclusion in these cases: + # tauri commands, blockchain async db needs owned copy, &Arc, Copy types, T: AsRef<..>, T: ToString + # 'clippy::needless_pass_by_value', + 'clippy::range_plus_one', + 'clippy::struct_excessive_bools', + 'clippy::too_many_lines', + 'clippy::trivially_copy_pass_by_ref', + # Highlights potential casting mistakes + 'clippy::cast_lossless', + 'clippy::cast_possible_truncation', + 'clippy::cast_possible_wrap', + # Precision loss is almost always completely fine and is only useful as a sanity check. + # https://rust-lang.github.io/rust-clippy/master/index.html#cast_precision_loss + # 'clippy::cast_precision_loss', + # 'clippy::cast_sign_loss', + 'clippy::unnecessary_to_owned', + 'clippy::nonminimal_bool', + 'clippy::needless_question_mark', + # dbg! macro is intended as a debugging tool. It should not be in version control. + 'clippy::dbg_macro' +] + +allow = [ + # allow Default::default calls + 'clippy::default_trait_access', + # Generally when developers fix this, it can lead to leaky abstractions or worse, so + # too many arguments is generally the lesser of two evils + 'clippy::too_many_arguments', + # `assert!(!foo(bar))` is misread the majority of the time, while `assert_eq!(foo(bar), false)` is crystal clear + 'clippy::bool-assert-comparison', + 'clippy::blocks_in_conditions', +]
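+ +# These lint sets are applied in CI via cargo-lints (see .github/workflows/ci.yml): +# cargo lints clippy --all-targets --all-features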