ci(pre-commit): Bump hooks versions and fix leftover files (#10680)
* update(hooks): Bump pre-commit hooks versions
* fix(formatting): Fix python script formatting
* fix(formatting): Fix leftover files on protected folders
parent 92dd841ffc
commit 76d1f9e643

28 changed files with 363 additions and 353 deletions
.github/ISSUE_TEMPLATE/Issue-report.yml (vendored): 24 changes

@@ -5,7 +5,7 @@ body:
   - type: markdown
     attributes:
       value: |
-        * Before reporting a new issue please check and search in [List of existing issues](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue)
+        * Before reporting a new issue please check and search in [List of existing issues](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue)
         * Please check [Online Documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/index.html)
         * Take a look on [Troubleshooting guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/troubleshooting.html)
         * If still experiencing the issue, please provide as many details as possible below about your hardware, computer setup and code.

@@ -24,7 +24,7 @@ body:
       description: What development board or other hardware is the chip attached to?
       placeholder: ex. DevKitC, plain module on breadboard, etc. If your hardware is custom or unusual, please attach a photo.
     validations:
-      required: true
+      required: true
   - type: textarea
     id: other-hw
     attributes:

@@ -60,7 +60,7 @@ body:
         - v2.0.8
         - v2.0.7
         - v2.0.6
-        - v2.0.5
+        - v2.0.5
         - v2.0.4
         - v2.0.3
         - v2.0.2

@@ -77,7 +77,7 @@ body:
       description: What IDE are you using?
       placeholder: eg. Arduino IDE, PlatformIO, Sloeber...
     validations:
-      required: true
+      required: true
   - type: input
     id: os
     attributes:

@@ -95,13 +95,13 @@ body:
     validations:
       required: true
   - type: dropdown
-    id: PSRAM
+    id: PSRAM
     attributes:
       label: PSRAM enabled
       description: Is PSRAM enabled?
       options:
-        - 'yes'
-        - 'no'
+        - "yes"
+        - "no"
     validations:
       required: true
   - type: input

@@ -116,8 +116,8 @@ body:
     id: Description
     attributes:
       label: Description
-      description: Please describe your problem here and expected behaviour
-      placeholder: ex. Can't connect/weird behaviour/wrong function/missing parameter..
+      description: Please describe your problem here and expected behavior
+      placeholder: ex. Can't connect/weird behavior/wrong function/missing parameter..
     validations:
       required: true
   - type: textarea

@@ -128,7 +128,7 @@ body:
       placeholder: ex. Related part of the code to replicate the issue
       render: cpp
     validations:
-      required: true
+      required: true
   - type: textarea
     id: Debug
     attributes:

@@ -137,11 +137,11 @@ body:
       placeholder: Enable Core debug level - Debug on tools menu of Arduino IDE, then put the serial output here.
       render: plain
     validations:
-      required: true
+      required: true
   - type: textarea
     id: other-remarks
     attributes:
-      label: Other Steps to Reproduce
+      label: Other Steps to Reproduce
       description: Is there any other information you can think of which will help us reproduce this problem? Any additional info can be added as well.
       placeholder: ex. I also tried on other OS, HW...it works correctly on that setup.
   - type: checkboxes
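A note on the 'yes'/'no' hunk above: prettier only rewrites the single quotes to double quotes, but the quoting itself is load-bearing in this template. YAML 1.1 parsers such as PyYAML read bare yes/no as booleans, so the dropdown options must stay quoted to remain strings. A quick check (assumes PyYAML is installed; not part of this commit):

# Bare yes/no are YAML 1.1 booleans; quoting keeps them strings (requires PyYAML).
import yaml

print(yaml.safe_load("enabled: yes"))    # {'enabled': True}  -- a boolean
print(yaml.safe_load('enabled: "yes"'))  # {'enabled': 'yes'} -- a string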
.github/ISSUE_TEMPLATE/config.yml (vendored): 2 changes

@@ -5,4 +5,4 @@ contact_links:
     about: Community channel for questions and help
   - name: ESP32 Forum - Arduino
     url: https://esp32.com/viewforum.php?f=19
-    about: Official Forum for questions
+    about: Official Forum for questions
.github/scripts/merge_packages.py (vendored): 78 changes

@@ -1,50 +1,58 @@
 #!/usr/bin/env python

 # This script merges two Arduino Board Manager package json files.
 # Usage:
 #   python merge_packages.py package_esp8266com_index.json version/new/package_esp8266com_index.json
 # Written by Ivan Grokhotkov, 2015
 #

 from __future__ import print_function
-#from distutils.version import LooseVersion
+
+# from distutils.version import LooseVersion
 from packaging.version import Version
 import re
 import json
 import sys

+
 def load_package(filename):
-    pkg = json.load(open(filename))['packages'][0]
-    print("Loaded package {0} from {1}".format(pkg['name'], filename), file=sys.stderr)
-    print("{0} platform(s), {1} tools".format(len(pkg['platforms']), len(pkg['tools'])), file=sys.stderr)
+    pkg = json.load(open(filename))["packages"][0]
+    print("Loaded package {0} from {1}".format(pkg["name"], filename), file=sys.stderr)
+    print("{0} platform(s), {1} tools".format(len(pkg["platforms"]), len(pkg["tools"])), file=sys.stderr)
     return pkg

+
 def merge_objects(versions, obj):
     for o in obj:
-        name = o['name'].encode('ascii')
-        ver = o['version'].encode('ascii')
-        if not name in versions:
+        name = o["name"].encode("ascii")
+        ver = o["version"].encode("ascii")
+        if name not in versions:
            print("found new object, {0}".format(name), file=sys.stderr)
            versions[name] = {}
-        if not ver in versions[name]:
+        if ver not in versions[name]:
            print("found new version {0} for object {1}".format(ver, name), file=sys.stderr)
            versions[name][ver] = o
    return versions

-# Normalize ESP release version string (x.x.x) by adding '-rc<MAXINT>' (x.x.x-rc9223372036854775807) to ensure having REL above any RC
-# Dummy approach, functional anyway for current ESP package versioning (unlike NormalizedVersion/LooseVersion/StrictVersion & similar crap)
+
+# Normalize ESP release version string (x.x.x) by adding '-rc<MAXINT>' (x.x.x-rc9223372036854775807)
+# to ensure having REL above any RC
+# Dummy approach, functional anyway for current ESP package versioning
+# (unlike NormalizedVersion/LooseVersion/StrictVersion & similar crap)
 def pkgVersionNormalized(versionString):
-
     verStr = str(versionString)
-    verParts = re.split('\.|-rc|-alpha', verStr, flags=re.IGNORECASE)
+    verParts = re.split(r"\.|-rc|-alpha", verStr, flags=re.IGNORECASE)

     if len(verParts) == 3:
-        if (sys.version_info > (3, 0)): # Python 3
-            verStr = str(versionString) + '-rc' + str(sys.maxsize)
-        else: # Python 2
-            verStr = str(versionString) + '-rc' + str(sys.maxint)
+        if sys.version_info > (3, 0):  # Python 3
+            verStr = str(versionString) + "-rc" + str(sys.maxsize)
+        else:  # Python 2
+            verStr = str(versionString) + "-rc" + str(sys.maxint)

     elif len(verParts) != 4:
         print("pkgVersionNormalized WARNING: unexpected version format: {0})".format(verStr), file=sys.stderr)

     return verStr

@@ -54,31 +62,37 @@ def main(args):
         return 1

     tools = {}
-    platforms = {}
+    platforms = {}
     pkg1 = load_package(args[1])
-    tools = merge_objects(tools, pkg1['tools']);
-    platforms = merge_objects(platforms, pkg1['platforms']);
+    tools = merge_objects(tools, pkg1["tools"])
+    platforms = merge_objects(platforms, pkg1["platforms"])
     pkg2 = load_package(args[2])
-    tools = merge_objects(tools, pkg2['tools']);
-    platforms = merge_objects(platforms, pkg2['platforms']);
+    tools = merge_objects(tools, pkg2["tools"])
+    platforms = merge_objects(platforms, pkg2["platforms"])

-    pkg1['tools'] = []
-    pkg1['platforms'] = []
+    pkg1["tools"] = []
+    pkg1["platforms"] = []

     for name in tools:
         for version in tools[name]:
             print("Adding tool {0}-{1}".format(name, version), file=sys.stderr)
-            pkg1['tools'].append(tools[name][version])
+            pkg1["tools"].append(tools[name][version])

     for name in platforms:
         for version in platforms[name]:
             print("Adding platform {0}-{1}".format(name, version), file=sys.stderr)
-            pkg1['platforms'].append(platforms[name][version])
-
-    #pkg1['platforms'] = sorted(pkg1['platforms'], key=lambda k: LooseVersion(pkgVersionNormalized(k['version'])), reverse=True)
-    pkg1['platforms'] = sorted(pkg1['platforms'], key=lambda k: Version(pkgVersionNormalized(k['version'])), reverse=True)
+            pkg1["platforms"].append(platforms[name][version])

-    json.dump({'packages':[pkg1]}, sys.stdout, indent=2)
+    # pkg1["platforms"] = sorted(
+    #     pkg1["platforms"], key=lambda k: LooseVersion(pkgVersionNormalized(k["version"])), reverse=True
+    # )

-if __name__ == '__main__':
+    pkg1["platforms"] = sorted(
+        pkg1["platforms"], key=lambda k: Version(pkgVersionNormalized(k["version"])), reverse=True
+    )
+
+    json.dump({"packages": [pkg1]}, sys.stdout, indent=2)
+
+
+if __name__ == "__main__":
     sys.exit(main(sys.argv))
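Two of the Python fixes above are worth unpacking. The raw string r"\.|-rc|-alpha" silences the invalid-escape-sequence warning newer Pythons raise for '\.', and pkgVersionNormalized suffixes plain x.y.z releases with -rc<sys.maxsize> so that, under packaging.version.Version ordering, a final release always sorts above any real release candidate of the same version. A minimal standalone sketch of that ordering trick (not the script itself; assumes the packaging module is installed):

# Minimal sketch of the normalization trick: suffixing plain x.y.z releases
# with an enormous RC number makes a release outrank any real release
# candidate when compared via packaging.version.Version.
import re
import sys
from packaging.version import Version

def normalize(v):
    # Three parts (x.y.z) means a plain release; tag it as the "last" RC.
    if len(re.split(r"\.|-rc|-alpha", v, flags=re.IGNORECASE)) == 3:
        return v + "-rc" + str(sys.maxsize)
    return v

assert Version(normalize("2.0.0")) > Version(normalize("2.0.0-rc3"))
assert Version(normalize("2.0.1-rc1")) > Version(normalize("2.0.0"))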
.github/workflows/allboards.yml (vendored): 18 changes

@@ -1,6 +1,6 @@
 name: Boards Test - Remote trigger

-# The workflow will run on remote dispath with event-type set to "test-boards"
+# The workflow will run on remote dispatch with event-type set to "test-boards"
 on:
   repository_dispatch:
     types: [test-boards]

@@ -20,8 +20,7 @@ jobs:
           ref: ${{ github.event.client_payload.branch }}

       - name: Get boards fqbns
-        run:
-          bash .github/scripts/find_all_boards.sh
+        run: bash .github/scripts/find_all_boards.sh

   setup-chunks:
     needs: find-boards

@@ -43,8 +42,7 @@ jobs:

       - id: set-test-chunks
         name: Set Chunks
-        run:
-          echo "test-chunks<<EOF" >> $GITHUB_OUTPUT
+        run: echo "test-chunks<<EOF" >> $GITHUB_OUTPUT

           echo "$( jq -nc '${{ needs.find-boards.outputs.fqbns }} | [_nwise( ${{ needs.find-boards.outputs.board-count }}/15 | ceil)]')" >> $GITHUB_OUTPUT

@@ -61,7 +59,7 @@ jobs:

     strategy:
       fail-fast: false
-      matrix:
+      matrix:
         chunk: ${{ fromJSON(needs.setup-chunks.outputs['test-chunks']) }}

     steps:

@@ -71,9 +69,8 @@ jobs:
           ref: ${{ github.event.client_payload.branch }}

       - name: Echo FQBNS to file
-        run:
-          echo "$FQBN" > fqbns.json
-        env:
+        run: echo "$FQBN" > fqbns.json
+        env:
           FQBN: ${{ toJSON(matrix.chunk) }}

       - name: Compile sketch

@@ -88,5 +85,4 @@ jobs:
           enable-warnings-report: false
           cli-compile-flags: |
             - --warnings="all"
-          sketch-paths:
-            "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
+          sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
.github/workflows/boards.yml (vendored): 14 changes

@@ -4,9 +4,9 @@ name: Boards Test
 on:
   pull_request:
     paths:
-      - 'boards.txt'
-      - 'libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino'
-      - '.github/workflows/boards.yml'
+      - "boards.txt"
+      - "libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
+      - ".github/workflows/boards.yml"

 env:
   # It's convenient to set variables for values used multiple times in the workflow

@@ -28,8 +28,7 @@ jobs:
         uses: dcarbone/install-jq-action@v1.0.1

       - name: Get board name
-        run:
-          bash .github/scripts/find_new_boards.sh ${{ github.repository }} ${{github.base_ref}}
+        run: bash .github/scripts/find_new_boards.sh ${{ github.repository }} ${{github.base_ref}}

   test-boards:
     needs: find-boards

@@ -72,7 +71,7 @@ jobs:
             ./tools/openocd-esp32
             ./tools/riscv32-*
             ./tools/xtensa-*
-
+
       - name: Compile sketch
         uses: P-R-O-C-H-Y/compile-sketches@main
         with:

@@ -85,6 +84,5 @@ jobs:
           cli-compile-flags: |
             - --warnings="all"
           exit-on-fail: true
-          sketch-paths:
-            "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
+          sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
           verbose: true
.github/workflows/build_py_tools.yml (vendored): 42 changes

@@ -3,11 +3,11 @@ name: Build Python Tools
 on:
   pull_request:
     paths:
-      - '.github/workflows/build_py_tools.yml'
-      - 'tools/get.py'
-      - 'tools/espota.py'
-      - 'tools/gen_esp32part.py'
-      - 'tools/gen_insights_package.py'
+      - ".github/workflows/build_py_tools.yml"
+      - "tools/get.py"
+      - "tools/espota.py"
+      - "tools/gen_esp32part.py"
+      - "tools/gen_insights_package.py"

 jobs:
   find-changed-tools:

@@ -33,8 +33,8 @@ jobs:
         uses: tj-actions/changed-files@v41
         id: verify-changed-files
         with:
-          fetch_depth: '2'
-          since_last_remote_commit: 'true'
+          fetch_depth: "2"
+          since_last_remote_commit: "true"
           files: |
             tools/get.py
             tools/espota.py

@@ -57,20 +57,20 @@ jobs:
       matrix:
         os: [windows-latest, macos-latest, ubuntu-20.04, ARM]
         include:
-          - os: windows-latest
-            TARGET: win64
-            EXTEN: .exe
-            SEPARATOR: ';'
-          - os: macos-latest
-            TARGET: macos
-            SEPARATOR: ':'
-          - os: ubuntu-20.04
-            TARGET: linux-amd64
-            SEPARATOR: ':'
-          - os: ARM
-            CONTAINER: python:3.8-bullseye
-            TARGET: arm
-            SEPARATOR: ':'
+          - os: windows-latest
+            TARGET: win64
+            EXTEN: .exe
+            SEPARATOR: ";"
+          - os: macos-latest
+            TARGET: macos
+            SEPARATOR: ":"
+          - os: ubuntu-20.04
+            TARGET: linux-amd64
+            SEPARATOR: ":"
+          - os: ARM
+            CONTAINER: python:3.8-bullseye
+            TARGET: arm
+            SEPARATOR: ":"
     container: ${{ matrix.CONTAINER }} # use python container on ARM
     env:
       DISTPATH: pytools-${{ matrix.TARGET }}
.github/workflows/dangerjs.yml (vendored): 20 changes

@@ -11,14 +11,14 @@ jobs:
   pull-request-style-linter:
     runs-on: ubuntu-latest
     steps:
-    - name: Check out PR head
-      uses: actions/checkout@v4
-      with:
-        ref: ${{ github.event.pull_request.head.sha }}
+      - name: Check out PR head
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}

-    - name: DangerJS pull request linter
-      uses: espressif/shared-github-dangerjs@v1
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        rule-max-commits: 'false'
-        commit-messages-min-summary-length: '10'
+      - name: DangerJS pull request linter
+        uses: espressif/shared-github-dangerjs@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          rule-max-commits: "false"
+          commit-messages-min-summary-length: "10"
.github/workflows/docs_build.yml (vendored): 57 changes

@@ -3,18 +3,17 @@ name: Documentation Build and Deploy CI
 on:
   push:
     branches:
-    - master
-    - release/v2.x
+      - master
+      - release/v2.x
     paths:
-    - 'docs/**'
-    - '.github/workflows/docs_build.yml'
+      - "docs/**"
+      - ".github/workflows/docs_build.yml"
   pull_request:
     paths:
-    - 'docs/**'
-    - '.github/workflows/docs_build.yml'
+      - "docs/**"
+      - ".github/workflows/docs_build.yml"

 jobs:
-
   build-docs:
     name: Build ESP-Docs
     runs-on: ubuntu-22.04

@@ -22,25 +21,25 @@ jobs:
       run:
         shell: bash
     steps:
-    - uses: actions/checkout@v4
-      with:
-        submodules: true
-    - uses: actions/setup-python@v5
-      with:
-        cache-dependency-path: docs/requirements.txt
-        cache: 'pip'
-        python-version: '3.10'
-    - name: Build
-      run: |
-        sudo apt update
-        sudo apt install python3-pip python3-setuptools
-        # GitHub CI installs pip3 and setuptools outside the path.
-        # Update the path to include them and run.
-        cd ./docs
-        PATH=/home/runner/.local/bin:$PATH pip3 install -r requirements.txt --prefer-binary
-        PATH=/home/runner/.local/bin:$PATH SPHINXOPTS="-W" build-docs -l en
-    - name: Archive Docs
-      uses: actions/upload-artifact@v4
-      with:
-        name: docs
-        path: docs
+      - uses: actions/checkout@v4
+        with:
+          submodules: true
+      - uses: actions/setup-python@v5
+        with:
+          cache-dependency-path: docs/requirements.txt
+          cache: "pip"
+          python-version: "3.10"
+      - name: Build
+        run: |
+          sudo apt update
+          sudo apt install python3-pip python3-setuptools
+          # GitHub CI installs pip3 and setuptools outside the path.
+          # Update the path to include them and run.
+          cd ./docs
+          PATH=/home/runner/.local/bin:$PATH pip3 install -r requirements.txt --prefer-binary
+          PATH=/home/runner/.local/bin:$PATH SPHINXOPTS="-W" build-docs -l en
+      - name: Archive Docs
+        uses: actions/upload-artifact@v4
+        with:
+          name: docs
+          path: docs
.github/workflows/docs_deploy.yml (vendored): 80 changes

@@ -7,11 +7,11 @@ on:
       - completed
   push:
     branches:
-    - release/v2.x
-    - master
+      - release/v2.x
+      - master
     paths:
-    - 'docs/**'
-    - '.github/workflows/docs_deploy.yml'
+      - "docs/**"
+      - ".github/workflows/docs_deploy.yml"

 jobs:
   deploy-prod-docs:

@@ -21,39 +21,39 @@ jobs:
       run:
         shell: bash
     steps:
-    - name: Check if release workflow is successful
-      if: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.conclusion != 'success' }}
-      run: |
-        echo "Release workflow failed. Exiting..."
-        exit 1
-    - uses: actions/checkout@v4
-      with:
-        submodules: true
-    - uses: actions/setup-python@v5
-      with:
-        cache-dependency-path: docs/requirements.txt
-        cache: 'pip'
-        python-version: '3.10'
-    - name: Deploy Documentation
-      env:
-        # Deploy to production server
-        # DOCS_BUILD_DIR: "./docs/_build/"
-        DOCS_DEPLOY_PRIVATEKEY: ${{ secrets.DOCS_KEY }}
-        DOCS_DEPLOY_PATH: ${{ secrets.DOCS_PATH }}
-        DOCS_DEPLOY_SERVER: ${{ secrets.DOCS_SERVER }}
-        DOCS_DEPLOY_SERVER_USER: ${{ secrets.DOCS_USER }}
-        DOCS_DEPLOY_URL_BASE: ${{ secrets.DOCS_URL }}
-      run: |
-        sudo apt update
-        sudo apt install python3-pip python3-setuptools
-        source ./docs/utils.sh
-        add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER
-        export GIT_VER=$(git describe --always)
-        echo "PIP install requirements..."
-        pip3 install --user -r ./docs/requirements.txt
-        echo "Building the Docs..."
-        cd ./docs && build-docs -l en
-        echo "Deploy the Docs..."
-        export DOCS_BUILD_DIR=$GITHUB_WORKSPACE/docs/
-        cd $GITHUB_WORKSPACE/docs
-        deploy-docs
+      - name: Check if release workflow is successful
+        if: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.conclusion != 'success' }}
+        run: |
+          echo "Release workflow failed. Exiting..."
+          exit 1
+      - uses: actions/checkout@v4
+        with:
+          submodules: true
+      - uses: actions/setup-python@v5
+        with:
+          cache-dependency-path: docs/requirements.txt
+          cache: "pip"
+          python-version: "3.10"
+      - name: Deploy Documentation
+        env:
+          # Deploy to production server
+          # DOCS_BUILD_DIR: "./docs/_build/"
+          DOCS_DEPLOY_PRIVATEKEY: ${{ secrets.DOCS_KEY }}
+          DOCS_DEPLOY_PATH: ${{ secrets.DOCS_PATH }}
+          DOCS_DEPLOY_SERVER: ${{ secrets.DOCS_SERVER }}
+          DOCS_DEPLOY_SERVER_USER: ${{ secrets.DOCS_USER }}
+          DOCS_DEPLOY_URL_BASE: ${{ secrets.DOCS_URL }}
+        run: |
+          sudo apt update
+          sudo apt install python3-pip python3-setuptools
+          source ./docs/utils.sh
+          add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER
+          export GIT_VER=$(git describe --always)
+          echo "PIP install requirements..."
+          pip3 install --user -r ./docs/requirements.txt
+          echo "Building the Docs..."
+          cd ./docs && build-docs -l en
+          echo "Deploy the Docs..."
+          export DOCS_BUILD_DIR=$GITHUB_WORKSPACE/docs/
+          cd $GITHUB_WORKSPACE/docs
+          deploy-docs
.github/workflows/gh-pages.yml (vendored): 21 changes

@@ -3,21 +3,20 @@ name: GitHub Pages CI
 on:
   push:
     branches:
-    - master
-    - pages
+      - master
+      - pages
     paths:
-    - 'README.md'
-    - '.github/scripts/on-pages.sh'
-    - '.github/workflows/gh-pages.yml'
+      - "README.md"
+      - ".github/scripts/on-pages.sh"
+      - ".github/workflows/gh-pages.yml"

 jobs:
-
   build-pages:
     name: Build GitHub Pages
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v4
-    - name: Copy Files
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      run: bash ./.github/scripts/on-pages.sh
+      - uses: actions/checkout@v4
+      - name: Copy Files
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: bash ./.github/scripts/on-pages.sh
.github/workflows/lib.yml (vendored): 14 changes

@@ -7,7 +7,7 @@ on:

   # Schedule weekly builds on every Sunday at 4 am
   schedule:
-    - cron: '0 4 * * SUN'
+    - cron: "0 4 * * SUN"

 concurrency:
   group: libs-${{ github.event.pull_request.number || github.ref }}

@@ -27,7 +27,6 @@ jobs:
       contains(github.event.pull_request.labels.*.name, 'lib_test') ||
       (github.event_name == 'schedule' && github.repository == 'espressif/arduino-esp32')
     runs-on: ubuntu-latest
-
     env:
       REPOSITORY: |
         - source-path: '.'

@@ -60,7 +59,6 @@ jobs:
           - target: esp32p4
             fqbn: espressif:esp32:esp32p4
-

     steps:
       # This step makes the contents of the repository available to the workflow
       - name: Checkout repository

@@ -88,7 +86,7 @@ jobs:
           path: ${{ env.SKETCHES_REPORTS_PATH }}

   report-to-file:
-    needs: compile-sketch # Wait for the compile job to finish to get the data for the report
+    needs: compile-sketch # Wait for the compile job to finish to get the data for the report
     if: github.event_name == 'schedule' # Only run the job when the workflow is triggered by a schedule
     runs-on: ubuntu-latest
     steps:

@@ -97,11 +95,10 @@ jobs:
         uses: actions/checkout@v4
         with:
           token: ${{ env.GITHUB_TOKEN }}
-          fetch-depth: '0'
+          fetch-depth: "0"

       - name: Switch branch
-        run:
-          git checkout remotes/origin/gh-pages
+        run: git checkout remotes/origin/gh-pages

       # This step is needed to get the size data produced by the compile jobs
       - name: Download sketches reports artifact

@@ -118,8 +115,7 @@ jobs:
           destination-file: ${{ env.RESULT_LIBRARY_TEST_FILE }}

       - name: Append file with action URL
-        run:
-          echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_LIBRARY_TEST_FILE }}
+        run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_LIBRARY_TEST_FILE }}

       - name: Push to github repo
         run: |
.github/workflows/pre-commit.yml (vendored): 2 changes

@@ -37,7 +37,7 @@ jobs:
         uses: actions/setup-python@v5
         with:
           cache-dependency-path: tools/pre-commit/requirements.txt
-          cache: 'pip'
+          cache: "pip"
           python-version: "3.x"

       - name: Get Python version hash
.github/workflows/publishlib.yml (vendored): 2 changes

@@ -47,7 +47,7 @@ jobs:
         uses: juliangruber/read-file-action@v1
         with:
           path: ./artifacts/workflows/pr_num.txt
-
+
       - name: Report results
         uses: P-R-O-C-H-Y/report-size-deltas@libs
         with:
.github/workflows/publishsizes-2.x.yml (vendored): 7 changes

@@ -11,11 +11,11 @@ env:

 jobs:
   sizes-test-results:
-    name: Sizes Comparsion Results
+    name: Sizes Comparison Results
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4 # This step checks out the repository's code at gh-pages branch
+        uses: actions/checkout@v4 # This step checks out the repository's code at gh-pages branch
         with:
           ref: gh-pages

@@ -41,8 +41,7 @@ jobs:
           destination-file: ${{ env.RESULT_SIZES_TEST_FILE }}

       - name: Append file with action URL
-        run:
-          echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_SIZES_TEST_FILE }}
+        run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_SIZES_TEST_FILE }}

       - name: Push to github repo
         run: |
.github/workflows/publishsizes.yml (vendored): 4 changes

@@ -14,7 +14,7 @@ env:

 jobs:
   sizes-test-results:
-    name: Sizes Comparsion Results
+    name: Sizes Comparison Results
     runs-on: ubuntu-latest
     if: |
       github.event.workflow_run.event == 'pull_request' &&

@@ -22,7 +22,7 @@ jobs:

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4 # This step checks out the repository's code at gh-pages branch
+        uses: actions/checkout@v4 # This step checks out the repository's code at gh-pages branch
         with:
           ref: gh-pages
.github/workflows/push.yml (vendored): 227 changes

@@ -4,30 +4,30 @@ on:
   workflow_dispatch:
   push:
     branches:
-    - master
-    - release/*
+      - master
+      - release/*
   pull_request:
     paths:
-      - 'cores/**'
-      - 'libraries/**'
-      - '!libraries/**.md'
-      - '!libraries/**.txt'
-      - '!libraries/**.properties'
-      - '!libraries/**.py'
-      - 'package/**'
-      - 'tools/**.py'
-      - 'platform.txt'
-      - 'programmers.txt'
-      - 'idf_component.yml'
-      - 'Kconfig.projbuild'
-      - 'package.json'
-      - 'CMakeLists.txt'
-      - '.github/workflows/push.yml'
-      - '.github/scripts/**'
-      - '!.github/scripts/find_*'
-      - '!.github/scripts/on-release.sh'
-      - '!.github/scripts/tests_*'
-      - '!.github/scripts/upload_*'
+      - "cores/**"
+      - "libraries/**"
+      - "!libraries/**.md"
+      - "!libraries/**.txt"
+      - "!libraries/**.properties"
+      - "!libraries/**.py"
+      - "package/**"
+      - "tools/**.py"
+      - "platform.txt"
+      - "programmers.txt"
+      - "idf_component.yml"
+      - "Kconfig.projbuild"
+      - "package.json"
+      - "CMakeLists.txt"
+      - ".github/workflows/push.yml"
+      - ".github/scripts/**"
+      - "!.github/scripts/find_*"
+      - "!.github/scripts/on-release.sh"
+      - "!.github/scripts/tests_*"
+      - "!.github/scripts/upload_*"
+      - "variants/esp32/**/*"
+      - "variants/esp32s2/**/*"
+      - "variants/esp32s3/**/*"

@@ -49,8 +49,8 @@ jobs:
     runs-on: ubuntu-latest
     if: ${{ !(github.event_name == 'pull_request' && startsWith(github.head_ref, 'release/')) }}
     steps:
-    - uses: actions/checkout@v4
-    - run: bash ./.github/scripts/check-cmakelists.sh
+      - uses: actions/checkout@v4
+      - run: bash ./.github/scripts/check-cmakelists.sh

   gen-chunks:
     name: Generate chunks

@@ -65,16 +65,16 @@ jobs:
       chunk_count: ${{ steps.set-chunks.outputs.chunk_count }}
       chunks: ${{ steps.set-chunks.outputs.chunks }}
     steps:
-    - name: Checkout repository
-      uses: actions/checkout@v4
-      with:
-        fetch-depth: 2
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 2

-    - name: Get changed files
-      id: changed-files
-      uses: tj-actions/changed-files@v44
-      with:
-        files_yaml: |
+      - name: Get changed files
+        id: changed-files
+        uses: tj-actions/changed-files@v44
+        with:
+          files_yaml: |
           core:
             - '.github/**'
             - '!.github/scripts/install-platformio-esp32.sh'

@@ -115,31 +115,31 @@ jobs:
             - '.github/scripts/install-platformio-esp32.sh'
             - 'tools/platformio-build.py'

-    - name: Set chunks
-      id: set-chunks
-      env:
-        LIB_FILES: ${{ steps.changed-files.outputs.libraries_all_changed_files }}
-        IS_PR: ${{ github.event_name == 'pull_request' }}
-        MAX_CHUNKS: ${{ env.MAX_CHUNKS }}
-        BUILD_PLATFORMIO: ${{ steps.changed-files.outputs.platformio_any_changed == 'true' }}
-        BUILD_IDF: ${{ steps.changed-files.outputs.idf_any_changed == 'true' }}
-        BUILD_LIBRARIES: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
-        BUILD_STATIC_SKETCHES: ${{ steps.changed-files.outputs.static_sketeches_any_changed == 'true' }}
-        FS_CHANGED: ${{ steps.changed-files.outputs.fs_any_changed == 'true' }}
-        NETWORKING_CHANGED: ${{ steps.changed-files.outputs.networking_any_changed == 'true' }}
-        CORE_CHANGED: ${{ steps.changed-files.outputs.core_any_changed == 'true' }}
-        LIB_CHANGED: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
-      run: |
-        bash ./.github/scripts/set_push_chunks.sh
+      - name: Set chunks
+        id: set-chunks
+        env:
+          LIB_FILES: ${{ steps.changed-files.outputs.libraries_all_changed_files }}
+          IS_PR: ${{ github.event_name == 'pull_request' }}
+          MAX_CHUNKS: ${{ env.MAX_CHUNKS }}
+          BUILD_PLATFORMIO: ${{ steps.changed-files.outputs.platformio_any_changed == 'true' }}
+          BUILD_IDF: ${{ steps.changed-files.outputs.idf_any_changed == 'true' }}
+          BUILD_LIBRARIES: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
+          BUILD_STATIC_SKETCHES: ${{ steps.changed-files.outputs.static_sketeches_any_changed == 'true' }}
+          FS_CHANGED: ${{ steps.changed-files.outputs.fs_any_changed == 'true' }}
+          NETWORKING_CHANGED: ${{ steps.changed-files.outputs.networking_any_changed == 'true' }}
+          CORE_CHANGED: ${{ steps.changed-files.outputs.core_any_changed == 'true' }}
+          LIB_CHANGED: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
+        run: |
+          bash ./.github/scripts/set_push_chunks.sh

-    - name: Upload sketches found
-      if: ${{ steps.set-chunks.outputs.build_all == 'false' && steps.set-chunks.outputs.build_libraries == 'true' }}
-      uses: actions/upload-artifact@v4
-      with:
-        name: sketches_found
-        path: sketches_found.txt
-        overwrite: true
-        if-no-files-found: error
+      - name: Upload sketches found
+        if: ${{ steps.set-chunks.outputs.build_all == 'false' && steps.set-chunks.outputs.build_libraries == 'true' }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: sketches_found
+          path: sketches_found.txt
+          overwrite: true
+          if-no-files-found: error

   # Ubuntu
   build-arduino-linux:

@@ -153,45 +153,45 @@ jobs:
         chunk: ${{ fromJson(needs.gen-chunks.outputs.chunks) }}

     steps:
-    - uses: actions/checkout@v4
-    - uses: actions/setup-python@v5
-      with:
-        python-version: '3.x'
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.x"

-    - name: Get libs cache
-      uses: actions/cache@v4
-      with:
-        key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
-        path: |
-          ./tools/dist
-          ./tools/esp32-arduino-libs
-          ./tools/esptool
-          ./tools/mk*
-          ./tools/openocd-esp32
-          ./tools/riscv32-*
-          ./tools/xtensa-*
+      - name: Get libs cache
+        uses: actions/cache@v4
+        with:
+          key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
+          path: |
+            ./tools/dist
+            ./tools/esp32-arduino-libs
+            ./tools/esptool
+            ./tools/mk*
+            ./tools/openocd-esp32
+            ./tools/riscv32-*
+            ./tools/xtensa-*

-    - name: Build all sketches
-      if: ${{ needs.gen-chunks.outputs.build_all == 'true' }}
-      run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ env.MAX_CHUNKS }} 1
+      - name: Build all sketches
+        if: ${{ needs.gen-chunks.outputs.build_all == 'true' }}
+        run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ env.MAX_CHUNKS }} 1

-    - name: Download sketches found
-      if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
-      uses: actions/download-artifact@v4
-      with:
-        name: sketches_found
+      - name: Download sketches found
+        if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
+        uses: actions/download-artifact@v4
+        with:
+          name: sketches_found

-    - name: Build selected sketches
-      if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
-      run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ needs.gen-chunks.outputs.chunk_count }} 1 sketches_found.txt
+      - name: Build selected sketches
+        if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
+        run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ needs.gen-chunks.outputs.chunk_count }} 1 sketches_found.txt

-    #Upload cli compile json as artifact
-    - name: Upload cli compile json
-      uses: actions/upload-artifact@v4
-      with:
-        name: pr_cli_compile_${{ matrix.chunk }}
-        path: cli_compile_${{ matrix.chunk }}.json
-        overwrite: true
+      #Upload cli compile json as artifact
+      - name: Upload cli compile json
+        uses: actions/upload-artifact@v4
+        with:
+          name: pr_cli_compile_${{ matrix.chunk }}
+          path: cli_compile_${{ matrix.chunk }}.json
+          overwrite: true

   # Windows and MacOS
   build-arduino-win-mac:

@@ -205,12 +205,12 @@ jobs:
         os: [windows-latest, macOS-latest]

     steps:
-    - uses: actions/checkout@v4
-    - uses: actions/setup-python@v5
-      with:
-        python-version: '3.x'
-    - name: Build Sketches
-      run: bash ./.github/scripts/on-push.sh
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.x"
+      - name: Build Sketches
+        run: bash ./.github/scripts/on-push.sh

   # # PlatformIO on Windows, Ubuntu and Mac
   # build-platformio:

@@ -226,13 +226,13 @@ jobs:
   #     matrix:
   #       os: [ubuntu-latest, windows-latest, macOS-latest]

-  #   steps:
-  #   - uses: actions/checkout@v4
-  #   - uses: actions/setup-python@v5
-  #     with:
-  #       python-version: '3.x'
-  #   - name: Build Sketches
-  #     run: bash ./.github/scripts/on-push.sh 1 1 #equal and non-zero to trigger PIO
+  #   steps:
+  #     - uses: actions/checkout@v4
+  #     - uses: actions/setup-python@v5
+  #       with:
+  #         python-version: "3.x"
+  #     - name: Build Sketches
+  #       run: bash ./.github/scripts/on-push.sh 1 1 #equal and non-zero to trigger PIO

   # ESP-IDF component build
   build-esp-idf-component:

@@ -251,7 +251,17 @@ jobs:
       # https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-guides/tools/idf-docker-image.html
       # for details.
       idf_ver: ["release-v5.3"]
-      idf_target: ["esp32", "esp32s2", "esp32s3", "esp32c2", "esp32c3", "esp32c6", "esp32h2", "esp32p4"]
+      idf_target:
+        [
+          "esp32",
+          "esp32s2",
+          "esp32s3",
+          "esp32c2",
+          "esp32c3",
+          "esp32c6",
+          "esp32h2",
+          "esp32p4"
+        ]
     container: espressif/idf:${{ matrix.idf_ver }}
     steps:
       - name: Check out arduino-esp32 as a component

@@ -276,16 +286,15 @@ jobs:
     if: github.event_name == 'push' && github.ref == 'refs/heads/master'
     runs-on: ubuntu-latest
     steps:
-    # Check out repository
+      # Check out repository
       - name: Checkout repository
         uses: actions/checkout@v4
         with:
           token: ${{secrets.GITHUB_TOKEN}}
-          fetch-depth: '0'
+          fetch-depth: "0"

       - name: Switch branch
-        run:
-          git checkout remotes/origin/gh-pages
+        run: git checkout remotes/origin/gh-pages

       - name: Download sketches reports artifact
         uses: actions/download-artifact@v4
.github/workflows/release.yml (vendored): 24 changes

@@ -10,15 +10,15 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-    - uses: actions/checkout@v4
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@v5
-      with:
-        python-version: '3.x'
-    - run: pip install packaging
-    - run: pip install pyserial
-    - name: Build Release
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      run: bash ./.github/scripts/on-release.sh
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.x"
+      - run: pip install packaging
+      - run: pip install pyserial
+      - name: Build Release
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: bash ./.github/scripts/on-release.sh
.github/workflows/tests.yml (vendored): 32 changes

@@ -16,22 +16,22 @@ on:
   pull_request:
     types: [opened, reopened, closed, synchronize, labeled, unlabeled]
     paths:
-      - '.github/workflows/tests*'
-      - '.github/scripts/*.sh'
-      - '!.github/scripts/check-cmakelists.sh'
-      - '!.github/scripts/find_*'
-      - '!.github/scripts/on-*.sh'
-      - '!.github/scripts/set_push_chunks.sh'
-      - '!.github/scripts/update-version.sh'
-      - '!.github/scripts/upload_py_tools.sh'
-      - 'tests/**'
-      - 'cores/**'
-      - 'libraries/*/src/**.cpp'
-      - 'libraries/*/src/**.h'
-      - 'libraries/*/src/**.c'
-      - 'package/**'
+      - ".github/workflows/tests*"
+      - ".github/scripts/*.sh"
+      - "!.github/scripts/check-cmakelists.sh"
+      - "!.github/scripts/find_*"
+      - "!.github/scripts/on-*.sh"
+      - "!.github/scripts/set_push_chunks.sh"
+      - "!.github/scripts/update-version.sh"
+      - "!.github/scripts/upload_py_tools.sh"
+      - "tests/**"
+      - "cores/**"
+      - "libraries/*/src/**.cpp"
+      - "libraries/*/src/**.h"
+      - "libraries/*/src/**.c"
+      - "package/**"
   schedule:
-    - cron: '0 2 * * *'
+    - cron: "0 2 * * *"

 concurrency:
   group: tests-${{ github.event.pull_request.number || github.ref }}

@@ -115,7 +115,7 @@ jobs:
       fail-fast: false
       matrix:
         type: ${{ fromJson(needs.gen-matrix.outputs.qemu-types) }}
-        chip: ['esp32', 'esp32c3']
+        chip: ["esp32", "esp32c3"]
     with:
       type: ${{ matrix.type }}
       chip: ${{ matrix.chip }}
.github/workflows/tests_build.yml (vendored): 4 changes

@@ -5,11 +5,11 @@ on:
     inputs:
       type:
         type: string
-        description: 'Type of tests to build'
+        description: "Type of tests to build"
         required: true
       chip:
         type: string
-        description: 'Chip to build tests for'
+        description: "Chip to build tests for"
         required: true

 jobs:
.github/workflows/tests_hw.yml (vendored): 4 changes

@@ -5,11 +5,11 @@ on:
     inputs:
       type:
         type: string
-        description: 'Type of tests to run'
+        description: "Type of tests to run"
         required: true
       chip:
         type: string
-        description: 'Chip to run tests for'
+        description: "Chip to run tests for"
         required: true

 env:
.github/workflows/tests_qemu.yml (vendored): 4 changes

@@ -64,8 +64,8 @@ jobs:
         if: ${{ steps.check-tests.outputs.enabled == 'true' }}
         with:
           cache-dependency-path: tests/requirements.txt
-          cache: 'pip'
-          python-version: '3.x'
+          cache: "pip"
+          python-version: "3.x"

       - name: Install Python dependencies
         if: ${{ steps.check-tests.outputs.enabled == 'true' }}
.github/workflows/tests_results.yml (vendored): 16 changes

@@ -18,11 +18,11 @@ jobs:
       github.event.workflow_run.conclusion == 'timed_out'
     runs-on: ubuntu-latest
     permissions:
-        actions: write
-        statuses: write
-        checks: write
-        pull-requests: write
-        contents: write
+      actions: write
+      statuses: write
+      checks: write
+      pull-requests: write
+      contents: write
     steps:
       - uses: actions/checkout@v4
         with:

@@ -139,13 +139,13 @@ jobs:
             core.info(`${name} is ${state}`);

       - name: Create output folder
-        if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }}
+        if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
         run: |
           rm -rf artifacts
           mkdir -p runtime-tests-results

       - name: Generate badge
-        if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }}
+        if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
         uses: jaywcjlove/generated-badges@v1.0.13
         with:
           label: Runtime Tests

@@ -154,7 +154,7 @@ jobs:
           color: ${{ job.status == 'success' && 'green' || 'red' }}

       - name: Push badge
-        if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }}
+        if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
         run: |
           git config user.name "github-actions[bot]"
           git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
.github/workflows/tests_wokwi.yml (vendored): 4 changes

@@ -247,8 +247,8 @@ jobs:
         if: ${{ steps.check-tests.outputs.enabled == 'true' }}
         with:
           cache-dependency-path: tests/requirements.txt
-          cache: 'pip'
-          python-version: '3.x'
+          cache: "pip"
+          python-version: "3.x"

       - name: Install dependencies
         if: ${{ steps.check-tests.outputs.enabled == 'true' }}
.pre-commit-config.yaml

@@ -12,7 +12,7 @@ default_language_version:

 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: "v4.5.0"
+    rev: "v5.0.0"
     hooks:
       - id: check-case-conflict
       - id: check-symlinks

@@ -47,13 +47,13 @@ repos:
         types_or: [c, c++]
         exclude: ^.*\/build_opt\.h$
   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: "22.10.0"
+    rev: "24.10.0"
     hooks:
       - id: black
         types_or: [python]
         args: [--line-length=120] #From the arduino code style. Add as argument rather than creating a new config file.
   - repo: https://github.com/PyCQA/flake8
-    rev: "7.0.0"
+    rev: "7.1.1"
     hooks:
       - id: flake8
         types_or: [python]

@@ -67,7 +67,7 @@ repos:
       - id: prettier
         types_or: [yaml]
   - repo: https://github.com/errata-ai/vale
-    rev: "v3.0.7"
+    rev: "v3.9.1"
    hooks:
      - id: vale
        name: vale-sync
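These rev bumps (pre-commit-hooks v4.5.0 to v5.0.0, black 22.10.0 to 24.10.0, flake8 7.0.0 to 7.1.1, vale v3.0.7 to v3.9.1) are the kind of update pre-commit's own updater produces, and the newer black accounts for the Python reformatting elsewhere in this commit. A hedged sketch of reproducing the bump locally; the exact commands the authors ran are not recorded here:

# Hypothetical reproduction; assumes pre-commit is installed (pip install pre-commit).
# "autoupdate" rewrites each rev: in .pre-commit-config.yaml to the hook repo's
# latest tag, and "run --all-files" applies the updated hooks (black, flake8,
# prettier, ...) to the whole tree, producing hunks like the ones in this commit.
import subprocess

subprocess.run(["pre-commit", "autoupdate"], check=True)
subprocess.run(["pre-commit", "run", "--all-files"], check=True)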
@@ -47,7 +47,7 @@ bool MatterGenericSwitch::begin() {
   // endpoint handles can be used to add/modify clusters.
   endpoint_t *endpoint = generic_switch::create(node::get(), &switch_config, ENDPOINT_FLAG_NONE, (void *)this);
   if (endpoint == nullptr) {
-    log_e("Failed to create Generic swtich endpoint");
+    log_e("Failed to create Generic switch endpoint");
     return false;
   }
   // Add group cluster to the switch endpoint
@@ -74,7 +74,7 @@ def test_psramspeed(dut, request):
             sums[(test, size, impl)]["time_sum"] += time

     avg_results = {}
-    for (test, size, impl) in sums:
+    for test, size, impl in sums:
         rate_avg = round(sums[(test, size, impl)]["rate_sum"] / runs, 2)
         time_avg = round(sums[(test, size, impl)]["time_sum"] / runs, 2)
         LOGGER.info(
@@ -74,7 +74,7 @@ def test_ramspeed(dut, request):
             sums[(test, size, impl)]["time_sum"] += time

     avg_results = {}
-    for (test, size, impl) in sums:
+    for test, size, impl in sums:
         rate_avg = round(sums[(test, size, impl)]["rate_sum"] / runs, 2)
         time_avg = round(sums[(test, size, impl)]["time_sum"] / runs, 2)
         LOGGER.info(
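The change in both speed tests above is the newer black dropping redundant parentheses around for-loop unpacking targets; the two spellings are semantically identical. A tiny standalone illustration, with made-up data:

# Both loops unpack the 3-tuple dict keys identically; the parentheses are
# purely cosmetic and newer black strips them.
sums = {("copy", 1024, "memcpy"): {"rate_sum": 10.0}}

for (test, size, impl) in sums:   # old style
    assert sums[(test, size, impl)]["rate_sum"] == 10.0

for test, size, impl in sums:     # style after this commit
    assert sums[(test, size, impl)]["rate_sum"] == 10.0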
@@ -529,7 +529,7 @@ class PartitionDefinition(object):
     def to_csv(self, simple_formatting=False):
         def addr_format(a, include_sizes):
             if not simple_formatting and include_sizes:
-                for (val, suffix) in [(0x100000, "M"), (0x400, "K")]:
+                for val, suffix in [(0x100000, "M"), (0x400, "K")]:
                     if a % val == 0:
                         return "%d%s" % (a // val, suffix)
                 return "0x%x" % a
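The same parenthesis cleanup lands in addr_format, which renders partition addresses with an M or K suffix when they sit on a 1 MiB or 1 KiB boundary. A standalone sketch of that logic, lifted out of its class context (the simple_formatting/include_sizes guards are omitted here):

# Standalone version of the suffix logic from the hunk above: addresses
# divisible by 0x100000 print as megabytes, by 0x400 as kilobytes,
# anything else as raw hex.
def addr_format(a):
    for val, suffix in [(0x100000, "M"), (0x400, "K")]:
        if a % val == 0:
            return "%d%s" % (a // val, suffix)
    return "0x%x" % a

assert addr_format(0x100000) == "1M"
assert addr_format(0x9000) == "36K"    # 0x9000 = 36864 = 36 * 1024
assert addr_format(0x9001) == "0x9001"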