Compare commits

..

1 commit

Author: hathach
SHA1: eb3b131a10
Message: correct adafruit metro esp32s3 pins for rev B
Date: 2023-11-21 18:10:26 +07:00
2272 changed files with 68123 additions and 174420 deletions


@ -1,246 +0,0 @@
# Clang format version: 18.1.3
---
BasedOnStyle: LLVM
AccessModifierOffset: -2
AlignAfterOpenBracket: BlockIndent
AlignArrayOfStructures: None
AlignConsecutiveAssignments:
Enabled: false
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: true
AlignConsecutiveBitFields:
Enabled: true
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: false
AlignConsecutiveDeclarations:
Enabled: false
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: false
AlignConsecutiveMacros:
Enabled: true
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: false
AlignConsecutiveShortCaseStatements:
Enabled: true
AcrossEmptyLines: false
AcrossComments: false
AlignCaseColons: false
AlignEscapedNewlines: Left
AlignOperands: Align
AlignTrailingComments:
Kind: Always
OverEmptyLines: 0
AllowAllArgumentsOnNextLine: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowBreakBeforeNoexceptSpecifier: Never
AllowShortBlocksOnASingleLine: Empty
AllowShortCaseLabelsOnASingleLine: true
AllowShortCompoundRequirementOnASingleLine: true
AllowShortEnumsOnASingleLine: false
AllowShortFunctionsOnASingleLine: Empty
AllowShortIfStatementsOnASingleLine: Never
AllowShortLambdasOnASingleLine: Empty
AllowShortLoopsOnASingleLine: true
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: MultiLine
AttributeMacros:
- __capability
BinPackArguments: true
BinPackParameters: true
BitFieldColonSpacing: Both
BraceWrapping:
AfterCaseLabel: true
AfterClass: false
AfterControlStatement: Never
AfterEnum: false
AfterFunction: false
AfterNamespace: false
AfterObjCDeclaration: false
AfterStruct: false
AfterUnion: false
AfterExternBlock: false
BeforeCatch: false
BeforeElse: false
BeforeLambdaBody: false
BeforeWhile: false
IndentBraces: false
SplitEmptyFunction: false
SplitEmptyRecord: true
SplitEmptyNamespace: true
BreakAdjacentStringLiterals: true
BreakAfterAttributes: Always
BreakAfterJavaFieldAnnotations: false
BreakArrays: false
BreakBeforeBinaryOperators: NonAssignment
BreakBeforeBraces: Custom
BreakBeforeConceptDeclarations: Always
BreakBeforeInlineASMColon: OnlyMultiline
BreakBeforeTernaryOperators: true
BreakConstructorInitializers: BeforeColon
BreakInheritanceList: BeforeColon
BreakStringLiterals: true
ColumnLimit: 160
CommentPragmas: ""
CompactNamespaces: false
ConstructorInitializerIndentWidth: 2
ContinuationIndentWidth: 2
Cpp11BracedListStyle: true
DerivePointerAlignment: false
DisableFormat: false
EmptyLineAfterAccessModifier: Never
EmptyLineBeforeAccessModifier: LogicalBlock
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros:
- foreach
- Q_FOREACH
- BOOST_FOREACH
IfMacros:
- KJ_IF_MAYBE
IncludeBlocks: Preserve
IncludeCategories:
- Regex: ^"(llvm|llvm-c|clang|clang-c)/
Priority: 2
SortPriority: 0
CaseSensitive: false
- Regex: ^(<|"(gtest|gmock|isl|json)/)
Priority: 3
SortPriority: 0
CaseSensitive: false
- Regex: .*
Priority: 1
SortPriority: 0
CaseSensitive: false
IncludeIsMainRegex: ""
IncludeIsMainSourceRegex: ""
IndentAccessModifiers: false
IndentCaseBlocks: false
IndentCaseLabels: true
IndentExternBlock: NoIndent
IndentGotoLabels: false
IndentPPDirectives: None
IndentRequiresClause: false
IndentWidth: 2
IndentWrappedFunctionNames: true
InsertBraces: true
InsertNewlineAtEOF: true
InsertTrailingCommas: None
IntegerLiteralSeparator:
Binary: 0
BinaryMinDigits: 0
Decimal: 0
DecimalMinDigits: 0
Hex: 0
HexMinDigits: 0
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtEOF: false
KeepEmptyLinesAtTheStartOfBlocks: true
LambdaBodyIndentation: Signature
Language: Cpp
LineEnding: LF
MacroBlockBegin: ""
MacroBlockEnd: ""
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCBinPackProtocolList: Auto
ObjCBlockIndentWidth: 2
ObjCBreakBeforeNestedBlockParam: true
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: true
PPIndentWidth: -1
PackConstructorInitializers: BinPack
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 19
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakOpenParenthesis: 0
PenaltyBreakScopeResolution: 500
PenaltyBreakString: 1000
PenaltyBreakTemplateDeclaration: 10
PenaltyExcessCharacter: 1000000
PenaltyIndentedWhitespace: 0
PenaltyReturnTypeOnItsOwnLine: 60
PointerAlignment: Right
QualifierAlignment: Leave
ReferenceAlignment: Pointer
ReflowComments: false
RemoveBracesLLVM: false
RemoveParentheses: Leave
RemoveSemicolon: false
RequiresClausePosition: OwnLine
RequiresExpressionIndentation: OuterScope
SeparateDefinitionBlocks: Leave
ShortNamespaceLines: 1
SkipMacroDefinitionBody: false
SortIncludes: Never
SortJavaStaticImport: Before
SortUsingDeclarations: LexicographicNumeric
SpaceAfterCStyleCast: false
SpaceAfterLogicalNot: false
SpaceAfterTemplateKeyword: false
SpaceAroundPointerQualifiers: Default
SpaceBeforeAssignmentOperators: true
SpaceBeforeCaseColon: false
SpaceBeforeCpp11BracedList: false
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpaceBeforeJsonColon: false
SpaceBeforeParens: ControlStatements
SpaceBeforeParensOptions:
AfterControlStatements: true
AfterForeachMacros: true
AfterFunctionDeclarationName: false
AfterFunctionDefinitionName: false
AfterIfMacros: true
AfterOverloadedOperator: true
AfterPlacementOperator: true
AfterRequiresInClause: false
AfterRequiresInExpression: false
BeforeNonEmptyParentheses: false
SpaceBeforeRangeBasedForLoopColon: true
SpaceBeforeSquareBrackets: false
SpaceInEmptyBlock: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: Never
SpacesInContainerLiterals: false
SpacesInLineCommentPrefix:
Minimum: 1
Maximum: -1
SpacesInParens: Never
SpacesInParensOptions:
InConditionalStatements: false
InCStyleCasts: false
InEmptyParentheses: false
Other: false
SpacesInSquareBrackets: false
Standard: Auto
StatementAttributeLikeMacros:
- Q_EMIT
StatementMacros:
- Q_UNUSED
- QT_REQUIRE_VERSION
TabWidth: 2
UseTab: Never
VerilogBreakBetweenInstancePorts: true
WhitespaceSensitiveMacros:
- BOOST_PP_STRINGIZE
- CF_SWIFT_NAME
- NS_SWIFT_NAME
- PP_STRINGIZE
- STRINGIZE
BracedInitializerIndentWidth: 2


@ -1,8 +0,0 @@
[codespell]
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/spell-check/.codespellrc
# In the event of a false positive, add the problematic word, in all lowercase, to a comma-separated list here:
ignore-words-list = ba,licence,ot,dout,als,exten,emac
skip = ./.git,./.licenses,__pycache__,.clang-format,.codespellrc,.editorconfig,.flake8,.prettierignore,.yamllint.yml,.gitignore,boards.txt,platform.txt,programmers.txt
builtin = clear,informal,en-GB_to_en-US
check-filenames =
check-hidden =
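(Illustration, not part of the file: with the en-GB_to_en-US builtin dictionary enabled, British spellings such as "licence" would normally be reported, so listing them in ignore-words-list presumably keeps codespell quiet about intentional uses in vendored or third-party text.)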


@ -1,60 +0,0 @@
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/general/.editorconfig
# See: https://editorconfig.org/
# The formatting style defined in this file is the official standardized style to be used in all Arduino Tooling
# projects and should not be modified.
# Note: indent style for each file type is defined even when it matches the universal config in order to make it clear
# that this type has an official style.
[*]
charset = utf-8
end_of_line = lf
indent_size = 2
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
[*.{adoc,asc,asciidoc}]
indent_size = 2
indent_style = space
[*.{bash,sh}]
indent_size = 4
indent_style = space
[*.{c,cc,cp,cpp,cxx,h,hh,hpp,hxx,ii,inl,ino,ixx,pde,tpl,tpp,txx}]
indent_size = 2
indent_style = space
[*.{go,mod}]
indent_style = tab
[*.java]
indent_size = 2
indent_style = space
[*.{js,jsx,json,jsonc,json5,ts,tsx}]
indent_size = 2
indent_style = space
[*.{md,mdx,mkdn,mdown,markdown}]
indent_size = unset
indent_style = space
[*.proto]
indent_size = 2
indent_style = space
[*.py]
indent_size = 4
indent_style = space
[*.svg]
indent_size = 2
indent_style = space
[*.{yaml,yml}]
indent_size = 2
indent_style = space
[{.gitconfig,.gitmodules}]
indent_style = tab

.flake8

@ -1,10 +0,0 @@
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/check-python/.flake8
# See: https://flake8.pycqa.org/en/latest/user/configuration.html
[flake8]
doctests = True
# W503 and W504 are mutually exclusive. PEP 8 recommends line break before.
ignore = W503,E203
max-complexity = 20
max-line-length = 120
select = E,W,F,C,N
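
To illustrate the W503/W504 comment above (example added here, not part of the config): ignoring W503 lets code break lines before binary operators, which is the style PEP 8 recommends, while breaking after the operator is what W504 flags.

```python
gross_wages, taxable_interest, qualified_dividends = 1000, 50, 20

# With W503 ignored, flake8 accepts the PEP 8-preferred break *before* the operator:
income = (gross_wages
          + taxable_interest
          - qualified_dividends)

# Breaking *after* the operator is the layout W504 reports instead:
income = (gross_wages +
          taxable_interest -
          qualified_dividends)
```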

.github/CODEOWNERS

@ -1,81 +0,0 @@
# CODEOWNERS for ESP32 Arduino Core
# This file is used to specify the code owners for the ESP32 Arduino Core.
# Read more about CODEOWNERS:
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners
# Note that order matters. The last matching pattern will be used.
# The default owners are the active developers of the ESP32 Arduino Core.
# Refrain from using @espressif/arduino-esp32 to avoid spamming non-developers with review requests.
* @espressif/arduino-devs
# CI
/.github/ @lucasssvaz @me-no-dev @P-R-O-C-H-Y
/.github/codeql/ @lucasssvaz
/.gitlab/ @lucasssvaz
/tests/ @lucasssvaz @P-R-O-C-H-Y
# Tools
/tools/ @me-no-dev
/tools/pre-commit/ @lucasssvaz
/tools/add_lib.sh @P-R-O-C-H-Y
# Pre-commit
/.* @lucasssvaz # Files in root directory that start with a dot.
# Git Files
/.gitignore @espressif/arduino-devs
/.gitmodules @espressif/arduino-devs
# Documentation
/docs/ @pedrominatel
/.github/ISSUE_TEMPLATE/ @pedrominatel
/.github/PULL_REQUEST_TEMPLATE.md @pedrominatel
/.readthedocs.yaml @pedrominatel
/*.md @pedrominatel
# Boards
/variants/ @P-R-O-C-H-Y
/boards.txt @P-R-O-C-H-Y
# Arduino as Component
/idf_component_examples/ @SuGlider
/idf_component.yml @SuGlider @me-no-dev
/CMakeLists.txt @SuGlider @me-no-dev
/Kconfig.projbuild @SuGlider @me-no-dev
# Build System
/package.json @me-no-dev
/platform.txt @me-no-dev
/programmers.txt @me-no-dev
/package/ @me-no-dev
# Libraries
/libraries/ArduinoOTA/ @me-no-dev
/libraries/AsyncUDP/ @me-no-dev
/libraries/BLE/ @lucasssvaz @SuGlider
/libraries/ESP_I2S/ @me-no-dev
/libraries/ESP_NOW/ @P-R-O-C-H-Y @lucasssvaz
/libraries/ESP_SR/ @me-no-dev
/libraries/ESPmDNS/ @me-no-dev
/libraries/Ethernet/ @me-no-dev
/libraries/Matter/ @SuGlider
/libraries/NetBIOS/ @me-no-dev
/libraries/Network/ @me-no-dev
/libraries/OpenThread/ @SuGlider
/libraries/PPP/ @me-no-dev
/libraries/SPI/ @me-no-dev
/libraries/Update/ @me-no-dev
/libraries/USB/ @SuGlider @me-no-dev
/libraries/WiFi/ @me-no-dev
/libraries/WiFiProv/ @me-no-dev
/libraries/Wire/ @me-no-dev
/libraries/Zigbee/ @P-R-O-C-H-Y
# CI JSON
# Keep this after other libraries and tests to avoid being overridden.
**/ci.json @lucasssvaz
# The CODEOWNERS file should be owned by the developers of the ESP32 Arduino Core.
# Leave this entry as the last one to avoid being overridden.
/.github/CODEOWNERS @espressif/arduino-devs
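(Worked example of the "last matching pattern wins" rule noted above: a file under /.github/ISSUE_TEMPLATE/ matches both the /.github/ entry and the later /.github/ISSUE_TEMPLATE/ entry, so the later one applies and @pedrominatel is requested for review rather than the CI owners.)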


@ -5,7 +5,6 @@ body:
- type: markdown
attributes:
value: |
* Please note that we can only process feature requests reported in English to ensure effective communication and support. Feature requests written in other languages will be closed, with a request to rewrite them in English.
* We welcome any ideas or feature requests! It is helpful if you can explain exactly why the feature would be useful.
* There are usually some outstanding feature requests in the [existing issues list](https://github.com/espressif/arduino-esp32/issues?q=is%3Aopen+is%3Aissue+label%3A%22Type%3A+Feature+request%22), feel free to add comments to them.
* If you would like to contribute, please read the [contributions guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/contributing.html).


@ -5,7 +5,6 @@ body:
- type: markdown
attributes:
value: |
* Please note that we can only process issues reported in English to ensure effective communication and support. Issues written in other languages will be closed, with a request to rewrite them in English.
* Before reporting a new issue please check and search in [List of existing issues](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue)
* Please check [Online Documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/index.html)
* Take a look on [Troubleshooting guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/troubleshooting.html)
@ -40,27 +39,8 @@ body:
label: Version
description: What version of Arduino ESP32 are you running? If possible, consider updating to the latest version.
options:
- latest stable Release (if not listed below)
- latest development Release Candidate (RC-X)
- latest master (checkout manually)
- v3.3.0
- v3.2.1
- v3.2.0
- v3.1.3
- v3.1.2
- v3.1.1
- v3.1.0
- v3.0.7
- v3.0.6
- v3.0.5
- v3.0.4
- v3.0.3
- v3.0.2
- v3.0.1
- v3.0.0
- v2.0.17
- v2.0.16
- v2.0.15
- latest development Release Candidate (RC-X)
- v2.0.14
- v2.0.13
- v2.0.12
@ -80,23 +60,12 @@ body:
- other
validations:
required: true
- type: dropdown
id: type
attributes:
label: Type
description: How would you define the type of the issue? Please select from the types below.
options:
- Task
- Bug
- Question
validations:
required: true
- type: input
id: IDE
attributes:
label: IDE Name
description: What IDE are you using?
placeholder: eg. Arduino IDE, VSCode, Sloeber...
placeholder: eg. Arduino IDE, PlatformIO, Sloeber...
validations:
required: true
- type: input
@ -121,8 +90,8 @@ body:
label: PSRAM enabled
description: Is PSRAM enabled?
options:
- "yes"
- "no"
- 'yes'
- 'no'
validations:
required: true
- type: input
@ -137,8 +106,8 @@ body:
id: Description
attributes:
label: Description
description: Please describe your problem here and expected behavior
placeholder: ex. Can't connect/weird behavior/wrong function/missing parameter..
description: Please describe your problem here and expected behaviour
placeholder: ex. Can't connect/weird behaviour/wrong function/missing parameter..
validations:
required: true
- type: textarea


@ -1,5 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: Arduino Core for Espressif Discord Server
url: https://discord.gg/8xY6e9crwv
about: Community Discord server for questions and help
- name: Arduino ESP32 Gitter Channel
url: https://gitter.im/espressif/arduino-esp32
about: Community channel for questions and help
- name: ESP32 Forum - Arduino
url: https://esp32.com/viewforum.php?f=19
about: Official Forum for questions


@ -5,7 +5,6 @@
2. [ ] Please provide related links (*eg. Issue which will be closed by this Pull Request*)
3. [ ] Please **update relevant Documentation** if applicable
4. [ ] Please check [Contributing guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/contributing.html)
5. [ ] Please **confirm option to "Allow edits and access to secrets by maintainers"** when opening a Pull Request
*This entire section above can be deleted if all items are checked.*


@ -1,26 +0,0 @@
name: "CodeQL config"
packs:
- trailofbits/cpp-queries
- githubsecuritylab/codeql-cpp-queries
- githubsecuritylab/codeql-python-queries
queries:
- uses: security-extended
- uses: security-and-quality
query-filters:
- exclude:
query path:
- /^experimental\/.*/
- exclude:
tags contain:
- experimental
- exclude:
problem.severity:
- recommendation
- exclude:
id: tob/cpp/use-of-legacy-algorithm
paths-ignore:
- tests/**


@ -1,5 +1,4 @@
#!/bin/bash
#
# This script is used in the CI workflow. It checks all non-examples source files in libraries/ and cores/ are listed in
# CMakeLists.txt for the cmake-based IDF component
@ -13,10 +12,10 @@ set -e
git submodule update --init --recursive
# find all source files in repo
REPO_SRCS=$(find cores/esp32/ libraries/ -name 'examples' -prune -o -name '*.c' -print -o -name '*.cpp' -print | sort)
REPO_SRCS=`find cores/esp32/ libraries/ -name 'examples' -prune -o -name '*.c' -print -o -name '*.cpp' -print | sort`
# find all source files named in CMakeLists.txt COMPONENT_SRCS
CMAKE_SRCS=$(cmake --trace-expand -P CMakeLists.txt 2>&1 | grep set\(srcs | cut -d'(' -f3 | sed 's/ )//' | sed 's/srcs //' | tr ' ;' '\n' | sort)
CMAKE_SRCS=`cmake --trace-expand -P CMakeLists.txt 2>&1 | grep set\(srcs | cut -d'(' -f3 | sed 's/ )//' | sed 's/srcs //' | tr ' ;' '\n' | sort`
if ! diff -u0 --label "Repo Files" --label "srcs" <(echo "$REPO_SRCS") <(echo "$CMAKE_SRCS"); then
echo "Source files in repo (-) and source files in CMakeLists.txt (+) don't match"


@ -3,37 +3,33 @@
# Get all boards
boards_array=()
boards_list=$(grep '.tarch=' boards.txt)
while read -r line; do
for line in `grep '.tarch=' boards.txt`; do
board_name=$(echo "$line" | cut -d '.' -f1 | cut -d '#' -f1)
# skip esp32c2 as we dont build libs for it
if [ "$board_name" == "esp32c2" ]; then
echo "Skipping 'espressif:esp32:$board_name'"
continue
fi
boards_array+=("espressif:esp32:$board_name")
echo "Added 'espressif:esp32:$board_name' to array"
done <<< "$boards_list"
done
# Create JSON like string with all boards found and pass it to env variable
board_count=${#boards_array[@]}
echo "Boards found: $board_count"
echo "BOARD-COUNT=$board_count" >> "$GITHUB_ENV"
echo "BOARD-COUNT=$board_count" >> $GITHUB_ENV
if [ "$board_count" -gt 0 ]; then
if [ $board_count -gt 0 ]
then
json_matrix='['
for board in "${boards_array[@]}"; do
for board in ${boards_array[@]}
do
json_matrix+='"'$board'"'
if [ "$board_count" -gt 1 ]; then
if [ $board_count -gt 1 ]
then
json_matrix+=","
fi
board_count=$((board_count - 1))
board_count=$(($board_count - 1))
done
json_matrix+=']'
echo "$json_matrix"
echo "FQBNS=${json_matrix}" >> "$GITHUB_ENV"
echo $json_matrix
echo "FQBNS=${json_matrix}" >> $GITHUB_ENV
else
echo "FQBNS=" >> "$GITHUB_ENV"
echo "FQBNS=" >> $GITHUB_ENV
fi
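
For illustration (not part of the script): the JSON-like matrix string it writes into $GITHUB_ENV can be sketched in Python, with a few example board names standing in for whatever the grep over boards.txt finds.

```python
# Rough Python equivalent of the FQBN matrix string built above.
# Board names are illustrative; the script derives them from boards.txt.
boards = ["espressif:esp32:esp32", "espressif:esp32:esp32s3", "espressif:esp32:esp32c6"]
json_matrix = "[" + ",".join(f'"{b}"' for b in boards) + "]"
print(json_matrix)  # ["espressif:esp32:esp32","espressif:esp32:esp32s3","espressif:esp32:esp32c6"]
# The script appends this as FQBNS=<json_matrix> to $GITHUB_ENV for later jobs.
```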


@ -2,61 +2,89 @@
# Get inputs from command
owner_repository=$1
base_ref=$2
pr_number=$2
# Download the boards.txt file from the base branch
curl -L -o boards_base.txt https://raw.githubusercontent.com/"$owner_repository"/"$base_ref"/boards.txt
url="https://api.github.com/repos/$owner_repository/pulls/$pr_number/files"
echo $url
# Compare boards.txt file in the repo with the modified file from PR
diff=$(diff -u boards_base.txt boards.txt)
# Get changes in boards.txt file from PR
Patch=$(curl $url | jq -r '.[] | select(.filename == "boards.txt") | .patch ')
# Check if the diff is empty
if [ -z "$diff" ]; then
echo "No changes in boards.txt file"
echo "FQBNS="
exit 0
fi
# Extract only changed lines number and count
substring_patch=$(echo "$Patch" | grep -o '@@[^@]*@@')
# Extract added or modified lines (lines starting with '+' or '-')
modified_lines=$(echo "$diff" | grep -E '^[+-][^+-]')
params_array=()
# Print the modified lines for debugging
echo "Modified lines:"
echo "$modified_lines"
IFS=$'\n' read -d '' -ra params <<< $(echo "$substring_patch" | grep -oE '[-+][0-9]+,[0-9]+')
for param in "${params[@]}"
do
echo "The parameter is $param"
params_array+=("$param")
done
boards_array=()
previous_board=""
file="boards.txt"
# Extract board names from the modified lines, and add them to the boards_array
while read -r line; do
# Loop through boards.txt file and extract all boards that were added
for (( c=0; c<${#params_array[@]}; c+=2 ))
do
deletion_count=$( echo "${params_array[c]}" | cut -d',' -f2 | cut -d' ' -f1 )
addition_line=$( echo "${params_array[c+1]}" | cut -d'+' -f2 | cut -d',' -f1 )
addition_count=$( echo "${params_array[c+1]}" | cut -d'+' -f2 | cut -d',' -f2 | cut -d' ' -f1 )
addition_end=$(($addition_line+$addition_count))
addition_line=$(($addition_line + 3))
addition_end=$(($addition_end - $deletion_count))
echo $addition_line
echo $addition_end
i=0
while read -r line
do
i=$((i+1))
if [ $i -lt $addition_line ]
then
continue
elif [ $i -gt $addition_end ]
then
break
fi
board_name=$(echo "$line" | cut -d '.' -f1 | cut -d '#' -f1)
# remove + or - from the board name at the beginning
board_name=${board_name#[-+]}
if [ "$board_name" != "" ] && [ "$board_name" != "+" ] && [ "$board_name" != "-" ] && [ "$board_name" != "esp32_family" ]; then
if [ "$board_name" != "$previous_board" ]; then
if [ "$board_name" != "" ]
then
if [ "$board_name" != "$previous_board" ]
then
boards_array+=("espressif:esp32:$board_name")
previous_board="$board_name"
echo "Added 'espressif:esp32:$board_name' to array"
fi
fi
done <<< "$modified_lines"
done < "$file"
done
# Create JSON like string with all boards found and pass it to env variable
board_count=${#boards_array[@]}
if [ "$board_count" -gt 0 ]; then
if [ $board_count -gt 0 ]
then
json_matrix='{"fqbn": ['
for board in "${boards_array[@]}"; do
for board in ${boards_array[@]}
do
json_matrix+='"'$board'"'
if [ "$board_count" -gt 1 ]; then
if [ $board_count -gt 1 ]
then
json_matrix+=","
fi
board_count=$((board_count - 1))
board_count=$(($board_count - 1))
done
json_matrix+=']}'
echo "$json_matrix"
echo "FQBNS=${json_matrix}" >> "$GITHUB_ENV"
echo $json_matrix
echo "FQBNS=${json_matrix}" >> $GITHUB_ENV
else
echo "FQBNS=" >> "$GITHUB_ENV"
echo "FQBNS=" >> $GITHUB_ENV
fi
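
As an aside (added for clarity, not part of the script), the hunk-header parsing that the newer version performs can be sketched in Python; the patch text below is a made-up example of what the GitHub "pulls/<n>/files" API returns for boards.txt.

```python
import re

# Hypothetical hunk header like the ones the script extracts from the PR patch.
patch = "@@ -100,7 +100,12 @@ esp32.name=ESP32 Dev Module"

# Same idea as: grep -o '@@[^@]*@@' | grep -oE '[-+][0-9]+,[0-9]+'
for old_start, old_count, new_start, new_count in re.findall(
        r"@@ -(\d+),(\d+) \+(\d+),(\d+) @@", patch):
    deletion_count = int(old_count)
    addition_line = int(new_start) + 3  # the script adds 3, presumably to skip the hunk's leading context lines
    addition_end = int(new_start) + int(new_count) - deletion_count
    print(addition_line, addition_end)  # range of boards.txt lines to scan for newly added boards
```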


@ -1,6 +1,6 @@
#!/bin/bash
OSBITS=$(uname -m)
OSBITS=`arch`
if [[ "$OSTYPE" == "linux"* ]]; then
export OS_IS_LINUX="1"
if [[ "$OSBITS" == "i686" ]]; then
@ -41,11 +41,6 @@ fi
if [ ! -d "$ARDUINO_IDE_PATH" ] || [ ! -f "$ARDUINO_IDE_PATH/arduino-cli" ]; then
echo "Installing Arduino CLI on $OS_NAME ..."
mkdir -p "$ARDUINO_IDE_PATH"
if [ "$OS_IS_WINDOWS" == "1" ]; then
curl -fsSL https://downloads.arduino.cc/arduino-cli/arduino-cli_latest_Windows_64bit.zip -o arduino-cli.zip
unzip -q arduino-cli.zip -d "$ARDUINO_IDE_PATH"
rm arduino-cli.zip
else
curl -fsSL https://raw.githubusercontent.com/arduino/arduino-cli/master/install.sh | BINDIR="$ARDUINO_IDE_PATH" sh
fi
fi


@ -5,7 +5,7 @@ if [ ! -d "$ARDUINO_ESP32_PATH" ]; then
echo "Installing ESP32 Arduino Core ..."
script_init_path="$PWD"
mkdir -p "$ARDUINO_USR_PATH/hardware/espressif"
cd "$ARDUINO_USR_PATH/hardware/espressif" || exit
cd "$ARDUINO_USR_PATH/hardware/espressif"
echo "Installing Python Serial ..."
pip install pyserial > /dev/null
@ -15,25 +15,21 @@ if [ ! -d "$ARDUINO_ESP32_PATH" ]; then
pip install requests > /dev/null
fi
if [ -n "$GITHUB_REPOSITORY" ]; then
if [ ! -z "$GITHUB_REPOSITORY" ]; then
echo "Linking Core..."
ln -s "$GITHUB_WORKSPACE" esp32
ln -s $GITHUB_WORKSPACE esp32
else
echo "Cloning Core Repository..."
git clone https://github.com/espressif/arduino-esp32.git esp32 > /dev/null 2>&1
fi
#echo "Updating Submodules ..."
cd esp32 || exit
cd esp32
#git submodule update --init --recursive > /dev/null 2>&1
echo "Installing Platform Tools ..."
if [ "$OS_IS_WINDOWS" == "1" ]; then
cd tools && ./get.exe
else
cd tools && python get.py
fi
cd "$script_init_path" || exit
cd $script_init_path
echo "ESP32 Arduino has been installed in '$ARDUINO_ESP32_PATH'"
echo ""


@ -4,7 +4,7 @@
#OSTYPE: 'msys', ARCH: 'x86_64' => win32
#OSTYPE: 'darwin18', ARCH: 'i386' => macos
OSBITS=$(uname -m)
OSBITS=`arch`
if [[ "$OSTYPE" == "linux"* ]]; then
export OS_IS_LINUX="1"
ARCHIVE_FORMAT="tar.xz"
@ -77,3 +77,4 @@ if [ ! -d "$ARDUINO_IDE_PATH" ]; then
echo "Arduino IDE Installed in '$ARDUINO_IDE_PATH'"
echo ""
fi

.github/scripts/install-platformio-esp32.sh

@ -0,0 +1,180 @@
#!/bin/bash
export PLATFORMIO_ESP32_PATH="$HOME/.platformio/packages/framework-arduinoespressif32"
PLATFORMIO_ESP32_URL="https://github.com/platformio/platform-espressif32.git"
TOOLCHAIN_VERSION="12.2.0+20230208"
ESPTOOLPY_VERSION="~1.40501.0"
ESPRESSIF_ORGANIZATION_NAME="espressif"
echo "Installing Python Wheel ..."
pip install wheel > /dev/null 2>&1
echo "Installing PlatformIO ..."
pip install -U https://github.com/platformio/platformio/archive/master.zip > /dev/null 2>&1
echo "Installing Platform ESP32 ..."
python -m platformio platform install $PLATFORMIO_ESP32_URL > /dev/null 2>&1
echo "Replacing the package versions ..."
replace_script="import json; import os;"
replace_script+="fp=open(os.path.expanduser('~/.platformio/platforms/espressif32/platform.json'), 'r+');"
replace_script+="data=json.load(fp);"
# Use framework sources from the repository
replace_script+="data['packages']['framework-arduinoespressif32']['version'] = '*';"
replace_script+="del data['packages']['framework-arduinoespressif32']['owner'];"
# Use toolchain packages from the "espressif" organization
replace_script+="data['packages']['toolchain-xtensa-esp32']['owner']='$ESPRESSIF_ORGANIZATION_NAME';"
replace_script+="data['packages']['toolchain-xtensa-esp32s2']['owner']='$ESPRESSIF_ORGANIZATION_NAME';"
replace_script+="data['packages']['toolchain-riscv32-esp']['owner']='$ESPRESSIF_ORGANIZATION_NAME';"
# Update versions to use the upstream
replace_script+="data['packages']['toolchain-xtensa-esp32']['version']='$TOOLCHAIN_VERSION';"
replace_script+="data['packages']['toolchain-xtensa-esp32s2']['version']='$TOOLCHAIN_VERSION';"
replace_script+="data['packages']['toolchain-xtensa-esp32s3']['version']='$TOOLCHAIN_VERSION';"
replace_script+="data['packages']['toolchain-riscv32-esp']['version']='$TOOLCHAIN_VERSION';"
# Add new "framework-arduinoespressif32-libs" package
# Read "package_esp32_index.template.json" to extract a url to a zip package for "esp32-arduino-libs"
replace_script+="fpackage=open(os.path.join('package', 'package_esp32_index.template.json'), 'r+');"
replace_script+="package_data=json.load(fpackage);"
replace_script+="fpackage.close();"
replace_script+="libs_package_archive_url=next(next(system['url'] for system in tool['systems'] if system['host'] == 'x86_64-pc-linux-gnu') for tool in package_data['packages'][0]['tools'] if tool['name'] == 'esp32-arduino-libs');"
replace_script+="data['packages'].update({'framework-arduinoespressif32-libs':{'type':'framework','optional':False,'version':libs_package_archive_url}});"
replace_script+="data['packages']['toolchain-xtensa-esp32'].update({'optional':False});"
# esptool.py may require an upstream version (for now platformio is the owner)
replace_script+="data['packages']['tool-esptoolpy']['version']='$ESPTOOLPY_VERSION';"
# Save results
replace_script+="fp.seek(0);fp.truncate();json.dump(data, fp, indent=2);fp.close()"
python -c "$replace_script"
if [ "$GITHUB_REPOSITORY" == "espressif/arduino-esp32" ]; then
echo "Linking Core..."
ln -s $GITHUB_WORKSPACE "$PLATFORMIO_ESP32_PATH"
else
echo "Cloning Core Repository ..."
git clone --recursive https://github.com/espressif/arduino-esp32.git "$PLATFORMIO_ESP32_PATH" > /dev/null 2>&1
fi
echo "PlatformIO for ESP32 has been installed"
echo ""
function build_pio_sketch(){ # build_pio_sketch <board> <options> <path-to-ino>
if [ "$#" -lt 3 ]; then
echo "ERROR: Illegal number of parameters"
echo "USAGE: build_pio_sketch <board> <options> <path-to-ino>"
return 1
fi
local board="$1"
local options="$2"
local sketch="$3"
local sketch_dir=$(dirname "$sketch")
echo ""
echo "Compiling '"$(basename "$sketch")"' ..."
python -m platformio ci --board "$board" "$sketch_dir" --project-option="$options"
}
function count_sketches(){ # count_sketches <examples-path>
local examples="$1"
rm -rf sketches.txt
if [ ! -d "$examples" ]; then
touch sketches.txt
return 0
fi
local sketches=$(find $examples -name *.ino)
local sketchnum=0
for sketch in $sketches; do
local sketchdir=$(dirname $sketch)
local sketchdirname=$(basename $sketchdir)
local sketchname=$(basename $sketch)
if [[ "${sketchdirname}.ino" != "$sketchname" ]]; then
continue
fi
if [[ -f "$sketchdir/.test.skip" ]]; then
continue
fi
echo $sketch >> sketches.txt
sketchnum=$(($sketchnum + 1))
done
return $sketchnum
}
function build_pio_sketches(){ # build_pio_sketches <board> <options> <examples-path> <chunk> <total-chunks>
if [ "$#" -lt 3 ]; then
echo "ERROR: Illegal number of parameters"
echo "USAGE: build_pio_sketches <board> <options> <examples-path> [<chunk> <total-chunks>]"
return 1
fi
local board=$1
local options="$2"
local examples=$3
local chunk_idex=$4
local chunks_num=$5
if [ "$#" -lt 5 ]; then
chunk_idex="0"
chunks_num="1"
fi
if [ "$chunks_num" -le 0 ]; then
echo "ERROR: Chunks count must be positive number"
return 1
fi
if [ "$chunk_idex" -ge "$chunks_num" ]; then
echo "ERROR: Chunk index must be less than chunks count"
return 1
fi
set +e
count_sketches "$examples"
local sketchcount=$?
set -e
local sketches=$(cat sketches.txt)
rm -rf sketches.txt
local chunk_size=$(( $sketchcount / $chunks_num ))
local all_chunks=$(( $chunks_num * $chunk_size ))
if [ "$all_chunks" -lt "$sketchcount" ]; then
chunk_size=$(( $chunk_size + 1 ))
fi
local start_index=$(( $chunk_idex * $chunk_size ))
if [ "$sketchcount" -le "$start_index" ]; then
echo "Skipping job"
return 0
fi
local end_index=$(( $(( $chunk_idex + 1 )) * $chunk_size ))
if [ "$end_index" -gt "$sketchcount" ]; then
end_index=$sketchcount
fi
local start_num=$(( $start_index + 1 ))
echo "Found $sketchcount Sketches";
echo "Chunk Count : $chunks_num"
echo "Chunk Size : $chunk_size"
echo "Start Sketch: $start_num"
echo "End Sketch : $end_index"
local sketchnum=0
for sketch in $sketches; do
local sketchdir=$(dirname $sketch)
local sketchdirname=$(basename $sketchdir)
local sketchname=$(basename $sketch)
if [ "${sketchdirname}.ino" != "$sketchname" ] \
|| [ -f "$sketchdir/.test.skip" ]; then
continue
fi
sketchnum=$(($sketchnum + 1))
if [ "$sketchnum" -le "$start_index" ] \
|| [ "$sketchnum" -gt "$end_index" ]; then
continue
fi
build_pio_sketch "$board" "$options" "$sketch"
local result=$?
if [ $result -ne 0 ]; then
return $result
fi
done
return 0
}
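
The chunk arithmetic in build_pio_sketches above splits the sketch list across CI jobs; a small illustration (numbers invented) of how the start/end indices come out:

```python
# Illustration of the chunking used by build_pio_sketches (values are made up).
sketchcount, chunks_num = 45, 4

chunk_size = sketchcount // chunks_num      # 11
if chunks_num * chunk_size < sketchcount:   # 44 < 45, so round up
    chunk_size += 1                         # 12

for chunk_index in range(chunks_num):
    start_index = chunk_index * chunk_size
    end_index = min((chunk_index + 1) * chunk_size, sketchcount)
    print(chunk_index, start_index + 1, end_index)
# 0 1 12 / 1 13 24 / 2 25 36 / 3 37 45  -- the last chunk simply ends early
```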


@ -1,55 +1,46 @@
#!/usr/bin/env python
# This script merges two Arduino Board Manager package json files.
# Usage:
# python merge_packages.py package_esp8266com_index.json version/new/package_esp8266com_index.json
# Written by Ivan Grokhotkov, 2015
# Updated by lucasssvaz to handle Chinese version sorting, 2025
#
from __future__ import print_function
# from distutils.version import LooseVersion
from packaging.version import Version
from distutils.version import LooseVersion
import re
import json
import sys
def load_package(filename):
pkg = json.load(open(filename))["packages"][0]
print("Loaded package {0} from {1}".format(pkg["name"], filename), file=sys.stderr)
print("{0} platform(s), {1} tools".format(len(pkg["platforms"]), len(pkg["tools"])), file=sys.stderr)
pkg = json.load(open(filename))['packages'][0]
print("Loaded package {0} from {1}".format(pkg['name'], filename), file=sys.stderr)
print("{0} platform(s), {1} tools".format(len(pkg['platforms']), len(pkg['tools'])), file=sys.stderr)
return pkg
def merge_objects(versions, obj):
for o in obj:
name = o["name"].encode("ascii")
ver = o["version"].encode("ascii")
if name not in versions:
name = o['name'].encode('ascii')
ver = o['version'].encode('ascii')
if not name in versions:
print("found new object, {0}".format(name), file=sys.stderr)
versions[name] = {}
if ver not in versions[name]:
if not ver in versions[name]:
print("found new version {0} for object {1}".format(ver, name), file=sys.stderr)
versions[name][ver] = o
return versions
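
For illustration (example data invented, and assuming the merge_objects defined above is in scope): the function indexes entries by name and then by version, so duplicate tool or platform versions collapse into one.

```python
# Toy illustration of merge_objects: duplicates by (name, version) are kept once.
versions = {}
versions = merge_objects(versions, [{"name": "esptool_py", "version": "4.5.1"}])
versions = merge_objects(versions, [{"name": "esptool_py", "version": "4.5.1"},
                                    {"name": "esptool_py", "version": "4.6"}])
# versions now maps b"esptool_py" -> {b"4.5.1": {...}, b"4.6": {...}}
# (keys are bytes because the function calls .encode("ascii") on them)
```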
# Normalize ESP release version string (x.x.x) by adding '-rc<MAXINT>' (x.x.x-rc9223372036854775807)
# to ensure having REL above any RC. CN version will be sorted after the official version if they happen
# to be mixed (normally, CN and non-CN versions should not be mixed)
# Dummy approach, functional anyway for current ESP package versioning
# (unlike NormalizedVersion/LooseVersion/StrictVersion & similar crap)
# Normalize ESP release version string (x.x.x) by adding '-rc<MAXINT>' (x.x.x-rc9223372036854775807) to ensure having REL above any RC
# Dummy approach, functional anyway for current ESP package versioning (unlike NormalizedVersion/LooseVersion/StrictVersion & similar crap)
def pkgVersionNormalized(versionString):
verStr = str(versionString).replace("-cn", "")
verParts = re.split(r"\.|-rc|-alpha", verStr, flags=re.IGNORECASE)
verStr = str(versionString)
verParts = re.split('\.|-rc', verStr, flags=re.IGNORECASE)
if len(verParts) == 3:
if "-cn" in str(versionString):
verStr = verStr + "-rc" + str(sys.maxsize // 2)
else:
verStr = verStr + "-rc" + str(sys.maxsize)
if (sys.version_info > (3, 0)): # Python 3
verStr = str(versionString) + '-rc' + str(sys.maxsize)
else: # Python 2
verStr = str(versionString) + '-rc' + str(sys.maxint)
elif len(verParts) != 4:
print("pkgVersionNormalized WARNING: unexpected version format: {0})".format(verStr), file=sys.stderr)
@ -64,35 +55,28 @@ def main(args):
tools = {}
platforms = {}
pkg1 = load_package(args[1])
tools = merge_objects(tools, pkg1["tools"])
platforms = merge_objects(platforms, pkg1["platforms"])
tools = merge_objects(tools, pkg1['tools']);
platforms = merge_objects(platforms, pkg1['platforms']);
pkg2 = load_package(args[2])
tools = merge_objects(tools, pkg2["tools"])
platforms = merge_objects(platforms, pkg2["platforms"])
tools = merge_objects(tools, pkg2['tools']);
platforms = merge_objects(platforms, pkg2['platforms']);
pkg1["tools"] = []
pkg1["platforms"] = []
pkg1['tools'] = []
pkg1['platforms'] = []
for name in tools:
for version in tools[name]:
print("Adding tool {0}-{1}".format(name, version), file=sys.stderr)
pkg1["tools"].append(tools[name][version])
pkg1['tools'].append(tools[name][version])
for name in platforms:
for version in platforms[name]:
print("Adding platform {0}-{1}".format(name, version), file=sys.stderr)
pkg1["platforms"].append(platforms[name][version])
pkg1['platforms'].append(platforms[name][version])
# pkg1["platforms"] = sorted(
# pkg1["platforms"], key=lambda k: LooseVersion(pkgVersionNormalized(k["version"])), reverse=True
# )
pkg1['platforms'] = sorted(pkg1['platforms'], key=lambda k: LooseVersion(pkgVersionNormalized(k['version'])), reverse=True)
pkg1["platforms"] = sorted(
pkg1["platforms"], key=lambda k: Version(pkgVersionNormalized(k["version"])), reverse=True
)
json.dump({'packages':[pkg1]}, sys.stdout, indent=2)
json.dump({"packages": [pkg1]}, sys.stdout, indent=2)
if __name__ == "__main__":
if __name__ == '__main__':
sys.exit(main(sys.argv))


@ -1,13 +1,12 @@
#!/bin/bash
#/bin/bash
set -e
function get_file_size {
function get_file_size(){
local file="$1"
if [[ "$OSTYPE" == "darwin"* ]]; then
eval "$(stat -s "$file")"
eval `stat -s "$file"`
local res="$?"
echo "${st_size:?}"
echo "$st_size"
return $res
else
stat --printf="%s" "$file"
@ -16,32 +15,25 @@ function get_file_size {
}
#git_remove_from_pages <file>
function git_remove_from_pages {
function git_remove_from_pages(){
local path=$1
local info
local type
local sha
local message
info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages")
type=$(echo "$info" | jq -r '.type')
if [ ! "$type" == "file" ]; then
if [ ! "$type" == "null" ]; then
local info=`curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages"`
local type=`echo "$info" | jq -r '.type'`
if [ ! $type == "file" ]; then
if [ ! $type == "null" ]; then
echo "Wrong type '$type'"
else
echo "File is not on Pages"
fi
return 0
fi
sha=$(echo "$info" | jq -r '.sha')
message="Deleting "$(basename "$path")
local sha=`echo "$info" | jq -r '.sha'`
local message="Deleting "$(basename $path)
local json="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"sha\":\"$sha\"}"
echo "$json" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X DELETE --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
}
function git_upload_to_pages {
function git_upload_to_pages(){
local path=$1
local src=$2
@ -50,50 +42,41 @@ function git_upload_to_pages {
return 1
fi
local info
local type
local message
local info=`curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages"`
local type=`echo "$info" | jq -r '.type'`
local message=$(basename $path)
local sha=""
local content=""
info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages")
type=$(echo "$info" | jq -r '.type')
message=$(basename "$path")
if [ "$type" == "file" ]; then
sha=$(echo "$info" | jq -r '.sha')
if [ $type == "file" ]; then
sha=`echo "$info" | jq -r '.sha'`
sha=",\"sha\":\"$sha\""
message="Updating $message"
elif [ ! "$type" == "null" ]; then
elif [ ! $type == "null" ]; then
>&2 echo "Wrong type '$type'"
return 1
else
message="Creating $message"
fi
content=$(base64 -i "$src")
content=`base64 -i "$src"`
data="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"content\":\"$content\"$sha}"
echo "$data" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X PUT --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
}
function git_safe_upload_to_pages {
function git_safe_upload_to_pages(){
local path=$1
local file="$2"
local name
local size
local upload_res
name=$(basename "$file")
size=$(get_file_size "$file")
if ! upload_res=$(git_upload_to_pages "$path" "$file"); then
local name=$(basename "$file")
local size=`get_file_size "$file"`
local upload_res=`git_upload_to_pages "$path" "$file"`
if [ $? -ne 0 ]; then
>&2 echo "ERROR: Failed to upload '$name' ($?)"
return 1
fi
up_size=$(echo "$upload_res" | jq -r '.content.size')
if [ "$up_size" -ne "$size" ]; then
up_size=`echo "$upload_res" | jq -r '.content.size'`
if [ $up_size -ne $size ]; then
>&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
#git_delete_asset
return 1


@ -1,33 +0,0 @@
#!/bin/bash
set -e
CHECK_REQUIREMENTS="./components/arduino-esp32/.github/scripts/sketch_utils.sh check_requirements"
# Export IDF environment
. ${IDF_PATH}/export.sh
# Find all examples in ./components/arduino-esp32/idf_component_examples
idf_component_examples=$(find ./components/arduino-esp32/idf_component_examples -mindepth 1 -maxdepth 1 -type d)
for example in $idf_component_examples; do
if [ -f "$example"/ci.json ]; then
# If the target is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$IDF_TARGET" '.targets[$target]' "$example"/ci.json)
if [[ "$is_target" == "false" ]]; then
printf "\n\033[93mSkipping %s for target %s\033[0m\n\n" "$example" "$IDF_TARGET"
continue
fi
fi
idf.py -C "$example" set-target "$IDF_TARGET"
has_requirements=$(${CHECK_REQUIREMENTS} "$example" "$example/sdkconfig")
if [ "$has_requirements" -eq 0 ]; then
printf "\n\033[93m%s does not meet the requirements for %s. Skipping...\033[0m\n\n" "$example" "$IDF_TARGET"
continue
fi
printf "\n\033[95mBuilding %s\033[0m\n\n" "$example"
idf.py -C "$example" -DEXTRA_COMPONENT_DIRS="$PWD/components" build
done


@ -4,45 +4,36 @@ set -e
export ARDUINO_BUILD_DIR="$HOME/.arduino/build.tmp"
function build {
function build(){
local target=$1
local chunk_index=$2
local chunks_cnt=$3
local build_log=$4
local log_level=${5:-none}
local sketches_file=$6
shift 6
local sketches=("$@")
local fqbn=$2
local chunk_index=$3
local chunks_cnt=$4
shift; shift; shift; shift;
local sketches=$*
local BUILD_SKETCH="${SCRIPTS_DIR}/sketch_utils.sh build"
local BUILD_SKETCHES="${SCRIPTS_DIR}/sketch_utils.sh chunk_build"
local args=("-ai" "$ARDUINO_IDE_PATH" "-au" "$ARDUINO_USR_PATH" "-t" "$target")
local args="-ai $ARDUINO_IDE_PATH -au $ARDUINO_USR_PATH"
args+=" -t $target -fqbn $fqbn"
if [ "$OS_IS_LINUX" == "1" ]; then
args+=("-p" "$ARDUINO_ESP32_PATH/libraries" "-i" "$chunk_index" "-m" "$chunks_cnt" "-d" "$log_level")
if [ -n "$sketches_file" ]; then
args+=("-f" "$sketches_file")
fi
if [ "$build_log" -eq 1 ]; then
args+=("-l" "$build_log")
fi
${BUILD_SKETCHES} "${args[@]}"
args+=" -p $ARDUINO_ESP32_PATH/libraries"
args+=" -i $chunk_index -m $chunks_cnt"
${BUILD_SKETCHES} ${args}
else
for sketch in "${sketches[@]}"; do
local sargs=("${args[@]}")
local ctags_version
local preprocessor_version
sargs+=("-s" "$(dirname "$sketch")")
for sketch in ${sketches}; do
local sargs="$args -s $(dirname $sketch)"
if [ "$OS_IS_WINDOWS" == "1" ] && [ -d "$ARDUINO_IDE_PATH/tools-builder" ]; then
ctags_version=$(ls "$ARDUINO_IDE_PATH/tools-builder/ctags/")
preprocessor_version=$(ls "$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/")
sargs+=(
"-prefs=runtime.tools.ctags.path=$ARDUINO_IDE_PATH/tools-builder/ctags/$ctags_version"
"-prefs=runtime.tools.arduino-preprocessor.path=$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/$preprocessor_version"
)
local ctags_version=`ls "$ARDUINO_IDE_PATH/tools-builder/ctags/"`
local preprocessor_version=`ls "$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/"`
win_opts="-prefs=runtime.tools.ctags.path=$ARDUINO_IDE_PATH/tools-builder/ctags/$ctags_version
-prefs=runtime.tools.arduino-preprocessor.path=$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/$preprocessor_version"
sargs+=" ${win_opts}"
fi
${BUILD_SKETCH} "${sargs[@]}"
${BUILD_SKETCH} ${sargs}
done
fi
}
@ -54,54 +45,67 @@ fi
CHUNK_INDEX=$1
CHUNKS_CNT=$2
BUILD_LOG=$3
LOG_LEVEL=$4
SKETCHES_FILE=$5
BUILD_PIO=0
if [ "$#" -lt 2 ] || [ "$CHUNKS_CNT" -le 0 ]; then
CHUNK_INDEX=0
CHUNKS_CNT=1
elif [ "$CHUNK_INDEX" -gt "$CHUNKS_CNT" ] && [ "$CHUNKS_CNT" -ge 2 ]; then
CHUNK_INDEX=$CHUNKS_CNT
fi
if [ -z "$BUILD_LOG" ] || [ "$BUILD_LOG" -le 0 ]; then
BUILD_LOG=0
elif [ "$CHUNK_INDEX" -eq "$CHUNKS_CNT" ]; then
BUILD_PIO=1
fi
#echo "Updating submodules ..."
#git -C "$GITHUB_WORKSPACE" submodule update --init --recursive > /dev/null 2>&1
SCRIPTS_DIR="./.github/scripts"
source "${SCRIPTS_DIR}/install-arduino-cli.sh"
source "${SCRIPTS_DIR}/install-arduino-core-esp32.sh"
if [ "$BUILD_PIO" -eq 0 ]; then
#source ${SCRIPTS_DIR}/install-arduino-ide.sh
source ${SCRIPTS_DIR}/install-arduino-cli.sh
source ${SCRIPTS_DIR}/install-arduino-core-esp32.sh
SKETCHES_ESP32=(
"$ARDUINO_ESP32_PATH/libraries/NetworkClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino"
"$ARDUINO_ESP32_PATH/libraries/BLE/examples/Server/Server.ino"
"$ARDUINO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino"
"$ARDUINO_ESP32_PATH/libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino"
)
#create sizes_file
sizes_file="$GITHUB_WORKSPACE/cli_compile_$CHUNK_INDEX.json"
FQBN_ESP32="espressif:esp32:esp32:PSRAM=enabled,PartitionScheme=huge_app"
FQBN_ESP32S2="espressif:esp32:esp32s2:PSRAM=enabled,PartitionScheme=huge_app"
FQBN_ESP32S3="espressif:esp32:esp32s3:PSRAM=opi,USBMode=default,PartitionScheme=huge_app"
FQBN_ESP32C3="espressif:esp32:esp32c3:PartitionScheme=huge_app"
FQBN_ESP32C6="espressif:esp32:esp32c6:PartitionScheme=huge_app"
FQBN_ESP32H2="espressif:esp32:esp32h2:PartitionScheme=huge_app"
if [ "$BUILD_LOG" -eq 1 ]; then
#create sizes_file and echo start of JSON array with "boards" key
echo "{\"boards\": [" > "$sizes_file"
fi
#build sketches for different targets
build "esp32c5" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32p4" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32s3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32s2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32c3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32c6" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32h2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
if [ "$BUILD_LOG" -eq 1 ]; then
#remove last comma from the last JSON object
sed -i '$ s/,$//' "$sizes_file"
#echo end of JSON array
echo "]}" >> "$sizes_file"
SKETCHES_ESP32="\
$ARDUINO_ESP32_PATH/libraries/WiFiClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino\
$ARDUINO_ESP32_PATH/libraries/BLE/examples/BLE_server/BLE_server.ino\
$ARDUINO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino\
$ARDUINO_ESP32_PATH/libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino\
"
SKETCHES_ESP32XX="\
$ARDUINO_ESP32_PATH/libraries/WiFiClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino\
$ARDUINO_ESP32_PATH/libraries/WiFi/examples/WiFiClient/WiFiClient.ino\
$ARDUINO_ESP32_PATH/libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino\
"
build "esp32s3" $FQBN_ESP32S3 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32
build "esp32s2" $FQBN_ESP32S2 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32XX
build "esp32c3" $FQBN_ESP32C3 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32XX
build "esp32c6" $FQBN_ESP32C6 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32XX
build "esp32h2" $FQBN_ESP32H2 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32XX
build "esp32" $FQBN_ESP32 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32
else
source ${SCRIPTS_DIR}/install-platformio-esp32.sh
# PlatformIO ESP32 Test
BOARD="esp32dev"
OPTIONS="board_build.partitions = huge_app.csv"
build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/WiFi/examples/WiFiClient/WiFiClient.ino" && \
build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/WiFiClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino" && \
build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/BluetoothSerial/examples/SerialToSerialBT/SerialToSerialBT.ino" && \
build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/BLE/examples/BLE_server/BLE_server.ino" && \
build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino"
# Basic sanity testing for other series
for board in "esp32-c3-devkitm-1" "esp32-s2-saola-1" "esp32-s3-devkitc-1"
do
python -m platformio ci --board "$board" "$PLATFORMIO_ESP32_PATH/libraries/WiFi/examples/WiFiClient" --project-option="board_build.partitions = huge_app.csv"
done
#build_pio_sketches "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries"
fi


@ -1,65 +1,46 @@
#!/bin/bash
# Disable shellcheck warning about using 'cat' to read a file.
# Disable shellcheck warning about using individual redirections for each command.
# Disable shellcheck warning about $? uses.
# shellcheck disable=SC2002,SC2129,SC2181,SC2319
if [ ! "$GITHUB_EVENT_NAME" == "release" ]; then
if [ ! $GITHUB_EVENT_NAME == "release" ]; then
echo "Wrong event '$GITHUB_EVENT_NAME'!"
exit 1
fi
EVENT_JSON=$(cat "$GITHUB_EVENT_PATH")
EVENT_JSON=`cat $GITHUB_EVENT_PATH`
action=$(echo "$EVENT_JSON" | jq -r '.action')
if [ ! "$action" == "published" ]; then
action=`echo $EVENT_JSON | jq -r '.action'`
if [ ! $action == "published" ]; then
echo "Wrong action '$action'. Exiting now..."
exit 0
fi
draft=$(echo "$EVENT_JSON" | jq -r '.release.draft')
if [ "$draft" == "true" ]; then
draft=`echo $EVENT_JSON | jq -r '.release.draft'`
if [ $draft == "true" ]; then
echo "It's a draft release. Exiting now..."
exit 0
fi
RELEASE_PRE=$(echo "$EVENT_JSON" | jq -r '.release.prerelease')
RELEASE_TAG=$(echo "$EVENT_JSON" | jq -r '.release.tag_name')
RELEASE_BRANCH=$(echo "$EVENT_JSON" | jq -r '.release.target_commitish')
RELEASE_ID=$(echo "$EVENT_JSON" | jq -r '.release.id')
RELEASE_PRE=`echo $EVENT_JSON | jq -r '.release.prerelease'`
RELEASE_TAG=`echo $EVENT_JSON | jq -r '.release.tag_name'`
RELEASE_BRANCH=`echo $EVENT_JSON | jq -r '.release.target_commitish'`
RELEASE_ID=`echo $EVENT_JSON | jq -r '.release.id'`
SCRIPTS_DIR="./.github/scripts"
OUTPUT_DIR="$GITHUB_WORKSPACE/build"
PACKAGE_NAME="esp32-$RELEASE_TAG"
PACKAGE_JSON_MERGE="$GITHUB_WORKSPACE/.github/scripts/merge_packages.py"
PACKAGE_JSON_TEMPLATE="$GITHUB_WORKSPACE/package/package_esp32_index.template.json"
PACKAGE_JSON_DEV="package_esp32_dev_index.json"
PACKAGE_JSON_REL="package_esp32_index.json"
PACKAGE_JSON_DEV_CN="package_esp32_dev_index_cn.json"
PACKAGE_JSON_REL_CN="package_esp32_index_cn.json"
echo "Event: $GITHUB_EVENT_NAME, Repo: $GITHUB_REPOSITORY, Path: $GITHUB_WORKSPACE, Ref: $GITHUB_REF"
echo "Action: $action, Branch: $RELEASE_BRANCH, ID: $RELEASE_ID"
echo "Tag: $RELEASE_TAG, Draft: $draft, Pre-Release: $RELEASE_PRE"
# Try extracting something like a JSON with a "boards" array/element and "vendor" fields
BOARDS=$(echo "$RELEASE_BODY" | grep -Pzo '(?s){.*}' | jq -r '.boards[]? // .boards? // empty' | xargs echo -n 2>/dev/null)
VENDOR=$(echo "$RELEASE_BODY" | grep -Pzo '(?s){.*}' | jq -r '.vendor? // empty' | xargs echo -n 2>/dev/null)
if [ -n "${BOARDS}" ]; then
echo "Releasing board(s): $BOARDS"
fi
if [ -n "${VENDOR}" ]; then
echo "Setting packager: $VENDOR"
fi
function get_file_size {
function get_file_size(){
local file="$1"
if [[ "$OSTYPE" == "darwin"* ]]; then
eval "$(stat -s "$file")"
eval `stat -s "$file"`
local res="$?"
echo "${st_size:?}"
echo "$st_size"
return $res
else
stat --printf="%s" "$file"
@ -67,29 +48,23 @@ function get_file_size {
fi
}
function git_upload_asset {
local name
name=$(basename "$1")
function git_upload_asset(){
local name=$(basename "$1")
# local mime=$(file -b --mime-type "$1")
curl -k -X POST -sH "Authorization: token $GITHUB_TOKEN" -H "Content-Type: application/octet-stream" --data-binary @"$1" "https://uploads.github.com/repos/$GITHUB_REPOSITORY/releases/$RELEASE_ID/assets?name=$name"
}
function git_safe_upload_asset {
function git_safe_upload_asset(){
local file="$1"
local name
local size
local upload_res
name=$(basename "$file")
size=$(get_file_size "$file")
if ! upload_res=$(git_upload_asset "$file"); then
local name=$(basename "$file")
local size=`get_file_size "$file"`
local upload_res=`git_upload_asset "$file"`
if [ $? -ne 0 ]; then
>&2 echo "ERROR: Failed to upload '$name' ($?)"
return 1
fi
up_size=$(echo "$upload_res" | jq -r '.size')
if [ "$up_size" -ne "$size" ]; then
up_size=`echo "$upload_res" | jq -r '.size'`
if [ $up_size -ne $size ]; then
>&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
#git_delete_asset
return 1
@ -98,7 +73,7 @@ function git_safe_upload_asset {
return $?
}
function git_upload_to_pages {
function git_upload_to_pages(){
local path=$1
local src=$2
@ -107,50 +82,41 @@ function git_upload_to_pages {
return 1
fi
local info
local type
local message
local info=`curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages"`
local type=`echo "$info" | jq -r '.type'`
local message=$(basename $path)
local sha=""
local content=""
info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages")
type=$(echo "$info" | jq -r '.type')
message=$(basename "$path")
if [ "$type" == "file" ]; then
sha=$(echo "$info" | jq -r '.sha')
if [ $type == "file" ]; then
sha=`echo "$info" | jq -r '.sha'`
sha=",\"sha\":\"$sha\""
message="Updating $message"
elif [ ! "$type" == "null" ]; then
elif [ ! $type == "null" ]; then
>&2 echo "Wrong type '$type'"
return 1
else
message="Creating $message"
fi
content=$(base64 -i "$src")
content=`base64 -i "$src"`
data="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"content\":\"$content\"$sha}"
echo "$data" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X PUT --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
}
function git_safe_upload_to_pages {
function git_safe_upload_to_pages(){
local path=$1
local file="$2"
local name
local size
local upload_res
name=$(basename "$file")
size=$(get_file_size "$file")
if ! upload_res=$(git_upload_to_pages "$path" "$file"); then
local name=$(basename "$file")
local size=`get_file_size "$file"`
local upload_res=`git_upload_to_pages "$path" "$file"`
if [ $? -ne 0 ]; then
>&2 echo "ERROR: Failed to upload '$name' ($?)"
return 1
fi
up_size=$(echo "$upload_res" | jq -r '.content.size')
if [ "$up_size" -ne "$size" ]; then
up_size=`echo "$upload_res" | jq -r '.content.size'`
if [ $up_size -ne $size ]; then
>&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
#git_delete_asset
return 1
@ -159,20 +125,15 @@ function git_safe_upload_to_pages {
return $?
}
function merge_package_json {
function merge_package_json(){
local jsonLink=$1
local jsonOut=$2
local old_json=$OUTPUT_DIR/oldJson.json
local merged_json=$OUTPUT_DIR/mergedJson.json
local error_code=0
echo "Downloading previous JSON $jsonLink ..."
curl -L -o "$old_json" "https://github.com/$GITHUB_REPOSITORY/releases/download/$jsonLink?access_token=$GITHUB_TOKEN" 2>/dev/null
error_code=$?
if [ $error_code -ne 0 ]; then
echo "ERROR: Download Failed! $error_code"
exit 1
fi
if [ $? -ne 0 ]; then echo "ERROR: Download Failed! $?"; exit 1; fi
echo "Creating new JSON ..."
set +e
@ -180,7 +141,7 @@ function merge_package_json {
set -e
set -v
if [ ! -s "$merged_json" ]; then
if [ ! -s $merged_json ]; then
rm -f "$merged_json"
echo "Nothing to merge"
else
@ -209,30 +170,12 @@ mkdir -p "$PKG_DIR/tools"
# Copy all core files to the package folder
echo "Copying files for packaging ..."
if [ -z "${BOARDS}" ]; then
# Copy all variants
cp -f "$GITHUB_WORKSPACE/boards.txt" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/variants" "$PKG_DIR/"
else
# Remove all entries not starting with any board code or "menu." from boards.txt
cat "$GITHUB_WORKSPACE/boards.txt" | grep "^menu\." > "$PKG_DIR/boards.txt"
for board in ${BOARDS} ; do
cat "$GITHUB_WORKSPACE/boards.txt" | grep "^${board}\." >> "$PKG_DIR/boards.txt"
done
# Copy only relevant variant files
mkdir "$PKG_DIR/variants/"
board_list=$(cat "${PKG_DIR}"/boards.txt | grep "\.variant=" | cut -d= -f2)
while IFS= read -r variant; do
cp -Rf "$GITHUB_WORKSPACE/variants/${variant}" "$PKG_DIR/variants/"
done <<< "$board_list"
fi
cp -f "$GITHUB_WORKSPACE/CMakeLists.txt" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/idf_component.yml" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/Kconfig.projbuild" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/package.json" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/programmers.txt" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/cores" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/libraries" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/variants" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/tools/espota.exe" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/espota.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/gen_esp32part.py" "$PKG_DIR/tools/"
@ -241,7 +184,7 @@ cp -f "$GITHUB_WORKSPACE/tools/gen_insights_package.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/gen_insights_package.exe" "$PKG_DIR/tools/"
cp -Rf "$GITHUB_WORKSPACE/tools/partitions" "$PKG_DIR/tools/"
cp -Rf "$GITHUB_WORKSPACE/tools/ide-debug" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/pioarduino-build.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/platformio-build.py" "$PKG_DIR/tools/"
# Remove unnecessary files in the package folder
echo "Cleaning up folders ..."
@ -253,50 +196,44 @@ find "$PKG_DIR" -name '*.git*' -type f -delete
##
RVTC_NAME="riscv32-esp-elf-gcc"
RVTC_NEW_NAME="esp-rv32"
X32TC_NAME="xtensa-esp-elf-gcc"
X32TC_NEW_NAME="esp-x32"
# Replace tools locations in platform.txt
echo "Generating platform.txt..."
cat "$GITHUB_WORKSPACE/platform.txt" | \
sed "s/version=.*/version=$RELEASE_TAG/g" | \
sed 's/tools\.esp32-arduino-libs\.path\.windows=.*//g' | \
sed 's/{runtime\.platform\.path}.tools.esp32-arduino-libs/\{runtime.tools.esp32-arduino-libs.path\}/g' | \
sed 's/{runtime\.platform\.path}.tools.xtensa-esp-elf-gdb/\{runtime.tools.xtensa-esp-elf-gdb.path\}/g' | \
sed "s/{runtime\.platform\.path}.tools.xtensa-esp-elf/\\{runtime.tools.$X32TC_NEW_NAME.path\\}/g" | \
sed 's/{runtime\.platform\.path}.tools.riscv32-esp-elf-gdb/\{runtime.tools.riscv32-esp-elf-gdb.path\}/g' | \
sed "s/{runtime\.platform\.path}.tools.riscv32-esp-elf/\\{runtime.tools.$RVTC_NEW_NAME.path\\}/g" | \
sed 's/{runtime\.platform\.path}.tools.esptool/\{runtime.tools.esptool_py.path\}/g' | \
sed 's/{runtime\.platform\.path}.tools.openocd-esp32/\{runtime.tools.openocd-esp32.path\}/g' > "$PKG_DIR/platform.txt"
if [ -n "${VENDOR}" ]; then
# Append vendor name to platform.txt to create a separate section
sed -i "/^name=.*/s/$/ ($VENDOR)/" "$PKG_DIR/platform.txt"
fi
sed 's/tools.esp32-arduino-libs.path={runtime.platform.path}\/tools\/esp32-arduino-libs/tools.esp32-arduino-libs.path=\{runtime.tools.esp32-arduino-libs.path\}/g' | \
sed 's/tools.xtensa-esp32-elf-gcc.path={runtime.platform.path}\/tools\/xtensa-esp32-elf/tools.xtensa-esp32-elf-gcc.path=\{runtime.tools.xtensa-esp32-elf-gcc.path\}/g' | \
sed 's/tools.xtensa-esp32s2-elf-gcc.path={runtime.platform.path}\/tools\/xtensa-esp32s2-elf/tools.xtensa-esp32s2-elf-gcc.path=\{runtime.tools.xtensa-esp32s2-elf-gcc.path\}/g' | \
sed 's/tools.xtensa-esp32s3-elf-gcc.path={runtime.platform.path}\/tools\/xtensa-esp32s3-elf/tools.xtensa-esp32s3-elf-gcc.path=\{runtime.tools.xtensa-esp32s3-elf-gcc.path\}/g' | \
sed 's/tools.xtensa-esp-elf-gdb.path={runtime.platform.path}\/tools\/xtensa-esp-elf-gdb/tools.xtensa-esp-elf-gdb.path=\{runtime.tools.xtensa-esp-elf-gdb.path\}/g' | \
sed "s/tools.riscv32-esp-elf-gcc.path={runtime.platform.path}\\/tools\\/riscv32-esp-elf/tools.riscv32-esp-elf-gcc.path=\\{runtime.tools.$RVTC_NEW_NAME.path\\}/g" | \
sed 's/tools.riscv32-esp-elf-gdb.path={runtime.platform.path}\/tools\/riscv32-esp-elf-gdb/tools.riscv32-esp-elf-gdb.path=\{runtime.tools.riscv32-esp-elf-gdb.path\}/g' | \
sed 's/tools.esptool_py.path={runtime.platform.path}\/tools\/esptool/tools.esptool_py.path=\{runtime.tools.esptool_py.path\}/g' | \
sed 's/debug.server.openocd.path={runtime.platform.path}\/tools\/openocd-esp32\/bin\/openocd/debug.server.openocd.path=\{runtime.tools.openocd-esp32.path\}\/bin\/openocd/g' | \
sed 's/debug.server.openocd.scripts_dir={runtime.platform.path}\/tools\/openocd-esp32\/share\/openocd\/scripts\//debug.server.openocd.scripts_dir=\{runtime.tools.openocd-esp32.path\}\/share\/openocd\/scripts\//g' | \
sed 's/debug.server.openocd.scripts_dir.windows={runtime.platform.path}\\tools\\openocd-esp32\\share\\openocd\\scripts\\/debug.server.openocd.scripts_dir.windows=\{runtime.tools.openocd-esp32.path\}\\share\\openocd\\scripts\\/g' \
> "$PKG_DIR/platform.txt"
# Add header with version information
echo "Generating core_version.h ..."
ver_define=$(echo "$RELEASE_TAG" | tr "[:lower:].\055" "[:upper:]_")
ver_hex=$(git -C "$GITHUB_WORKSPACE" rev-parse --short=8 HEAD 2>/dev/null)
echo \#define ARDUINO_ESP32_GIT_VER 0x"$ver_hex" > "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_GIT_DESC "$(git -C "$GITHUB_WORKSPACE" describe --tags 2>/dev/null)" >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE_"$ver_define" >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE \""$ver_define"\" >> "$PKG_DIR/cores/esp32/core_version.h"
ver_define=`echo $RELEASE_TAG | tr "[:lower:].\055" "[:upper:]_"`
ver_hex=`git -C "$GITHUB_WORKSPACE" rev-parse --short=8 HEAD 2>/dev/null`
echo \#define ARDUINO_ESP32_GIT_VER 0x$ver_hex > "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_GIT_DESC `git -C "$GITHUB_WORKSPACE" describe --tags 2>/dev/null` >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE_$ver_define >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE \"$ver_define\" >> "$PKG_DIR/cores/esp32/core_version.h"
# Compress package folder
echo "Creating ZIP ..."
pushd "$OUTPUT_DIR" >/dev/null
zip -qr "$PACKAGE_ZIP" "$PACKAGE_NAME"
if [ $? -ne 0 ]; then
echo "ERROR: Failed to create $PACKAGE_ZIP ($?)"
exit 1
fi
if [ $? -ne 0 ]; then echo "ERROR: Failed to create $PACKAGE_ZIP ($?)"; exit 1; fi
# Calculate SHA-256
echo "Calculating SHA sum ..."
PACKAGE_PATH="$OUTPUT_DIR/$PACKAGE_ZIP"
PACKAGE_SHA=$(shasum -a 256 "$PACKAGE_ZIP" | cut -f 1 -d ' ')
PACKAGE_SIZE=$(get_file_size "$PACKAGE_ZIP")
PACKAGE_SHA=`shasum -a 256 "$PACKAGE_ZIP" | cut -f 1 -d ' '`
PACKAGE_SIZE=`get_file_size "$PACKAGE_ZIP"`
popd >/dev/null
rm -rf "$PKG_DIR"
echo "'$PACKAGE_ZIP' Created! Size: $PACKAGE_SIZE, SHA-256: $PACKAGE_SHA"
@ -304,28 +241,86 @@ echo
# Upload package to release page
echo "Uploading package to release page ..."
PACKAGE_URL=$(git_safe_upload_asset "$PACKAGE_PATH")
PACKAGE_URL=`git_safe_upload_asset "$PACKAGE_PATH"`
echo "Package Uploaded"
echo "Download URL: $PACKAGE_URL"
echo
##
## LIBS PACKAGE ZIP
##
LIBS_PROJ_NAME="esp32-arduino-libs"
LIBS_PKG_DIR="$OUTPUT_DIR/$LIBS_PROJ_NAME"
LIBS_PACKAGE_ZIP="$LIBS_PROJ_NAME-$RELEASE_TAG.zip"
# Get the libs package URL from the template
LIBS_PACKAGE_SRC_ZIP="$OUTPUT_DIR/src-$LIBS_PROJ_NAME.zip"
LIBS_PACKAGE_SRC_URL=`cat $PACKAGE_JSON_TEMPLATE | jq -r ".packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\") | .systems[0].url"`
# Download the libs package
echo "Downloading the libs archive ..."
curl -o "$LIBS_PACKAGE_SRC_ZIP" -LJO --url "$LIBS_PACKAGE_SRC_URL" || exit 1
# Extract the libs package
echo "Extracting the archive ..."
unzip -q -d "$OUTPUT_DIR" "$LIBS_PACKAGE_SRC_ZIP" || exit 1
EXTRACTED_DIR=`ls "$OUTPUT_DIR" | grep "^$LIBS_PROJ_NAME"`
mv "$OUTPUT_DIR/$EXTRACTED_DIR" "$LIBS_PKG_DIR" || exit 1
# Remove unnecessary files in the package folder
echo "Cleaning up folders ..."
find "$LIBS_PKG_DIR" -name '*.DS_Store' -exec rm -f {} \;
find "$LIBS_PKG_DIR" -name '*.git*' -type f -delete
# Compress package folder
echo "Creating ZIP ..."
pushd "$OUTPUT_DIR" >/dev/null
zip -qr "$LIBS_PACKAGE_ZIP" "$LIBS_PROJ_NAME"
if [ $? -ne 0 ]; then echo "ERROR: Failed to create $LIBS_PACKAGE_ZIP ($?)"; exit 1; fi
# Calculate SHA-256
echo "Calculating SHA sum ..."
LIBS_PACKAGE_PATH="$OUTPUT_DIR/$LIBS_PACKAGE_ZIP"
LIBS_PACKAGE_SHA=`shasum -a 256 "$LIBS_PACKAGE_ZIP" | cut -f 1 -d ' '`
LIBS_PACKAGE_SIZE=`get_file_size "$LIBS_PACKAGE_ZIP"`
popd >/dev/null
rm -rf "$LIBS_PKG_DIR"
echo "'$LIBS_PACKAGE_ZIP' Created! Size: $LIBS_PACKAGE_SIZE, SHA-256: $LIBS_PACKAGE_SHA"
echo
# Upload package to release page
echo "Uploading libs package to release page ..."
LIBS_PACKAGE_URL=`git_safe_upload_asset "$LIBS_PACKAGE_PATH"`
echo "Libs Package Uploaded"
echo "Libs Download URL: $LIBS_PACKAGE_URL"
echo
# Construct JQ argument with libs package data
libs_jq_arg="\
(.packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\")).systems[].url = \"$LIBS_PACKAGE_URL\" |\
(.packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\")).systems[].archiveFileName = \"$LIBS_PACKAGE_ZIP\" |\
(.packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\")).systems[].size = \"$LIBS_PACKAGE_SIZE\" |\
(.packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\")).systems[].checksum = \"SHA-256:$LIBS_PACKAGE_SHA\""
# Update template values for the libs package and store it in the build folder
cat "$PACKAGE_JSON_TEMPLATE" | jq "$libs_jq_arg" > "$OUTPUT_DIR/package-$LIBS_PROJ_NAME.json"
# Overwrite the template location with the newly edited one
PACKAGE_JSON_TEMPLATE="$OUTPUT_DIR/package-$LIBS_PROJ_NAME.json"
##
## TEMP WORKAROUND FOR RV32 LONG PATH ON WINDOWS
##
RVTC_VERSION=$(cat "$PACKAGE_JSON_TEMPLATE" | jq -r ".packages[0].platforms[0].toolsDependencies[] | select(.name == \"$RVTC_NAME\") | .version" | cut -d '_' -f 2)
RVTC_VERSION=`cat $PACKAGE_JSON_TEMPLATE | jq -r ".packages[0].platforms[0].toolsDependencies[] | select(.name == \"$RVTC_NAME\") | .version" | cut -d '_' -f 2`
# RVTC_VERSION=`date -j -f '%Y%m%d' "$RVTC_VERSION" '+%y%m'` # MacOS
RVTC_VERSION=$(date -d "$RVTC_VERSION" '+%y%m')
RVTC_VERSION=`date -d "$RVTC_VERSION" '+%y%m'`
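# Illustrative example (assuming a toolchain version such as esp-12.2.0_20230208): the cut above
# yields "20230208", which the date conversion shortens to "2302".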
rvtc_jq_arg="\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$RVTC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$RVTC_NAME\")).name = \"$RVTC_NEW_NAME\" |\
(.packages[0].tools[] | select(.name==\"$RVTC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].tools[] | select(.name==\"$RVTC_NAME\")).name = \"$RVTC_NEW_NAME\" |\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$X32TC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$X32TC_NAME\")).name = \"$X32TC_NEW_NAME\" |\
(.packages[0].tools[] | select(.name==\"$X32TC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].tools[] | select(.name==\"$X32TC_NAME\")).name = \"$X32TC_NEW_NAME\""
cat "$PACKAGE_JSON_TEMPLATE" | jq "$rvtc_jq_arg" > "$OUTPUT_DIR/package-rvfix.json"
PACKAGE_JSON_TEMPLATE="$OUTPUT_DIR/package-rvfix.json"
(.packages[0].tools[] | select(.name==\"$RVTC_NAME\")).name = \"$RVTC_NEW_NAME\""
cat "$PACKAGE_JSON_TEMPLATE" | jq "$rvtc_jq_arg" > "$OUTPUT_DIR/package-$LIBS_PROJ_NAME-rvfix.json"
PACKAGE_JSON_TEMPLATE="$OUTPUT_DIR/package-$LIBS_PROJ_NAME-rvfix.json"
##
## PACKAGE JSON
@ -339,26 +334,17 @@ jq_arg=".packages[0].platforms[0].version = \"$RELEASE_TAG\" | \
.packages[0].platforms[0].checksum = \"SHA-256:$PACKAGE_SHA\""
# Generate package JSONs
echo "Generating $PACKAGE_JSON_DEV ..."
echo "Genarating $PACKAGE_JSON_DEV ..."
cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_DEV"
# On MacOS the sed command won't skip the first match. Use gsed instead.
sed '0,/github\.com\//!s|github\.com/|dl.espressif.cn/github_assets/|g' "$OUTPUT_DIR/$PACKAGE_JSON_DEV" > "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN"
python "$SCRIPTS_DIR/release_append_cn.py" "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN"
if [ "$RELEASE_PRE" == "false" ]; then
echo "Generating $PACKAGE_JSON_REL ..."
echo "Genarating $PACKAGE_JSON_REL ..."
cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_REL"
# On MacOS the sed command won't skip the first match. Use gsed instead.
sed '0,/github\.com\//!s|github\.com/|dl.espressif.cn/github_assets/|g' "$OUTPUT_DIR/$PACKAGE_JSON_REL" > "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN"
python "$SCRIPTS_DIR/release_append_cn.py" "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN"
fi
# Figure out the last release or pre-release
echo "Getting previous releases ..."
releasesJson=$(curl -sH "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" 2>/dev/null)
if [ $? -ne 0 ]; then
echo "ERROR: Get Releases Failed! ($?)"
exit 1
fi
releasesJson=`curl -sH "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" 2>/dev/null`
if [ $? -ne 0 ]; then echo "ERROR: Get Releases Failed! ($?)"; exit 1; fi
set +e
prev_release=$(echo "$releasesJson" | jq -e -r ". | map(select(.draft == false and .prerelease == false)) | sort_by(.published_at | - fromdateiso8601) | .[0].tag_name")
@ -378,94 +364,27 @@ echo "Previous (any)release: $prev_any_release"
echo
# Merge package JSONs with previous releases
if [ -n "$prev_any_release" ] && [ "$prev_any_release" != "null" ]; then
if [ ! -z "$prev_any_release" ] && [ "$prev_any_release" != "null" ]; then
echo "Merging with JSON from $prev_any_release ..."
merge_package_json "$prev_any_release/$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV"
merge_package_json "$prev_any_release/$PACKAGE_JSON_DEV_CN" "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN"
fi
if [ "$RELEASE_PRE" == "false" ]; then
if [ -n "$prev_release" ] && [ "$prev_release" != "null" ]; then
if [ ! -z "$prev_release" ] && [ "$prev_release" != "null" ]; then
echo "Merging with JSON from $prev_release ..."
merge_package_json "$prev_release/$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL"
merge_package_json "$prev_release/$PACKAGE_JSON_REL_CN" "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN"
fi
fi
# Test the package JSONs
echo "Installing arduino-cli ..."
export PATH="/home/runner/bin:$PATH"
source "${SCRIPTS_DIR}/install-arduino-cli.sh"
# For the Chinese mirror, we can't test the package JSONs as the Chinese mirror might not be updated yet.
echo "Testing $PACKAGE_JSON_DEV install ..."
echo "Installing esp32 ..."
arduino-cli core install esp32:esp32 --additional-urls "file://$OUTPUT_DIR/$PACKAGE_JSON_DEV"
if [ $? -ne 0 ]; then
echo "ERROR: Failed to install esp32 ($?)"
exit 1
fi
echo "Compiling example ..."
arduino-cli compile --fqbn esp32:esp32:esp32 "$GITHUB_WORKSPACE"/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino
if [ $? -ne 0 ]; then
echo "ERROR: Failed to compile example ($?)"
exit 1
fi
echo "Uninstalling esp32 ..."
arduino-cli core uninstall esp32:esp32
if [ $? -ne 0 ]; then
echo "ERROR: Failed to uninstall esp32 ($?)"
exit 1
fi
echo "Test successful!"
if [ "$RELEASE_PRE" == "false" ]; then
echo "Testing $PACKAGE_JSON_REL install ..."
echo "Installing esp32 ..."
arduino-cli core install esp32:esp32 --additional-urls "file://$OUTPUT_DIR/$PACKAGE_JSON_REL"
if [ $? -ne 0 ]; then
echo "ERROR: Failed to install esp32 ($?)"
exit 1
fi
echo "Compiling example ..."
arduino-cli compile --fqbn esp32:esp32:esp32 "$GITHUB_WORKSPACE"/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino
if [ $? -ne 0 ]; then
echo "ERROR: Failed to compile example ($?)"
exit 1
fi
echo "Uninstalling esp32 ..."
arduino-cli core uninstall esp32:esp32
if [ $? -ne 0 ]; then
echo "ERROR: Failed to uninstall esp32 ($?)"
exit 1
fi
echo "Test successful!"
fi
# Upload package JSONs
echo "Uploading $PACKAGE_JSON_DEV ..."
echo "Download URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV")"
echo "Pages URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV")"
echo "Download CN URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN")"
echo "Pages CN URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_DEV_CN" "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN")"
echo "Download URL: "`git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV"`
echo "Pages URL: "`git_safe_upload_to_pages "$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV"`
echo
if [ "$RELEASE_PRE" == "false" ]; then
echo "Uploading $PACKAGE_JSON_REL ..."
echo "Download URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL")"
echo "Pages URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL")"
echo "Download CN URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN")"
echo "Pages CN URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_REL_CN" "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN")"
echo "Download URL: "`git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL"`
echo "Pages URL: "`git_safe_upload_to_pages "$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL"`
echo
fi

@ -1,57 +0,0 @@
#!/usr/bin/env python3
# The Arduino IDE provides a package file for the ESP32 by default. This causes version conflicts
# when the user tries to use the JSON file with the Chinese mirrors.
#
# The downside is that the Arduino IDE will always warn the user that updates are available as it
# will consider the version from the Chinese mirrors as a pre-release version.
#
# This script is used to append "-cn" to all versions in the package_esp32_index_cn.json file so that
# the user can select the Chinese mirrors without conflicts.
#
# If Arduino ever stops providing the package_esp32_index.json file by default,
# this script can be removed and the tags reverted.
import json
def append_cn_to_versions(obj):
if isinstance(obj, dict):
# Skip tools that are not from the esp32 package
packager = obj.get("packager")
if packager is not None and packager != "esp32":
return
for key, value in obj.items():
if key == "version" and isinstance(value, str):
if not value.endswith("-cn"):
obj[key] = value + "-cn"
else:
append_cn_to_versions(value)
elif isinstance(obj, list):
for item in obj:
append_cn_to_versions(item)
def process_json_file(input_path, output_path=None):
with open(input_path, "r", encoding="utf-8") as f:
data = json.load(f)
append_cn_to_versions(data)
if output_path is None:
output_path = input_path
with open(output_path, "w", encoding="utf-8") as f:
json.dump(data, f, indent=2)
print(f"Updated JSON written to {output_path}")
if __name__ == "__main__":
import sys
if len(sys.argv) < 2:
print("Usage: python release_append_cn.py input.json [output.json]")
else:
input_file = sys.argv[1]
output_file = sys.argv[2] if len(sys.argv) > 2 else None
process_json_file(input_file, output_file)
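# Illustrative effect (entry is made up): a tools dependency such as
#   {"packager": "esp32", "name": "esptool_py", "version": "4.6"}
# comes out as
#   {"packager": "esp32", "name": "esptool_py", "version": "4.6-cn"}
# while entries whose "packager" is set to something other than "esp32" are left untouched.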

@ -1,84 +0,0 @@
#!/bin/bash
build_all=false
chunks_count=0
if [[ $CORE_CHANGED == 'true' ]] || [[ $IS_PR != 'true' ]]; then
echo "Core files changed or not a PR. Building all."
build_all=true
chunks_count=$MAX_CHUNKS
elif [[ $LIB_CHANGED == 'true' ]]; then
echo "Libraries changed. Building only affected sketches."
if [[ $NETWORKING_CHANGED == 'true' ]]; then
echo "Networking libraries changed. Building networking related sketches."
networking_sketches="$(find libraries/WiFi -name '*.ino') "
networking_sketches+="$(find libraries/Ethernet -name '*.ino') "
networking_sketches+="$(find libraries/PPP -name '*.ino') "
networking_sketches+="$(find libraries/NetworkClientSecure -name '*.ino') "
networking_sketches+="$(find libraries/WebServer -name '*.ino') "
fi
if [[ $FS_CHANGED == 'true' ]]; then
echo "FS libraries changed. Building FS related sketches."
fs_sketches="$(find libraries/SD -name '*.ino') "
fs_sketches+="$(find libraries/SD_MMC -name '*.ino') "
fs_sketches+="$(find libraries/SPIFFS -name '*.ino') "
fs_sketches+="$(find libraries/LittleFS -name '*.ino') "
fs_sketches+="$(find libraries/FFat -name '*.ino') "
fi
sketches="$networking_sketches $fs_sketches"
for file in $LIB_FILES; do
lib=$(echo "$file" | awk -F "/" '{print $1"/"$2}')
if [[ "$file" == *.ino ]]; then
# If file ends with .ino, add it to the list of sketches
echo "Sketch found: $file"
sketches+="$file "
elif [[ "$file" == "$lib/src/"* ]]; then
# If file is inside the src directory, find all sketches in the lib/examples directory
echo "Library src file found: $file"
if [[ -d $lib/examples ]]; then
lib_sketches=$(find "$lib"/examples -name '*.ino')
sketches+="$lib_sketches "
echo "Library sketches: $lib_sketches"
fi
else
# If the file is in an example folder but is not a sketch, find all sketches in that directory
echo "File in example folder found: $file"
sketch=$(find "$(dirname "$file")" -name '*.ino')
sketches+="$sketch "
echo "Sketch in example folder: $sketch"
fi
echo ""
done
fi
if [[ -n $sketches ]]; then
# Remove duplicates
sketches=$(echo "$sketches" | tr ' ' '\n' | sort | uniq)
for sketch in $sketches; do
echo "$sketch" >> sketches_found.txt
chunks_count=$((chunks_count+1))
done
echo "Number of sketches found: $chunks_count"
echo "Sketches:"
echo "$sketches"
if [[ $chunks_count -gt $MAX_CHUNKS ]]; then
echo "More sketches than the allowed number of chunks found. Limiting to $MAX_CHUNKS chunks."
chunks_count=$MAX_CHUNKS
fi
fi
chunks='["0"'
for i in $(seq 1 $(( chunks_count - 1 )) ); do
chunks+=",\"$i\""
done
chunks+="]"
{
echo "build_all=$build_all"
echo "build_libraries=$BUILD_LIBRARIES"
echo "build_static_sketches=$BUILD_STATIC_SKETCHES"
echo "build_idf=$BUILD_IDF"
echo "chunk_count=$chunks_count"
echo "chunks=$chunks"
} >> "$GITHUB_OUTPUT"

@ -1,60 +1,7 @@
#!/bin/bash
if [ -d "$ARDUINO_ESP32_PATH/tools/esp32-arduino-libs" ]; then
SDKCONFIG_DIR="$ARDUINO_ESP32_PATH/tools/esp32-arduino-libs"
elif [ -d "$GITHUB_WORKSPACE/tools/esp32-arduino-libs" ]; then
SDKCONFIG_DIR="$GITHUB_WORKSPACE/tools/esp32-arduino-libs"
else
SDKCONFIG_DIR="tools/esp32-arduino-libs"
fi
function check_requirements { # check_requirements <sketchdir> <sdkconfig_path>
local sketchdir=$1
local sdkconfig_path=$2
local has_requirements=1
local requirements
local requirements_or
if [ ! -f "$sdkconfig_path" ] || [ ! -f "$sketchdir/ci.json" ]; then
echo "WARNING: sdkconfig or ci.json not found. Assuming requirements are met." 1>&2
# Return 1 on error to force the sketch to be built and fail. This way the
# CI will fail and the user will know that the sketch has a problem.
else
# Check if the sketch requires any configuration options (AND)
requirements=$(jq -r '.requires[]? // empty' "$sketchdir/ci.json")
if [[ "$requirements" != "null" && "$requirements" != "" ]]; then
for requirement in $requirements; do
requirement=$(echo "$requirement" | xargs)
found_line=$(grep -E "^$requirement" "$sdkconfig_path")
if [[ "$found_line" == "" ]]; then
has_requirements=0
fi
done
fi
# Check if the sketch requires any configuration options (OR)
requirements_or=$(jq -r '.requires_any[]? // empty' "$sketchdir/ci.json")
if [[ "$requirements_or" != "null" && "$requirements_or" != "" ]]; then
local found=false
for requirement in $requirements_or; do
requirement=$(echo "$requirement" | xargs)
found_line=$(grep -E "^$requirement" "$sdkconfig_path")
if [[ "$found_line" != "" ]]; then
found=true
break
fi
done
if [[ "$found" == "false" ]]; then
has_requirements=0
fi
fi
fi
echo $has_requirements
}
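# Illustrative shape of a sketch's ci.json as consumed by this script (keys taken from the jq
# queries in this file; the concrete values are made up):
#   {
#     "targets": { "esp32h2": false },
#     "fqbn": { "esp32": ["espressif:esp32:esp32:PSRAM=enabled"] },
#     "fqbn_append": "PartitionScheme=huge_app",
#     "requires": ["CONFIG_SOC_WIFI_SUPPORTED=y"],
#     "requires_any": ["CONFIG_BT_ENABLED=y", "CONFIG_SOC_WIFI_SUPPORTED=y"]
#   }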
function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [extra-options]
while [ -n "$1" ]; do
function build_sketch(){ # build_sketch <ide_path> <user_path> <path-to-ino> [extra-options]
while [ ! -z "$1" ]; do
case "$1" in
-ai )
shift
@ -80,18 +27,6 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
shift
sketchdir=$1
;;
-i )
shift
chunk_index=$1
;;
-l )
shift
log_compilation=$1
;;
-d )
shift
debug_level="DebugLevel=$1"
;;
* )
break
;;
@ -99,10 +34,9 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
shift
done
xtra_opts=("$@")
len=0
xtra_opts=$*
if [ -z "$sketchdir" ]; then
if [ -z $sketchdir ]; then
echo "ERROR: Sketch directory not provided"
echo "$USAGE"
exit 1
@ -110,8 +44,8 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
# No FQBN was passed, try to get it from other options
if [ -z "$fqbn" ]; then
if [ -z "$target" ]; then
if [ -z $fqbn ]; then
if [ -z $target ]; then
echo "ERROR: Unspecified chip"
echo "$USAGE"
exit 1
@ -122,83 +56,48 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
# precedence. Note that the following logic also falls back to the default
# parameters if no arguments were passed and no file was found.
if [ -z "$options" ] && [ -f "$sketchdir"/ci.json ]; then
if [ -z $options ] && [ -f $sketchdir/cfg.json ]; then
# The config file could contain multiple FQBNs for one chip. If
# that's the case, we build once for every FQBN.
len=$(jq -r --arg target "$target" '.fqbn[$target] | length' "$sketchdir"/ci.json)
if [ "$len" -gt 0 ]; then
fqbn=$(jq -r --arg target "$target" '.fqbn[$target] | sort' "$sketchdir"/ci.json)
fi
fi
if [ -n "$options" ] || [ "$len" -eq 0 ]; then
len=`jq -r --arg chip $target '.targets[] | select(.name==$chip) | .fqbn | length' $sketchdir/cfg.json`
fqbn=`jq -r --arg chip $target '.targets[] | select(.name==$chip) | .fqbn' $sketchdir/cfg.json`
else
# Since we are passing options, we will end up with only one FQBN to
# build.
len=1
if [ -f "$sketchdir"/ci.json ]; then
fqbn_append=$(jq -r '.fqbn_append' "$sketchdir"/ci.json)
if [ "$fqbn_append" == "null" ]; then
fqbn_append=""
fi
fi
# Default FQBN options if none were passed in the command line.
# Replace any double commas with a single one and strip leading and
# trailing commas.
esp32_opts=$(echo "PSRAM=enabled,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32s2_opts=$(echo "PSRAM=enabled,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32s3_opts=$(echo "PSRAM=opi,USBMode=default,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32c3_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32c6_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32h2_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32p4_opts=$(echo "PSRAM=enabled,USBMode=default,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32c5_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
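# Illustrative example: with debug_level and fqbn_append both empty, the sed cleanup above
# collapses "PSRAM=enabled,," down to just "PSRAM=enabled".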
esp32_opts="PSRAM=enabled,PartitionScheme=huge_app"
esp32s2_opts="PSRAM=enabled,PartitionScheme=huge_app"
esp32s3_opts="PSRAM=opi,USBMode=default,PartitionScheme=huge_app"
esp32c3_opts="PartitionScheme=huge_app"
esp32c6_opts="PartitionScheme=huge_app"
esp32h2_opts="PartitionScheme=huge_app"
# Select the common part of the FQBN based on the target. The rest will be
# appended depending on the passed options.
opt=""
case "$target" in
"esp32")
[ -n "${options:-$esp32_opts}" ] && opt=":${options:-$esp32_opts}"
fqbn="espressif:esp32:esp32$opt"
fqbn="espressif:esp32:esp32:${options:-$esp32_opts}"
;;
"esp32s2")
[ -n "${options:-$esp32s2_opts}" ] && opt=":${options:-$esp32s2_opts}"
fqbn="espressif:esp32:esp32s2$opt"
fqbn="espressif:esp32:esp32s2:${options:-$esp32s2_opts}"
;;
"esp32c3")
[ -n "${options:-$esp32c3_opts}" ] && opt=":${options:-$esp32c3_opts}"
fqbn="espressif:esp32:esp32c3$opt"
fqbn="espressif:esp32:esp32c3:${options:-$esp32c3_opts}"
;;
"esp32s3")
[ -n "${options:-$esp32s3_opts}" ] && opt=":${options:-$esp32s3_opts}"
fqbn="espressif:esp32:esp32s3$opt"
fqbn="espressif:esp32:esp32s3:${options:-$esp32s3_opts}"
;;
"esp32c6")
[ -n "${options:-$esp32c6_opts}" ] && opt=":${options:-$esp32c6_opts}"
fqbn="espressif:esp32:esp32c6$opt"
fqbn="espressif:esp32:esp32c6:${options:-$esp32c6_opts}"
;;
"esp32h2")
[ -n "${options:-$esp32h2_opts}" ] && opt=":${options:-$esp32h2_opts}"
fqbn="espressif:esp32:esp32h2$opt"
;;
"esp32p4")
[ -n "${options:-$esp32p4_opts}" ] && opt=":${options:-$esp32p4_opts}"
fqbn="espressif:esp32:esp32p4$opt"
;;
"esp32c5")
[ -n "${options:-$esp32c5_opts}" ] && opt=":${options:-$esp32c5_opts}"
fqbn="espressif:esp32:esp32c5$opt"
;;
*)
echo "ERROR: Invalid chip: $target"
exit 1
fqbn="espressif:esp32:esp32h2:${options:-$esp32h2_opts}"
;;
esac
@ -214,11 +113,11 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
fi
if [ -z "$fqbn" ]; then
echo "No FQBN passed or invalid chip: $target"
echo "No FQBN passed or unvalid chip: $target"
exit 1
fi
# The directory that will hold all the artifacts (the build directory) is
# The directory that will hold all the artifcats (the build directory) is
# provided through:
# 1. An env variable called ARDUINO_BUILD_DIR.
# 2. Created at the sketch level as "build" in the case of a single
@ -226,109 +125,54 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
# 3. Created at the sketch level as "buildX" where X is the number
# of configuration built in case of a multiconfiguration test.
sketchname=$(basename "$sketchdir")
local has_requirements
if [ -f "$sketchdir"/ci.json ]; then
# If the target is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json)
if [[ "$is_target" == "false" ]]; then
echo "Skipping $sketchname for target $target"
exit 0
fi
has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig")
if [ "$has_requirements" == "0" ]; then
echo "Target $target does not meet the requirements for $sketchname. Skipping."
exit 0
fi
fi
sketchname=$(basename $sketchdir)
ARDUINO_CACHE_DIR="$HOME/.arduino/cache.tmp"
if [ -n "$ARDUINO_BUILD_DIR" ]; then
build_dir="$ARDUINO_BUILD_DIR"
elif [ "$len" -eq 1 ]; then
elif [ $len -eq 1 ]; then
# build_dir="$sketchdir/build"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build.tmp"
build_dir="$HOME/.arduino/tests/$sketchname/build.tmp"
fi
output_file="$HOME/.arduino/cli_compile_output.txt"
sizes_file="$GITHUB_WORKSPACE/cli_compile_$chunk_index.json"
mkdir -p "$ARDUINO_CACHE_DIR"
for i in $(seq 0 $((len - 1))); do
if [ "$len" -ne 1 ]; then
for i in `seq 0 $(($len - 1))`
do
if [ $len -ne 1 ]; then
# build_dir="$sketchdir/build$i"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build$i.tmp"
build_dir="$HOME/.arduino/tests/$sketchname/build$i.tmp"
fi
rm -rf "$build_dir"
mkdir -p "$build_dir"
rm -rf $build_dir
mkdir -p $build_dir
currfqbn=$(echo "$fqbn" | jq -r --argjson i "$i" '.[$i]')
currfqbn=`echo $fqbn | jq -r --argjson i $i '.[$i]'`
if [ -f "$ide_path/arduino-cli" ]; then
echo "Building $sketchname with arduino-cli and FQBN=$currfqbn"
curroptions=$(echo "$currfqbn" | cut -d':' -f4)
currfqbn=$(echo "$currfqbn" | cut -d':' -f1-3)
"$ide_path"/arduino-cli compile \
curroptions=`echo "$currfqbn" | cut -d':' -f4`
currfqbn=`echo "$currfqbn" | cut -d':' -f1-3`
$ide_path/arduino-cli compile \
--fqbn "$currfqbn" \
--board-options "$curroptions" \
--warnings "all" \
--build-property "compiler.warning_flags.all=-Wall -Werror=all -Wextra" \
--build-cache-path "$ARDUINO_CACHE_DIR" \
--build-path "$build_dir" \
"${xtra_opts[@]}" "${sketchdir}" \
2>&1 | tee "$output_file"
exit_status=${PIPESTATUS[0]}
if [ "$exit_status" -ne 0 ]; then
echo "ERROR: Compilation failed with error code $exit_status"
exit "$exit_status"
fi
if [ -n "$log_compilation" ]; then
# Extract the program storage (flash) and dynamic memory (RAM) usage from the output, in bytes and as a percentage, keeping only the numeric values
flash_bytes=$(grep -oE 'Sketch uses ([0-9]+) bytes' "$output_file" | awk '{print $3}')
flash_percentage=$(grep -oE 'Sketch uses ([0-9]+) bytes \(([0-9]+)%\)' "$output_file" | awk '{print $5}' | tr -d '(%)')
ram_bytes=$(grep -oE 'Global variables use ([0-9]+) bytes' "$output_file" | awk '{print $4}')
ram_percentage=$(grep -oE 'Global variables use ([0-9]+) bytes \(([0-9]+)%\)' "$output_file" | awk '{print $6}' | tr -d '(%)')
# Extract the directory path excluding the filename
directory_path=$(dirname "$sketch")
# Define the constant part
constant_part="/home/runner/Arduino/hardware/espressif/esp32/libraries/"
# Extract the desired substring
lib_sketch_name="${directory_path#"$constant_part"}"
# Append an entry to the sizes JSON file: the sketch name together with the extracted flash/RAM values
echo "{\"name\": \"$lib_sketch_name\",
\"sizes\": [{
\"flash_bytes\": $flash_bytes,
\"flash_percentage\": $flash_percentage,
\"ram_bytes\": $ram_bytes,
\"ram_percentage\": $ram_percentage
}]
}," >> "$sizes_file"
fi
$xtra_opts "${sketchdir}"
elif [ -f "$ide_path/arduino-builder" ]; then
echo "Building $sketchname with arduino-builder and FQBN=$currfqbn"
echo "Build path = $build_dir"
"$ide_path"/arduino-builder -compile -logger=human -core-api-version=10810 \
-fqbn=\""$currfqbn"\" \
$ide_path/arduino-builder -compile -logger=human -core-api-version=10810 \
-fqbn=\"$currfqbn\" \
-warnings="all" \
-tools "$ide_path/tools-builder" \
-hardware "$user_path/hardware" \
-libraries "$user_path/libraries" \
-build-cache "$ARDUINO_CACHE_DIR" \
-build-path "$build_dir" \
"${xtra_opts[@]}" "${sketchdir}/${sketchname}.ino"
$xtra_opts "${sketchdir}/${sketchname}.ino"
exit_status=$?
if [ $exit_status -ne 0 ]; then
echo "ERROR: Compilation failed with error code $exit_status"
exit $exit_status
fi
# $ide_path/arduino-builder -compile -logger=human -core-api-version=10810 \
# -fqbn=\"$currfqbn\" \
# -warnings="all" \
@ -343,18 +187,14 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
# $xtra_opts "${sketchdir}/${sketchname}.ino"
fi
done
unset fqbn
unset xtra_opts
unset options
}
function count_sketches { # count_sketches <path> [target] [file] [ignore-requirements]
function count_sketches(){ # count_sketches <path> [target]
local path=$1
local target=$2
local ignore_requirements=$3
local file=$4
local sketches
if [ $# -lt 1 ]; then
echo "ERROR: Illegal number of parameters"
@ -362,53 +202,33 @@ function count_sketches { # count_sketches <path> [target] [file] [ignore-requir
fi
rm -rf sketches.txt
touch sketches.txt
if [ ! -d "$path" ]; then
touch sketches.txt
return 0
fi
if [ -f "$file" ]; then
sketches=$(cat "$file")
else
sketches=$(find "$path" -name '*.ino' | sort)
fi
local sketches=$(find $path -name *.ino | sort)
local sketchnum=0
for sketch in $sketches; do
local sketchdir
local sketchdirname
local sketchname
local has_requirements
sketchdir=$(dirname "$sketch")
sketchdirname=$(basename "$sketchdir")
sketchname=$(basename "$sketch")
local sketchdir=$(dirname $sketch)
local sketchdirname=$(basename $sketchdir)
local sketchname=$(basename $sketch)
if [[ "$sketchdirname.ino" != "$sketchname" ]]; then
continue
elif [[ -n $target ]] && [[ -f $sketchdir/ci.json ]]; then
# If the target is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json)
if [[ "$is_target" == "false" ]]; then
elif [[ -n $target ]] && [[ -f "$sketchdir/.skip.$target" ]]; then
continue
else
echo $sketch >> sketches.txt
sketchnum=$(($sketchnum + 1))
fi
if [ "$ignore_requirements" != "1" ]; then
has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig")
if [ "$has_requirements" == "0" ]; then
continue
fi
fi
fi
echo "$sketch" >> sketches.txt
sketchnum=$((sketchnum + 1))
done
return $sketchnum
}
function build_sketches { # build_sketches <ide_path> <user_path> <target> <path> <chunk> <total-chunks> [extra-options]
local args=()
while [ -n "$1" ]; do
function build_sketches(){ # build_sketches <ide_path> <user_path> <target> <path> <chunk> <total-chunks> [extra-options]
local args=""
while [ ! -z "$1" ]; do
case $1 in
-ai )
shift
@ -421,12 +241,12 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
-t )
shift
target=$1
args+=("-t" "$target")
args+=" -t $target"
;;
-fqbn )
shift
fqbn=$1
args+=("-fqbn" "$fqbn")
args+=" -fqbn $fqbn"
;;
-p )
shift
@ -440,19 +260,6 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
shift
chunk_max=$1
;;
-l )
shift
log_compilation=$1
;;
-f )
shift
sketches_file=$1
;;
-d )
shift
debug_level="$1"
args+=("-d" "$debug_level")
;;
* )
break
;;
@ -460,10 +267,10 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
shift
done
local xtra_opts=("$@")
local xtra_opts=$*
if [ -z "$chunk_index" ] || [ -z "$chunk_max" ]; then
echo "ERROR: Invalid chunk parameters"
if [ -z $chunk_index ] || [ -z $chunk_max ]; then
echo "ERROR: Invalid chunk paramters"
echo "$USAGE"
exit 1
fi
@ -478,24 +285,16 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
fi
set +e
if [ -n "$sketches_file" ]; then
count_sketches "$path" "$target" "0" "$sketches_file"
local sketchcount=$?
else
count_sketches "$path" "$target"
local sketchcount=$?
fi
set -e
local sketches
sketches=$(cat sketches.txt)
local sketches=$(cat sketches.txt)
rm -rf sketches.txt
local chunk_size
local all_chunks
chunk_size=$(( sketchcount / chunk_max ))
all_chunks=$(( chunk_max * chunk_size ))
local chunk_size=$(( $sketchcount / $chunk_max ))
local all_chunks=$(( $chunk_max * $chunk_size ))
if [ "$all_chunks" -lt "$sketchcount" ]; then
chunk_size=$(( chunk_size + 1 ))
chunk_size=$(( $chunk_size + 1 ))
fi
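# Ceiling division: e.g. 10 sketches split across 3 chunks gives a chunk_size of 4.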
local start_index=0
@ -504,20 +303,19 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
start_index=$chunk_index
end_index=$sketchcount
else
start_index=$(( chunk_index * chunk_size ))
start_index=$(( $chunk_index * $chunk_size ))
if [ "$sketchcount" -le "$start_index" ]; then
echo "No sketches to build for $target in this chunk"
echo "Skipping job"
return 0
fi
end_index=$(( $(( chunk_index + 1 )) * chunk_size ))
end_index=$(( $(( $chunk_index + 1 )) * $chunk_size ))
if [ "$end_index" -gt "$sketchcount" ]; then
end_index=$sketchcount
fi
fi
local start_num
start_num=$(( start_index + 1 ))
local start_num=$(( $start_index + 1 ))
echo "Found $sketchcount Sketches for target '$target'";
echo "Chunk Index : $chunk_index"
echo "Chunk Count : $chunk_max"
@ -525,58 +323,24 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
echo "Start Sketch: $start_num"
echo "End Sketch : $end_index"
# If no FQBN was passed, fall back to the default one for the compilation log
if [ -z "$fqbn" ]; then
log_fqbn="espressif:esp32:$target"
else
log_fqbn=$fqbn
fi
sizes_file="$GITHUB_WORKSPACE/cli_compile_$chunk_index.json"
if [ -n "$log_compilation" ]; then
# Write the board, the target and the opening of the sketches array to the sizes JSON file
echo "{ \"board\": \"$log_fqbn\",
\"target\": \"$target\",
\"sketches\": [" >> "$sizes_file"
fi
local sketchnum=0
args+=("-ai" "$ide_path" "-au" "$user_path" "-i" "$chunk_index")
if [ -n "$log_compilation" ]; then
args+=("-l" "$log_compilation")
fi
args+=" -ai $ide_path -au $user_path"
for sketch in $sketches; do
local sketchdir
local sketchdirname
sketchdir=$(dirname "$sketch")
sketchdirname=$(basename "$sketchdir")
sketchnum=$((sketchnum + 1))
local sketchdir=$(dirname $sketch)
local sketchdirname=$(basename $sketchdir)
sketchnum=$(($sketchnum + 1))
if [ "$sketchnum" -le "$start_index" ] \
|| [ "$sketchnum" -gt "$end_index" ]; then
continue
fi
echo ""
echo "Building Sketch Index $sketchnum - $sketchdirname"
build_sketch "${args[@]}" -s "$sketchdir" "${xtra_opts[@]}"
echo "Building Sketch Index $(($sketchnum - 1)) - $sketchdirname"
build_sketch $args -s $sketchdir $xtra_opts
local result=$?
if [ $result -ne 0 ]; then
return $result
fi
done
if [ -n "$log_compilation" ]; then
# Remove the trailing comma from the JSON
if [ "$i" -eq $((len - 1)) ]; then
sed -i '$ s/.$//' "$sizes_file"
fi
# Close the sketches array in the sizes JSON file
echo "]" >> "$sizes_file"
# Close this board's entry in the sizes JSON file
echo "}," >> "$sizes_file"
fi
return 0
}
@ -586,28 +350,26 @@ Available commands:
count: Count sketches.
build: Build a sketch.
chunk_build: Build a chunk of sketches.
check_requirements: Check if target meets sketch requirements.
"
cmd=$1
shift
if [ -z "$cmd" ]; then
if [ -z $cmd ]; then
echo "ERROR: No command supplied"
echo "$USAGE"
exit 2
fi
case "$cmd" in
"count") count_sketches "$@"
"count") count_sketches $*
;;
"build") build_sketch "$@"
"build") build_sketch $*
;;
"chunk_build") build_sketches "$@"
;;
"check_requirements") check_requirements "$@"
"chunk_build") build_sketches $*
;;
*)
echo "ERROR: Unrecognized command"
echo "$USAGE"
exit 2
esac

@ -2,20 +2,19 @@
USAGE="
USAGE:
${0} -c -type <test_type> <chunk_build_opts>
Example: ${0} -c -type validation -t esp32 -i 0 -m 15
${0} -c <chunk_build_opts>
Example: ${0} -c -t esp32 -i 0 -m 15
${0} -s sketch_name <build_opts>
Example: ${0} -s hello_world -t esp32
${0} -clean
Remove build and test generated files
"
function clean {
function clean(){
rm -rf tests/*/build*/
rm -rf tests/.pytest_cache
find tests/ -type d -name 'build*' -exec rm -rf "{}" \+
find tests/ -type d -name '__pycache__' -exec rm -rf "{}" \+
find tests/ -name '*.xml' -exec rm -rf "{}" \+
find tests/ -name 'result_*.json' -exec rm -rf "{}" \+
rm -rf tests/*/__pycache__/
rm -rf tests/*/*.xml
}
SCRIPTS_DIR="./.github/scripts"
@ -23,7 +22,7 @@ BUILD_CMD=""
chunk_build=0
while [ -n "$1" ]; do
while [ ! -z "$1" ]; do
case $1 in
-c )
chunk_build=1
@ -36,10 +35,6 @@ while [ -n "$1" ]; do
echo "$USAGE"
exit 0
;;
-type )
shift
test_type=$1
;;
-clean )
clean
exit 0
@ -51,30 +46,18 @@ while [ -n "$1" ]; do
shift
done
source "${SCRIPTS_DIR}/install-arduino-cli.sh"
source "${SCRIPTS_DIR}/install-arduino-core-esp32.sh"
source ${SCRIPTS_DIR}/install-arduino-ide.sh
source ${SCRIPTS_DIR}/install-arduino-core-esp32.sh
args=("-ai" "$ARDUINO_IDE_PATH" "-au" "$ARDUINO_USR_PATH")
if [[ $test_type == "all" ]] || [[ -z $test_type ]]; then
if [ -n "$sketch" ]; then
tmp_sketch_path=$(find tests -name "$sketch".ino)
test_type=$(basename "$(dirname "$(dirname "$tmp_sketch_path")")")
echo "Sketch $sketch test type: $test_type"
test_folder="$PWD/tests/$test_type"
else
test_folder="$PWD/tests"
fi
else
test_folder="$PWD/tests/$test_type"
fi
args="-ai $ARDUINO_IDE_PATH -au $ARDUINO_USR_PATH"
if [ $chunk_build -eq 1 ]; then
BUILD_CMD="${SCRIPTS_DIR}/sketch_utils.sh chunk_build"
args+=("-p" "$test_folder" "-i" "0" "-m" "1")
args+=" -p $PWD/tests"
else
BUILD_CMD="${SCRIPTS_DIR}/sketch_utils.sh build"
args+=("-s" "$test_folder/$sketch")
args+=" -s $PWD/tests/$sketch"
fi
${BUILD_CMD} "${args[@]}" "$@"
${BUILD_CMD} ${args} $*

@ -1,28 +0,0 @@
#!/bin/bash
build_types="'validation'"
hw_types="'validation'"
wokwi_types="'validation'"
qemu_types="'validation'"
if [[ $IS_PR != 'true' ]] || [[ $PERFORMANCE_ENABLED == 'true' ]]; then
build_types+=",'performance'"
hw_types+=",'performance'"
#wokwi_types+=",'performance'"
#qemu_types+=",'performance'"
fi
targets="'esp32','esp32s2','esp32s3','esp32c3','esp32c6','esp32h2','esp32p4'"
mkdir -p info
echo "[$wokwi_types]" > info/wokwi_types.txt
echo "[$targets]" > info/targets.txt
{
echo "build-types=[$build_types]"
echo "hw-types=[$hw_types]"
echo "wokwi-types=[$wokwi_types]"
echo "qemu-types=[$qemu_types]"
echo "targets=[$targets]"
} >> "$GITHUB_OUTPUT"

@ -1,168 +1,58 @@
#!/bin/bash
function run_test {
function run_test() {
local target=$1
local sketch=$2
local options=$3
local erase_flash=$4
local sketchdir
local sketchname
local result=0
local error=0
local sdkconfig_path
local extra_args
local test_type
local sketchdir=$(dirname $sketch)
local sketchname=$(basename $sketchdir)
sketchdir=$(dirname "$sketch")
sketchname=$(basename "$sketchdir")
test_type=$(basename "$(dirname "$sketchdir")")
if [ "$options" -eq 0 ] && [ -f "$sketchdir"/ci.json ]; then
len=$(jq -r --arg target "$target" '.fqbn[$target] | length' "$sketchdir"/ci.json)
if [ "$len" -eq 0 ]; then
len=1
fi
if [ $options -eq 0 ] && [ -f $sketchdir/cfg.json ]; then
len=`jq -r --arg chip $target '.targets[] | select(.name==$chip) | .fqbn | length' $sketchdir/cfg.json`
else
len=1
fi
if [ "$len" -eq 1 ]; then
sdkconfig_path="$HOME/.arduino/tests/$target/$sketchname/build.tmp/sdkconfig"
else
sdkconfig_path="$HOME/.arduino/tests/$target/$sketchname/build0.tmp/sdkconfig"
if [ $len -eq 1 ]; then
# build_dir="tests/$sketchname/build"
build_dir="$HOME/.arduino/tests/$sketchname/build.tmp"
report_file="tests/$sketchname/$sketchname.xml"
fi
if [ -f "$sketchdir"/ci.json ]; then
# If the target or platform is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json)
selected_platform=$(jq -r --arg platform "$platform" '.platforms[$platform]' "$sketchdir"/ci.json)
if [[ $is_target == "false" ]] || [[ $selected_platform == "false" ]]; then
printf "\033[93mSkipping %s test for %s, platform: %s\033[0m\n" "$sketchname" "$target" "$platform"
printf "\n\n\n"
return 0
fi
for i in `seq 0 $(($len - 1))`
do
echo "Running test: $sketchname -- Config: $i"
if [ $erase_flash -eq 1 ]; then
esptool.py -c $target erase_flash
fi
if [ ! -f "$sdkconfig_path" ]; then
printf "\033[93mSketch %s build not found in %s\nMight be due to missing target requirements or build failure\033[0m\n" "$(dirname "$sdkconfig_path")" "$sketchname"
printf "\n\n\n"
return 0
if [ $len -ne 1 ]; then
# build_dir="tests/$sketchname/build$i"
build_dir="$HOME/.arduino/tests/$sketchname/build$i.tmp"
report_file="tests/$sketchname/$sketchname$i.xml"
fi
local compiled_target
compiled_target=$(grep -E "CONFIG_IDF_TARGET=" "$sdkconfig_path" | cut -d'"' -f2)
if [ "$compiled_target" != "$target" ]; then
printf "\033[91mError: Sketch %s compiled for %s, expected %s\033[0m\n" "$sketchname" "$compiled_target" "$target"
printf "\n\n\n"
return 1
fi
if [ "$len" -eq 1 ]; then
# build_dir="$sketchdir/build"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build.tmp"
report_file="$sketchdir/$target/$sketchname.xml"
fi
for i in $(seq 0 $((len - 1))); do
fqbn="Default"
if [ "$len" -ne 1 ]; then
fqbn=$(jq -r --arg target "$target" --argjson i "$i" '.fqbn[$target] | sort | .[$i]' "$sketchdir"/ci.json)
elif [ -f "$sketchdir"/ci.json ]; then
has_fqbn=$(jq -r --arg target "$target" '.fqbn[$target]' "$sketchdir"/ci.json)
if [ "$has_fqbn" != "null" ]; then
fqbn=$(jq -r --arg target "$target" '.fqbn[$target] | .[0]' "$sketchdir"/ci.json)
fi
fi
printf "\033[95mRunning test: %s -- Config: %s\033[0m\n" "$sketchname" "$fqbn"
if [ "$erase_flash" -eq 1 ]; then
esptool.py -c "$target" erase_flash
fi
if [ "$len" -ne 1 ]; then
# build_dir="$sketchdir/build$i"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build$i.tmp"
report_file="$sketchdir/$target/$sketchname$i.xml"
fi
if [ $platform == "wokwi" ]; then
extra_args=("--target" "$target" "--embedded-services" "arduino,wokwi" "--wokwi-timeout=$wokwi_timeout")
if [[ -f "$sketchdir/scenario.yaml" ]]; then
extra_args+=("--wokwi-scenario" "$sketchdir/scenario.yaml")
fi
if [[ -f "$sketchdir/diagram.$target.json" ]]; then
extra_args+=("--wokwi-diagram" "$sketchdir/diagram.$target.json")
fi
elif [ $platform == "qemu" ]; then
PATH=$HOME/qemu/bin:$PATH
extra_args=("--embedded-services" "qemu" "--qemu-image-path" "$build_dir/$sketchname.ino.merged.bin")
if [ "$target" == "esp32" ] || [ "$target" == "esp32s3" ]; then
extra_args+=("--qemu-prog-path" "qemu-system-xtensa" "--qemu-cli-args=\"-machine $target -m 4M -nographic\"")
elif [ "$target" == "esp32c3" ]; then
extra_args+=("--qemu-prog-path" "qemu-system-riscv32" "--qemu-cli-args=\"-machine $target -icount 3 -nographic\"")
else
printf "\033[91mUnsupported QEMU target: %s\033[0m\n" "$target"
exit 1
fi
else
extra_args=("--embedded-services" "esp,arduino")
fi
rm "$sketchdir"/diagram.json 2>/dev/null || true
result=0
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" -o junit_suite_name=%s_%s_%s_%s%s %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "$test_type" "$platform" "$target" "$sketchname" "$i" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" -o junit_suite_name=${test_type}_${platform}_${target}_${sketchname}${i} ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\n"
pytest tests --build-dir $build_dir -k test_$sketchname --junit-xml=$report_file
result=$?
if [ $result -ne 0 ]; then
result=0
printf "\033[95mRetrying test: %s -- Config: %s\033[0m\n" "$sketchname" "$i"
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" -o junit_suite_name=%s_%s_%s_%s%s %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "$test_type" "$platform" "$target" "$sketchname" "$i" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" -o junit_suite_name=${test_type}_${platform}_${target}_${sketchname}${i} ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\n"
if [ $result -ne 0 ]; then
printf "\033[91mFailed test: %s -- Config: %s\033[0m\n\n" "$sketchname" "$i"
error=$result
fi
return $result
fi
done
return $error
}
SCRIPTS_DIR="./.github/scripts"
COUNT_SKETCHES="${SCRIPTS_DIR}/sketch_utils.sh count"
platform="hardware"
wokwi_timeout=60000
chunk_run=0
options=0
erase=0
while [ -n "$1" ]; do
while [ ! -z "$1" ]; do
case $1 in
-c )
chunk_run=1
;;
-Q )
if [ ! -d "$QEMU_PATH" ]; then
echo "QEMU path $QEMU_PATH does not exist"
exit 1
fi
platform="qemu"
;;
-W )
shift
wokwi_timeout=$1
if [[ -z $WOKWI_CLI_TOKEN ]]; then
echo "Wokwi CLI token is not set"
exit 1
fi
platform="wokwi"
;;
-o )
options=1
;;
@ -189,10 +79,6 @@ while [ -n "$1" ]; do
echo "$USAGE"
exit 0
;;
-type )
shift
test_type=$1
;;
* )
break
;;
@ -200,54 +86,32 @@ while [ -n "$1" ]; do
shift
done
if [ ! $platform == "qemu" ]; then
source "${SCRIPTS_DIR}/install-arduino-ide.sh"
fi
# If sketch is provided and test type is not, test type is inferred from the sketch path
if [[ $test_type == "all" ]] || [[ -z $test_type ]]; then
if [ -n "$sketch" ]; then
tmp_sketch_path=$(find tests -name "$sketch".ino)
test_type=$(basename "$(dirname "$(dirname "$tmp_sketch_path")")")
echo "Sketch $sketch test type: $test_type"
test_folder="$PWD/tests/$test_type"
else
test_folder="$PWD/tests"
fi
else
test_folder="$PWD/tests/$test_type"
fi
source ${SCRIPTS_DIR}/install-arduino-ide.sh
if [ $chunk_run -eq 0 ]; then
if [ -z "$sketch" ]; then
echo "ERROR: Sketch name is required for single test run"
exit 1
fi
run_test "$target" "$test_folder"/"$sketch"/"$sketch".ino $options $erase
exit $?
run_test $target $PWD/tests/$sketch/$sketch.ino $options $erase
else
if [ "$chunk_max" -le 0 ]; then
echo "ERROR: Chunks count must be positive number"
exit 1
return 1
fi
if [ "$chunk_index" -ge "$chunk_max" ] && [ "$chunk_max" -ge 2 ]; then
echo "ERROR: Chunk index must be less than chunks count"
exit 1
return 1
fi
set +e
# Ignore requirements as we don't have the libs. The requirements will be checked in the run_test function
${COUNT_SKETCHES} "$test_folder" "$target" "1"
${COUNT_SKETCHES} $PWD/tests $target
sketchcount=$?
set -e
sketches=$(cat sketches.txt)
rm -rf sketches.txt
chunk_size=$(( sketchcount / chunk_max ))
all_chunks=$(( chunk_max * chunk_size ))
chunk_size=$(( $sketchcount / $chunk_max ))
all_chunks=$(( $chunk_max * $chunk_size ))
if [ "$all_chunks" -lt "$sketchcount" ]; then
chunk_size=$(( chunk_size + 1 ))
chunk_size=$(( $chunk_size + 1 ))
fi
start_index=0
@ -256,35 +120,31 @@ else
start_index=$chunk_index
end_index=$sketchcount
else
start_index=$(( chunk_index * chunk_size ))
start_index=$(( $chunk_index * $chunk_size ))
if [ "$sketchcount" -le "$start_index" ]; then
exit 0
echo "Skipping job"
return 0
fi
end_index=$(( $(( chunk_index + 1 )) * chunk_size ))
end_index=$(( $(( $chunk_index + 1 )) * $chunk_size ))
if [ "$end_index" -gt "$sketchcount" ]; then
end_index=$sketchcount
fi
fi
start_num=$(( $start_index + 1 ))
sketchnum=0
error=0
for sketch in $sketches; do
sketchnum=$((sketchnum + 1))
sketchnum=$(($sketchnum + 1))
if [ "$sketchnum" -le "$start_index" ] \
|| [ "$sketchnum" -gt "$end_index" ]; then
continue
fi
echo ""
echo "Sketch Index $(($sketchnum - 1))"
printf "\033[95mSketch Index %s\033[0m\n" "$((sketchnum - 1))"
exit_code=0
run_test "$target" "$sketch" $options $erase || exit_code=$?
if [ $exit_code -ne 0 ]; then
error=$exit_code
fi
run_test $target $sketch $options $erase
done
exit $error
fi

@ -1,6 +1,4 @@
#!/bin/bash
# Disable shellcheck warning about using 'cat' to read a file.
# shellcheck disable=SC2002
# For reference: add tools for all boards by replacing one line in each board
# "[board].upload.tool=esptool_py" to "[board].upload.tool=esptool_py\n[board].upload.tool.default=esptool_py\n[board].upload.tool.network=esp_ota"
@ -24,15 +22,7 @@ ESP_ARDUINO_VERSION_MINOR="$2"
ESP_ARDUINO_VERSION_PATCH="$3"
ESP_ARDUINO_VERSION="$ESP_ARDUINO_VERSION_MAJOR.$ESP_ARDUINO_VERSION_MINOR.$ESP_ARDUINO_VERSION_PATCH"
# Get ESP-IDF version from push.yml (this way we can ensure that the version is correct even if the local libs are not up to date)
ESP_IDF_VERSION=$(grep "idf_ver:" .github/workflows/push.yml | sed 's/.*release-v\([^"]*\).*/\1/')
if [ -z "$ESP_IDF_VERSION" ]; then
echo "Error: ESP-IDF version not found in push.yml" >&2
exit 1
fi
echo "New Arduino Version: $ESP_ARDUINO_VERSION"
echo "ESP-IDF Version: $ESP_IDF_VERSION"
echo "Updating platform.txt..."
cat platform.txt | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > __platform.txt && mv __platform.txt platform.txt
@ -40,28 +30,10 @@ cat platform.txt | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > __platfor
echo "Updating package.json..."
cat package.json | sed "s/.*\"version\":.*/ \"version\": \"$ESP_ARDUINO_VERSION\",/g" > __package.json && mv __package.json package.json
echo "Updating docs/conf_common.py..."
cat docs/conf_common.py | \
sed "s/.. |version| replace:: .*/.. |version| replace:: $ESP_ARDUINO_VERSION/g" | \
sed "s/.. |idf_version| replace:: .*/.. |idf_version| replace:: $ESP_IDF_VERSION/g" > docs/__conf_common.py && mv docs/__conf_common.py docs/conf_common.py
echo "Updating .gitlab/workflows/common.yml..."
cat .gitlab/workflows/common.yml | \
sed "s/ESP_IDF_VERSION:.*/ESP_IDF_VERSION: \"$ESP_IDF_VERSION\"/g" | \
sed "s/ESP_ARDUINO_VERSION:.*/ESP_ARDUINO_VERSION: \"$ESP_ARDUINO_VERSION\"/g" > .gitlab/workflows/__common.yml && mv .gitlab/workflows/__common.yml .gitlab/workflows/common.yml
echo "Updating cores/esp32/esp_arduino_version.h..."
cat cores/esp32/esp_arduino_version.h | \
sed "s/#define ESP_ARDUINO_VERSION_MAJOR.*/#define ESP_ARDUINO_VERSION_MAJOR $ESP_ARDUINO_VERSION_MAJOR/g" | \
sed "s/#define ESP_ARDUINO_VERSION_MINOR.*/#define ESP_ARDUINO_VERSION_MINOR $ESP_ARDUINO_VERSION_MINOR/g" | \
sed "s/#define ESP_ARDUINO_VERSION_PATCH.*/#define ESP_ARDUINO_VERSION_PATCH $ESP_ARDUINO_VERSION_PATCH/g" > __esp_arduino_version.h && mv __esp_arduino_version.h cores/esp32/esp_arduino_version.h
libraries=$(find libraries -maxdepth 1 -mindepth 1 -type d -exec basename {} \;)
for lib in $libraries; do
if [ -f "libraries/$lib/library.properties" ]; then
echo "Updating Library $lib..."
cat "libraries/$lib/library.properties" | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > "libraries/$lib/__library.properties" && mv "libraries/$lib/__library.properties" "libraries/$lib/library.properties"
fi
done
exit 0

@ -1,236 +0,0 @@
#!/usr/bin/env python3
# This script is used to re-package the esptool if needed and update the JSON file
# for the Arduino ESP32 platform.
#
# The script has only been tested on macOS.
#
# For regular esptool releases, the generated packages already contain the correct permissions,
# extensions and are uploaded to the GitHub release assets. In this case, the script will only
# update the JSON file with the information from the GitHub release.
#
# The script can be used in two modes:
# 1. Local build: The build artifacts must be already downloaded and extracted in the base_folder.
# This is useful for esptool versions that are not yet released and that are grabbed from the
# GitHub build artifacts.
# 2. Release build: The script will get the release information from GitHub and update the JSON file.
# This is useful for esptool versions that are already released and that are uploaded to the
# GitHub release assets.
#
# For local build, the artifacts must be already downloaded and extracted in the base_folder
# set with the -l option.
# For example, a base folder "esptool" should contain the following folders extracted directly
# from the GitHub build artifacts:
# esptool/esptool-linux-aarch64
# esptool/esptool-linux-amd64
# esptool/esptool-linux-armv7
# esptool/esptool-macos-amd64
# esptool/esptool-macos-arm64
# esptool/esptool-windows-amd64
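# Hypothetical invocations (the script file name below is a placeholder, not from this repo):
#   python3 repack_esptool.py 5.0.dev1 -l ./esptool   # local mode, artifacts unpacked under ./esptool
#   python3 repack_esptool.py 4.8.1                   # release mode, metadata taken from the GitHub release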
import argparse
import json
import os
import shutil
import stat
import tarfile
import zipfile
import hashlib
import requests
from pathlib import Path
def compute_sha256(filepath):
sha256 = hashlib.sha256()
with open(filepath, "rb") as f:
for block in iter(lambda: f.read(4096), b""):
sha256.update(block)
return f"SHA-256:{sha256.hexdigest()}"
def get_file_size(filepath):
return os.path.getsize(filepath)
def update_json_for_host(tmp_json_path, version, host, url, archiveFileName, checksum, size):
with open(tmp_json_path) as f:
data = json.load(f)
for pkg in data.get("packages", []):
for tool in pkg.get("tools", []):
if tool.get("name") == "esptool_py":
tool["version"] = version
if url is None:
# If the URL is not set, we need to find the old URL and update it
for system in tool.get("systems", []):
if system.get("host") == host:
url = system.get("url").replace(system.get("archiveFileName"), archiveFileName)
break
else:
print(f"No old URL found for host {host}. Using empty URL.")
url = ""
# Preserve existing systems order and update or append the new system
systems = tool.get("systems", [])
system_updated = False
for i, system in enumerate(systems):
if system.get("host") == host:
systems[i] = {
"host": host,
"url": url,
"archiveFileName": archiveFileName,
"checksum": checksum,
"size": str(size),
}
system_updated = True
break
if not system_updated:
systems.append({
"host": host,
"url": url,
"archiveFileName": archiveFileName,
"checksum": checksum,
"size": str(size),
})
tool["systems"] = systems
with open(tmp_json_path, "w") as f:
json.dump(data, f, indent=2, sort_keys=False, ensure_ascii=False)
f.write("\n")
def update_tools_dependencies(tmp_json_path, version):
with open(tmp_json_path) as f:
data = json.load(f)
for pkg in data.get("packages", []):
for platform in pkg.get("platforms", []):
for dep in platform.get("toolsDependencies", []):
if dep.get("name") == "esptool_py":
dep["version"] = version
with open(tmp_json_path, "w") as f:
json.dump(data, f, indent=2, sort_keys=False, ensure_ascii=False)
f.write("\n")
def create_archives(version, base_folder):
archive_files = []
for dirpath in Path(base_folder).glob("esptool-*"):
if not dirpath.is_dir():
continue
base = dirpath.name[len("esptool-"):]
if "windows" in dirpath.name:
zipfile_name = f"esptool-v{version}-{base}.zip"
print(f"Creating {zipfile_name} from {dirpath} ...")
with zipfile.ZipFile(zipfile_name, "w", zipfile.ZIP_DEFLATED) as zipf:
for root, _, files in os.walk(dirpath):
for file in files:
full_path = os.path.join(root, file)
zipf.write(full_path, os.path.relpath(full_path, start=dirpath))
archive_files.append(zipfile_name)
else:
tarfile_name = f"esptool-v{version}-{base}.tar.gz"
print(f"Creating {tarfile_name} from {dirpath} ...")
for root, dirs, files in os.walk(dirpath):
for name in dirs + files:
os.chmod(os.path.join(root, name), stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
stat.S_IRGRP | stat.S_IXGRP |
stat.S_IROTH | stat.S_IXOTH)
with tarfile.open(tarfile_name, "w:gz") as tar:
tar.add(dirpath, arcname=dirpath.name)
archive_files.append(tarfile_name)
return archive_files
def determine_hosts(archive_name):
if "linux-amd64" in archive_name:
return ["x86_64-pc-linux-gnu"]
elif "linux-armv7" in archive_name:
return ["arm-linux-gnueabihf"]
elif "linux-aarch64" in archive_name:
return ["aarch64-linux-gnu"]
elif "macos-amd64" in archive_name:
return ["x86_64-apple-darwin"]
elif "macos-arm64" in archive_name:
return ["arm64-apple-darwin"]
elif "windows-amd64" in archive_name:
return ["x86_64-mingw32", "i686-mingw32"]
else:
return []
def update_json_from_local_build(tmp_json_path, version, base_folder, archive_files):
for archive in archive_files:
print(f"Processing archive: {archive}")
hosts = determine_hosts(archive)
if not hosts:
print(f"Skipping unknown archive type: {archive}")
continue
archive_path = Path(archive)
checksum = compute_sha256(archive_path)
size = get_file_size(archive_path)
for host in hosts:
update_json_for_host(tmp_json_path, version, host, None, archive_path.name, checksum, size)
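# Release mode: take the download URL, size and digest directly from the GitHub release assets.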
def update_json_from_release(tmp_json_path, version, release_info):
assets = release_info.get("assets", [])
for asset in assets:
if (asset.get("name").endswith(".tar.gz") or asset.get("name").endswith(".zip")) and "esptool" in asset.get("name"):
asset_fname = asset.get("name")
print(f"Processing asset: {asset_fname}")
hosts = determine_hosts(asset_fname)
if not hosts:
print(f"Skipping unknown archive type: {asset_fname}")
continue
asset_url = asset.get("browser_download_url")
asset_checksum = asset.get("digest")
asset_size = asset.get("size")
# Some assets may not expose a digest; avoid calling .replace() on None
if asset_checksum is None:
asset_checksum = ""
print(f"Asset {asset_fname} has no checksum. Please set the checksum in the JSON file.")
else:
asset_checksum = asset_checksum.replace("sha256:", "SHA-256:")
for host in hosts:
update_json_for_host(tmp_json_path, version, host, asset_url, asset_fname, asset_checksum, asset_size)
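# Fetches the release metadata for the given esptool version tag from the GitHub API.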
def get_release_info(version):
url = f"https://api.github.com/repos/espressif/esptool/releases/tags/v{version}"
response = requests.get(url)
response.raise_for_status()
return response.json()
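# CLI entry point: copies the template JSON, updates it in local-build or release mode, then moves it back over the original.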
def main():
parser = argparse.ArgumentParser(description="Repack esptool and update JSON metadata.")
parser.add_argument("version", help="Version of the esptool (e.g. 5.0.dev1)")
parser.add_argument("-l", "--local", dest="base_folder", help="Enable local build mode and set the base folder with unpacked artifacts")
args = parser.parse_args()
script_dir = Path(__file__).resolve().parent
json_path = (script_dir / "../../package/package_esp32_index.template.json").resolve()
tmp_json_path = Path(str(json_path) + ".tmp")
shutil.copy(json_path, tmp_json_path)
local_build = args.base_folder is not None
if local_build:
os.chdir(args.base_folder)
os.environ['COPYFILE_DISABLE'] = 'true' # this disables including resource forks in tar files on macOS
# Clear any existing archive files
for file in Path(args.base_folder).glob("esptool-*.*"):
file.unlink()
archive_files = create_archives(args.version, args.base_folder)
update_json_from_local_build(tmp_json_path, args.version, args.base_folder, archive_files)
else:
release_info = get_release_info(args.version)
update_json_from_release(tmp_json_path, args.version, release_info)
print(f"Updating esptool version fields to {args.version}")
update_tools_dependencies(tmp_json_path, args.version)
shutil.move(tmp_json_path, json_path)
print(f"Done. JSON updated at {json_path}")
if __name__ == "__main__":
main()
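For orientation, a couple of illustrative invocations of the script added above (the file name repack_esptool.py and its location under .github/scripts/ are assumptions; the hunk does not show the path):

# Release mode (assumed script name): read URL, size and digest of each asset from the v5.0.dev1 GitHub release
python3 .github/scripts/repack_esptool.py 5.0.dev1
# Local mode: repack unpacked esptool-* folders and hash the archives locally; an absolute path avoids surprises since the script chdirs into it
python3 .github/scripts/repack_esptool.py 5.0.dev1 --local "$PWD/dist"
# Sanity check of the result: print the updated esptool_py tool entry from the package index template
jq '.packages[].tools[] | select(.name == "esptool_py")' package/package_esp32_index.template.json

In both modes the script edits a .tmp copy of package/package_esp32_index.template.json and only moves it over the original once every host entry and toolsDependencies version has been updated.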

View file

@ -1,12 +1,11 @@
#!/bin/bash
CHANGED_FILES=$1
echo "Pushing '$CHANGED_FILES' as github-actions[bot]"
git config --global github.user "github-actions[bot]"
git config --global user.name "github-actions[bot]"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
echo "Pushing '$CHANGED_FILES' as $GITHUB_ACTOR"
git config --global github.user "$GITHUB_ACTOR"
git config --global user.name "$GITHUB_ACTOR"
git config --global user.email "$GITHUB_ACTOR@users.noreply.github.com"
for tool in $CHANGED_FILES; do
git add tools/"$tool".exe
git add tools/$tool.exe
done
git commit -m "change(tools): Push generated binaries to PR"
git commit -m "Push binary to tools"
git push

View file

@ -1,6 +1,6 @@
name: Boards Test - Remote trigger
# The workflow will run on remote dispatch with event-type set to "test-boards"
# The workflow will run on remote dispath with event-type set to "test-boards"
on:
repository_dispatch:
types: [test-boards]
@ -15,12 +15,13 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v3
with:
ref: ${{ github.event.client_payload.branch }}
- name: Get boards fqbns
run: bash .github/scripts/find_all_boards.sh
run:
bash .github/scripts/find_all_boards.sh
setup-chunks:
needs: find-boards
@ -32,17 +33,18 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v3
with:
ref: ${{ github.event.client_payload.branch }}
- run: npm install
- name: Setup jq
uses: dcarbone/install-jq-action@e397bd87438d72198f81efd21f876461183d383a # v3.0.1
uses: dcarbone/install-jq-action@v1.0.1
- id: set-test-chunks
name: Set Chunks
run: echo "test-chunks<<EOF" >> $GITHUB_OUTPUT
run:
echo "test-chunks<<EOF" >> $GITHUB_OUTPUT
echo "$( jq -nc '${{ needs.find-boards.outputs.fqbns }} | [_nwise( ${{ needs.find-boards.outputs.board-count }}/15 | ceil)]')" >> $GITHUB_OUTPUT
@ -64,17 +66,18 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v3
with:
ref: ${{ github.event.client_payload.branch }}
- name: Echo FQBNS to file
run: echo "$FQBN" > fqbns.json
run:
echo "$FQBN" > fqbns.json
env:
FQBN: ${{ toJSON(matrix.chunk) }}
- name: Compile sketch
uses: P-R-O-C-H-Y/compile-sketches@a62f069b92dc8f5053da4ac439ea6d1950cf6379 # main
uses: P-R-O-C-H-Y/compile-sketches@main
with:
platforms: |
${{ env.REPOSITORY }}
@ -85,4 +88,5 @@ jobs:
enable-warnings-report: false
cli-compile-flags: |
- --warnings="all"
sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
sketch-paths:
"- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"

View file

@ -3,10 +3,6 @@ name: Boards Test
# The workflow will run on schedule and labeled pull requests
on:
pull_request:
paths:
- "boards.txt"
- "libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
- ".github/workflows/boards.yml"
env:
# It's convenient to set variables for values used multiple times in the workflow
@ -22,13 +18,14 @@ jobs:
steps:
# This step makes the contents of the repository available to the workflow
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v3
- name: Setup jq
uses: dcarbone/install-jq-action@e397bd87438d72198f81efd21f876461183d383a # v3.0.1
uses: dcarbone/install-jq-action@v1.0.1
- name: Get board name
run: bash .github/scripts/find_new_boards.sh ${{ github.repository }} ${{github.base_ref}}
run:
bash .github/scripts/find_new_boards.sh ${{ github.repository }} ${{github.event.number}}
test-boards:
needs: find-boards
@ -41,39 +38,15 @@ jobs:
name: "espressif:esp32"
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.find-boards.outputs.fqbns) }}
steps:
# This step makes the contents of the repository available to the workflow
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Check if build.board is uppercase
run: |
board_name=$(echo ${{ matrix.fqbn }} | awk -F':' '{print $NF}')
if grep -q "^$board_name.build.board=[A-Z0-9_]*$" boards.txt; then
echo "$board_name.build.board is valid.";
else
echo "Error: $board_name.build.board is not uppercase!";
exit 1;
fi
- name: Get libs cache
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
path: |
./tools/dist
./tools/esp32-arduino-libs
./tools/esptool
./tools/mk*
./tools/openocd-esp32
./tools/riscv32-*
./tools/xtensa-*
uses: actions/checkout@v3
- name: Compile sketch
uses: P-R-O-C-H-Y/compile-sketches@a62f069b92dc8f5053da4ac439ea6d1950cf6379 # main
uses: P-R-O-C-H-Y/compile-sketches@main
with:
platforms: |
${{ env.REPOSITORY }}
@ -84,5 +57,5 @@ jobs:
cli-compile-flags: |
- --warnings="all"
exit-on-fail: true
sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
verbose: true
sketch-paths:
"- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"

View file

@ -3,44 +3,35 @@ name: Build Python Tools
on:
pull_request:
paths:
- ".github/workflows/build_py_tools.yml"
- "tools/get.py"
- "tools/espota.py"
- "tools/gen_esp32part.py"
- "tools/gen_insights_package.py"
- 'tools/get.py'
- 'tools/espota.py'
- 'tools/gen_esp32part.py'
- 'tools/gen_insights_package.py'
jobs:
find-changed-tools:
name: Check if tools have been changed
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
outputs:
any_changed: ${{ steps.verify-changed-files.outputs.any_changed }}
all_changed_files: ${{ steps.verify-changed-files.outputs.all_changed_files }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v3
with:
fetch-depth: 2
ref: ${{ github.event.pull_request.head.ref }}
- name: Check if checkout failed
if: failure()
run: |
echo "Checkout failed."
echo "Make sure you are using a branch inside the repository and not a fork."
- name: Verify Python Tools Changed
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
uses: tj-actions/changed-files@v36
id: verify-changed-files
with:
fetch_depth: "2"
since_last_remote_commit: "true"
fetch_depth: '2'
since_last_remote_commit: 'true'
files: |
tools/get.py
tools/espota.py
tools/gen_esp32part.py
tools/gen_insights_package.py
- name: List all changed files
shell: bash
run: |
@ -56,21 +47,27 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [windows-latest, macos-latest, ubuntu-latest, ubuntu-24.04-arm]
os: [windows-latest, macos-latest, ubuntu-20.04, ARM, ARM64]
include:
- os: windows-latest
TARGET: win64
EXTEN: .exe
SEPARATOR: ";"
SEPARATOR: ';'
- os: macos-latest
TARGET: macos
SEPARATOR: ":"
- os: ubuntu-latest
SEPARATOR: ':'
- os: ubuntu-20.04
TARGET: linux-amd64
SEPARATOR: ":"
- os: ubuntu-24.04-arm
SEPARATOR: ':'
- os: ARM
CONTAINER: python:3.8-bullseye
TARGET: arm
SEPARATOR: ":"
SEPARATOR: ':'
- os: ARM64
CONTAINER: python:3.8-bullseye
TARGET: arm64
SEPARATOR: ':'
container: ${{ matrix.CONTAINER }} # use python container on ARM
env:
DISTPATH: pytools-${{ matrix.TARGET }}
PIP_EXTRA_INDEX_URL: "https://dl.espressif.com/pypi"
@ -89,30 +86,26 @@ jobs:
for tool in ${{ env.CHANGED_TOOLS }}; do
echo "tool $tool was changed"
done
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v3
with:
token: ${{ secrets.TOOLS_UPLOAD_PAT }}
ref: ${{ github.event.pull_request.head.ref }}
- name: Set up Python 3.8
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
# Skip setting python on ARM because of missing compatibility: https://github.com/actions/setup-python/issues/108
if: matrix.os != 'ARM' && matrix.os != 'ARM64'
uses: actions/setup-python@master
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pyinstaller requests
- name: Build with PyInstaller
shell: bash
run: |
for tool in ${{ env.CHANGED_TOOLS }}; do
pyinstaller --distpath ./${{ env.DISTPATH }} -F --icon=.github/pytools/espressif.ico tools/$tool.py
done
- name: Sign binaries
if: matrix.os == 'windows-latest'
env:
@ -125,14 +118,12 @@ jobs:
{
./.github/pytools/Sign-File.ps1 -Path ./${{ env.DISTPATH }}/$node.exe
}
- name: Test binaries
shell: bash
run: |
for tool in ${{ env.CHANGED_TOOLS }}; do
./${{ env.DISTPATH }}/$tool${{ matrix.EXTEN }} -h
done
- name: Push binary to tools
if: matrix.os == 'windows-latest'
env:
@ -143,9 +134,8 @@ jobs:
cp -f ./${{ env.DISTPATH }}/$tool.exe tools/$tool.exe
done
bash .github/scripts/upload_py_tools.sh "${{ env.CHANGED_TOOLS }}"
- name: Archive artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@master
with:
name: ${{ env.DISTPATH }}
path: ${{ env.DISTPATH }}

View file

@ -1,31 +0,0 @@
name: CodeQL Actions Analysis
on:
workflow_dispatch:
push:
branches:
- master
pull_request:
paths:
- ".github/workflows/*.yml"
- ".github/workflows/*.yaml"
jobs:
codeql-analysis:
name: CodeQL Actions Analysis
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: actions
config-file: ./.github/codeql/codeql-config.yml
- name: Run CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: "Analysis: Actions"

View file

@ -1,30 +0,0 @@
name: CodeQL Python Analysis
on:
workflow_dispatch:
push:
branches:
- master
pull_request:
paths:
- "**/*.py"
jobs:
codeql-analysis:
name: CodeQL Python Analysis
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: python
config-file: ./.github/codeql/codeql-config.yml
- name: Run CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: "Analysis: Python"

View file

@ -1,28 +0,0 @@
name: DangerJS Pull Request linter
on:
pull_request_target:
types: [opened, edited, reopened, synchronize]
permissions:
pull-requests: write
contents: write
jobs:
pull-request-style-linter:
runs-on: ubuntu-latest
steps:
- name: Check out PR head
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: DangerJS pull request linter
uses: espressif/shared-github-dangerjs@fb17367fd3e8ff7412603b8e946d9b19ffdb2d7f # v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
instructions-cla-link: "https://cla-assistant.io/espressif/arduino-esp32"
instructions-contributions-file: "docs/en/contributing.rst"
rule-max-commits: "false"
rule-target-branch: "false"
commit-messages-min-summary-length: "10"

.github/workflows/docs.yml vendored Normal file (+38 lines)
View file

@ -0,0 +1,38 @@
name: ReadTheDocs CI
on:
push:
branches:
- master
- release/*
paths:
- 'docs/**'
- '.github/workflows/docs.yml'
pull_request:
paths:
- 'docs/**'
- '.github/workflows/docs.yml'
jobs:
build-docs:
name: Build ReadTheDocs
runs-on: ubuntu-22.04
defaults:
run:
shell: bash
steps:
- uses: actions/checkout@v3
with:
submodules: true
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- name: Build
run: |
sudo apt update
sudo apt install python3-pip python3-setuptools
# GitHub CI installs pip3 and setuptools outside the path.
# Update the path to include them and run.
PATH=/home/runner/.local/bin:$PATH pip3 install --user -r ./docs/requirements.txt
cd ./docs && PATH=/home/runner/.local/bin:$PATH SPHINXOPTS="-W" make html

View file

@ -1,48 +0,0 @@
name: Documentation Build and Deploy CI
on:
push:
branches:
- master
- release/v2.x
paths:
- "docs/**"
- ".github/workflows/docs_build.yml"
pull_request:
paths:
- "docs/**"
- ".github/workflows/docs_build.yml"
jobs:
build-docs:
name: Build ESP-Docs
runs-on: ubuntu-22.04
defaults:
run:
shell: bash
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
submodules: true
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
cache-dependency-path: docs/requirements.txt
cache: "pip"
python-version: "3.10"
- name: Build
run: |
sudo apt update
sudo apt install python3-pip python3-setuptools
# GitHub CI installs pip3 and setuptools outside the path.
# Update the path to include them and run.
cd ./docs
PATH=/home/runner/.local/bin:$PATH pip3 install -r requirements.txt --prefer-binary
PATH=/home/runner/.local/bin:$PATH SPHINXOPTS="-W" build-docs -l en
- name: Archive Docs
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: docs
path: docs

View file

@ -1,62 +0,0 @@
name: Documentation Build and Production Deploy CI
on:
workflow_run:
workflows: ["ESP32 Arduino Release"]
types:
- completed
push:
branches:
- release/v2.x
- master
paths:
- "docs/**"
- ".github/workflows/docs_deploy.yml"
jobs:
deploy-prod-docs:
name: Deploy Documentation on Production
runs-on: ubuntu-22.04
defaults:
run:
shell: bash
steps:
- name: Check if release workflow is successful
if: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.conclusion != 'success' }}
run: |
echo "Release workflow failed. Exiting..."
exit 1
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
submodules: true
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
cache-dependency-path: docs/requirements.txt
cache: "pip"
python-version: "3.10"
- name: Deploy Documentation
env:
# Deploy to production server
# DOCS_BUILD_DIR: "./docs/_build/"
DOCS_DEPLOY_PRIVATEKEY: ${{ secrets.DOCS_KEY }}
DOCS_DEPLOY_PATH: ${{ secrets.DOCS_PATH }}
DOCS_DEPLOY_SERVER: ${{ secrets.DOCS_SERVER }}
DOCS_DEPLOY_SERVER_USER: ${{ secrets.DOCS_USER }}
DOCS_DEPLOY_URL_BASE: ${{ secrets.DOCS_URL }}
run: |
sudo apt update
sudo apt install python3-pip python3-setuptools
source ./docs/utils.sh
add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER
export GIT_VER=$(git describe --always)
echo "PIP install requirements..."
pip3 install --user -r ./docs/requirements.txt
echo "Building the Docs..."
cd ./docs && build-docs -l en
echo "Deploy the Docs..."
export DOCS_BUILD_DIR=$GITHUB_WORKSPACE/docs/
cd $GITHUB_WORKSPACE/docs
deploy-docs

View file

@ -6,18 +6,17 @@ on:
- master
- pages
paths:
- "README.md"
- ".github/scripts/on-pages.sh"
- ".github/workflows/gh-pages.yml"
- 'README.md'
- '.github/scripts/on-pages.sh'
- '.github/workflows/gh-pages.yml'
jobs:
build-pages:
name: Build GitHub Pages
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v3
- name: Copy Files
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/hil.yml vendored Normal file (+118 lines)
View file

@ -0,0 +1,118 @@
name: Run tests in hardware
on:
pull_request:
types: [opened, reopened, synchronize, labeled]
schedule:
- cron: '0 2 * * *'
env:
MAX_CHUNKS: 15
concurrency:
group: hil-${{github.event.pull_request.number || github.ref}}
cancel-in-progress: true
jobs:
gen_chunks:
if: |
contains(github.event.pull_request.labels.*.name, 'hil_test') ||
(github.event_name == 'schedule' && github.repository == 'espressif/arduino-esp32')
name: Generate Chunks matrix
runs-on: ubuntu-latest
outputs:
chunks: ${{ steps.gen-chunks.outputs.chunks }}
steps:
- name: Checkout Repository
uses: actions/checkout@v3
- name: Generate Chunks matrix
id: gen-chunks
run: |
set +e
.github/scripts/sketch_utils.sh count tests
sketches=$?
if [[ $sketches -ge ${{env.MAX_CHUNKS}} ]]; then
sketches=${{env.MAX_CHUNKS}}
fi
set -e
rm sketches.txt
CHUNKS=$(jq -c -n '$ARGS.positional' --args `seq 0 1 $((sketches - 1))`)
echo "chunks=${CHUNKS}" >>$GITHUB_OUTPUT
Build:
needs: gen_chunks
name: ${{matrix.chip}}-Build#${{matrix.chunks}}
runs-on: ubuntu-latest
strategy:
matrix:
chip: ['esp32', 'esp32s2', 'esp32s3', 'esp32c3', 'esp32c6', 'esp32h2']
chunks: ${{fromJson(needs.gen_chunks.outputs.chunks)}}
steps:
- name: Checkout Repository
uses: actions/checkout@v3
- name: Build sketches
run: |
bash .github/scripts/tests_build.sh -c -t ${{matrix.chip}} -i ${{matrix.chunks}} -m ${{env.MAX_CHUNKS}}
- name: Upload ${{matrix.chip}}-${{matrix.chunks}} artifacts
uses: actions/upload-artifact@v3
with:
name: ${{matrix.chip}}-${{matrix.chunks}}.artifacts
path: |
~/.arduino/tests/*/build*.tmp/*.bin
~/.arduino/tests/*/build*.tmp/*.json
if-no-files-found: error
Test:
needs: [gen_chunks, Build]
name: ${{matrix.chip}}-Test#${{matrix.chunks}}
strategy:
fail-fast: false
matrix:
chip: ['esp32', 'esp32s2', 'esp32s3', 'esp32c3', 'esp32c6', 'esp32h2']
chunks: ${{fromJson(needs.gen_chunks.outputs.chunks)}}
runs-on: [arduino, "${{matrix.chip}}"]
container:
image: python:3.10.1-bullseye
options: --privileged
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Download ${{matrix.chip}}-${{matrix.chunks}} artifacts
uses: actions/download-artifact@v3
with:
name: ${{matrix.chip}}-${{matrix.chunks}}.artifacts
path: ~/.arduino/tests/
- name: Install dependencies
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
apt update && apt install -y -qq jq
- name: Run Tests
run: |
bash .github/scripts/tests_run.sh -c -t ${{matrix.chip}} -i ${{matrix.chunks}} -m ${{env.MAX_CHUNKS}} -e
- name: Upload test result artifacts
uses: actions/upload-artifact@v3
if: always()
with:
name: test_results-${{matrix.chip}}-${{matrix.chunks}}
path: tests/*/*.xml
event_file:
name: "Event File"
if: |
contains(github.event.pull_request.labels.*.name, 'hil_test') ||
github.event_name == 'schedule'
needs: Test
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v3
with:
name: Event File
path: ${{github.event_path}}

View file

@ -9,8 +9,7 @@
{
"name": "ArduinoBLE",
"exclude_targets": [
"esp32s2",
"esp32p4"
"esp32s2"
],
"sketch_path": [
"~/Arduino/libraries/ArduinoBLE/examples/Central/Scan/Scan.ino"
@ -24,52 +23,16 @@
]
},
{
"source-url": "https://github.com/ESP32Async/ESPAsyncWebServer.git",
"source-url": "https://github.com/me-no-dev/ESPAsyncWebServer.git",
"required-libs": [
{"source-url": "https://github.com/ESP32Async/AsyncTCP.git"}
{"source-url": "https://github.com/me-no-dev/AsyncTCP.git"}
],
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/ESPAsyncWebServer/examples/Auth/Auth.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/CORS/CORS.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/CaptivePortal/CaptivePortal.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/CatchAllHandler/CatchAllHandler.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ChunkResponse/ChunkResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ChunkRetryResponse/ChunkRetryResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/EndBegin/EndBegin.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Filters/Filters.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/FlashResponse/FlashResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/HeaderManipulation/HeaderManipulation.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Headers/Headers.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Json/Json.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Logging/Logging.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/MessagePack/MessagePack.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Middleware/Middleware.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Params/Params.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/PartitionDownloader/PartitionDownloader.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/PerfTests/PerfTests.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/RateLimit/RateLimit.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Redirect/Redirect.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/RequestContinuation/RequestContinuation.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/RequestContinuationComplete/RequestContinuationComplete.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ResumableDownload/ResumableDownload.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Rewrite/Rewrite.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ServerSentEvents/ServerSentEvents.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ServerState/ServerState.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/SkipServerMiddleware/SkipServerMiddleware.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/SlowChunkResponse/SlowChunkResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/StaticFile/StaticFile.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Templates/Templates.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Upload/Upload.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/WebSocket/WebSocket.ino"
]
},
{
"name": "EthernetESP32",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/EthernetESP32/examples/LegacyEthernetTest/LegacyEthernetTest.ino",
"~/Arduino/libraries/EthernetESP32/examples/TwoEthernets/TwoEthernets.ino"
"~/Arduino/libraries/ESPAsyncWebServer/examples/ESP_AsyncFSBrowser/ESP_AsyncFSBrowser.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/regex_patterns/regex_patterns.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/simple_server/simple_server.ino"
]
},
{
@ -99,13 +62,5 @@
"sketch_path": [
"~/Arduino/libraries/WS2812FX/examples/ws2812fx_spi/ws2812fx_spi.ino"
]
},
{
"name": "ZACwire for TSic",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/ZACwire_for_TSic/examples/ReadingTwoTSICs/ReadingTwoTSICs.ino",
"~/Arduino/libraries/ZACwire_for_TSic/examples/ReadSingleTSIC206/ReadSingleTSIC206.ino"
]
}
]

View file

@ -7,11 +7,7 @@ on:
# Schedule weekly builds on every Sunday at 4 am
schedule:
- cron: "0 4 * * SUN"
concurrency:
group: libs-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
- cron: '0 4 * * SUN'
env:
# It's convenient to set variables for values used multiple times in the workflow
@ -27,6 +23,7 @@ jobs:
contains(github.event.pull_request.labels.*.name, 'lib_test') ||
(github.event_name == 'schedule' && github.repository == 'espressif/arduino-esp32')
runs-on: ubuntu-latest
env:
REPOSITORY: |
- source-path: '.'
@ -41,7 +38,6 @@ jobs:
- esp32s3
- esp32c6
- esp32h2
- esp32p4
include:
- target: esp32
@ -56,16 +52,15 @@ jobs:
fqbn: espressif:esp32:esp32c6
- target: esp32h2
fqbn: espressif:esp32:esp32h2
- target: esp32p4
fqbn: espressif:esp32:esp32p4
steps:
# This step makes the contents of the repository available to the workflow
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v3
- name: Compile sketch
uses: P-R-O-C-H-Y/compile-sketches@a62f069b92dc8f5053da4ac439ea6d1950cf6379 # main
uses: P-R-O-C-H-Y/compile-sketches@main
with:
platforms: |
${{ env.REPOSITORY }}
@ -80,9 +75,9 @@ jobs:
- --warnings="all"
- name: Upload artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v3
with:
name: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}-${{ matrix.target }}
name: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}
path: ${{ env.SKETCHES_REPORTS_PATH }}
report-to-file:
@ -92,54 +87,49 @@ jobs:
steps:
# Check out repository
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v3
with:
token: ${{ env.GITHUB_TOKEN }}
fetch-depth: "0"
fetch-depth: '0'
- name: Switch branch
run: git checkout remotes/origin/gh-pages
run:
git checkout remotes/origin/gh-pages
# This step is needed to get the size data produced by the compile jobs
- name: Download sketches reports artifact
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
uses: actions/download-artifact@v3
with:
pattern: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}-*
merge-multiple: true
name: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}
path: ${{ env.SKETCHES_REPORTS_PATH }}
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@4a79caa6dcc3579024293638b97156106edc588e # main
uses: P-R-O-C-H-Y/report-size-deltas@main
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
destination-file: ${{ env.RESULT_LIBRARY_TEST_FILE }}
- name: Append file with action URL
run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_LIBRARY_TEST_FILE }}
run:
echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_LIBRARY_TEST_FILE }}
- name: Push to github repo
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config user.name github-actions
git config user.email github-actions@github.com
git add ${{ env.RESULT_LIBRARY_TEST_FILE }}
git commit -m "Generated External Libraries Test Results"
git push origin HEAD:gh-pages
#Upload PR number as artifact
upload-pr-number:
name: Upload PR number
if: (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'lib_test'))
event_file:
name: "Event File"
if: |
contains(github.event.pull_request.labels.*.name, 'lib_test')
needs: compile-sketch
runs-on: ubuntu-latest
steps:
- name: Save the PR number in an artifact
shell: bash
env:
PR_NUM: ${{ github.event.number }}
run: echo $PR_NUM > pr_num.txt
- name: Upload PR number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- name: Upload
uses: actions/upload-artifact@v2
with:
name: pr_number
path: ./pr_num.txt
overwrite: true
name: Event File
path: ${{github.event_path}}

View file

@ -1,64 +0,0 @@
# This needs to be in a separate workflow because it requires higher permissions than the calling workflow
name: Report Pre-commit Check Status
on:
workflow_run:
workflows: [Pre-commit hooks]
types:
- completed
permissions:
statuses: write
jobs:
report-success:
name: Report pre-commit success
if: github.event.workflow_run.conclusion == 'success'
runs-on: ubuntu-latest
steps:
- name: Report success
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Pre-commit checks',
description: 'Pre-commit checks successful',
owner: owner,
repo: repo,
sha: sha,
state: 'success',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}'
})).data;
core.info(`${name} is ${state}`);
report-pending:
name: Report pre-commit pending
if: github.event.workflow_run.conclusion != 'success'
runs-on: ubuntu-latest
steps:
- name: Report pending
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Pre-commit checks',
description: 'The pre-commit checks need to be successful before merging',
owner: owner,
repo: repo,
sha: sha,
state: 'pending',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}'
})).data;
core.info(`${name} is ${state}`);

View file

@ -1,80 +0,0 @@
name: Pre-commit hooks
on:
workflow_dispatch:
push:
branches:
- master
pull_request:
types: [opened, reopened, synchronize, labeled]
concurrency:
group: pre-commit-${{github.event.pull_request.number || github.ref}}
cancel-in-progress: true
jobs:
lint:
if: |
github.event_name != 'pull_request' ||
contains(github.event.pull_request.labels.*.name, 'Status: Pending Merge') ||
contains(github.event.pull_request.labels.*.name, 'Re-trigger Pre-commit Hooks')
name: Check if fixes are needed
runs-on: ubuntu-latest
steps:
- name: Checkout latest commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 2
- name: Remove Label
if: contains(github.event.pull_request.labels.*.name, 'Re-trigger Pre-commit Hooks')
run: gh pr edit ${{ github.event.number }} --remove-label 'Re-trigger Pre-commit Hooks'
env:
GH_TOKEN: ${{ github.token }}
- name: Set up Python 3
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
cache-dependency-path: tools/pre-commit/requirements.txt
cache: "pip"
python-version: "3.x"
- name: Get Python version hash
run: |
echo "Using $(python -VV)"
echo "PY_HASH=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
- name: Restore pre-commit cache
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
id: restore-cache
with:
path: |
~/.cache/pre-commit
key: pre-commit-${{ env.PY_HASH }}-${{ hashFiles('.pre-commit-config.yaml', '.github/workflows/pre-commit.yml', 'tools/pre-commit/requirements.txt') }}
- name: Install python dependencies
run: python -m pip install -r tools/pre-commit/requirements.txt
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
- name: Run pre-commit hooks in changed files
run: pre-commit run --color=always --show-diff-on-failure --files ${{ steps.changed-files.outputs.all_changed_files }}
- name: Save pre-commit cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: ${{ always() && steps.restore-cache.outputs.cache-hit != 'true' }}
continue-on-error: true
with:
path: |
~/.cache/pre-commit
key: ${{ steps.restore-cache.outputs.cache-primary-key }}
- name: Push changes using pre-commit-ci-lite
uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0
# Only push changes in PRs
if: ${{ always() && github.event_name == 'pull_request' }}
with:
msg: "ci(pre-commit): Apply automatic fixes"

.github/workflows/publish.yml vendored Normal file (+38 lines)
View file

@ -0,0 +1,38 @@
name: Unit Test Results
on:
workflow_run:
workflows: [Run tests in hardware]
branches-ignore: [master]
types:
- completed
jobs:
unit-test-results:
name: Unit Test Results
runs-on: ubuntu-latest
if: |
github.event.workflow_run.event == 'pull_request' &&
(github.event.workflow_run.conclusion == 'success' ||
github.event.workflow_run.conclusion == 'failure')
steps:
- name: Download and Extract Artifacts
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
run: |
mkdir -p artifacts && cd artifacts
artifacts_url=${{ github.event.workflow_run.artifacts_url }}
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do
IFS=$'\t' read name url <<< "$artifact"
gh api $url > "$name.zip"
unzip -d "$name" "$name.zip"
done
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@v1
with:
commit: ${{ github.event.workflow_run.head_sha }}
event_file: artifacts/Event File/event.json
event_name: ${{ github.event.workflow_run.event }}
files: "artifacts/**/*.xml"

View file

@ -11,6 +11,7 @@ env:
# It's convenient to set variables for values used multiple times in the workflow
SKETCHES_REPORTS_PATH: artifacts/libraries-report
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
PR_EVENT_PATH: artifacts/Event File/event.json
jobs:
lib-test-results:
@ -24,33 +25,17 @@ jobs:
- name: Download and Extract Artifacts
run: |
mkdir -p artifacts && cd artifacts
mkdir -p libraries-report
mkdir -p workflows
artifacts_url=${{ github.event.workflow_run.artifacts_url }}
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do
IFS=$'\t' read name url <<< "$artifact"
gh api $url > "$name.zip"
unzip -j "$name.zip" -d "temp_$name"
if [[ "$name" == "pr_number" ]]; then
mv "temp_$name"/* workflows
else
mv "temp_$name"/* libraries-report
fi
rm -r "temp_$name"
unzip -d "$name" "$name.zip"
done
echo "Contents of parent directory:"
ls -R ..
- name: Read the pr_num file
id: pr_num_reader
uses: juliangruber/read-file-action@b549046febe0fe86f8cb4f93c24e284433f9ab58 # v1.1.7
with:
path: ./artifacts/workflows/pr_num.txt
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@256d1f13e4195cd7fd436d2f959e6dc4d5e4b406 # libs
uses: P-R-O-C-H-Y/report-size-deltas@main
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
github-token: ${{ env.GITHUB_TOKEN }}
pr-number: "${{ steps.pr_num_reader.outputs.content }}"
pr-event-path: ${{ env.PR_EVENT_PATH }}

View file

@ -1,52 +0,0 @@
name: Sizes Results (master-v2.x)
on:
workflow_dispatch:
env:
# It's convenient to set variables for values used multiple times in the workflow
SKETCHES_REPORTS_PATH: artifacts/sizes-report
RESULT_SIZES_TEST_FILE: SIZES_TEST.md
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
jobs:
sizes-test-results:
name: Sizes Comparison Results
runs-on: ubuntu-latest
steps:
- name: Checkout gh-pages branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: gh-pages
- name: Create folder structure
run: |
mkdir -p artifacts && cd artifacts
mkdir -p sizes-report
mkdir -p sizes-report/master
mkdir -p sizes-report/pr
# master folder is a base for comparison
# pr folder is for comparison with master
- name: Download JSON file
run: |
mv master_cli_compile/*.json artifacts/sizes-report/pr/
mv v2.x_cli_compile/*.json artifacts/sizes-report/master/
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@2043188c68f483a7b50527c4eacf609d05bb67a5 # sizes_v2
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
github-token: ${{ env.GITHUB_TOKEN }}
destination-file: ${{ env.RESULT_SIZES_TEST_FILE }}
- name: Append file with action URL
run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_SIZES_TEST_FILE }}
- name: Push to github repo
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add ${{ env.RESULT_SIZES_TEST_FILE }}
git commit -m "Generated Sizes Results (master-v2.x)"
git push origin HEAD:gh-pages

View file

@ -1,73 +0,0 @@
name: Sizes Results
on:
workflow_run:
workflows: [Compilation Tests]
types:
- completed
workflow_dispatch:
env:
# It's convenient to set variables for values used multiple times in the workflow
SKETCHES_REPORTS_PATH: artifacts/sizes-report
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
jobs:
sizes-test-results:
name: Sizes Comparison Results
runs-on: ubuntu-latest
if: |
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
steps:
- name: Checkout gh-pages branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: gh-pages
- name: Create folder structure
run: |
mkdir -p artifacts && cd artifacts
mkdir -p sizes-report
mkdir -p sizes-report/master
mkdir -p sizes-report/pr
- name: Download JSON file
run: |
mv master_cli_compile/*.json artifacts/sizes-report/master/
- name: Download and Extract Artifacts
run: |
cd artifacts
artifacts_url=${{ github.event.workflow_run.artifacts_url }}
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do
IFS=$'\t' read name url <<< "$artifact"
# Only process pr_number and pr_cli_compile artifacts
if [[ "$name" == "pr_number" || "$name" =~ ^pr_cli_compile_[0-9]+$ ]]; then
gh api $url > "$name.zip"
unzip -o -j "$name.zip" -d "temp_$name"
if [[ "$name" == "pr_number" ]]; then
mv "temp_$name"/* sizes-report
elif [[ "$name" =~ ^pr_cli_compile_[0-9]+$ ]]; then
mv "temp_$name"/* sizes-report/pr
fi
rm -r "temp_$name"
fi
done
echo "Contents of parent directory:"
ls -R ..
- name: Read the pr_num file
id: pr_num_reader
uses: juliangruber/read-file-action@b549046febe0fe86f8cb4f93c24e284433f9ab58 # v1.1.7
with:
path: ./artifacts/sizes-report/pr_num.txt
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@bea91d2c99ca80c88a883b39b1c4012f00ec3d09 # sizes_v2
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
github-token: ${{ env.GITHUB_TOKEN }}
pr-number: "${{ steps.pr_num_reader.outputs.content }}"

View file

@ -1,224 +1,56 @@
name: Compilation Tests
name: ESP32 Arduino CI
on:
workflow_dispatch:
inputs:
log_level:
description: "Log level"
default: "none"
type: "choice"
required: true
options:
- "none"
- "error"
- "warn"
- "info"
- "debug"
- "verbose"
schedule:
# Every Sunday at 2:00 UTC run a build with verbose log level
- cron: "0 2 * * SUN"
push:
branches:
- master
- release/*
pull_request:
paths:
- "cores/**"
- "libraries/**"
- "!libraries/**.md"
- "!libraries/**.txt"
- "!libraries/**.properties"
- "!libraries/**.py"
- "package/**"
- "idf_component_examples/**"
- "tools/**.py"
- "platform.txt"
- "programmers.txt"
- "idf_component.yml"
- "Kconfig.projbuild"
- "package.json"
- "CMakeLists.txt"
- ".github/workflows/push.yml"
- ".github/scripts/**"
- "!.github/scripts/find_*"
- "!.github/scripts/on-release.sh"
- "!.github/scripts/tests_*"
- "!.github/scripts/upload_*"
- "variants/esp32/**/*"
- "variants/esp32c3/**/*"
- "variants/esp32c5/**/*"
- "variants/esp32c6/**/*"
- "variants/esp32h2/**/*"
- "variants/esp32p4/**/*"
- "variants/esp32s2/**/*"
- "variants/esp32s3/**/*"
concurrency:
group: build-${{github.event.pull_request.number || github.ref}}
cancel-in-progress: true
env:
MAX_CHUNKS: 15
jobs:
cmake-check:
name: Check cmake file
runs-on: ubuntu-latest
if: ${{ !(github.event_name == 'pull_request' && startsWith(github.head_ref, 'release/')) }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v3
- run: bash ./.github/scripts/check-cmakelists.sh
gen-chunks:
name: Generate chunks
runs-on: ubuntu-latest
if: ${{ !(github.event_name == 'pull_request' && startsWith(github.head_ref, 'release/')) }}
outputs:
build_all: ${{ steps.set-chunks.outputs.build_all }}
build_libraries: ${{ steps.set-chunks.outputs.build_libraries }}
build_static_sketches: ${{ steps.set-chunks.outputs.build_static_sketches }}
build_idf: ${{ steps.set-chunks.outputs.build_idf }}
chunk_count: ${{ steps.set-chunks.outputs.chunk_count }}
chunks: ${{ steps.set-chunks.outputs.chunks }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 2
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
with:
files_yaml: |
core:
- '.github/**'
- 'cores/**'
- 'package/**'
- 'tools/**'
- 'platform.txt'
- 'programmers.txt'
- "variants/esp32/**/*"
- "variants/esp32c3/**/*"
- "variants/esp32c6/**/*"
- "variants/esp32h2/**/*"
- "variants/esp32p4/**/*"
- "variants/esp32s2/**/*"
- "variants/esp32s3/**/*"
libraries:
- 'libraries/**/examples/**'
- 'libraries/**/src/**'
networking:
- 'libraries/Network/src/**'
fs:
- 'libraries/FS/src/**'
static_sketeches:
- 'libraries/NetworkClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino'
- 'libraries/BLE/examples/Server/Server.ino'
- 'libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino'
- 'libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino'
- 'libraries/NetworkClientSecure/src/**'
- 'libraries/BLE/src/**'
- 'libraries/Insights/src/**'
idf:
- 'idf_component.yml'
- 'Kconfig.projbuild'
- 'CMakeLists.txt'
- "idf_component_examples/**"
- name: Set chunks
id: set-chunks
env:
LIB_FILES: ${{ steps.changed-files.outputs.libraries_all_changed_files }}
IS_PR: ${{ github.event_name == 'pull_request' }}
MAX_CHUNKS: ${{ env.MAX_CHUNKS }}
BUILD_IDF: ${{ steps.changed-files.outputs.idf_any_changed == 'true' }}
BUILD_LIBRARIES: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
BUILD_STATIC_SKETCHES: ${{ steps.changed-files.outputs.static_sketeches_any_changed == 'true' }}
FS_CHANGED: ${{ steps.changed-files.outputs.fs_any_changed == 'true' }}
NETWORKING_CHANGED: ${{ steps.changed-files.outputs.networking_any_changed == 'true' }}
CORE_CHANGED: ${{ steps.changed-files.outputs.core_any_changed == 'true' }}
LIB_CHANGED: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
run: |
bash ./.github/scripts/set_push_chunks.sh
- name: Upload sketches found
if: ${{ steps.set-chunks.outputs.build_all == 'false' && steps.set-chunks.outputs.build_libraries == 'true' }}
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: sketches_found
path: sketches_found.txt
overwrite: true
if-no-files-found: error
# Ubuntu
build-arduino-linux:
name: Arduino ${{ matrix.chunk }} on ubuntu-latest
if: ${{ needs.gen-chunks.outputs.build_all == 'true' || needs.gen-chunks.outputs.build_libraries == 'true' }}
needs: gen-chunks
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
chunk: ${{ fromJson(needs.gen-chunks.outputs.chunks) }}
chunk: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.x"
- name: Get libs cache
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
python-version: '3.x'
- name: Cache tools
id: cache-linux
uses: actions/cache@v3
with:
key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
path: |
./tools/dist
./tools/esp32-arduino-libs
./tools/esptool
./tools/mk*
./tools/openocd-esp32
./tools/riscv32-*
./tools/xtensa-*
- name: Set Log Level
run: |
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo "LOG_LEVEL=${{ github.event.inputs.log_level }}" >> $GITHUB_ENV
elif [ "${{ github.event_name }}" == "schedule" ]; then
echo "LOG_LEVEL=verbose" >> $GITHUB_ENV
else
echo "LOG_LEVEL=none" >> $GITHUB_ENV
fi
- name: Build all sketches
if: ${{ needs.gen-chunks.outputs.build_all == 'true' }}
run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ env.MAX_CHUNKS }} 1 ${{ env.LOG_LEVEL }}
- name: Download sketches found
if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
name: sketches_found
- name: Build selected sketches
if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ needs.gen-chunks.outputs.chunk_count }} 1 ${{ env.LOG_LEVEL }} sketches_found.txt
#Upload cli compile json as artifact
- name: Upload cli compile json
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr_cli_compile_${{ matrix.chunk }}
path: cli_compile_${{ matrix.chunk }}.json
overwrite: true
~/arduino_ide
key: ${{ runner.os }}-${{ hashFiles('package/package_esp32_index.template.json',
'tools/get.py',
'.github/scripts/install-arduino-ide.sh') }}
- name: Build Sketches
run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} 15
# Windows and MacOS
build-arduino-win-mac:
name: Arduino on ${{ matrix.os }}
needs: gen-chunks
if: ${{ needs.gen-chunks.outputs.build_all == 'true' || needs.gen-chunks.outputs.build_static_sketches == 'true' }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
@ -226,21 +58,33 @@ jobs:
os: [windows-latest, macOS-latest]
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.x"
python-version: '3.x'
- name: Build Sketches
run: bash ./.github/scripts/on-push.sh
# PlatformIO on Windows, Ubuntu and Mac
build-platformio:
name: PlatformIO on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Build Sketches
run: bash ./.github/scripts/on-push.sh 1 1 #equal and non-zero to trigger PIO
build-esp-idf-component:
name: Build with ESP-IDF ${{ matrix.idf_ver }} for ${{ matrix.idf_target }}
needs: gen-chunks
if: |
needs.gen-chunks.outputs.build_all == 'true' ||
needs.gen-chunks.outputs.build_libraries == 'true' ||
needs.gen-chunks.outputs.build_idf == 'true'
runs-on: ubuntu-latest
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
@ -248,96 +92,21 @@ jobs:
# See https://hub.docker.com/r/espressif/idf/tags and
# https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-guides/tools/idf-docker-image.html
# for details.
idf_ver: ["release-v5.3","release-v5.4","release-v5.5"]
idf_target:
[
"esp32",
"esp32s2",
"esp32s3",
"esp32c2",
"esp32c3",
"esp32c6",
"esp32h2",
"esp32p4"
]
idf_ver: ["release-v5.1"]
idf_target: ["esp32", "esp32s2", "esp32s3", "esp32c3", "esp32c6", "esp32h2"]
container: espressif/idf:${{ matrix.idf_ver }}
steps:
- name: Check out arduino-esp32 as a component
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v3
with:
submodules: recursive
path: components/arduino-esp32
- name: Setup jq
uses: dcarbone/install-jq-action@e397bd87438d72198f81efd21f876461183d383a # v3.0.1
- name: Build
env:
IDF_TARGET: ${{ matrix.idf_target }}
shell: bash
run: |
chmod a+x ./components/arduino-esp32/.github/scripts/*
./components/arduino-esp32/.github/scripts/on-push-idf.sh
- name: Upload generated sdkconfig files for debugging
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: sdkconfig-${{ matrix.idf_ver }}-${{ matrix.idf_target }}
path: ./components/arduino-esp32/idf_component_examples/**/sdkconfig
# Save artifacts to gh-pages
save-master-artifacts:
name: Save master artifacts
needs: build-arduino-linux
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
runs-on: ubuntu-latest
steps:
# Check out repository
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
token: ${{secrets.GITHUB_TOKEN}}
fetch-depth: "0"
- name: Switch branch
run: git checkout remotes/origin/gh-pages
- name: Download sketches reports artifact
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
pattern: pr_cli_compile_*
merge-multiple: true
path: master_cli_compile
- name: List files in the directory
run: ls -R
- name: Commit json files to gh-pages if on master
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
continue-on-error: true
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add --all
git commit -m "Updated cli compile json files"
git push origin HEAD:gh-pages
#Upload PR number as artifact
upload-pr-number:
name: Upload PR number
if: ${{ github.event_name == 'pull_request' && !startsWith(github.head_ref, 'release/') }}
runs-on: ubuntu-latest
steps:
- name: Save the PR number in an artifact
shell: bash
env:
PR_NUM: ${{ github.event.number }}
run: echo $PR_NUM > pr_num.txt
- name: Upload PR number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr_number
path: ./pr_num.txt
overwrite: true
. ${IDF_PATH}/export.sh
idf.py create-project test
echo CONFIG_FREERTOS_HZ=1000 > test/sdkconfig.defaults
idf.py -C test -DEXTRA_COMPONENT_DIRS=$PWD/components build

View file

@ -10,22 +10,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
- uses: actions/setup-python@v4
with:
python-version: "3.x"
- name: Install packaging
run: pip install packaging
- name: Install pyserial
run: pip install pyserial
python-version: '3.x'
- name: Build Release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View file

@ -1,123 +0,0 @@
# This file is used to run the runtime tests for the Arduino core for ESP32.
# The tests are run on real hardware and on the Wokwi and QEMU emulators.
# The QEMU tests are disabled for now as they are redundant with most of the Wokwi tests.
# As the Wokwi tests require access to secrets, they are run in a separate workflow.
# We need to ensure that the artifacts from previous tests in the chain are propagated for publishing the results.
# This is the current trigger sequence for the tests:
# tests.yml -> tests_wokwi.yml -> tests_results.yml
# ⌙> tests_build.yml
# ⌙> tests_hw.yml
# ⌙> tests_qemu.yml
name: Runtime Tests
on:
workflow_dispatch:
pull_request:
types: [opened, reopened, closed, synchronize, labeled, unlabeled]
paths:
- ".github/workflows/tests*"
- ".github/scripts/*.sh"
- "!.github/scripts/check-cmakelists.sh"
- "!.github/scripts/find_*"
- "!.github/scripts/on-*.sh"
- "!.github/scripts/set_push_chunks.sh"
- "!.github/scripts/update-version.sh"
- "!.github/scripts/upload_py_tools.sh"
- "tests/**"
- "cores/**"
- "libraries/*/src/**.cpp"
- "libraries/*/src/**.h"
- "libraries/*/src/**.c"
- "package/**"
schedule:
- cron: "0 2 * * *"
concurrency:
group: tests-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
push-event-file:
name: Push event file
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: event_file
path: ${{ github.event_path }}
gen-matrix:
name: Generate matrix
runs-on: ubuntu-latest
outputs:
build-types: ${{ steps.set-matrix.outputs.build-types }}
hw-types: ${{ steps.set-matrix.outputs.hw-types }}
wokwi-types: ${{ steps.set-matrix.outputs.wokwi-types }}
qemu-types: ${{ steps.set-matrix.outputs.qemu-types }}
targets: ${{ steps.set-matrix.outputs.targets }}
env:
IS_PR: ${{ github.event.pull_request.number != null }}
PERFORMANCE_ENABLED: ${{ contains(github.event.pull_request.labels.*.name, 'perf_test') }}
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
sparse-checkout: .github/scripts/tests_matrix.sh
- name: Set matrix
id: set-matrix
run: bash .github/scripts/tests_matrix.sh
- name: Upload
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: matrix_info
path: info/*
call-build-tests:
name: Build
uses: ./.github/workflows/tests_build.yml
needs: gen-matrix
strategy:
matrix:
type: ${{ fromJson(needs.gen-matrix.outputs.build-types) }}
chip: ${{ fromJson(needs.gen-matrix.outputs.targets) }}
with:
type: ${{ matrix.type }}
chip: ${{ matrix.chip }}
call-hardware-tests:
name: Hardware
uses: ./.github/workflows/tests_hw.yml
needs: [gen-matrix, call-build-tests]
if: |
github.repository == 'espressif/arduino-esp32' &&
(github.event_name != 'pull_request' ||
contains(github.event.pull_request.labels.*.name, 'hil_test'))
strategy:
fail-fast: false
matrix:
type: ${{ fromJson(needs.gen-matrix.outputs.hw-types) }}
chip: ${{ fromJson(needs.gen-matrix.outputs.targets) }}
with:
type: ${{ matrix.type }}
chip: ${{ matrix.chip }}
# This job is disabled for now
call-qemu-tests:
name: QEMU
uses: ./.github/workflows/tests_qemu.yml
needs: [gen-matrix, call-build-tests]
if: false
strategy:
fail-fast: false
matrix:
type: ${{ fromJson(needs.gen-matrix.outputs.qemu-types) }}
chip: ["esp32", "esp32c3"]
with:
type: ${{ matrix.type }}
chip: ${{ matrix.chip }}
# Wokwi tests are run after this workflow as it needs access to secrets

View file

@ -1,90 +0,0 @@
name: Build tests
on:
workflow_call:
inputs:
type:
type: string
description: "Type of tests to build"
required: true
chip:
type: string
description: "Chip to build tests for"
required: true
jobs:
build-tests:
name: Build ${{ inputs.type }} tests for ${{ inputs.chip }}
runs-on: ubuntu-latest
env:
id: ${{ github.event.pull_request.number || github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}-${{ inputs.chip }}-${{ inputs.type }}
steps:
- name: Check if already built
id: cache-build-binaries
if: github.event.pull_request.number != null
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-bin
path: |
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.bin
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.elf
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.json
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/sdkconfig
- name: Evaluate if tests should be built
id: check-build
run: |
cache_exists=${{ steps.cache-build-binaries.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already built, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
- name: Checkout user repository
if: ${{ steps.check-build.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Get libs cache
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: ${{ steps.check-build.outputs.enabled == 'true' }}
with:
key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
path: |
./tools/dist
./tools/esp32-arduino-libs
./tools/esptool
./tools/mk*
./tools/openocd-esp32
./tools/riscv32-*
./tools/xtensa-*
- name: Build sketches
if: ${{ steps.check-build.outputs.enabled == 'true' }}
run: |
bash .github/scripts/tests_build.sh -c -type ${{ inputs.type }} -t ${{ inputs.chip }}
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} binaries as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-build.outputs.enabled == 'true' && github.event.pull_request.number != null
with:
key: tests-${{ env.id }}-bin
path: |
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.bin
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.elf
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.json
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/sdkconfig
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} binaries as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: tests-bin-${{ inputs.chip }}-${{ inputs.type }}
overwrite: true
path: |
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.bin
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.elf
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.json
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/sdkconfig
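The job above builds each chip/type combination once per commit: the binaries are cached under a key derived from the PR number (or ref), the head SHA, the chip and the test type, and are also published as a tests-bin-<chip>-<type> artifact that the hardware, QEMU and Wokwi jobs later download. For local debugging, a sketch of pulling one of those artifacts with the GitHub CLI; the run id and the "validation" type are placeholders, since the real type names come from .github/scripts/tests_matrix.sh and are not part of this diff:

  # Hypothetical example: fetch the esp32 binaries of one test type from a finished run.
  # Requires an authenticated gh; replace the run id and the type with real values.
  gh run download 1234567890 \
    --repo espressif/arduino-esp32 \
    --name tests-bin-esp32-validation \
    --dir ~/.arduino/tests/esp32

The download path mirrors where the workflow itself places the binaries before calling tests_run.sh.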

View file

@ -1,118 +0,0 @@
name: Hardware tests
on:
workflow_call:
inputs:
type:
type: string
description: "Type of tests to run"
required: true
chip:
type: string
description: "Chip to run tests for"
required: true
env:
DEBIAN_FRONTEND: noninteractive
defaults:
run:
shell: bash
jobs:
hardware-test:
name: Hardware ${{ inputs.chip }} ${{ inputs.type }} tests
runs-on: ["arduino", "${{ inputs.chip }}"]
env:
id: ${{ github.event.pull_request.number || github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}-${{ inputs.chip }}-${{ inputs.type }}
container:
image: python:3.10.1-bullseye
options: --privileged --device-cgroup-rule="c 188:* rmw" --device-cgroup-rule="c 166:* rmw"
steps:
- name: Clean workspace
run: |
rm -rf ./*
rm -rf ~/.arduino/tests
- name: Check if already passed
id: cache-results
if: github.event.pull_request.number != null
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-results-hw
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Evaluate if tests should be run
id: check-tests
run: |
cache_exists=${{ steps.cache-results.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already ran, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
- name: Checkout user repository
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
sparse-checkout: |
*
# setup-python currently only works on ubuntu images
# - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
# if: ${{ steps.check-tests.outputs.enabled == 'true' }}
# with:
# cache-dependency-path: tests/requirements.txt
# cache: 'pip'
# python-version: '3.10.1'
- name: Install dependencies
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
apt update
apt install -y jq
- name: Get binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
name: tests-bin-${{ inputs.chip }}-${{ inputs.type }}
path: |
~/.arduino/tests/${{ inputs.chip }}
- name: List binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
ls -laR ~/.arduino/tests
- name: Run Tests
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
bash .github/scripts/tests_run.sh -c -type ${{ inputs.type }} -t ${{ inputs.chip }} -i 0 -m 1 -e
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} hardware results as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-tests.outputs.enabled == 'true' && github.event.pull_request.number != null
with:
key: tests-${{ env.id }}-results-hw
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} hardware results as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: tests-results-hw-${{ inputs.chip }}-${{ inputs.type }}
overwrite: true
path: |
tests/**/*.xml
tests/**/result_*.json

View file

@ -1,143 +0,0 @@
name: QEMU tests
on:
workflow_call:
inputs:
chip:
required: true
type: string
type:
required: true
type: string
jobs:
qemu-test:
name: QEMU ${{ inputs.chip }} ${{ inputs.type }} tests
env:
id: ${{ github.event.pull_request.number || github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}-${{ inputs.chip }}-${{ inputs.type }}
QEMU_INSTALL_PATH: "$HOME"
runs-on: ubuntu-latest
steps:
- name: Check if already passed
id: get-cache-results
if: github.event.pull_request.number != null
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-results-qemu
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Evaluate if tests should be run
id: check-tests
run: |
cache_exists=${{ steps.get-cache-results.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already ran, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
- name: Checkout user repository
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
sparse-checkout-cone-mode: false
sparse-checkout: |
/*
!.github
# To avoid giving unknown scripts elevated permissions, download them from the master branch
- name: Get CI scripts from master
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
mkdir -p .github
cd .github
curl https://codeload.github.com/${{ github.repository }}/tar.gz/master | tar -xz --strip=2 arduino-esp32-master/.github
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
cache-dependency-path: tests/requirements.txt
cache: "pip"
python-version: "3.x"
- name: Install Python dependencies
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
- name: Install APT dependencies
uses: awalsh128/cache-apt-pkgs-action@5902b33ae29014e6ca012c5d8025d4346556bd40 # v1.4.3
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
packages: libpixman-1-0 libnuma1 libglib2.0-0 libslirp0 libsdl2-2.0-0
version: 1.0
- name: Get QEMU version
uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
id: get-qemu-version
with:
token: ${{secrets.GITHUB_TOKEN}}
owner: espressif
repo: qemu
excludes: prerelease, draft
- name: Cache QEMU
id: cache-qemu
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
path: |
~/qemu
key: qemu-${{ steps.get-qemu-version.outputs.release }}-${{ hashFiles('.github/workflows/tests_qemu.yml') }}
- name: Download QEMU
if: ${{ steps.cache-qemu.outputs.cache-hit != 'true' && steps.check-tests.outputs.enabled == 'true' }}
run: |
cd ${{ env.QEMU_INSTALL_PATH }}
underscore_release=$(echo ${{ steps.get-qemu-version.outputs.release }} | sed 's/\-/_/g')
curl -L https://github.com/espressif/qemu/releases/download/${{ steps.get-qemu-version.outputs.release }}/qemu-riscv32-softmmu-${underscore_release}-x86_64-linux-gnu.tar.xz > qemu-riscv32.tar.xz
curl -L https://github.com/espressif/qemu/releases/download/${{ steps.get-qemu-version.outputs.release }}/qemu-xtensa-softmmu-${underscore_release}-x86_64-linux-gnu.tar.xz > qemu-xtensa.tar.xz
tar -xf qemu-riscv32.tar.xz
tar -xf qemu-xtensa.tar.xz
rm qemu-*
echo "QEMU_PATH=${{ env.QEMU_INSTALL_PATH }}/qemu" >> $GITHUB_ENV
- name: Get binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
name: tests-bin-${{ inputs.chip }}-${{ inputs.type }}
path: |
~/.arduino/tests/${{ inputs.chip }}
- name: Run Tests
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: QEMU_PATH="${{ env.QEMU_INSTALL_PATH }}" bash .github/scripts/tests_run.sh -c -type ${{inputs.type}} -t ${{inputs.chip}} -i 0 -m 1 -Q
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} QEMU results as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-tests.outputs.enabled == 'true' && github.event.pull_request.number != null
with:
key: tests-${{ env.id }}-results-qemu
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} QEMU results as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: tests-results-qemu-${{ inputs.chip }}-${{ inputs.type }}
overwrite: true
path: |
tests/**/*.xml
tests/**/result_*.json

View file

@ -1,195 +0,0 @@
name: Publish and clean test results
on:
workflow_run:
workflows: ["Wokwi tests"]
types:
- completed
# No permissions by default
permissions: { contents: read }
jobs:
unit-test-results:
name: Unit Test Results
if: |
github.event.workflow_run.conclusion == 'success' ||
github.event.workflow_run.conclusion == 'failure' ||
github.event.workflow_run.conclusion == 'timed_out'
runs-on: ubuntu-latest
permissions:
actions: write
statuses: write
checks: write
pull-requests: write
contents: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: gh-pages
- name: Download and Extract Artifacts
uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9
with:
run_id: ${{ github.event.workflow_run.id }}
path: ./artifacts
- name: Get original info
run: |
original_event=$(cat ./artifacts/parent-artifacts/event.txt)
original_action=$(cat ./artifacts/parent-artifacts/action.txt)
original_sha=$(cat ./artifacts/parent-artifacts/sha.txt)
original_ref=$(cat ./artifacts/parent-artifacts/ref.txt)
original_conclusion=$(cat ./artifacts/parent-artifacts/conclusion.txt)
original_run_id=$(cat ./artifacts/parent-artifacts/run_id.txt)
# Sanitize the values to avoid security issues
# Event: Allow alphabetical characters and underscores
original_event=$(echo "$original_event" | tr -cd '[:alpha:]_')
# Action: Allow alphabetical characters and underscores
original_action=$(echo "$original_action" | tr -cd '[:alpha:]_')
# SHA: Allow alphanumeric characters
original_sha=$(echo "$original_sha" | tr -cd '[:alnum:]')
# Ref: Allow alphanumeric characters, slashes, underscores, dots, and dashes
original_ref=$(echo "$original_ref" | tr -cd '[:alnum:]/_.-')
# Conclusion: Allow alphabetical characters and underscores
original_conclusion=$(echo "$original_conclusion" | tr -cd '[:alpha:]_')
# Run ID: Allow numeric characters
original_run_id=$(echo "$original_run_id" | tr -cd '[:digit:]')
echo "original_event=$original_event" >> $GITHUB_ENV
echo "original_action=$original_action" >> $GITHUB_ENV
echo "original_sha=$original_sha" >> $GITHUB_ENV
echo "original_ref=$original_ref" >> $GITHUB_ENV
echo "original_conclusion=$original_conclusion" >> $GITHUB_ENV
echo "original_run_id=$original_run_id" >> $GITHUB_ENV
echo "original_event = $original_event"
echo "original_action = $original_action"
echo "original_sha = $original_sha"
echo "original_ref = $original_ref"
echo "original_conclusion = $original_conclusion"
echo "original_run_id = $original_run_id"
- name: Print links to other runs
run: |
echo "Build, Hardware and QEMU tests: https://github.com/${{ github.repository }}/actions/runs/${{ env.original_run_id }}"
echo "Wokwi tests: https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}"
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@170bf24d20d201b842d7a52403b73ed297e6645b # v2.18.0
with:
commit: ${{ env.original_sha }}
event_file: ./artifacts/parent-artifacts/event_file/event.json
event_name: ${{ env.original_event }}
files: ./artifacts/**/*.xml
action_fail: true
compare_to_earlier_commit: false
json_file: ./unity_results.json
json_suite_details: true
- name: Upload JSON
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: ${{ always() }}
with:
name: unity_results
overwrite: true
path: |
./unity_results.json
- name: Fail if tests failed
if: ${{ env.original_conclusion == 'failure' || env.original_conclusion == 'timed_out' || github.event.workflow_run.conclusion == 'failure' || github.event.workflow_run.conclusion == 'timed_out' }}
run: exit 1
- name: Clean up caches
if: always()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const ref = process.env.original_ref;
const key_prefix = 'tests-' + ref + '-';
if (process.env.original_event == 'pull_request' && process.env.original_action != 'closed') {
console.log('Skipping cache cleanup for open PR');
return;
}
await github.paginate(github.rest.actions.getActionsCacheList, {
owner: context.repo.owner,
repo: context.repo.repo,
per_page: 100,
key: key_prefix
}).then(caches => {
if (caches) {
for (const cache of caches) {
console.log(`Deleting cache: ${cache.key}`);
github.rest.actions.deleteActionsCacheById({
owner: context.repo.owner,
repo: context.repo.repo,
cache_id: cache.id
});
}
}
});
- name: Report conclusion
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = process.env.original_sha;
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: `Runtime Tests / Report results (${process.env.original_event} -> workflow_run -> workflow_run)`,
owner: owner,
repo: repo,
sha: sha,
state: '${{ job.status }}',
description: '${{ job.status }}' == 'success' ? 'Runtime tests successful' : 'Runtime tests failed',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
- name: Generate report
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
env:
REPORT_FILE: ./runtime-tests-results/RUNTIME_TESTS_REPORT.md
WOKWI_RUN_ID: ${{ github.event.workflow_run.id }}
BUILD_RUN_ID: ${{ env.original_run_id }}
IS_FAILING: ${{ env.original_conclusion == 'failure' || env.original_conclusion == 'timed_out' || github.event.workflow_run.conclusion == 'failure' || github.event.workflow_run.conclusion == 'timed_out' || job.status == 'failure' }}
run: |
rm -rf artifacts $REPORT_FILE
mv -f ./unity_results.json ./runtime-tests-results/unity_results.json
touch $REPORT_FILE
python3 ./runtime-tests-results/table_generator.py ./runtime-tests-results/unity_results.json >> $REPORT_FILE
- name: Generate badge
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
uses: jaywcjlove/generated-badges@0e078ae4d4bab3777ea4f137de496ab44688f5ad # v1.0.13
with:
label: Runtime Tests
status: ${{ job.status == 'success' && 'passing' || 'failing' }}
output: runtime-tests-results/badge.svg
color: ${{ job.status == 'success' && 'green' || 'red' }}
style: flat
- name: Push badge
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
if [[ `git status --porcelain` ]]; then
git add --all
git commit -m "Updated runtime tests report"
git push origin HEAD:gh-pages
fi
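The cleanup step above walks the Actions cache with actions/github-script and deletes every entry whose key starts with tests-<ref>- once the originating pull request is closed. Roughly the same thing can be done from a terminal with the GitHub CLI's REST helper; this is only a sketch, and the tests-1234- prefix stands in for a real PR number:

  # Hypothetical manual cleanup of the test caches left behind by PR 1234.
  gh api "repos/espressif/arduino-esp32/actions/caches?key=tests-1234-&per_page=100" \
    --jq '.actions_caches[].id' |
  while read -r cache_id; do
    gh api -X DELETE "repos/espressif/arduino-esp32/actions/caches/$cache_id"
  done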

View file

@ -1,326 +0,0 @@
name: Wokwi tests
on:
workflow_run:
workflows: ["Runtime Tests"]
types:
- completed
# No permissions by default
permissions: { contents: read }
env:
WOKWI_TIMEOUT: 600000 # Milliseconds
jobs:
get-artifacts:
name: Get required artifacts
runs-on: ubuntu-latest
permissions:
actions: read
statuses: write
outputs:
pr_num: ${{ steps.set-ref.outputs.pr_num }}
ref: ${{ steps.set-ref.outputs.ref }}
base: ${{ steps.set-ref.outputs.base }}
targets: ${{ steps.set-ref.outputs.targets }}
types: ${{ steps.set-ref.outputs.types }}
steps:
- name: Report pending
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (Get artifacts) (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: 'pending',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
- name: Download and extract event file
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
name: event_file
path: artifacts/event_file
- name: Download and extract matrix info
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
name: matrix_info
path: artifacts/matrix_info
- name: Try to read PR number
id: set-ref
run: |
pr_num=$(jq -r '.pull_request.number' artifacts/event_file/event.json | tr -cd "[:digit:]")
if [ -z "$pr_num" ] || [ "$pr_num" == "null" ]; then
pr_num=""
fi
ref=$pr_num
if [ -z "$ref" ] || [ "$ref" == "null" ]; then
ref=${{ github.ref }}
fi
action=$(jq -r '.action' artifacts/event_file/event.json | tr -cd "[:alpha:]_")
if [ "$action" == "null" ]; then
action=""
fi
base=$(jq -r '.pull_request.base.ref' artifacts/event_file/event.json | tr -cd "[:alnum:]/_.-")
if [ -z "$base" ] || [ "$base" == "null" ]; then
base=${{ github.ref }}
fi
types=$(cat artifacts/matrix_info/wokwi_types.txt | tr -cd "[:alpha:],[]'")
targets=$(cat artifacts/matrix_info/targets.txt | tr -cd "[:alnum:],[]'")
echo "base = $base"
echo "targets = $targets"
echo "types = $types"
echo "pr_num = $pr_num"
printf "$ref" >> artifacts/ref.txt
printf "Ref = "
cat artifacts/ref.txt
printf "${{ github.event.workflow_run.event }}" >> artifacts/event.txt
printf "\nEvent name = "
cat artifacts/event.txt
printf "${{ github.event.workflow_run.head_sha || github.sha }}" >> artifacts/sha.txt
printf "\nHead SHA = "
cat artifacts/sha.txt
printf "$action" >> artifacts/action.txt
printf "\nAction = "
cat artifacts/action.txt
printf "${{ github.event.workflow_run.id }}" >> artifacts/run_id.txt
printf "\nRun ID = "
cat artifacts/run_id.txt
if [ -z "$ref" ] || [ "$ref" == "null" ]; then
echo "Failed to get PR number or ref"
exit 1
fi
conclusion="${{ github.event.workflow_run.conclusion }}"
printf "$conclusion" >> artifacts/conclusion.txt
printf "\nConclusion = "
cat artifacts/conclusion.txt
echo "pr_num=$pr_num" >> $GITHUB_OUTPUT
echo "base=$base" >> $GITHUB_OUTPUT
echo "targets=$targets" >> $GITHUB_OUTPUT
echo "types=$types" >> $GITHUB_OUTPUT
echo "ref=$ref" >> $GITHUB_OUTPUT
- name: Download and extract parent hardware results
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
continue-on-error: true
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
pattern: tests-results-hw-*
merge-multiple: true
path: artifacts/results/hw
- name: Download and extract parent QEMU results
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
continue-on-error: true
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
pattern: tests-results-qemu-*
merge-multiple: true
path: artifacts/results/qemu
- name: Upload parent artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: parent-artifacts
path: artifacts
if-no-files-found: error
- name: Report conclusion
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (Get artifacts) (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: '${{ job.status }}',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
wokwi-test:
name: Wokwi ${{ matrix.chip }} ${{ matrix.type }} tests
if: |
github.event.workflow_run.conclusion == 'success' ||
github.event.workflow_run.conclusion == 'failure' ||
github.event.workflow_run.conclusion == 'timed_out'
runs-on: ubuntu-latest
needs: get-artifacts
env:
id: ${{ needs.get-artifacts.outputs.ref }}-${{ github.event.workflow_run.head_sha || github.sha }}-${{ matrix.chip }}-${{ matrix.type }}
permissions:
actions: read
statuses: write
strategy:
fail-fast: false
matrix:
type: ${{ fromJson(needs.get-artifacts.outputs.types) }}
chip: ${{ fromJson(needs.get-artifacts.outputs.targets) }}
steps:
- name: Report pending
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (${{ matrix.type }}, ${{ matrix.chip }}) / Wokwi ${{ matrix.chip }} ${{ matrix.type }} tests (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: 'pending',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
- name: Check if already passed
id: get-cache-results
if: needs.get-artifacts.outputs.pr_num
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-results-wokwi
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Evaluate if tests should be run
id: check-tests
run: |
cache_exists=${{ steps.get-cache-results.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already ran, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
# Note that changes to the workflows and tests will only be picked up after the PR is merged
# DO NOT CHECKOUT THE USER'S REPOSITORY IN THIS WORKFLOW. IT HAS HIGH SECURITY RISKS.
- name: Checkout repository
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ needs.get-artifacts.outputs.base || github.ref }}
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
cache-dependency-path: tests/requirements.txt
cache: "pip"
python-version: "3.x"
- name: Install dependencies
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
- name: Install Wokwi CLI
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: curl -L https://wokwi.com/ci/install.sh | sh
- name: Wokwi CI Server
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: wokwi/wokwi-ci-server-action@a6fabb5a49e080158c7a1d121ea5b789536a82c3 # v1
- name: Get binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
name: tests-bin-${{ matrix.chip }}-${{ matrix.type }}
path: |
~/.arduino/tests/${{ matrix.chip }}
- name: Run Tests
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
env:
WOKWI_CLI_TOKEN: ${{ secrets.WOKWI_CLI_TOKEN }}
run: |
bash .github/scripts/tests_run.sh -c -type ${{ matrix.type }} -t ${{ matrix.chip }} -i 0 -m 1 -W ${{ env.WOKWI_TIMEOUT }}
- name: Upload ${{ matrix.chip }} ${{ matrix.type }} Wokwi results as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-tests.outputs.enabled == 'true' && needs.get-artifacts.outputs.pr_num
with:
key: tests-${{ env.id }}-results-wokwi
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Upload ${{ matrix.chip }} ${{ matrix.type }} Wokwi results as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: tests-results-wokwi-${{ matrix.chip }}-${{ matrix.type }}
overwrite: true
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Report conclusion
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (${{ matrix.type }}, ${{ matrix.chip }}) / Wokwi ${{ matrix.chip }} ${{ matrix.type }} tests (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: '${{ job.status }}',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);

View file

@ -1,59 +1,20 @@
name: Push components to https://components.espressif.com
on:
workflow_dispatch:
inputs:
tag:
description: 'Version to push to the component registry'
required: true
git_ref:
description: 'Git ref with the source to push to the component registry'
required: true
workflow_run:
workflows: ["ESP32 Arduino Release"]
types:
- completed
permissions:
contents: read
push:
tags:
- '*'
jobs:
upload_components:
runs-on: ubuntu-latest
steps:
- name: Get the release tag
env:
head_branch: ${{ inputs.tag || github.event.workflow_run.head_branch }}
run: |
if [ "${{ github.event.workflow_run.conclusion }}" != "success" ] && [ "${{ github.event_name }}" == "workflow_run" ]; then
echo "Release workflow failed. Exiting..."
exit 1
fi
# Read and sanitize the branch/tag name
branch=$(echo "$head_branch" | tr -cd '[:alnum:]/_.-')
if [[ $branch == refs/tags/* ]]; then
tag="${branch#refs/tags/}"
elif [[ $branch =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$ ]]; then
tag=$branch
else
echo "Tag not found in $branch. Exiting..."
exit 1
fi
echo "Tag: $tag"
echo "RELEASE_TAG=$tag" >> $GITHUB_ENV
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v3
with:
ref: ${{ inputs.git_ref || env.RELEASE_TAG }}
submodules: "recursive"
- name: Upload components to the component registry
uses: espressif/upload-components-ci-action@b78a19fa5424714997596d3ecffa634aef8ae20b # v1.0.5
uses: espressif/upload-components-ci-action@v1
with:
name: arduino-esp32
version: ${{ env.RELEASE_TAG }}
version: ${{ github.ref_name }}
namespace: espressif
api_token: ${{ secrets.IDF_COMPONENT_API_TOKEN }}

.gitignore vendored
View file

@ -1,5 +1,4 @@
tools/esp32-arduino-libs
tools/xtensa-esp-elf
tools/xtensa-esp32-elf
tools/xtensa-esp32s2-elf
tools/xtensa-esp32s3-elf
@ -45,13 +44,3 @@ debug.cfg
debug.svd
debug_custom.json
libraries/Insights/examples/*/*.ino.zip
# Vale Style
.vale/styles/*
!.vale/styles/Vocab/
.vale/styles/Vocab/*
!.vale/styles/Vocab/Espressif/
# Ignore Lib Builder Docker run scripts
/run.sh
/run.ps1

View file

@ -1,25 +0,0 @@
workflow:
rules:
# Disable non-protected, push-triggered pipelines
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^\d+\.\d+(\.\d+)?($|-)/ && $CI_PIPELINE_SOURCE == "push"'
when: never
# When running merged results pipelines, CI_COMMIT_SHA refers to the temporary merge commit GitLab creates.
# Use PIPELINE_COMMIT_SHA wherever the commit SHA of the original commit is required.
- if: $CI_OPEN_MERGE_REQUESTS != null
variables:
PIPELINE_COMMIT_SHA: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
IS_MR_PIPELINE: 1
- if: $CI_OPEN_MERGE_REQUESTS == null
variables:
PIPELINE_COMMIT_SHA: $CI_COMMIT_SHA
IS_MR_PIPELINE: 0
- if: '$CI_PIPELINE_SOURCE == "schedule"'
variables:
IS_SCHEDULED_RUN: "true"
- when: always
# Place the default settings in `.gitlab/workflows/common.yml` instead
include:
- ".gitlab/workflows/common.yml"
- ".gitlab/workflows/sample.yml"

View file

@ -1,26 +0,0 @@
#####################
# Default Variables #
#####################
stages:
- pre_check
- build
- test
- result
variables:
ESP_IDF_VERSION: "5.5"
ESP_ARDUINO_VERSION: "3.3.0"
#############
# `default` #
#############
default:
retry:
max: 2
when:
# In case of a runner failure we could hop to another one, or a network error could go away.
- runner_system_failure
# Job execution timeout may be caused by a network issue.
- job_execution_timeout

View file

@ -1,6 +0,0 @@
hello-world:
stage: test
script:
- echo "Hello, World from GitLab CI!"
rules:
- if: $CI_PIPELINE_SOURCE == "push"

View file

@ -1,110 +0,0 @@
exclude: |
(?x)(
^\.github\/|
^tests\/performance\/coremark\/.*\.[ch]$|
^tests\/performance\/superpi\/.*\.(cpp|h)$|
LICENSE\.md$
)
default_language_version:
# force all unspecified python hooks to run python3
python: python3
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: "cef0300fd0fc4d2a87a85fa2093c6b283ea36f4b" # v5.0.0
hooks:
# Generic checks
- id: check-case-conflict
- id: check-symlinks
- id: debug-statements
- id: destroyed-symlinks
- id: detect-private-key
- id: end-of-file-fixer
exclude: ^.*\.(bin|BIN)$
- id: mixed-line-ending
args: [--fix=lf]
- id: trailing-whitespace
args: [--markdown-linebreak-ext=md]
# JSON formatting
- id: pretty-format-json
stages: [manual]
args: [--autofix]
types_or: [json]
exclude: |
(?x)(
diagram\..*\.json$|
package\.json$|
^package\/.*$
)
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: "f6446549e5e97ec9665b9b03e75b87b445857f9a" # v18.1.3
hooks:
# C/C++ formatting
- id: clang-format
types_or: [c, c++]
exclude: ^.*\/build_opt\.h$
- repo: https://github.com/psf/black-pre-commit-mirror
rev: "a4920527036bb9a3f3e6055d595849d67d0da066" # 25.1.0
hooks:
# Python formatting
- id: black
types_or: [python]
args: [--line-length=120] # From the Arduino code style. Added as an argument rather than creating a new config file.
- repo: https://github.com/PyCQA/flake8
rev: "16f5f28a384f0781bebb37a08aa45e65b9526c50" # 7.2.0
hooks:
# Python linting
- id: flake8
types_or: [python]
additional_dependencies:
- flake8-bugbear
- flake8-comprehensions
- flake8-simplify
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "ffb6a759a979008c0e6dff86e39f4745a2d9eac4" # v3.1.0
hooks:
# YAML formatting
- id: prettier
types_or: [yaml]
- repo: https://github.com/codespell-project/codespell
rev: "63c8f8312b7559622c0d82815639671ae42132ac" # v2.4.1
hooks:
# Spell checking
- id: codespell
exclude: ^.*\.(svd|SVD)$
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: "a23f6b85d0fdd5bb9d564e2579e678033debbdff" # v0.10.0.1
hooks:
# Bash linting
- id: shellcheck
types: [shell]
- repo: https://github.com/openstack/bashate
rev: "fbd7c2534c2701351c603ff700ddf08202430a31" # 2.1.1
hooks:
# Bash formatting
- id: bashate
types: [shell]
args: ["-i", "E006"] # Ignore E006: Line too long
- repo: https://github.com/errata-ai/vale
rev: "dc4c47923788a413fb5677de6e3370d514aecb78" # v3.11.2
hooks:
# Sync vale styles and lint markdown and reStructuredText
- id: vale
name: vale-sync
language_version: "1.23.2"
pass_filenames: false
args: [sync]
types_or: [markdown, rst]
- id: vale
language_version: "1.23.2"
types_or: [markdown, rst]
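This configuration wires the generic pre-commit checks plus clang-format, black, flake8, prettier, codespell, shellcheck, bashate and vale into git hooks. Typical local usage with the standard pre-commit CLI (the hook id in the last command comes from the config above):

  pip install pre-commit                     # or: pipx install pre-commit
  pre-commit install                         # register the git hook once per clone
  pre-commit run --all-files                 # run every configured hook on the whole tree
  pre-commit run clang-format --all-files    # run a single hook, e.g. C/C++ formatting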

View file

@ -1,4 +0,0 @@
__pycache__/
.clang-format
.licenses/
/.git/

View file

@ -1,11 +0,0 @@
# Shellcheck configuration file for ESP32 Arduino core
# Optional checks. https://github.com/koalaman/shellcheck/wiki/optional
enable=add-default-case,deprecate-which,avoid-nullary-conditions
# Enable search for external sources
external-sources=true
# Search folder for sourced files.
# Set to the folder where the original script is located.
source-path=SCRIPTDIR

.vale.ini
View file

@ -1,118 +0,0 @@
###################
### Vale Config ###
###################
# This is a Vale linter configuration file.
# - Repo: arduino-esp32
# - Based on Default config: v0-1-1
# It lists all necessary parameters to configure Vale for your project.
# For official documentation on all config settings, see
# https://vale.sh/docs/topics/config
##############
### Global ###
##############
# This section lists core settings applying to Vale itself.
# Specify path to external resources (e.g., styles and vocab files).
# The path value may be absolute or relative to this configuration file.
StylesPath = .vale/styles
# Specify the minimum alert severity that Vale will report.
MinAlertLevel = error # "suggestion", "warning", or "error"
# Specify vocabulary for special treatment.
# Create a folder in <StylesPath>/Vocab/<name>/ and add its name here
# The folder should contain two files:
# - accept.txt -- lists words with accepted case-sensitive spelling
# - reject.txt -- lists words whose occurrences throw an error
# Vocab = Espressif
# Specify the packages to import into your project.
# A package is a zip file containing a number of rules (style) written in YAML.
# For a list of official packages, see Package Hub at https://vale.sh/hub/
# For official documentation on packages, see
# https://vale.sh/docs/topics/packages/
# Before linting, navigate to your project and run `vale sync` to download
# the official packages specified below.
# Packages = Package1, Package2, \
# https://example.com/path/to/package/Package.zip
Packages = Google, Microsoft, RedHat, \
https://dl.espressif.com/dl/esp-vale-config/Espressif-latest.zip
###############
### Formats ###
###############
# This section enables association of "unknown" formats with the ones
# supported by Vale. For official documentation on supported formats, see
# https://vale.sh/docs/topics/scoping/
[formats]
# For example, treat MDX files as Markdown files.
# mdx = md
################################
### Format-specific settings ###
################################
# This section lists the settings that apply to specific file formats
# based on their glob pattern.
# Settings provided under a more specific glob pattern,
# such as [*.{md,txt}] will override those in [*].
[*.{md,rst}]
# Enable styles to activate all rules included in them.
# BasedOnStyles = Style1, Style2
BasedOnStyles = Vale, Espressif-latest
### Deactivate individual rules ###
### in enabled styles.
# Style1.Rule1 = NO
Vale.Repetition = NO
Vale.Spelling = NO
Espressif-latest.Admonitions = NO
Espressif-latest.Contractions = NO
Espressif-latest.Monospace = NO
### Change default severity level ###
### of an activated rule.
# Choose between "suggestion", "warning", or "error".
# Style1.Rule2 = error
### Activate individual rules ###
### in non-enabled styles stored in <StylesPath>.
# Style1.Rule = YES
Google.Gender = YES
Google.GenderBias = YES
Google.Slang = YES
Google.Spacing = YES
Microsoft.DateNumbers = YES
Microsoft.Ellipses = YES
Microsoft.FirstPerson = YES
Microsoft.Hyphens = YES
Microsoft.Ordinal = YES
Microsoft.OxfordComma = YES
Microsoft.Percentages = YES
Microsoft.RangeTime = YES
Microsoft.Semicolon = YES
Microsoft.SentenceLength = YES
Microsoft.Suspended = YES
Microsoft.Units = YES
Microsoft.URLFormat = YES
Microsoft.We = YES
Microsoft.Wordiness = YES
RedHat.Contractions = YES
RedHat.RepeatedWords = YES
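As the comments above note, the Google, Microsoft, RedHat and Espressif packages must be downloaded into StylesPath before linting. A minimal local run, assuming Vale is installed and invoked from the directory containing this .vale.ini; the file being linted is only an example:

  vale sync        # download the packages listed under Packages into .vale/styles
  vale README.md   # lint a Markdown or reStructuredText file with the rules above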

View file

@ -5,8 +5,8 @@
# export ARDUINO_SKIP_IDF_VERSION_CHECK=1
# idf.py build
set(min_supported_idf_version "5.3.0")
set(max_supported_idf_version "5.5.99")
set(min_supported_idf_version "5.1.0")
set(max_supported_idf_version "5.1.99")
set(idf_version "${IDF_VERSION_MAJOR}.${IDF_VERSION_MINOR}.${IDF_VERSION_PATCH}")
if ("${idf_version}" AND NOT "$ENV{ARDUINO_SKIP_IDF_VERSION_CHECK}")
@ -25,7 +25,6 @@ endif()
set(CORE_SRCS
cores/esp32/base64.cpp
cores/esp32/cbuf.cpp
cores/esp32/ColorFormat.c
cores/esp32/chip-debug-report.cpp
cores/esp32/esp32-hal-adc.c
cores/esp32/esp32-hal-bt.c
@ -33,7 +32,6 @@ set(CORE_SRCS
cores/esp32/esp32-hal-dac.c
cores/esp32/esp32-hal-gpio.c
cores/esp32/esp32-hal-i2c.c
cores/esp32/esp32-hal-i2c-ng.c
cores/esp32/esp32-hal-i2c-slave.c
cores/esp32/esp32-hal-ledc.c
cores/esp32/esp32-hal-matrix.c
@ -47,22 +45,18 @@ set(CORE_SRCS
cores/esp32/esp32-hal-timer.c
cores/esp32/esp32-hal-tinyusb.c
cores/esp32/esp32-hal-touch.c
cores/esp32/esp32-hal-touch-ng.c
cores/esp32/esp32-hal-uart.c
cores/esp32/esp32-hal-rmt.c
cores/esp32/Esp.cpp
cores/esp32/freertos_stats.cpp
cores/esp32/FunctionalInterrupt.cpp
cores/esp32/HardwareSerial.cpp
cores/esp32/HEXBuilder.cpp
cores/esp32/IPAddress.cpp
cores/esp32/IPv6Address.cpp
cores/esp32/libb64/cdecode.c
cores/esp32/libb64/cencode.c
cores/esp32/MacAddress.cpp
cores/esp32/main.cpp
cores/esp32/MD5Builder.cpp
cores/esp32/Print.cpp
cores/esp32/SHA1Builder.cpp
cores/esp32/stdlib_noniso.c
cores/esp32/Stream.cpp
cores/esp32/StreamString.cpp
@ -79,121 +73,29 @@ set(CORE_SRCS
cores/esp32/WString.cpp
)
set(ARDUINO_ALL_LIBRARIES
ArduinoOTA
AsyncUDP
BLE
BluetoothSerial
DNSServer
EEPROM
ESP_I2S
ESP_NOW
ESP_SR
ESPmDNS
Ethernet
FFat
FS
HTTPClient
HTTPUpdate
Insights
LittleFS
Matter
NetBIOS
Network
OpenThread
PPP
Preferences
RainMaker
SD_MMC
SD
SimpleBLE
SPIFFS
SPI
Ticker
Update
USB
WebServer
NetworkClientSecure
WiFi
WiFiProv
Wire
Zigbee
)
set(ARDUINO_LIBRARY_ArduinoOTA_SRCS libraries/ArduinoOTA/src/ArduinoOTA.cpp)
set(ARDUINO_LIBRARY_AsyncUDP_SRCS libraries/AsyncUDP/src/AsyncUDP.cpp)
set(ARDUINO_LIBRARY_BluetoothSerial_SRCS
set(LIBRARY_SRCS
libraries/ArduinoOTA/src/ArduinoOTA.cpp
libraries/AsyncUDP/src/AsyncUDP.cpp
libraries/BluetoothSerial/src/BluetoothSerial.cpp
libraries/BluetoothSerial/src/BTAddress.cpp
libraries/BluetoothSerial/src/BTAdvertisedDeviceSet.cpp
libraries/BluetoothSerial/src/BTScanResultsSet.cpp)
set(ARDUINO_LIBRARY_DNSServer_SRCS libraries/DNSServer/src/DNSServer.cpp)
set(ARDUINO_LIBRARY_EEPROM_SRCS libraries/EEPROM/src/EEPROM.cpp)
set(ARDUINO_LIBRARY_ESP_I2S_SRCS libraries/ESP_I2S/src/ESP_I2S.cpp)
set(ARDUINO_LIBRARY_ESP_NOW_SRCS
libraries/ESP_NOW/src/ESP32_NOW.cpp
libraries/ESP_NOW/src/ESP32_NOW_Serial.cpp)
set(ARDUINO_LIBRARY_ESP_SR_SRCS
libraries/BluetoothSerial/src/BTScanResultsSet.cpp
libraries/DNSServer/src/DNSServer.cpp
libraries/EEPROM/src/EEPROM.cpp
libraries/ESP_I2S/src/ESP_I2S.cpp
libraries/ESP_SR/src/ESP_SR.cpp
libraries/ESP_SR/src/esp32-hal-sr.c)
set(ARDUINO_LIBRARY_ESPmDNS_SRCS libraries/ESPmDNS/src/ESPmDNS.cpp)
set(ARDUINO_LIBRARY_Ethernet_SRCS libraries/Ethernet/src/ETH.cpp)
set(ARDUINO_LIBRARY_FFat_SRCS libraries/FFat/src/FFat.cpp)
set(ARDUINO_LIBRARY_FS_SRCS
libraries/ESP_SR/src/esp32-hal-sr.c
libraries/ESPmDNS/src/ESPmDNS.cpp
libraries/Ethernet/src/ETH.cpp
libraries/FFat/src/FFat.cpp
libraries/FS/src/FS.cpp
libraries/FS/src/vfs_api.cpp)
set(ARDUINO_LIBRARY_HTTPClient_SRCS libraries/HTTPClient/src/HTTPClient.cpp)
set(ARDUINO_LIBRARY_HTTPUpdate_SRCS libraries/HTTPUpdate/src/HTTPUpdate.cpp)
set(ARDUINO_LIBRARY_Insights_SRCS libraries/Insights/src/Insights.cpp)
set(ARDUINO_LIBRARY_LittleFS_SRCS libraries/LittleFS/src/LittleFS.cpp)
set(ARDUINO_LIBRARY_NetBIOS_SRCS libraries/NetBIOS/src/NetBIOS.cpp)
set(ARDUINO_LIBRARY_OpenThread_SRCS
libraries/OpenThread/src/OThread.cpp
libraries/OpenThread/src/OThreadCLI.cpp
libraries/OpenThread/src/OThreadCLI_Util.cpp)
set(ARDUINO_LIBRARY_Matter_SRCS
libraries/Matter/src/MatterEndpoints/MatterGenericSwitch.cpp
libraries/Matter/src/MatterEndpoints/MatterOnOffLight.cpp
libraries/Matter/src/MatterEndpoints/MatterDimmableLight.cpp
libraries/Matter/src/MatterEndpoints/MatterColorTemperatureLight.cpp
libraries/Matter/src/MatterEndpoints/MatterColorLight.cpp
libraries/Matter/src/MatterEndpoints/MatterEnhancedColorLight.cpp
libraries/Matter/src/MatterEndpoints/MatterFan.cpp
libraries/Matter/src/MatterEndpoints/MatterTemperatureSensor.cpp
libraries/Matter/src/MatterEndpoints/MatterHumiditySensor.cpp
libraries/Matter/src/MatterEndpoints/MatterContactSensor.cpp
libraries/Matter/src/MatterEndpoints/MatterPressureSensor.cpp
libraries/Matter/src/MatterEndpoints/MatterOccupancySensor.cpp
libraries/Matter/src/MatterEndpoints/MatterOnOffPlugin.cpp
libraries/Matter/src/MatterEndpoints/MatterThermostat.cpp
libraries/Matter/src/Matter.cpp
libraries/Matter/src/MatterEndPoint.cpp)
set(ARDUINO_LIBRARY_PPP_SRCS
libraries/PPP/src/PPP.cpp
libraries/PPP/src/ppp.c)
set(ARDUINO_LIBRARY_Preferences_SRCS libraries/Preferences/src/Preferences.cpp)
set(ARDUINO_LIBRARY_RainMaker_SRCS
libraries/FS/src/vfs_api.cpp
libraries/HTTPClient/src/HTTPClient.cpp
libraries/HTTPUpdate/src/HTTPUpdate.cpp
libraries/LittleFS/src/LittleFS.cpp
libraries/Insights/src/Insights.cpp
libraries/NetBIOS/src/NetBIOS.cpp
libraries/Preferences/src/Preferences.cpp
libraries/RainMaker/src/RMaker.cpp
libraries/RainMaker/src/RMakerNode.cpp
libraries/RainMaker/src/RMakerParam.cpp
@ -201,116 +103,44 @@ set(ARDUINO_LIBRARY_RainMaker_SRCS
libraries/RainMaker/src/RMakerType.cpp
libraries/RainMaker/src/RMakerQR.cpp
libraries/RainMaker/src/RMakerUtils.cpp
libraries/RainMaker/src/AppInsights.cpp)
set(ARDUINO_LIBRARY_SD_MMC_SRCS libraries/SD_MMC/src/SD_MMC.cpp)
set(ARDUINO_LIBRARY_SD_SRCS
libraries/RainMaker/src/AppInsights.cpp
libraries/SD_MMC/src/SD_MMC.cpp
libraries/SD/src/SD.cpp
libraries/SD/src/sd_diskio.cpp
libraries/SD/src/sd_diskio_crc.c)
set(ARDUINO_LIBRARY_SimpleBLE_SRCS libraries/SimpleBLE/src/SimpleBLE.cpp)
set(ARDUINO_LIBRARY_SPIFFS_SRCS libraries/SPIFFS/src/SPIFFS.cpp)
set(ARDUINO_LIBRARY_SPI_SRCS libraries/SPI/src/SPI.cpp)
set(ARDUINO_LIBRARY_Ticker_SRCS libraries/Ticker/src/Ticker.cpp)
set(ARDUINO_LIBRARY_Update_SRCS
libraries/SD/src/sd_diskio_crc.c
libraries/SimpleBLE/src/SimpleBLE.cpp
libraries/SPIFFS/src/SPIFFS.cpp
libraries/SPI/src/SPI.cpp
libraries/Ticker/src/Ticker.cpp
libraries/Update/src/Updater.cpp
libraries/Update/src/HttpsOTAUpdate.cpp)
set(ARDUINO_LIBRARY_USB_SRCS
libraries/Update/src/HttpsOTAUpdate.cpp
libraries/USB/src/USBHID.cpp
libraries/USB/src/USBMIDI.cpp
libraries/USB/src/USBHIDMouse.cpp
libraries/USB/src/USBHIDKeyboard.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_da_DK.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_de_DE.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_en_US.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_es_ES.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_fr_FR.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_hu_HU.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_it_IT.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_pt_BR.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_pt_PT.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_sv_SE.cpp
libraries/USB/src/USBHIDGamepad.cpp
libraries/USB/src/USBHIDConsumerControl.cpp
libraries/USB/src/USBHIDSystemControl.cpp
libraries/USB/src/USBHIDVendor.cpp
libraries/USB/src/USBVendor.cpp)
set(ARDUINO_LIBRARY_WebServer_SRCS
libraries/USB/src/USBVendor.cpp
libraries/WebServer/src/WebServer.cpp
libraries/WebServer/src/Parsing.cpp
libraries/WebServer/src/detail/mimetable.cpp
libraries/WebServer/src/middleware/MiddlewareChain.cpp
libraries/WebServer/src/middleware/AuthenticationMiddleware.cpp
libraries/WebServer/src/middleware/CorsMiddleware.cpp
libraries/WebServer/src/middleware/LoggingMiddleware.cpp)
set(ARDUINO_LIBRARY_NetworkClientSecure_SRCS
libraries/NetworkClientSecure/src/ssl_client.cpp
libraries/NetworkClientSecure/src/NetworkClientSecure.cpp)
set(ARDUINO_LIBRARY_Network_SRCS
libraries/Network/src/NetworkInterface.cpp
libraries/Network/src/NetworkEvents.cpp
libraries/Network/src/NetworkManager.cpp
libraries/Network/src/NetworkClient.cpp
libraries/Network/src/NetworkServer.cpp
libraries/Network/src/NetworkUdp.cpp)
set(ARDUINO_LIBRARY_WiFi_SRCS
libraries/WiFiClientSecure/src/ssl_client.cpp
libraries/WiFiClientSecure/src/WiFiClientSecure.cpp
libraries/WiFi/src/WiFiAP.cpp
libraries/WiFi/src/WiFiClient.cpp
libraries/WiFi/src/WiFi.cpp
libraries/WiFi/src/WiFiGeneric.cpp
libraries/WiFi/src/WiFiMulti.cpp
libraries/WiFi/src/WiFiScan.cpp
libraries/WiFi/src/WiFiServer.cpp
libraries/WiFi/src/WiFiSTA.cpp
libraries/WiFi/src/STA.cpp
libraries/WiFi/src/AP.cpp)
set(ARDUINO_LIBRARY_WiFiProv_SRCS libraries/WiFiProv/src/WiFiProv.cpp)
set(ARDUINO_LIBRARY_Wire_SRCS libraries/Wire/src/Wire.cpp)
set(ARDUINO_LIBRARY_Zigbee_SRCS
libraries/Zigbee/src/ZigbeeCore.cpp
libraries/Zigbee/src/ZigbeeEP.cpp
libraries/Zigbee/src/ZigbeeHandlers.cpp
libraries/Zigbee/src/ep/ZigbeeColorDimmableLight.cpp
libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.cpp
libraries/Zigbee/src/ep/ZigbeeDimmableLight.cpp
libraries/Zigbee/src/ep/ZigbeeLight.cpp
libraries/Zigbee/src/ep/ZigbeeSwitch.cpp
libraries/Zigbee/src/ep/ZigbeeTempSensor.cpp
libraries/Zigbee/src/ep/ZigbeeThermostat.cpp
libraries/Zigbee/src/ep/ZigbeeFlowSensor.cpp
libraries/Zigbee/src/ep/ZigbeePressureSensor.cpp
libraries/Zigbee/src/ep/ZigbeeOccupancySensor.cpp
libraries/Zigbee/src/ep/ZigbeeCarbonDioxideSensor.cpp
libraries/Zigbee/src/ep/ZigbeeContactSwitch.cpp
libraries/Zigbee/src/ep/ZigbeeDoorWindowHandle.cpp
libraries/Zigbee/src/ep/ZigbeeWindowCovering.cpp
libraries/Zigbee/src/ep/ZigbeeVibrationSensor.cpp
libraries/Zigbee/src/ep/ZigbeeAnalog.cpp
libraries/Zigbee/src/ep/ZigbeeRangeExtender.cpp
libraries/Zigbee/src/ep/ZigbeeGateway.cpp
libraries/Zigbee/src/ep/ZigbeeWindSpeedSensor.cpp
libraries/Zigbee/src/ep/ZigbeeIlluminanceSensor.cpp
libraries/Zigbee/src/ep/ZigbeePM25Sensor.cpp
libraries/Zigbee/src/ep/ZigbeeElectricalMeasurement.cpp
libraries/Zigbee/src/ep/ZigbeeBinary.cpp
libraries/Zigbee/src/ep/ZigbeePowerOutlet.cpp
libraries/Zigbee/src/ep/ZigbeeFanControl.cpp
libraries/WiFi/src/WiFiUdp.cpp
libraries/WiFiProv/src/WiFiProv.cpp
libraries/Wire/src/Wire.cpp
)
set(ARDUINO_LIBRARY_BLE_SRCS
libraries/BLE/src/BLE2901.cpp
set(BLE_SRCS
libraries/BLE/src/BLE2902.cpp
libraries/BLE/src/BLE2904.cpp
libraries/BLE/src/BLEAddress.cpp
@ -342,40 +172,48 @@ set(ARDUINO_LIBRARY_BLE_SRCS
libraries/BLE/src/GeneralUtils.cpp
)
set(ARDUINO_LIBRARIES_SRCS)
set(ARDUINO_LIBRARIES_REQUIRES)
set(ARDUINO_LIBRARIES_INCLUDEDIRS)
foreach(libname IN LISTS ARDUINO_ALL_LIBRARIES)
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_${libname})
if(ARDUINO_LIBRARY_${libname}_SRCS)
list(APPEND ARDUINO_LIBRARIES_SRCS ${ARDUINO_LIBRARY_${libname}_SRCS})
endif()
if(ARDUINO_LIBRARY_${libname}_REQUIRES)
list(APPEND ARDUINO_LIBRARIES_REQUIRES ${ARDUINO_LIBRARY_${libname}_REQUIRES})
endif()
if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/libraries/${libname}/src)
list(APPEND ARDUINO_LIBRARIES_INCLUDEDIRS libraries/${libname}/src)
endif()
endif()
endforeach()
set(includedirs
variants/${CONFIG_ARDUINO_VARIANT}/
cores/esp32/
libraries/ArduinoOTA/src
libraries/AsyncUDP/src
libraries/BLE/src
libraries/BluetoothSerial/src
libraries/DNSServer/src
libraries/EEPROM/src
libraries/ESP_I2S/src
libraries/ESP_SR/src
libraries/ESP32/src
libraries/ESPmDNS/src
libraries/Ethernet/src
libraries/FFat/src
libraries/FS/src
libraries/HTTPClient/src
libraries/HTTPUpdate/src
libraries/LittleFS/src
libraries/Insights/src
libraries/NetBIOS/src
libraries/Preferences/src
libraries/RainMaker/src
libraries/SD_MMC/src
libraries/SD/src
libraries/SimpleBLE/src
libraries/SPIFFS/src
libraries/SPI/src
libraries/Ticker/src
libraries/Update/src
libraries/USB/src
libraries/WebServer/src
libraries/WiFiClientSecure/src
libraries/WiFi/src
libraries/WiFiProv/src
libraries/Wire/src
)
set(includedirs variants/${CONFIG_ARDUINO_VARIANT}/ cores/esp32/ ${ARDUINO_LIBRARIES_INCLUDEDIRS})
set(srcs ${CORE_SRCS} ${ARDUINO_LIBRARIES_SRCS})
set(srcs ${CORE_SRCS} ${LIBRARY_SRCS} ${BLE_SRCS})
set(priv_includes cores/esp32/libb64)
set(requires spi_flash esp_partition mbedtls wpa_supplicant esp_adc esp_eth http_parser esp_ringbuf esp_driver_gptimer esp_driver_usb_serial_jtag driver esp_http_client esp_https_ota)
set(priv_requires fatfs nvs_flash app_update spiffs bootloader_support bt esp_hid usb esp_psram ${ARDUINO_LIBRARIES_REQUIRES})
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_OpenThread)
#if(CONFIG_SOC_IEEE802154_SUPPORTED) # Does not work!
#if(CONFIG_OPENTHREAD_ENABLED) # Does not work!
if(IDF_TARGET STREQUAL "esp32c6" OR IDF_TARGET STREQUAL "esp32h2" OR IDF_TARGET STREQUAL "esp32c5") # Sadly only this works
list(APPEND requires openthread)
endif()
endif()
if(IDF_TARGET STREQUAL "esp32p4")
list(APPEND requires esp_driver_touch_sens)
endif()
set(requires spi_flash esp_partition mbedtls wifi_provisioning wpa_supplicant esp_adc esp_eth http_parser)
set(priv_requires fatfs nvs_flash app_update spiffs bootloader_support bt esp_hid)
idf_component_register(INCLUDE_DIRS ${includedirs} PRIV_INCLUDE_DIRS ${priv_includes} SRCS ${srcs} REQUIRES ${requires} PRIV_REQUIRES ${priv_requires})
@ -393,7 +231,7 @@ target_compile_options(${COMPONENT_TARGET} PUBLIC
-DARDUINO_ARCH_ESP32
-DARDUINO_BOARD="${idf_target_caps}_DEV"
-DARDUINO_VARIANT="${CONFIG_ARDUINO_VARIANT}"
-DESP32=ESP32)
-DESP32)
if(CONFIG_AUTOSTART_ARDUINO)
# in autostart mode, arduino-esp32 contains app_main() function and needs to
@ -416,21 +254,9 @@ function(maybe_add_component component_name)
endif()
endfunction()
if(IDF_TARGET MATCHES "esp32s2|esp32s3|esp32p4" AND CONFIG_TINYUSB_ENABLED)
if(IDF_TARGET MATCHES "esp32s2|esp32s3" AND CONFIG_TINYUSB_ENABLED)
maybe_add_component(arduino_tinyusb)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_ArduinoOTA)
maybe_add_component(esp_https_ota)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_ESP_SR)
maybe_add_component(espressif__esp_sr)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_Matter)
maybe_add_component(espressif__esp_matter)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_LittleFS)
maybe_add_component(joltwallet__littlefs)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_WiFiProv)
maybe_add_component(espressif__network_provisioning)
endif()
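Two knobs from this CMakeLists.txt are worth calling out: the min/max IDF version gate near the top can be bypassed with ARDUINO_SKIP_IDF_VERSION_CHECK (as its own comment shows), and the library list is filtered through the CONFIG_ARDUINO_SELECTIVE_* options declared in the Kconfig further below. A short sketch for an ESP-IDF project that pulls in arduino-esp32 as a component; the exact library selection is illustrative:

  # Skip the supported-IDF-version check when building against an untested IDF release.
  export ARDUINO_SKIP_IDF_VERSION_CHECK=1
  idf.py build

  # Limit the Arduino build to specific libraries, e.g. via sdkconfig.defaults:
  #   CONFIG_ARDUINO_SELECTIVE_COMPILATION=y
  #   CONFIG_ARDUINO_SELECTIVE_WiFi=y
  #   CONFIG_ARDUINO_SELECTIVE_Wire=y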

View file

@ -5,7 +5,7 @@
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status,
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

View file

@ -209,7 +209,7 @@ config ARDUHAL_ESP_LOG
default "n"
help
This option will redefine the ESP_LOGx macros to Arduino's log_x macros.
To enable for your application, add the following after your includes:
To enable for your application, add the follwing after your includes:
#ifdef ARDUINO_ARCH_ESP32
#include "esp32-hal-log.h"
#endif
@ -256,147 +256,22 @@ config ARDUINO_SELECTIVE_COMPILATION
bool "Include only specific Arduino libraries"
default n
config ARDUINO_SELECTIVE_SPI
bool "Enable SPI"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Wire
bool "Enable Wire"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ESP_SR
bool "Enable ESP-SR"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_EEPROM
bool "Enable EEPROM"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Preferences
bool "Enable Preferences"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Ticker
bool "Enable Ticker"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Update
bool "Enable Update"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Zigbee
bool "Enable Zigbee"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_FS
bool "Enable FS"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_SD
bool "Enable SD"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SD_MMC
bool "Enable SD_MMC"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SPIFFS
bool "Enable SPIFFS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_FFat
bool "Enable FFat"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_LittleFS
bool "Enable LittleFS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_Network
bool "Enable Networking"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Ethernet
bool "Enable Ethernet"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_PPP
bool "Enable PPP"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ArduinoOTA
bool "Enable ArduinoOTA"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
select ARDUINO_SELECTIVE_ESPmDNS
default y
config ARDUINO_SELECTIVE_AsyncUDP
bool "Enable AsyncUDP"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_DNSServer
bool "Enable DNSServer"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_ESPmDNS
bool "Enable ESPmDNS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_HTTPClient
bool "Enable HTTPClient"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
select ARDUINO_SELECTIVE_NetworkClientSecure
default y
config ARDUINO_SELECTIVE_Matter
bool "Enable Matter"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_NetBIOS
bool "Enable NetBIOS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_WebServer
bool "Enable WebServer"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
select ARDUINO_SELECTIVE_FS
config ARDUINO_SELECTIVE_WiFi
bool "Enable WiFi"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_NetworkClientSecure
bool "Enable NetworkClientSecure"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_WiFiProv
bool "Enable WiFiProv"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network && ARDUINO_SELECTIVE_WiFi
config ARDUINO_SELECTIVE_AzureIoT
bool "Enable AzureIoT"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_HTTPClient
default y
config ARDUINO_SELECTIVE_BLE
@ -409,24 +284,129 @@ config ARDUINO_SELECTIVE_BluetoothSerial
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_DNSServer
bool "Enable DNSServer"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_EEPROM
bool "Enable EEPROM"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ESP32
bool "Enable ESP32"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ESPmDNS
bool "Enable ESPmDNS"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_FFat
bool "Enable FFat"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_FS
bool "Enable FS"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_HTTPClient
bool "Enable HTTPClient"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
select ARDUINO_SELECTIVE_WiFiClientSecure
default y
config ARDUINO_SELECTIVE_LITTLEFS
bool "Enable LITTLEFS"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_NetBIOS
bool "Enable NetBIOS"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_Preferences
bool "Enable Preferences"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_SD
bool "Enable SD"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SD_MMC
bool "Enable SD_MMC"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SimpleBLE
bool "Enable SimpleBLE"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_RainMaker
bool "Enable RainMaker"
config ARDUINO_SELECTIVE_SPI
bool "Enable SPI"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_OpenThread
bool "Enable OpenThread"
config ARDUINO_SELECTIVE_SPIFFS
bool "Enable SPIFFS"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_Ticker
bool "Enable Ticker"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Insights
bool "Enable Insights"
config ARDUINO_SELECTIVE_Update
bool "Enable Update"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_WebServer
bool "Enable WebServer"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
select ARDUINO_SELECTIVE_FS
config ARDUINO_SELECTIVE_WiFi
bool "Enable WiFi"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_WiFiClientSecure
bool "Enable WiFiClientSecure"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_WiFiProv
bool "Enable WiFiProv"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_Wire
bool "Enable Wire"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
endmenu
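The ARDUINO_SELECTIVE_* entries above only take effect when Arduino is built as an ESP-IDF component with ARDUINO_SELECTIVE_COMPILATION enabled. A minimal sketch-level sketch, assuming Kconfig exposes the usual CONFIG_-prefixed macros for these symbols (the snippet below is illustrative and not part of this changeset):

#include <Arduino.h>
#ifdef CONFIG_ARDUINO_SELECTIVE_WiFi
#include <WiFi.h>
#endif

void setup() {
  Serial.begin(115200);
#ifdef CONFIG_ARDUINO_SELECTIVE_WiFi
  // WiFi code is compiled only when "Enable WiFi" is selected in menuconfig
  WiFi.begin("my-ssid", "my-pass");
#endif
}

void loop() {}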


@ -1,13 +1,9 @@
# Arduino core for the ESP32, ESP32-C3, ESP32-C6, ESP32-H2, ESP32-P4, ESP32-S2 and ESP32-S3.
# Arduino core for the ESP32, ESP32-S2, ESP32-S3, ESP32-C3, ESP32-C6 and ESP32-H2
[![Build Status](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/push.yml?branch=master&event=push&label=Compilation%20Tests)](https://github.com/espressif/arduino-esp32/actions/workflows/push.yml?query=branch%3Amaster+event%3Apush)
[![Verbose Build Status](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/push.yml?branch=master&event=schedule&label=Compilation%20Tests%20(Verbose))](https://github.com/espressif/arduino-esp32/actions/workflows/push.yml?query=branch%3Amaster+event%3Aschedule)
[![External Libraries Test](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/lib.yml?branch=master&event=schedule&label=External%20Libraries%20Test)](https://github.com/espressif/arduino-esp32/blob/gh-pages/LIBRARIES_TEST.md)
[![Runtime Tests](https://github.com/espressif/arduino-esp32/blob/gh-pages/runtime-tests-results/badge.svg)](https://github.com/espressif/arduino-esp32/blob/gh-pages/runtime-tests-results/RUNTIME_TESTS_REPORT.md)
![Build Status](https://github.com/espressif/arduino-esp32/workflows/ESP32%20Arduino%20CI/badge.svg) [![Documentation Status](https://readthedocs.com/projects/espressif-arduino-esp32/badge/?version=latest)](https://docs.espressif.com/projects/arduino-esp32/en/latest/?badge=latest)
[![External Libraries Test](https://github.com/espressif/arduino-esp32/actions/workflows/lib.yml/badge.svg?branch=master&event=schedule)](https://github.com/espressif/arduino-esp32/actions/workflows/lib.yml?link=http://https://github.com/espressif/arduino-esp32/blob/master/LIBRARIES_TEST.md)
### Need help or have a question? Join the chat at [Discord](https://discord.gg/8xY6e9crwv) or [open a new Discussion](https://github.com/espressif/arduino-esp32/discussions)
[![Discord invite](https://img.shields.io/discord/1327272229427216425?logo=discord&logoColor=white&logoSize=auto&label=Discord)](https://discord.gg/8xY6e9crwv)
### Need help or have a question? Join the chat at [Gitter](https://gitter.im/espressif/arduino-esp32) or [open a new Discussion](https://github.com/espressif/arduino-esp32/discussions)
## Contents
@ -21,17 +17,9 @@
### Development Status
#### Latest Stable Release
Latest Stable Release [![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/latest/) [![Release Date](https://img.shields.io/github/release-date/espressif/arduino-esp32.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/latest/) [![Downloads](https://img.shields.io/github/downloads/espressif/arduino-esp32/latest/total.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/latest/)
[![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32.svg)](https://github.com/espressif/arduino-esp32/releases/latest/)
[![Release Date](https://img.shields.io/github/release-date/espressif/arduino-esp32.svg)](https://github.com/espressif/arduino-esp32/releases/latest/)
[![Downloads](https://img.shields.io/github/downloads/espressif/arduino-esp32/latest/total.svg)](https://github.com/espressif/arduino-esp32/releases/latest/)
#### Latest Development Release
[![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32/all.svg)](https://github.com/espressif/arduino-esp32/releases/)
[![Release Date](https://img.shields.io/github/release-date-pre/espressif/arduino-esp32.svg)](https://github.com/espressif/arduino-esp32/releases/)
[![Downloads](https://img.shields.io/github/downloads-pre/espressif/arduino-esp32/latest/total.svg)](https://github.com/espressif/arduino-esp32/releases/)
Latest Development Release [![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32/all.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/) [![Release Date](https://img.shields.io/github/release-date-pre/espressif/arduino-esp32.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/) [![Downloads](https://img.shields.io/github/downloads-pre/espressif/arduino-esp32/latest/total.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/)
### Development Planning
@ -49,10 +37,6 @@ You can use the [Arduino-ESP32 Online Documentation](https://docs.espressif.com/
---
**APIs compatibility with ESP8266 and Arduino-CORE (Arduino.cc) is explained [here](https://docs.espressif.com/projects/arduino-esp32/en/latest/libraries.html#apis).**
---
* [Getting Started](https://docs.espressif.com/projects/arduino-esp32/en/latest/getting_started.html)
* [Installing (Windows, Linux and macOS)](https://docs.espressif.com/projects/arduino-esp32/en/latest/installing.html)
* [Libraries](https://docs.espressif.com/projects/arduino-esp32/en/latest/libraries.html)
@ -67,17 +51,11 @@ Here are the ESP32 series supported by the Arduino-ESP32 project:
| **SoC** | **Stable** | **Development** | **Datasheet** |
|----------|:----------:|:---------------:|:-------------------------------------------------------------------------------------------------:|
| ESP32 | Yes | Yes | [ESP32](https://www.espressif.com/sites/default/files/documentation/esp32_datasheet_en.pdf) |
| ESP32-C3 | Yes | Yes | [ESP32-C3](https://www.espressif.com/sites/default/files/documentation/esp32-c3_datasheet_en.pdf) |
| ESP32-C6 | Yes | Yes | [ESP32-C6](https://www.espressif.com/sites/default/files/documentation/esp32-c6_datasheet_en.pdf) |
| ESP32-H2 | Yes | Yes | [ESP32-H2](https://www.espressif.com/sites/default/files/documentation/esp32-h2_datasheet_en.pdf) |
| ESP32-P4 | Yes | Yes | [ESP32-P4](https://www.espressif.com/sites/default/files/documentation/esp32-p4_datasheet_en.pdf) |
| ESP32-S2 | Yes | Yes | [ESP32-S2](https://www.espressif.com/sites/default/files/documentation/esp32-s2_datasheet_en.pdf) |
| ESP32-C3 | Yes | Yes | [ESP32-C3](https://www.espressif.com/sites/default/files/documentation/esp32-c3_datasheet_en.pdf) |
| ESP32-S3 | Yes | Yes | [ESP32-S3](https://www.espressif.com/sites/default/files/documentation/esp32-s3_datasheet_en.pdf) |
> [!NOTE]
> ESP32-C2 is also supported by Arduino-ESP32 but requires using Arduino as an ESP-IDF component or rebuilding the static libraries.
> For more information, see the [Arduino as an ESP-IDF component documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/esp-idf_component.html) or the
> [Lib Builder documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/lib_builder.html), respectively.
| ESP32-C6 | No | Yes | [ESP32-C6](https://www.espressif.com/sites/default/files/documentation/esp32-c6_datasheet_en.pdf) |
| ESP32-H2 | No | Yes | [ESP32-H2](https://www.espressif.com/sites/default/files/documentation/esp32-h2_datasheet_en.pdf) |
For more details visit the [supported chips](https://docs.espressif.com/projects/arduino-esp32/en/latest/getting_started.html#supported-soc-s) documentation page.
@ -87,7 +65,7 @@ You can use [EspExceptionDecoder](https://github.com/me-no-dev/EspExceptionDecod
### Issue/Bug report template
Before reporting an issue, make sure you've searched for similar one that was already created. Also make sure to go through all the issues labeled as [Type: For reference](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue+label%3A%22Type%3A+For+reference%22+).
Before reporting an issue, make sure you've searched for similar one that was already created. Also make sure to go through all the issues labelled as [Type: For reference](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue+label%3A%22Type%3A+For+reference%22+).
Finally, if you are sure no one else had the issue, follow the **Issue template** or **Feature request template** while reporting any [new Issue](https://github.com/espressif/arduino-esp32/issues/new/choose).

boards.txt (27279 changed lines): file diff suppressed because it is too large.


@ -33,15 +33,12 @@
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "freertos/semphr.h"
#include "esp32-hal.h"
#include "esp8266-compat.h"
#include "soc/gpio_reg.h"
#include "stdlib_noniso.h"
#include "binary.h"
#include "extra_attr.h"
#include "pins_arduino.h"
#include "esp32-hal.h"
#define PI 3.1415926535897932384626433832795
#define HALF_PI 1.5707963267948966192313216916398
@ -103,10 +100,7 @@
// avr-libc defines _NOP() since 1.6.2
#ifndef _NOP
#define _NOP() \
do { \
__asm__ volatile("nop"); \
} while (0)
#define _NOP() do { __asm__ volatile ("nop"); } while (0)
#endif
#define bit(b) (1UL << (b))
@ -116,13 +110,13 @@
#define analogInPinToBit(P) (P)
#if SOC_GPIO_PIN_COUNT <= 32
#define digitalPinToPort(pin) (0)
#define digitalPinToBitMask(pin) (1UL << digitalPinToGPIONumber(pin))
#define digitalPinToBitMask(pin) (1UL << (pin))
#define portOutputRegister(port) ((volatile uint32_t*)GPIO_OUT_REG)
#define portInputRegister(port) ((volatile uint32_t*)GPIO_IN_REG)
#define portModeRegister(port) ((volatile uint32_t*)GPIO_ENABLE_REG)
#elif SOC_GPIO_PIN_COUNT <= 64
#define digitalPinToPort(pin) ((digitalPinToGPIONumber(pin) > 31) ? 1 : 0)
#define digitalPinToBitMask(pin) (1UL << (digitalPinToGPIONumber(pin) & 31))
#define digitalPinToPort(pin) (((pin)>31)?1:0)
#define digitalPinToBitMask(pin) (1UL << (((pin)>31)?((pin)-32):(pin)))
#define portOutputRegister(port) ((volatile uint32_t*)((port)?GPIO_OUT1_REG:GPIO_OUT_REG))
#define portInputRegister(port) ((volatile uint32_t*)((port)?GPIO_IN1_REG:GPIO_IN_REG))
#define portModeRegister(port) ((volatile uint32_t*)((port)?GPIO_ENABLE1_REG:GPIO_ENABLE_REG))
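The pin/port macros above are what the classic AVR-style direct register idiom expands to. A minimal illustration, assuming the pin was already configured as an OUTPUT (not part of this changeset):

const uint8_t pin = 5;
*portOutputRegister(digitalPinToPort(pin)) |= digitalPinToBitMask(pin);   // drive the pin high
*portOutputRegister(digitalPinToPort(pin)) &= ~digitalPinToBitMask(pin);  // drive the pin low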
@ -144,8 +138,8 @@
#endif
#define EXTERNAL_NUM_INTERRUPTS NUM_DIGITAL_PINS // All GPIOs
#define analogInputToDigitalPin(p) (((p)<NUM_ANALOG_INPUTS)?(analogChannelToDigitalPin(p)):-1)
#define digitalPinToInterrupt(p) ((((uint8_t)digitalPinToGPIONumber(p)) < NUM_DIGITAL_PINS) ? (p) : NOT_AN_INTERRUPT)
#define digitalPinHasPWM(p) (((uint8_t)digitalPinToGPIONumber(p)) < NUM_DIGITAL_PINS)
#define digitalPinToInterrupt(p) (((p)<NUM_DIGITAL_PINS)?(p):NOT_AN_INTERRUPT)
#define digitalPinHasPWM(p) ((p)<NUM_DIGITAL_PINS)
typedef bool boolean;
typedef uint8_t byte;
@ -181,7 +175,7 @@ void initArduino(void);
unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout);
unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout);
uint8_t shiftIn(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder); // codespell:ignore shiftin
uint8_t shiftIn(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder);
void shiftOut(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder, uint8_t val);
#ifdef __cplusplus
@ -201,7 +195,6 @@ void shiftOut(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder, uint8_t val);
#include "Udp.h"
#include "HardwareSerial.h"
#include "Esp.h"
#include "freertos_stats.h"
// Use float-compatible stl abs() and round(), we don't use Arduino macros to avoid issues with the C++ libraries
using std::abs;
@ -217,33 +210,23 @@ uint16_t makeWord(uint8_t h, uint8_t l);
#define word(...) makeWord(__VA_ARGS__)
size_t getArduinoLoopTaskStackSize(void);
#define SET_LOOP_TASK_STACK_SIZE(sz) \
size_t getArduinoLoopTaskStackSize() { \
return sz; \
}
#define SET_LOOP_TASK_STACK_SIZE(sz) size_t getArduinoLoopTaskStackSize() { return sz;}
bool shouldPrintChipDebugReport(void);
#define ENABLE_CHIP_DEBUG_REPORT \
bool shouldPrintChipDebugReport(void) { \
return true; \
}
#define ENABLE_CHIP_DEBUG_REPORT bool shouldPrintChipDebugReport(void){return true;}
// allows user to bypass esp_spiram_test()
bool esp_psram_extram_test(void);
#define BYPASS_SPIRAM_TEST(bypass) \
bool testSPIRAM(void) { \
if (bypass) \
return true; \
else \
return esp_psram_extram_test(); \
}
#define BYPASS_SPIRAM_TEST(bypass) bool testSPIRAM(void) { if (bypass) return true; else return esp_psram_extram_test(); }
unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L);
unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L);
extern "C" bool getLocalTime(struct tm * info, uint32_t ms = 5000);
extern "C" void configTime(long gmtOffset_sec, int daylightOffset_sec, const char *server1, const char *server2 = nullptr, const char *server3 = nullptr);
extern "C" void configTzTime(const char *tz, const char *server1, const char *server2 = nullptr, const char *server3 = nullptr);
extern "C" void configTime(long gmtOffset_sec, int daylightOffset_sec,
const char* server1, const char* server2 = nullptr, const char* server3 = nullptr);
extern "C" void configTzTime(const char* tz,
const char* server1, const char* server2 = nullptr, const char* server3 = nullptr);
void setToneChannel(uint8_t channel = 0);
void tone(uint8_t _pin, unsigned int frequency, unsigned long duration = 0);
@ -251,7 +234,6 @@ void noTone(uint8_t _pin);
#endif /* __cplusplus */
// must be applied last as it overrides some of the above
#include "io_pin_remap.h"
#include "pins_arduino.h"
#endif /* _ESP32_CORE_ARDUINO_H_ */


@ -23,7 +23,8 @@
#include "Stream.h"
#include "IPAddress.h"
class Client : public Stream {
class Client: public Stream
{
public:
virtual int connect(IPAddress ip, uint16_t port) =0;
virtual int connect(const char *host, uint16_t port) =0;
@ -37,9 +38,9 @@ public:
virtual void stop() = 0;
virtual uint8_t connected() = 0;
virtual operator bool() = 0;
protected:
uint8_t *rawIPAddress(IPAddress &addr) {
uint8_t* rawIPAddress(IPAddress& addr)
{
return addr.raw_address();
}
};


@ -1,281 +0,0 @@
/*
*
* Copyright (c) 2021 Project CHIP Authors
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ColorFormat.h"
#include <math.h>
// define a clamp macro to substitute the std::clamp macro which is available from C++17 onwards
#define clamp(a, min, max) ((a) < (min) ? (min) : ((a) > (max) ? (max) : (a)))
const espHsvColor_t HSV_BLACK = {0, 0, 0};
const espHsvColor_t HSV_WHITE = {0, 0, 254};
const espHsvColor_t HSV_RED = {0, 254, 254};
const espHsvColor_t HSV_YELLOW = {42, 254, 254};
const espHsvColor_t HSV_GREEN = {84, 254, 254};
const espHsvColor_t HSV_CYAN = {127, 254, 254};
const espHsvColor_t HSV_BLUE = {169, 254, 254};
const espHsvColor_t HSV_MAGENTA = {211, 254, 254};
const espRgbColor_t RGB_BLACK = {0, 0, 0};
const espRgbColor_t RGB_WHITE = {255, 255, 255};
const espRgbColor_t RGB_RED = {255, 0, 0};
const espRgbColor_t RGB_YELLOW = {255, 255, 0};
const espRgbColor_t RGB_GREEN = {0, 255, 0};
const espRgbColor_t RGB_CYAN = {0, 255, 255};
const espRgbColor_t RGB_BLUE = {0, 0, 255};
const espRgbColor_t RGB_MAGENTA = {255, 0, 255};
// main color temperature values
const espCtColor_t COOL_WHITE_COLOR_TEMPERATURE = {142};
const espCtColor_t DAYLIGHT_WHITE_COLOR_TEMPERATURE = {181};
const espCtColor_t WHITE_COLOR_TEMPERATURE = {250};
const espCtColor_t SOFT_WHITE_COLOR_TEMPERATURE = {370};
const espCtColor_t WARM_WHITE_COLOR_TEMPERATURE = {454};
espRgbColor_t espHsvToRgbColor(uint16_t h, uint8_t s, uint8_t v) {
espHsvColor_t hsv = {h, s, v};
return espHsvColorToRgbColor(hsv);
}
espRgbColor_t espHsvColorToRgbColor(espHsvColor_t hsv) {
espRgbColor_t rgb;
uint8_t region, p, q, t;
uint32_t h, s, v, remainder;
if (hsv.s == 0) {
rgb.r = rgb.g = rgb.b = hsv.v;
} else {
h = hsv.h;
s = hsv.s;
v = hsv.v;
region = h / 43;
remainder = (h - (region * 43)) * 6;
p = (v * (255 - s)) >> 8;
q = (v * (255 - ((s * remainder) >> 8))) >> 8;
t = (v * (255 - ((s * (255 - remainder)) >> 8))) >> 8;
switch (region) {
case 0: rgb.r = v, rgb.g = t, rgb.b = p; break;
case 1: rgb.r = q, rgb.g = v, rgb.b = p; break;
case 2: rgb.r = p, rgb.g = v, rgb.b = t; break;
case 3: rgb.r = p, rgb.g = q, rgb.b = v; break;
case 4: rgb.r = t, rgb.g = p, rgb.b = v; break;
case 5:
default: rgb.r = v, rgb.g = p, rgb.b = q; break;
}
}
return rgb;
}
espHsvColor_t espRgbToHsvColor(uint8_t r, uint8_t g, uint8_t b) {
espRgbColor_t rgb = {r, g, b};
return espRgbColorToHsvColor(rgb);
}
espHsvColor_t espRgbColorToHsvColor(espRgbColor_t rgb) {
espHsvColor_t hsv;
uint8_t rgbMin, rgbMax;
rgbMin = rgb.r < rgb.g ? (rgb.r < rgb.b ? rgb.r : rgb.b) : (rgb.g < rgb.b ? rgb.g : rgb.b);
rgbMax = rgb.r > rgb.g ? (rgb.r > rgb.b ? rgb.r : rgb.b) : (rgb.g > rgb.b ? rgb.g : rgb.b);
hsv.v = rgbMax;
if (hsv.v == 0) {
hsv.h = 0;
hsv.s = 0;
return hsv;
}
hsv.s = 255 * (rgbMax - rgbMin) / hsv.v;
if (hsv.s == 0) {
hsv.h = 0;
return hsv;
}
if (rgbMax == rgb.r) {
hsv.h = 0 + 43 * (rgb.g - rgb.b) / (rgbMax - rgbMin);
} else if (rgbMax == rgb.g) {
hsv.h = 85 + 43 * (rgb.b - rgb.r) / (rgbMax - rgbMin);
} else {
hsv.h = 171 + 43 * (rgb.r - rgb.g) / (rgbMax - rgbMin);
}
return hsv;
}
espRgbColor_t espXYColorToRgbColor(uint8_t Level, espXyColor_t xy) {
return espXYToRgbColor(Level, xy.x, xy.y, true);
}
espRgbColor_t espXYToRgbColor(uint8_t Level, uint16_t current_X, uint16_t current_Y, bool addXYZScaling) {
// convert xyY color space to RGB
// https://www.easyrgb.com/en/math.php
// https://en.wikipedia.org/wiki/SRGB
// refer https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
// The current_X/current_Y attribute contains the current value of the normalized chromaticity value of x/y.
// The value of x/y shall be related to the current_X/current_Y attribute by the relationship
// x = current_X/65536
// y = current_Y/65536
// z = 1-x-y
espRgbColor_t rgb;
float x, y, z;
float X, Y, Z;
float r, g, b;
x = ((float)current_X) / 65535.0f;
y = ((float)current_Y) / 65535.0f;
z = 1.0f - x - y;
// Calculate XYZ values
// Y - given brightness in 0 - 1 range
Y = ((float)Level) / 254.0f;
X = (Y / y) * x;
Z = (Y / y) * z;
// X, Y and Z input refer to a D65/2° standard illuminant.
// sR, sG and sB (standard RGB) output range = 0 ÷ 255
// convert XYZ to RGB - CIE XYZ to sRGB
if (addXYZScaling) {
X = X / 100.0f;
Y = Y / 100.0f;
Z = Z / 100.0f;
}
r = (X * 3.2406f) - (Y * 1.5372f) - (Z * 0.4986f);
g = -(X * 0.9689f) + (Y * 1.8758f) + (Z * 0.0415f);
b = (X * 0.0557f) - (Y * 0.2040f) + (Z * 1.0570f);
// apply gamma 2.2 correction
r = (r <= 0.0031308f ? 12.92f * r : (1.055f) * pow(r, (1.0f / 2.4f)) - 0.055f);
g = (g <= 0.0031308f ? 12.92f * g : (1.055f) * pow(g, (1.0f / 2.4f)) - 0.055f);
b = (b <= 0.0031308f ? 12.92f * b : (1.055f) * pow(b, (1.0f / 2.4f)) - 0.055f);
// Round off
r = clamp(r, 0, 1);
g = clamp(g, 0, 1);
b = clamp(b, 0, 1);
// these rgb values are in the range of 0 to 1, convert to limit of HW specific LED
rgb.r = (uint8_t)(r * 255);
rgb.g = (uint8_t)(g * 255);
rgb.b = (uint8_t)(b * 255);
return rgb;
}
espXyColor_t espRgbToXYColor(uint8_t r, uint8_t g, uint8_t b) {
espRgbColor_t rgb = {r, g, b};
return espRgbColorToXYColor(rgb);
}
espXyColor_t espRgbColorToXYColor(espRgbColor_t rgb) {
// convert RGB to xy color space
// https://www.easyrgb.com/en/math.php
// https://en.wikipedia.org/wiki/SRGB
// refer https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
espXyColor_t xy;
float r, g, b;
float X, Y, Z;
float x, y;
r = ((float)rgb.r) / 255.0f;
g = ((float)rgb.g) / 255.0f;
b = ((float)rgb.b) / 255.0f;
// convert RGB to XYZ - sRGB to CIE XYZ
r = (r <= 0.04045f ? r / 12.92f : pow((r + 0.055f) / 1.055f, 2.4f));
g = (g <= 0.04045f ? g / 12.92f : pow((g + 0.055f) / 1.055f, 2.4f));
b = (b <= 0.04045f ? b / 12.92f : pow((b + 0.055f) / 1.055f, 2.4f));
// https://gist.github.com/popcorn245/30afa0f98eea1c2fd34d
X = r * 0.649926f + g * 0.103455f + b * 0.197109f;
Y = r * 0.234327f + g * 0.743075f + b * 0.022598f;
Z = r * 0.0000000f + g * 0.053077f + b * 1.035763f;
// sR, sG and sB (standard RGB) input range = 0 ÷ 255
// X, Y and Z output refer to a D65/2° standard illuminant.
X = r * 0.4124564f + g * 0.3575761f + b * 0.1804375f;
Y = r * 0.2126729f + g * 0.7151522f + b * 0.0721750f;
Z = r * 0.0193339f + g * 0.1191920f + b * 0.9503041f;
// Calculate xy values
x = X / (X + Y + Z);
y = Y / (X + Y + Z);
// convert to 0-65535 range
xy.x = (uint16_t)(x * 65535);
xy.y = (uint16_t)(y * 65535);
return xy;
}
espRgbColor_t espCTToRgbColor(uint16_t ct) {
espCtColor_t ctColor = {ct};
return espCTColorToRgbColor(ctColor);
}
espRgbColor_t espCTColorToRgbColor(espCtColor_t ct) {
espRgbColor_t rgb = {0, 0, 0};
float r, g, b;
if (ct.ctMireds == 0) {
return rgb;
}
// Algorithm credits to Tanner Helland: https://tannerhelland.com/2012/09/18/convert-temperature-rgb-algorithm-code.html
// Convert Mireds to centiKelvins. k = 1,000,000/mired
float ctCentiKelvin = 10000 / ct.ctMireds;
// Red
if (ctCentiKelvin <= 66) {
r = 255;
} else {
r = 329.698727446f * pow(ctCentiKelvin - 60, -0.1332047592f);
}
// Green
if (ctCentiKelvin <= 66) {
g = 99.4708025861f * log(ctCentiKelvin) - 161.1195681661f;
} else {
g = 288.1221695283f * pow(ctCentiKelvin - 60, -0.0755148492f);
}
// Blue
if (ctCentiKelvin >= 66) {
b = 255;
} else {
if (ctCentiKelvin <= 19) {
b = 0;
} else {
b = 138.5177312231 * log(ctCentiKelvin - 10) - 305.0447927307;
}
}
rgb.r = (uint8_t)clamp(r, 0, 255);
rgb.g = (uint8_t)clamp(g, 0, 255);
rgb.b = (uint8_t)clamp(b, 0, 255);
return rgb;
}
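As a quick sanity check of the color-temperature conversion above (values approximate): 370 mireds (SOFT_WHITE_COLOR_TEMPERATURE) gives ctCentiKelvin ≈ 27, so r = 255, g ≈ 99.47·ln(27) − 161.12 ≈ 167, and b ≈ 138.52·ln(17) − 305.04 ≈ 87, i.e. a warm, red-shifted white, as expected.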


@ -1,71 +0,0 @@
/*
*
* Copyright (c) 2021 Project CHIP Authors
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <stdint.h>
#include <stdbool.h>
#ifdef __cplusplus
extern "C" {
#endif
struct RgbColor_t {
uint8_t r;
uint8_t g;
uint8_t b;
};
struct HsvColor_t {
uint16_t h;
uint8_t s;
uint8_t v;
};
struct XyColor_t {
uint16_t x;
uint16_t y;
};
struct CtColor_t {
uint16_t ctMireds;
};
typedef struct RgbColor_t espRgbColor_t;
typedef struct HsvColor_t espHsvColor_t;
typedef struct XyColor_t espXyColor_t;
typedef struct CtColor_t espCtColor_t;
espRgbColor_t espXYToRgbColor(uint8_t Level, uint16_t current_X, uint16_t current_Y, bool addXYZScaling);
espRgbColor_t espXYColorToRgb(uint8_t Level, espXyColor_t xy);
espXyColor_t espRgbColorToXYColor(espRgbColor_t rgb);
espXyColor_t espRgbToXYColor(uint8_t r, uint8_t g, uint8_t b);
espRgbColor_t espHsvColorToRgbColor(espHsvColor_t hsv);
espRgbColor_t espHsvToRgbColor(uint16_t h, uint8_t s, uint8_t v);
espRgbColor_t espCTColorToRgbColor(espCtColor_t ct);
espRgbColor_t espCTToRgbColor(uint16_t ct);
espHsvColor_t espRgbColorToHsvColor(espRgbColor_t rgb);
espHsvColor_t espRgbToHsvColor(uint8_t r, uint8_t g, uint8_t b);
extern const espHsvColor_t HSV_BLACK, HSV_WHITE, HSV_RED, HSV_YELLOW, HSV_GREEN, HSV_CYAN, HSV_BLUE, HSV_MAGENTA;
extern const espCtColor_t COOL_WHITE_COLOR_TEMPERATURE, DAYLIGHT_WHITE_COLOR_TEMPERATURE, WHITE_COLOR_TEMPERATURE, SOFT_WHITE_COLOR_TEMPERATURE,
WARM_WHITE_COLOR_TEMPERATURE;
extern const espRgbColor_t RGB_BLACK, RGB_WHITE, RGB_RED, RGB_YELLOW, RGB_GREEN, RGB_CYAN, RGB_BLUE, RGB_MAGENTA;
#ifdef __cplusplus
}
#endif
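A hedged usage sketch for the conversion helpers declared above (function names are taken from this header; the 0-254 HSV value range follows the constants defined in ColorFormat.c; the snippet is not part of this changeset):

#include "ColorFormat.h"

void demoColorConversion() {
  espRgbColor_t rgb = espHsvToRgbColor(42, 254, 254);       // roughly HSV_YELLOW
  espXyColor_t xy = espRgbToXYColor(rgb.r, rgb.g, rgb.b);   // back into CIE xy, scaled to 0-65535
  espRgbColor_t warm = espCTColorToRgbColor(SOFT_WHITE_COLOR_TEMPERATURE);
  (void)xy; (void)warm;                                     // print via Serial.printf() to inspect
}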


@ -21,7 +21,6 @@
#include "Esp.h"
#include "esp_sleep.h"
#include "spi_flash_mmap.h"
#include "esp_idf_version.h"
#include <memory>
#include <soc/soc.h>
#include <esp_partition.h>
@ -49,9 +48,6 @@ extern "C" {
#include "esp32s3/rom/spi_flash.h"
#include "soc/efuse_reg.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32s3 is located at 0x0000
#elif CONFIG_IDF_TARGET_ESP32C2
#include "esp32c2/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32c2 is located at 0x0000
#elif CONFIG_IDF_TARGET_ESP32C3
#include "esp32c3/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32c3 is located at 0x0000
@ -61,12 +57,6 @@ extern "C" {
#elif CONFIG_IDF_TARGET_ESP32H2
#include "esp32h2/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32h2 is located at 0x0000
#elif CONFIG_IDF_TARGET_ESP32P4
#include "esp32p4/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x2000 // Esp32p4 is located at 0x2000
#elif CONFIG_IDF_TARGET_ESP32C5
#include "esp32c5/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x2000 // Esp32c5 is located at 0x2000
#else
#error Target CONFIG_IDF_TARGET is not supported
#endif
@ -78,10 +68,9 @@ extern "C" {
// REG_SPI_BASE is not defined for S3/C3 ??
#if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
#ifdef REG_SPI_BASE
#undef REG_SPI_BASE
#endif // REG_SPI_BASE
#ifndef REG_SPI_BASE
#define REG_SPI_BASE(i) (DR_REG_SPI1_BASE + (((i)>1) ? (((i)* 0x1000) + 0x20000) : (((~(i)) & 1)* 0x1000 )))
#endif // REG_SPI_BASE
#endif // TARGET
/**
@ -91,90 +80,110 @@ extern "C" {
* uint32_t = test = 10_MHz; // --> 10000000
*/
unsigned long long operator"" _kHz(unsigned long long x) {
unsigned long long operator"" _kHz(unsigned long long x)
{
return x * 1000;
}
unsigned long long operator"" _MHz(unsigned long long x) {
unsigned long long operator"" _MHz(unsigned long long x)
{
return x * 1000 * 1000;
}
unsigned long long operator"" _GHz(unsigned long long x) {
unsigned long long operator"" _GHz(unsigned long long x)
{
return x * 1000 * 1000 * 1000;
}
unsigned long long operator"" _kBit(unsigned long long x) {
unsigned long long operator"" _kBit(unsigned long long x)
{
return x * 1024;
}
unsigned long long operator"" _MBit(unsigned long long x) {
unsigned long long operator"" _MBit(unsigned long long x)
{
return x * 1024 * 1024;
}
unsigned long long operator"" _GBit(unsigned long long x) {
unsigned long long operator"" _GBit(unsigned long long x)
{
return x * 1024 * 1024 * 1024;
}
unsigned long long operator"" _kB(unsigned long long x) {
unsigned long long operator"" _kB(unsigned long long x)
{
return x * 1024;
}
unsigned long long operator"" _MB(unsigned long long x) {
unsigned long long operator"" _MB(unsigned long long x)
{
return x * 1024 * 1024;
}
unsigned long long operator"" _GB(unsigned long long x) {
unsigned long long operator"" _GB(unsigned long long x)
{
return x * 1024 * 1024 * 1024;
}
EspClass ESP;
void EspClass::deepSleep(uint64_t time_us) {
void EspClass::deepSleep(uint32_t time_us)
{
esp_deep_sleep(time_us);
}
void EspClass::restart(void) {
void EspClass::restart(void)
{
esp_restart();
}
uint32_t EspClass::getHeapSize(void) {
uint32_t EspClass::getHeapSize(void)
{
return heap_caps_get_total_size(MALLOC_CAP_INTERNAL);
}
uint32_t EspClass::getFreeHeap(void) {
uint32_t EspClass::getFreeHeap(void)
{
return heap_caps_get_free_size(MALLOC_CAP_INTERNAL);
}
uint32_t EspClass::getMinFreeHeap(void) {
uint32_t EspClass::getMinFreeHeap(void)
{
return heap_caps_get_minimum_free_size(MALLOC_CAP_INTERNAL);
}
uint32_t EspClass::getMaxAllocHeap(void) {
uint32_t EspClass::getMaxAllocHeap(void)
{
return heap_caps_get_largest_free_block(MALLOC_CAP_INTERNAL);
}
uint32_t EspClass::getPsramSize(void) {
uint32_t EspClass::getPsramSize(void)
{
if(psramFound()){
return heap_caps_get_total_size(MALLOC_CAP_SPIRAM);
}
return 0;
}
uint32_t EspClass::getFreePsram(void) {
uint32_t EspClass::getFreePsram(void)
{
if(psramFound()){
return heap_caps_get_free_size(MALLOC_CAP_SPIRAM);
}
return 0;
}
uint32_t EspClass::getMinFreePsram(void) {
uint32_t EspClass::getMinFreePsram(void)
{
if(psramFound()){
return heap_caps_get_minimum_free_size(MALLOC_CAP_SPIRAM);
}
return 0;
}
uint32_t EspClass::getMaxAllocPsram(void) {
uint32_t EspClass::getMaxAllocPsram(void)
{
if(psramFound()){
return heap_caps_get_largest_free_block(MALLOC_CAP_SPIRAM);
}
@ -184,9 +193,7 @@ uint32_t EspClass::getMaxAllocPsram(void) {
static uint32_t sketchSize(sketchSize_t response) {
esp_image_metadata_t data;
const esp_partition_t *running = esp_ota_get_running_partition();
if (!running) {
return 0;
}
if (!running) return 0;
const esp_partition_pos_t running_pos = {
.offset = running->address,
.size = running->size,
@ -204,7 +211,8 @@ uint32_t EspClass::getSketchSize() {
return sketchSize(SKETCH_SIZE_TOTAL);
}
String EspClass::getSketchMD5() {
String EspClass::getSketchMD5()
{
static String result;
if (result.length()) {
return result;
@ -214,35 +222,30 @@ String EspClass::getSketchMD5() {
const esp_partition_t *running = esp_ota_get_running_partition();
if (!running) {
log_e("Partition could not be found");
return String();
}
const size_t bufSize = SPI_FLASH_SEC_SIZE;
uint8_t *pb = (uint8_t *)malloc(bufSize);
if (!pb) {
std::unique_ptr<uint8_t[]> buf(new uint8_t[bufSize]);
uint32_t offset = 0;
if(!buf.get()) {
log_e("Not enough memory to allocate buffer");
return String();
}
uint32_t offset = 0;
MD5Builder md5;
md5.begin();
while( lengthLeft > 0) {
size_t readBytes = (lengthLeft < bufSize) ? lengthLeft : bufSize;
if (!ESP.flashRead(running->address + offset, (uint32_t *)pb, (readBytes + 3) & ~3)) {
free(pb);
if (!ESP.flashRead(running->address + offset, reinterpret_cast<uint32_t*>(buf.get()), (readBytes + 3) & ~3)) {
log_e("Could not read buffer from flash");
return String();
}
md5.add(pb, readBytes);
md5.add(buf.get(), readBytes);
lengthLeft -= readBytes;
offset += readBytes;
#if CONFIG_FREERTOS_UNICORE
delay(1); // Fix solo WDT
#endif
}
free(pb);
md5.calculate();
result = md5.toString();
return result;
@ -257,43 +260,53 @@ uint32_t EspClass::getFreeSketchSpace() {
return _partition->size;
}
uint16_t EspClass::getChipRevision(void) {
uint16_t EspClass::getChipRevision(void)
{
esp_chip_info_t chip_info;
esp_chip_info(&chip_info);
return chip_info.revision;
}
const char *EspClass::getChipModel(void) {
const char * EspClass::getChipModel(void)
{
#if CONFIG_IDF_TARGET_ESP32
uint32_t chip_ver = REG_GET_FIELD(EFUSE_BLK0_RDATA3_REG, EFUSE_RD_CHIP_PACKAGE);
uint32_t pkg_ver = chip_ver & 0x7;
switch (pkg_ver) {
case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDQ6 :
if ((getChipRevision() / 100) == 3) {
if (getChipRevision() == 3)
return "ESP32-D0WDQ6-V3";
} else {
else
return "ESP32-D0WDQ6";
}
case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDQ5 :
if ((getChipRevision() / 100) == 3) {
if (getChipRevision() == 3)
return "ESP32-D0WD-V3";
} else {
else
return "ESP32-D0WD";
}
case EFUSE_RD_CHIP_VER_PKG_ESP32D2WDQ5: return "ESP32-D2WD";
case EFUSE_RD_CHIP_VER_PKG_ESP32U4WDH: return "ESP32-U4WDH";
case EFUSE_RD_CHIP_VER_PKG_ESP32PICOD4: return "ESP32-PICO-D4";
case EFUSE_RD_CHIP_VER_PKG_ESP32PICOV302: return "ESP32-PICO-V3-02";
case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDR2V3: return "ESP32-D0WDR2-V3";
default: return "Unknown";
case EFUSE_RD_CHIP_VER_PKG_ESP32D2WDQ5 :
return "ESP32-D2WD";
case EFUSE_RD_CHIP_VER_PKG_ESP32PICOD2 :
return "ESP32-PICO-D2";
case EFUSE_RD_CHIP_VER_PKG_ESP32PICOD4 :
return "ESP32-PICO-D4";
case EFUSE_RD_CHIP_VER_PKG_ESP32PICOV302 :
return "ESP32-PICO-V3-02";
case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDR2V3 :
return "ESP32-D0WDR2-V3";
default:
return "Unknown";
}
#elif CONFIG_IDF_TARGET_ESP32S2
uint32_t pkg_ver = REG_GET_FIELD(EFUSE_RD_MAC_SPI_SYS_3_REG, EFUSE_PKG_VERSION);
switch (pkg_ver) {
case 0: return "ESP32-S2";
case 1: return "ESP32-S2FH16";
case 2: return "ESP32-S2FH32";
default: return "ESP32-S2 (Unknown)";
case 0:
return "ESP32-S2";
case 1:
return "ESP32-S2FH16";
case 2:
return "ESP32-S2FH32";
default:
return "ESP32-S2 (Unknown)";
}
#else
esp_chip_info_t chip_info;
@ -304,43 +317,43 @@ const char *EspClass::getChipModel(void) {
case CHIP_ESP32C2: return "ESP32-C2";
case CHIP_ESP32C6: return "ESP32-C6";
case CHIP_ESP32H2: return "ESP32-H2";
case CHIP_ESP32P4: return "ESP32-P4";
#if ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 5, 0)
case CHIP_ESP32C5: return "ESP32-C5";
case CHIP_ESP32C61: return "ESP32-C61";
case CHIP_ESP32H21: return "ESP32-H21";
#endif
default: return "UNKNOWN";
}
#endif
}
uint8_t EspClass::getChipCores(void) {
uint8_t EspClass::getChipCores(void)
{
esp_chip_info_t chip_info;
esp_chip_info(&chip_info);
return chip_info.cores;
}
const char *EspClass::getSdkVersion(void) {
const char * EspClass::getSdkVersion(void)
{
return esp_get_idf_version();
}
const char *EspClass::getCoreVersion(void) {
const char * EspClass::getCoreVersion(void)
{
return ESP_ARDUINO_VERSION_STR;
}
uint32_t ESP_getFlashChipId(void) {
uint32_t ESP_getFlashChipId(void)
{
uint32_t id = g_rom_flashchip.device_id;
id = ((id & 0xff) << 16) | ((id >> 16) & 0xff) | (id & 0xff00);
return id;
}
uint32_t EspClass::getFlashChipSize(void) {
uint32_t EspClass::getFlashChipSize(void)
{
uint32_t id = (ESP_getFlashChipId() >> 16) & 0xFF;
return 2 << (id - 1);
}
uint32_t EspClass::getFlashChipSpeed(void) {
uint32_t EspClass::getFlashChipSpeed(void)
{
esp_image_header_t fhdr;
if(esp_flash_read(esp_flash_default_chip, (void*)&fhdr, ESP_FLASH_IMAGE_BASE, sizeof(esp_image_header_t)) && fhdr.magic != ESP_IMAGE_HEADER_MAGIC) {
return 0;
@ -348,13 +361,12 @@ uint32_t EspClass::getFlashChipSpeed(void) {
return magicFlashChipSpeed(fhdr.spi_speed);
}
// FIXME for P4
#if !defined(CONFIG_IDF_TARGET_ESP32P4)
FlashMode_t EspClass::getFlashChipMode(void) {
FlashMode_t EspClass::getFlashChipMode(void)
{
#if CONFIG_IDF_TARGET_ESP32S2
uint32_t spi_ctrl = REG_READ(PERIPHS_SPI_FLASH_CTRL);
#else
#if CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP32C2 || CONFIG_IDF_TARGET_ESP32C6
#if CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP32C6
uint32_t spi_ctrl = REG_READ(DR_REG_SPI0_BASE + 0x8);
#else
uint32_t spi_ctrl = REG_READ(SPI_CTRL_REG(0));
@ -376,9 +388,9 @@ FlashMode_t EspClass::getFlashChipMode(void) {
}
return (FM_DOUT);
}
#endif // if !defined(CONFIG_IDF_TARGET_ESP32P4)
uint32_t EspClass::magicFlashChipSize(uint8_t byte) {
uint32_t EspClass::magicFlashChipSize(uint8_t byte)
{
/*
FLASH_SIZES = {
"1MB": 0x00,
@ -405,7 +417,8 @@ uint32_t EspClass::magicFlashChipSize(uint8_t byte) {
}
}
uint32_t EspClass::magicFlashChipSpeed(uint8_t byte) {
uint32_t EspClass::magicFlashChipSpeed(uint8_t byte)
{
#if CONFIG_IDF_TARGET_ESP32C2
/*
FLASH_FREQUENCY = {
@ -424,6 +437,7 @@ uint32_t EspClass::magicFlashChipSpeed(uint8_t byte) {
return 0;
}
#elif CONFIG_IDF_TARGET_ESP32C6
/*
FLASH_FREQUENCY = {
@ -458,6 +472,7 @@ uint32_t EspClass::magicFlashChipSpeed(uint8_t byte) {
return 0;
}
#else
/*
FLASH_FREQUENCY = {
@ -478,7 +493,9 @@ uint32_t EspClass::magicFlashChipSpeed(uint8_t byte) {
#endif
}
FlashMode_t EspClass::magicFlashChipMode(uint8_t byte) {
FlashMode_t EspClass::magicFlashChipMode(uint8_t byte)
{
FlashMode_t mode = (FlashMode_t) byte;
if(mode > FM_SLOW_READ) {
mode = FM_UNKNOWN;
@ -486,32 +503,39 @@ FlashMode_t EspClass::magicFlashChipMode(uint8_t byte) {
return mode;
}
bool EspClass::flashEraseSector(uint32_t sector) {
bool EspClass::flashEraseSector(uint32_t sector)
{
return esp_flash_erase_region(esp_flash_default_chip, sector * SPI_FLASH_SEC_SIZE, SPI_FLASH_SEC_SIZE) == ESP_OK;
}
// Warning: These functions do not work with encrypted flash
bool EspClass::flashWrite(uint32_t offset, uint32_t *data, size_t size) {
bool EspClass::flashWrite(uint32_t offset, uint32_t *data, size_t size)
{
return esp_flash_write(esp_flash_default_chip, (const void*) data, offset, size) == ESP_OK;
}
bool EspClass::flashRead(uint32_t offset, uint32_t *data, size_t size) {
bool EspClass::flashRead(uint32_t offset, uint32_t *data, size_t size)
{
return esp_flash_read(esp_flash_default_chip, (void*) data, offset, size) == ESP_OK;
}
bool EspClass::partitionEraseRange(const esp_partition_t *partition, uint32_t offset, size_t size) {
bool EspClass::partitionEraseRange(const esp_partition_t *partition, uint32_t offset, size_t size)
{
return esp_partition_erase_range(partition, offset, size) == ESP_OK;
}
bool EspClass::partitionWrite(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size) {
bool EspClass::partitionWrite(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size)
{
return esp_partition_write(partition, offset, data, size) == ESP_OK;
}
bool EspClass::partitionRead(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size) {
bool EspClass::partitionRead(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size)
{
return esp_partition_read(partition, offset, data, size) == ESP_OK;
}
uint64_t EspClass::getEfuseMac(void) {
uint64_t EspClass::getEfuseMac(void)
{
uint64_t _chipmacid = 0LL;
esp_efuse_mac_get_default((uint8_t*) (&_chipmacid));
return _chipmacid;


@ -26,7 +26,7 @@
#include "esp_cpu.h"
/**
* AVR macros for WDT management
* AVR macros for WDT managment
*/
typedef enum {
WDTO_0MS = 0, //!< WDTO_0MS
@ -42,6 +42,7 @@ typedef enum {
WDTO_8S = 8000 //!< WDTO_8S
} WDTO_t;
typedef enum {
FM_QIO = 0x00,
FM_QOUT = 0x01,
@ -57,7 +58,8 @@ typedef enum {
SKETCH_SIZE_FREE = 1
} sketchSize_t;
class EspClass {
class EspClass
{
public:
EspClass() {}
~EspClass() {}
@ -78,15 +80,13 @@ public:
uint16_t getChipRevision();
const char * getChipModel();
uint8_t getChipCores();
uint32_t getCpuFreqMHz() {
return getCpuFrequencyMhz();
}
uint32_t getCpuFreqMHz(){ return getCpuFrequencyMhz(); }
inline uint32_t getCycleCount() __attribute__((always_inline));
const char * getSdkVersion(); //version of ESP-IDF
const char * getCoreVersion();//version of this core
void deepSleep(uint64_t time_us);
void deepSleep(uint32_t time_us);
uint32_t getFlashChipSize();
uint32_t getFlashChipSpeed();
@ -109,9 +109,11 @@ public:
bool partitionRead(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size);
uint64_t getEfuseMac();
};
uint32_t ARDUINO_ISR_ATTR EspClass::getCycleCount() {
uint32_t ARDUINO_ISR_ATTR EspClass::getCycleCount()
{
return (uint32_t)esp_cpu_get_cycle_count();
}


@ -19,8 +19,8 @@
#include "esp_partition.h"
#include "esp_ota_ops.h"
#include "esp_image_format.h"
#include "pins_arduino.h"
#include "esp32-hal.h"
#include "pins_arduino.h"
#include "firmware_msc_fat.h"
#include "spi_flash_mmap.h"
@ -310,8 +310,7 @@ static int32_t msc_write(uint32_t lba, uint32_t offset, uint8_t *buffer, uint32_
return 0;
}
} else if(msc_update_state == MSC_UPDATE_RUNNING){
if (msc_update_entry && msc_update_entry->file_size && msc_update_bytes_written < msc_update_entry->file_size
&& (msc_update_bytes_written + bufsize) >= msc_update_entry->file_size) {
if(msc_update_entry && msc_update_entry->file_size && msc_update_bytes_written < msc_update_entry->file_size && (msc_update_bytes_written + bufsize) >= msc_update_entry->file_size){
bufsize = msc_update_entry->file_size - msc_update_bytes_written;
}
if(msc_update_write(msc_ota_partition, ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset, buffer, bufsize) == ESP_OK){


@ -11,24 +11,34 @@
typedef void (*voidFuncPtr)(void);
typedef void (*voidFuncPtrArg)(void*);
extern "C" {
extern "C"
{
extern void __attachInterruptFunctionalArg(uint8_t pin, voidFuncPtrArg userFunc, void * arg, int intr_type, bool functional);
}
void ARDUINO_ISR_ATTR interruptFunctional(void *arg) {
void ARDUINO_ISR_ATTR interruptFunctional(void* arg)
{
InterruptArgStructure* localArg = (InterruptArgStructure*)arg;
if (localArg->interruptFunction) {
if (localArg->interruptFunction)
{
localArg->interruptFunction();
}
}
void attachInterrupt(uint8_t pin, std::function<void(void)> intRoutine, int mode) {
void attachInterrupt(uint8_t pin, std::function<void(void)> intRoutine, int mode)
{
// use the local interrupt routine which takes the ArgStructure as argument
__attachInterruptFunctionalArg (pin, (voidFuncPtrArg)interruptFunctional, new InterruptArgStructure{intRoutine}, mode, true);
}
extern "C" {
void cleanupFunctional(void *arg) {
extern "C"
{
void cleanupFunctional(void* arg)
{
delete (InterruptArgStructure*)arg;
}
}


@ -15,8 +15,7 @@ struct InterruptArgStructure {
std::function<void(void)> interruptFunction;
};
// The extra set of parentheses here prevents macros defined
// in io_pin_remap.h from applying to this declaration.
void(attachInterrupt)(uint8_t pin, std::function<void(void)> intRoutine, int mode);
void attachInterrupt(uint8_t pin, std::function<void(void)> intRoutine, int mode);
#endif /* CORE_CORE_FUNCTIONALINTERRUPT_H_ */


@ -1,76 +0,0 @@
/*
Copyright (c) 2015 Hristo Gochkov. All rights reserved.
This file is part of the esp32 core for Arduino environment.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <Arduino.h>
#include <HEXBuilder.h>
static uint8_t hex_char_to_byte(uint8_t c) {
return (c >= 'a' && c <= 'f') ? (c - ((uint8_t)'a' - 0xa))
: (c >= 'A' && c <= 'F') ? (c - ((uint8_t)'A' - 0xA))
: (c >= '0' && c <= '9') ? (c - (uint8_t)'0')
: 0x10; // unknown char is 16
}
size_t HEXBuilder::hex2bytes(unsigned char *out, size_t maxlen, String &in) {
return hex2bytes(out, maxlen, in.c_str());
}
size_t HEXBuilder::hex2bytes(unsigned char *out, size_t maxlen, const char *in) {
size_t len = 0;
for (; *in; in++) {
uint8_t c = hex_char_to_byte(*in);
// Silently skip anything unknown.
if (c > 15) {
continue;
}
if (len & 1) {
if (len / 2 < maxlen) {
out[len / 2] |= c;
}
} else {
if (len / 2 < maxlen) {
out[len / 2] = c << 4;
}
}
len++;
}
return (len + 1) / 2;
}
size_t HEXBuilder::bytes2hex(char *out, size_t maxlen, const unsigned char *in, size_t len) {
for (size_t i = 0; i < len; i++) {
if (i * 2 + 1 < maxlen) {
sprintf(out + (i * 2), "%02x", in[i]);
}
}
return len * 2 + 1;
}
String HEXBuilder::bytes2hex(const unsigned char *in, size_t len) {
size_t maxlen = len * 2 + 1;
char *out = (char *)malloc(maxlen);
if (!out) {
return String();
}
bytes2hex(out, maxlen, in, len);
String ret = String(out);
free(out);
return ret;
}


@ -1,34 +0,0 @@
/*
Copyright (c) 2015 Hristo Gochkov. All rights reserved.
This file is part of the esp32 core for Arduino environment.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef HEXBuilder_h
#define HEXBuilder_h
#include <WString.h>
#include <Stream.h>
class HEXBuilder {
public:
static size_t hex2bytes(unsigned char *out, size_t maxlen, String &in);
static size_t hex2bytes(unsigned char *out, size_t maxlen, const char *in);
static String bytes2hex(const unsigned char *in, size_t len);
static size_t bytes2hex(char *out, size_t maxlen, const unsigned char *in, size_t len);
};
#endif
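A hedged usage sketch for the HEXBuilder helpers declared above (assumes an Arduino sketch context; not part of this changeset):

#include <HEXBuilder.h>

void demoHex() {
  uint8_t mac[6] = {0xDE, 0xAD, 0xBE, 0xEF, 0x00, 0x01};
  String hex = HEXBuilder::bytes2hex(mac, sizeof(mac));   // -> "deadbeef0001"
  uint8_t back[6] = {0};
  HEXBuilder::hex2bytes(back, sizeof(back), hex);         // parses the hex string back into bytes
}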


@ -1,4 +1,4 @@
// Copyright 2015-2024 Espressif Systems (Shanghai) PTE LTD
// Copyright 2015-2020 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -24,7 +24,6 @@
#include "esp_intr_alloc.h"
#include "soc/periph_defs.h"
#include "soc/io_mux_reg.h"
#include "soc/usb_serial_jtag_struct.h"
#pragma GCC diagnostic ignored "-Wvolatile"
#include "hal/usb_serial_jtag_ll.h"
#pragma GCC diagnostic warning "-Wvolatile"
@ -36,31 +35,30 @@ static RingbufHandle_t tx_ring_buf = NULL;
static QueueHandle_t rx_queue = NULL;
static uint8_t rx_data_buf[64] = {0};
static intr_handle_t intr_handle = NULL;
static volatile bool initial_empty = false;
static SemaphoreHandle_t tx_lock = NULL;
static volatile bool connected = false;
// SOF in ISR causes problems for uploading firmware
//static volatile unsigned long lastSOF_ms;
//static volatile uint8_t SOF_TIMEOUT;
// timeout has no effect when USB CDC is unplugged
static uint32_t tx_timeout_ms = 100;
// workaround for when USB CDC is not connected
static uint32_t tx_timeout_ms = 0;
static bool tx_timeout_change_request = false;
static esp_event_loop_handle_t arduino_hw_cdc_event_loop_handle = NULL;
static esp_err_t
arduino_hw_cdc_event_post(esp_event_base_t event_base, int32_t event_id, void *event_data, size_t event_data_size, BaseType_t *task_unblocked) {
static esp_err_t arduino_hw_cdc_event_post(esp_event_base_t event_base, int32_t event_id, void *event_data, size_t event_data_size, BaseType_t *task_unblocked){
if(arduino_hw_cdc_event_loop_handle == NULL){
return ESP_FAIL;
}
return esp_event_isr_post_to(arduino_hw_cdc_event_loop_handle, event_base, event_id, event_data, event_data_size, task_unblocked);
}
static esp_err_t
arduino_hw_cdc_event_handler_register_with(esp_event_base_t event_base, int32_t event_id, esp_event_handler_t event_handler, void *event_handler_arg) {
static esp_err_t arduino_hw_cdc_event_handler_register_with(esp_event_base_t event_base, int32_t event_id, esp_event_handler_t event_handler, void *event_handler_arg){
if (!arduino_hw_cdc_event_loop_handle) {
esp_event_loop_args_t event_task_args = {
.queue_size = 5, .task_name = "arduino_hw_cdc_events", .task_priority = 5, .task_stack_size = 2048, .task_core_id = tskNO_AFFINITY
.queue_size = 5,
.task_name = "arduino_hw_cdc_events",
.task_priority = 5,
.task_stack_size = 2048,
.task_core_id = tskNO_AFFINITY
};
if (esp_event_loop_create(&event_task_args, &arduino_hw_cdc_event_loop_handle) != ESP_OK) {
log_e("esp_event_loop_create failed");
@ -80,29 +78,31 @@ static void hw_cdc_isr_handler(void *arg) {
if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY) {
// Interrupt tells us the host picked up the data we sent.
if (!HWCDC::isPlugged()) {
connected = false;
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
// USB is unplugged, nothing to be done here
return;
} else {
connected = true;
}
if (tx_ring_buf != NULL && usb_serial_jtag_ll_txfifo_writable() == 1) {
if (usb_serial_jtag_ll_txfifo_writable() == 1) {
// We disable the interrupt here so that the interrupt won't be triggered if there is no data to send.
usb_serial_jtag_ll_disable_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
size_t queued_size = 0;
if(!initial_empty){
initial_empty = true;
// First time USB is plugged and the application has not explicitly set TX Timeout, set it to default 100ms.
// Otherwise, USB is still unplugged and the timeout will be kept as Zero in order to avoid any delay in the
// application whenever it uses write() and the TX Queue gets full.
if (!tx_timeout_change_request) {
tx_timeout_ms = 100;
}
//send event?
//ets_printf("CONNECTED\n");
arduino_hw_cdc_event_post(ARDUINO_HW_CDC_EVENTS, ARDUINO_HW_CDC_CONNECTED_EVENT, &event, sizeof(arduino_hw_cdc_event_data_t), &xTaskWoken);
}
size_t queued_size;
uint8_t *queued_buff = (uint8_t *)xRingbufferReceiveUpToFromISR(tx_ring_buf, &queued_size, 64);
// If the hardware fifo is available, write in it. Otherwise, do nothing.
// If the hardware fifo is avaliable, write in it. Otherwise, do nothing.
if (queued_buff != NULL) { //Although tx_queued_bytes may be larger than 0. We may have interrupt before xRingbufferSend() was called.
//Copy the queued buffer into the TX FIFO
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
usb_serial_jtag_ll_write_txfifo(queued_buff, queued_size);
usb_serial_jtag_ll_txfifo_flush();
vRingbufferReturnItemFromISR(tx_ring_buf, queued_buff, &xTaskWoken);
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
//send event?
//ets_printf("TX:%u\n", queued_size);
event.tx.len = queued_size;
@ -114,7 +114,7 @@ static void hw_cdc_isr_handler(void *arg) {
}
if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT) {
// read rx buffer(max length is 64), and send available data to ringbuffer.
// read rx buffer(max length is 64), and send avaliable data to ringbuffer.
// Ensure the rx buffer size is larger than RX_MAX_SIZE.
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT);
uint32_t rx_fifo_len = usb_serial_jtag_ll_read_rxfifo(rx_data_buf, 64);
@ -124,149 +124,45 @@ static void hw_cdc_isr_handler(void *arg) {
break;
}
}
//send event?
//ets_printf("RX:%u/%u\n", i, rx_fifo_len);
event.rx.len = i;
arduino_hw_cdc_event_post(ARDUINO_HW_CDC_EVENTS, ARDUINO_HW_CDC_RX_EVENT, &event, sizeof(arduino_hw_cdc_event_data_t), &xTaskWoken);
connected = true;
}
if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_BUS_RESET) {
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_BUS_RESET);
initial_empty = false;
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
//ets_printf("BUS_RESET\n");
arduino_hw_cdc_event_post(ARDUINO_HW_CDC_EVENTS, ARDUINO_HW_CDC_BUS_RESET_EVENT, &event, sizeof(arduino_hw_cdc_event_data_t), &xTaskWoken);
connected = false;
}
// SOF ISR is causing esptool to be unable to upload firmware to the board
// if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_SOF) {
// usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SOF);
// lastSOF_ms = millis();
// }
if (xTaskWoken == pdTRUE) {
portYIELD_FROM_ISR();
}
}
// Moved to header file as inline function. Kept just as future reference.
//inline bool HWCDC::isPlugged(void) {
// SOF ISR is causing esptool to be unable to upload firmware to the board
// Timer test for SOF seems to work when uploading firmware
// return usb_serial_jtag_is_connected();//(lastSOF_ms + SOF_TIMEOUT) >= millis();
//}
bool HWCDC::isCDC_Connected() {
static bool running = false;
// USB may be unplugged
if (!isPlugged()) {
connected = false;
running = false;
// SOF in ISR causes problems for uploading firmware
//SOF_TIMEOUT = 5; // SOF timeout when unplugged
return false;
}
//else {
// SOF_TIMEOUT = 50; // SOF timeout when plugged
//}
if (connected) {
running = false;
return true;
}
if (running == false && !connected) { // enables it only once!
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
// this will feed CDC TX FIFO to trigger IN_EMPTY
usb_serial_jtag_ll_txfifo_flush();
running = true;
return false;
}
static void flushTXBuffer(const uint8_t *buffer, size_t size) {
if (!tx_ring_buf) {
return;
}
UBaseType_t uxItemsWaiting = 0;
vRingbufferGetInfo(tx_ring_buf, NULL, NULL, NULL, NULL, &uxItemsWaiting);
size_t freeSpace = xRingbufferGetCurFreeSize(tx_ring_buf);
size_t ringbufferLength = freeSpace + uxItemsWaiting;
if (buffer == NULL) {
// just flush the whole ring buffer and exit - used by HWCDC::flush()
size_t queued_size = 0;
uint8_t *queued_buff = (uint8_t *)xRingbufferReceiveUpTo(tx_ring_buf, &queued_size, 0, ringbufferLength);
if (queued_size && queued_buff != NULL) {
vRingbufferReturnItem(tx_ring_buf, (void *)queued_buff);
}
return;
}
if (size == 0) {
return; // nothing to do
}
if (freeSpace >= size) {
// there is enough space, just add the data to the ring buffer
if (xRingbufferSend(tx_ring_buf, (void *)buffer, size, 0) != pdTRUE) {
return;
}
} else {
// how many byte should be flushed to make space for the new data
size_t to_flush = size - freeSpace;
if (to_flush > ringbufferLength) {
to_flush = ringbufferLength;
}
size_t queued_size = 0;
uint8_t *queued_buff = (uint8_t *)xRingbufferReceiveUpTo(tx_ring_buf, &queued_size, 0, to_flush);
if (queued_size && queued_buff != NULL) {
vRingbufferReturnItem(tx_ring_buf, (void *)queued_buff);
}
// now add the new data that fits into the ring buffer
uint8_t *bptr = (uint8_t *)buffer;
if (size >= ringbufferLength) {
size = ringbufferLength;
bptr = (uint8_t *)buffer + (size - ringbufferLength);
}
if (xRingbufferSend(tx_ring_buf, (void *)bptr, size, 0) != pdTRUE) {
return;
}
}
// flushes CDC FIFO
usb_serial_jtag_ll_txfifo_flush();
}
static void ARDUINO_ISR_ATTR cdc0_write_char(char c) {
if (tx_ring_buf == NULL) {
return;
}
if (!HWCDC::isConnected()) {
// just pop/push RingBuffer and apply FIFO policy
flushTXBuffer((const uint8_t *)&c, 1);
return;
}
if(xPortInIsrContext()){
xRingbufferSendFromISR(tx_ring_buf, (void*) (&c), 1, NULL);
} else {
xRingbufferSend(tx_ring_buf, (void*) (&c), 1, tx_timeout_ms / portTICK_PERIOD_MS);
}
usb_serial_jtag_ll_txfifo_flush();
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
HWCDC::HWCDC() {
perimanSetBusDeinit(ESP32_BUS_TYPE_USB_DM, HWCDC::deinit);
perimanSetBusDeinit(ESP32_BUS_TYPE_USB_DP, HWCDC::deinit);
// SOF in ISR causes problems for uploading firmware
// lastSOF_ms = 0;
// SOF_TIMEOUT = 5;
}
HWCDC::~HWCDC(){
end();
}
// It should return <true> just when USB is plugged and CDC is connected.
HWCDC::operator bool() const {
return HWCDC::isCDC_Connected();
HWCDC::operator bool() const
{
return initial_empty;
}
void HWCDC::onEvent(esp_event_handler_t callback){
@ -277,30 +173,30 @@ void HWCDC::onEvent(arduino_hw_cdc_event_t event, esp_event_handler_t callback)
arduino_hw_cdc_event_handler_register_with(ARDUINO_HW_CDC_EVENTS, event, callback, this);
}
bool HWCDC::deinit(void *busptr) {
bool HWCDC::deinit(void * busptr)
{
// avoid any recursion issue with Peripheral Manager perimanSetPinBus() call
static bool running = false;
if (running) {
return true;
}
if (running) return true;
running = true;
// Setting USB D+ D- pins
bool retCode = true;
retCode &= perimanClearPinBus(USB_INT_PHY0_DM_GPIO_NUM);
retCode &= perimanClearPinBus(USB_INT_PHY0_DP_GPIO_NUM);
retCode &= perimanSetPinBus(USB_DM_GPIO_NUM, ESP32_BUS_TYPE_INIT, NULL);
retCode &= perimanSetPinBus(USB_DP_GPIO_NUM, ESP32_BUS_TYPE_INIT, NULL);
if (retCode) {
// Force the host to re-enumerate (BUS_RESET)
pinMode(USB_INT_PHY0_DM_GPIO_NUM, OUTPUT_OPEN_DRAIN);
pinMode(USB_INT_PHY0_DP_GPIO_NUM, OUTPUT_OPEN_DRAIN);
digitalWrite(USB_INT_PHY0_DM_GPIO_NUM, LOW);
digitalWrite(USB_INT_PHY0_DP_GPIO_NUM, LOW);
pinMode(USB_DM_GPIO_NUM, OUTPUT_OPEN_DRAIN);
pinMode(USB_DP_GPIO_NUM, OUTPUT_OPEN_DRAIN);
digitalWrite(USB_DM_GPIO_NUM, LOW);
digitalWrite(USB_DP_GPIO_NUM, LOW);
}
// release the flag
running = false;
return retCode;
}
void HWCDC::begin(unsigned long baud) {
void HWCDC::begin(unsigned long baud)
{
if(tx_lock == NULL) {
tx_lock = xSemaphoreCreateMutex();
}
@ -316,53 +212,29 @@ void HWCDC::begin(unsigned long baud) {
log_e("HW CDC TX Buffer error");
}
}
// the HW Serial pins needs to be first deinited in order to allow `if(Serial)` to work :-(
// But this is also causing terminal to hang, so they are disabled
// deinit(NULL);
// delay(10); // USB Host has to enumerate it again
// Peripheral Manager setting for USB D+ D- pins
uint8_t pin = USB_INT_PHY0_DM_GPIO_NUM;
if (!perimanSetPinBus(pin, ESP32_BUS_TYPE_USB_DM, (void *)this, -1, -1)) {
goto err;
}
pin = USB_INT_PHY0_DP_GPIO_NUM;
if (!perimanSetPinBus(pin, ESP32_BUS_TYPE_USB_DP, (void *)this, -1, -1)) {
goto err;
}
// Configure PHY
// USB_Serial_JTAG use internal PHY
USB_SERIAL_JTAG.conf0.phy_sel = 0;
// Disable software control USB D+ D- pullup pulldown (Device FS: dp_pullup = 1)
USB_SERIAL_JTAG.conf0.pad_pull_override = 0;
// Enable USB D+ pullup
USB_SERIAL_JTAG.conf0.dp_pullup = 1;
// Enable USB pad function
USB_SERIAL_JTAG.conf0.usb_pad_enable = 1;
usb_serial_jtag_ll_disable_intr_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY | USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT | USB_SERIAL_JTAG_INTR_BUS_RESET);
// SOF ISR is causing esptool to be unable to upload firmware to the board
// usb_serial_jtag_ll_ena_intr_mask(
// USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY | USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT | USB_SERIAL_JTAG_INTR_BUS_RESET | USB_SERIAL_JTAG_INTR_SOF
// );
if(!intr_handle && esp_intr_alloc(ETS_USB_SERIAL_JTAG_INTR_SOURCE, 0, hw_cdc_isr_handler, NULL, &intr_handle) != ESP_OK){
isr_log_e("HW USB CDC failed to init interrupts");
end();
return;
}
return;
err:
log_e("Serial JTAG Pin %u can't be set into Peripheral Manager.", pin);
end();
if (perimanSetBusDeinit(ESP32_BUS_TYPE_USB, HWCDC::deinit)) {
// Setting USB D+ D- pins
perimanSetPinBus(USB_DM_GPIO_NUM, ESP32_BUS_TYPE_USB, (void *) this);
perimanSetPinBus(USB_DP_GPIO_NUM, ESP32_BUS_TYPE_USB, (void *) this);
} else {
log_e("Serial JTAG Pins can't be set into Peripheral Manager.");
}
void HWCDC::end() {
//Disable/clear/free tx/rx interrupt.
usb_serial_jtag_ll_txfifo_flush();
}
void HWCDC::end()
{
//Disable tx/rx interrupt.
usb_serial_jtag_ll_disable_intr_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
esp_intr_free(intr_handle);
intr_handle = NULL;
if(tx_lock != NULL) {
@ -376,12 +248,13 @@ void HWCDC::end() {
arduino_hw_cdc_event_loop_handle = NULL;
}
HWCDC::deinit(this);
setDebugOutput(false);
connected = false;
}
void HWCDC::setTxTimeoutMs(uint32_t timeout){
tx_timeout_ms = timeout;
// registers that the user has explicitly requested this value as the TX timeout
// used by the workaround for unplugged USB with a full TX queue, which would otherwise delay every write()
tx_timeout_change_request = true;
}
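A minimal usage sketch for the timeout above (assuming a board where ARDUINO_USB_MODE is set so the HWCDCSerial global exists); setting the timeout to zero makes a full TX ring buffer return immediately instead of waiting for a host:
#include <Arduino.h>
void setup() {
  HWCDCSerial.begin(115200);      // the baud value is not used by the JTAG CDC, kept for API compatibility
  HWCDCSerial.setTxTimeoutMs(0);  // do not wait on a full TX ring buffer when no terminal is attached
}
void loop() {
  HWCDCSerial.println("heartbeat");  // returns immediately; unread data is recycled per the FIFO policy
  delay(1000);
}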
/*
@ -403,7 +276,8 @@ size_t HWCDC::setTxBufferSize(size_t tx_queue_len) {
return tx_queue_len;
}
int HWCDC::availableForWrite(void) {
int HWCDC::availableForWrite(void)
{
if(tx_ring_buf == NULL || tx_lock == NULL){
return 0;
}
@ -415,17 +289,15 @@ int HWCDC::availableForWrite(void) {
return a;
}
size_t HWCDC::write(const uint8_t *buffer, size_t size) {
size_t HWCDC::write(const uint8_t *buffer, size_t size)
{
if(buffer == NULL || size == 0 || tx_ring_buf == NULL || tx_lock == NULL){
return 0;
}
if(xSemaphoreTake(tx_lock, tx_timeout_ms / portTICK_PERIOD_MS) != pdPASS){
return 0;
}
if (!isCDC_Connected()) {
// just pop/push RingBuffer and apply FIFO policy
flushTXBuffer(buffer, size);
} else {
size_t max_size = xRingbufferGetMaxItemSize(tx_ring_buf);
size_t space = xRingbufferGetCurFreeSize(tx_ring_buf);
size_t to_send = size, so_far = 0;
@ -433,101 +305,55 @@ size_t HWCDC::write(const uint8_t *buffer, size_t size) {
space = size;
}
// Non-Blocking method, Sending data to ringbuffer, and handle the data in ISR.
if (space > 0 && xRingbufferSend(tx_ring_buf, (void *)(buffer), space, 0) != pdTRUE) {
if(xRingbufferSend(tx_ring_buf, (void*) (buffer), space, 0) != pdTRUE){
size = 0;
} else {
to_send -= space;
so_far += space;
// Now trigger the ISR to read data from the ring buffer.
usb_serial_jtag_ll_txfifo_flush();
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
// tracks CDC transmission progress to avoid hanging if CDC is unplugged while still sending data
size_t last_toSend = to_send;
uint32_t tries = tx_timeout_ms; // waits 1ms per sending data attempt, in case CDC is unplugged
while (connected && to_send) {
space = xRingbufferGetCurFreeSize(tx_ring_buf);
if (space > to_send) {
space = to_send;
while(to_send){
if(max_size > to_send){
max_size = to_send;
}
// Blocking method, Sending data to ringbuffer, and handle the data in ISR.
if (xRingbufferSend(tx_ring_buf, (void *)(buffer + so_far), space, tx_timeout_ms / portTICK_PERIOD_MS) != pdTRUE) {
if(xRingbufferSend(tx_ring_buf, (void*) (buffer+so_far), max_size, tx_timeout_ms / portTICK_PERIOD_MS) != pdTRUE){
size = so_far;
log_w("write failed due to ring buffer full - timeout");
break;
}
so_far += space;
to_send -= space;
so_far += max_size;
to_send -= max_size;
// Now trigger the ISR to read data from the ring buffer.
usb_serial_jtag_ll_txfifo_flush();
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
if (last_toSend == to_send) {
// no progress in sending data... USB CDC is probably unplugged
tries--;
delay(1);
} else {
last_toSend = to_send;
tries = tx_timeout_ms; // reset the timeout
}
if (tries == 0) { // CDC isn't connected anymore...
size = so_far;
log_w("write failed due to waiting USB Host - timeout");
connected = false;
}
}
}
// CDC was disconnected while sending data ==> flush the TX buffer keeping the last data
if (to_send && !usb_serial_jtag_ll_txfifo_writable()) {
connected = false;
flushTXBuffer(buffer + so_far, to_send);
}
}
xSemaphoreGive(tx_lock);
return size;
}
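A hedged sketch of how a caller might stay off the blocking path of write() above by checking availableForWrite() first (names as in this file; the chunking scheme is illustrative):
void sendNonBlocking(const uint8_t *data, size_t len) {
  size_t sent = 0;
  while (sent < len) {
    int room = HWCDCSerial.availableForWrite();   // free space in the TX ring buffer
    if (room <= 0) {
      break;                                      // nothing fits right now; retry on a later call
    }
    size_t chunk = ((size_t)room < (len - sent)) ? (size_t)room : (len - sent);
    sent += HWCDCSerial.write(data + sent, chunk);
  }
}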
size_t HWCDC::write(uint8_t c) {
size_t HWCDC::write(uint8_t c)
{
return write(&c, 1);
}
void HWCDC::flush(void) {
void HWCDC::flush(void)
{
if(tx_ring_buf == NULL || tx_lock == NULL){
return;
}
if(xSemaphoreTake(tx_lock, tx_timeout_ms / portTICK_PERIOD_MS) != pdPASS){
return;
}
if (!isCDC_Connected()) {
flushTXBuffer(NULL, 0);
} else {
UBaseType_t uxItemsWaiting = 0;
vRingbufferGetInfo(tx_ring_buf, NULL, NULL, NULL, NULL, &uxItemsWaiting);
if(uxItemsWaiting){
// Now trigger the ISR to read data from the ring buffer.
usb_serial_jtag_ll_txfifo_flush();
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
}
uint32_t tries = tx_timeout_ms; // waits 1ms per ISR sending data attempt, in case CDC is unplugged
while (connected && tries && uxItemsWaiting) {
delay(1);
UBaseType_t lastUxItemsWaiting = uxItemsWaiting;
while(uxItemsWaiting){
delay(5);
vRingbufferGetInfo(tx_ring_buf, NULL, NULL, NULL, NULL, &uxItemsWaiting);
if (lastUxItemsWaiting == uxItemsWaiting) {
tries--;
}
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
}
if (tries == 0) { // CDC isn't connected anymore...
connected = false;
flushTXBuffer(NULL, 0); // flushes all TX Buffer
}
}
xSemaphoreGive(tx_lock);
}
@ -551,14 +377,16 @@ size_t HWCDC::setRxBufferSize(size_t rx_queue_len) {
return rx_queue_len;
}
int HWCDC::available(void) {
int HWCDC::available(void)
{
if(rx_queue == NULL){
return -1;
}
return uxQueueMessagesWaiting(rx_queue);
}
int HWCDC::peek(void) {
int HWCDC::peek(void)
{
if(rx_queue == NULL){
return -1;
}
@ -569,7 +397,8 @@ int HWCDC::peek(void) {
return -1;
}
int HWCDC::read(void) {
int HWCDC::read(void)
{
if(rx_queue == NULL){
return -1;
}
@ -580,7 +409,8 @@ int HWCDC::read(void) {
return -1;
}
size_t HWCDC::read(uint8_t *buffer, size_t size) {
size_t HWCDC::read(uint8_t *buffer, size_t size)
{
if(rx_queue == NULL){
return -1;
}
@ -596,17 +426,17 @@ size_t HWCDC::read(uint8_t *buffer, size_t size) {
* DEBUG
*/
void HWCDC::setDebugOutput(bool en) {
void HWCDC::setDebugOutput(bool en)
{
if(en) {
uartSetDebug(NULL);
ets_install_putc2((void (*)(char)) & cdc0_write_char);
ets_install_putc1((void (*)(char)) &cdc0_write_char);
} else {
ets_install_putc2(NULL);
ets_install_putc1(NULL);
}
ets_install_putc1(NULL); // closes UART log output
}
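A small sketch showing the intended use of setDebugOutput() above: it installs cdc0_write_char() as the putc hook so core debug output is expected to appear on the JTAG CDC console (assuming HWCDCSerial exists and the core debug level is enabled):
#include <Arduino.h>
void setup() {
  HWCDCSerial.begin(115200);
  HWCDCSerial.setDebugOutput(true);   // installs cdc0_write_char as the log putc hook
  log_i("boot done");                 // expected to show up on the CDC console instead of UART0
}
void loop() {
  delay(1000);
}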
#if ARDUINO_USB_MODE && ARDUINO_USB_CDC_ON_BOOT // Hardware JTAG CDC selected
#if ARDUINO_USB_MODE // Hardware JTAG CDC selected
// USBSerial is always available to be used
HWCDC HWCDCSerial;
#endif

View file

@ -1,4 +1,4 @@
// Copyright 2015-2024 Espressif Systems (Shanghai) PTE LTD
// Copyright 2015-2020 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -21,7 +21,6 @@
#include <inttypes.h>
#include "esp_event.h"
#include "Stream.h"
#include "driver/usb_serial_jtag.h"
ESP_EVENT_DECLARE_BASE(ARDUINO_HW_CDC_EVENTS);
@ -43,10 +42,10 @@ typedef union {
} tx;
} arduino_hw_cdc_event_data_t;
class HWCDC : public Stream {
class HWCDC: public Stream
{
private:
static bool deinit(void * busptr);
static bool isCDC_Connected();
public:
HWCDC();
@ -70,44 +69,40 @@ public:
size_t write(const uint8_t *buffer, size_t size);
void flush(void);
inline static bool isPlugged(void) {
// SOF ISR is causing esptool to be unable to upload firmware to the board
// Using IDF 5.1 helper function because it is based on Timer check instead of ISR
return usb_serial_jtag_is_connected();
}
inline static bool isConnected(void) {
return isCDC_Connected();
}
inline size_t read(char *buffer, size_t size) {
inline size_t read(char * buffer, size_t size)
{
return read((uint8_t*) buffer, size);
}
inline size_t write(const char *buffer, size_t size) {
inline size_t write(const char * buffer, size_t size)
{
return write((uint8_t*) buffer, size);
}
inline size_t write(const char *s) {
inline size_t write(const char * s)
{
return write((uint8_t*) s, strlen(s));
}
inline size_t write(unsigned long n) {
inline size_t write(unsigned long n)
{
return write((uint8_t) n);
}
inline size_t write(long n) {
inline size_t write(long n)
{
return write((uint8_t) n);
}
inline size_t write(unsigned int n) {
inline size_t write(unsigned int n)
{
return write((uint8_t) n);
}
inline size_t write(int n) {
inline size_t write(int n)
{
return write((uint8_t) n);
}
operator bool() const;
void setDebugOutput(bool);
uint32_t baudRate() {
return 115200;
}
uint32_t baudRate(){return 115200;}
};
#if ARDUINO_USB_MODE && ARDUINO_USB_CDC_ON_BOOT // Hardware JTAG CDC selected
#if ARDUINO_USB_MODE // Hardware JTAG CDC selected
#ifndef HWCDC_SERIAL_IS_DEFINED
#define HWCDC_SERIAL_IS_DEFINED 1
#endif
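Because the class exposes operator bool() (backed by isCDC_Connected()), a sketch can wait for a terminal before printing; a minimal example, assuming the HWCDCSerial global declared in this header:
void setup() {
  HWCDCSerial.begin(115200);
  while (!HWCDCSerial) {   // operator bool(): true only while the USB host has the CDC port open
    delay(10);
  }
  HWCDCSerial.println("terminal attached");
}
void loop() {}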

View file

@ -1,43 +0,0 @@
/*
Copyright (c) 2016 Arduino LLC. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#pragma once
#include <inttypes.h>
#include "Stream.h"
#include <functional>
class HardwareI2C : public Stream {
public:
virtual bool begin() = 0;
virtual bool begin(uint8_t address) = 0;
virtual bool end() = 0;
virtual bool setClock(uint32_t freq) = 0;
virtual void beginTransmission(uint8_t address) = 0;
virtual uint8_t endTransmission(bool stopBit) = 0;
virtual uint8_t endTransmission(void) = 0;
virtual size_t requestFrom(uint8_t address, size_t len, bool stopBit) = 0;
virtual size_t requestFrom(uint8_t address, size_t len) = 0;
// Update base class to use std::function
virtual void onReceive(const std::function<void(int)> &) = 0;
virtual void onRequest(const std::function<void()> &) = 0;
};
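A hedged example of the std::function callbacks this interface requires, assuming the core's TwoWire object (the global Wire) implements HardwareI2C and is used as an I2C slave:
#include <Wire.h>
void setup() {
  Wire.begin(0x42);                     // begin(address) starts slave mode at address 0x42
  Wire.onReceive([](int numBytes) {     // a lambda is accepted because onReceive takes std::function<void(int)>
    while (Wire.available()) {
      Wire.read();                      // drain the bytes the master just wrote
    }
  });
  Wire.onRequest([]() {
    Wire.write(0x55);                   // reply with a single status byte when the master reads
  });
}
void loop() {}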

View file

@ -5,40 +5,36 @@
#include <ctime>
#include "pins_arduino.h"
#include "io_pin_remap.h"
#include "HardwareSerial.h"
#include "soc/soc_caps.h"
#include "driver/uart.h"
#include "freertos/queue.h"
#if (SOC_UART_LP_NUM >= 1)
#define UART_HW_FIFO_LEN(uart_num) ((uart_num < SOC_UART_HP_NUM) ? SOC_UART_FIFO_LEN : SOC_LP_UART_FIFO_LEN)
#else
#define UART_HW_FIFO_LEN(uart_num) SOC_UART_FIFO_LEN
#ifndef ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#define ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE 2048
#endif
#ifndef ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#define ARDUINO_SERIAL_EVENT_TASK_PRIORITY (configMAX_PRIORITIES-1)
#endif
#ifndef ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#define ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE -1
#endif
void serialEvent(void) __attribute__((weak));
void serialEvent(void) {}
#if SOC_UART_NUM > 1
void serialEvent1(void) __attribute__((weak));
void serialEvent1(void) {}
#endif /* SOC_UART_NUM > 1 */
#if SOC_UART_NUM > 2
void serialEvent2(void) __attribute__((weak));
void serialEvent2(void) {}
#endif /* SOC_UART_NUM > 2 */
#if SOC_UART_NUM > 3
void serialEvent3(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 3 */
#if SOC_UART_NUM > 4
void serialEvent4(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 4 */
#if SOC_UART_NUM > 5
void serialEvent5(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 5 */
#if !defined(NO_GLOBAL_INSTANCES) && !defined(NO_GLOBAL_SERIAL)
// There is always Serial0 for UART0
HardwareSerial Serial0(0);
@ -48,83 +44,58 @@ HardwareSerial Serial1(1);
#if SOC_UART_NUM > 2
HardwareSerial Serial2(2);
#endif
#if SOC_UART_NUM > 3
HardwareSerial Serial3(3);
#endif
#if SOC_UART_NUM > 4
HardwareSerial Serial4(4);
#endif
#if (SOC_UART_NUM > 5)
HardwareSerial Serial5(5);
#endif
#if HWCDC_SERIAL_IS_DEFINED == 1 // Hardware JTAG CDC Event
extern void HWCDCSerialEvent (void)__attribute__((weak));
void HWCDCSerialEvent(void) {}
#endif
#if USB_SERIAL_IS_DEFINED == 1 // Native USB CDC Event
// Used by Hardware Serial for USB CDC events
extern void USBSerialEvent (void)__attribute__((weak));
void USBSerialEvent(void) {}
#endif
void serialEventRun(void) {
void serialEventRun(void)
{
#if HWCDC_SERIAL_IS_DEFINED == 1 // Hardware JTAG CDC Event
if (HWCDCSerialEvent && HWCDCSerial.available()) {
HWCDCSerialEvent();
}
if(HWCDCSerial.available()) HWCDCSerialEvent();
#endif
#if USB_SERIAL_IS_DEFINED == 1 // Native USB CDC Event
if (USBSerialEvent && USBSerial.available()) {
USBSerialEvent();
}
if(USBSerial.available()) USBSerialEvent();
#endif
// UART0 is default serialEvent()
if (serialEvent && Serial0.available()) {
serialEvent();
}
if(Serial0.available()) serialEvent();
#if SOC_UART_NUM > 1
if (serialEvent1 && Serial1.available()) {
serialEvent1();
}
if(Serial1.available()) serialEvent1();
#endif
#if SOC_UART_NUM > 2
if (serialEvent2 && Serial2.available()) {
serialEvent2();
}
#endif
#if SOC_UART_NUM > 3
if (serialEvent3 && Serial3.available()) {
serialEvent3();
}
#endif
#if SOC_UART_NUM > 4
if (serialEvent4 && Serial4.available()) {
serialEvent4();
}
#endif
#if SOC_UART_NUM > 5
if (serialEvent5 && Serial5.available()) {
serialEvent5();
}
if(Serial2.available()) serialEvent2();
#endif
}
#endif
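The weak stubs above mean a sketch only needs to define the matching serialEvent function to hook into serialEventRun(); a minimal echo example for UART0 (Serial0 here, usually aliased to Serial):
void setup() {
  Serial0.begin(115200);
}
void loop() {
  // nothing to do here; the core calls serialEventRun() between loop() iterations
}
void serialEvent() {               // overrides the weak stub; runs only when Serial0.available() is true
  while (Serial0.available()) {
    Serial0.write(Serial0.read()); // echo each byte back
  }
}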
#if !CONFIG_DISABLE_HAL_LOCKS
#define HSERIAL_MUTEX_LOCK() \
do { \
} while (xSemaphoreTake(_lock, portMAX_DELAY) != pdPASS)
#define HSERIAL_MUTEX_LOCK() do {} while (xSemaphoreTake(_lock, portMAX_DELAY) != pdPASS)
#define HSERIAL_MUTEX_UNLOCK() xSemaphoreGive(_lock)
#else
#define HSERIAL_MUTEX_LOCK()
#define HSERIAL_MUTEX_UNLOCK()
#endif
HardwareSerial::HardwareSerial(uint8_t uart_nr)
: _uart_nr(uart_nr), _uart(NULL), _rxBufferSize(256), _txBufferSize(0), _onReceiveCB(NULL), _onReceiveErrorCB(NULL), _onReceiveTimeout(false), _rxTimeout(1),
_rxFIFOFull(0), _eventTask(NULL)
HardwareSerial::HardwareSerial(uint8_t uart_nr) :
_uart_nr(uart_nr),
_uart(NULL),
_rxBufferSize(256),
_txBufferSize(0),
_onReceiveCB(NULL),
_onReceiveErrorCB(NULL),
_onReceiveTimeout(false),
_rxTimeout(2),
_rxFIFOFull(0),
_eventTask(NULL)
#if !CONFIG_DISABLE_HAL_LOCKS
,
_lock(NULL)
,_lock(NULL)
#endif
{
#if !CONFIG_DISABLE_HAL_LOCKS
@ -140,8 +111,9 @@ HardwareSerial::HardwareSerial(uint8_t uart_nr)
uart_init_PeriMan();
}
HardwareSerial::~HardwareSerial() {
end(); // explicit Full UART termination
HardwareSerial::~HardwareSerial()
{
end(true); // explicit Full UART termination
#if !CONFIG_DISABLE_HAL_LOCKS
if(_lock != NULL){
vSemaphoreDelete(_lock);
@ -149,25 +121,26 @@ HardwareSerial::~HardwareSerial() {
#endif
}
void HardwareSerial::_createEventTask(void *args) {
void HardwareSerial::_createEventTask(void *args)
{
// Creating UART event Task
xTaskCreateUniversal(
_uartEventTask, "uart_event_task", ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE, this, ARDUINO_SERIAL_EVENT_TASK_PRIORITY, &_eventTask,
ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
);
xTaskCreateUniversal(_uartEventTask, "uart_event_task", ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE, this, ARDUINO_SERIAL_EVENT_TASK_PRIORITY, &_eventTask, ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE);
if (_eventTask == NULL) {
log_e(" -- UART%d Event Task not Created!", _uart_nr);
}
}
void HardwareSerial::_destroyEventTask(void) {
void HardwareSerial::_destroyEventTask(void)
{
if (_eventTask != NULL) {
vTaskDelete(_eventTask);
_eventTask = NULL;
}
}
void HardwareSerial::onReceiveError(OnReceiveErrorCb function) {
void HardwareSerial::onReceiveError(OnReceiveErrorCb function)
{
HSERIAL_MUTEX_LOCK();
// function may be NULL to cancel onReceive() from its respective task
_onReceiveErrorCB = function;
@ -178,7 +151,8 @@ void HardwareSerial::onReceiveError(OnReceiveErrorCb function) {
HSERIAL_MUTEX_UNLOCK();
}
void HardwareSerial::onReceive(OnReceiveCb function, bool onlyOnTimeout) {
void HardwareSerial::onReceive(OnReceiveCb function, bool onlyOnTimeout)
{
HSERIAL_MUTEX_LOCK();
// function may be NULL to cancel onReceive() from its respective task
_onReceiveCB = function;
@ -190,8 +164,7 @@ void HardwareSerial::onReceive(OnReceiveCb function, bool onlyOnTimeout) {
// in case that onReceive() shall work only with RX Timeout, FIFO shall be high
// this is a work around for an IDF issue with events and low FIFO Full value (< 3)
// Not valid for the LP UART
if (_onReceiveTimeout && _uart_nr < SOC_UART_HP_NUM) {
if (_onReceiveTimeout) {
uartSetRxFIFOFull(_uart, 120);
log_w("OnReceive is set to Timeout only, thus FIFO Full is now 120 bytes.");
}
@ -209,34 +182,31 @@ void HardwareSerial::onReceive(OnReceiveCb function, bool onlyOnTimeout) {
// A low value of FIFO Full bytes will consume more CPU time within the ISR
// A high value of FIFO Full bytes will make the application wait longer to have bytes available for the Sketch in a streaming scenario
// Both RX FIFO Full and RX Timeout may affect when onReceive() will be called
bool HardwareSerial::setRxFIFOFull(uint8_t fifoBytes) {
bool HardwareSerial::setRxFIFOFull(uint8_t fifoBytes)
{
HSERIAL_MUTEX_LOCK();
// in case that onReceive() shall work only with RX Timeout, FIFO shall be high
// this is a work around for an IDF issue with events and low FIFO Full value (< 3)
// Not valid for the LP UART
if (_onReceiveCB != NULL && _onReceiveTimeout && _uart_nr < SOC_UART_HP_NUM) {
if (_onReceiveCB != NULL && _onReceiveTimeout) {
fifoBytes = 120;
log_w("OnReceive is set to Timeout only, thus FIFO Full is now 120 bytes.");
}
bool retCode = uartSetRxFIFOFull(_uart, fifoBytes); // Set new timeout
if (fifoBytes > 0 && fifoBytes < UART_HW_FIFO_LEN(_uart_nr) - 1) {
_rxFIFOFull = fifoBytes;
}
if (fifoBytes > 0 && fifoBytes < SOC_UART_FIFO_LEN - 1) _rxFIFOFull = fifoBytes;
HSERIAL_MUTEX_UNLOCK();
return retCode;
}
// timeout is calculates in time to receive UART symbols at the UART baudrate.
// timout is calculates in time to receive UART symbols at the UART baudrate.
// the estimation is about 11 bits per symbol (SERIAL_8N1)
bool HardwareSerial::setRxTimeout(uint8_t symbols_timeout) {
bool HardwareSerial::setRxTimeout(uint8_t symbols_timeout)
{
HSERIAL_MUTEX_LOCK();
// Zero disables timeout, thus, onReceive callback will only be called when RX FIFO reaches 120 bytes
// Any non-zero value will activate onReceive callback based on UART baudrate with about 11 bits per symbol
_rxTimeout = symbols_timeout;
if (!symbols_timeout) {
_onReceiveTimeout = false; // only when RX timeout is disabled, we also must disable this flag
}
if (!symbols_timeout) _onReceiveTimeout = false; // only when RX timeout is disabled, we also must disable this flag
bool retCode = uartSetRxTimeout(_uart, _rxTimeout); // Set new timeout
@ -244,7 +214,8 @@ bool HardwareSerial::setRxTimeout(uint8_t symbols_timeout) {
return retCode;
}
void HardwareSerial::eventQueueReset() {
void HardwareSerial::eventQueueReset()
{
QueueHandle_t uartEventQueue = NULL;
if (_uart == NULL) {
return;
@ -255,7 +226,8 @@ void HardwareSerial::eventQueueReset() {
}
}
void HardwareSerial::_uartEventTask(void *args) {
void HardwareSerial::_uartEventTask(void *args)
{
HardwareSerial *uart = (HardwareSerial *)args;
uart_event_t event;
QueueHandle_t uartEventQueue = NULL;
@ -267,9 +239,9 @@ void HardwareSerial::_uartEventTask(void *args) {
hardwareSerial_error_t currentErr = UART_NO_ERROR;
switch(event.type) {
case UART_DATA:
if (uart->_onReceiveCB && uart->available() > 0 && ((uart->_onReceiveTimeout && event.timeout_flag) || !uart->_onReceiveTimeout)) {
if(uart->_onReceiveCB && uart->available() > 0 &&
((uart->_onReceiveTimeout && event.timeout_flag) || !uart->_onReceiveTimeout) )
uart->_onReceiveCB();
}
break;
case UART_FIFO_OVF:
log_w("UART%d FIFO Overflow. Consider adding Hardware Flow Control to your Application.", uart->_uart_nr);
@ -280,23 +252,23 @@ void HardwareSerial::_uartEventTask(void *args) {
currentErr = UART_BUFFER_FULL_ERROR;
break;
case UART_BREAK:
log_v("UART%d RX break.", uart->_uart_nr);
log_w("UART%d RX break.", uart->_uart_nr);
currentErr = UART_BREAK_ERROR;
break;
case UART_PARITY_ERR:
log_v("UART%d parity error.", uart->_uart_nr);
log_w("UART%d parity error.", uart->_uart_nr);
currentErr = UART_PARITY_ERROR;
break;
case UART_FRAME_ERR:
log_v("UART%d frame error.", uart->_uart_nr);
log_w("UART%d frame error.", uart->_uart_nr);
currentErr = UART_FRAME_ERROR;
break;
default: log_v("UART%d unknown event type %d.", uart->_uart_nr, event.type); break;
default:
log_w("UART%d unknown event type %d.", uart->_uart_nr, event.type);
break;
}
if (currentErr != UART_NO_ERROR) {
if (uart->_onReceiveErrorCB) {
uart->_onReceiveErrorCB(currentErr);
}
if(uart->_onReceiveErrorCB) uart->_onReceiveErrorCB(currentErr);
}
}
}
@ -304,7 +276,8 @@ void HardwareSerial::_uartEventTask(void *args) {
vTaskDelete(NULL);
}
void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, int8_t txPin, bool invert, unsigned long timeout_ms, uint8_t rxfifo_full_thrhd) {
void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, int8_t txPin, bool invert, unsigned long timeout_ms, uint8_t rxfifo_full_thrhd)
{
if(_uart_nr >= SOC_UART_NUM) {
log_e("Serial number is invalid, please use a number from 0 to %u", SOC_UART_NUM - 1);
return;
@ -317,15 +290,6 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
}
#endif
// map logical pins to GPIO numbers
rxPin = digitalPinToGPIONumber(rxPin);
txPin = digitalPinToGPIONumber(txPin);
int8_t _rxPin = uart_get_RxPin(_uart_nr);
int8_t _txPin = uart_get_TxPin(_uart_nr);
rxPin = rxPin < 0 ? _rxPin : rxPin;
txPin = txPin < 0 ? _txPin : txPin;
HSERIAL_MUTEX_LOCK();
// First Time or after end() --> set default Pins
if (!uartIsDriverInstalled(_uart)) {
@ -340,7 +304,7 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
txPin = _txPin < 0 ? (int8_t)SOC_TX0 : _txPin;
}
break;
#if SOC_UART_HP_NUM > 1
#if SOC_UART_NUM > 1 // may save some flash bytes...
case UART_NUM_1:
if (rxPin < 0 && txPin < 0) {
// do not change RX1/TX1 if it has already been set before
@ -348,85 +312,28 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
txPin = _txPin < 0 ? (int8_t)TX1 : _txPin;
}
break;
#endif // UART_NUM_1
#if SOC_UART_HP_NUM > 2
#endif
#if SOC_UART_NUM > 2 // may save some flash bytes...
case UART_NUM_2:
if (rxPin < 0 && txPin < 0) {
// do not change RX2/TX2 if it has already been set before
#ifdef RX2
rxPin = _rxPin < 0 ? (int8_t)RX2 : _rxPin;
#endif
#ifdef TX2
txPin = _txPin < 0 ? (int8_t)TX2 : _txPin;
#endif
}
break;
#endif // UART_NUM_2
#if SOC_UART_HP_NUM > 3
case UART_NUM_3:
if (rxPin < 0 && txPin < 0) {
// do not change RX3/TX3 if it has already been set before
#ifdef RX3
rxPin = _rxPin < 0 ? (int8_t)RX3 : _rxPin;
#endif
#ifdef TX3
txPin = _txPin < 0 ? (int8_t)TX3 : _txPin;
#endif
}
break;
#endif // UART_NUM_3
#if SOC_UART_HP_NUM > 4
case UART_NUM_4:
if (rxPin < 0 && txPin < 0) {
// do not change RX4/TX4 if it has already been set before
#ifdef RX4
rxPin = _rxPin < 0 ? (int8_t)RX4 : _rxPin;
#endif
#ifdef TX4
txPin = _txPin < 0 ? (int8_t)TX4 : _txPin;
#endif
}
break;
#endif // UART_NUM_4
#if (SOC_UART_LP_NUM >= 1)
case LP_UART_NUM_0:
if (rxPin < 0 && txPin < 0) {
// do not change RX0_LP/TX0_LP if it has already been set before
#ifdef LP_RX0
rxPin = _rxPin < 0 ? (int8_t)LP_RX0 : _rxPin;
#endif
#ifdef LP_TX0
txPin = _txPin < 0 ? (int8_t)LP_TX0 : _txPin;
#endif
}
break;
#endif // LP_UART_NUM_0
}
}
// if no RX/TX pins are defined, it will not start the UART driver
if (rxPin < 0 && txPin < 0) {
log_e("No RX/TX pins defined. Please set RX/TX pins.");
HSERIAL_MUTEX_UNLOCK();
return;
}
// IDF UART driver keeps Pin setting on restarting. Negative Pin number will keep it unmodified.
// it will detach previous UART attached pins
// indicates that uartBegin() has to initialize a new IDF driver
if (_testUartBegin(_uart_nr, baud ? baud : 9600, config, rxPin, txPin, _rxBufferSize, _txBufferSize, invert, rxfifo_full_thrhd)) {
_destroyEventTask(); // when IDF uart driver must be restarted, _eventTask must finish too
if(_uart) {
// in this case it is a begin() over a previous begin() - maybe to change baud rate
// thus do not disable debug output
end(false); // disables IDF UART driver and UART event Task + sets _uart to NULL
}
// IDF UART driver keeps Pin setting on restarting. Negative Pin number will keep it unmodified.
// it will detach previous UART attached pins
_uart = uartBegin(_uart_nr, baud ? baud : 9600, config, rxPin, txPin, _rxBufferSize, _txBufferSize, invert, rxfifo_full_thrhd);
if (_uart == NULL) {
log_e("UART driver failed to start. Please check the logs.");
HSERIAL_MUTEX_UNLOCK();
return;
}
if (!baud) {
// using a baud rate of zero forces it to try to detect the current baud rate in place
uartStartDetectBaudrate(_uart);
@ -436,14 +343,11 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
yield();
}
end(false); // disables IDF UART driver and UART event Task + sets _uart to NULL
if(detectedBaudRate) {
delay(100); // Give some time...
_uart = uartBegin(_uart_nr, detectedBaudRate, config, rxPin, txPin, _rxBufferSize, _txBufferSize, invert, rxfifo_full_thrhd);
if (_uart == NULL) {
log_e("UART driver failed to start. Please check the logs.");
HSERIAL_MUTEX_UNLOCK();
return;
}
} else {
log_e("Could not detect baudrate. Serial data at the port must be present within the timeout for detection to be possible");
_uart = NULL;
@ -465,8 +369,7 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
if (!_rxFIFOFull) { // it has not being changed before calling begin()
// set a default FIFO Full value for the IDF driver
uint8_t fifoFull = 1;
// if baud rate is higher than 57600 or onReceive() is set, it will set FIFO Full to 120 bytes, except for LP UART
if (_uart_nr < SOC_UART_HP_NUM && (baud > 57600 || (_onReceiveCB != NULL && _onReceiveTimeout))) {
if (baud > 57600 || (_onReceiveCB != NULL && _onReceiveTimeout)) {
fifoFull = 120;
}
uartSetRxFIFOFull(_uart, fifoFull);
@ -476,13 +379,16 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
HSERIAL_MUTEX_UNLOCK();
}
void HardwareSerial::updateBaudRate(unsigned long baud) {
void HardwareSerial::updateBaudRate(unsigned long baud)
{
uartSetBaudRate(_uart, baud);
}
void HardwareSerial::end() {
void HardwareSerial::end(bool fullyTerminate)
{
// default Serial.end() will completely disable HardwareSerial,
// including any tasks or debug message channel (log_x()) - but not for IDF log messages!
if(fullyTerminate) {
_onReceiveCB = NULL;
_onReceiveErrorCB = NULL;
if (uartGetDebug() == _uart_nr) {
@ -490,20 +396,19 @@ void HardwareSerial::end() {
}
_rxFIFOFull = 0;
uartEnd(_uart_nr); // fully detach all pins and delete the UART driver
} else {
// do not invalidate callbacks, detach pins, invalidate DBG output
uart_driver_delete(_uart_nr);
}
_destroyEventTask(); // when IDF uart driver is deleted, _eventTask must finish too
_uart = NULL;
}
void HardwareSerial::setDebugOutput(bool en) {
void HardwareSerial::setDebugOutput(bool en)
{
if(_uart == 0) {
return;
}
#if (SOC_UART_LP_NUM >= 1)
if (_uart_nr >= SOC_UART_HP_NUM) {
log_e("LP UART does not support Debug Output.");
return;
}
#endif
if(en) {
uartSetDebug(_uart);
} else {
@ -513,21 +418,25 @@ void HardwareSerial::setDebugOutput(bool en) {
}
}
int HardwareSerial::available(void) {
int HardwareSerial::available(void)
{
return uartAvailable(_uart);
}
int HardwareSerial::availableForWrite(void) {
int HardwareSerial::availableForWrite(void)
{
return uartAvailableForWrite(_uart);
}
int HardwareSerial::peek(void) {
int HardwareSerial::peek(void)
{
if (available()) {
return uartPeek(_uart);
}
return -1;
}
int HardwareSerial::read(void) {
int HardwareSerial::read(void)
{
uint8_t c = 0;
if (uartReadBytes(_uart, &c, 1, 0) == 1) {
return c;
@ -540,53 +449,57 @@ int HardwareSerial::read(void) {
// terminates if size characters have been read, or no further characters are pending
// returns the number of characters placed in the buffer
// the buffer is NOT null terminated.
size_t HardwareSerial::read(uint8_t *buffer, size_t size) {
size_t HardwareSerial::read(uint8_t *buffer, size_t size)
{
return uartReadBytes(_uart, buffer, size, 0);
}
// Overrides Stream::readBytes() to be faster using IDF
size_t HardwareSerial::readBytes(uint8_t *buffer, size_t length) {
size_t HardwareSerial::readBytes(uint8_t *buffer, size_t length)
{
return uartReadBytes(_uart, buffer, length, (uint32_t)getTimeout());
}
void HardwareSerial::flush(void) {
void HardwareSerial::flush(void)
{
uartFlush(_uart);
}
void HardwareSerial::flush(bool txOnly) {
void HardwareSerial::flush(bool txOnly)
{
uartFlushTxOnly(_uart, txOnly);
}
size_t HardwareSerial::write(uint8_t c) {
size_t HardwareSerial::write(uint8_t c)
{
uartWrite(_uart, c);
return 1;
}
size_t HardwareSerial::write(const uint8_t *buffer, size_t size) {
size_t HardwareSerial::write(const uint8_t *buffer, size_t size)
{
uartWriteBuf(_uart, buffer, size);
return size;
}
uint32_t HardwareSerial::baudRate() {
uint32_t HardwareSerial::baudRate()
{
return uartGetBaudRate(_uart);
}
HardwareSerial::operator bool() const {
HardwareSerial::operator bool() const
{
return uartIsDriverInstalled(_uart);
}
void HardwareSerial::setRxInvert(bool invert) {
void HardwareSerial::setRxInvert(bool invert)
{
uartSetRxInvert(_uart, invert);
}
// negative Pin value will keep it unmodified
// can be called after or before begin()
bool HardwareSerial::setPins(int8_t rxPin, int8_t txPin, int8_t ctsPin, int8_t rtsPin) {
// map logical pins to GPIO numbers
rxPin = digitalPinToGPIONumber(rxPin);
txPin = digitalPinToGPIONumber(txPin);
ctsPin = digitalPinToGPIONumber(ctsPin);
rtsPin = digitalPinToGPIONumber(rtsPin);
bool HardwareSerial::setPins(int8_t rxPin, int8_t txPin, int8_t ctsPin, int8_t rtsPin)
{
// uartSetPins() checks if pins are valid and, if necessary, detaches the previous ones
return uartSetPins(_uart_nr, rxPin, txPin, ctsPin, rtsPin);
}
@ -595,7 +508,8 @@ bool HardwareSerial::setPins(int8_t rxPin, int8_t txPin, int8_t ctsPin, int8_t r
// must use setAllPins() in order to set RTS/CTS pins
// SerialHwFlowCtrl = UART_HW_FLOWCTRL_DISABLE, UART_HW_FLOWCTRL_RTS,
// UART_HW_FLOWCTRL_CTS, UART_HW_FLOWCTRL_CTS_RTS
bool HardwareSerial::setHwFlowCtrlMode(SerialHwFlowCtrl mode, uint8_t threshold) {
bool HardwareSerial::setHwFlowCtrlMode(SerialHwFlowCtrl mode, uint8_t threshold)
{
return uartSetHwFlowCtrlMode(_uart, mode, threshold);
}
@ -603,62 +517,39 @@ bool HardwareSerial::setHwFlowCtrlMode(SerialHwFlowCtrl mode, uint8_t threshold)
// HwFlowCtrl must be disabled and RTS pin set
// SerialMode = UART_MODE_UART, UART_MODE_RS485_HALF_DUPLEX, UART_MODE_IRDA,
// or testing mode: UART_MODE_RS485_COLLISION_DETECT, UART_MODE_RS485_APP_CTRL
bool HardwareSerial::setMode(SerialMode mode) {
bool HardwareSerial::setMode(SerialMode mode)
{
return uartSetMode(_uart, mode);
}
// Sets the UART Clock Source based on the compatible SoC options
// This method must be called before starting UART using begin(), otherwise it won't have any effect.
// Clock Source Options are:
// UART_CLK_SRC_DEFAULT :: any SoC - it will set whatever IDF defines as the default UART Clock Source
// UART_CLK_SRC_APB :: ESP32, ESP32-S2, ESP32-C3 and ESP32-S3
// UART_CLK_SRC_PLL :: ESP32-C2, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2 and ESP32-P4
// UART_CLK_SRC_XTAL :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_RTC :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_REF_TICK :: ESP32 and ESP32-S2
// Note: CLK_SRC_PLL Freq depends on the SoC - ESP32-C2 has 40MHz, ESP32-H2 has 48MHz and ESP32-C5, C6, C61 and P4 has 80MHz
// Note: ESP32-C6, C61, ESP32-P4 and ESP32-C5 have LP UART that will use only RTC_FAST or XTAL/2 as Clock Source
bool HardwareSerial::setClockSource(SerialClkSrc clkSrc) {
if (_uart) {
log_e("No Clock Source change was done. This function must be called before beginning UART%d.", _uart_nr);
return false;
}
return uartSetClockSource(_uart_nr, (uart_sclk_t)clkSrc);
}
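A short usage sketch for setClockSource() above; per the check in the function it must run before begin() (the XTAL option and the pin numbers below are assumptions for a target that supports them):
HardwareSerial LinkSerial(1);
void setup() {
  LinkSerial.setClockSource(UART_CLK_SRC_XTAL);   // choose the clock before the driver is installed
  LinkSerial.begin(115200, SERIAL_8N1, 4, 5);     // hypothetical RX=4, TX=5
}
void loop() {}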
// minimum total RX Buffer size is the UART FIFO space (128 bytes for most SoC) + 1. IDF imposition.
// LP UART has FIFO of 16 bytes
size_t HardwareSerial::setRxBufferSize(size_t new_size) {
if (_uart) {
log_e("RX Buffer can't be resized when Serial is already running. Set it before calling begin().");
log_e("RX Buffer can't be resized when Serial is already running.\n");
return 0;
}
uint8_t FIFOLen = UART_HW_FIFO_LEN(_uart_nr);
// Valid values are higher than the FIFO length
if (new_size <= FIFOLen) {
new_size = FIFOLen + 1;
log_w("RX Buffer set to minimum value: %d.", new_size);
if (new_size <= SOC_UART_FIFO_LEN) {
log_e("RX Buffer must be higher than %d.\n", SOC_UART_FIFO_LEN); // ESP32, S2, S3 and C3 means higher than 128
return 0;
}
_rxBufferSize = new_size;
return _rxBufferSize;
}
// minimum total TX Buffer size is the UART FIFO space (128 bytes for most SoC) + 1.
// LP UART has FIFO of 16 bytes
size_t HardwareSerial::setTxBufferSize(size_t new_size) {
if (_uart) {
log_e("TX Buffer can't be resized when Serial is already running. Set it before calling begin().");
log_e("TX Buffer can't be resized when Serial is already running.\n");
return 0;
}
uint8_t FIFOLen = UART_HW_FIFO_LEN(_uart_nr);
// Valid values are zero or higher than the FIFO length
if (new_size > 0 && new_size <= FIFOLen) {
new_size = FIFOLen + 1;
log_w("TX Buffer set to minimum value: %d.", new_size);
if (new_size <= SOC_UART_FIFO_LEN) {
log_e("TX Buffer must be higher than %d.\n", SOC_UART_FIFO_LEN); // ESP32, S2, S3 and C3 means higher than 128
return 0;
}
// if new_size is higher than SOC_UART_FIFO_LEN, TX Ringbuffer will be active and it will be used to report back "availableForWrite()"
_txBufferSize = new_size;
return new_size;
return _txBufferSize;
}
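Both buffer setters above refuse to run once the driver is installed, so a sketch sizes them before begin(); values at or below the FIFO length are bumped to FIFO length + 1, as the warnings state:
void setup() {
  Serial1.setRxBufferSize(1024);   // must precede begin(); enlarges the driver RX buffer
  Serial1.setTxBufferSize(512);    // non-zero enables the TX ring buffer used by availableForWrite()
  Serial1.begin(115200);           // uses the default RX1/TX1 pins for this target
}
void loop() {}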

View file

@ -96,51 +96,17 @@ typedef enum {
UART_PARITY_ERROR
} hardwareSerial_error_t;
typedef enum {
UART_CLK_SRC_DEFAULT = UART_SCLK_DEFAULT,
#if SOC_UART_SUPPORT_APB_CLK
UART_CLK_SRC_APB = UART_SCLK_APB,
#endif
#if SOC_UART_SUPPORT_PLL_F40M_CLK
UART_CLK_SRC_PLL = UART_SCLK_PLL_F40M,
#elif SOC_UART_SUPPORT_PLL_F80M_CLK
UART_CLK_SRC_PLL = UART_SCLK_PLL_F80M,
#elif CONFIG_IDF_TARGET_ESP32H2
UART_CLK_SRC_PLL = UART_SCLK_PLL_F48M,
#endif
#if SOC_UART_SUPPORT_XTAL_CLK
UART_CLK_SRC_XTAL = UART_SCLK_XTAL,
#endif
#if SOC_UART_SUPPORT_RTC_CLK
UART_CLK_SRC_RTC = UART_SCLK_RTC,
#endif
#if SOC_UART_SUPPORT_REF_TICK
UART_CLK_SRC_REF_TICK = UART_SCLK_REF_TICK,
#endif
} SerialClkSrc;
#ifndef ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#ifndef CONFIG_ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#define ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE 2048
#else
#define ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE CONFIG_ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#endif
#endif
#ifndef ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#ifndef CONFIG_ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#define ARDUINO_SERIAL_EVENT_TASK_PRIORITY (configMAX_PRIORITIES-1)
#else
#define ARDUINO_SERIAL_EVENT_TASK_PRIORITY CONFIG_ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#endif
#endif
#ifndef ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#ifndef CONFIG_ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#define ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE -1
#else
#define ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE CONFIG_ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#endif
#endif
// UART0 pins are defined by default by the bootloader.
@ -152,18 +118,12 @@ typedef enum {
#define SOC_RX0 (gpio_num_t)3
#elif CONFIG_IDF_TARGET_ESP32S2 || CONFIG_IDF_TARGET_ESP32S3
#define SOC_RX0 (gpio_num_t)44
#elif CONFIG_IDF_TARGET_ESP32C2
#define SOC_RX0 (gpio_num_t)19
#elif CONFIG_IDF_TARGET_ESP32C3
#define SOC_RX0 (gpio_num_t)20
#elif CONFIG_IDF_TARGET_ESP32C6
#define SOC_RX0 (gpio_num_t)17
#elif CONFIG_IDF_TARGET_ESP32H2
#define SOC_RX0 (gpio_num_t)23
#elif CONFIG_IDF_TARGET_ESP32P4
#define SOC_RX0 (gpio_num_t)38
#elif CONFIG_IDF_TARGET_ESP32C5
#define SOC_RX0 (gpio_num_t)12
#endif
#endif
@ -172,31 +132,23 @@ typedef enum {
#define SOC_TX0 (gpio_num_t)1
#elif CONFIG_IDF_TARGET_ESP32S2 || CONFIG_IDF_TARGET_ESP32S3
#define SOC_TX0 (gpio_num_t)43
#elif CONFIG_IDF_TARGET_ESP32C2
#define SOC_TX0 (gpio_num_t)20
#elif CONFIG_IDF_TARGET_ESP32C3
#define SOC_TX0 (gpio_num_t)21
#elif CONFIG_IDF_TARGET_ESP32C6
#define SOC_TX0 (gpio_num_t)16
#elif CONFIG_IDF_TARGET_ESP32H2
#define SOC_TX0 (gpio_num_t)24
#elif CONFIG_IDF_TARGET_ESP32P4
#define SOC_TX0 (gpio_num_t)37
#elif CONFIG_IDF_TARGET_ESP32C5
#define SOC_TX0 (gpio_num_t)11
#endif
#endif
// Default pins for UART1 are arbitrary, and defined here for convenience.
#if SOC_UART_HP_NUM > 1
#if SOC_UART_NUM > 1
#ifndef RX1
#if CONFIG_IDF_TARGET_ESP32
#define RX1 (gpio_num_t)26
#elif CONFIG_IDF_TARGET_ESP32S2
#define RX1 (gpio_num_t)4
#elif CONFIG_IDF_TARGET_ESP32C2
#define RX1 (gpio_num_t)10
#elif CONFIG_IDF_TARGET_ESP32C3
#define RX1 (gpio_num_t)18
#elif CONFIG_IDF_TARGET_ESP32S3
@ -205,10 +157,6 @@ typedef enum {
#define RX1 (gpio_num_t)4
#elif CONFIG_IDF_TARGET_ESP32H2
#define RX1 (gpio_num_t)0
#elif CONFIG_IDF_TARGET_ESP32P4
#define RX1 (gpio_num_t)11
#elif CONFIG_IDF_TARGET_ESP32C5
#define RX1 (gpio_num_t)4
#endif
#endif
@ -217,8 +165,6 @@ typedef enum {
#define TX1 (gpio_num_t)27
#elif CONFIG_IDF_TARGET_ESP32S2
#define TX1 (gpio_num_t)5
#elif CONFIG_IDF_TARGET_ESP32C2
#define TX1 (gpio_num_t)18
#elif CONFIG_IDF_TARGET_ESP32C3
#define TX1 (gpio_num_t)19
#elif CONFIG_IDF_TARGET_ESP32S3
@ -227,17 +173,13 @@ typedef enum {
#define TX1 (gpio_num_t)5
#elif CONFIG_IDF_TARGET_ESP32H2
#define TX1 (gpio_num_t)1
#elif CONFIG_IDF_TARGET_ESP32P4
#define TX1 (gpio_num_t)10
#elif CONFIG_IDF_TARGET_ESP32C5
#define TX1 (gpio_num_t)5
#endif
#endif
#endif /* SOC_UART_HP_NUM > 1 */
#endif /* SOC_UART_NUM > 1 */
// Default pins for UART2 are arbitrary, and defined here for convenience.
#if SOC_UART_HP_NUM > 2
#if SOC_UART_NUM > 2
#ifndef RX2
#if CONFIG_IDF_TARGET_ESP32
#define RX2 (gpio_num_t)4
@ -253,22 +195,13 @@ typedef enum {
#define TX2 (gpio_num_t)20
#endif
#endif
#endif /* SOC_UART_HP_NUM > 2 */
#if SOC_UART_LP_NUM >= 1
#ifndef LP_RX0
#define LP_RX0 (gpio_num_t) LP_U0RXD_GPIO_NUM
#endif
#ifndef LP_TX0
#define LP_TX0 (gpio_num_t) LP_U0TXD_GPIO_NUM
#endif
#endif /* SOC_UART_LP_NUM >= 1 */
#endif /* SOC_UART_NUM > 2 */
typedef std::function<void(void)> OnReceiveCb;
typedef std::function<void(hardwareSerial_error_t)> OnReceiveErrorCb;
class HardwareSerial : public Stream {
class HardwareSerial: public Stream
{
public:
HardwareSerial(uint8_t uart_nr);
~HardwareSerial();
@ -289,7 +222,7 @@ public:
// onReceive will setup a callback that will be called whenever an UART interruption occurs (UART_INTR_RXFIFO_FULL or UART_INTR_RXFIFO_TOUT)
// UART_INTR_RXFIFO_FULL interrupt triggers at UART_FULL_THRESH_DEFAULT bytes received (defined as 120 bytes by default in IDF)
// UART_INTR_RXFIFO_TOUT interrupt triggers at UART_TOUT_THRESH_DEFAULT symbols passed without any reception (defined as 10 symbols by default in IDF)
// UART_INTR_RXFIFO_TOUT interrupt triggers at UART_TOUT_THRESH_DEFAULT symbols passed without any reception (defined as 10 symbos by default in IDF)
// onlyOnTimeout parameter will define how onReceive will behave:
// Default: true -- The callback will only be called when RX Timeout happens.
// Whole stream of bytes will be ready for being read on the callback function at once.
@ -302,7 +235,7 @@ public:
// onReceive will be called on error events (see hardwareSerial_error_t)
void onReceiveError(OnReceiveErrorCb function);
// eventQueueReset clears all events in the queue (the events that trigger onReceive and onReceiveError) - maybe useful in some use cases
// eventQueueReset clears all events in the queue (the events that trigger onReceive and onReceiveError) - maybe usefull in some use cases
void eventQueueReset();
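A hedged sketch of the callback registration described in the comments above, using a capture-free lambda for the OnReceiveCb std::function and timeout-only mode:
void setup() {
  Serial1.begin(115200);
  Serial1.setRxTimeout(2);           // about 2 symbol times of silence mark the end of a frame
  Serial1.onReceive([]() {           // runs in the UART event task, not in an ISR
    while (Serial1.available()) {
      Serial1.read();                // consume the whole frame at once
    }
  }, true);                          // true = call the callback only on RX timeout
}
void loop() {}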
// When pins are changed, it will detach the previous ones
@ -310,45 +243,50 @@ public:
// timeout_ms is used in baudrate detection (ESP32, ESP32S2 only)
// invert will invert RX/TX polarity
// rxfifo_full_thrhd if the UART Flow Control Threshold in the UART FIFO (max 127)
void begin(
unsigned long baud, uint32_t config = SERIAL_8N1, int8_t rxPin = -1, int8_t txPin = -1, bool invert = false, unsigned long timeout_ms = 20000UL,
uint8_t rxfifo_full_thrhd = 120
);
void end(void);
void begin(unsigned long baud, uint32_t config=SERIAL_8N1, int8_t rxPin=-1, int8_t txPin=-1, bool invert=false, unsigned long timeout_ms = 20000UL, uint8_t rxfifo_full_thrhd = 112);
void end(bool fullyTerminate = true);
void updateBaudRate(unsigned long baud);
int available(void);
int availableForWrite(void);
int peek(void);
int read(void);
size_t read(uint8_t *buffer, size_t size);
inline size_t read(char *buffer, size_t size) {
inline size_t read(char * buffer, size_t size)
{
return read((uint8_t*) buffer, size);
}
// Overrides Stream::readBytes() to be faster using IDF
size_t readBytes(uint8_t *buffer, size_t length);
size_t readBytes(char *buffer, size_t length) {
size_t readBytes(char *buffer, size_t length)
{
return readBytes((uint8_t *) buffer, length);
}
void flush(void);
void flush( bool txOnly);
size_t write(uint8_t);
size_t write(const uint8_t *buffer, size_t size);
inline size_t write(const char *buffer, size_t size) {
inline size_t write(const char * buffer, size_t size)
{
return write((uint8_t*) buffer, size);
}
inline size_t write(const char *s) {
inline size_t write(const char * s)
{
return write((uint8_t*) s, strlen(s));
}
inline size_t write(unsigned long n) {
inline size_t write(unsigned long n)
{
return write((uint8_t) n);
}
inline size_t write(long n) {
inline size_t write(long n)
{
return write((uint8_t) n);
}
inline size_t write(unsigned int n) {
inline size_t write(unsigned int n)
{
return write((uint8_t) n);
}
inline size_t write(int n) {
inline size_t write(int n)
{
return write((uint8_t) n);
}
uint32_t baudRate();
@ -375,17 +313,6 @@ public:
// UART_MODE_RS485_COLLISION_DETECT = 0x03 mode: RS485 collision detection UART mode (used for test purposes)
// UART_MODE_RS485_APP_CTRL = 0x04 mode: application control RS485 UART mode (used for test purposes)
bool setMode(SerialMode mode);
// Used to set the UART clock source mode. It must be set before calling begin(), otherwise it won't have any effect.
// Not all clock source are available to every SoC. The compatible option are listed here:
// UART_CLK_SRC_DEFAULT :: any SoC - it will set whatever IDF defines as the default UART Clock Source
// UART_CLK_SRC_APB :: ESP32, ESP32-S2, ESP32-C3 and ESP32-S3
// UART_CLK_SRC_PLL :: ESP32-C2, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2 and ESP32-P4
// UART_CLK_SRC_XTAL :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_RTC :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_REF_TICK :: ESP32 and ESP32-S2
// Note: CLK_SRC_PLL Freq depends on the SoC - ESP32-C2 has 40MHz, ESP32-H2 has 48MHz and ESP32-C5, C6, C61 and P4 has 80MHz
// Note: ESP32-C6, C61, ESP32-P4 and ESP32-C5 have LP UART that will use only RTC_FAST or XTAL/2 as Clock Source
bool setClockSource(SerialClkSrc clkSrc);
size_t setRxBufferSize(size_t new_size);
size_t setTxBufferSize(size_t new_size);
@ -435,15 +362,6 @@ extern HardwareSerial Serial1;
#if SOC_UART_NUM > 2
extern HardwareSerial Serial2;
#endif
#if SOC_UART_NUM > 3
extern HardwareSerial Serial3;
#endif
#if SOC_UART_NUM > 4
extern HardwareSerial Serial4;
#endif
#if SOC_UART_NUM > 5
extern HardwareSerial Serial5;
#endif
#endif //!defined(NO_GLOBAL_INSTANCES) && !defined(NO_GLOBAL_SERIAL)
#endif // HardwareSerial_h

View file

@ -1,48 +0,0 @@
// Copyright 2024 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef HashBuilder_h
#define HashBuilder_h
#include <WString.h>
#include <Stream.h>
#include "HEXBuilder.h"
class HashBuilder : public HEXBuilder {
public:
virtual ~HashBuilder() {}
virtual void begin() = 0;
virtual void add(const uint8_t *data, size_t len) = 0;
virtual void add(const char *data) {
add((const uint8_t *)data, strlen(data));
}
virtual void add(String data) {
add(data.c_str());
}
virtual void addHexString(const char *data) = 0;
virtual void addHexString(String data) {
addHexString(data.c_str());
}
virtual bool addStream(Stream &stream, const size_t maxLen) = 0;
virtual void calculate() = 0;
virtual void getBytes(uint8_t *output) = 0;
virtual void getChars(char *output) = 0;
virtual String toString() = 0;
};
#endif
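A usage sketch for this interface, assuming the core's MD5Builder class implements HashBuilder (begin/add/calculate/toString as declared above):
#include <MD5Builder.h>
void setup() {
  Serial.begin(115200);
  MD5Builder md5;
  md5.begin();
  md5.add("hello world");           // add(const char*) hashes the raw bytes of the string
  md5.calculate();
  Serial.println(md5.toString());   // prints the 32-character hex digest
}
void loop() {}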

View file

@ -17,135 +17,97 @@
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "IPAddress.h"
#include "Print.h"
#include "lwip/netif.h"
#include "StreamString.h"
#include <Arduino.h>
#include <IPAddress.h>
#include <Print.h>
#ifndef CONFIG_LWIP_IPV6
#define IP6_NO_ZONE 0
#endif
IPAddress::IPAddress() : IPAddress(IPv4) {}
IPAddress::IPAddress(IPType ip_type) {
_type = ip_type;
_zone = IP6_NO_ZONE;
memset(_address.bytes, 0, sizeof(_address.bytes));
IPAddress::IPAddress()
{
_address.dword = 0;
}
IPAddress::IPAddress(uint8_t first_octet, uint8_t second_octet, uint8_t third_octet, uint8_t fourth_octet) {
_type = IPv4;
_zone = IP6_NO_ZONE;
memset(_address.bytes, 0, sizeof(_address.bytes));
_address.bytes[IPADDRESS_V4_BYTES_INDEX] = first_octet;
_address.bytes[IPADDRESS_V4_BYTES_INDEX + 1] = second_octet;
_address.bytes[IPADDRESS_V4_BYTES_INDEX + 2] = third_octet;
_address.bytes[IPADDRESS_V4_BYTES_INDEX + 3] = fourth_octet;
IPAddress::IPAddress(uint8_t first_octet, uint8_t second_octet, uint8_t third_octet, uint8_t fourth_octet)
{
_address.bytes[0] = first_octet;
_address.bytes[1] = second_octet;
_address.bytes[2] = third_octet;
_address.bytes[3] = fourth_octet;
}
IPAddress::IPAddress(
uint8_t o1, uint8_t o2, uint8_t o3, uint8_t o4, uint8_t o5, uint8_t o6, uint8_t o7, uint8_t o8, uint8_t o9, uint8_t o10, uint8_t o11, uint8_t o12,
uint8_t o13, uint8_t o14, uint8_t o15, uint8_t o16, uint8_t z
) {
_type = IPv6;
_address.bytes[0] = o1;
_address.bytes[1] = o2;
_address.bytes[2] = o3;
_address.bytes[3] = o4;
_address.bytes[4] = o5;
_address.bytes[5] = o6;
_address.bytes[6] = o7;
_address.bytes[7] = o8;
_address.bytes[8] = o9;
_address.bytes[9] = o10;
_address.bytes[10] = o11;
_address.bytes[11] = o12;
_address.bytes[12] = o13;
_address.bytes[13] = o14;
_address.bytes[14] = o15;
_address.bytes[15] = o16;
_zone = z;
IPAddress::IPAddress(uint32_t address)
{
_address.dword = address;
}
IPAddress::IPAddress(uint32_t address) {
// IPv4 only
_type = IPv4;
_zone = IP6_NO_ZONE;
memset(_address.bytes, 0, sizeof(_address.bytes));
_address.dword[IPADDRESS_V4_DWORD_INDEX] = address;
// NOTE on conversion/comparison and uint32_t:
// These conversions are host platform dependent.
// There is a defined integer representation of IPv4 addresses,
// based on network byte order (will be the value on big endian systems),
// e.g. http://2398766798 is the same as http://142.250.70.206,
// However on little endian systems the octets 0x8E, 0xFA, 0x46, 0xCE,
// in that order, will form the integer (uint32_t) 3460758158.
}
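A tiny illustration of the endianness note above, assuming a little-endian target such as the ESP32:
#include <Arduino.h>
void setup() {
  Serial.begin(115200);
  IPAddress fromHostOrder(3460758158UL);    // stored as bytes 0x8E 0xFA 0x46 0xCE on a little-endian CPU
  IPAddress fromOctets(142, 250, 70, 206);  // the octet constructor is independent of host byte order
  Serial.println(fromHostOrder);            // prints 142.250.70.206 (IPAddress is Printable)
  Serial.println(fromOctets == fromHostOrder ? "equal" : "different");
}
void loop() {}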
IPAddress::IPAddress(const uint8_t *address) : IPAddress(IPv4, address) {}
IPAddress::IPAddress(IPType ip_type, const uint8_t *address, uint8_t z) {
_type = ip_type;
if (ip_type == IPv4) {
memset(_address.bytes, 0, sizeof(_address.bytes));
memcpy(&_address.bytes[IPADDRESS_V4_BYTES_INDEX], address, sizeof(uint32_t));
_zone = 0;
} else {
IPAddress::IPAddress(const uint8_t *address)
{
memcpy(_address.bytes, address, sizeof(_address.bytes));
_zone = z;
}
}
IPAddress::IPAddress(const char *address) {
fromString(address);
IPAddress& IPAddress::operator=(const uint8_t *address)
{
memcpy(_address.bytes, address, sizeof(_address.bytes));
return *this;
}
IPAddress::IPAddress(const IPAddress &address) {
*this = address;
IPAddress& IPAddress::operator=(uint32_t address)
{
_address.dword = address;
return *this;
}
String IPAddress::toString(bool includeZone) const {
StreamString s;
printTo(s, includeZone);
return String(s);
bool IPAddress::operator==(const uint8_t* addr) const
{
return memcmp(addr, _address.bytes, sizeof(_address.bytes)) == 0;
}
bool IPAddress::fromString(const char *address) {
if (!fromString4(address)) {
return fromString6(address);
size_t IPAddress::printTo(Print& p) const
{
size_t n = 0;
for(int i = 0; i < 3; i++) {
n += p.print(_address.bytes[i], DEC);
n += p.print('.');
}
return true;
n += p.print(_address.bytes[3], DEC);
return n;
}
bool IPAddress::fromString4(const char *address) {
String IPAddress::toString() const
{
char szRet[16];
sprintf(szRet,"%u.%u.%u.%u", _address.bytes[0], _address.bytes[1], _address.bytes[2], _address.bytes[3]);
return String(szRet);
}
bool IPAddress::fromString(const char *address)
{
// TODO: add support for "a", "a.b", "a.b.c" formats
int16_t acc = -1; // Accumulator
uint16_t acc = 0; // Accumulator
uint8_t dots = 0;
memset(_address.bytes, 0, sizeof(_address.bytes));
while (*address) {
while (*address)
{
char c = *address++;
if (c >= '0' && c <= '9') {
acc = (acc < 0) ? (c - '0') : acc * 10 + (c - '0');
if (c >= '0' && c <= '9')
{
acc = acc * 10 + (c - '0');
if (acc > 255) {
// Value out of [0..255] range
return false;
}
} else if (c == '.') {
}
else if (c == '.')
{
if (dots == 3) {
// Too many dots (there must be 3 dots)
// Too much dots (there must be 3 dots)
return false;
}
if (acc < 0) {
/* No value between dots, e.g. '1..' */
return false;
_address.bytes[dots++] = acc;
acc = 0;
}
_address.bytes[IPADDRESS_V4_BYTES_INDEX + dots++] = acc;
acc = -1;
} else {
else
{
// Invalid char
return false;
}
@ -155,299 +117,9 @@ bool IPAddress::fromString4(const char *address) {
// Too few dots (there must be 3 dots)
return false;
}
if (acc < 0) {
/* No value between dots, e.g. '1..' */
return false;
}
_address.bytes[IPADDRESS_V4_BYTES_INDEX + 3] = acc;
_type = IPv4;
_address.bytes[3] = acc;
return true;
}
bool IPAddress::fromString6(const char *address) {
uint32_t acc = 0; // Accumulator
int colons = 0, double_colons = -1;
while (*address) {
char c = tolower(*address++);
if (isalnum(c) && c <= 'f') {
if (c >= 'a') {
c -= 'a' - '0' - 10;
}
acc = acc * 16 + (c - '0');
if (acc > 0xffff) {
// Value out of range
return false;
}
} else if (c == ':') {
if (*address == ':') {
if (double_colons >= 0) {
// :: allowed once
return false;
}
if (*address != '\0' && *(address + 1) == ':') {
// ::: not allowed
return false;
}
// remember location
double_colons = colons + !!acc;
address++;
} else if (*address == '\0') {
// can't end with a single colon
return false;
}
if (colons == 7) {
// too many separators
return false;
}
_address.bytes[colons * 2] = acc >> 8;
_address.bytes[colons * 2 + 1] = acc & 0xff;
colons++;
acc = 0;
} else if (c == '%') {
// netif_index_to_name crashes on latest esp-idf
// _zone = netif_name_to_index(address);
// in the interim, we parse the suffix as a zone number
while ((*address != '\0') && (!isdigit(*address))) { // skip all non-digit after '%'
address++;
}
_zone = atol(address) + 1; // increase by one by convention, so we can have zone '0'
while (*address != '\0') {
address++;
}
} else {
// Invalid char
return false;
}
}
if (double_colons == -1 && colons != 7) {
// Too few separators
return false;
}
if (double_colons > -1 && colons > 6) {
// Too many segments (double colon must be at least one zero field)
return false;
}
_address.bytes[colons * 2] = acc >> 8;
_address.bytes[colons * 2 + 1] = acc & 0xff;
colons++;
if (double_colons != -1) {
for (int i = colons * 2 - double_colons * 2 - 1; i >= 0; i--) {
_address.bytes[16 - colons * 2 + double_colons * 2 + i] = _address.bytes[double_colons * 2 + i];
}
for (int i = double_colons * 2; i < 16 - colons * 2 + double_colons * 2; i++) {
_address.bytes[i] = 0;
}
}
_type = IPv6;
return true;
}
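A short parsing example covering both branches above (fromString() tries IPv4 first and falls back to IPv6); the literal addresses are illustrative:
void setup() {
  Serial.begin(115200);
  IPAddress v4, v6;
  if (v4.fromString("192.168.4.1")) {
    Serial.println(v4);               // handled by fromString4()
  }
  if (v6.fromString("2001:db8::1")) {
    Serial.println(v6);               // handled by fromString6(), printed in compressed canonical form
  }
}
void loop() {}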
IPAddress &IPAddress::operator=(const uint8_t *address) {
// IPv4 only conversion from byte pointer
_type = IPv4;
memset(_address.bytes, 0, sizeof(_address.bytes));
memcpy(&_address.bytes[IPADDRESS_V4_BYTES_INDEX], address, sizeof(uint32_t));
return *this;
}
IPAddress &IPAddress::operator=(const char *address) {
fromString(address);
return *this;
}
IPAddress &IPAddress::operator=(uint32_t address) {
// IPv4 conversion
// See note on conversion/comparison and uint32_t
_type = IPv4;
memset(_address.bytes, 0, sizeof(_address.bytes));
_address.dword[IPADDRESS_V4_DWORD_INDEX] = address;
return *this;
}
IPAddress &IPAddress::operator=(const IPAddress &address) {
_type = address.type();
_zone = address.zone();
memcpy(_address.bytes, address._address.bytes, sizeof(_address.bytes));
return *this;
}
bool IPAddress::operator==(const IPAddress &addr) const {
return (addr._type == _type) && (_type == IPType::IPv4 ? addr._address.dword[IPADDRESS_V4_DWORD_INDEX] == _address.dword[IPADDRESS_V4_DWORD_INDEX] : memcmp(addr._address.bytes, _address.bytes, sizeof(_address.bytes)) == 0);
}
bool IPAddress::operator==(const uint8_t *addr) const {
// IPv4 only comparison to byte pointer
// Can't support IPv6 as we know our type, but not the length of the pointer
return _type == IPv4 && memcmp(addr, &_address.bytes[IPADDRESS_V4_BYTES_INDEX], sizeof(uint32_t)) == 0;
}
uint8_t IPAddress::operator[](int index) const {
if (_type == IPv4) {
return _address.bytes[IPADDRESS_V4_BYTES_INDEX + index];
}
return _address.bytes[index];
}
uint8_t &IPAddress::operator[](int index) {
if (_type == IPv4) {
return _address.bytes[IPADDRESS_V4_BYTES_INDEX + index];
}
return _address.bytes[index];
}
size_t IPAddress::printTo(Print &p) const {
return printTo(p, false);
}
size_t IPAddress::printTo(Print &p, bool includeZone) const {
size_t n = 0;
if (_type == IPv6) {
// IPv6 IETF canonical format: compress left-most longest run of two or more zero fields, lower case
int8_t longest_start = -1;
int8_t longest_length = 1;
int8_t current_start = -1;
int8_t current_length = 0;
for (int8_t f = 0; f < 8; f++) {
if (_address.bytes[f * 2] == 0 && _address.bytes[f * 2 + 1] == 0) {
if (current_start == -1) {
current_start = f;
current_length = 1;
} else {
current_length++;
}
if (current_length > longest_length) {
longest_start = current_start;
longest_length = current_length;
}
} else {
current_start = -1;
}
}
for (int f = 0; f < 8; f++) {
if (f < longest_start || f >= longest_start + longest_length) {
uint8_t c1 = _address.bytes[f * 2] >> 4;
uint8_t c2 = _address.bytes[f * 2] & 0xf;
uint8_t c3 = _address.bytes[f * 2 + 1] >> 4;
uint8_t c4 = _address.bytes[f * 2 + 1] & 0xf;
if (c1 > 0) {
n += p.print((char)(c1 < 10 ? '0' + c1 : 'a' + c1 - 10));
}
if (c1 > 0 || c2 > 0) {
n += p.print((char)(c2 < 10 ? '0' + c2 : 'a' + c2 - 10));
}
if (c1 > 0 || c2 > 0 || c3 > 0) {
n += p.print((char)(c3 < 10 ? '0' + c3 : 'a' + c3 - 10));
}
n += p.print((char)(c4 < 10 ? '0' + c4 : 'a' + c4 - 10));
if (f < 7) {
n += p.print(':');
}
} else if (f == longest_start) {
if (longest_start == 0) {
n += p.print(':');
}
n += p.print(':');
}
}
// Appending the zone-id via netif_index_to_name() causes an exception on recent IDF builds,
// so that variant is disabled for now:
// if (_zone > 0 && includeZone) {
// n += p.print('%');
// char if_name[NETIF_NAMESIZE];
// netif_index_to_name(_zone, if_name);
// n += p.print(if_name);
// }
// In the interim, print the two-letter interface name from netif_list (when found) followed by the interface index
if (_zone > 0 && includeZone) {
n += p.print('%');
// look for the interface name
for (netif *intf = netif_list; intf != nullptr; intf = intf->next) {
if (_zone - 1 == intf->num) {
n += p.print(intf->name[0]);
n += p.print(intf->name[1]);
break;
}
}
n += p.print(_zone - 1);
}
return n;
}
// IPv4
for (int i = 0; i < 3; i++) {
n += p.print(_address.bytes[IPADDRESS_V4_BYTES_INDEX + i], DEC);
n += p.print('.');
}
n += p.print(_address.bytes[IPADDRESS_V4_BYTES_INDEX + 3], DEC);
return n;
}
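// A minimal standalone sketch (plain C++, no Arduino Print dependency) of the same zero-run
// compression used by printTo() above; the names below are illustrative, not part of the core.
#include <cstdint>
#include <cstdio>
#include <string>
static std::string ipv6_to_string(const uint8_t bytes[16]) {
  int longest_start = -1, longest_length = 1, current_start = -1, current_length = 0;
  // Find the left-most longest run of two or more all-zero 16-bit fields.
  for (int f = 0; f < 8; f++) {
    if (bytes[f * 2] == 0 && bytes[f * 2 + 1] == 0) {
      if (current_start == -1) {
        current_start = f;
        current_length = 1;
      } else {
        current_length++;
      }
      if (current_length > longest_length) {
        longest_start = current_start;
        longest_length = current_length;
      }
    } else {
      current_start = -1;
    }
  }
  std::string out;
  char field[8];
  for (int f = 0; f < 8; f++) {
    if (f < longest_start || f >= longest_start + longest_length) {
      snprintf(field, sizeof(field), "%x", (bytes[f * 2] << 8) | bytes[f * 2 + 1]);  // lower case, no leading zeros
      out += field;
      if (f < 7) {
        out += ':';
      }
    } else if (f == longest_start) {
      if (longest_start == 0) {
        out += ':';
      }
      out += ':';
    }
  }
  return out;
}
// Example: the 16 bytes of fe80::1 come back as "fe80::1", and 2001:db8:: as "2001:db8::".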
IPAddress::IPAddress(const ip_addr_t *addr) {
from_ip_addr_t(addr);
}
void IPAddress::to_ip_addr_t(ip_addr_t *addr) const {
#if CONFIG_LWIP_IPV6
if (_type == IPv6) {
addr->type = IPADDR_TYPE_V6;
addr->u_addr.ip6.addr[0] = _address.dword[0];
addr->u_addr.ip6.addr[1] = _address.dword[1];
addr->u_addr.ip6.addr[2] = _address.dword[2];
addr->u_addr.ip6.addr[3] = _address.dword[3];
#if LWIP_IPV6_SCOPES
addr->u_addr.ip6.zone = _zone;
#endif /* LWIP_IPV6_SCOPES */
} else {
addr->type = IPADDR_TYPE_V4;
addr->u_addr.ip4.addr = _address.dword[IPADDRESS_V4_DWORD_INDEX];
}
#else
addr->addr = _address.dword[IPADDRESS_V4_DWORD_INDEX];
#endif
}
IPAddress &IPAddress::from_ip_addr_t(const ip_addr_t *addr) {
#if CONFIG_LWIP_IPV6
if (addr->type == IPADDR_TYPE_V6) {
_type = IPv6;
_address.dword[0] = addr->u_addr.ip6.addr[0];
_address.dword[1] = addr->u_addr.ip6.addr[1];
_address.dword[2] = addr->u_addr.ip6.addr[2];
_address.dword[3] = addr->u_addr.ip6.addr[3];
#if LWIP_IPV6_SCOPES
_zone = addr->u_addr.ip6.zone;
#endif /* LWIP_IPV6_SCOPES */
} else {
#endif
_type = IPv4;
memset(_address.bytes, 0, sizeof(_address.bytes));
#if CONFIG_LWIP_IPV6
_address.dword[IPADDRESS_V4_DWORD_INDEX] = addr->u_addr.ip4.addr;
#else
_address.dword[IPADDRESS_V4_DWORD_INDEX] = addr->addr;
#endif
#if CONFIG_LWIP_IPV6
}
#endif
return *this;
}
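// Illustrative usage sketch (not part of the core): round-trip an IPv4 address through the
// lwIP conversion helpers above; assumes an ESP32 build where lwIP's ip_addr_t is available.
static void lwip_conversion_example() {
  IPAddress ip(192, 168, 1, 42);
  ip_addr_t lw;
  ip.to_ip_addr_t(&lw);      // fill the lwIP struct from the Arduino object
  IPAddress back;
  back.from_ip_addr_t(&lw);  // convert back; back == ip holds afterwards
}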
#if CONFIG_LWIP_IPV6
esp_ip6_addr_type_t IPAddress::addr_type() const {
if (_type != IPv6) {
return ESP_IP6_ADDR_IS_UNKNOWN;
}
ip_addr_t addr;
to_ip_addr_t(&addr);
return esp_netif_ip6_get_addr_type((esp_ip6_addr_t *)(&(addr.u_addr.ip6)));
}
#endif
#if CONFIG_LWIP_IPV6
const IPAddress IN6ADDR_ANY(IPv6);
#endif
const IPAddress INADDR_NONE(0, 0, 0, 0);
// declared one time - as external in IPAddress.h
IPAddress INADDR_NONE(0, 0, 0, 0);


@ -17,124 +17,80 @@
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#pragma once
#ifndef IPAddress_h
#define IPAddress_h
#include <stdint.h>
#include "Printable.h"
#include "WString.h"
#include "lwip/ip_addr.h"
#include "esp_netif_ip_addr.h"
#include "sdkconfig.h"
#define IPADDRESS_V4_BYTES_INDEX 12
#define IPADDRESS_V4_DWORD_INDEX 3
#include <WString.h>
#include <Printable.h>
// A class to make it easier to handle and pass around IP addresses
enum IPType {
IPv4,
IPv6
};
class IPAddress : public Printable {
class IPAddress: public Printable
{
private:
union {
uint8_t bytes[16];
uint32_t dword[4];
uint8_t bytes[4]; // IPv4 address
uint32_t dword;
} _address;
IPType _type;
uint8_t _zone;
// Access the raw byte array containing the address. Because this returns a pointer
// to the internal structure rather than a copy of the address this function should only
// be used when you know that the usage of the returned uint8_t* will be transient and not
// stored.
uint8_t *raw_address() {
return _type == IPv4 ? &_address.bytes[IPADDRESS_V4_BYTES_INDEX] : _address.bytes;
uint8_t* raw_address()
{
return _address.bytes;
}
public:
// Constructors
// Default IPv4
IPAddress();
IPAddress(IPType ip_type);
IPAddress(uint8_t first_octet, uint8_t second_octet, uint8_t third_octet, uint8_t fourth_octet);
IPAddress(
uint8_t o1, uint8_t o2, uint8_t o3, uint8_t o4, uint8_t o5, uint8_t o6, uint8_t o7, uint8_t o8, uint8_t o9, uint8_t o10, uint8_t o11, uint8_t o12,
uint8_t o13, uint8_t o14, uint8_t o15, uint8_t o16, uint8_t z = 0
);
// IPv4; see implementation note
IPAddress(uint32_t address);
// Default IPv4
IPAddress(const uint8_t *address);
IPAddress(IPType ip_type, const uint8_t *address, uint8_t z = 0);
// If IPv4 parsing fails, tries IPv6; see fromString()
IPAddress(const char *address);
IPAddress(const IPAddress &address);
virtual ~IPAddress() {}
bool fromString(const char *address);
bool fromString(const String &address) {
return fromString(address.c_str());
bool fromString(const String &address) { return fromString(address.c_str()); }
// Overloaded cast operator to allow IPAddress objects to be used where a pointer
// to a four-byte uint8_t array is expected
operator uint32_t() const
{
return _address.dword;
}
bool operator==(const IPAddress& addr) const
{
return _address.dword == addr._address.dword;
}
// Overloaded cast operator to allow IPAddress objects to be used where a uint32_t is expected
// NOTE: IPv4 only; see implementation note
operator uint32_t() const {
return _type == IPv4 ? _address.dword[IPADDRESS_V4_DWORD_INDEX] : 0;
};
bool operator==(const IPAddress &addr) const;
bool operator!=(const IPAddress &addr) const {
return !(*this == addr);
};
// NOTE: IPv4 only; we don't know the length of the pointer
bool operator==(const uint8_t* addr) const;
// Overloaded index operator to allow getting and setting individual octets of the address
uint8_t operator[](int index) const;
uint8_t &operator[](int index);
uint8_t operator[](int index) const
{
return _address.bytes[index];
}
uint8_t& operator[](int index)
{
return _address.bytes[index];
}
// Overloaded copy operators to allow initialization of IPAddress objects from other types
// NOTE: IPv4 only
// Overloaded copy operators to allow initialisation of IPAddress objects from other types
IPAddress& operator=(const uint8_t *address);
// NOTE: IPv4 only; see implementation note
IPAddress& operator=(uint32_t address);
// If IPv4 parsing fails, tries IPv6; see fromString()
IPAddress &operator=(const char *address);
IPAddress &operator=(const IPAddress &address);
virtual size_t printTo(Print& p) const;
String toString(bool includeZone = false) const;
IPType type() const {
return _type;
}
// Espressif lwIP conversions
IPAddress(const ip_addr_t *addr);
void to_ip_addr_t(ip_addr_t *addr) const;
IPAddress &from_ip_addr_t(const ip_addr_t *addr);
#if CONFIG_LWIP_IPV6
esp_ip6_addr_type_t addr_type() const;
#endif
uint8_t zone() const {
return (type() == IPv6) ? _zone : 0;
}
size_t printTo(Print &p, bool includeZone) const;
String toString() const;
friend class EthernetClass;
friend class UDP;
friend class Client;
friend class Server;
friend class EthernetClass;
friend class DhcpClass;
friend class DNSClient;
protected:
bool fromString4(const char *address);
bool fromString6(const char *address);
};
extern const IPAddress IN6ADDR_ANY;
extern const IPAddress INADDR_NONE;
// changed to extern because const declaration creates copies in BSS of INADDR_NONE for each CPP unit that includes it
extern IPAddress INADDR_NONE;
#endif
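// Illustrative usage sketch (not part of the header): fromString() accepts either family, while
// the uint32_t conversion is meaningful for IPv4 only.
#include <IPAddress.h>
static void ipaddress_usage_example() {
  IPAddress a;
  a.fromString("192.168.1.42");  // parsed by the IPv4 path
  uint32_t raw = a;              // raw 32-bit value; an IPv6 address converts to 0
  a.fromString("fe80::1");       // IPv4 parse fails, so the IPv6 parser is tried
  (void)raw;
}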


@ -0,0 +1,90 @@
/*
IPv6Address.cpp - Base class that provides IPv6Address
Copyright (c) 2011 Adrian McEwen. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <Arduino.h>
#include <IPv6Address.h>
#include <Print.h>
IPv6Address::IPv6Address()
{
memset(_address.bytes, 0, sizeof(_address.bytes));
}
IPv6Address::IPv6Address(const uint8_t *address)
{
memcpy(_address.bytes, address, sizeof(_address.bytes));
}
IPv6Address::IPv6Address(const uint32_t *address)
{
memcpy(_address.bytes, (const uint8_t *)address, sizeof(_address.bytes));
}
IPv6Address& IPv6Address::operator=(const uint8_t *address)
{
memcpy(_address.bytes, address, sizeof(_address.bytes));
return *this;
}
bool IPv6Address::operator==(const uint8_t* addr) const
{
return memcmp(addr, _address.bytes, sizeof(_address.bytes)) == 0;
}
size_t IPv6Address::printTo(Print& p) const
{
size_t n = 0;
for(int i = 0; i < 16; i+=2) {
if(i){
n += p.print(':');
}
n += p.printf("%02x", _address.bytes[i]);
n += p.printf("%02x", _address.bytes[i+1]);
}
return n;
}
String IPv6Address::toString() const
{
char szRet[40];
sprintf(szRet,"%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x",
_address.bytes[0], _address.bytes[1], _address.bytes[2], _address.bytes[3],
_address.bytes[4], _address.bytes[5], _address.bytes[6], _address.bytes[7],
_address.bytes[8], _address.bytes[9], _address.bytes[10], _address.bytes[11],
_address.bytes[12], _address.bytes[13], _address.bytes[14], _address.bytes[15]);
return String(szRet);
}
bool IPv6Address::fromString(const char *address)
{
//format 0011:2233:4455:6677:8899:aabb:ccdd:eeff
if(strlen(address) != 39){
return false;
}
char * pos = (char *)address;
size_t i = 0;
for(i = 0; i < 16; i+=2) {
if(!sscanf(pos, "%2hhx", &_address.bytes[i]) || !sscanf(pos+2, "%2hhx", &_address.bytes[i+1])){
return false;
}
pos += 5;
}
return true;
}
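// Illustrative usage sketch (not part of the file): this legacy parser only accepts the fully
// expanded 39-character form, eight 4-digit groups separated by colons.
static void ipv6address_parse_example() {
  IPv6Address a;
  bool ok = a.fromString("fe80:0000:0000:0000:0000:0000:0000:0001");  // 39 chars -> true
  bool bad = a.fromString("fe80::1");                                 // compressed form -> false
  (void)ok;
  (void)bad;
}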

cores/esp32/IPv6Address.h

@ -0,0 +1,94 @@
/*
IPv6Address.h - Base class that provides IPv6Address
Copyright (c) 2011 Adrian McEwen. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef IPv6Address_h
#define IPv6Address_h
#include <stdint.h>
#include <WString.h>
#include <Printable.h>
// A class to make it easier to handle and pass around IP addresses
class IPv6Address: public Printable
{
private:
union {
uint8_t bytes[16]; // IPv6 address as 16 bytes
uint32_t dword[4];
} _address;
// Access the raw byte array containing the address. Because this returns a pointer
// to the internal structure rather than a copy of the address this function should only
// be used when you know that the usage of the returned uint8_t* will be transient and not
// stored.
uint8_t* raw_address()
{
return _address.bytes;
}
public:
// Constructors
IPv6Address();
IPv6Address(const uint8_t *address);
IPv6Address(const uint32_t *address);
virtual ~IPv6Address() {}
bool fromString(const char *address);
bool fromString(const String &address) { return fromString(address.c_str()); }
operator const uint8_t*() const
{
return _address.bytes;
}
operator const uint32_t*() const
{
return _address.dword;
}
bool operator==(const IPv6Address& addr) const
{
return (_address.dword[0] == addr._address.dword[0])
&& (_address.dword[1] == addr._address.dword[1])
&& (_address.dword[2] == addr._address.dword[2])
&& (_address.dword[3] == addr._address.dword[3]);
}
bool operator==(const uint8_t* addr) const;
// Overloaded index operator to allow getting and setting individual octets of the address
uint8_t operator[](int index) const
{
return _address.bytes[index];
}
uint8_t& operator[](int index)
{
return _address.bytes[index];
}
// Overloaded copy operators to allow initialisation of IPv6Address objects from other types
IPv6Address& operator=(const uint8_t *address);
virtual size_t printTo(Print& p) const;
String toString() const;
friend class UDP;
friend class Client;
friend class Server;
};
#endif


@ -16,32 +16,45 @@
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <Arduino.h>
#include <HEXBuilder.h>
#include <MD5Builder.h>
void MD5Builder::begin(void) {
static uint8_t hex_char_to_byte(uint8_t c)
{
return (c >= 'a' && c <= 'f') ? (c - ((uint8_t)'a' - 0xa)) :
(c >= 'A' && c <= 'F') ? (c - ((uint8_t)'A' - 0xA)) :
(c >= '0' && c<= '9') ? (c - (uint8_t)'0') : 0;
}
void MD5Builder::begin(void)
{
memset(_buf, 0x00, ESP_ROM_MD5_DIGEST_LEN);
esp_rom_md5_init(&_ctx);
}
void MD5Builder::add(const uint8_t *data, size_t len) {
void MD5Builder::add(uint8_t * data, uint16_t len)
{
esp_rom_md5_update(&_ctx, data, len);
}
void MD5Builder::addHexString(const char *data) {
size_t len = strlen(data);
void MD5Builder::addHexString(const char * data)
{
uint16_t i, len = strlen(data);
uint8_t * tmp = (uint8_t*)malloc(len/2);
if(tmp == NULL) {
return;
}
hex2bytes(tmp, len / 2, data);
for(i=0; i<len; i+=2) {
uint8_t high = hex_char_to_byte(data[i]);
uint8_t low = hex_char_to_byte(data[i+1]);
tmp[i/2] = (high & 0x0F) << 4 | (low & 0x0F);
}
add(tmp, len/2);
free(tmp);
}
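// Illustrative usage sketch (not part of the file): addHexString() feeds the decoded bytes into
// the digest, so hashing "48656c6c6f" is equivalent to hashing the five bytes of "Hello".
static void md5_hex_example() {
  MD5Builder md5;
  md5.begin();
  md5.addHexString("48656c6c6f");
  md5.calculate();
  String digest = md5.toString();  // 32 lowercase hex characters
  (void)digest;
}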
bool MD5Builder::addStream(Stream &stream, const size_t maxLen) {
bool MD5Builder::addStream(Stream & stream, const size_t maxLen)
{
const int buf_size = 512;
int maxLengthLeft = maxLen;
uint8_t * buf = (uint8_t*) malloc(buf_size);
@ -80,19 +93,25 @@ bool MD5Builder::addStream(Stream &stream, const size_t maxLen) {
return true;
}
void MD5Builder::calculate(void) {
void MD5Builder::calculate(void)
{
esp_rom_md5_final(_buf, &_ctx);
}
void MD5Builder::getBytes(uint8_t *output) {
void MD5Builder::getBytes(uint8_t * output)
{
memcpy(output, _buf, ESP_ROM_MD5_DIGEST_LEN);
}
void MD5Builder::getChars(char *output) {
bytes2hex(output, ESP_ROM_MD5_DIGEST_LEN * 2 + 1, _buf, ESP_ROM_MD5_DIGEST_LEN);
void MD5Builder::getChars(char * output)
{
for(uint8_t i = 0; i < ESP_ROM_MD5_DIGEST_LEN; i++) {
sprintf(output + (i * 2), "%02x", _buf[i]);
}
}
String MD5Builder::toString(void) {
String MD5Builder::toString(void)
{
char out[(ESP_ROM_MD5_DIGEST_LEN * 2) + 1];
getChars(out);
return String(out);


@ -1,6 +1,6 @@
/*
Copyright (c) 2015 Hristo Gochkov. All rights reserved.
This file is part of the esp32 core for Arduino environment.
This file is part of the esp8266 core for Arduino environment.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
@ -15,11 +15,9 @@
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Modified 10 Jan 2024 by Lucas Saavedra Vaz (Use abstract class HashBuilder)
*/
#ifndef MD5Builder_h
#define MD5Builder_h
#ifndef __ESP8266_MD5_BUILDER__
#define __ESP8266_MD5_BUILDER__
#include <WString.h>
#include <Stream.h>
@ -27,27 +25,41 @@
#include "esp_system.h"
#include "esp_rom_md5.h"
#include "HashBuilder.h"
class MD5Builder : public HashBuilder {
class MD5Builder
{
private:
md5_context_t _ctx;
uint8_t _buf[ESP_ROM_MD5_DIGEST_LEN];
public:
void begin(void) override;
using HashBuilder::add;
void add(const uint8_t *data, size_t len) override;
using HashBuilder::addHexString;
void addHexString(const char *data) override;
bool addStream(Stream &stream, const size_t maxLen) override;
void calculate(void) override;
void getBytes(uint8_t *output) override;
void getChars(char *output) override;
String toString(void) override;
void begin(void);
void add(uint8_t * data, uint16_t len);
void add(const char * data)
{
add((uint8_t*)data, strlen(data));
}
void add(char * data)
{
add((const char*)data);
}
void add(String data)
{
add(data.c_str());
}
void addHexString(const char * data);
void addHexString(char * data)
{
addHexString((const char*)data);
}
void addHexString(String data)
{
addHexString(data.c_str());
}
bool addStream(Stream & stream, const size_t maxLen);
void calculate(void);
void getBytes(uint8_t * output);
void getChars(char * output);
String toString(void);
};
#endif


@ -1,228 +0,0 @@
#include <MacAddress.h>
#include <stdio.h>
#include <Print.h>
//Default constructor, blank mac address.
MacAddress::MacAddress() : MacAddress(MAC6) {}
MacAddress::MacAddress(MACType mac_type) {
_type = mac_type;
memset(_mac.bytes, 0, sizeof(_mac.bytes));
}
MacAddress::MacAddress(MACType mac_type, uint64_t mac) {
_type = mac_type;
_mac.val = mac;
}
MacAddress::MacAddress(MACType mac_type, const uint8_t *macbytearray) {
_type = mac_type;
memset(_mac.bytes, 0, sizeof(_mac.bytes));
if (_type == MAC6) {
memcpy(_mac.bytes, macbytearray, 6);
} else {
memcpy(_mac.bytes, macbytearray, 8);
}
}
MacAddress::MacAddress(const char *macstr) {
fromString(macstr);
}
MacAddress::MacAddress(const String &macstr) {
fromString(macstr.c_str());
}
MacAddress::MacAddress(uint8_t b1, uint8_t b2, uint8_t b3, uint8_t b4, uint8_t b5, uint8_t b6) {
_type = MAC6;
memset(_mac.bytes, 0, sizeof(_mac.bytes));
_mac.bytes[0] = b1;
_mac.bytes[1] = b2;
_mac.bytes[2] = b3;
_mac.bytes[3] = b4;
_mac.bytes[4] = b5;
_mac.bytes[5] = b6;
}
MacAddress::MacAddress(uint8_t b1, uint8_t b2, uint8_t b3, uint8_t b4, uint8_t b5, uint8_t b6, uint8_t b7, uint8_t b8) {
_type = MAC8;
_mac.bytes[0] = b1;
_mac.bytes[1] = b2;
_mac.bytes[2] = b3;
_mac.bytes[3] = b4;
_mac.bytes[4] = b5;
_mac.bytes[5] = b6;
_mac.bytes[6] = b7;
_mac.bytes[7] = b8;
}
//Parse user entered string into MAC address
bool MacAddress::fromString(const char *buf) {
if (strlen(buf) == 17) {
return fromString6(buf);
} else if (strlen(buf) == 23) {
return fromString8(buf);
}
return false;
}
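// Illustrative usage sketch (not part of the file): the input length selects the parser,
// 17 characters for a 6-byte MAC and 23 characters for an 8-byte MAC.
static void mac_parse_example() {
  MacAddress m6, m8;
  bool ok6 = m6.fromString("24:0A:C4:00:11:22");        // 17 chars -> fromString6(), MAC6
  bool ok8 = m8.fromString("24:0A:C4:FF:FE:00:11:22");  // 23 chars -> fromString8(), MAC8
  (void)ok6;
  (void)ok8;
}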
//Parse user entered string into MAC address
bool MacAddress::fromString6(const char *buf) {
char cs[18]; // 17 + 1 for null terminator
char *token;
char *next; //Unused but required
int i;
strncpy(cs, buf, sizeof(cs) - 1); //strtok modifies the buffer: copy to working buffer.
for (i = 0; i < 6; i++) {
token = strtok((i == 0) ? cs : NULL, ":"); //Find first or next token
if (!token) { //No more tokens found
return false;
}
_mac.bytes[i] = strtol(token, &next, 16);
}
_type = MAC6;
return true;
}
bool MacAddress::fromString8(const char *buf) {
char cs[24]; // 23 + 1 for null terminator
char *token;
char *next; //Unused but required
int i;
strncpy(cs, buf, sizeof(cs) - 1); //strtok modifies the buffer: copy to working buffer.
for (i = 0; i < 8; i++) {
token = strtok((i == 0) ? cs : NULL, ":"); //Find first or next token
if (!token) { //No more tokens found
return false;
}
_mac.bytes[i] = strtol(token, &next, 16);
}
_type = MAC8;
return true;
}
//Copy MAC into byte array
void MacAddress::toBytes(uint8_t *buf) {
if (_type == MAC6) {
memcpy(buf, _mac.bytes, 6);
} else {
memcpy(buf, _mac.bytes, sizeof(_mac.bytes));
}
}
//Print MAC address into a C string.
//Buffer must be at least 18 chars for a 6-byte MAC, 24 chars for an 8-byte MAC
int MacAddress::toString(char *buf) {
if (_type == MAC6) {
return sprintf(buf, "%02X:%02X:%02X:%02X:%02X:%02X", _mac.bytes[0], _mac.bytes[1], _mac.bytes[2], _mac.bytes[3], _mac.bytes[4], _mac.bytes[5]);
} else {
return sprintf(
buf, "%02X:%02X:%02X:%02X:%02X:%02X:%02X:%02X", _mac.bytes[0], _mac.bytes[1], _mac.bytes[2], _mac.bytes[3], _mac.bytes[4], _mac.bytes[5], _mac.bytes[6],
_mac.bytes[7]
);
}
}
String MacAddress::toString() const {
uint8_t bytes = (_type == MAC6) ? 18 : 24;
char buf[bytes];
if (_type == MAC6) {
snprintf(buf, sizeof(buf), "%02X:%02X:%02X:%02X:%02X:%02X", _mac.bytes[0], _mac.bytes[1], _mac.bytes[2], _mac.bytes[3], _mac.bytes[4], _mac.bytes[5]);
} else {
snprintf(
buf, sizeof(buf), "%02X:%02X:%02X:%02X:%02X:%02X:%02X:%02X", _mac.bytes[0], _mac.bytes[1], _mac.bytes[2], _mac.bytes[3], _mac.bytes[4], _mac.bytes[5],
_mac.bytes[6], _mac.bytes[7]
);
}
return String(buf);
}
uint64_t MacAddress::Value() {
return _mac.val;
}
//Allow getting individual octets of the address. e.g. uint8_t b0 = ma[0];
uint8_t MacAddress::operator[](int index) const {
index = EnforceIndexBounds(index);
return _mac.bytes[index];
}
//Allow setting individual octets of the address. e.g. ma[2] = 255;
uint8_t &MacAddress::operator[](int index) {
index = EnforceIndexBounds(index);
return _mac.bytes[index];
}
//Overloaded copy operator: init MacAddress object from byte array
MacAddress &MacAddress::operator=(const uint8_t *macbytearray) {
// 6-byte MacAddress only
_type = MAC6;
memset(_mac.bytes, 0, sizeof(_mac.bytes));
memcpy(_mac.bytes, macbytearray, 6);
return *this;
}
//Overloaded copy operator: init MacAddress object from uint64_t
MacAddress &MacAddress::operator=(uint64_t macval) {
// 6-byte MacAddress only
_type = MAC6;
_mac.val = macval;
return *this;
}
//Compare class to byte array
bool MacAddress::operator==(const uint8_t *macbytearray) const {
return !memcmp(_mac.bytes, macbytearray, 6);
}
//Allow comparing value of two classes
bool MacAddress::operator==(const MacAddress &mac2) const {
return _mac.val == mac2._mac.val;
}
//Type converter object to uint64_t [same as .Value()]
MacAddress::operator uint64_t() const {
return _mac.val;
}
//Type converter object to read only pointer to mac bytes. e.g. const uint8_t *mac_8 = ma;
MacAddress::operator const uint8_t *() const {
return _mac.bytes;
}
//Type converter object to read only pointer to mac value. e.g. const uint64_t *mac_64 = ma;
MacAddress::operator const uint64_t *() const {
return &_mac.val;
}
size_t MacAddress::printTo(Print &p) const {
uint8_t bytes = (_type == MAC6) ? 6 : 8;
size_t n = 0;
for (int i = 0; i < bytes; i++) {
if (i) {
n += p.print(':');
}
n += p.printf("%02X", _mac.bytes[i]);
}
return n;
}
//Bounds checking
int MacAddress::EnforceIndexBounds(int i) const {
if (i < 0) {
return 0;
}
if (_type == MAC6) {
if (i >= 6) {
return 5;
}
} else {
if (i >= 8) {
return 7;
}
}
return i;
}
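// Illustrative usage sketch (not part of the file): out-of-range indexes are clamped by
// EnforceIndexBounds() rather than reading or writing past the internal byte array.
static void mac_index_example() {
  MacAddress m(0x24, 0x0A, 0xC4, 0x00, 0x11, 0x22);
  uint8_t last = m[10];  // clamped to index 5 for a 6-byte address -> 0x22
  m[-3] = 0xAA;          // clamped to index 0
  (void)last;
}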


@ -1,104 +0,0 @@
//-----------------------------------------------------------------------------
// MacAddress.h - class to make it easier to handle BSSID and MAC addresses.
//
// Copyright 2022 David McCurley
// Modified by Espressif Systems 2024
//
// Licensed under the Apache License, Version 2.0 (the "License").
// You may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//-----------------------------------------------------------------------------
#ifndef MacAddress_h
#define MacAddress_h
#include <stdint.h>
#include <WString.h>
#include <Printable.h>
enum MACType {
MAC6,
MAC8
};
// A class to make it easier to handle and pass around MAC addresses, supporting both 6-byte and 8-byte formats.
class MacAddress : public Printable {
private:
union {
uint8_t bytes[8];
uint64_t val;
} _mac;
MACType _type;
public:
//Default MAC6
MacAddress();
MacAddress(MACType mac_type);
MacAddress(uint8_t b1, uint8_t b2, uint8_t b3, uint8_t b4, uint8_t b5, uint8_t b6);
MacAddress(uint8_t b1, uint8_t b2, uint8_t b3, uint8_t b4, uint8_t b5, uint8_t b6, uint8_t b7, uint8_t b8);
MacAddress(MACType mac_type, uint64_t mac);
MacAddress(MACType mac_type, const uint8_t *macbytearray);
//Default MAC6
MacAddress(uint64_t mac) : MacAddress(MAC6, mac) {}
MacAddress(const uint8_t *macbytearray) : MacAddress(MAC6, macbytearray) {}
MacAddress(const char *macstr);
MacAddress(const String &macstr);
virtual ~MacAddress() {}
bool fromString(const char *buf);
bool fromString(const String &macstr) {
return fromString(macstr.c_str());
}
void toBytes(uint8_t *buf);
int toString(char *buf);
String toString() const;
uint64_t Value();
uint8_t operator[](int index) const;
uint8_t &operator[](int index);
//MAC6 only
MacAddress &operator=(const uint8_t *macbytearray);
MacAddress &operator=(uint64_t macval);
bool operator==(const uint8_t *macbytearray) const;
bool operator==(const MacAddress &mac2) const;
operator uint64_t() const;
operator const uint8_t *() const;
operator const uint64_t *() const;
virtual size_t printTo(Print &p) const;
// future use in Arduino Networking
/*
friend class EthernetClass;
friend class UDP;
friend class Client;
friend class Server;
friend class DhcpClass;
friend class DNSClient;
*/
protected:
bool fromString6(const char *buf);
bool fromString8(const char *buf);
private:
int EnforceIndexBounds(int i) const;
};
#endif


@ -35,7 +35,8 @@ extern "C" {
// Public Methods //////////////////////////////////////////////////////////////
/* default implementation: may be overridden */
size_t Print::write(const uint8_t *buffer, size_t size) {
size_t Print::write(const uint8_t *buffer, size_t size)
{
size_t n = 0;
while(size--) {
n += write(*buffer++);
@ -43,7 +44,8 @@ size_t Print::write(const uint8_t *buffer, size_t size) {
return n;
}
size_t Print::vprintf(const char *format, va_list arg) {
size_t Print::vprintf(const char *format, va_list arg)
{
char loc_buf[64];
char * temp = loc_buf;
va_list copy;
@ -54,7 +56,7 @@ size_t Print::vprintf(const char *format, va_list arg) {
va_end(arg);
return 0;
}
if (len >= (int)sizeof(loc_buf)) { // comparison of same sign type for the compiler
if(len >= (int)sizeof(loc_buf)){ // comparation of same sign type for the compiler
temp = (char*) malloc(len+1);
if(temp == NULL) {
va_end(arg);
@ -70,7 +72,8 @@ size_t Print::vprintf(const char *format, va_list arg) {
return len;
}
size_t Print::printf(const __FlashStringHelper *ifsh, ...) {
size_t Print::printf(const __FlashStringHelper *ifsh, ...)
{
va_list arg;
va_start(arg, ifsh);
const char * format = (reinterpret_cast<const char *>(ifsh));
@ -79,7 +82,8 @@ size_t Print::printf(const __FlashStringHelper *ifsh, ...) {
return ret;
}
size_t Print::printf(const char *format, ...) {
size_t Print::printf(const char *format, ...)
{
va_list arg;
va_start(arg, format);
size_t ret = vprintf(format, arg);
@ -87,31 +91,38 @@ size_t Print::printf(const char *format, ...) {
return ret;
}
size_t Print::print(const String &s) {
size_t Print::print(const String &s)
{
return write(s.c_str(), s.length());
}
size_t Print::print(const char str[]) {
size_t Print::print(const char str[])
{
return write(str);
}
size_t Print::print(char c) {
size_t Print::print(char c)
{
return write(c);
}
size_t Print::print(unsigned char b, int base) {
size_t Print::print(unsigned char b, int base)
{
return print((unsigned long) b, base);
}
size_t Print::print(int n, int base) {
size_t Print::print(int n, int base)
{
return print((long) n, base);
}
size_t Print::print(unsigned int n, int base) {
size_t Print::print(unsigned int n, int base)
{
return print((unsigned long) n, base);
}
size_t Print::print(long n, int base) {
size_t Print::print(long n, int base)
{
int t = 0;
if (base == 10 && n < 0) {
t = print('-');
@ -120,7 +131,8 @@ size_t Print::print(long n, int base) {
return printNumber(static_cast<unsigned long>(n), base) + t;
}
size_t Print::print(unsigned long n, int base) {
size_t Print::print(unsigned long n, int base)
{
if(base == 0) {
return write(n);
} else {
@ -128,7 +140,8 @@ size_t Print::print(unsigned long n, int base) {
}
}
size_t Print::print(long long n, int base) {
size_t Print::print(long long n, int base)
{
int t = 0;
if (base == 10 && n < 0) {
t = print('-');
@ -137,7 +150,8 @@ size_t Print::print(long long n, int base) {
return printNumber(static_cast<unsigned long long>(n), base) + t;
}
size_t Print::print(unsigned long long n, int base) {
size_t Print::print(unsigned long long n, int base)
{
if (base == 0) {
return write(n);
} else {
@ -145,15 +159,18 @@ size_t Print::print(unsigned long long n, int base) {
}
}
size_t Print::print(double n, int digits) {
size_t Print::print(double n, int digits)
{
return printFloat(n, digits);
}
size_t Print::print(const Printable &x) {
size_t Print::print(const Printable& x)
{
return x.printTo(*this);
}
size_t Print::print(struct tm *timeinfo, const char *format) {
size_t Print::print(struct tm * timeinfo, const char * format)
{
const char * f = format;
if(!f){
f = "%c";
@ -166,83 +183,97 @@ size_t Print::print(struct tm *timeinfo, const char *format) {
return print(buf);
}
size_t Print::println(void) {
size_t Print::println(void)
{
return print("\r\n");
}
size_t Print::println(const String &s) {
size_t Print::println(const String &s)
{
size_t n = print(s);
n += println();
return n;
}
size_t Print::println(const char c[]) {
size_t Print::println(const char c[])
{
size_t n = print(c);
n += println();
return n;
}
size_t Print::println(char c) {
size_t Print::println(char c)
{
size_t n = print(c);
n += println();
return n;
}
size_t Print::println(unsigned char b, int base) {
size_t Print::println(unsigned char b, int base)
{
size_t n = print(b, base);
n += println();
return n;
}
size_t Print::println(int num, int base) {
size_t Print::println(int num, int base)
{
size_t n = print(num, base);
n += println();
return n;
}
size_t Print::println(unsigned int num, int base) {
size_t Print::println(unsigned int num, int base)
{
size_t n = print(num, base);
n += println();
return n;
}
size_t Print::println(long num, int base) {
size_t Print::println(long num, int base)
{
size_t n = print(num, base);
n += println();
return n;
}
size_t Print::println(unsigned long num, int base) {
size_t Print::println(unsigned long num, int base)
{
size_t n = print(num, base);
n += println();
return n;
}
size_t Print::println(long long num, int base) {
size_t Print::println(long long num, int base)
{
size_t n = print(num, base);
n += println();
return n;
}
size_t Print::println(unsigned long long num, int base) {
size_t Print::println(unsigned long long num, int base)
{
size_t n = print(num, base);
n += println();
return n;
}
size_t Print::println(double num, int digits) {
size_t Print::println(double num, int digits)
{
size_t n = print(num, digits);
n += println();
return n;
}
size_t Print::println(const Printable &x) {
size_t Print::println(const Printable& x)
{
size_t n = print(x);
n += println();
return n;
}
size_t Print::println(struct tm *timeinfo, const char *format) {
size_t Print::println(struct tm * timeinfo, const char * format)
{
size_t n = print(timeinfo, format);
n += println();
return n;
@ -250,7 +281,8 @@ size_t Print::println(struct tm *timeinfo, const char *format) {
// Private Methods /////////////////////////////////////////////////////////////
size_t Print::printNumber(unsigned long n, uint8_t base) {
size_t Print::printNumber(unsigned long n, uint8_t base)
{
char buf[8 * sizeof(n) + 1]; // Assumes 8-bit chars plus zero byte.
char *str = &buf[sizeof(buf) - 1];
@ -271,7 +303,8 @@ size_t Print::printNumber(unsigned long n, uint8_t base) {
return write(str);
}
size_t Print::printNumber(unsigned long long n, uint8_t base) {
size_t Print::printNumber(unsigned long long n, uint8_t base)
{
char buf[8 * sizeof(n) + 1]; // Assumes 8-bit chars plus zero byte.
char* str = &buf[sizeof(buf) - 1];
@ -293,7 +326,8 @@ size_t Print::printNumber(unsigned long long n, uint8_t base) {
return write(str);
}
size_t Print::printFloat(double number, uint8_t digits) {
size_t Print::printFloat(double number, uint8_t digits)
{
size_t n = 0;
if(isnan(number)) {


@ -32,37 +32,44 @@
#define OCT 8
#define BIN 2
class Print {
class Print
{
private:
int write_error;
size_t printNumber(unsigned long, uint8_t);
size_t printNumber(unsigned long long, uint8_t);
size_t printFloat(double, uint8_t);
protected:
void setWriteError(int err = 1) {
void setWriteError(int err = 1)
{
write_error = err;
}
public:
Print() : write_error(0) {}
Print() :
write_error(0)
{
}
virtual ~Print() {}
int getWriteError() {
int getWriteError()
{
return write_error;
}
void clearWriteError() {
void clearWriteError()
{
setWriteError(0);
}
virtual size_t write(uint8_t) = 0;
size_t write(const char *str) {
size_t write(const char *str)
{
if(str == NULL) {
return 0;
}
return write((const uint8_t *) str, strlen(str));
}
virtual size_t write(const uint8_t *buffer, size_t size);
size_t write(const char *buffer, size_t size) {
size_t write(const char *buffer, size_t size)
{
return write((const uint8_t *) buffer, size);
}
@ -73,13 +80,9 @@ public:
// add availableForWrite to make compatible with Arduino Print.h
// default to zero, meaning "a single write may block"
// should be overridden by subclasses with buffering
virtual int availableForWrite() {
return 0;
}
size_t print(const __FlashStringHelper *ifsh) {
return print(reinterpret_cast<const char *>(ifsh));
}
// should be overriden by subclasses with buffering
virtual int availableForWrite() { return 0; }
size_t print(const __FlashStringHelper *ifsh) { return print(reinterpret_cast<const char *>(ifsh)); }
size_t print(const String &);
size_t print(const char[]);
size_t print(char);
@ -94,9 +97,7 @@ public:
size_t print(const Printable&);
size_t print(struct tm * timeinfo, const char * format = NULL);
size_t println(const __FlashStringHelper *ifsh) {
return println(reinterpret_cast<const char *>(ifsh));
}
size_t println(const __FlashStringHelper *ifsh) { return println(reinterpret_cast<const char *>(ifsh)); }
size_t println(const String &s);
size_t println(const char[]);
size_t println(char);
@ -113,6 +114,7 @@ public:
size_t println(void);
virtual void flush() { /* Empty implementation for backward compatibility */ }
};
#endif
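// Illustrative sketch (not part of the header): a minimal Print subclass only needs to implement
// write(uint8_t); implementations with buffering should also override availableForWrite().
#include <Print.h>
class CountingPrint : public Print {
public:
  size_t count = 0;
  size_t write(uint8_t) override {
    count++;
    return 1;
  }
};
// Usage: CountingPrint cp; cp.printf("%d", 1234); leaves cp.count == 4.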


@ -30,10 +30,12 @@ class Print;
Print::print and Print::println methods.
*/
class Printable {
class Printable
{
public:
virtual ~Printable() {}
virtual size_t printTo(Print& p) const = 0;
};
#endif
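// Illustrative sketch (not part of the header): a user type becomes printable by implementing
// printTo(), after which Print::print() and println() accept it directly.
#include <Printable.h>
#include <Print.h>
class Fraction : public Printable {
  int num, den;
public:
  Fraction(int n, int d) : num(n), den(d) {}
  size_t printTo(Print &p) const override {
    size_t n = 0;
    n += p.print(num);
    n += p.print('/');
    n += p.print(den);
    return n;
  }
};
// Usage on a board: Serial.println(Fraction(3, 4)); prints "3/4".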

Some files were not shown because too many files have changed in this diff.