Compare commits

2 commits

Author  SHA1  Message  Date
hathach  64bad17987  update adafruit tinyuf2 to 0.18.1  2024-02-27 00:25:22 +07:00
hathach  be60b0b9d8  add new tinyuf2 partition with 3.5MB app (for 4MB flash)  2024-02-27 00:23:07 +07:00

2225 changed files with 71192 additions and 158353 deletions


@ -1,246 +0,0 @@
# Clang format version: 18.1.3
---
BasedOnStyle: LLVM
AccessModifierOffset: -2
AlignAfterOpenBracket: BlockIndent
AlignArrayOfStructures: None
AlignConsecutiveAssignments:
Enabled: false
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: true
AlignConsecutiveBitFields:
Enabled: true
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: false
AlignConsecutiveDeclarations:
Enabled: false
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: false
AlignConsecutiveMacros:
Enabled: true
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: false
AlignConsecutiveShortCaseStatements:
Enabled: true
AcrossEmptyLines: false
AcrossComments: false
AlignCaseColons: false
AlignEscapedNewlines: Left
AlignOperands: Align
AlignTrailingComments:
Kind: Always
OverEmptyLines: 0
AllowAllArgumentsOnNextLine: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowBreakBeforeNoexceptSpecifier: Never
AllowShortBlocksOnASingleLine: Empty
AllowShortCaseLabelsOnASingleLine: true
AllowShortCompoundRequirementOnASingleLine: true
AllowShortEnumsOnASingleLine: false
AllowShortFunctionsOnASingleLine: Empty
AllowShortIfStatementsOnASingleLine: Never
AllowShortLambdasOnASingleLine: Empty
AllowShortLoopsOnASingleLine: true
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: MultiLine
AttributeMacros:
- __capability
BinPackArguments: true
BinPackParameters: true
BitFieldColonSpacing: Both
BraceWrapping:
AfterCaseLabel: true
AfterClass: false
AfterControlStatement: Never
AfterEnum: false
AfterFunction: false
AfterNamespace: false
AfterObjCDeclaration: false
AfterStruct: false
AfterUnion: false
AfterExternBlock: false
BeforeCatch: false
BeforeElse: false
BeforeLambdaBody: false
BeforeWhile: false
IndentBraces: false
SplitEmptyFunction: false
SplitEmptyRecord: true
SplitEmptyNamespace: true
BreakAdjacentStringLiterals: true
BreakAfterAttributes: Always
BreakAfterJavaFieldAnnotations: false
BreakArrays: false
BreakBeforeBinaryOperators: NonAssignment
BreakBeforeBraces: Custom
BreakBeforeConceptDeclarations: Always
BreakBeforeInlineASMColon: OnlyMultiline
BreakBeforeTernaryOperators: true
BreakConstructorInitializers: BeforeColon
BreakInheritanceList: BeforeColon
BreakStringLiterals: true
ColumnLimit: 160
CommentPragmas: ""
CompactNamespaces: false
ConstructorInitializerIndentWidth: 2
ContinuationIndentWidth: 2
Cpp11BracedListStyle: true
DerivePointerAlignment: false
DisableFormat: false
EmptyLineAfterAccessModifier: Never
EmptyLineBeforeAccessModifier: LogicalBlock
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros:
- foreach
- Q_FOREACH
- BOOST_FOREACH
IfMacros:
- KJ_IF_MAYBE
IncludeBlocks: Preserve
IncludeCategories:
- Regex: ^"(llvm|llvm-c|clang|clang-c)/
Priority: 2
SortPriority: 0
CaseSensitive: false
- Regex: ^(<|"(gtest|gmock|isl|json)/)
Priority: 3
SortPriority: 0
CaseSensitive: false
- Regex: .*
Priority: 1
SortPriority: 0
CaseSensitive: false
IncludeIsMainRegex: ""
IncludeIsMainSourceRegex: ""
IndentAccessModifiers: false
IndentCaseBlocks: false
IndentCaseLabels: true
IndentExternBlock: NoIndent
IndentGotoLabels: false
IndentPPDirectives: None
IndentRequiresClause: false
IndentWidth: 2
IndentWrappedFunctionNames: true
InsertBraces: true
InsertNewlineAtEOF: true
InsertTrailingCommas: None
IntegerLiteralSeparator:
Binary: 0
BinaryMinDigits: 0
Decimal: 0
DecimalMinDigits: 0
Hex: 0
HexMinDigits: 0
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtEOF: false
KeepEmptyLinesAtTheStartOfBlocks: true
LambdaBodyIndentation: Signature
Language: Cpp
LineEnding: LF
MacroBlockBegin: ""
MacroBlockEnd: ""
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCBinPackProtocolList: Auto
ObjCBlockIndentWidth: 2
ObjCBreakBeforeNestedBlockParam: true
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: true
PPIndentWidth: -1
PackConstructorInitializers: BinPack
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 19
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakOpenParenthesis: 0
PenaltyBreakScopeResolution: 500
PenaltyBreakString: 1000
PenaltyBreakTemplateDeclaration: 10
PenaltyExcessCharacter: 1000000
PenaltyIndentedWhitespace: 0
PenaltyReturnTypeOnItsOwnLine: 60
PointerAlignment: Right
QualifierAlignment: Leave
ReferenceAlignment: Pointer
ReflowComments: false
RemoveBracesLLVM: false
RemoveParentheses: Leave
RemoveSemicolon: false
RequiresClausePosition: OwnLine
RequiresExpressionIndentation: OuterScope
SeparateDefinitionBlocks: Leave
ShortNamespaceLines: 1
SkipMacroDefinitionBody: false
SortIncludes: Never
SortJavaStaticImport: Before
SortUsingDeclarations: LexicographicNumeric
SpaceAfterCStyleCast: false
SpaceAfterLogicalNot: false
SpaceAfterTemplateKeyword: false
SpaceAroundPointerQualifiers: Default
SpaceBeforeAssignmentOperators: true
SpaceBeforeCaseColon: false
SpaceBeforeCpp11BracedList: false
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpaceBeforeJsonColon: false
SpaceBeforeParens: ControlStatements
SpaceBeforeParensOptions:
AfterControlStatements: true
AfterForeachMacros: true
AfterFunctionDeclarationName: false
AfterFunctionDefinitionName: false
AfterIfMacros: true
AfterOverloadedOperator: true
AfterPlacementOperator: true
AfterRequiresInClause: false
AfterRequiresInExpression: false
BeforeNonEmptyParentheses: false
SpaceBeforeRangeBasedForLoopColon: true
SpaceBeforeSquareBrackets: false
SpaceInEmptyBlock: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: Never
SpacesInContainerLiterals: false
SpacesInLineCommentPrefix:
Minimum: 1
Maximum: -1
SpacesInParens: Never
SpacesInParensOptions:
InConditionalStatements: false
InCStyleCasts: false
InEmptyParentheses: false
Other: false
SpacesInSquareBrackets: false
Standard: Auto
StatementAttributeLikeMacros:
- Q_EMIT
StatementMacros:
- Q_UNUSED
- QT_REQUIRE_VERSION
TabWidth: 2
UseTab: Never
VerilogBreakBetweenInstancePorts: true
WhitespaceSensitiveMacros:
- BOOST_PP_STRINGIZE
- CF_SWIFT_NAME
- NS_SWIFT_NAME
- PP_STRINGIZE
- STRINGIZE
BracedInitializerIndentWidth: 2
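
The block above is the complete .clang-format removed by this compare. As an illustration only (not taken from the diff), this is roughly how such a style file gets applied in a checkout; it assumes clang-format 18.x is installed, matching the "Clang format version: 18.1.3" note, and limits itself to C/C++ sources under cores/ and libraries/:

# Hypothetical invocation, not part of the repository's CI.
# --style=file picks up the nearest .clang-format; -i rewrites files in place.
find cores libraries \( -name '*.c' -o -name '*.cpp' -o -name '*.h' \) -print0 \
  | xargs -0 clang-format --style=file -i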


@ -1,8 +0,0 @@
[codespell]
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/spell-check/.codespellrc
# In the event of a false positive, add the problematic word, in all lowercase, to a comma-separated list here:
ignore-words-list = ba,licence,ot,dout,als,exten,emac
skip = ./.git,./.licenses,__pycache__,.clang-format,.codespellrc,.editorconfig,.flake8,.prettierignore,.yamllint.yml,.gitignore,boards.txt,platform.txt,programmers.txt
builtin = clear,informal,en-GB_to_en-US
check-filenames =
check-hidden =
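
For context, a local run mirroring this configuration could look like the sketch below. The flags are standard codespell options, but the exact invocation is an assumption (and the skip list is abridged):

# Hypothetical local spell check (requires: pip install codespell)
codespell \
  --ignore-words-list "ba,licence,ot,dout,als,exten,emac" \
  --skip "./.git,./.licenses,__pycache__,boards.txt,platform.txt,programmers.txt" \
  --builtin "clear,informal,en-GB_to_en-US" \
  --check-filenames --check-hidden .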


@ -1,60 +0,0 @@
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/general/.editorconfig
# See: https://editorconfig.org/
# The formatting style defined in this file is the official standardized style to be used in all Arduino Tooling
# projects and should not be modified.
# Note: indent style for each file type is defined even when it matches the universal config in order to make it clear
# that this type has an official style.
[*]
charset = utf-8
end_of_line = lf
indent_size = 2
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
[*.{adoc,asc,asciidoc}]
indent_size = 2
indent_style = space
[*.{bash,sh}]
indent_size = 4
indent_style = space
[*.{c,cc,cp,cpp,cxx,h,hh,hpp,hxx,ii,inl,ino,ixx,pde,tpl,tpp,txx}]
indent_size = 2
indent_style = space
[*.{go,mod}]
indent_style = tab
[*.java]
indent_size = 2
indent_style = space
[*.{js,jsx,json,jsonc,json5,ts,tsx}]
indent_size = 2
indent_style = space
[*.{md,mdx,mkdn,mdown,markdown}]
indent_size = unset
indent_style = space
[*.proto]
indent_size = 2
indent_style = space
[*.py]
indent_size = 4
indent_style = space
[*.svg]
indent_size = 2
indent_style = space
[*.{yaml,yml}]
indent_size = 2
indent_style = space
[{.gitconfig,.gitmodules}]
indent_style = tab

.flake8

@ -1,10 +0,0 @@
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/check-python/.flake8
# See: https://flake8.pycqa.org/en/latest/user/configuration.html
[flake8]
doctests = True
# W503 and W504 are mutually exclusive. PEP 8 recommends line break before.
ignore = W503,E203
max-complexity = 20
max-line-length = 120
select = E,W,F,C,N
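
As a minimal usage sketch (an assumption, not part of the removed file): flake8 discovers this configuration automatically when run from the directory containing the .flake8 file, so no extra flags are needed:

# Hypothetical local lint run; equivalent to passing --max-line-length 120 --max-complexity 20
# --select E,W,F,C,N --ignore W503,E203 --doctests on the command line.
pip install flake8
flake8 .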

.github/CODEOWNERS

@ -1,81 +0,0 @@
# CODEOWNERS for ESP32 Arduino Core
# This file is used to specify the code owners for the ESP32 Arduino Core.
# Read more about CODEOWNERS:
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners
# Note that order matters. The last matching pattern will be used.
# The default owners are the active developers of the ESP32 Arduino Core.
# Refrain from using @espressif/arduino-esp32 to avoid spamming non-developers with review requests.
* @espressif/arduino-devs
# CI
/.github/ @lucasssvaz @me-no-dev @P-R-O-C-H-Y
/.github/codeql/ @lucasssvaz
/.gitlab/ @lucasssvaz
/tests/ @lucasssvaz @P-R-O-C-H-Y
# Tools
/tools/ @me-no-dev
/tools/pre-commit/ @lucasssvaz
/tools/add_lib.sh @P-R-O-C-H-Y
# Pre-commit
/.* @lucasssvaz # Files in root directory that start with a dot.
# Git Files
/.gitignore @espressif/arduino-devs
/.gitmodules @espressif/arduino-devs
# Documentation
/docs/ @pedrominatel
/.github/ISSUE_TEMPLATE/ @pedrominatel
/.github/PULL_REQUEST_TEMPLATE.md @pedrominatel
/.readthedocs.yaml @pedrominatel
/*.md @pedrominatel
# Boards
/variants/ @P-R-O-C-H-Y
/boards.txt @P-R-O-C-H-Y
# Arduino as Component
/idf_component_examples/ @SuGlider
/idf_component.yml @SuGlider @me-no-dev
/CMakeLists.txt @SuGlider @me-no-dev
/Kconfig.projbuild @SuGlider @me-no-dev
# Build System
/package.json @me-no-dev
/platform.txt @me-no-dev
/programmers.txt @me-no-dev
/package/ @me-no-dev
# Libraries
/libraries/ArduinoOTA/ @me-no-dev
/libraries/AsyncUDP/ @me-no-dev
/libraries/BLE/ @lucasssvaz @SuGlider
/libraries/ESP_I2S/ @me-no-dev
/libraries/ESP_NOW/ @P-R-O-C-H-Y @lucasssvaz
/libraries/ESP_SR/ @me-no-dev
/libraries/ESPmDNS/ @me-no-dev
/libraries/Ethernet/ @me-no-dev
/libraries/Matter/ @SuGlider
/libraries/NetBIOS/ @me-no-dev
/libraries/Network/ @me-no-dev
/libraries/OpenThread/ @SuGlider
/libraries/PPP/ @me-no-dev
/libraries/SPI/ @me-no-dev
/libraries/Update/ @me-no-dev
/libraries/USB/ @SuGlider @me-no-dev
/libraries/WiFi/ @me-no-dev
/libraries/WiFiProv/ @me-no-dev
/libraries/Wire/ @me-no-dev
/libraries/Zigbee/ @P-R-O-C-H-Y
# CI JSON
# Keep this after other libraries and tests to avoid being overridden.
**/ci.json @lucasssvaz
# The CODEOWNERS file should be owned by the developers of the ESP32 Arduino Core.
# Leave this entry as the last one to avoid being overridden.
/.github/CODEOWNERS @espressif/arduino-devs


@ -5,7 +5,6 @@ body:
 - type: markdown
 attributes:
 value: |
-* Please note that we can only process feature requests reported in English to ensure effective communication and support. Feature requests written in other languages will be closed, with a request to rewrite them in English.
 * We welcome any ideas or feature requests! It is helpful if you can explain exactly why the feature would be useful.
 * There are usually some outstanding feature requests in the [existing issues list](https://github.com/espressif/arduino-esp32/issues?q=is%3Aopen+is%3Aissue+label%3A%22Type%3A+Feature+request%22), feel free to add comments to them.
 * If you would like to contribute, please read the [contributions guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/contributing.html).


@ -5,7 +5,6 @@ body:
 - type: markdown
 attributes:
 value: |
-* Please note that we can only process issues reported in English to ensure effective communication and support. Issues written in other languages will be closed, with a request to rewrite them in English.
 * Before reporting a new issue please check and search in [List of existing issues](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue)
 * Please check [Online Documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/index.html)
 * Take a look on [Troubleshooting guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/troubleshooting.html)
@ -40,27 +39,8 @@
 label: Version
 description: What version of Arduino ESP32 are you running? If possible, consider updating to the latest version.
 options:
-- latest stable Release (if not listed below)
-- latest development Release Candidate (RC-X)
 - latest master (checkout manually)
-- v3.3.0
-- v3.2.1
-- v3.2.0
-- v3.1.3
-- v3.1.2
-- v3.1.1
-- v3.1.0
-- v3.0.7
-- v3.0.6
-- v3.0.5
-- v3.0.4
-- v3.0.3
-- v3.0.2
-- v3.0.1
-- v3.0.0
-- v2.0.17
-- v2.0.16
-- v2.0.15
+- latest development Release Candidate (RC-X)
 - v2.0.14
 - v2.0.13
 - v2.0.12
@ -80,23 +60,12 @@
 - other
 validations:
 required: true
-- type: dropdown
-id: type
-attributes:
-label: Type
-description: How would you define the type of the issue? Please select from the types below.
-options:
-- Task
-- Bug
-- Question
-validations:
-required: true
 - type: input
 id: IDE
 attributes:
 label: IDE Name
 description: What IDE are you using?
-placeholder: eg. Arduino IDE, VSCode, Sloeber...
+placeholder: eg. Arduino IDE, PlatformIO, Sloeber...
 validations:
 required: true
 - type: input
@ -121,8 +90,8 @@
 label: PSRAM enabled
 description: Is PSRAM enabled?
 options:
-- "yes"
-- "no"
+- 'yes'
+- 'no'
 validations:
 required: true
 - type: input
@ -137,8 +106,8 @@
 id: Description
 attributes:
 label: Description
-description: Please describe your problem here and expected behavior
-placeholder: ex. Can't connect/weird behavior/wrong function/missing parameter..
+description: Please describe your problem here and expected behaviour
+placeholder: ex. Can't connect/weird behaviour/wrong function/missing parameter..
 validations:
 required: true
 - type: textarea


@ -1,5 +1,8 @@
 blank_issues_enabled: false
 contact_links:
-- name: Arduino Core for Espressif Discord Server
-url: https://discord.gg/8xY6e9crwv
-about: Community Discord server for questions and help
+- name: Arduino ESP32 Gitter Channel
+url: https://gitter.im/espressif/arduino-esp32
+about: Community channel for questions and help
+- name: ESP32 Forum - Arduino
+url: https://esp32.com/viewforum.php?f=19
+about: Official Forum for questions


@ -1,26 +0,0 @@
name: "CodeQL config"
packs:
- trailofbits/cpp-queries
- githubsecuritylab/codeql-cpp-queries
- githubsecuritylab/codeql-python-queries
queries:
- uses: security-extended
- uses: security-and-quality
query-filters:
- exclude:
query path:
- /^experimental\/.*/
- exclude:
tags contain:
- experimental
- exclude:
problem.severity:
- recommendation
- exclude:
id: tob/cpp/use-of-legacy-algorithm
paths-ignore:
- tests/**


@ -1,5 +1,4 @@
 #!/bin/bash
 #
 # This script is used in the CI workflow. It checks all non-examples source files in libraries/ and cores/ are listed in
 # CMakeLists.txt for the cmake-based IDF component
@ -13,10 +12,10 @@ set -e
 git submodule update --init --recursive
 # find all source files in repo
-REPO_SRCS=$(find cores/esp32/ libraries/ -name 'examples' -prune -o -name '*.c' -print -o -name '*.cpp' -print | sort)
+REPO_SRCS=`find cores/esp32/ libraries/ -name 'examples' -prune -o -name '*.c' -print -o -name '*.cpp' -print | sort`
 # find all source files named in CMakeLists.txt COMPONENT_SRCS
-CMAKE_SRCS=$(cmake --trace-expand -P CMakeLists.txt 2>&1 | grep set\(srcs | cut -d'(' -f3 | sed 's/ )//' | sed 's/srcs //' | tr ' ;' '\n' | sort)
+CMAKE_SRCS=`cmake --trace-expand -P CMakeLists.txt 2>&1 | grep set\(srcs | cut -d'(' -f3 | sed 's/ )//' | sed 's/srcs //' | tr ' ;' '\n' | sort`
 if ! diff -u0 --label "Repo Files" --label "srcs" <(echo "$REPO_SRCS") <(echo "$CMAKE_SRCS"); then
 echo "Source files in repo (-) and source files in CMakeLists.txt (+) don't match"


@ -3,37 +3,33 @@
 # Get all boards
 boards_array=()
-boards_list=$(grep '.tarch=' boards.txt)
-while read -r line; do
+for line in `grep '.tarch=' boards.txt`; do
 board_name=$(echo "$line" | cut -d '.' -f1 | cut -d '#' -f1)
-# skip esp32c2 as we dont build libs for it
-if [ "$board_name" == "esp32c2" ]; then
-echo "Skipping 'espressif:esp32:$board_name'"
-continue
-fi
 boards_array+=("espressif:esp32:$board_name")
 echo "Added 'espressif:esp32:$board_name' to array"
-done <<< "$boards_list"
+done
 # Create JSON like string with all boards found and pass it to env variable
 board_count=${#boards_array[@]}
 echo "Boards found: $board_count"
-echo "BOARD-COUNT=$board_count" >> "$GITHUB_ENV"
-if [ "$board_count" -gt 0 ]; then
+echo "BOARD-COUNT=$board_count" >> $GITHUB_ENV
+if [ $board_count -gt 0 ]
+then
 json_matrix='['
-for board in "${boards_array[@]}"; do
+for board in ${boards_array[@]}
+do
 json_matrix+='"'$board'"'
-if [ "$board_count" -gt 1 ]; then
+if [ $board_count -gt 1 ]
+then
 json_matrix+=","
 fi
-board_count=$((board_count - 1))
+board_count=$(($board_count - 1))
 done
 json_matrix+=']'
-echo "$json_matrix"
-echo "FQBNS=${json_matrix}" >> "$GITHUB_ENV"
+echo $json_matrix
+echo "FQBNS=${json_matrix}" >> $GITHUB_ENV
 else
-echo "FQBNS=" >> "$GITHUB_ENV"
+echo "FQBNS=" >> $GITHUB_ENV
 fi
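
Both sides of this hunk build the same thing: a JSON-style array of FQBNs that ends up in $GITHUB_ENV as FQBNS for the workflow matrix. A standalone sketch of that string construction (board names reuse FQBNs that appear elsewhere in this compare) is:

# Illustration only: the FQBNS value produced for a two-board list.
boards_array=("espressif:esp32:esp32" "espressif:esp32:esp32s3")
board_count=${#boards_array[@]}
json_matrix='['
for board in "${boards_array[@]}"; do
  json_matrix+='"'$board'"'
  if [ "$board_count" -gt 1 ]; then json_matrix+=","; fi
  board_count=$((board_count - 1))
done
json_matrix+=']'
echo "$json_matrix"   # prints ["espressif:esp32:esp32","espressif:esp32:esp32s3"]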


@ -2,61 +2,89 @@
 # Get inputs from command
 owner_repository=$1
-base_ref=$2
-# Download the boards.txt file from the base branch
-curl -L -o boards_base.txt https://raw.githubusercontent.com/"$owner_repository"/"$base_ref"/boards.txt
-# Compare boards.txt file in the repo with the modified file from PR
-diff=$(diff -u boards_base.txt boards.txt)
-# Check if the diff is empty
-if [ -z "$diff" ]; then
-echo "No changes in boards.txt file"
-echo "FQBNS="
-exit 0
-fi
-# Extract added or modified lines (lines starting with '+' or '-')
-modified_lines=$(echo "$diff" | grep -E '^[+-][^+-]')
-# Print the modified lines for debugging
-echo "Modified lines:"
-echo "$modified_lines"
+pr_number=$2
+url="https://api.github.com/repos/$owner_repository/pulls/$pr_number/files"
+echo $url
+# Get changes in boards.txt file from PR
+Patch=$(curl $url | jq -r '.[] | select(.filename == "boards.txt") | .patch ')
+# Extract only changed lines number and count
+substring_patch=$(echo "$Patch" | grep -o '@@[^@]*@@')
+params_array=()
+IFS=$'\n' read -d '' -ra params <<< $(echo "$substring_patch" | grep -oE '[-+][0-9]+,[0-9]+')
+for param in "${params[@]}"
+do
+echo "The parameter is $param"
+params_array+=("$param")
+done
 boards_array=()
 previous_board=""
-# Extract board names from the modified lines, and add them to the boards_array
-while read -r line; do
+file="boards.txt"
+# Loop through boards.txt file and extract all boards that were added
+for (( c=0; c<${#params_array[@]}; c+=2 ))
+do
+deletion_count=$( echo "${params_array[c]}" | cut -d',' -f2 | cut -d' ' -f1 )
+addition_line=$( echo "${params_array[c+1]}" | cut -d'+' -f2 | cut -d',' -f1 )
+addition_count=$( echo "${params_array[c+1]}" | cut -d'+' -f2 | cut -d',' -f2 | cut -d' ' -f1 )
+addition_end=$(($addition_line+$addition_count))
+addition_line=$(($addition_line + 3))
+addition_end=$(($addition_end - $deletion_count))
+echo $addition_line
+echo $addition_end
+i=0
+while read -r line
+do
+i=$((i+1))
+if [ $i -lt $addition_line ]
+then
+continue
+elif [ $i -gt $addition_end ]
+then
+break
+fi
 board_name=$(echo "$line" | cut -d '.' -f1 | cut -d '#' -f1)
-# remove + or - from the board name at the beginning
-board_name=${board_name#[-+]}
-if [ "$board_name" != "" ] && [ "$board_name" != "+" ] && [ "$board_name" != "-" ] && [ "$board_name" != "esp32_family" ]; then
-if [ "$board_name" != "$previous_board" ]; then
+if [ "$board_name" != "" ]
+then
+if [ "$board_name" != "$previous_board" ]
+then
 boards_array+=("espressif:esp32:$board_name")
 previous_board="$board_name"
 echo "Added 'espressif:esp32:$board_name' to array"
 fi
 fi
-done <<< "$modified_lines"
+done < "$file"
+done
 # Create JSON like string with all boards found and pass it to env variable
 board_count=${#boards_array[@]}
-if [ "$board_count" -gt 0 ]; then
+if [ $board_count -gt 0 ]
+then
 json_matrix='{"fqbn": ['
-for board in "${boards_array[@]}"; do
+for board in ${boards_array[@]}
+do
 json_matrix+='"'$board'"'
-if [ "$board_count" -gt 1 ]; then
+if [ $board_count -gt 1 ]
+then
 json_matrix+=","
 fi
-board_count=$((board_count - 1))
+board_count=$(($board_count - 1))
 done
 json_matrix+=']}'
-echo "$json_matrix"
-echo "FQBNS=${json_matrix}" >> "$GITHUB_ENV"
+echo $json_matrix
+echo "FQBNS=${json_matrix}" >> $GITHUB_ENV
 else
-echo "FQBNS=" >> "$GITHUB_ENV"
+echo "FQBNS=" >> $GITHUB_ENV
 fi


@ -1,6 +1,6 @@
 #!/bin/bash
-OSBITS=$(uname -m)
+OSBITS=`arch`
 if [[ "$OSTYPE" == "linux"* ]]; then
 export OS_IS_LINUX="1"
 if [[ "$OSBITS" == "i686" ]]; then
@ -41,11 +41,6 @@ fi
 if [ ! -d "$ARDUINO_IDE_PATH" ] || [ ! -f "$ARDUINO_IDE_PATH/arduino-cli" ]; then
 echo "Installing Arduino CLI on $OS_NAME ..."
 mkdir -p "$ARDUINO_IDE_PATH"
-if [ "$OS_IS_WINDOWS" == "1" ]; then
-curl -fsSL https://downloads.arduino.cc/arduino-cli/arduino-cli_latest_Windows_64bit.zip -o arduino-cli.zip
-unzip -q arduino-cli.zip -d "$ARDUINO_IDE_PATH"
-rm arduino-cli.zip
-else
 curl -fsSL https://raw.githubusercontent.com/arduino/arduino-cli/master/install.sh | BINDIR="$ARDUINO_IDE_PATH" sh
 fi
-fi


@ -5,7 +5,7 @@ if [ ! -d "$ARDUINO_ESP32_PATH" ]; then
 echo "Installing ESP32 Arduino Core ..."
 script_init_path="$PWD"
 mkdir -p "$ARDUINO_USR_PATH/hardware/espressif"
-cd "$ARDUINO_USR_PATH/hardware/espressif" || exit
+cd "$ARDUINO_USR_PATH/hardware/espressif"
 echo "Installing Python Serial ..."
 pip install pyserial > /dev/null
@ -15,25 +15,21 @@ if [ ! -d "$ARDUINO_ESP32_PATH" ]; then
 pip install requests > /dev/null
 fi
-if [ -n "$GITHUB_REPOSITORY" ]; then
+if [ ! -z "$GITHUB_REPOSITORY" ]; then
 echo "Linking Core..."
-ln -s "$GITHUB_WORKSPACE" esp32
+ln -s $GITHUB_WORKSPACE esp32
 else
 echo "Cloning Core Repository..."
 git clone https://github.com/espressif/arduino-esp32.git esp32 > /dev/null 2>&1
 fi
 #echo "Updating Submodules ..."
-cd esp32 || exit
+cd esp32
 #git submodule update --init --recursive > /dev/null 2>&1
 echo "Installing Platform Tools ..."
-if [ "$OS_IS_WINDOWS" == "1" ]; then
-cd tools && ./get.exe
-else
 cd tools && python get.py
-fi
-cd "$script_init_path" || exit
+cd $script_init_path
 echo "ESP32 Arduino has been installed in '$ARDUINO_ESP32_PATH'"
 echo ""


@ -4,7 +4,7 @@
 #OSTYPE: 'msys', ARCH: 'x86_64' => win32
 #OSTYPE: 'darwin18', ARCH: 'i386' => macos
-OSBITS=$(uname -m)
+OSBITS=`arch`
 if [[ "$OSTYPE" == "linux"* ]]; then
 export OS_IS_LINUX="1"
 ARCHIVE_FORMAT="tar.xz"
@ -77,3 +77,4 @@ if [ ! -d "$ARDUINO_IDE_PATH" ]; then
 echo "Arduino IDE Installed in '$ARDUINO_IDE_PATH'"
 echo ""
 fi

.github/scripts/install-platformio-esp32.sh

@ -0,0 +1,180 @@
#!/bin/bash
export PLATFORMIO_ESP32_PATH="$HOME/.platformio/packages/framework-arduinoespressif32"
PLATFORMIO_ESP32_URL="https://github.com/platformio/platform-espressif32.git"
TOOLCHAIN_VERSION="12.2.0+20230208"
ESPTOOLPY_VERSION="~1.40501.0"
ESPRESSIF_ORGANIZATION_NAME="espressif"
echo "Installing Python Wheel ..."
pip install wheel > /dev/null 2>&1
echo "Installing PlatformIO ..."
pip install -U https://github.com/platformio/platformio/archive/master.zip > /dev/null 2>&1
echo "Installing Platform ESP32 ..."
python -m platformio platform install $PLATFORMIO_ESP32_URL > /dev/null 2>&1
echo "Replacing the package versions ..."
replace_script="import json; import os;"
replace_script+="fp=open(os.path.expanduser('~/.platformio/platforms/espressif32/platform.json'), 'r+');"
replace_script+="data=json.load(fp);"
# Use framework sources from the repository
replace_script+="data['packages']['framework-arduinoespressif32']['version'] = '*';"
replace_script+="del data['packages']['framework-arduinoespressif32']['owner'];"
# Use toolchain packages from the "espressif" organization
replace_script+="data['packages']['toolchain-xtensa-esp32']['owner']='$ESPRESSIF_ORGANIZATION_NAME';"
replace_script+="data['packages']['toolchain-xtensa-esp32s2']['owner']='$ESPRESSIF_ORGANIZATION_NAME';"
replace_script+="data['packages']['toolchain-riscv32-esp']['owner']='$ESPRESSIF_ORGANIZATION_NAME';"
# Update versions to use the upstream
replace_script+="data['packages']['toolchain-xtensa-esp32']['version']='$TOOLCHAIN_VERSION';"
replace_script+="data['packages']['toolchain-xtensa-esp32s2']['version']='$TOOLCHAIN_VERSION';"
replace_script+="data['packages']['toolchain-xtensa-esp32s3']['version']='$TOOLCHAIN_VERSION';"
replace_script+="data['packages']['toolchain-riscv32-esp']['version']='$TOOLCHAIN_VERSION';"
# Add new "framework-arduinoespressif32-libs" package
# Read "package_esp32_index.template.json" to extract a url to a zip package for "esp32-arduino-libs"
replace_script+="fpackage=open(os.path.join('package', 'package_esp32_index.template.json'), 'r+');"
replace_script+="package_data=json.load(fpackage);"
replace_script+="fpackage.close();"
replace_script+="libs_package_archive_url=next(next(system['url'] for system in tool['systems'] if system['host'] == 'x86_64-pc-linux-gnu') for tool in package_data['packages'][0]['tools'] if tool['name'] == 'esp32-arduino-libs');"
replace_script+="data['packages'].update({'framework-arduinoespressif32-libs':{'type':'framework','optional':False,'version':libs_package_archive_url}});"
replace_script+="data['packages']['toolchain-xtensa-esp32'].update({'optional':False});"
# esptool.py may require an upstream version (for now platformio is the owner)
replace_script+="data['packages']['tool-esptoolpy']['version']='$ESPTOOLPY_VERSION';"
# Save results
replace_script+="fp.seek(0);fp.truncate();json.dump(data, fp, indent=2);fp.close()"
python -c "$replace_script"
if [ "$GITHUB_REPOSITORY" == "espressif/arduino-esp32" ]; then
echo "Linking Core..."
ln -s $GITHUB_WORKSPACE "$PLATFORMIO_ESP32_PATH"
else
echo "Cloning Core Repository ..."
git clone --recursive https://github.com/espressif/arduino-esp32.git "$PLATFORMIO_ESP32_PATH" > /dev/null 2>&1
fi
echo "PlatformIO for ESP32 has been installed"
echo ""
function build_pio_sketch(){ # build_pio_sketch <board> <options> <path-to-ino>
if [ "$#" -lt 3 ]; then
echo "ERROR: Illegal number of parameters"
echo "USAGE: build_pio_sketch <board> <options> <path-to-ino>"
return 1
fi
local board="$1"
local options="$2"
local sketch="$3"
local sketch_dir=$(dirname "$sketch")
echo ""
echo "Compiling '"$(basename "$sketch")"' ..."
python -m platformio ci --board "$board" "$sketch_dir" --project-option="$options"
}
function count_sketches(){ # count_sketches <examples-path>
local examples="$1"
rm -rf sketches.txt
if [ ! -d "$examples" ]; then
touch sketches.txt
return 0
fi
local sketches=$(find $examples -name *.ino)
local sketchnum=0
for sketch in $sketches; do
local sketchdir=$(dirname $sketch)
local sketchdirname=$(basename $sketchdir)
local sketchname=$(basename $sketch)
if [[ "${sketchdirname}.ino" != "$sketchname" ]]; then
continue
fi
if [[ -f "$sketchdir/.test.skip" ]]; then
continue
fi
echo $sketch >> sketches.txt
sketchnum=$(($sketchnum + 1))
done
return $sketchnum
}
function build_pio_sketches(){ # build_pio_sketches <board> <options> <examples-path> <chunk> <total-chunks>
if [ "$#" -lt 3 ]; then
echo "ERROR: Illegal number of parameters"
echo "USAGE: build_pio_sketches <board> <options> <examples-path> [<chunk> <total-chunks>]"
return 1
fi
local board=$1
local options="$2"
local examples=$3
local chunk_idex=$4
local chunks_num=$5
if [ "$#" -lt 5 ]; then
chunk_idex="0"
chunks_num="1"
fi
if [ "$chunks_num" -le 0 ]; then
echo "ERROR: Chunks count must be positive number"
return 1
fi
if [ "$chunk_idex" -ge "$chunks_num" ]; then
echo "ERROR: Chunk index must be less than chunks count"
return 1
fi
set +e
count_sketches "$examples"
local sketchcount=$?
set -e
local sketches=$(cat sketches.txt)
rm -rf sketches.txt
local chunk_size=$(( $sketchcount / $chunks_num ))
local all_chunks=$(( $chunks_num * $chunk_size ))
if [ "$all_chunks" -lt "$sketchcount" ]; then
chunk_size=$(( $chunk_size + 1 ))
fi
local start_index=$(( $chunk_idex * $chunk_size ))
if [ "$sketchcount" -le "$start_index" ]; then
echo "Skipping job"
return 0
fi
local end_index=$(( $(( $chunk_idex + 1 )) * $chunk_size ))
if [ "$end_index" -gt "$sketchcount" ]; then
end_index=$sketchcount
fi
local start_num=$(( $start_index + 1 ))
echo "Found $sketchcount Sketches";
echo "Chunk Count : $chunks_num"
echo "Chunk Size : $chunk_size"
echo "Start Sketch: $start_num"
echo "End Sketch : $end_index"
local sketchnum=0
for sketch in $sketches; do
local sketchdir=$(dirname $sketch)
local sketchdirname=$(basename $sketchdir)
local sketchname=$(basename $sketch)
if [ "${sketchdirname}.ino" != "$sketchname" ] \
|| [ -f "$sketchdir/.test.skip" ]; then
continue
fi
sketchnum=$(($sketchnum + 1))
if [ "$sketchnum" -le "$start_index" ] \
|| [ "$sketchnum" -gt "$end_index" ]; then
continue
fi
build_pio_sketch "$board" "$options" "$sketch"
local result=$?
if [ $result -ne 0 ]; then
return $result
fi
done
return 0
}
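
The chunking arithmetic in build_pio_sketches above is easier to follow with concrete numbers; the following standalone sketch (hypothetical counts, not taken from the script) reproduces the same calculation:

# Worked example: splitting 23 sketches into 4 chunks and picking chunk index 2 (zero-based).
sketchcount=23; chunks_num=4; chunk_idex=2
chunk_size=$(( sketchcount / chunks_num ))                                                  # 5
if [ $(( chunks_num * chunk_size )) -lt "$sketchcount" ]; then chunk_size=$(( chunk_size + 1 )); fi  # 6
start_index=$(( chunk_idex * chunk_size ))                                                  # 12
end_index=$(( (chunk_idex + 1) * chunk_size ))                                              # 18
[ "$end_index" -gt "$sketchcount" ] && end_index=$sketchcount
echo "chunk $chunk_idex builds sketches $(( start_index + 1 ))..$end_index"                 # -> 13..18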


@ -1,55 +1,46 @@
 #!/usr/bin/env python
 # This script merges two Arduino Board Manager package json files.
 # Usage:
 # python merge_packages.py package_esp8266com_index.json version/new/package_esp8266com_index.json
 # Written by Ivan Grokhotkov, 2015
-# Updated by lucasssvaz to handle Chinese version sorting, 2025
 #
 from __future__ import print_function
-# from distutils.version import LooseVersion
-from packaging.version import Version
+from distutils.version import LooseVersion
 import re
 import json
 import sys
 def load_package(filename):
-pkg = json.load(open(filename))["packages"][0]
-print("Loaded package {0} from {1}".format(pkg["name"], filename), file=sys.stderr)
-print("{0} platform(s), {1} tools".format(len(pkg["platforms"]), len(pkg["tools"])), file=sys.stderr)
+pkg = json.load(open(filename))['packages'][0]
+print("Loaded package {0} from {1}".format(pkg['name'], filename), file=sys.stderr)
+print("{0} platform(s), {1} tools".format(len(pkg['platforms']), len(pkg['tools'])), file=sys.stderr)
 return pkg
 def merge_objects(versions, obj):
 for o in obj:
-name = o["name"].encode("ascii")
-ver = o["version"].encode("ascii")
-if name not in versions:
+name = o['name'].encode('ascii')
+ver = o['version'].encode('ascii')
+if not name in versions:
 print("found new object, {0}".format(name), file=sys.stderr)
 versions[name] = {}
-if ver not in versions[name]:
+if not ver in versions[name]:
 print("found new version {0} for object {1}".format(ver, name), file=sys.stderr)
 versions[name][ver] = o
 return versions
-# Normalize ESP release version string (x.x.x) by adding '-rc<MAXINT>' (x.x.x-rc9223372036854775807)
-# to ensure having REL above any RC. CN version will be sorted after the official version if they happen
-# to be mixed (normally, CN and non-CN versions should not be mixed)
-# Dummy approach, functional anyway for current ESP package versioning
-# (unlike NormalizedVersion/LooseVersion/StrictVersion & similar crap)
+# Normalize ESP release version string (x.x.x) by adding '-rc<MAXINT>' (x.x.x-rc9223372036854775807) to ensure having REL above any RC
+# Dummy approach, functional anyway for current ESP package versioning (unlike NormalizedVersion/LooseVersion/StrictVersion & similar crap)
 def pkgVersionNormalized(versionString):
-verStr = str(versionString).replace("-cn", "")
-verParts = re.split(r"\.|-rc|-alpha", verStr, flags=re.IGNORECASE)
+verStr = str(versionString)
+verParts = re.split('\.|-rc', verStr, flags=re.IGNORECASE)
 if len(verParts) == 3:
-if "-cn" in str(versionString):
-verStr = verStr + "-rc" + str(sys.maxsize // 2)
-else:
-verStr = verStr + "-rc" + str(sys.maxsize)
+if (sys.version_info > (3, 0)): # Python 3
+verStr = str(versionString) + '-rc' + str(sys.maxsize)
+else: # Python 2
+verStr = str(versionString) + '-rc' + str(sys.maxint)
 elif len(verParts) != 4:
 print("pkgVersionNormalized WARNING: unexpected version format: {0})".format(verStr), file=sys.stderr)
@ -64,35 +55,28 @@ def main(args):
 tools = {}
 platforms = {}
 pkg1 = load_package(args[1])
-tools = merge_objects(tools, pkg1["tools"])
-platforms = merge_objects(platforms, pkg1["platforms"])
+tools = merge_objects(tools, pkg1['tools']);
+platforms = merge_objects(platforms, pkg1['platforms']);
 pkg2 = load_package(args[2])
-tools = merge_objects(tools, pkg2["tools"])
-platforms = merge_objects(platforms, pkg2["platforms"])
-pkg1["tools"] = []
-pkg1["platforms"] = []
+tools = merge_objects(tools, pkg2['tools']);
+platforms = merge_objects(platforms, pkg2['platforms']);
+pkg1['tools'] = []
+pkg1['platforms'] = []
 for name in tools:
 for version in tools[name]:
 print("Adding tool {0}-{1}".format(name, version), file=sys.stderr)
-pkg1["tools"].append(tools[name][version])
+pkg1['tools'].append(tools[name][version])
 for name in platforms:
 for version in platforms[name]:
 print("Adding platform {0}-{1}".format(name, version), file=sys.stderr)
-pkg1["platforms"].append(platforms[name][version])
-# pkg1["platforms"] = sorted(
-# pkg1["platforms"], key=lambda k: LooseVersion(pkgVersionNormalized(k["version"])), reverse=True
-# )
-pkg1["platforms"] = sorted(
-pkg1["platforms"], key=lambda k: Version(pkgVersionNormalized(k["version"])), reverse=True
-)
-json.dump({"packages": [pkg1]}, sys.stdout, indent=2)
-if __name__ == "__main__":
+pkg1['platforms'].append(platforms[name][version])
+pkg1['platforms'] = sorted(pkg1['platforms'], key=lambda k: LooseVersion(pkgVersionNormalized(k['version'])), reverse=True)
+json.dump({'packages':[pkg1]}, sys.stdout, indent=2)
+if __name__ == '__main__':
 sys.exit(main(sys.argv))
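
Per the script's own docstring, both versions merge two board-manager index files and write the merged JSON to stdout; a usage sketch (the output file name here is illustrative) is:

# Merge an existing index with a freshly generated one; the merged JSON goes to stdout.
python merge_packages.py package_esp32_index.json version/new/package_esp32_index.json \
  > package_esp32_index.merged.json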


@ -1,13 +1,12 @@
-#!/bin/bash
+#/bin/bash
 set -e
-function get_file_size {
+function get_file_size(){
 local file="$1"
 if [[ "$OSTYPE" == "darwin"* ]]; then
-eval "$(stat -s "$file")"
+eval `stat -s "$file"`
 local res="$?"
-echo "${st_size:?}"
+echo "$st_size"
 return $res
 else
 stat --printf="%s" "$file"
@ -16,32 +15,25 @@ function get_file_size {
 }
 #git_remove_from_pages <file>
-function git_remove_from_pages {
+function git_remove_from_pages(){
 local path=$1
-local info
-local type
-local sha
-local message
-info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages")
-type=$(echo "$info" | jq -r '.type')
-if [ ! "$type" == "file" ]; then
-if [ ! "$type" == "null" ]; then
+local info=`curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages"`
+local type=`echo "$info" | jq -r '.type'`
+if [ ! $type == "file" ]; then
+if [ ! $type == "null" ]; then
 echo "Wrong type '$type'"
 else
 echo "File is not on Pages"
 fi
 return 0
 fi
-sha=$(echo "$info" | jq -r '.sha')
-message="Deleting "$(basename "$path")
+local sha=`echo "$info" | jq -r '.sha'`
+local message="Deleting "$(basename $path)
 local json="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"sha\":\"$sha\"}"
 echo "$json" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X DELETE --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
 }
-function git_upload_to_pages {
+function git_upload_to_pages(){
 local path=$1
 local src=$2
@ -50,50 +42,41 @@ function git_upload_to_pages {
 return 1
 fi
-local info
-local type
-local message
+local info=`curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages"`
+local type=`echo "$info" | jq -r '.type'`
+local message=$(basename $path)
 local sha=""
 local content=""
-info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages")
-type=$(echo "$info" | jq -r '.type')
-message=$(basename "$path")
-if [ "$type" == "file" ]; then
-sha=$(echo "$info" | jq -r '.sha')
+if [ $type == "file" ]; then
+sha=`echo "$info" | jq -r '.sha'`
 sha=",\"sha\":\"$sha\""
 message="Updating $message"
-elif [ ! "$type" == "null" ]; then
+elif [ ! $type == "null" ]; then
 >&2 echo "Wrong type '$type'"
 return 1
 else
 message="Creating $message"
 fi
-content=$(base64 -i "$src")
+content=`base64 -i "$src"`
 data="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"content\":\"$content\"$sha}"
 echo "$data" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X PUT --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
 }
-function git_safe_upload_to_pages {
+function git_safe_upload_to_pages(){
 local path=$1
 local file="$2"
-local name
-local size
-local upload_res
-name=$(basename "$file")
-size=$(get_file_size "$file")
-if ! upload_res=$(git_upload_to_pages "$path" "$file"); then
+local name=$(basename "$file")
+local size=`get_file_size "$file"`
+local upload_res=`git_upload_to_pages "$path" "$file"`
+if [ $? -ne 0 ]; then
 >&2 echo "ERROR: Failed to upload '$name' ($?)"
 return 1
 fi
-up_size=$(echo "$upload_res" | jq -r '.content.size')
-if [ "$up_size" -ne "$size" ]; then
->&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
-#git_delete_asset
+up_size=`echo "$upload_res" | jq -r '.content.size'`
+if [ $up_size -ne $size ]; then
+>&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
+#git_delete_asset
+return 1
 return 1


@ -1,33 +0,0 @@
#!/bin/bash
set -e
CHECK_REQUIREMENTS="./components/arduino-esp32/.github/scripts/sketch_utils.sh check_requirements"
# Export IDF environment
. ${IDF_PATH}/export.sh
# Find all examples in ./components/arduino-esp32/idf_component_examples
idf_component_examples=$(find ./components/arduino-esp32/idf_component_examples -mindepth 1 -maxdepth 1 -type d)
for example in $idf_component_examples; do
if [ -f "$example"/ci.json ]; then
# If the target is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$IDF_TARGET" '.targets[$target]' "$example"/ci.json)
if [[ "$is_target" == "false" ]]; then
printf "\n\033[93mSkipping %s for target %s\033[0m\n\n" "$example" "$IDF_TARGET"
continue
fi
fi
idf.py -C "$example" set-target "$IDF_TARGET"
has_requirements=$(${CHECK_REQUIREMENTS} "$example" "$example/sdkconfig")
if [ "$has_requirements" -eq 0 ]; then
printf "\n\033[93m%s does not meet the requirements for %s. Skipping...\033[0m\n\n" "$example" "$IDF_TARGET"
continue
fi
printf "\n\033[95mBuilding %s\033[0m\n\n" "$example"
idf.py -C "$example" -DEXTRA_COMPONENT_DIRS="$PWD/components" build
done


@ -4,45 +4,36 @@ set -e
 export ARDUINO_BUILD_DIR="$HOME/.arduino/build.tmp"
-function build {
+function build(){
 local target=$1
-local chunk_index=$2
-local chunks_cnt=$3
-local build_log=$4
-local log_level=${5:-none}
-local sketches_file=$6
-shift 6
-local sketches=("$@")
+local fqbn=$2
+local chunk_index=$3
+local chunks_cnt=$4
+shift; shift; shift; shift;
+local sketches=$*
 local BUILD_SKETCH="${SCRIPTS_DIR}/sketch_utils.sh build"
 local BUILD_SKETCHES="${SCRIPTS_DIR}/sketch_utils.sh chunk_build"
-local args=("-ai" "$ARDUINO_IDE_PATH" "-au" "$ARDUINO_USR_PATH" "-t" "$target")
+local args="-ai $ARDUINO_IDE_PATH -au $ARDUINO_USR_PATH"
+args+=" -t $target -fqbn $fqbn"
 if [ "$OS_IS_LINUX" == "1" ]; then
-args+=("-p" "$ARDUINO_ESP32_PATH/libraries" "-i" "$chunk_index" "-m" "$chunks_cnt" "-d" "$log_level")
-if [ -n "$sketches_file" ]; then
-args+=("-f" "$sketches_file")
-fi
-if [ "$build_log" -eq 1 ]; then
-args+=("-l" "$build_log")
-fi
-${BUILD_SKETCHES} "${args[@]}"
+args+=" -p $ARDUINO_ESP32_PATH/libraries"
+args+=" -i $chunk_index -m $chunks_cnt"
+${BUILD_SKETCHES} ${args}
 else
-for sketch in "${sketches[@]}"; do
-local sargs=("${args[@]}")
-local ctags_version
-local preprocessor_version
-sargs+=("-s" "$(dirname "$sketch")")
+for sketch in ${sketches}; do
+local sargs="$args -s $(dirname $sketch)"
 if [ "$OS_IS_WINDOWS" == "1" ] && [ -d "$ARDUINO_IDE_PATH/tools-builder" ]; then
-ctags_version=$(ls "$ARDUINO_IDE_PATH/tools-builder/ctags/")
-preprocessor_version=$(ls "$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/")
-sargs+=(
-"-prefs=runtime.tools.ctags.path=$ARDUINO_IDE_PATH/tools-builder/ctags/$ctags_version"
-"-prefs=runtime.tools.arduino-preprocessor.path=$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/$preprocessor_version"
-)
+local ctags_version=`ls "$ARDUINO_IDE_PATH/tools-builder/ctags/"`
+local preprocessor_version=`ls "$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/"`
+win_opts="-prefs=runtime.tools.ctags.path=$ARDUINO_IDE_PATH/tools-builder/ctags/$ctags_version
+-prefs=runtime.tools.arduino-preprocessor.path=$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/$preprocessor_version"
+sargs+=" ${win_opts}"
 fi
-${BUILD_SKETCH} "${sargs[@]}"
+${BUILD_SKETCH} ${sargs}
 done
 fi
 }
@ -54,54 +45,61 @@ fi
 CHUNK_INDEX=$1
 CHUNKS_CNT=$2
-BUILD_LOG=$3
-LOG_LEVEL=$4
-SKETCHES_FILE=$5
+BUILD_PIO=0
 if [ "$#" -lt 2 ] || [ "$CHUNKS_CNT" -le 0 ]; then
 CHUNK_INDEX=0
 CHUNKS_CNT=1
 elif [ "$CHUNK_INDEX" -gt "$CHUNKS_CNT" ] && [ "$CHUNKS_CNT" -ge 2 ]; then
 CHUNK_INDEX=$CHUNKS_CNT
-fi
-if [ -z "$BUILD_LOG" ] || [ "$BUILD_LOG" -le 0 ]; then
-BUILD_LOG=0
-fi
+elif [ "$CHUNK_INDEX" -eq "$CHUNKS_CNT" ]; then
+BUILD_PIO=1
+fi
 #echo "Updating submodules ..."
 #git -C "$GITHUB_WORKSPACE" submodule update --init --recursive > /dev/null 2>&1
 SCRIPTS_DIR="./.github/scripts"
-source "${SCRIPTS_DIR}/install-arduino-cli.sh"
-source "${SCRIPTS_DIR}/install-arduino-core-esp32.sh"
-SKETCHES_ESP32=(
-"$ARDUINO_ESP32_PATH/libraries/NetworkClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino"
-"$ARDUINO_ESP32_PATH/libraries/BLE/examples/Server/Server.ino"
-"$ARDUINO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino"
-"$ARDUINO_ESP32_PATH/libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino"
-)
-#create sizes_file
-sizes_file="$GITHUB_WORKSPACE/cli_compile_$CHUNK_INDEX.json"
-if [ "$BUILD_LOG" -eq 1 ]; then
-#create sizes_file and echo start of JSON array with "boards" key
-echo "{\"boards\": [" > "$sizes_file"
-fi
-#build sketches for different targets
-build "esp32c5" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
-build "esp32p4" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
-build "esp32s3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
-build "esp32s2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
-build "esp32c3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
-build "esp32c6" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
-build "esp32h2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
-build "esp32" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
-if [ "$BUILD_LOG" -eq 1 ]; then
-#remove last comma from the last JSON object
-sed -i '$ s/,$//' "$sizes_file"
-#echo end of JSON array
-echo "]}" >> "$sizes_file"
+if [ "$BUILD_PIO" -eq 0 ]; then
+#source ${SCRIPTS_DIR}/install-arduino-ide.sh
+source ${SCRIPTS_DIR}/install-arduino-cli.sh
+source ${SCRIPTS_DIR}/install-arduino-core-esp32.sh
+FQBN_ESP32="espressif:esp32:esp32:PSRAM=enabled,PartitionScheme=huge_app"
+FQBN_ESP32S2="espressif:esp32:esp32s2:PSRAM=enabled,PartitionScheme=huge_app"
+FQBN_ESP32S3="espressif:esp32:esp32s3:PSRAM=opi,USBMode=default,PartitionScheme=huge_app"
+FQBN_ESP32C3="espressif:esp32:esp32c3:PartitionScheme=huge_app"
+FQBN_ESP32C6="espressif:esp32:esp32c6:PartitionScheme=huge_app"
+FQBN_ESP32H2="espressif:esp32:esp32h2:PartitionScheme=huge_app"
+SKETCHES_ESP32="\
+$ARDUINO_ESP32_PATH/libraries/WiFiClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino\
+$ARDUINO_ESP32_PATH/libraries/BLE/examples/Server/Server.ino\
+$ARDUINO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino\
+$ARDUINO_ESP32_PATH/libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino\
+"
+build "esp32s3" $FQBN_ESP32S3 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32
+build "esp32s2" $FQBN_ESP32S2 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32
+build "esp32c3" $FQBN_ESP32C3 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32
+build "esp32c6" $FQBN_ESP32C6 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32
+build "esp32h2" $FQBN_ESP32H2 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32
+build "esp32" $FQBN_ESP32 $CHUNK_INDEX $CHUNKS_CNT $SKETCHES_ESP32
+else
+source ${SCRIPTS_DIR}/install-platformio-esp32.sh
+# PlatformIO ESP32 Test
+BOARD="esp32dev"
+OPTIONS="board_build.partitions = huge_app.csv"
+build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/WiFi/examples/WiFiClient/WiFiClient.ino" && \
+build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/WiFiClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino" && \
+build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/BluetoothSerial/examples/SerialToSerialBT/SerialToSerialBT.ino" && \
+build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/BLE/examples/Server/Server.ino" && \
+build_pio_sketch "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino"
+# Basic sanity testing for other series
+for board in "esp32-c3-devkitm-1" "esp32-s2-saola-1" "esp32-s3-devkitc-1"
+do
+python -m platformio ci --board "$board" "$PLATFORMIO_ESP32_PATH/libraries/WiFi/examples/WiFiClient" --project-option="board_build.partitions = huge_app.csv"
+done
+#build_pio_sketches "$BOARD" "$OPTIONS" "$PLATFORMIO_ESP32_PATH/libraries"
 fi


@ -1,65 +1,52 @@
#!/bin/bash #!/bin/bash
# Disable shellcheck warning about using 'cat' to read a file.
# Disable shellcheck warning about using individual redirections for each command.
# Disable shellcheck warning about $? uses.
# shellcheck disable=SC2002,SC2129,SC2181,SC2319
if [ ! "$GITHUB_EVENT_NAME" == "release" ]; then if [ ! $GITHUB_EVENT_NAME == "release" ]; then
echo "Wrong event '$GITHUB_EVENT_NAME'!" echo "Wrong event '$GITHUB_EVENT_NAME'!"
exit 1 exit 1
fi fi
EVENT_JSON=$(cat "$GITHUB_EVENT_PATH") EVENT_JSON=`cat $GITHUB_EVENT_PATH`
action=$(echo "$EVENT_JSON" | jq -r '.action') action=`echo $EVENT_JSON | jq -r '.action'`
if [ ! "$action" == "published" ]; then if [ ! $action == "published" ]; then
echo "Wrong action '$action'. Exiting now..." echo "Wrong action '$action'. Exiting now..."
exit 0 exit 0
fi fi
draft=$(echo "$EVENT_JSON" | jq -r '.release.draft') draft=`echo $EVENT_JSON | jq -r '.release.draft'`
if [ "$draft" == "true" ]; then if [ $draft == "true" ]; then
echo "It's a draft release. Exiting now..." echo "It's a draft release. Exiting now..."
exit 0 exit 0
fi fi
RELEASE_PRE=$(echo "$EVENT_JSON" | jq -r '.release.prerelease') RELEASE_PRE=`echo $EVENT_JSON | jq -r '.release.prerelease'`
RELEASE_TAG=$(echo "$EVENT_JSON" | jq -r '.release.tag_name') RELEASE_TAG=`echo $EVENT_JSON | jq -r '.release.tag_name'`
RELEASE_BRANCH=$(echo "$EVENT_JSON" | jq -r '.release.target_commitish') RELEASE_BRANCH=`echo $EVENT_JSON | jq -r '.release.target_commitish'`
RELEASE_ID=$(echo "$EVENT_JSON" | jq -r '.release.id') RELEASE_ID=`echo $EVENT_JSON | jq -r '.release.id'`
SCRIPTS_DIR="./.github/scripts"
OUTPUT_DIR="$GITHUB_WORKSPACE/build" OUTPUT_DIR="$GITHUB_WORKSPACE/build"
PACKAGE_NAME="esp32-$RELEASE_TAG" PACKAGE_NAME="esp32-$RELEASE_TAG"
PACKAGE_JSON_MERGE="$GITHUB_WORKSPACE/.github/scripts/merge_packages.py" PACKAGE_JSON_MERGE="$GITHUB_WORKSPACE/.github/scripts/merge_packages.py"
PACKAGE_JSON_TEMPLATE="$GITHUB_WORKSPACE/package/package_esp32_index.template.json" PACKAGE_JSON_TEMPLATE="$GITHUB_WORKSPACE/package/package_esp32_index.template.json"
PACKAGE_JSON_DEV="package_esp32_dev_index.json" PACKAGE_JSON_DEV="package_esp32_dev_index.json"
PACKAGE_JSON_REL="package_esp32_index.json" PACKAGE_JSON_REL="package_esp32_index.json"
PACKAGE_JSON_DEV_CN="package_esp32_dev_index_cn.json"
PACKAGE_JSON_REL_CN="package_esp32_index_cn.json"
echo "Event: $GITHUB_EVENT_NAME, Repo: $GITHUB_REPOSITORY, Path: $GITHUB_WORKSPACE, Ref: $GITHUB_REF" echo "Event: $GITHUB_EVENT_NAME, Repo: $GITHUB_REPOSITORY, Path: $GITHUB_WORKSPACE, Ref: $GITHUB_REF"
echo "Action: $action, Branch: $RELEASE_BRANCH, ID: $RELEASE_ID" echo "Action: $action, Branch: $RELEASE_BRANCH, ID: $RELEASE_ID"
echo "Tag: $RELEASE_TAG, Draft: $draft, Pre-Release: $RELEASE_PRE" echo "Tag: $RELEASE_TAG, Draft: $draft, Pre-Release: $RELEASE_PRE"
# Try extracting something like a JSON with a "boards" array/element and "vendor" fields # Try extracting something like a JSON with a "boards" array/element and "vendor" fields
BOARDS=$(echo "$RELEASE_BODY" | grep -Pzo '(?s){.*}' | jq -r '.boards[]? // .boards? // empty' | xargs echo -n 2>/dev/null) BOARDS=`echo $RELEASE_BODY | grep -Pzo '(?s){.*}' | jq -r '.boards[]? // .boards? // empty' | xargs echo -n 2>/dev/null`
VENDOR=$(echo "$RELEASE_BODY" | grep -Pzo '(?s){.*}' | jq -r '.vendor? // empty' | xargs echo -n 2>/dev/null) VENDOR=`echo $RELEASE_BODY | grep -Pzo '(?s){.*}' | jq -r '.vendor? // empty' | xargs echo -n 2>/dev/null`
if ! [ -z "${BOARDS}" ]; then echo "Releasing board(s): $BOARDS" ; fi
if ! [ -z "${VENDOR}" ]; then echo "Setting packager: $VENDOR" ; fi
if [ -n "${BOARDS}" ]; then function get_file_size(){
echo "Releasing board(s): $BOARDS"
fi
if [ -n "${VENDOR}" ]; then
echo "Setting packager: $VENDOR"
fi
function get_file_size {
local file="$1" local file="$1"
if [[ "$OSTYPE" == "darwin"* ]]; then if [[ "$OSTYPE" == "darwin"* ]]; then
eval "$(stat -s "$file")" eval `stat -s "$file"`
local res="$?" local res="$?"
echo "${st_size:?}" echo "$st_size"
return $res return $res
else else
stat --printf="%s" "$file" stat --printf="%s" "$file"
@ -67,29 +54,23 @@ function get_file_size {
fi
}
function git_upload_asset {
local name
name=$(basename "$1")
# local mime=$(file -b --mime-type "$1")
curl -k -X POST -sH "Authorization: token $GITHUB_TOKEN" -H "Content-Type: application/octet-stream" --data-binary @"$1" "https://uploads.github.com/repos/$GITHUB_REPOSITORY/releases/$RELEASE_ID/assets?name=$name"
}
function git_safe_upload_asset {
local file="$1"
local name
local size
local upload_res
name=$(basename "$file")
size=$(get_file_size "$file")
if ! upload_res=$(git_upload_asset "$file"); then
>&2 echo "ERROR: Failed to upload '$name' ($?)"
return 1
fi
up_size=$(echo "$upload_res" | jq -r '.size')
if [ "$up_size" -ne "$size" ]; then
>&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
#git_delete_asset
return 1
@ -98,7 +79,7 @@ function git_safe_upload_asset {
return $?
}
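# Usage sketch (mirrors the call sites later in this script):
#   PACKAGE_URL=$(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_ZIP")
# The wrapper compares the size reported by the GitHub API with the local file size and
# returns non-zero on a mismatch, so callers can fail the release job early.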
function git_upload_to_pages {
local path=$1
local src=$2
@ -107,50 +88,41 @@ function git_upload_to_pages {
return 1
fi
local info
local type
local message
local sha=""
local content=""
info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages")
type=$(echo "$info" | jq -r '.type')
message=$(basename "$path")
if [ "$type" == "file" ]; then
sha=$(echo "$info" | jq -r '.sha')
sha=",\"sha\":\"$sha\""
message="Updating $message"
elif [ ! "$type" == "null" ]; then
>&2 echo "Wrong type '$type'"
return 1
else
message="Creating $message"
fi
content=$(base64 -i "$src")
data="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"content\":\"$content\"$sha}"
echo "$data" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X PUT --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
}
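# Usage sketch (the safe wrapper below calls it the same way):
#   git_upload_to_pages "package_esp32_dev_index.json" "$OUTPUT_DIR/package_esp32_dev_index.json"
# A PUT to the GitHub contents API creates the file on gh-pages, or updates it when an
# existing SHA was found by the GET request above.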
function git_safe_upload_to_pages {
local path=$1
local file="$2"
local name
local size
local upload_res
name=$(basename "$file")
size=$(get_file_size "$file")
if ! upload_res=$(git_upload_to_pages "$path" "$file"); then
>&2 echo "ERROR: Failed to upload '$name' ($?)"
return 1
fi
up_size=$(echo "$upload_res" | jq -r '.content.size')
if [ "$up_size" -ne "$size" ]; then
>&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
#git_delete_asset
return 1
@ -159,20 +131,15 @@ function git_safe_upload_to_pages {
return $?
}
function merge_package_json {
local jsonLink=$1
local jsonOut=$2
local old_json=$OUTPUT_DIR/oldJson.json
local merged_json=$OUTPUT_DIR/mergedJson.json
local error_code=0
echo "Downloading previous JSON $jsonLink ..."
curl -L -o "$old_json" "https://github.com/$GITHUB_REPOSITORY/releases/download/$jsonLink?access_token=$GITHUB_TOKEN" 2>/dev/null
error_code=$?
if [ $error_code -ne 0 ]; then
echo "ERROR: Download Failed! $error_code"
exit 1
fi
echo "Creating new JSON ..."
set +e
@ -180,7 +147,7 @@ function merge_package_json {
set -e
set -v
if [ ! -s "$merged_json" ]; then
rm -f "$merged_json"
echo "Nothing to merge"
else
@ -221,14 +188,10 @@ else
done
# Copy only relevant variant files
mkdir "$PKG_DIR/variants/"
board_list=$(cat "${PKG_DIR}"/boards.txt | grep "\.variant=" | cut -d= -f2)
while IFS= read -r variant; do
cp -Rf "$GITHUB_WORKSPACE/variants/${variant}" "$PKG_DIR/variants/"
done <<< "$board_list"
fi
cp -f "$GITHUB_WORKSPACE/CMakeLists.txt" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/idf_component.yml" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/Kconfig.projbuild" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/package.json" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/programmers.txt" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/cores" "$PKG_DIR/"
@ -241,7 +204,7 @@ cp -f "$GITHUB_WORKSPACE/tools/gen_insights_package.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/gen_insights_package.exe" "$PKG_DIR/tools/" cp -f "$GITHUB_WORKSPACE/tools/gen_insights_package.exe" "$PKG_DIR/tools/"
cp -Rf "$GITHUB_WORKSPACE/tools/partitions" "$PKG_DIR/tools/" cp -Rf "$GITHUB_WORKSPACE/tools/partitions" "$PKG_DIR/tools/"
cp -Rf "$GITHUB_WORKSPACE/tools/ide-debug" "$PKG_DIR/tools/" cp -Rf "$GITHUB_WORKSPACE/tools/ide-debug" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/pioarduino-build.py" "$PKG_DIR/tools/" cp -f "$GITHUB_WORKSPACE/tools/platformio-build.py" "$PKG_DIR/tools/"
# Remove unnecessary files in the package folder # Remove unnecessary files in the package folder
echo "Cleaning up folders ..." echo "Cleaning up folders ..."
@ -253,50 +216,47 @@ find "$PKG_DIR" -name '*.git*' -type f -delete
##
RVTC_NAME="riscv32-esp-elf-gcc"
RVTC_NEW_NAME="esp-rv32"
X32TC_NAME="xtensa-esp-elf-gcc"
X32TC_NEW_NAME="esp-x32"
# Replace tools locations in platform.txt
echo "Generating platform.txt..."
cat "$GITHUB_WORKSPACE/platform.txt" | \
sed "s/version=.*/version=$RELEASE_TAG/g" | \
sed 's/tools\.esp32-arduino-libs\.path\.windows=.*//g' | \
sed 's/{runtime\.platform\.path}.tools.esp32-arduino-libs/\{runtime.tools.esp32-arduino-libs.path\}/g' | \
sed 's/{runtime\.platform\.path}.tools.xtensa-esp-elf-gdb/\{runtime.tools.xtensa-esp-elf-gdb.path\}/g' | \
sed "s/{runtime\.platform\.path}.tools.xtensa-esp-elf/\\{runtime.tools.$X32TC_NEW_NAME.path\\}/g" | \
sed 's/{runtime\.platform\.path}.tools.riscv32-esp-elf-gdb/\{runtime.tools.riscv32-esp-elf-gdb.path\}/g' | \
sed "s/{runtime\.platform\.path}.tools.riscv32-esp-elf/\\{runtime.tools.$RVTC_NEW_NAME.path\\}/g" | \
sed 's/{runtime\.platform\.path}.tools.esptool/\{runtime.tools.esptool_py.path\}/g' | \
sed 's/{runtime\.platform\.path}.tools.openocd-esp32/\{runtime.tools.openocd-esp32.path\}/g' > "$PKG_DIR/platform.txt"
if [ -n "${VENDOR}" ]; then
# Append vendor name to platform.txt to create a separate section
sed -i "/^name=.*/s/$/ ($VENDOR)/" "$PKG_DIR/platform.txt"
fi
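# Illustrative effect of the sed pipeline above on a hypothetical platform.txt fragment:
#   before: {runtime.platform.path}/tools/esptool/esptool
#   after:  {runtime.tools.esptool_py.path}/esptool
# i.e. tool paths are redirected from the platform folder to standalone Arduino tool packages.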
# Add header with version information
echo "Generating core_version.h ..."
ver_define=$(echo "$RELEASE_TAG" | tr "[:lower:].\055" "[:upper:]_")
ver_hex=$(git -C "$GITHUB_WORKSPACE" rev-parse --short=8 HEAD 2>/dev/null)
echo \#define ARDUINO_ESP32_GIT_VER 0x"$ver_hex" > "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_GIT_DESC "$(git -C "$GITHUB_WORKSPACE" describe --tags 2>/dev/null)" >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE_"$ver_define" >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE \""$ver_define"\" >> "$PKG_DIR/cores/esp32/core_version.h"
# Compress package folder
echo "Creating ZIP ..."
pushd "$OUTPUT_DIR" >/dev/null
zip -qr "$PACKAGE_ZIP" "$PACKAGE_NAME"
if [ $? -ne 0 ]; then
echo "ERROR: Failed to create $PACKAGE_ZIP ($?)"
exit 1
fi
# Calculate SHA-256
echo "Calculating SHA sum ..."
PACKAGE_PATH="$OUTPUT_DIR/$PACKAGE_ZIP"
PACKAGE_SHA=$(shasum -a 256 "$PACKAGE_ZIP" | cut -f 1 -d ' ')
PACKAGE_SIZE=$(get_file_size "$PACKAGE_ZIP")
popd >/dev/null
rm -rf "$PKG_DIR"
echo "'$PACKAGE_ZIP' Created! Size: $PACKAGE_SIZE, SHA-256: $PACKAGE_SHA"
@ -304,28 +264,86 @@ echo
# Upload package to release page
echo "Uploading package to release page ..."
PACKAGE_URL=$(git_safe_upload_asset "$PACKAGE_PATH")
echo "Package Uploaded"
echo "Download URL: $PACKAGE_URL"
echo
##
## LIBS PACKAGE ZIP
##
LIBS_PROJ_NAME="esp32-arduino-libs"
LIBS_PKG_DIR="$OUTPUT_DIR/$LIBS_PROJ_NAME"
LIBS_PACKAGE_ZIP="$LIBS_PROJ_NAME-$RELEASE_TAG.zip"
# Get the libs package URL from the template
LIBS_PACKAGE_SRC_ZIP="$OUTPUT_DIR/src-$LIBS_PROJ_NAME.zip"
LIBS_PACKAGE_SRC_URL=`cat $PACKAGE_JSON_TEMPLATE | jq -r ".packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\") | .systems[0].url"`
# Download the libs package
echo "Downloading the libs archive ..."
curl -o "$LIBS_PACKAGE_SRC_ZIP" -LJO --url "$LIBS_PACKAGE_SRC_URL" || exit 1
# Extract the libs package
echo "Extracting the archive ..."
unzip -q -d "$OUTPUT_DIR" "$LIBS_PACKAGE_SRC_ZIP" || exit 1
EXTRACTED_DIR=`ls "$OUTPUT_DIR" | grep "^$LIBS_PROJ_NAME"`
mv "$OUTPUT_DIR/$EXTRACTED_DIR" "$LIBS_PKG_DIR" || exit 1
# Remove unnecessary files in the package folder
echo "Cleaning up folders ..."
find "$LIBS_PKG_DIR" -name '*.DS_Store' -exec rm -f {} \;
find "$LIBS_PKG_DIR" -name '*.git*' -type f -delete
# Compress package folder
echo "Creating ZIP ..."
pushd "$OUTPUT_DIR" >/dev/null
zip -qr "$LIBS_PACKAGE_ZIP" "$LIBS_PROJ_NAME"
if [ $? -ne 0 ]; then echo "ERROR: Failed to create $LIBS_PACKAGE_ZIP ($?)"; exit 1; fi
# Calculate SHA-256
echo "Calculating SHA sum ..."
LIBS_PACKAGE_PATH="$OUTPUT_DIR/$LIBS_PACKAGE_ZIP"
LIBS_PACKAGE_SHA=`shasum -a 256 "$LIBS_PACKAGE_ZIP" | cut -f 1 -d ' '`
LIBS_PACKAGE_SIZE=`get_file_size "$LIBS_PACKAGE_ZIP"`
popd >/dev/null
rm -rf "$LIBS_PKG_DIR"
echo "'$LIBS_PACKAGE_ZIP' Created! Size: $LIBS_PACKAGE_SIZE, SHA-256: $LIBS_PACKAGE_SHA"
echo
# Upload package to release page
echo "Uploading libs package to release page ..."
LIBS_PACKAGE_URL=`git_safe_upload_asset "$LIBS_PACKAGE_PATH"`
echo "Libs Package Uploaded"
echo "Libs Download URL: $LIBS_PACKAGE_URL"
echo
# Construct JQ argument with libs package data
libs_jq_arg="\
(.packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\")).systems[].url = \"$LIBS_PACKAGE_URL\" |\
(.packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\")).systems[].archiveFileName = \"$LIBS_PACKAGE_ZIP\" |\
(.packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\")).systems[].size = \"$LIBS_PACKAGE_SIZE\" |\
(.packages[0].tools[] | select(.name==\"$LIBS_PROJ_NAME\")).systems[].checksum = \"SHA-256:$LIBS_PACKAGE_SHA\""
# Update template values for the libs package and store it in the build folder
cat "$PACKAGE_JSON_TEMPLATE" | jq "$libs_jq_arg" > "$OUTPUT_DIR/package-$LIBS_PROJ_NAME.json"
# Overwrite the template location with the newly edited one
PACKAGE_JSON_TEMPLATE="$OUTPUT_DIR/package-$LIBS_PROJ_NAME.json"
##
## TEMP WORKAROUND FOR RV32 LONG PATH ON WINDOWS
##
RVTC_VERSION=$(cat "$PACKAGE_JSON_TEMPLATE" | jq -r ".packages[0].platforms[0].toolsDependencies[] | select(.name == \"$RVTC_NAME\") | .version" | cut -d '_' -f 2)
# RVTC_VERSION=`date -j -f '%Y%m%d' "$RVTC_VERSION" '+%y%m'` # MacOS
RVTC_VERSION=$(date -d "$RVTC_VERSION" '+%y%m')
rvtc_jq_arg="\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$RVTC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$RVTC_NAME\")).name = \"$RVTC_NEW_NAME\" |\
(.packages[0].tools[] | select(.name==\"$RVTC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].tools[] | select(.name==\"$RVTC_NAME\")).name = \"$RVTC_NEW_NAME\" |\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$X32TC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$X32TC_NAME\")).name = \"$X32TC_NEW_NAME\" |\
(.packages[0].tools[] | select(.name==\"$X32TC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].tools[] | select(.name==\"$X32TC_NAME\")).name = \"$X32TC_NEW_NAME\""
cat "$PACKAGE_JSON_TEMPLATE" | jq "$rvtc_jq_arg" > "$OUTPUT_DIR/package-rvfix.json"
PACKAGE_JSON_TEMPLATE="$OUTPUT_DIR/package-rvfix.json"
##
## PACKAGE JSON
@ -339,26 +357,17 @@ jq_arg=".packages[0].platforms[0].version = \"$RELEASE_TAG\" | \
.packages[0].platforms[0].checksum = \"SHA-256:$PACKAGE_SHA\""
# Generate package JSONs
echo "Generating $PACKAGE_JSON_DEV ..."
cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_DEV"
# On MacOS the sed command won't skip the first match. Use gsed instead.
sed '0,/github\.com\//!s|github\.com/|dl.espressif.cn/github_assets/|g' "$OUTPUT_DIR/$PACKAGE_JSON_DEV" > "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN"
python "$SCRIPTS_DIR/release_append_cn.py" "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN"
if [ "$RELEASE_PRE" == "false" ]; then
echo "Generating $PACKAGE_JSON_REL ..."
cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_REL"
# On MacOS the sed command won't skip the first match. Use gsed instead.
sed '0,/github\.com\//!s|github\.com/|dl.espressif.cn/github_assets/|g' "$OUTPUT_DIR/$PACKAGE_JSON_REL" > "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN"
python "$SCRIPTS_DIR/release_append_cn.py" "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN"
fi
# Figure out the last release or pre-release
echo "Getting previous releases ..."
releasesJson=$(curl -sH "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" 2>/dev/null)
if [ $? -ne 0 ]; then
echo "ERROR: Get Releases Failed! ($?)"
exit 1
fi
set +e
prev_release=$(echo "$releasesJson" | jq -e -r ". | map(select(.draft == false and .prerelease == false)) | sort_by(.published_at | - fromdateiso8601) | .[0].tag_name") prev_release=$(echo "$releasesJson" | jq -e -r ". | map(select(.draft == false and .prerelease == false)) | sort_by(.published_at | - fromdateiso8601) | .[0].tag_name")
@ -378,94 +387,27 @@ echo "Previous (any)release: $prev_any_release"
echo
# Merge package JSONs with previous releases
if [ -n "$prev_any_release" ] && [ "$prev_any_release" != "null" ]; then
echo "Merging with JSON from $prev_any_release ..."
merge_package_json "$prev_any_release/$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV"
merge_package_json "$prev_any_release/$PACKAGE_JSON_DEV_CN" "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN"
fi
if [ "$RELEASE_PRE" == "false" ]; then
if [ -n "$prev_release" ] && [ "$prev_release" != "null" ]; then
echo "Merging with JSON from $prev_release ..."
merge_package_json "$prev_release/$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL"
merge_package_json "$prev_release/$PACKAGE_JSON_REL_CN" "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN"
fi
fi
# Test the package JSONs
echo "Installing arduino-cli ..."
export PATH="/home/runner/bin:$PATH"
source "${SCRIPTS_DIR}/install-arduino-cli.sh"
# For the Chinese mirror, we can't test the package JSONs as the Chinese mirror might not be updated yet.
echo "Testing $PACKAGE_JSON_DEV install ..."
echo "Installing esp32 ..."
arduino-cli core install esp32:esp32 --additional-urls "file://$OUTPUT_DIR/$PACKAGE_JSON_DEV"
if [ $? -ne 0 ]; then
echo "ERROR: Failed to install esp32 ($?)"
exit 1
fi
echo "Compiling example ..."
arduino-cli compile --fqbn esp32:esp32:esp32 "$GITHUB_WORKSPACE"/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino
if [ $? -ne 0 ]; then
echo "ERROR: Failed to compile example ($?)"
exit 1
fi
echo "Uninstalling esp32 ..."
arduino-cli core uninstall esp32:esp32
if [ $? -ne 0 ]; then
echo "ERROR: Failed to uninstall esp32 ($?)"
exit 1
fi
echo "Test successful!"
if [ "$RELEASE_PRE" == "false" ]; then
echo "Testing $PACKAGE_JSON_REL install ..."
echo "Installing esp32 ..."
arduino-cli core install esp32:esp32 --additional-urls "file://$OUTPUT_DIR/$PACKAGE_JSON_REL"
if [ $? -ne 0 ]; then
echo "ERROR: Failed to install esp32 ($?)"
exit 1
fi
echo "Compiling example ..."
arduino-cli compile --fqbn esp32:esp32:esp32 "$GITHUB_WORKSPACE"/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino
if [ $? -ne 0 ]; then
echo "ERROR: Failed to compile example ($?)"
exit 1
fi
echo "Uninstalling esp32 ..."
arduino-cli core uninstall esp32:esp32
if [ $? -ne 0 ]; then
echo "ERROR: Failed to uninstall esp32 ($?)"
exit 1
fi
echo "Test successful!"
fi
# Upload package JSONs
echo "Uploading $PACKAGE_JSON_DEV ..."
echo "Download URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV")"
echo "Pages URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV")"
echo "Download CN URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN")"
echo "Pages CN URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_DEV_CN" "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN")"
echo
if [ "$RELEASE_PRE" == "false" ]; then
echo "Uploading $PACKAGE_JSON_REL ..."
echo "Download URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL")"
echo "Pages URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL")"
echo "Download CN URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN")"
echo "Pages CN URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_REL_CN" "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN")"
echo
fi


@ -1,57 +0,0 @@
#!/usr/bin/env python3
# Arduino IDE provides by default a package file for the ESP32. This causes version conflicts
# when the user tries to use the JSON file with the Chinese mirrors.
#
# The downside is that the Arduino IDE will always warn the user that updates are available as it
# will consider the version from the Chinese mirrors as a pre-release version.
#
# This script is used to append "-cn" to all versions in the package_esp32_index_cn.json file so that
# the user can select the Chinese mirrors without conflicts.
#
# If Arduino ever stops providing the package_esp32_index.json file by default,
# this script can be removed and the tags reverted.
import json
def append_cn_to_versions(obj):
if isinstance(obj, dict):
# Skip tools that are not from the esp32 package
packager = obj.get("packager")
if packager is not None and packager != "esp32":
return
for key, value in obj.items():
if key == "version" and isinstance(value, str):
if not value.endswith("-cn"):
obj[key] = value + "-cn"
else:
append_cn_to_versions(value)
elif isinstance(obj, list):
for item in obj:
append_cn_to_versions(item)
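# For example (hypothetical version value): an entry of the esp32 package with
#   "version": "3.0.0"
# becomes
#   "version": "3.0.0-cn"
# while versions already ending in "-cn" and packages other than "esp32" are left untouched.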
def process_json_file(input_path, output_path=None):
with open(input_path, "r", encoding="utf-8") as f:
data = json.load(f)
append_cn_to_versions(data)
if output_path is None:
output_path = input_path
with open(output_path, "w", encoding="utf-8") as f:
json.dump(data, f, indent=2)
print(f"Updated JSON written to {output_path}")
if __name__ == "__main__":
import sys
if len(sys.argv) < 2:
print("Usage: python release_append_cn.py input.json [output.json]")
else:
input_file = sys.argv[1]
output_file = sys.argv[2] if len(sys.argv) > 2 else None
process_json_file(input_file, output_file)


@ -1,84 +0,0 @@
#!/bin/bash
build_all=false
chunks_count=0
if [[ $CORE_CHANGED == 'true' ]] || [[ $IS_PR != 'true' ]]; then
echo "Core files changed or not a PR. Building all."
build_all=true
chunks_count=$MAX_CHUNKS
elif [[ $LIB_CHANGED == 'true' ]]; then
echo "Libraries changed. Building only affected sketches."
if [[ $NETWORKING_CHANGED == 'true' ]]; then
echo "Networking libraries changed. Building networking related sketches."
networking_sketches="$(find libraries/WiFi -name '*.ino') "
networking_sketches+="$(find libraries/Ethernet -name '*.ino') "
networking_sketches+="$(find libraries/PPP -name '*.ino') "
networking_sketches+="$(find libraries/NetworkClientSecure -name '*.ino') "
networking_sketches+="$(find libraries/WebServer -name '*.ino') "
fi
if [[ $FS_CHANGED == 'true' ]]; then
echo "FS libraries changed. Building FS related sketches."
fs_sketches="$(find libraries/SD -name '*.ino') "
fs_sketches+="$(find libraries/SD_MMC -name '*.ino') "
fs_sketches+="$(find libraries/SPIFFS -name '*.ino') "
fs_sketches+="$(find libraries/LittleFS -name '*.ino') "
fs_sketches+="$(find libraries/FFat -name '*.ino') "
fi
sketches="$networking_sketches $fs_sketches"
for file in $LIB_FILES; do
lib=$(echo "$file" | awk -F "/" '{print $1"/"$2}')
if [[ "$file" == *.ino ]]; then
# If file ends with .ino, add it to the list of sketches
echo "Sketch found: $file"
sketches+="$file "
elif [[ "$file" == "$lib/src/"* ]]; then
# If file is inside the src directory, find all sketches in the lib/examples directory
echo "Library src file found: $file"
if [[ -d $lib/examples ]]; then
lib_sketches=$(find "$lib"/examples -name '*.ino')
sketches+="$lib_sketches "
echo "Library sketches: $lib_sketches"
fi
else
# If the file is in an example folder but is not a sketch, find all sketches in the current directory
echo "File in example folder found: $file"
sketch=$(find "$(dirname "$file")" -name '*.ino')
sketches+="$sketch "
echo "Sketch in example folder: $sketch"
fi
echo ""
done
fi
if [[ -n $sketches ]]; then
# Remove duplicates
sketches=$(echo "$sketches" | tr ' ' '\n' | sort | uniq)
for sketch in $sketches; do
echo "$sketch" >> sketches_found.txt
chunks_count=$((chunks_count+1))
done
echo "Number of sketches found: $chunks_count"
echo "Sketches:"
echo "$sketches"
if [[ $chunks_count -gt $MAX_CHUNKS ]]; then
echo "More sketches than the allowed number of chunks found. Limiting to $MAX_CHUNKS chunks."
chunks_count=$MAX_CHUNKS
fi
fi
chunks='["0"'
for i in $(seq 1 $(( chunks_count - 1 )) ); do
chunks+=",\"$i\""
done
chunks+="]"
{
echo "build_all=$build_all"
echo "build_libraries=$BUILD_LIBRARIES"
echo "build_static_sketches=$BUILD_STATIC_SKETCHES"
echo "build_idf=$BUILD_IDF"
echo "chunk_count=$chunks_count"
echo "chunks=$chunks"
} >> "$GITHUB_OUTPUT"


@ -1,60 +1,7 @@
#!/bin/bash
if [ -d "$ARDUINO_ESP32_PATH/tools/esp32-arduino-libs" ]; then
SDKCONFIG_DIR="$ARDUINO_ESP32_PATH/tools/esp32-arduino-libs"
elif [ -d "$GITHUB_WORKSPACE/tools/esp32-arduino-libs" ]; then
SDKCONFIG_DIR="$GITHUB_WORKSPACE/tools/esp32-arduino-libs"
else
SDKCONFIG_DIR="tools/esp32-arduino-libs"
fi
function check_requirements { # check_requirements <sketchdir> <sdkconfig_path>
local sketchdir=$1
local sdkconfig_path=$2
local has_requirements=1
local requirements
local requirements_or
if [ ! -f "$sdkconfig_path" ] || [ ! -f "$sketchdir/ci.json" ]; then
echo "WARNING: sdkconfig or ci.json not found. Assuming requirements are met." 1>&2
# Return 1 on error to force the sketch to be built and fail. This way the
# CI will fail and the user will know that the sketch has a problem.
else
# Check if the sketch requires any configuration options (AND)
requirements=$(jq -r '.requires[]? // empty' "$sketchdir/ci.json")
if [[ "$requirements" != "null" && "$requirements" != "" ]]; then
for requirement in $requirements; do
requirement=$(echo "$requirement" | xargs)
found_line=$(grep -E "^$requirement" "$sdkconfig_path")
if [[ "$found_line" == "" ]]; then
has_requirements=0
fi
done
fi
# Check if the sketch requires any configuration options (OR)
requirements_or=$(jq -r '.requires_any[]? // empty' "$sketchdir/ci.json")
if [[ "$requirements_or" != "null" && "$requirements_or" != "" ]]; then
local found=false
for requirement in $requirements_or; do
requirement=$(echo "$requirement" | xargs)
found_line=$(grep -E "^$requirement" "$sdkconfig_path")
if [[ "$found_line" != "" ]]; then
found=true
break
fi
done
if [[ "$found" == "false" ]]; then
has_requirements=0
fi
fi
fi
echo $has_requirements
}
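# Minimal ci.json sketch of the fields read by check_requirements and build_sketch below
# (keys match the jq queries in this script; values are hypothetical):
#   {
#     "targets": { "esp32h2": false },
#     "fqbn": { "esp32": ["espressif:esp32:esp32:PSRAM=enabled"] },
#     "fqbn_append": "FlashMode=dio",
#     "requires": ["CONFIG_SOC_WIFI_SUPPORTED=y"],
#     "requires_any": ["CONFIG_SOC_WIFI_SUPPORTED=y", "CONFIG_SOC_BT_SUPPORTED=y"]
#   }
# Every "requires" entry must appear in the target sdkconfig; "requires_any" needs at least one match.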
function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [extra-options]
while [ -n "$1" ]; do
case "$1" in case "$1" in
-ai ) -ai )
shift shift
@ -80,18 +27,6 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
shift
sketchdir=$1
;;
-i )
shift
chunk_index=$1
;;
-l )
shift
log_compilation=$1
;;
-d )
shift
debug_level="DebugLevel=$1"
;;
* )
break
;;
@ -99,10 +34,9 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
shift
done
xtra_opts=("$@")
len=0
if [ -z "$sketchdir" ]; then
echo "ERROR: Sketch directory not provided"
echo "$USAGE"
exit 1
@ -110,8 +44,8 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
# No FQBN was passed, try to get it from other options
if [ -z "$fqbn" ]; then
if [ -z "$target" ]; then
echo "ERROR: Unspecified chip"
echo "$USAGE"
exit 1
@ -122,83 +56,48 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
# precedence. Note that the following logic also falls to the default
# parameters if no arguments were passed and no file was found.
if [ -z "$options" ] && [ -f "$sketchdir"/ci.json ]; then
# The config file could contain multiple FQBNs for one chip. If
# that's the case we build one time for every FQBN.
len=$(jq -r --arg target "$target" '.fqbn[$target] | length' "$sketchdir"/ci.json)
if [ "$len" -gt 0 ]; then
fqbn=$(jq -r --arg target "$target" '.fqbn[$target] | sort' "$sketchdir"/ci.json)
fi
fi
if [ -n "$options" ] || [ "$len" -eq 0 ]; then
# Since we are passing options, we will end up with only one FQBN to
# build.
len=1
if [ -f "$sketchdir"/ci.json ]; then
fqbn_append=$(jq -r '.fqbn_append' "$sketchdir"/ci.json)
if [ "$fqbn_append" == "null" ]; then
fqbn_append=""
fi
fi
# Default FQBN options if none were passed in the command line. # Default FQBN options if none were passed in the command line.
# Replace any double commas with a single one and strip leading and
# trailing commas.
esp32_opts=$(echo "PSRAM=enabled,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32s2_opts=$(echo "PSRAM=enabled,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32s3_opts=$(echo "PSRAM=opi,USBMode=default,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32c3_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32c6_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32h2_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32p4_opts=$(echo "PSRAM=enabled,USBMode=default,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32c5_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
# Select the common part of the FQBN based on the target. The rest will be
# appended depending on the passed options.
opt=""
case "$target" in case "$target" in
"esp32") "esp32")
[ -n "${options:-$esp32_opts}" ] && opt=":${options:-$esp32_opts}" fqbn="espressif:esp32:esp32:${options:-$esp32_opts}"
fqbn="espressif:esp32:esp32$opt"
;; ;;
"esp32s2") "esp32s2")
[ -n "${options:-$esp32s2_opts}" ] && opt=":${options:-$esp32s2_opts}" fqbn="espressif:esp32:esp32s2:${options:-$esp32s2_opts}"
fqbn="espressif:esp32:esp32s2$opt"
;; ;;
"esp32c3") "esp32c3")
[ -n "${options:-$esp32c3_opts}" ] && opt=":${options:-$esp32c3_opts}" fqbn="espressif:esp32:esp32c3:${options:-$esp32c3_opts}"
fqbn="espressif:esp32:esp32c3$opt"
;; ;;
"esp32s3") "esp32s3")
[ -n "${options:-$esp32s3_opts}" ] && opt=":${options:-$esp32s3_opts}" fqbn="espressif:esp32:esp32s3:${options:-$esp32s3_opts}"
fqbn="espressif:esp32:esp32s3$opt"
;; ;;
"esp32c6") "esp32c6")
[ -n "${options:-$esp32c6_opts}" ] && opt=":${options:-$esp32c6_opts}" fqbn="espressif:esp32:esp32c6:${options:-$esp32c6_opts}"
fqbn="espressif:esp32:esp32c6$opt"
;; ;;
"esp32h2") "esp32h2")
[ -n "${options:-$esp32h2_opts}" ] && opt=":${options:-$esp32h2_opts}" fqbn="espressif:esp32:esp32h2:${options:-$esp32h2_opts}"
fqbn="espressif:esp32:esp32h2$opt"
;;
"esp32p4")
[ -n "${options:-$esp32p4_opts}" ] && opt=":${options:-$esp32p4_opts}"
fqbn="espressif:esp32:esp32p4$opt"
;;
"esp32c5")
[ -n "${options:-$esp32c5_opts}" ] && opt=":${options:-$esp32c5_opts}"
fqbn="espressif:esp32:esp32c5$opt"
;;
*)
echo "ERROR: Invalid chip: $target"
exit 1
;;
esac
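# Illustrative result (assuming no explicit options, empty debug level and no fqbn_append):
# for target esp32s3 the defaults above expand to
#   espressif:esp32:esp32s3:PSRAM=opi,USBMode=default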
@ -214,11 +113,11 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
fi
if [ -z "$fqbn" ]; then
echo "No FQBN passed or invalid chip: $target"
exit 1
fi
# The directory that will hold all the artifacts (the build directory) is
# provided through:
# 1. An env variable called ARDUINO_BUILD_DIR.
# 2. Created at the sketch level as "build" in the case of a single
@ -226,107 +125,69 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
# 3. Created at the sketch level as "buildX" where X is the number
# of configuration built in case of a multiconfiguration test.
sketchname=$(basename "$sketchdir")
local has_requirements
if [ -f "$sketchdir"/ci.json ]; then if [[ -n $target ]] && [[ -f "$sketchdir/.skip.$target" ]]; then
# If the target is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json)
if [[ "$is_target" == "false" ]]; then
echo "Skipping $sketchname for target $target" echo "Skipping $sketchname for target $target"
exit 0 exit 0
fi fi
has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig")
if [ "$has_requirements" == "0" ]; then
echo "Target $target does not meet the requirements for $sketchname. Skipping."
exit 0
fi
fi
ARDUINO_CACHE_DIR="$HOME/.arduino/cache.tmp"
if [ -n "$ARDUINO_BUILD_DIR" ]; then
build_dir="$ARDUINO_BUILD_DIR"
elif [ "$len" -eq 1 ]; then
# build_dir="$sketchdir/build"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build.tmp"
fi
output_file="$HOME/.arduino/cli_compile_output.txt"
sizes_file="$GITHUB_WORKSPACE/cli_compile_$chunk_index.json"
mkdir -p "$ARDUINO_CACHE_DIR" mkdir -p "$ARDUINO_CACHE_DIR"
for i in $(seq 0 $((len - 1))); do for i in `seq 0 $(($len - 1))`
if [ "$len" -ne 1 ]; then do
if [ $len -ne 1 ]; then
# build_dir="$sketchdir/build$i" # build_dir="$sketchdir/build$i"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build$i.tmp" build_dir="$HOME/.arduino/tests/$sketchname/build$i.tmp"
fi fi
rm -rf "$build_dir" rm -rf $build_dir
mkdir -p "$build_dir" mkdir -p $build_dir
currfqbn=$(echo "$fqbn" | jq -r --argjson i "$i" '.[$i]') currfqbn=`echo $fqbn | jq -r --argjson i $i '.[$i]'`
if [ -f "$ide_path/arduino-cli" ]; then if [ -f "$ide_path/arduino-cli" ]; then
echo "Building $sketchname with arduino-cli and FQBN=$currfqbn" echo "Building $sketchname with arduino-cli and FQBN=$currfqbn"
curroptions=$(echo "$currfqbn" | cut -d':' -f4) curroptions=`echo "$currfqbn" | cut -d':' -f4`
currfqbn=$(echo "$currfqbn" | cut -d':' -f1-3) currfqbn=`echo "$currfqbn" | cut -d':' -f1-3`
"$ide_path"/arduino-cli compile \ $ide_path/arduino-cli compile \
--fqbn "$currfqbn" \ --fqbn "$currfqbn" \
--board-options "$curroptions" \ --board-options "$curroptions" \
--warnings "all" \ --warnings "all" \
--build-property "compiler.warning_flags.all=-Wall -Werror=all -Wextra" \ --build-property "compiler.warning_flags.all=-Wall -Werror=all -Wextra" \
--build-cache-path "$ARDUINO_CACHE_DIR" \
--build-path "$build_dir" \ --build-path "$build_dir" \
"${xtra_opts[@]}" "${sketchdir}" \ $xtra_opts "${sketchdir}"
2>&1 | tee "$output_file"
exit_status=${PIPESTATUS[0]} exit_status=$?
if [ "$exit_status" -ne 0 ]; then if [ $exit_status -ne 0 ]; then
echo "ERROR: Compilation failed with error code $exit_status" echo ""ERROR: Compilation failed with error code $exit_status""
exit "$exit_status" exit $exit_status
fi fi
if [ -n "$log_compilation" ]; then
#Extract the program storage space and dynamic memory usage in bytes and percentage in separate variables from the output, just the value without the string
flash_bytes=$(grep -oE 'Sketch uses ([0-9]+) bytes' "$output_file" | awk '{print $3}')
flash_percentage=$(grep -oE 'Sketch uses ([0-9]+) bytes \(([0-9]+)%\)' "$output_file" | awk '{print $5}' | tr -d '(%)')
ram_bytes=$(grep -oE 'Global variables use ([0-9]+) bytes' "$output_file" | awk '{print $4}')
ram_percentage=$(grep -oE 'Global variables use ([0-9]+) bytes \(([0-9]+)%\)' "$output_file" | awk '{print $6}' | tr -d '(%)')
# Extract the directory path excluding the filename
directory_path=$(dirname "$sketch")
# Define the constant part
constant_part="/home/runner/Arduino/hardware/espressif/esp32/libraries/"
# Extract the desired substring
lib_sketch_name="${directory_path#"$constant_part"}"
#append json file where key is fqbn, sketch name, sizes -> extracted values
echo "{\"name\": \"$lib_sketch_name\",
\"sizes\": [{
\"flash_bytes\": $flash_bytes,
\"flash_percentage\": $flash_percentage,
\"ram_bytes\": $ram_bytes,
\"ram_percentage\": $ram_percentage
}]
}," >> "$sizes_file"
fi
elif [ -f "$ide_path/arduino-builder" ]; then elif [ -f "$ide_path/arduino-builder" ]; then
echo "Building $sketchname with arduino-builder and FQBN=$currfqbn" echo "Building $sketchname with arduino-builder and FQBN=$currfqbn"
echo "Build path = $build_dir" echo "Build path = $build_dir"
"$ide_path"/arduino-builder -compile -logger=human -core-api-version=10810 \ $ide_path/arduino-builder -compile -logger=human -core-api-version=10810 \
-fqbn=\""$currfqbn"\" \ -fqbn=\"$currfqbn\" \
-warnings="all" \ -warnings="all" \
-tools "$ide_path/tools-builder" \ -tools "$ide_path/tools-builder" \
-hardware "$user_path/hardware" \ -hardware "$user_path/hardware" \
-libraries "$user_path/libraries" \ -libraries "$user_path/libraries" \
-build-cache "$ARDUINO_CACHE_DIR" \ -build-cache "$ARDUINO_CACHE_DIR" \
-build-path "$build_dir" \ -build-path "$build_dir" \
"${xtra_opts[@]}" "${sketchdir}/${sketchname}.ino" $xtra_opts "${sketchdir}/${sketchname}.ino"
exit_status=$? exit_status=$?
if [ $exit_status -ne 0 ]; then if [ $exit_status -ne 0 ]; then
echo "ERROR: Compilation failed with error code $exit_status" echo ""ERROR: Compilation failed with error code $exit_status""
exit $exit_status exit $exit_status
fi fi
# $ide_path/arduino-builder -compile -logger=human -core-api-version=10810 \ # $ide_path/arduino-builder -compile -logger=human -core-api-version=10810 \
@ -343,18 +204,14 @@ function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [ext
# $xtra_opts "${sketchdir}/${sketchname}.ino"
fi
done
unset fqbn
unset xtra_opts
unset options
}
function count_sketches { # count_sketches <path> [target] [file] [ignore-requirements]
local path=$1
local target=$2
local ignore_requirements=$3
local file=$4
local sketches
if [ $# -lt 1 ]; then
echo "ERROR: Illegal number of parameters"
@ -362,53 +219,33 @@ function count_sketches { # count_sketches <path> [target] [file] [ignore-requir
fi
rm -rf sketches.txt
touch sketches.txt
if [ ! -d "$path" ]; then if [ ! -d "$path" ]; then
touch sketches.txt
return 0 return 0
fi fi
if [ -f "$file" ]; then local sketches=$(find $path -name *.ino | sort)
sketches=$(cat "$file")
else
sketches=$(find "$path" -name '*.ino' | sort)
fi
local sketchnum=0
for sketch in $sketches; do
local sketchdir
local sketchdirname
local sketchname
local has_requirements
sketchdir=$(dirname "$sketch")
sketchdirname=$(basename "$sketchdir")
sketchname=$(basename "$sketch")
if [[ "$sketchdirname.ino" != "$sketchname" ]]; then if [[ "$sketchdirname.ino" != "$sketchname" ]]; then
continue continue
elif [[ -n $target ]] && [[ -f $sketchdir/ci.json ]]; then elif [[ -n $target ]] && [[ -f "$sketchdir/.skip.$target" ]]; then
# If the target is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json)
if [[ "$is_target" == "false" ]]; then
continue continue
else
echo $sketch >> sketches.txt
sketchnum=$(($sketchnum + 1))
fi fi
if [ "$ignore_requirements" != "1" ]; then
has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig")
if [ "$has_requirements" == "0" ]; then
continue
fi
fi
fi
echo "$sketch" >> sketches.txt
sketchnum=$((sketchnum + 1))
done
return $sketchnum
}
function build_sketches { # build_sketches <ide_path> <user_path> <target> <path> <chunk> <total-chunks> [extra-options]
local args=()
while [ -n "$1" ]; do
case $1 in
-ai )
shift
@ -421,12 +258,12 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
-t )
shift
target=$1
args+=("-t" "$target")
;;
-fqbn )
shift
fqbn=$1
args+=("-fqbn" "$fqbn")
;;
-p )
shift
@ -440,19 +277,6 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
shift
chunk_max=$1
;;
-l )
shift
log_compilation=$1
;;
-f )
shift
sketches_file=$1
;;
-d )
shift
debug_level="$1"
args+=("-d" "$debug_level")
;;
* )
break
;;
@ -460,10 +284,10 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
shift
done
local xtra_opts=("$@")
if [ -z "$chunk_index" ] || [ -z "$chunk_max" ]; then
echo "ERROR: Invalid chunk parameters"
echo "$USAGE"
exit 1
fi
@ -478,24 +302,16 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
fi
set +e
if [ -n "$sketches_file" ]; then
count_sketches "$path" "$target" "0" "$sketches_file"
local sketchcount=$?
else
count_sketches "$path" "$target" count_sketches "$path" "$target"
local sketchcount=$? local sketchcount=$?
fi
set -e
local sketches
sketches=$(cat sketches.txt)
rm -rf sketches.txt
local chunk_size
local all_chunks
chunk_size=$(( sketchcount / chunk_max ))
all_chunks=$(( chunk_max * chunk_size ))
if [ "$all_chunks" -lt "$sketchcount" ]; then
chunk_size=$(( chunk_size + 1 ))
fi
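# Worked example (hypothetical counts): with sketchcount=10 and chunk_max=4, chunk_size
# starts at 10/4=2, all_chunks=8 is below 10, so chunk_size becomes 3 and the four chunks
# cover sketches 1-3, 4-6, 7-9 and 10.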
local start_index=0
@ -504,20 +320,19 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
start_index=$chunk_index
end_index=$sketchcount
else
start_index=$(( chunk_index * chunk_size ))
if [ "$sketchcount" -le "$start_index" ]; then
echo "No sketches to build for $target in this chunk"
return 0
fi
end_index=$(( $(( chunk_index + 1 )) * chunk_size ))
if [ "$end_index" -gt "$sketchcount" ]; then
end_index=$sketchcount
fi
fi
local start_num
start_num=$(( start_index + 1 ))
echo "Found $sketchcount Sketches for target '$target'";
echo "Chunk Index : $chunk_index"
echo "Chunk Count : $chunk_max"
@ -525,58 +340,24 @@ function build_sketches { # build_sketches <ide_path> <user_path> <target> <path
echo "Start Sketch: $start_num" echo "Start Sketch: $start_num"
echo "End Sketch : $end_index" echo "End Sketch : $end_index"
#if fqbn is not passed then set it to default for compilation log
if [ -z "$fqbn" ]; then
log_fqbn="espressif:esp32:$target"
else
log_fqbn=$fqbn
fi
sizes_file="$GITHUB_WORKSPACE/cli_compile_$chunk_index.json"
if [ -n "$log_compilation" ]; then
#echo board,target and start of sketches to sizes_file json
echo "{ \"board\": \"$log_fqbn\",
\"target\": \"$target\",
\"sketches\": [" >> "$sizes_file"
fi
local sketchnum=0
args+=("-ai" "$ide_path" "-au" "$user_path" "-i" "$chunk_index")
if [ -n "$log_compilation" ]; then
args+=("-l" "$log_compilation")
fi
for sketch in $sketches; do
local sketchdir
local sketchdirname
sketchdir=$(dirname "$sketch")
sketchdirname=$(basename "$sketchdir")
sketchnum=$((sketchnum + 1))
if [ "$sketchnum" -le "$start_index" ] \
|| [ "$sketchnum" -gt "$end_index" ]; then
continue
fi
echo ""
echo "Building Sketch Index $sketchnum - $sketchdirname"
build_sketch "${args[@]}" -s "$sketchdir" "${xtra_opts[@]}"
local result=$?
if [ $result -ne 0 ]; then
return $result
fi
done
if [ -n "$log_compilation" ]; then
#remove last comma from json
if [ "$i" -eq $((len - 1)) ]; then
sed -i '$ s/.$//' "$sizes_file"
fi
#echo end of sketches sizes_file json
echo "]" >> "$sizes_file"
#echo end of board sizes_file json
echo "}," >> "$sizes_file"
fi
return 0
}
@ -586,28 +367,26 @@ Available commands:
count: Count sketches.
build: Build a sketch.
chunk_build: Build a chunk of sketches.
check_requirements: Check if target meets sketch requirements.
" "
cmd=$1
shift
if [ -z "$cmd" ]; then
echo "ERROR: No command supplied"
echo "$USAGE"
exit 2
fi
case "$cmd" in
"count") count_sketches "$@" "count") count_sketches $*
;; ;;
"build") build_sketch "$@" "build") build_sketch $*
;; ;;
"chunk_build") build_sketches "$@" "chunk_build") build_sketches $*
;;
"check_requirements") check_requirements "$@"
;; ;;
*) *)
echo "ERROR: Unrecognized command" echo "ERROR: Unrecognized command"
echo "$USAGE" echo "$USAGE"
exit 2 exit 2
esac esac


@ -2,20 +2,19 @@
USAGE=" USAGE="
USAGE: USAGE:
${0} -c -type <test_type> <chunk_build_opts> ${0} -c <chunk_build_opts>
Example: ${0} -c -type validation -t esp32 -i 0 -m 15 Example: ${0} -c -t esp32 -i 0 -m 15
${0} -s sketch_name <build_opts> ${0} -s sketch_name <build_opts>
Example: ${0} -s hello_world -t esp32 Example: ${0} -s hello_world -t esp32
${0} -clean ${0} -clean
Remove build and test generated files Remove build and test generated files
" "
function clean { function clean(){
rm -rf tests/*/build*/
rm -rf tests/.pytest_cache rm -rf tests/.pytest_cache
find tests/ -type d -name 'build*' -exec rm -rf "{}" \+ rm -rf tests/*/__pycache__/
find tests/ -type d -name '__pycache__' -exec rm -rf "{}" \+ rm -rf tests/*/*.xml
find tests/ -name '*.xml' -exec rm -rf "{}" \+
find tests/ -name 'result_*.json' -exec rm -rf "{}" \+
} }
SCRIPTS_DIR="./.github/scripts"
@ -23,7 +22,7 @@ BUILD_CMD=""
chunk_build=0
while [ -n "$1" ]; do
case $1 in
-c )
chunk_build=1
@ -36,10 +35,6 @@ while [ -n "$1" ]; do
echo "$USAGE" echo "$USAGE"
exit 0 exit 0
;; ;;
-type )
shift
test_type=$1
;;
-clean )
clean
exit 0
@ -51,30 +46,19 @@ while [ -n "$1" ]; do
shift
done
source "${SCRIPTS_DIR}/install-arduino-cli.sh"
source "${SCRIPTS_DIR}/install-arduino-core-esp32.sh"
args=("-ai" "$ARDUINO_IDE_PATH" "-au" "$ARDUINO_USR_PATH")
if [[ $test_type == "all" ]] || [[ -z $test_type ]]; then
if [ -n "$sketch" ]; then
tmp_sketch_path=$(find tests -name "$sketch".ino)
test_type=$(basename "$(dirname "$(dirname "$tmp_sketch_path")")")
echo "Sketch $sketch test type: $test_type"
test_folder="$PWD/tests/$test_type"
else
test_folder="$PWD/tests"
fi
else
test_folder="$PWD/tests/$test_type"
fi
if [ $chunk_build -eq 1 ]; then
BUILD_CMD="${SCRIPTS_DIR}/sketch_utils.sh chunk_build"
args+=("-p" "$test_folder" "-i" "0" "-m" "1")
else
BUILD_CMD="${SCRIPTS_DIR}/sketch_utils.sh build"
args+=("-s" "$test_folder/$sketch")
fi
${BUILD_CMD} "${args[@]}" "$@"


@ -1,28 +0,0 @@
#!/bin/bash
build_types="'validation'"
hw_types="'validation'"
wokwi_types="'validation'"
qemu_types="'validation'"
if [[ $IS_PR != 'true' ]] || [[ $PERFORMANCE_ENABLED == 'true' ]]; then
build_types+=",'performance'"
hw_types+=",'performance'"
#wokwi_types+=",'performance'"
#qemu_types+=",'performance'"
fi
targets="'esp32','esp32s2','esp32s3','esp32c3','esp32c6','esp32h2','esp32p4'"
mkdir -p info
echo "[$wokwi_types]" > info/wokwi_types.txt
echo "[$targets]" > info/targets.txt
{
echo "build-types=[$build_types]"
echo "hw-types=[$hw_types]"
echo "wokwi-types=[$wokwi_types]"
echo "qemu-types=[$qemu_types]"
echo "targets=[$targets]"
} >> "$GITHUB_OUTPUT"
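# Illustrative GITHUB_OUTPUT written by this script when performance tests are enabled:
#   build-types=['validation','performance']
#   hw-types=['validation','performance']
#   wokwi-types=['validation']
#   qemu-types=['validation']
#   targets=['esp32','esp32s2','esp32s3','esp32c3','esp32c6','esp32h2','esp32p4']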


@ -1,168 +1,58 @@
#!/bin/bash
function run_test {
local target=$1
local sketch=$2
local options=$3
local erase_flash=$4
local sketchdir
local sketchname
local result=0
local error=0
local sdkconfig_path
local extra_args
local test_type
sketchdir=$(dirname "$sketch") if [ $options -eq 0 ] && [ -f $sketchdir/cfg.json ]; then
sketchname=$(basename "$sketchdir") len=`jq -r --arg chip $target '.targets[] | select(.name==$chip) | .fqbn | length' $sketchdir/cfg.json`
test_type=$(basename "$(dirname "$sketchdir")")
if [ "$options" -eq 0 ] && [ -f "$sketchdir"/ci.json ]; then
len=$(jq -r --arg target "$target" '.fqbn[$target] | length' "$sketchdir"/ci.json)
if [ "$len" -eq 0 ]; then
len=1
fi
else
len=1
fi
if [ "$len" -eq 1 ]; then
sdkconfig_path="$HOME/.arduino/tests/$target/$sketchname/build.tmp/sdkconfig"
else
sdkconfig_path="$HOME/.arduino/tests/$target/$sketchname/build0.tmp/sdkconfig"
fi
if [ -f "$sketchdir"/ci.json ]; then
# If the target or platform is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json)
selected_platform=$(jq -r --arg platform "$platform" '.platforms[$platform]' "$sketchdir"/ci.json)
if [[ $is_target == "false" ]] || [[ $selected_platform == "false" ]]; then
printf "\033[93mSkipping %s test for %s, platform: %s\033[0m\n" "$sketchname" "$target" "$platform"
printf "\n\n\n"
return 0
fi
fi
if [ ! -f "$sdkconfig_path" ]; then
printf "\033[93mSketch %s build not found in %s\nMight be due to missing target requirements or build failure\033[0m\n" "$(dirname "$sdkconfig_path")" "$sketchname"
printf "\n\n\n"
return 0
fi
local compiled_target
compiled_target=$(grep -E "CONFIG_IDF_TARGET=" "$sdkconfig_path" | cut -d'"' -f2)
if [ "$compiled_target" != "$target" ]; then
printf "\033[91mError: Sketch %s compiled for %s, expected %s\033[0m\n" "$sketchname" "$compiled_target" "$target"
printf "\n\n\n"
return 1
fi
if [ "$len" -eq 1 ]; then
# build_dir="$sketchdir/build"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build.tmp"
report_file="$sketchdir/$target/$sketchname.xml"
fi
for i in $(seq 0 $((len - 1))); do
fqbn="Default"
if [ "$len" -ne 1 ]; then
fqbn=$(jq -r --arg target "$target" --argjson i "$i" '.fqbn[$target] | sort | .[$i]' "$sketchdir"/ci.json)
elif [ -f "$sketchdir"/ci.json ]; then
has_fqbn=$(jq -r --arg target "$target" '.fqbn[$target]' "$sketchdir"/ci.json)
if [ "$has_fqbn" != "null" ]; then
fqbn=$(jq -r --arg target "$target" '.fqbn[$target] | .[0]' "$sketchdir"/ci.json)
fi
fi
printf "\033[95mRunning test: %s -- Config: %s\033[0m\n" "$sketchname" "$fqbn"
if [ "$erase_flash" -eq 1 ]; then
esptool.py -c "$target" erase_flash
fi
if [ "$len" -ne 1 ]; then
# build_dir="$sketchdir/build$i"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build$i.tmp"
report_file="$sketchdir/$target/$sketchname$i.xml"
fi
if [ $platform == "wokwi" ]; then
extra_args=("--target" "$target" "--embedded-services" "arduino,wokwi" "--wokwi-timeout=$wokwi_timeout")
if [[ -f "$sketchdir/scenario.yaml" ]]; then
extra_args+=("--wokwi-scenario" "$sketchdir/scenario.yaml")
fi
if [[ -f "$sketchdir/diagram.$target.json" ]]; then
extra_args+=("--wokwi-diagram" "$sketchdir/diagram.$target.json")
fi
elif [ $platform == "qemu" ]; then
PATH=$HOME/qemu/bin:$PATH
extra_args=("--embedded-services" "qemu" "--qemu-image-path" "$build_dir/$sketchname.ino.merged.bin")
if [ "$target" == "esp32" ] || [ "$target" == "esp32s3" ]; then
extra_args+=("--qemu-prog-path" "qemu-system-xtensa" "--qemu-cli-args=\"-machine $target -m 4M -nographic\"")
elif [ "$target" == "esp32c3" ]; then
extra_args+=("--qemu-prog-path" "qemu-system-riscv32" "--qemu-cli-args=\"-machine $target -icount 3 -nographic\"")
else
printf "\033[91mUnsupported QEMU target: %s\033[0m\n" "$target"
exit 1
fi
else
extra_args=("--embedded-services" "esp,arduino")
fi
rm "$sketchdir"/diagram.json 2>/dev/null || true
result=0
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" -o junit_suite_name=%s_%s_%s_%s%s %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "$test_type" "$platform" "$target" "$sketchname" "$i" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" -o junit_suite_name=${test_type}_${platform}_${target}_${sketchname}${i} ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\n"
if [ $result -ne 0 ]; then if [ $result -ne 0 ]; then
result=0 return $result
printf "\033[95mRetrying test: %s -- Config: %s\033[0m\n" "$sketchname" "$i"
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" -o junit_suite_name=%s_%s_%s_%s%s %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "$test_type" "$platform" "$target" "$sketchname" "$i" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" -o junit_suite_name=${test_type}_${platform}_${target}_${sketchname}${i} ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\n"
if [ $result -ne 0 ]; then
printf "\033[91mFailed test: %s -- Config: %s\033[0m\n\n" "$sketchname" "$i"
error=$result
fi
fi fi
done done
return $error
} }
SCRIPTS_DIR="./.github/scripts" SCRIPTS_DIR="./.github/scripts"
COUNT_SKETCHES="${SCRIPTS_DIR}/sketch_utils.sh count" COUNT_SKETCHES="${SCRIPTS_DIR}/sketch_utils.sh count"
platform="hardware"
wokwi_timeout=60000
chunk_run=0 chunk_run=0
options=0 options=0
erase=0 erase=0
while [ -n "$1" ]; do while [ ! -z "$1" ]; do
case $1 in case $1 in
-c ) -c )
chunk_run=1 chunk_run=1
;; ;;
-Q )
if [ ! -d "$QEMU_PATH" ]; then
echo "QEMU path $QEMU_PATH does not exist"
exit 1
fi
platform="qemu"
;;
-W )
shift
wokwi_timeout=$1
if [[ -z $WOKWI_CLI_TOKEN ]]; then
echo "Wokwi CLI token is not set"
exit 1
fi
platform="wokwi"
;;
-o ) -o )
options=1 options=1
;; ;;
@ -189,10 +79,6 @@ while [ -n "$1" ]; do
echo "$USAGE" echo "$USAGE"
exit 0 exit 0
;; ;;
-type )
shift
test_type=$1
;;
* ) * )
break break
;; ;;
@ -200,54 +86,32 @@ while [ -n "$1" ]; do
shift shift
done done
if [ ! $platform == "qemu" ]; then source ${SCRIPTS_DIR}/install-arduino-ide.sh
source "${SCRIPTS_DIR}/install-arduino-ide.sh"
fi
# If sketch is provided and test type is not, test type is inferred from the sketch path
if [[ $test_type == "all" ]] || [[ -z $test_type ]]; then
if [ -n "$sketch" ]; then
tmp_sketch_path=$(find tests -name "$sketch".ino)
test_type=$(basename "$(dirname "$(dirname "$tmp_sketch_path")")")
echo "Sketch $sketch test type: $test_type"
test_folder="$PWD/tests/$test_type"
else
test_folder="$PWD/tests"
fi
else
test_folder="$PWD/tests/$test_type"
fi
if [ $chunk_run -eq 0 ]; then if [ $chunk_run -eq 0 ]; then
if [ -z "$sketch" ]; then run_test $target $PWD/tests/$sketch/$sketch.ino $options $erase
echo "ERROR: Sketch name is required for single test run"
exit 1
fi
run_test "$target" "$test_folder"/"$sketch"/"$sketch".ino $options $erase
exit $?
else else
if [ "$chunk_max" -le 0 ]; then if [ "$chunk_max" -le 0 ]; then
echo "ERROR: Chunks count must be positive number" echo "ERROR: Chunks count must be positive number"
exit 1 return 1
fi fi
if [ "$chunk_index" -ge "$chunk_max" ] && [ "$chunk_max" -ge 2 ]; then if [ "$chunk_index" -ge "$chunk_max" ] && [ "$chunk_max" -ge 2 ]; then
echo "ERROR: Chunk index must be less than chunks count" echo "ERROR: Chunk index must be less than chunks count"
exit 1 return 1
fi fi
set +e set +e
# Ignore requirements as we don't have the libs. The requirements will be checked in the run_test function ${COUNT_SKETCHES} $PWD/tests $target
${COUNT_SKETCHES} "$test_folder" "$target" "1"
sketchcount=$? sketchcount=$?
set -e set -e
sketches=$(cat sketches.txt) sketches=$(cat sketches.txt)
rm -rf sketches.txt rm -rf sketches.txt
chunk_size=$(( sketchcount / chunk_max )) chunk_size=$(( $sketchcount / $chunk_max ))
all_chunks=$(( chunk_max * chunk_size )) all_chunks=$(( $chunk_max * $chunk_size ))
if [ "$all_chunks" -lt "$sketchcount" ]; then if [ "$all_chunks" -lt "$sketchcount" ]; then
chunk_size=$(( chunk_size + 1 )) chunk_size=$(( $chunk_size + 1 ))
fi fi
start_index=0 start_index=0
@ -256,35 +120,31 @@ else
start_index=$chunk_index start_index=$chunk_index
end_index=$sketchcount end_index=$sketchcount
else else
start_index=$(( chunk_index * chunk_size )) start_index=$(( $chunk_index * $chunk_size ))
if [ "$sketchcount" -le "$start_index" ]; then if [ "$sketchcount" -le "$start_index" ]; then
exit 0 echo "Skipping job"
return 0
fi fi
end_index=$(( $(( chunk_index + 1 )) * chunk_size )) end_index=$(( $(( $chunk_index + 1 )) * $chunk_size ))
if [ "$end_index" -gt "$sketchcount" ]; then if [ "$end_index" -gt "$sketchcount" ]; then
end_index=$sketchcount end_index=$sketchcount
fi fi
fi fi
start_num=$(( $start_index + 1 ))
sketchnum=0 sketchnum=0
error=0
for sketch in $sketches; do for sketch in $sketches; do
sketchnum=$((sketchnum + 1)) sketchnum=$(($sketchnum + 1))
if [ "$sketchnum" -le "$start_index" ] \ if [ "$sketchnum" -le "$start_index" ] \
|| [ "$sketchnum" -gt "$end_index" ]; then || [ "$sketchnum" -gt "$end_index" ]; then
continue continue
fi fi
echo ""
echo "Sketch Index $(($sketchnum - 1))"
printf "\033[95mSketch Index %s\033[0m\n" "$((sketchnum - 1))" run_test $target $sketch $options $erase
exit_code=0
run_test "$target" "$sketch" $options $erase || exit_code=$?
if [ $exit_code -ne 0 ]; then
error=$exit_code
fi
done done
exit $error
fi fi

@ -1,6 +1,4 @@
#!/bin/bash
- # Disable shellcheck warning about using 'cat' to read a file.
- # shellcheck disable=SC2002
# For reference: add tools for all boards by replacing one line in each board
# "[board].upload.tool=esptool_py" to "[board].upload.tool=esptool_py\n[board].upload.tool.default=esptool_py\n[board].upload.tool.network=esp_ota"
@ -24,15 +22,7 @@ ESP_ARDUINO_VERSION_MINOR="$2"
ESP_ARDUINO_VERSION_PATCH="$3" ESP_ARDUINO_VERSION_PATCH="$3"
ESP_ARDUINO_VERSION="$ESP_ARDUINO_VERSION_MAJOR.$ESP_ARDUINO_VERSION_MINOR.$ESP_ARDUINO_VERSION_PATCH" ESP_ARDUINO_VERSION="$ESP_ARDUINO_VERSION_MAJOR.$ESP_ARDUINO_VERSION_MINOR.$ESP_ARDUINO_VERSION_PATCH"
- # Get ESP-IDF version from push.yml (this way we can ensure that the version is correct even if the local libs are not up to date)
- ESP_IDF_VERSION=$(grep "idf_ver:" .github/workflows/push.yml | sed 's/.*release-v\([^"]*\).*/\1/')
- if [ -z "$ESP_IDF_VERSION" ]; then
-   echo "Error: ESP-IDF version not found in push.yml" >&2
-   exit 1
- fi

echo "New Arduino Version: $ESP_ARDUINO_VERSION"
- echo "ESP-IDF Version: $ESP_IDF_VERSION"

echo "Updating platform.txt..."
cat platform.txt | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > __platform.txt && mv __platform.txt platform.txt
@ -40,28 +30,10 @@ cat platform.txt | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > __platfor
echo "Updating package.json..." echo "Updating package.json..."
cat package.json | sed "s/.*\"version\":.*/ \"version\": \"$ESP_ARDUINO_VERSION\",/g" > __package.json && mv __package.json package.json cat package.json | sed "s/.*\"version\":.*/ \"version\": \"$ESP_ARDUINO_VERSION\",/g" > __package.json && mv __package.json package.json
echo "Updating docs/conf_common.py..."
cat docs/conf_common.py | \
sed "s/.. |version| replace:: .*/.. |version| replace:: $ESP_ARDUINO_VERSION/g" | \
sed "s/.. |idf_version| replace:: .*/.. |idf_version| replace:: $ESP_IDF_VERSION/g" > docs/__conf_common.py && mv docs/__conf_common.py docs/conf_common.py
echo "Updating .gitlab/workflows/common.yml..."
cat .gitlab/workflows/common.yml | \
sed "s/ESP_IDF_VERSION:.*/ESP_IDF_VERSION: \"$ESP_IDF_VERSION\"/g" | \
sed "s/ESP_ARDUINO_VERSION:.*/ESP_ARDUINO_VERSION: \"$ESP_ARDUINO_VERSION\"/g" > .gitlab/workflows/__common.yml && mv .gitlab/workflows/__common.yml .gitlab/workflows/common.yml
echo "Updating cores/esp32/esp_arduino_version.h..." echo "Updating cores/esp32/esp_arduino_version.h..."
cat cores/esp32/esp_arduino_version.h | \ cat cores/esp32/esp_arduino_version.h | \
sed "s/#define ESP_ARDUINO_VERSION_MAJOR.*/#define ESP_ARDUINO_VERSION_MAJOR $ESP_ARDUINO_VERSION_MAJOR/g" | \ sed "s/#define ESP_ARDUINO_VERSION_MAJOR.*/#define ESP_ARDUINO_VERSION_MAJOR $ESP_ARDUINO_VERSION_MAJOR/g" | \
sed "s/#define ESP_ARDUINO_VERSION_MINOR.*/#define ESP_ARDUINO_VERSION_MINOR $ESP_ARDUINO_VERSION_MINOR/g" | \ sed "s/#define ESP_ARDUINO_VERSION_MINOR.*/#define ESP_ARDUINO_VERSION_MINOR $ESP_ARDUINO_VERSION_MINOR/g" | \
sed "s/#define ESP_ARDUINO_VERSION_PATCH.*/#define ESP_ARDUINO_VERSION_PATCH $ESP_ARDUINO_VERSION_PATCH/g" > __esp_arduino_version.h && mv __esp_arduino_version.h cores/esp32/esp_arduino_version.h sed "s/#define ESP_ARDUINO_VERSION_PATCH.*/#define ESP_ARDUINO_VERSION_PATCH $ESP_ARDUINO_VERSION_PATCH/g" > __esp_arduino_version.h && mv __esp_arduino_version.h cores/esp32/esp_arduino_version.h
libraries=$(find libraries -maxdepth 1 -mindepth 1 -type d -exec basename {} \;)
for lib in $libraries; do
if [ -f "libraries/$lib/library.properties" ]; then
echo "Updating Library $lib..."
cat "libraries/$lib/library.properties" | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > "libraries/$lib/__library.properties" && mv "libraries/$lib/__library.properties" "libraries/$lib/library.properties"
fi
done
exit 0 exit 0

@ -1,236 +0,0 @@
#!/usr/bin/env python3
# This script is used to re-package the esptool if needed and update the JSON file
# for the Arduino ESP32 platform.
#
# The script has only been tested on macOS.
#
# For regular esptool releases, the generated packages already contain the correct permissions,
# extensions and are uploaded to the GitHub release assets. In this case, the script will only
# update the JSON file with the information from the GitHub release.
#
# The script can be used in two modes:
# 1. Local build: The build artifacts must be already downloaded and extracted in the base_folder.
# This is useful for esptool versions that are not yet released and that are grabbed from the
# GitHub build artifacts.
# 2. Release build: The script will get the release information from GitHub and update the JSON file.
# This is useful for esptool versions that are already released and that are uploaded to the
# GitHub release assets.
#
# For local build, the artifacts must be already downloaded and extracted in the base_folder
# set with the -l option.
# For example, a base folder "esptool" should contain the following folders extracted directly
# from the GitHub build artifacts:
# esptool/esptool-linux-aarch64
# esptool/esptool-linux-amd64
# esptool/esptool-linux-armv7
# esptool/esptool-macos-amd64
# esptool/esptool-macos-arm64
# esptool/esptool-windows-amd64
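#
# Example invocations (illustrative only -- the script file name and the paths shown
# here are assumptions, not taken from this repository):
#   python3 repackage_esptool.py 5.0.dev1 -l ./esptool   # local mode: repack extracted build artifacts
#   python3 repackage_esptool.py 4.8.1                   # release mode: read assets from the GitHub release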
import argparse
import json
import os
import shutil
import stat
import tarfile
import zipfile
import hashlib
import requests
from pathlib import Path
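# Hash a file and return the digest in the "SHA-256:<hex>" form used by the package index.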
def compute_sha256(filepath):
sha256 = hashlib.sha256()
with open(filepath, "rb") as f:
for block in iter(lambda: f.read(4096), b""):
sha256.update(block)
return f"SHA-256:{sha256.hexdigest()}"
def get_file_size(filepath):
return os.path.getsize(filepath)
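# Update the esptool_py tool entry in the copied JSON index for a single host:
# bump the version and replace (or append) the matching "systems" record.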
def update_json_for_host(tmp_json_path, version, host, url, archiveFileName, checksum, size):
with open(tmp_json_path) as f:
data = json.load(f)
for pkg in data.get("packages", []):
for tool in pkg.get("tools", []):
if tool.get("name") == "esptool_py":
tool["version"] = version
if url is None:
# If the URL is not set, we need to find the old URL and update it
for system in tool.get("systems", []):
if system.get("host") == host:
url = system.get("url").replace(system.get("archiveFileName"), archiveFileName)
break
else:
print(f"No old URL found for host {host}. Using empty URL.")
url = ""
# Preserve existing systems order and update or append the new system
systems = tool.get("systems", [])
system_updated = False
for i, system in enumerate(systems):
if system.get("host") == host:
systems[i] = {
"host": host,
"url": url,
"archiveFileName": archiveFileName,
"checksum": checksum,
"size": str(size),
}
system_updated = True
break
if not system_updated:
systems.append({
"host": host,
"url": url,
"archiveFileName": archiveFileName,
"checksum": checksum,
"size": str(size),
})
tool["systems"] = systems
with open(tmp_json_path, "w") as f:
json.dump(data, f, indent=2, sort_keys=False, ensure_ascii=False)
f.write("\n")
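# Point the esptool_py entry in every platform's "toolsDependencies" list at the new version.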
def update_tools_dependencies(tmp_json_path, version):
with open(tmp_json_path) as f:
data = json.load(f)
for pkg in data.get("packages", []):
for platform in pkg.get("platforms", []):
for dep in platform.get("toolsDependencies", []):
if dep.get("name") == "esptool_py":
dep["version"] = version
with open(tmp_json_path, "w") as f:
json.dump(data, f, indent=2, sort_keys=False, ensure_ascii=False)
f.write("\n")
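# Pack each extracted esptool-<target> directory into a .zip (Windows) or .tar.gz (other hosts),
# normalizing file permissions before creating the tarballs.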
def create_archives(version, base_folder):
archive_files = []
for dirpath in Path(base_folder).glob("esptool-*"):
if not dirpath.is_dir():
continue
base = dirpath.name[len("esptool-"):]
if "windows" in dirpath.name:
zipfile_name = f"esptool-v{version}-{base}.zip"
print(f"Creating {zipfile_name} from {dirpath} ...")
with zipfile.ZipFile(zipfile_name, "w", zipfile.ZIP_DEFLATED) as zipf:
for root, _, files in os.walk(dirpath):
for file in files:
full_path = os.path.join(root, file)
zipf.write(full_path, os.path.relpath(full_path, start=dirpath))
archive_files.append(zipfile_name)
else:
tarfile_name = f"esptool-v{version}-{base}.tar.gz"
print(f"Creating {tarfile_name} from {dirpath} ...")
for root, dirs, files in os.walk(dirpath):
for name in dirs + files:
os.chmod(os.path.join(root, name), stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
stat.S_IRGRP | stat.S_IXGRP |
stat.S_IROTH | stat.S_IXOTH)
with tarfile.open(tarfile_name, "w:gz") as tar:
tar.add(dirpath, arcname=dirpath.name)
archive_files.append(tarfile_name)
return archive_files
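# Map an archive name to the Arduino host triplet(s) it serves; the Windows build covers both
# 64-bit and 32-bit mingw hosts.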
def determine_hosts(archive_name):
if "linux-amd64" in archive_name:
return ["x86_64-pc-linux-gnu"]
elif "linux-armv7" in archive_name:
return ["arm-linux-gnueabihf"]
elif "linux-aarch64" in archive_name:
return ["aarch64-linux-gnu"]
elif "macos-amd64" in archive_name:
return ["x86_64-apple-darwin"]
elif "macos-arm64" in archive_name:
return ["arm64-apple-darwin"]
elif "windows-amd64" in archive_name:
return ["x86_64-mingw32", "i686-mingw32"]
else:
return []
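# Local build mode: compute checksum and size of each freshly created archive and patch the
# JSON entry of every host that archive serves.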
def update_json_from_local_build(tmp_json_path, version, base_folder, archive_files):
for archive in archive_files:
print(f"Processing archive: {archive}")
hosts = determine_hosts(archive)
if not hosts:
print(f"Skipping unknown archive type: {archive}")
continue
archive_path = Path(archive)
checksum = compute_sha256(archive_path)
size = get_file_size(archive_path)
for host in hosts:
update_json_for_host(tmp_json_path, version, host, None, archive_path.name, checksum, size)
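# Release mode: take the download URL, size and digest of each esptool asset directly from the
# GitHub release metadata.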
def update_json_from_release(tmp_json_path, version, release_info):
assets = release_info.get("assets", [])
for asset in assets:
if (asset.get("name").endswith(".tar.gz") or asset.get("name").endswith(".zip")) and "esptool" in asset.get("name"):
asset_fname = asset.get("name")
print(f"Processing asset: {asset_fname}")
hosts = determine_hosts(asset_fname)
if not hosts:
print(f"Skipping unknown archive type: {asset_fname}")
continue
asset_url = asset.get("browser_download_url")
asset_checksum = asset.get("digest").replace("sha256:", "SHA-256:")
asset_size = asset.get("size")
if asset_checksum is None:
asset_checksum = ""
print(f"Asset {asset_fname} has no checksum. Please set the checksum in the JSON file.")
for host in hosts:
update_json_for_host(tmp_json_path, version, host, asset_url, asset_fname, asset_checksum, asset_size)
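# Fetch the release metadata for tag v<version> from the GitHub API.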
def get_release_info(version):
url = f"https://api.github.com/repos/espressif/esptool/releases/tags/v{version}"
response = requests.get(url)
response.raise_for_status()
return response.json()
def main():
parser = argparse.ArgumentParser(description="Repack esptool and update JSON metadata.")
parser.add_argument("version", help="Version of the esptool (e.g. 5.0.dev1)")
parser.add_argument("-l", "--local", dest="base_folder", help="Enable local build mode and set the base folder with unpacked artifacts")
args = parser.parse_args()
script_dir = Path(__file__).resolve().parent
json_path = (script_dir / "../../package/package_esp32_index.template.json").resolve()
tmp_json_path = Path(str(json_path) + ".tmp")
shutil.copy(json_path, tmp_json_path)
local_build = args.base_folder is not None
if local_build:
os.chdir(args.base_folder)
os.environ['COPYFILE_DISABLE'] = 'true' # this disables including resource forks in tar files on macOS
# Clear any existing archive files
for file in Path(args.base_folder).glob("esptool-*.*"):
file.unlink()
archive_files = create_archives(args.version, args.base_folder)
update_json_from_local_build(tmp_json_path, args.version, args.base_folder, archive_files)
else:
release_info = get_release_info(args.version)
update_json_from_release(tmp_json_path, args.version, release_info)
print(f"Updating esptool version fields to {args.version}")
update_tools_dependencies(tmp_json_path, args.version)
shutil.move(tmp_json_path, json_path)
print(f"Done. JSON updated at {json_path}")
if __name__ == "__main__":
main()

@ -1,12 +1,11 @@
#!/bin/bash
CHANGED_FILES=$1
- echo "Pushing '$CHANGED_FILES' as github-actions[bot]"
- git config --global github.user "github-actions[bot]"
- git config --global user.name "github-actions[bot]"
- git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
+ echo "Pushing '$CHANGED_FILES' as $GITHUB_ACTOR"
+ git config --global github.user "$GITHUB_ACTOR"
+ git config --global user.name "$GITHUB_ACTOR"
+ git config --global user.email "$GITHUB_ACTOR@users.noreply.github.com"
for tool in $CHANGED_FILES; do
-   git add tools/"$tool".exe
+   git add tools/$tool.exe
done
- git commit -m "change(tools): Push generated binaries to PR"
+ git commit -m "Push binary to tools"
git push

@ -1,6 +1,6 @@
name: Boards Test - Remote trigger
- # The workflow will run on remote dispatch with event-type set to "test-boards"
+ # The workflow will run on remote dispath with event-type set to "test-boards"

on:
  repository_dispatch:
    types: [test-boards]
@ -15,12 +15,13 @@ jobs:
    steps:
      - name: Checkout repository
-       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+       uses: actions/checkout@v4
        with:
          ref: ${{ github.event.client_payload.branch }}

      - name: Get boards fqbns
-       run: bash .github/scripts/find_all_boards.sh
+       run:
+         bash .github/scripts/find_all_boards.sh

  setup-chunks:
    needs: find-boards
@ -32,17 +33,18 @@ jobs:
    steps:
      - name: Checkout repository
-       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+       uses: actions/checkout@v4
        with:
          ref: ${{ github.event.client_payload.branch }}

      - run: npm install

      - name: Setup jq
-       uses: dcarbone/install-jq-action@e397bd87438d72198f81efd21f876461183d383a # v3.0.1
+       uses: dcarbone/install-jq-action@v1.0.1

      - id: set-test-chunks
        name: Set Chunks
-       run: echo "test-chunks<<EOF" >> $GITHUB_OUTPUT
+       run:
+         echo "test-chunks<<EOF" >> $GITHUB_OUTPUT
          echo "$( jq -nc '${{ needs.find-boards.outputs.fqbns }} | [_nwise( ${{ needs.find-boards.outputs.board-count }}/15 | ceil)]')" >> $GITHUB_OUTPUT
@ -64,17 +66,18 @@ jobs:
    steps:
      - name: Checkout repository
-       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+       uses: actions/checkout@v4
        with:
          ref: ${{ github.event.client_payload.branch }}

      - name: Echo FQBNS to file
-       run: echo "$FQBN" > fqbns.json
+       run:
+         echo "$FQBN" > fqbns.json
        env:
          FQBN: ${{ toJSON(matrix.chunk) }}

      - name: Compile sketch
-       uses: P-R-O-C-H-Y/compile-sketches@a62f069b92dc8f5053da4ac439ea6d1950cf6379 # main
+       uses: P-R-O-C-H-Y/compile-sketches@main
        with:
          platforms: |
            ${{ env.REPOSITORY }}
@ -85,4 +88,5 @@ jobs:
          enable-warnings-report: false
          cli-compile-flags: |
            - --warnings="all"
-         sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
+         sketch-paths:
+           "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"

@ -3,10 +3,6 @@ name: Boards Test
# The workflow will run on schedule and labeled pull requests
on:
  pull_request:
-   paths:
-     - "boards.txt"
-     - "libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
-     - ".github/workflows/boards.yml"

env:
  # It's convenient to set variables for values used multiple times in the workflow
@ -22,13 +18,14 @@ jobs:
    steps:
      # This step makes the contents of the repository available to the workflow
      - name: Checkout repository
-       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+       uses: actions/checkout@v4

      - name: Setup jq
-       uses: dcarbone/install-jq-action@e397bd87438d72198f81efd21f876461183d383a # v3.0.1
+       uses: dcarbone/install-jq-action@v1.0.1

      - name: Get board name
-       run: bash .github/scripts/find_new_boards.sh ${{ github.repository }} ${{github.base_ref}}
+       run:
+         bash .github/scripts/find_new_boards.sh ${{ github.repository }} ${{github.event.number}}

  test-boards:
    needs: find-boards
@ -41,13 +38,12 @@ jobs:
name: "espressif:esp32" name: "espressif:esp32"
strategy: strategy:
fail-fast: false
matrix: ${{ fromJson(needs.find-boards.outputs.fqbns) }} matrix: ${{ fromJson(needs.find-boards.outputs.fqbns) }}
steps: steps:
# This step makes the contents of the repository available to the workflow # This step makes the contents of the repository available to the workflow
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
- name: Check if build.board is uppercase - name: Check if build.board is uppercase
run: | run: |
@ -59,21 +55,8 @@ jobs:
            exit 1;
          fi

-     - name: Get libs cache
-       uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
-       with:
-         key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
-         path: |
-           ./tools/dist
-           ./tools/esp32-arduino-libs
-           ./tools/esptool
-           ./tools/mk*
-           ./tools/openocd-esp32
-           ./tools/riscv32-*
-           ./tools/xtensa-*

      - name: Compile sketch
-       uses: P-R-O-C-H-Y/compile-sketches@a62f069b92dc8f5053da4ac439ea6d1950cf6379 # main
+       uses: P-R-O-C-H-Y/compile-sketches@main
        with:
          platforms: |
            ${{ env.REPOSITORY }}
@ -84,5 +67,5 @@ jobs:
          cli-compile-flags: |
            - --warnings="all"
          exit-on-fail: true
-         sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
-         verbose: true
+         sketch-paths:
+           "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"

@ -3,44 +3,35 @@ name: Build Python Tools
on: on:
pull_request: pull_request:
paths: paths:
- ".github/workflows/build_py_tools.yml" - 'tools/get.py'
- "tools/get.py" - 'tools/espota.py'
- "tools/espota.py" - 'tools/gen_esp32part.py'
- "tools/gen_esp32part.py" - 'tools/gen_insights_package.py'
- "tools/gen_insights_package.py"
jobs: jobs:
find-changed-tools: find-changed-tools:
name: Check if tools have been changed name: Check if tools have been changed
runs-on: ubuntu-latest runs-on: ubuntu-20.04
outputs: outputs:
any_changed: ${{ steps.verify-changed-files.outputs.any_changed }} any_changed: ${{ steps.verify-changed-files.outputs.any_changed }}
all_changed_files: ${{ steps.verify-changed-files.outputs.all_changed_files }} all_changed_files: ${{ steps.verify-changed-files.outputs.all_changed_files }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with: with:
fetch-depth: 2 fetch-depth: 2
ref: ${{ github.event.pull_request.head.ref }} ref: ${{ github.event.pull_request.head.ref }}
- name: Check if checkout failed
if: failure()
run: |
echo "Checkout failed."
echo "Make sure you are using a branch inside the repository and not a fork."
- name: Verify Python Tools Changed - name: Verify Python Tools Changed
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1 uses: tj-actions/changed-files@v41
id: verify-changed-files id: verify-changed-files
with: with:
fetch_depth: "2" fetch_depth: '2'
since_last_remote_commit: "true" since_last_remote_commit: 'true'
files: | files: |
tools/get.py tools/get.py
tools/espota.py tools/espota.py
tools/gen_esp32part.py tools/gen_esp32part.py
tools/gen_insights_package.py tools/gen_insights_package.py
- name: List all changed files - name: List all changed files
shell: bash shell: bash
run: | run: |
@ -56,21 +47,27 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
os: [windows-latest, macos-latest, ubuntu-latest, ubuntu-24.04-arm] os: [windows-latest, macos-latest, ubuntu-20.04, ARM, ARM64]
include: include:
- os: windows-latest - os: windows-latest
TARGET: win64 TARGET: win64
EXTEN: .exe EXTEN: .exe
SEPARATOR: ";" SEPARATOR: ';'
- os: macos-latest - os: macos-latest
TARGET: macos TARGET: macos
SEPARATOR: ":" SEPARATOR: ':'
- os: ubuntu-latest - os: ubuntu-20.04
TARGET: linux-amd64 TARGET: linux-amd64
SEPARATOR: ":" SEPARATOR: ':'
- os: ubuntu-24.04-arm - os: ARM
CONTAINER: python:3.8-bullseye
TARGET: arm TARGET: arm
SEPARATOR: ":" SEPARATOR: ':'
- os: ARM64
CONTAINER: python:3.8-bullseye
TARGET: arm64
SEPARATOR: ':'
container: ${{ matrix.CONTAINER }} # use python container on ARM
env: env:
DISTPATH: pytools-${{ matrix.TARGET }} DISTPATH: pytools-${{ matrix.TARGET }}
PIP_EXTRA_INDEX_URL: "https://dl.espressif.com/pypi" PIP_EXTRA_INDEX_URL: "https://dl.espressif.com/pypi"
@ -89,30 +86,26 @@ jobs:
for tool in ${{ env.CHANGED_TOOLS }}; do for tool in ${{ env.CHANGED_TOOLS }}; do
echo "tool $tool was changed" echo "tool $tool was changed"
done done
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with: with:
token: ${{ secrets.TOOLS_UPLOAD_PAT }}
ref: ${{ github.event.pull_request.head.ref }} ref: ${{ github.event.pull_request.head.ref }}
- name: Set up Python 3.8 - name: Set up Python 3.8
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4 # Skip setting python on ARM because of missing compatibility: https://github.com/actions/setup-python/issues/108
if: matrix.os != 'ARM' && matrix.os != 'ARM64'
uses: actions/setup-python@master
with: with:
python-version: 3.8 python-version: 3.8
- name: Install dependencies - name: Install dependencies
run: | run: |
python -m pip install --upgrade pip python -m pip install --upgrade pip
pip install pyinstaller requests pip install pyinstaller requests
- name: Build with PyInstaller - name: Build with PyInstaller
shell: bash shell: bash
run: | run: |
for tool in ${{ env.CHANGED_TOOLS }}; do for tool in ${{ env.CHANGED_TOOLS }}; do
pyinstaller --distpath ./${{ env.DISTPATH }} -F --icon=.github/pytools/espressif.ico tools/$tool.py pyinstaller --distpath ./${{ env.DISTPATH }} -F --icon=.github/pytools/espressif.ico tools/$tool.py
done done
- name: Sign binaries - name: Sign binaries
if: matrix.os == 'windows-latest' if: matrix.os == 'windows-latest'
env: env:
@ -125,14 +118,12 @@ jobs:
{ {
./.github/pytools/Sign-File.ps1 -Path ./${{ env.DISTPATH }}/$node.exe ./.github/pytools/Sign-File.ps1 -Path ./${{ env.DISTPATH }}/$node.exe
} }
- name: Test binaries - name: Test binaries
shell: bash shell: bash
run: | run: |
for tool in ${{ env.CHANGED_TOOLS }}; do for tool in ${{ env.CHANGED_TOOLS }}; do
./${{ env.DISTPATH }}/$tool${{ matrix.EXTEN }} -h ./${{ env.DISTPATH }}/$tool${{ matrix.EXTEN }} -h
done done
- name: Push binary to tools - name: Push binary to tools
if: matrix.os == 'windows-latest' if: matrix.os == 'windows-latest'
env: env:
@ -143,9 +134,8 @@ jobs:
cp -f ./${{ env.DISTPATH }}/$tool.exe tools/$tool.exe cp -f ./${{ env.DISTPATH }}/$tool.exe tools/$tool.exe
done done
bash .github/scripts/upload_py_tools.sh "${{ env.CHANGED_TOOLS }}" bash .github/scripts/upload_py_tools.sh "${{ env.CHANGED_TOOLS }}"
- name: Archive artifact - name: Archive artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 uses: actions/upload-artifact@master
with: with:
name: ${{ env.DISTPATH }} name: ${{ env.DISTPATH }}
path: ${{ env.DISTPATH }} path: ${{ env.DISTPATH }}

@ -1,31 +0,0 @@
name: CodeQL Actions Analysis
on:
workflow_dispatch:
push:
branches:
- master
pull_request:
paths:
- ".github/workflows/*.yml"
- ".github/workflows/*.yaml"
jobs:
codeql-analysis:
name: CodeQL Actions Analysis
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: actions
config-file: ./.github/codeql/codeql-config.yml
- name: Run CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: "Analysis: Actions"

@ -1,30 +0,0 @@
name: CodeQL Python Analysis
on:
workflow_dispatch:
push:
branches:
- master
pull_request:
paths:
- "**/*.py"
jobs:
codeql-analysis:
name: CodeQL Python Analysis
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: python
config-file: ./.github/codeql/codeql-config.yml
- name: Run CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: "Analysis: Python"

@ -12,17 +12,11 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Check out PR head
-       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+       uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}

      - name: DangerJS pull request linter
-       uses: espressif/shared-github-dangerjs@fb17367fd3e8ff7412603b8e946d9b19ffdb2d7f # v1
+       uses: espressif/shared-github-dangerjs@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-       with:
-         instructions-cla-link: "https://cla-assistant.io/espressif/arduino-esp32"
-         instructions-contributions-file: "docs/en/contributing.rst"
-         rule-max-commits: "false"
-         rule-target-branch: "false"
-         commit-messages-min-summary-length: "10"

@ -4,16 +4,17 @@ on:
  push:
    branches:
      - master
-     - release/v2.x
+     - release/*
    paths:
-     - "docs/**"
-     - ".github/workflows/docs_build.yml"
+     - 'docs/**'
+     - '.github/workflows/docs_build.yml'
  pull_request:
    paths:
-     - "docs/**"
-     - ".github/workflows/docs_build.yml"
+     - 'docs/**'
+     - '.github/workflows/docs_build.yml'

jobs:
  build-docs:
    name: Build ESP-Docs
    runs-on: ubuntu-22.04
@ -21,16 +22,12 @@ jobs:
      run:
        shell: bash
    steps:
-     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+     - uses: actions/checkout@v4
        with:
          submodules: true
-     - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
+     - uses: actions/setup-python@v5
        with:
-         cache-dependency-path: docs/requirements.txt
-         cache: "pip"
-         python-version: "3.10"
+         python-version: '3.10'
      - name: Build
        run: |
          sudo apt update
@ -40,9 +37,8 @@ jobs:
          cd ./docs
          PATH=/home/runner/.local/bin:$PATH pip3 install -r requirements.txt --prefer-binary
          PATH=/home/runner/.local/bin:$PATH SPHINXOPTS="-W" build-docs -l en
      - name: Archive Docs
-       uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+       uses: actions/upload-artifact@v2
        with:
          name: docs
          path: docs

@ -1,19 +1,18 @@
name: Documentation Build and Production Deploy CI

on:
- workflow_run:
-   workflows: ["ESP32 Arduino Release"]
-   types:
-     - completed
+ release:
+   types: [published]
  push:
    branches:
-     - release/v2.x
+     - release/*
      - master
    paths:
-     - "docs/**"
-     - ".github/workflows/docs_deploy.yml"
+     - 'docs/**'
+     - '.github/workflows/docs_deploy.yml'

jobs:
  deploy-prod-docs:
    name: Deploy Documentation on Production
    runs-on: ubuntu-22.04
@ -21,22 +20,12 @@ jobs:
      run:
        shell: bash
    steps:
-     - name: Check if release workflow is successful
-       if: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.conclusion != 'success' }}
-       run: |
-         echo "Release workflow failed. Exiting..."
-         exit 1
-     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+     - uses: actions/checkout@v4
        with:
          submodules: true
-     - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
+     - uses: actions/setup-python@v5
        with:
-         cache-dependency-path: docs/requirements.txt
-         cache: "pip"
-         python-version: "3.10"
+         python-version: '3.10'
      - name: Deploy Documentation
        env:
          # Deploy to production server

@ -6,18 +6,17 @@ on:
      - master
      - pages
    paths:
-     - "README.md"
-     - ".github/scripts/on-pages.sh"
-     - ".github/workflows/gh-pages.yml"
+     - 'README.md'
+     - '.github/scripts/on-pages.sh'
+     - '.github/workflows/gh-pages.yml'

jobs:
  build-pages:
    name: Build GitHub Pages
    runs-on: ubuntu-latest
    steps:
-     - name: Checkout repository
-       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+     - uses: actions/checkout@v4
      - name: Copy Files
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/hil.yml (new file, 118 lines)
@ -0,0 +1,118 @@
name: Run tests in hardware
on:
pull_request:
types: [opened, reopened, synchronize, labeled]
schedule:
- cron: '0 2 * * *'
env:
MAX_CHUNKS: 15
concurrency:
group: hil-${{github.event.pull_request.number || github.ref}}
cancel-in-progress: true
jobs:
gen_chunks:
if: |
contains(github.event.pull_request.labels.*.name, 'hil_test') ||
(github.event_name == 'schedule' && github.repository == 'espressif/arduino-esp32')
name: Generate Chunks matrix
runs-on: ubuntu-latest
outputs:
chunks: ${{ steps.gen-chunks.outputs.chunks }}
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Generate Chunks matrix
id: gen-chunks
run: |
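# Count the available test sketches and cap the number of chunks at MAX_CHUNKS.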
set +e
.github/scripts/sketch_utils.sh count tests
sketches=$?
if [[ $sketches -ge ${{env.MAX_CHUNKS}} ]]; then
sketches=${{env.MAX_CHUNKS}}
fi
set -e
rm sketches.txt
CHUNKS=$(jq -c -n '$ARGS.positional' --args `seq 0 1 $((sketches - 1))`)
echo "chunks=${CHUNKS}" >>$GITHUB_OUTPUT
Build:
needs: gen_chunks
name: ${{matrix.chip}}-Build#${{matrix.chunks}}
runs-on: ubuntu-latest
strategy:
matrix:
chip: ['esp32', 'esp32s2', 'esp32s3', 'esp32c3', 'esp32c6', 'esp32h2']
chunks: ${{fromJson(needs.gen_chunks.outputs.chunks)}}
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Build sketches
run: |
bash .github/scripts/tests_build.sh -c -t ${{matrix.chip}} -i ${{matrix.chunks}} -m ${{env.MAX_CHUNKS}}
- name: Upload ${{matrix.chip}}-${{matrix.chunks}} artifacts
uses: actions/upload-artifact@v3
with:
name: ${{matrix.chip}}-${{matrix.chunks}}.artifacts
path: |
~/.arduino/tests/*/build*.tmp/*.bin
~/.arduino/tests/*/build*.tmp/*.json
if-no-files-found: error
Test:
needs: [gen_chunks, Build]
name: ${{matrix.chip}}-Test#${{matrix.chunks}}
strategy:
fail-fast: false
matrix:
chip: ['esp32', 'esp32s2', 'esp32s3', 'esp32c3', 'esp32c6', 'esp32h2']
chunks: ${{fromJson(needs.gen_chunks.outputs.chunks)}}
runs-on: [arduino, "${{matrix.chip}}"]
container:
image: python:3.10.1-bullseye
options: --privileged
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Download ${{matrix.chip}}-${{matrix.chunks}} artifacts
uses: actions/download-artifact@v3
with:
name: ${{matrix.chip}}-${{matrix.chunks}}.artifacts
path: ~/.arduino/tests/
- name: Install dependencies
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
apt update && apt install -y -qq jq
- name: Run Tests
run: |
bash .github/scripts/tests_run.sh -c -t ${{matrix.chip}} -i ${{matrix.chunks}} -m ${{env.MAX_CHUNKS}} -e
- name: Upload test result artifacts
uses: actions/upload-artifact@v3
if: always()
with:
name: test_results-${{matrix.chip}}-${{matrix.chunks}}
path: tests/*/*.xml
event_file:
name: "Event File"
if: |
contains(github.event.pull_request.labels.*.name, 'hil_test') ||
github.event_name == 'schedule'
needs: Test
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v3
with:
name: Event File
path: ${{github.event_path}}

@ -9,8 +9,7 @@
{ {
"name": "ArduinoBLE", "name": "ArduinoBLE",
"exclude_targets": [ "exclude_targets": [
"esp32s2", "esp32s2"
"esp32p4"
], ],
"sketch_path": [ "sketch_path": [
"~/Arduino/libraries/ArduinoBLE/examples/Central/Scan/Scan.ino" "~/Arduino/libraries/ArduinoBLE/examples/Central/Scan/Scan.ino"
@ -24,52 +23,16 @@
] ]
}, },
{ {
"source-url": "https://github.com/ESP32Async/ESPAsyncWebServer.git", "source-url": "https://github.com/me-no-dev/ESPAsyncWebServer.git",
"required-libs": [ "required-libs": [
{"source-url": "https://github.com/ESP32Async/AsyncTCP.git"} {"source-url": "https://github.com/me-no-dev/AsyncTCP.git"}
], ],
"exclude_targets": [], "exclude_targets": [],
"sketch_path": [ "sketch_path": [
"~/Arduino/libraries/ESPAsyncWebServer/examples/Auth/Auth.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/CORS/CORS.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/CaptivePortal/CaptivePortal.ino", "~/Arduino/libraries/ESPAsyncWebServer/examples/CaptivePortal/CaptivePortal.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/CatchAllHandler/CatchAllHandler.ino", "~/Arduino/libraries/ESPAsyncWebServer/examples/ESP_AsyncFSBrowser/ESP_AsyncFSBrowser.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ChunkResponse/ChunkResponse.ino", "~/Arduino/libraries/ESPAsyncWebServer/examples/regex_patterns/regex_patterns.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ChunkRetryResponse/ChunkRetryResponse.ino", "~/Arduino/libraries/ESPAsyncWebServer/examples/simple_server/simple_server.ino"
"~/Arduino/libraries/ESPAsyncWebServer/examples/EndBegin/EndBegin.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Filters/Filters.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/FlashResponse/FlashResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/HeaderManipulation/HeaderManipulation.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Headers/Headers.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Json/Json.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Logging/Logging.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/MessagePack/MessagePack.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Middleware/Middleware.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Params/Params.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/PartitionDownloader/PartitionDownloader.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/PerfTests/PerfTests.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/RateLimit/RateLimit.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Redirect/Redirect.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/RequestContinuation/RequestContinuation.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/RequestContinuationComplete/RequestContinuationComplete.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ResumableDownload/ResumableDownload.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Rewrite/Rewrite.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ServerSentEvents/ServerSentEvents.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ServerState/ServerState.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/SkipServerMiddleware/SkipServerMiddleware.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/SlowChunkResponse/SlowChunkResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/StaticFile/StaticFile.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Templates/Templates.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Upload/Upload.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/WebSocket/WebSocket.ino"
]
},
{
"name": "EthernetESP32",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/EthernetESP32/examples/LegacyEthernetTest/LegacyEthernetTest.ino",
"~/Arduino/libraries/EthernetESP32/examples/TwoEthernets/TwoEthernets.ino"
] ]
}, },
{ {
@ -99,13 +62,5 @@
"sketch_path": [ "sketch_path": [
"~/Arduino/libraries/WS2812FX/examples/ws2812fx_spi/ws2812fx_spi.ino" "~/Arduino/libraries/WS2812FX/examples/ws2812fx_spi/ws2812fx_spi.ino"
] ]
},
{
"name": "ZACwire for TSic",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/ZACwire_for_TSic/examples/ReadingTwoTSICs/ReadingTwoTSICs.ino",
"~/Arduino/libraries/ZACwire_for_TSic/examples/ReadSingleTSIC206/ReadSingleTSIC206.ino"
]
} }
] ]

View file

@ -7,11 +7,7 @@ on:
# Schedule weekly builds on every Sunday at 4 am # Schedule weekly builds on every Sunday at 4 am
schedule: schedule:
- cron: "0 4 * * SUN" - cron: '0 4 * * SUN'
concurrency:
group: libs-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env: env:
# It's convenient to set variables for values used multiple times in the workflow # It's convenient to set variables for values used multiple times in the workflow
@ -27,6 +23,7 @@ jobs:
contains(github.event.pull_request.labels.*.name, 'lib_test') || contains(github.event.pull_request.labels.*.name, 'lib_test') ||
(github.event_name == 'schedule' && github.repository == 'espressif/arduino-esp32') (github.event_name == 'schedule' && github.repository == 'espressif/arduino-esp32')
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
REPOSITORY: | REPOSITORY: |
- source-path: '.' - source-path: '.'
@ -41,7 +38,6 @@ jobs:
- esp32s3 - esp32s3
- esp32c6 - esp32c6
- esp32h2 - esp32h2
- esp32p4
include: include:
- target: esp32 - target: esp32
@ -56,16 +52,15 @@ jobs:
fqbn: espressif:esp32:esp32c6 fqbn: espressif:esp32:esp32c6
- target: esp32h2 - target: esp32h2
fqbn: espressif:esp32:esp32h2 fqbn: espressif:esp32:esp32h2
- target: esp32p4
fqbn: espressif:esp32:esp32p4
steps: steps:
# This step makes the contents of the repository available to the workflow # This step makes the contents of the repository available to the workflow
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
- name: Compile sketch - name: Compile sketch
uses: P-R-O-C-H-Y/compile-sketches@a62f069b92dc8f5053da4ac439ea6d1950cf6379 # main uses: P-R-O-C-H-Y/compile-sketches@main
with: with:
platforms: | platforms: |
${{ env.REPOSITORY }} ${{ env.REPOSITORY }}
@ -80,9 +75,9 @@ jobs:
- --warnings="all" - --warnings="all"
- name: Upload artifact - name: Upload artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 uses: actions/upload-artifact@v3
with: with:
name: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}-${{ matrix.target }} name: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}
path: ${{ env.SKETCHES_REPORTS_PATH }} path: ${{ env.SKETCHES_REPORTS_PATH }}
report-to-file: report-to-file:
@ -92,54 +87,49 @@ jobs:
steps: steps:
# Check out repository # Check out repository
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 uses: actions/checkout@v4
with: with:
token: ${{ env.GITHUB_TOKEN }} token: ${{ env.GITHUB_TOKEN }}
fetch-depth: "0" fetch-depth: '0'
- name: Switch branch - name: Switch branch
run: git checkout remotes/origin/gh-pages run:
git checkout remotes/origin/gh-pages
# This step is needed to get the size data produced by the compile jobs # This step is needed to get the size data produced by the compile jobs
- name: Download sketches reports artifact - name: Download sketches reports artifact
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 uses: actions/download-artifact@v3
with: with:
pattern: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}-* name: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}
merge-multiple: true
path: ${{ env.SKETCHES_REPORTS_PATH }} path: ${{ env.SKETCHES_REPORTS_PATH }}
- name: Report results - name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@4a79caa6dcc3579024293638b97156106edc588e # main uses: P-R-O-C-H-Y/report-size-deltas@main
with: with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }} sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
destination-file: ${{ env.RESULT_LIBRARY_TEST_FILE }} destination-file: ${{ env.RESULT_LIBRARY_TEST_FILE }}
- name: Append file with action URL - name: Append file with action URL
run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_LIBRARY_TEST_FILE }} run:
echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_LIBRARY_TEST_FILE }}
- name: Push to github repo - name: Push to github repo
run: | run: |
git config user.name "github-actions[bot]" git config user.name github-actions
git config user.email "41898282+github-actions[bot]@users.noreply.github.com" git config user.email github-actions@github.com
git add ${{ env.RESULT_LIBRARY_TEST_FILE }} git add ${{ env.RESULT_LIBRARY_TEST_FILE }}
git commit -m "Generated External Libraries Test Results" git commit -m "Generated External Libraries Test Results"
git push origin HEAD:gh-pages git push origin HEAD:gh-pages
#Upload PR number as artifact event_file:
upload-pr-number: name: "Event File"
name: Upload PR number if: |
if: (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'lib_test')) contains(github.event.pull_request.labels.*.name, 'lib_test')
needs: compile-sketch
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Save the PR number in an artifact - name: Upload
shell: bash uses: actions/upload-artifact@v2
env:
PR_NUM: ${{ github.event.number }}
run: echo $PR_NUM > pr_num.txt
- name: Upload PR number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with: with:
name: pr_number name: Event File
path: ./pr_num.txt path: ${{github.event_path}}
overwrite: true

@ -1,64 +0,0 @@
# This needs to be in a separate workflow because it requires higher permissions than the calling workflow
name: Report Pre-commit Check Status
on:
workflow_run:
workflows: [Pre-commit hooks]
types:
- completed
permissions:
statuses: write
jobs:
report-success:
name: Report pre-commit success
if: github.event.workflow_run.conclusion == 'success'
runs-on: ubuntu-latest
steps:
- name: Report success
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Pre-commit checks',
description: 'Pre-commit checks successful',
owner: owner,
repo: repo,
sha: sha,
state: 'success',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}'
})).data;
core.info(`${name} is ${state}`);
report-pending:
name: Report pre-commit pending
if: github.event.workflow_run.conclusion != 'success'
runs-on: ubuntu-latest
steps:
- name: Report pending
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Pre-commit checks',
description: 'The pre-commit checks need to be successful before merging',
owner: owner,
repo: repo,
sha: sha,
state: 'pending',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}'
})).data;
core.info(`${name} is ${state}`);

@ -1,80 +0,0 @@
name: Pre-commit hooks
on:
workflow_dispatch:
push:
branches:
- master
pull_request:
types: [opened, reopened, synchronize, labeled]
concurrency:
group: pre-commit-${{github.event.pull_request.number || github.ref}}
cancel-in-progress: true
jobs:
lint:
if: |
github.event_name != 'pull_request' ||
contains(github.event.pull_request.labels.*.name, 'Status: Pending Merge') ||
contains(github.event.pull_request.labels.*.name, 'Re-trigger Pre-commit Hooks')
name: Check if fixes are needed
runs-on: ubuntu-latest
steps:
- name: Checkout latest commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 2
- name: Remove Label
if: contains(github.event.pull_request.labels.*.name, 'Re-trigger Pre-commit Hooks')
run: gh pr edit ${{ github.event.number }} --remove-label 'Re-trigger Pre-commit Hooks'
env:
GH_TOKEN: ${{ github.token }}
- name: Set up Python 3
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
cache-dependency-path: tools/pre-commit/requirements.txt
cache: "pip"
python-version: "3.x"
- name: Get Python version hash
run: |
echo "Using $(python -VV)"
echo "PY_HASH=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
- name: Restore pre-commit cache
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
id: restore-cache
with:
path: |
~/.cache/pre-commit
key: pre-commit-${{ env.PY_HASH }}-${{ hashFiles('.pre-commit-config.yaml', '.github/workflows/pre-commit.yml', 'tools/pre-commit/requirements.txt') }}
- name: Install python dependencies
run: python -m pip install -r tools/pre-commit/requirements.txt
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
- name: Run pre-commit hooks in changed files
run: pre-commit run --color=always --show-diff-on-failure --files ${{ steps.changed-files.outputs.all_changed_files }}
- name: Save pre-commit cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: ${{ always() && steps.restore-cache.outputs.cache-hit != 'true' }}
continue-on-error: true
with:
path: |
~/.cache/pre-commit
key: ${{ steps.restore-cache.outputs.cache-primary-key }}
- name: Push changes using pre-commit-ci-lite
uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0
# Only push changes in PRs
if: ${{ always() && github.event_name == 'pull_request' }}
with:
msg: "ci(pre-commit): Apply automatic fixes"

.github/workflows/publish.yml (new file, 38 lines)
@ -0,0 +1,38 @@
name: Unit Test Results
on:
workflow_run:
workflows: [Run tests in hardware]
branches-ignore: [master]
types:
- completed
jobs:
unit-test-results:
name: Unit Test Results
runs-on: ubuntu-latest
if: |
github.event.workflow_run.event == 'pull_request' &&
(github.event.workflow_run.conclusion == 'success' ||
github.event.workflow_run.conclusion == 'failure')
steps:
- name: Download and Extract Artifacts
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
run: |
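# Download every artifact produced by the triggering hardware-test run and unzip it into ./artifacts.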
mkdir -p artifacts && cd artifacts
artifacts_url=${{ github.event.workflow_run.artifacts_url }}
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do
IFS=$'\t' read name url <<< "$artifact"
gh api $url > "$name.zip"
unzip -d "$name" "$name.zip"
done
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@v1
with:
commit: ${{ github.event.workflow_run.head_sha }}
event_file: artifacts/Event File/event.json
event_name: ${{ github.event.workflow_run.event }}
files: "artifacts/**/*.xml"

@ -11,6 +11,7 @@ env:
  # It's convenient to set variables for values used multiple times in the workflow
  SKETCHES_REPORTS_PATH: artifacts/libraries-report
  GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
+ PR_EVENT_PATH: artifacts/Event File/event.json

jobs:
  lib-test-results:
@ -24,33 +25,17 @@ jobs:
- name: Download and Extract Artifacts - name: Download and Extract Artifacts
run: | run: |
mkdir -p artifacts && cd artifacts mkdir -p artifacts && cd artifacts
mkdir -p libraries-report
mkdir -p workflows
artifacts_url=${{ github.event.workflow_run.artifacts_url }} artifacts_url=${{ github.event.workflow_run.artifacts_url }}
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do do
IFS=$'\t' read name url <<< "$artifact" IFS=$'\t' read name url <<< "$artifact"
gh api $url > "$name.zip" gh api $url > "$name.zip"
unzip -j "$name.zip" -d "temp_$name" unzip -d "$name" "$name.zip"
if [[ "$name" == "pr_number" ]]; then
mv "temp_$name"/* workflows
else
mv "temp_$name"/* libraries-report
fi
rm -r "temp_$name"
done done
echo "Contents of parent directory:"
ls -R ..
- name: Read the pr_num file
id: pr_num_reader
uses: juliangruber/read-file-action@b549046febe0fe86f8cb4f93c24e284433f9ab58 # v1.1.7
with:
path: ./artifacts/workflows/pr_num.txt
- name: Report results - name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@256d1f13e4195cd7fd436d2f959e6dc4d5e4b406 # libs uses: P-R-O-C-H-Y/report-size-deltas@main
with: with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }} sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
github-token: ${{ env.GITHUB_TOKEN }} github-token: ${{ env.GITHUB_TOKEN }}
pr-number: "${{ steps.pr_num_reader.outputs.content }}" pr-event-path: ${{ env.PR_EVENT_PATH }}

View file

@@ -1,52 +0,0 @@
name: Sizes Results (master-v2.x)
on:
workflow_dispatch:
env:
# It's convenient to set variables for values used multiple times in the workflow
SKETCHES_REPORTS_PATH: artifacts/sizes-report
RESULT_SIZES_TEST_FILE: SIZES_TEST.md
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
jobs:
sizes-test-results:
name: Sizes Comparison Results
runs-on: ubuntu-latest
steps:
- name: Checkout gh-pages branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: gh-pages
- name: Create folder structure
run: |
mkdir -p artifacts && cd artifacts
mkdir -p sizes-report
mkdir -p sizes-report/master
mkdir -p sizes-report/pr
# master folder is a base for comparison
# pr folder is for comparison with master
- name: Download JSON file
run: |
mv master_cli_compile/*.json artifacts/sizes-report/pr/
mv v2.x_cli_compile/*.json artifacts/sizes-report/master/
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@2043188c68f483a7b50527c4eacf609d05bb67a5 # sizes_v2
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
github-token: ${{ env.GITHUB_TOKEN }}
destination-file: ${{ env.RESULT_SIZES_TEST_FILE }}
- name: Append file with action URL
run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_SIZES_TEST_FILE }}
- name: Push to github repo
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add ${{ env.RESULT_SIZES_TEST_FILE }}
git commit -m "Generated Sizes Results (master-v2.x)"
git push origin HEAD:gh-pages
@@ -1,73 +0,0 @@
name: Sizes Results
on:
workflow_run:
workflows: [Compilation Tests]
types:
- completed
workflow_dispatch:
env:
# It's convenient to set variables for values used multiple times in the workflow
SKETCHES_REPORTS_PATH: artifacts/sizes-report
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
jobs:
sizes-test-results:
name: Sizes Comparison Results
runs-on: ubuntu-latest
if: |
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
steps:
- name: Checkout gh-pages branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: gh-pages
- name: Create folder structure
run: |
mkdir -p artifacts && cd artifacts
mkdir -p sizes-report
mkdir -p sizes-report/master
mkdir -p sizes-report/pr
- name: Download JSON file
run: |
mv master_cli_compile/*.json artifacts/sizes-report/master/
- name: Download and Extract Artifacts
run: |
cd artifacts
artifacts_url=${{ github.event.workflow_run.artifacts_url }}
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do
IFS=$'\t' read name url <<< "$artifact"
# Only process pr_number and pr_cli_compile artifacts
if [[ "$name" == "pr_number" || "$name" =~ ^pr_cli_compile_[0-9]+$ ]]; then
gh api $url > "$name.zip"
unzip -o -j "$name.zip" -d "temp_$name"
if [[ "$name" == "pr_number" ]]; then
mv "temp_$name"/* sizes-report
elif [[ "$name" =~ ^pr_cli_compile_[0-9]+$ ]]; then
mv "temp_$name"/* sizes-report/pr
fi
rm -r "temp_$name"
fi
done
echo "Contents of parent directory:"
ls -R ..
- name: Read the pr_num file
id: pr_num_reader
uses: juliangruber/read-file-action@b549046febe0fe86f8cb4f93c24e284433f9ab58 # v1.1.7
with:
path: ./artifacts/sizes-report/pr_num.txt
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@bea91d2c99ca80c88a883b39b1c4012f00ec3d09 # sizes_v2
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
github-token: ${{ env.GITHUB_TOKEN }}
pr-number: "${{ steps.pr_num_reader.outputs.content }}"
@@ -1,224 +1,56 @@
-name: Compilation Tests
+name: ESP32 Arduino CI
 on:
 workflow_dispatch:
-inputs:
-log_level:
-description: "Log level"
-default: "none"
-type: "choice"
-required: true
-options:
-- "none"
-- "error"
-- "warn"
-- "info"
-- "debug"
-- "verbose"
-schedule:
-# Every Sunday at 2:00 UTC run a build with verbose log level
-- cron: "0 2 * * SUN"
 push:
 branches:
 - master
 - release/*
 pull_request:
-paths:
-- "cores/**"
-- "libraries/**"
-- "!libraries/**.md"
-- "!libraries/**.txt"
-- "!libraries/**.properties"
-- "!libraries/**.py"
-- "package/**"
-- "idf_component_examples/**"
-- "tools/**.py"
-- "platform.txt"
-- "programmers.txt"
-- "idf_component.yml"
-- "Kconfig.projbuild"
-- "package.json"
-- "CMakeLists.txt"
-- ".github/workflows/push.yml"
-- ".github/scripts/**"
-- "!.github/scripts/find_*"
-- "!.github/scripts/on-release.sh"
-- "!.github/scripts/tests_*"
-- "!.github/scripts/upload_*"
-- "variants/esp32/**/*"
-- "variants/esp32c3/**/*"
-- "variants/esp32c5/**/*"
-- "variants/esp32c6/**/*"
-- "variants/esp32h2/**/*"
-- "variants/esp32p4/**/*"
-- "variants/esp32s2/**/*"
-- "variants/esp32s3/**/*"
 concurrency:
 group: build-${{github.event.pull_request.number || github.ref}}
 cancel-in-progress: true
-env:
-MAX_CHUNKS: 15
 jobs:
 cmake-check:
 name: Check cmake file
 runs-on: ubuntu-latest
-if: ${{ !(github.event_name == 'pull_request' && startsWith(github.head_ref, 'release/')) }}
 steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+- uses: actions/checkout@v4
 - run: bash ./.github/scripts/check-cmakelists.sh
-gen-chunks:
-name: Generate chunks
-runs-on: ubuntu-latest
-if: ${{ !(github.event_name == 'pull_request' && startsWith(github.head_ref, 'release/')) }}
-outputs:
-build_all: ${{ steps.set-chunks.outputs.build_all }}
-build_libraries: ${{ steps.set-chunks.outputs.build_libraries }}
-build_static_sketches: ${{ steps.set-chunks.outputs.build_static_sketches }}
-build_idf: ${{ steps.set-chunks.outputs.build_idf }}
-chunk_count: ${{ steps.set-chunks.outputs.chunk_count }}
-chunks: ${{ steps.set-chunks.outputs.chunks }}
-steps:
-- name: Checkout repository
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-with:
-fetch-depth: 2
-- name: Get changed files
-id: changed-files
-uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
-with:
-files_yaml: |
-core:
-- '.github/**'
-- 'cores/**'
-- 'package/**'
-- 'tools/**'
-- 'platform.txt'
-- 'programmers.txt'
-- "variants/esp32/**/*"
-- "variants/esp32c3/**/*"
-- "variants/esp32c6/**/*"
-- "variants/esp32h2/**/*"
-- "variants/esp32p4/**/*"
-- "variants/esp32s2/**/*"
-- "variants/esp32s3/**/*"
-libraries:
-- 'libraries/**/examples/**'
-- 'libraries/**/src/**'
-networking:
-- 'libraries/Network/src/**'
-fs:
-- 'libraries/FS/src/**'
-static_sketeches:
-- 'libraries/NetworkClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino'
-- 'libraries/BLE/examples/Server/Server.ino'
-- 'libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino'
-- 'libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino'
-- 'libraries/NetworkClientSecure/src/**'
-- 'libraries/BLE/src/**'
-- 'libraries/Insights/src/**'
-idf:
-- 'idf_component.yml'
-- 'Kconfig.projbuild'
-- 'CMakeLists.txt'
-- "idf_component_examples/**"
-- name: Set chunks
-id: set-chunks
-env:
-LIB_FILES: ${{ steps.changed-files.outputs.libraries_all_changed_files }}
-IS_PR: ${{ github.event_name == 'pull_request' }}
-MAX_CHUNKS: ${{ env.MAX_CHUNKS }}
-BUILD_IDF: ${{ steps.changed-files.outputs.idf_any_changed == 'true' }}
-BUILD_LIBRARIES: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
-BUILD_STATIC_SKETCHES: ${{ steps.changed-files.outputs.static_sketeches_any_changed == 'true' }}
-FS_CHANGED: ${{ steps.changed-files.outputs.fs_any_changed == 'true' }}
-NETWORKING_CHANGED: ${{ steps.changed-files.outputs.networking_any_changed == 'true' }}
-CORE_CHANGED: ${{ steps.changed-files.outputs.core_any_changed == 'true' }}
-LIB_CHANGED: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
-run: |
-bash ./.github/scripts/set_push_chunks.sh
-- name: Upload sketches found
-if: ${{ steps.set-chunks.outputs.build_all == 'false' && steps.set-chunks.outputs.build_libraries == 'true' }}
-uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-with:
-name: sketches_found
-path: sketches_found.txt
-overwrite: true
-if-no-files-found: error
 # Ubuntu
 build-arduino-linux:
 name: Arduino ${{ matrix.chunk }} on ubuntu-latest
-if: ${{ needs.gen-chunks.outputs.build_all == 'true' || needs.gen-chunks.outputs.build_libraries == 'true' }}
-needs: gen-chunks
 runs-on: ubuntu-latest
 strategy:
 fail-fast: false
 matrix:
-chunk: ${{ fromJson(needs.gen-chunks.outputs.chunks) }}
+chunk: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
 steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+- uses: actions/checkout@v4
-- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
+- uses: actions/setup-python@v5
 with:
-python-version: "3.x"
+python-version: '3.x'
-- name: Get libs cache
-uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
-with:
-key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
-path: |
-./tools/dist
-./tools/esp32-arduino-libs
-./tools/esptool
-./tools/mk*
-./tools/openocd-esp32
-./tools/riscv32-*
-./tools/xtensa-*
-- name: Set Log Level
-run: |
-if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
-echo "LOG_LEVEL=${{ github.event.inputs.log_level }}" >> $GITHUB_ENV
-elif [ "${{ github.event_name }}" == "schedule" ]; then
-echo "LOG_LEVEL=verbose" >> $GITHUB_ENV
-else
-echo "LOG_LEVEL=none" >> $GITHUB_ENV
-fi
-- name: Build all sketches
-if: ${{ needs.gen-chunks.outputs.build_all == 'true' }}
-run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ env.MAX_CHUNKS }} 1 ${{ env.LOG_LEVEL }}
-- name: Download sketches found
-if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
-uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
-with:
-name: sketches_found
-- name: Build selected sketches
-if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
-run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ needs.gen-chunks.outputs.chunk_count }} 1 ${{ env.LOG_LEVEL }} sketches_found.txt
-#Upload cli compile json as artifact
-- name: Upload cli compile json
-uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-with:
-name: pr_cli_compile_${{ matrix.chunk }}
-path: cli_compile_${{ matrix.chunk }}.json
-overwrite: true
+- name: Cache tools
+id: cache-linux
+uses: actions/cache@v4
+with:
+path: |
+./tools/dist
+~/arduino_ide
+key: ${{ runner.os }}-${{ hashFiles('package/package_esp32_index.template.json',
+'tools/get.py',
+'.github/scripts/install-arduino-ide.sh') }}
+- name: Build Sketches
+run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} 15
 # Windows and MacOS
 build-arduino-win-mac:
 name: Arduino on ${{ matrix.os }}
-needs: gen-chunks
-if: ${{ needs.gen-chunks.outputs.build_all == 'true' || needs.gen-chunks.outputs.build_static_sketches == 'true' }}
 runs-on: ${{ matrix.os }}
 strategy:
 fail-fast: false
@@ -226,21 +58,33 @@ jobs:
 os: [windows-latest, macOS-latest]
 steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+- uses: actions/checkout@v4
-- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
+- uses: actions/setup-python@v5
 with:
-python-version: "3.x"
+python-version: '3.x'
 - name: Build Sketches
 run: bash ./.github/scripts/on-push.sh
+# PlatformIO on Windows, Ubuntu and Mac
+build-platformio:
+name: PlatformIO on ${{ matrix.os }}
+runs-on: ${{ matrix.os }}
+strategy:
+fail-fast: false
+matrix:
+os: [ubuntu-latest, windows-latest, macOS-latest]
+steps:
+- uses: actions/checkout@v4
+- uses: actions/setup-python@v5
+with:
+python-version: '3.x'
+- name: Build Sketches
+run: bash ./.github/scripts/on-push.sh 1 1 #equal and non-zero to trigger PIO
 build-esp-idf-component:
 name: Build with ESP-IDF ${{ matrix.idf_ver }} for ${{ matrix.idf_target }}
-needs: gen-chunks
-if: |
-needs.gen-chunks.outputs.build_all == 'true' ||
-needs.gen-chunks.outputs.build_libraries == 'true' ||
-needs.gen-chunks.outputs.build_idf == 'true'
-runs-on: ubuntu-latest
+runs-on: ubuntu-20.04
 strategy:
 fail-fast: false
 matrix:
@@ -248,96 +92,21 @@ jobs:
 # See https://hub.docker.com/r/espressif/idf/tags and
 # https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-guides/tools/idf-docker-image.html
 # for details.
-idf_ver: ["release-v5.3","release-v5.4","release-v5.5"]
+idf_ver: ["release-v5.1"]
-idf_target:
-[
-"esp32",
-"esp32s2",
-"esp32s3",
-"esp32c2",
-"esp32c3",
-"esp32c6",
-"esp32h2",
-"esp32p4"
-]
+idf_target: ["esp32", "esp32s2", "esp32s3", "esp32c2", "esp32c3", "esp32c6", "esp32h2"]
 container: espressif/idf:${{ matrix.idf_ver }}
 steps:
 - name: Check out arduino-esp32 as a component
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
 with:
 submodules: recursive
 path: components/arduino-esp32
-- name: Setup jq
-uses: dcarbone/install-jq-action@e397bd87438d72198f81efd21f876461183d383a # v3.0.1
 - name: Build
 env:
 IDF_TARGET: ${{ matrix.idf_target }}
 shell: bash
 run: |
-chmod a+x ./components/arduino-esp32/.github/scripts/*
-./components/arduino-esp32/.github/scripts/on-push-idf.sh
+. ${IDF_PATH}/export.sh
+idf.py create-project test
+echo CONFIG_FREERTOS_HZ=1000 > test/sdkconfig.defaults
+idf.py -C test -DEXTRA_COMPONENT_DIRS=$PWD/components build
-- name: Upload generated sdkconfig files for debugging
-uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-if: always()
-with:
-name: sdkconfig-${{ matrix.idf_ver }}-${{ matrix.idf_target }}
-path: ./components/arduino-esp32/idf_component_examples/**/sdkconfig
-# Save artifacts to gh-pages
-save-master-artifacts:
-name: Save master artifacts
-needs: build-arduino-linux
-if: github.event_name == 'push' && github.ref == 'refs/heads/master'
-runs-on: ubuntu-latest
-steps:
-# Check out repository
-- name: Checkout repository
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-with:
-token: ${{secrets.GITHUB_TOKEN}}
-fetch-depth: "0"
-- name: Switch branch
-run: git checkout remotes/origin/gh-pages
-- name: Download sketches reports artifact
-uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
-with:
-pattern: pr_cli_compile_*
-merge-multiple: true
-path: master_cli_compile
-- name: List files in the directory
-run: ls -R
-- name: Commit json files to gh-pages if on master
-if: github.event_name == 'push' && github.ref == 'refs/heads/master'
-continue-on-error: true
-run: |
-git config user.name "github-actions[bot]"
-git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
-git add --all
-git commit -m "Updated cli compile json files"
-git push origin HEAD:gh-pages
-#Upload PR number as artifact
-upload-pr-number:
-name: Upload PR number
-if: ${{ github.event_name == 'pull_request' && !startsWith(github.head_ref, 'release/') }}
-runs-on: ubuntu-latest
-steps:
-- name: Save the PR number in an artifact
-shell: bash
-env:
-PR_NUM: ${{ github.event.number }}
-run: echo $PR_NUM > pr_num.txt
-- name: Upload PR number
-uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-with:
-name: pr_number
-path: ./pr_num.txt
-overwrite: true
@@ -10,22 +10,12 @@
 runs-on: ubuntu-latest
 steps:
-- name: Checkout repository
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+- uses: actions/checkout@v4
 with:
 fetch-depth: 0
-- name: Set up Python
-uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
+- uses: actions/setup-python@v5
 with:
-python-version: "3.x"
+python-version: '3.x'
-- name: Install packaging
-run: pip install packaging
-- name: Install pyserial
-run: pip install pyserial
 - name: Build Release
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -1,123 +0,0 @@
# This file is used to run the runtime tests for the Arduino core for ESP32.
# The tests are run on the hardware, Wokwi and QEMU emulators.
# The QEMU tests are disabled for now as they are redundant with most of the Wokwi tests.
# As the Wokwi tests require access to secrets, they are run in a separate workflow.
# We need to ensure that the artifacts from previous tests in the chain are propagated for publishing the results.
# This is the current trigger sequence for the tests:
# tests.yml -> tests_wokwi.yml -> tests_results.yml
# ⌙> tests_build.yml
# ⌙> tests_hw.yml
# ⌙> tests_qemu.yml
name: Runtime Tests
on:
workflow_dispatch:
pull_request:
types: [opened, reopened, closed, synchronize, labeled, unlabeled]
paths:
- ".github/workflows/tests*"
- ".github/scripts/*.sh"
- "!.github/scripts/check-cmakelists.sh"
- "!.github/scripts/find_*"
- "!.github/scripts/on-*.sh"
- "!.github/scripts/set_push_chunks.sh"
- "!.github/scripts/update-version.sh"
- "!.github/scripts/upload_py_tools.sh"
- "tests/**"
- "cores/**"
- "libraries/*/src/**.cpp"
- "libraries/*/src/**.h"
- "libraries/*/src/**.c"
- "package/**"
schedule:
- cron: "0 2 * * *"
concurrency:
group: tests-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
push-event-file:
name: Push event file
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: event_file
path: ${{ github.event_path }}
gen-matrix:
name: Generate matrix
runs-on: ubuntu-latest
outputs:
build-types: ${{ steps.set-matrix.outputs.build-types }}
hw-types: ${{ steps.set-matrix.outputs.hw-types }}
wokwi-types: ${{ steps.set-matrix.outputs.wokwi-types }}
qemu-types: ${{ steps.set-matrix.outputs.qemu-types }}
targets: ${{ steps.set-matrix.outputs.targets }}
env:
IS_PR: ${{ github.event.pull_request.number != null }}
PERFORMANCE_ENABLED: ${{ contains(github.event.pull_request.labels.*.name, 'perf_test') }}
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
sparse-checkout: .github/scripts/tests_matrix.sh
- name: Set matrix
id: set-matrix
run: bash .github/scripts/tests_matrix.sh
- name: Upload
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: matrix_info
path: info/*
call-build-tests:
name: Build
uses: ./.github/workflows/tests_build.yml
needs: gen-matrix
strategy:
matrix:
type: ${{ fromJson(needs.gen-matrix.outputs.build-types) }}
chip: ${{ fromJson(needs.gen-matrix.outputs.targets) }}
with:
type: ${{ matrix.type }}
chip: ${{ matrix.chip }}
call-hardware-tests:
name: Hardware
uses: ./.github/workflows/tests_hw.yml
needs: [gen-matrix, call-build-tests]
if: |
github.repository == 'espressif/arduino-esp32' &&
(github.event_name != 'pull_request' ||
contains(github.event.pull_request.labels.*.name, 'hil_test'))
strategy:
fail-fast: false
matrix:
type: ${{ fromJson(needs.gen-matrix.outputs.hw-types) }}
chip: ${{ fromJson(needs.gen-matrix.outputs.targets) }}
with:
type: ${{ matrix.type }}
chip: ${{ matrix.chip }}
# This job is disabled for now
call-qemu-tests:
name: QEMU
uses: ./.github/workflows/tests_qemu.yml
needs: [gen-matrix, call-build-tests]
if: false
strategy:
fail-fast: false
matrix:
type: ${{ fromJson(needs.gen-matrix.outputs.qemu-types) }}
chip: ["esp32", "esp32c3"]
with:
type: ${{ matrix.type }}
chip: ${{ matrix.chip }}
# Wokwi tests are run after this workflow as it needs access to secrets
@@ -1,90 +0,0 @@
name: Build tests
on:
workflow_call:
inputs:
type:
type: string
description: "Type of tests to build"
required: true
chip:
type: string
description: "Chip to build tests for"
required: true
jobs:
build-tests:
name: Build ${{ inputs.type }} tests for ${{ inputs.chip }}
runs-on: ubuntu-latest
env:
id: ${{ github.event.pull_request.number || github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}-${{ inputs.chip }}-${{ inputs.type }}
steps:
- name: Check if already built
id: cache-build-binaries
if: github.event.pull_request.number != null
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-bin
path: |
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.bin
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.elf
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.json
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/sdkconfig
- name: Evaluate if tests should be built
id: check-build
run: |
cache_exists=${{ steps.cache-build-binaries.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already built, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
- name: Checkout user repository
if: ${{ steps.check-build.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Get libs cache
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: ${{ steps.check-build.outputs.enabled == 'true' }}
with:
key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
path: |
./tools/dist
./tools/esp32-arduino-libs
./tools/esptool
./tools/mk*
./tools/openocd-esp32
./tools/riscv32-*
./tools/xtensa-*
- name: Build sketches
if: ${{ steps.check-build.outputs.enabled == 'true' }}
run: |
bash .github/scripts/tests_build.sh -c -type ${{ inputs.type }} -t ${{ inputs.chip }}
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} binaries as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-build.outputs.enabled == 'true' && github.event.pull_request.number != null
with:
key: tests-${{ env.id }}-bin
path: |
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.bin
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.elf
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.json
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/sdkconfig
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} binaries as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: tests-bin-${{ inputs.chip }}-${{ inputs.type }}
overwrite: true
path: |
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.bin
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.elf
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.json
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/sdkconfig
@@ -1,118 +0,0 @@
name: Hardware tests
on:
workflow_call:
inputs:
type:
type: string
description: "Type of tests to run"
required: true
chip:
type: string
description: "Chip to run tests for"
required: true
env:
DEBIAN_FRONTEND: noninteractive
defaults:
run:
shell: bash
jobs:
hardware-test:
name: Hardware ${{ inputs.chip }} ${{ inputs.type }} tests
runs-on: ["arduino", "${{ inputs.chip }}"]
env:
id: ${{ github.event.pull_request.number || github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}-${{ inputs.chip }}-${{ inputs.type }}
container:
image: python:3.10.1-bullseye
options: --privileged --device-cgroup-rule="c 188:* rmw" --device-cgroup-rule="c 166:* rmw"
steps:
- name: Clean workspace
run: |
rm -rf ./*
rm -rf ~/.arduino/tests
- name: Check if already passed
id: cache-results
if: github.event.pull_request.number != null
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-results-hw
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Evaluate if tests should be run
id: check-tests
run: |
cache_exists=${{ steps.cache-results.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already ran, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
- name: Checkout user repository
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
sparse-checkout: |
*
# setup-python currently only works on ubuntu images
# - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
# if: ${{ steps.check-tests.outputs.enabled == 'true' }}
# with:
# cache-dependency-path: tests/requirements.txt
# cache: 'pip'
# python-version: '3.10.1'
- name: Install dependencies
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
apt update
apt install -y jq
- name: Get binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
name: tests-bin-${{ inputs.chip }}-${{ inputs.type }}
path: |
~/.arduino/tests/${{ inputs.chip }}
- name: List binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
ls -laR ~/.arduino/tests
- name: Run Tests
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
bash .github/scripts/tests_run.sh -c -type ${{ inputs.type }} -t ${{ inputs.chip }} -i 0 -m 1 -e
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} hardware results as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-tests.outputs.enabled == 'true' && github.event.pull_request.number != null
with:
key: tests-${{ env.id }}-results-hw
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} hardware results as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: tests-results-hw-${{ inputs.chip }}-${{ inputs.type }}
overwrite: true
path: |
tests/**/*.xml
tests/**/result_*.json
@@ -1,143 +0,0 @@
name: QEMU tests
on:
workflow_call:
inputs:
chip:
required: true
type: string
type:
required: true
type: string
jobs:
qemu-test:
name: QEMU ${{ inputs.chip }} ${{ inputs.type }} tests
env:
id: ${{ github.event.pull_request.number || github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}-${{ inputs.chip }}-${{ inputs.type }}
QEMU_INSTALL_PATH: "$HOME"
runs-on: ubuntu-latest
steps:
- name: Check if already passed
id: get-cache-results
if: github.event.pull_request.number != null
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-results-qemu
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Evaluate if tests should be run
id: check-tests
run: |
cache_exists=${{ steps.get-cache-results.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already ran, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
- name: Checkout user repository
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
sparse-checkout-cone-mode: false
sparse-checkout: |
/*
!.github
# To avoid giving unknown scripts elevated permissions, download them from the master branch
- name: Get CI scripts from master
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
mkdir -p .github
cd .github
curl https://codeload.github.com/${{ github.repository }}/tar.gz/master | tar -xz --strip=2 arduino-esp32-master/.github
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
cache-dependency-path: tests/requirements.txt
cache: "pip"
python-version: "3.x"
- name: Install Python dependencies
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
- name: Install APT dependencies
uses: awalsh128/cache-apt-pkgs-action@5902b33ae29014e6ca012c5d8025d4346556bd40 # v1.4.3
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
packages: libpixman-1-0 libnuma1 libglib2.0-0 libslirp0 libsdl2-2.0-0
version: 1.0
- name: Get QEMU version
uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
id: get-qemu-version
with:
token: ${{secrets.GITHUB_TOKEN}}
owner: espressif
repo: qemu
excludes: prerelease, draft
- name: Cache QEMU
id: cache-qemu
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
path: |
~/qemu
key: qemu-${{ steps.get-qemu-version.outputs.release }}-${{ hashFiles('.github/workflows/tests_qemu.yml') }}
- name: Download QEMU
if: ${{ steps.cache-qemu.outputs.cache-hit != 'true' && steps.check-tests.outputs.enabled == 'true' }}
run: |
cd ${{ env.QEMU_INSTALL_PATH }}
underscore_release=$(echo ${{ steps.get-qemu-version.outputs.release }} | sed 's/\-/_/g')
curl -L https://github.com/espressif/qemu/releases/download/${{ steps.get-qemu-version.outputs.release }}/qemu-riscv32-softmmu-${underscore_release}-x86_64-linux-gnu.tar.xz > qemu-riscv32.tar.xz
curl -L https://github.com/espressif/qemu/releases/download/${{ steps.get-qemu-version.outputs.release }}/qemu-xtensa-softmmu-${underscore_release}-x86_64-linux-gnu.tar.xz > qemu-xtensa.tar.xz
tar -xf qemu-riscv32.tar.xz
tar -xf qemu-xtensa.tar.xz
rm qemu-*
echo "QEMU_PATH=${{ env.QEMU_INSTALL_PATH }}/qemu" >> $GITHUB_ENV
- name: Get binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
name: tests-bin-${{ inputs.chip }}-${{ inputs.type }}
path: |
~/.arduino/tests/${{ inputs.chip }}
- name: Run Tests
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: QEMU_PATH="${{ env.QEMU_INSTALL_PATH }}" bash .github/scripts/tests_run.sh -c -type ${{inputs.type}} -t ${{inputs.chip}} -i 0 -m 1 -Q
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} QEMU results as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-tests.outputs.enabled == 'true' && github.event.pull_request.number != null
with:
key: tests-${{ env.id }}-results-qemu
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} QEMU results as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: tests-results-qemu-${{ inputs.chip }}-${{ inputs.type }}
overwrite: true
path: |
tests/**/*.xml
tests/**/result_*.json
@@ -1,195 +0,0 @@
name: Publish and clean test results
on:
workflow_run:
workflows: ["Wokwi tests"]
types:
- completed
# No permissions by default
permissions: { contents: read }
jobs:
unit-test-results:
name: Unit Test Results
if: |
github.event.workflow_run.conclusion == 'success' ||
github.event.workflow_run.conclusion == 'failure' ||
github.event.workflow_run.conclusion == 'timed_out'
runs-on: ubuntu-latest
permissions:
actions: write
statuses: write
checks: write
pull-requests: write
contents: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: gh-pages
- name: Download and Extract Artifacts
uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9
with:
run_id: ${{ github.event.workflow_run.id }}
path: ./artifacts
- name: Get original info
run: |
original_event=$(cat ./artifacts/parent-artifacts/event.txt)
original_action=$(cat ./artifacts/parent-artifacts/action.txt)
original_sha=$(cat ./artifacts/parent-artifacts/sha.txt)
original_ref=$(cat ./artifacts/parent-artifacts/ref.txt)
original_conclusion=$(cat ./artifacts/parent-artifacts/conclusion.txt)
original_run_id=$(cat ./artifacts/parent-artifacts/run_id.txt)
# Sanitize the values to avoid security issues
# Event: Allow alphabetical characters and underscores
original_event=$(echo "$original_event" | tr -cd '[:alpha:]_')
# Action: Allow alphabetical characters and underscores
original_action=$(echo "$original_action" | tr -cd '[:alpha:]_')
# SHA: Allow alphanumeric characters
original_sha=$(echo "$original_sha" | tr -cd '[:alnum:]')
# Ref: Allow alphanumeric characters, slashes, underscores, dots, and dashes
original_ref=$(echo "$original_ref" | tr -cd '[:alnum:]/_.-')
# Conclusion: Allow alphabetical characters and underscores
original_conclusion=$(echo "$original_conclusion" | tr -cd '[:alpha:]_')
# Run ID: Allow numeric characters
original_run_id=$(echo "$original_run_id" | tr -cd '[:digit:]')
echo "original_event=$original_event" >> $GITHUB_ENV
echo "original_action=$original_action" >> $GITHUB_ENV
echo "original_sha=$original_sha" >> $GITHUB_ENV
echo "original_ref=$original_ref" >> $GITHUB_ENV
echo "original_conclusion=$original_conclusion" >> $GITHUB_ENV
echo "original_run_id=$original_run_id" >> $GITHUB_ENV
echo "original_event = $original_event"
echo "original_action = $original_action"
echo "original_sha = $original_sha"
echo "original_ref = $original_ref"
echo "original_conclusion = $original_conclusion"
echo "original_run_id = $original_run_id"
- name: Print links to other runs
run: |
echo "Build, Hardware and QEMU tests: https://github.com/${{ github.repository }}/actions/runs/${{ env.original_run_id }}"
echo "Wokwi tests: https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}"
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@170bf24d20d201b842d7a52403b73ed297e6645b # v2.18.0
with:
commit: ${{ env.original_sha }}
event_file: ./artifacts/parent-artifacts/event_file/event.json
event_name: ${{ env.original_event }}
files: ./artifacts/**/*.xml
action_fail: true
compare_to_earlier_commit: false
json_file: ./unity_results.json
json_suite_details: true
- name: Upload JSON
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: ${{ always() }}
with:
name: unity_results
overwrite: true
path: |
./unity_results.json
- name: Fail if tests failed
if: ${{ env.original_conclusion == 'failure' || env.original_conclusion == 'timed_out' || github.event.workflow_run.conclusion == 'failure' || github.event.workflow_run.conclusion == 'timed_out' }}
run: exit 1
- name: Clean up caches
if: always()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const ref = process.env.original_ref;
const key_prefix = 'tests-' + ref + '-';
if (process.env.original_event == 'pull_request' && process.env.original_action != 'closed') {
console.log('Skipping cache cleanup for open PR');
return;
}
await github.paginate(github.rest.actions.getActionsCacheList, {
owner: context.repo.owner,
repo: context.repo.repo,
per_page: 100,
key: key_prefix
}).then(caches => {
if (caches) {
for (const cache of caches) {
console.log(`Deleting cache: ${cache.key}`);
github.rest.actions.deleteActionsCacheById({
owner: context.repo.owner,
repo: context.repo.repo,
cache_id: cache.id
});
}
}
});
- name: Report conclusion
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = process.env.original_sha;
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: `Runtime Tests / Report results (${process.env.original_event} -> workflow_run -> workflow_run)`,
owner: owner,
repo: repo,
sha: sha,
state: '${{ job.status }}',
description: '${{ job.status }}' == 'success' ? 'Runtime tests successful' : 'Runtime tests failed',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
- name: Generate report
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
env:
REPORT_FILE: ./runtime-tests-results/RUNTIME_TESTS_REPORT.md
WOKWI_RUN_ID: ${{ github.event.workflow_run.id }}
BUILD_RUN_ID: ${{ env.original_run_id }}
IS_FAILING: ${{ env.original_conclusion == 'failure' || env.original_conclusion == 'timed_out' || github.event.workflow_run.conclusion == 'failure' || github.event.workflow_run.conclusion == 'timed_out' || job.status == 'failure' }}
run: |
rm -rf artifacts $REPORT_FILE
mv -f ./unity_results.json ./runtime-tests-results/unity_results.json
touch $REPORT_FILE
python3 ./runtime-tests-results/table_generator.py ./runtime-tests-results/unity_results.json >> $REPORT_FILE
- name: Generate badge
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
uses: jaywcjlove/generated-badges@0e078ae4d4bab3777ea4f137de496ab44688f5ad # v1.0.13
with:
label: Runtime Tests
status: ${{ job.status == 'success' && 'passing' || 'failing' }}
output: runtime-tests-results/badge.svg
color: ${{ job.status == 'success' && 'green' || 'red' }}
style: flat
- name: Push badge
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
if [[ `git status --porcelain` ]]; then
git add --all
git commit -m "Updated runtime tests report"
git push origin HEAD:gh-pages
fi
@@ -1,326 +0,0 @@
name: Wokwi tests
on:
workflow_run:
workflows: ["Runtime Tests"]
types:
- completed
# No permissions by default
permissions: { contents: read }
env:
WOKWI_TIMEOUT: 600000 # Milliseconds
jobs:
get-artifacts:
name: Get required artifacts
runs-on: ubuntu-latest
permissions:
actions: read
statuses: write
outputs:
pr_num: ${{ steps.set-ref.outputs.pr_num }}
ref: ${{ steps.set-ref.outputs.ref }}
base: ${{ steps.set-ref.outputs.base }}
targets: ${{ steps.set-ref.outputs.targets }}
types: ${{ steps.set-ref.outputs.types }}
steps:
- name: Report pending
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (Get artifacts) (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: 'pending',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
- name: Download and extract event file
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
name: event_file
path: artifacts/event_file
- name: Download and extract matrix info
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
name: matrix_info
path: artifacts/matrix_info
- name: Try to read PR number
id: set-ref
run: |
pr_num=$(jq -r '.pull_request.number' artifacts/event_file/event.json | tr -cd "[:digit:]")
if [ -z "$pr_num" ] || [ "$pr_num" == "null" ]; then
pr_num=""
fi
ref=$pr_num
if [ -z "$ref" ] || [ "$ref" == "null" ]; then
ref=${{ github.ref }}
fi
action=$(jq -r '.action' artifacts/event_file/event.json | tr -cd "[:alpha:]_")
if [ "$action" == "null" ]; then
action=""
fi
base=$(jq -r '.pull_request.base.ref' artifacts/event_file/event.json | tr -cd "[:alnum:]/_.-")
if [ -z "$base" ] || [ "$base" == "null" ]; then
base=${{ github.ref }}
fi
types=$(cat artifacts/matrix_info/wokwi_types.txt | tr -cd "[:alpha:],[]'")
targets=$(cat artifacts/matrix_info/targets.txt | tr -cd "[:alnum:],[]'")
echo "base = $base"
echo "targets = $targets"
echo "types = $types"
echo "pr_num = $pr_num"
printf "$ref" >> artifacts/ref.txt
printf "Ref = "
cat artifacts/ref.txt
printf "${{ github.event.workflow_run.event }}" >> artifacts/event.txt
printf "\nEvent name = "
cat artifacts/event.txt
printf "${{ github.event.workflow_run.head_sha || github.sha }}" >> artifacts/sha.txt
printf "\nHead SHA = "
cat artifacts/sha.txt
printf "$action" >> artifacts/action.txt
printf "\nAction = "
cat artifacts/action.txt
printf "${{ github.event.workflow_run.id }}" >> artifacts/run_id.txt
printf "\nRun ID = "
cat artifacts/run_id.txt
if [ -z "$ref" ] || [ "$ref" == "null" ]; then
echo "Failed to get PR number or ref"
exit 1
fi
conclusion="${{ github.event.workflow_run.conclusion }}"
printf "$conclusion" >> artifacts/conclusion.txt
printf "\nConclusion = "
cat artifacts/conclusion.txt
echo "pr_num=$pr_num" >> $GITHUB_OUTPUT
echo "base=$base" >> $GITHUB_OUTPUT
echo "targets=$targets" >> $GITHUB_OUTPUT
echo "types=$types" >> $GITHUB_OUTPUT
echo "ref=$ref" >> $GITHUB_OUTPUT
- name: Download and extract parent hardware results
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
continue-on-error: true
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
pattern: tests-results-hw-*
merge-multiple: true
path: artifacts/results/hw
- name: Download and extract parent QEMU results
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
continue-on-error: true
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
pattern: tests-results-qemu-*
merge-multiple: true
path: artifacts/results/qemu
- name: Upload parent artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: parent-artifacts
path: artifacts
if-no-files-found: error
- name: Report conclusion
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (Get artifacts) (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: '${{ job.status }}',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
wokwi-test:
name: Wokwi ${{ matrix.chip }} ${{ matrix.type }} tests
if: |
github.event.workflow_run.conclusion == 'success' ||
github.event.workflow_run.conclusion == 'failure' ||
github.event.workflow_run.conclusion == 'timed_out'
runs-on: ubuntu-latest
needs: get-artifacts
env:
id: ${{ needs.get-artifacts.outputs.ref }}-${{ github.event.workflow_run.head_sha || github.sha }}-${{ matrix.chip }}-${{ matrix.type }}
permissions:
actions: read
statuses: write
strategy:
fail-fast: false
matrix:
type: ${{ fromJson(needs.get-artifacts.outputs.types) }}
chip: ${{ fromJson(needs.get-artifacts.outputs.targets) }}
steps:
- name: Report pending
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (${{ matrix.type }}, ${{ matrix.chip }}) / Wokwi ${{ matrix.chip }} ${{ matrix.type }} tests (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: 'pending',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
- name: Check if already passed
id: get-cache-results
if: needs.get-artifacts.outputs.pr_num
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-results-wokwi
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Evaluate if tests should be run
id: check-tests
run: |
cache_exists=${{ steps.get-cache-results.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already ran, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
# Note that changes to the workflows and tests will only be picked up after the PR is merged
# DO NOT CHECKOUT THE USER'S REPOSITORY IN THIS WORKFLOW. IT HAS HIGH SECURITY RISKS.
- name: Checkout repository
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ needs.get-artifacts.outputs.base || github.ref }}
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
cache-dependency-path: tests/requirements.txt
cache: "pip"
python-version: "3.x"
- name: Install dependencies
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
- name: Install Wokwi CLI
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: curl -L https://wokwi.com/ci/install.sh | sh
- name: Wokwi CI Server
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: wokwi/wokwi-ci-server-action@a6fabb5a49e080158c7a1d121ea5b789536a82c3 # v1
- name: Get binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
name: tests-bin-${{ matrix.chip }}-${{ matrix.type }}
path: |
~/.arduino/tests/${{ matrix.chip }}
- name: Run Tests
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
env:
WOKWI_CLI_TOKEN: ${{ secrets.WOKWI_CLI_TOKEN }}
run: |
bash .github/scripts/tests_run.sh -c -type ${{ matrix.type }} -t ${{ matrix.chip }} -i 0 -m 1 -W ${{ env.WOKWI_TIMEOUT }}
- name: Upload ${{ matrix.chip }} ${{ matrix.type }} Wokwi results as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-tests.outputs.enabled == 'true' && needs.get-artifacts.outputs.pr_num
with:
key: tests-${{ env.id }}-results-wokwi
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Upload ${{ matrix.chip }} ${{ matrix.type }} Wokwi results as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: tests-results-wokwi-${{ matrix.chip }}-${{ matrix.type }}
overwrite: true
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Report conclusion
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (${{ matrix.type }}, ${{ matrix.chip }}) / Wokwi ${{ matrix.chip }} ${{ matrix.type }} tests (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: '${{ job.status }}',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
@@ -1,59 +1,20 @@
 name: Push components to https://components.espressif.com
 on:
-workflow_dispatch:
-inputs:
-tag:
-description: 'Version to push to the component registry'
-required: true
-git_ref:
-description: 'Git ref with the source to push to the component registry'
-required: true
-workflow_run:
-workflows: ["ESP32 Arduino Release"]
-types:
-- completed
-permissions:
-contents: read
+push:
+tags:
+- '*'
 jobs:
 upload_components:
 runs-on: ubuntu-latest
 steps:
-- name: Get the release tag
-env:
-head_branch: ${{ inputs.tag || github.event.workflow_run.head_branch }}
-run: |
-if [ "${{ github.event.workflow_run.conclusion }}" != "success" ] && [ "${{ github.event_name }}" == "workflow_run" ]; then
-echo "Release workflow failed. Exiting..."
-exit 1
-fi
-# Read and sanitize the branch/tag name
-branch=$(echo "$head_branch" | tr -cd '[:alnum:]/_.-')
-if [[ $branch == refs/tags/* ]]; then
-tag="${branch#refs/tags/}"
-elif [[ $branch =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$ ]]; then
-tag=$branch
-else
-echo "Tag not found in $branch. Exiting..."
-exit 1
-fi
-echo "Tag: $tag"
-echo "RELEASE_TAG=$tag" >> $GITHUB_ENV
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+- uses: actions/checkout@v4
 with:
-ref: ${{ inputs.git_ref || env.RELEASE_TAG }}
 submodules: "recursive"
 - name: Upload components to the component registry
-uses: espressif/upload-components-ci-action@b78a19fa5424714997596d3ecffa634aef8ae20b # v1.0.5
+uses: espressif/upload-components-ci-action@v1
 with:
 name: arduino-esp32
-version: ${{ env.RELEASE_TAG }}
+version: ${{ github.ref_name }}
 namespace: espressif
 api_token: ${{ secrets.IDF_COMPONENT_API_TOKEN }}
.gitignore
@@ -1,5 +1,4 @@
 tools/esp32-arduino-libs
-tools/xtensa-esp-elf
 tools/xtensa-esp32-elf
 tools/xtensa-esp32s2-elf
 tools/xtensa-esp32s3-elf
@@ -45,13 +44,3 @@ debug.cfg
 debug.svd
 debug_custom.json
 libraries/Insights/examples/*/*.ino.zip
-# Vale Style
-.vale/styles/*
-!.vale/styles/Vocab/
-.vale/styles/Vocab/*
-!.vale/styles/Vocab/Espressif/
-# Ignore Lib Builder Docker run scripts
-/run.sh
-/run.ps1
@@ -1,25 +0,0 @@
workflow:
rules:
# Disable those non-protected push triggered pipelines
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^\d+\.\d+(\.\d+)?($|-)/ && $CI_PIPELINE_SOURCE == "push"'
when: never
# when running merged result pipelines, CI_COMMIT_SHA represents the temp commit it created.
# Please use PIPELINE_COMMIT_SHA at all places that require a commit sha of the original commit.
- if: $CI_OPEN_MERGE_REQUESTS != null
variables:
PIPELINE_COMMIT_SHA: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
IS_MR_PIPELINE: 1
- if: $CI_OPEN_MERGE_REQUESTS == null
variables:
PIPELINE_COMMIT_SHA: $CI_COMMIT_SHA
IS_MR_PIPELINE: 0
- if: '$CI_PIPELINE_SOURCE == "schedule"'
variables:
IS_SCHEDULED_RUN: "true"
- when: always
# Place the default settings in `.gitlab/workflows/common.yml` instead
include:
- ".gitlab/workflows/common.yml"
- ".gitlab/workflows/sample.yml"

View file

@@ -1,26 +0,0 @@
#####################
# Default Variables #
#####################
stages:
- pre_check
- build
- test
- result
variables:
ESP_IDF_VERSION: "5.5"
ESP_ARDUINO_VERSION: "3.3.0"
#############
# `default` #
#############
default:
retry:
max: 2
when:
# In case of a runner failure we could hop to another one, or a network error could go away.
- runner_system_failure
# Job execution timeout may be caused by a network issue.
- job_execution_timeout
@@ -1,6 +0,0 @@
hello-world:
stage: test
script:
- echo "Hello, World from GitLab CI!"
rules:
- if: $CI_PIPELINE_SOURCE == "push"
@@ -1,110 +0,0 @@
exclude: |
(?x)(
^\.github\/|
^tests\/performance\/coremark\/.*\.[ch]$|
^tests\/performance\/superpi\/.*\.(cpp|h)$|
LICENSE\.md$
)
default_language_version:
# force all unspecified python hooks to run python3
python: python3
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: "cef0300fd0fc4d2a87a85fa2093c6b283ea36f4b" # v5.0.0
hooks:
# Generic checks
- id: check-case-conflict
- id: check-symlinks
- id: debug-statements
- id: destroyed-symlinks
- id: detect-private-key
- id: end-of-file-fixer
exclude: ^.*\.(bin|BIN)$
- id: mixed-line-ending
args: [--fix=lf]
- id: trailing-whitespace
args: [--markdown-linebreak-ext=md]
# JSON formatting
- id: pretty-format-json
stages: [manual]
args: [--autofix]
types_or: [json]
exclude: |
(?x)(
diagram\..*\.json$|
package\.json$|
^package\/.*$
)
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: "f6446549e5e97ec9665b9b03e75b87b445857f9a" # v18.1.3
hooks:
# C/C++ formatting
- id: clang-format
types_or: [c, c++]
exclude: ^.*\/build_opt\.h$
- repo: https://github.com/psf/black-pre-commit-mirror
rev: "a4920527036bb9a3f3e6055d595849d67d0da066" # 25.1.0
hooks:
# Python formatting
- id: black
types_or: [python]
args: [--line-length=120] #From the arduino code style. Add as argument rather than creating a new config file.
- repo: https://github.com/PyCQA/flake8
rev: "16f5f28a384f0781bebb37a08aa45e65b9526c50" # 7.2.0
hooks:
# Python linting
- id: flake8
types_or: [python]
additional_dependencies:
- flake8-bugbear
- flake8-comprehensions
- flake8-simplify
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "ffb6a759a979008c0e6dff86e39f4745a2d9eac4" # v3.1.0
hooks:
# YAML formatting
- id: prettier
types_or: [yaml]
- repo: https://github.com/codespell-project/codespell
rev: "63c8f8312b7559622c0d82815639671ae42132ac" # v2.4.1
hooks:
# Spell checking
- id: codespell
exclude: ^.*\.(svd|SVD)$
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: "a23f6b85d0fdd5bb9d564e2579e678033debbdff" # v0.10.0.1
hooks:
# Bash linting
- id: shellcheck
types: [shell]
- repo: https://github.com/openstack/bashate
rev: "fbd7c2534c2701351c603ff700ddf08202430a31" # 2.1.1
hooks:
# Bash formatting
- id: bashate
types: [shell]
args: ["-i", "E006"] # Ignore E006: Line too long
- repo: https://github.com/errata-ai/vale
rev: "dc4c47923788a413fb5677de6e3370d514aecb78" # v3.11.2
hooks:
# Sync vale styles and lint markdown and reStructuredText
- id: vale
name: vale-sync
language_version: "1.23.2"
pass_filenames: false
args: [sync]
types_or: [markdown, rst]
- id: vale
language_version: "1.23.2"
types_or: [markdown, rst]


@ -1,4 +0,0 @@
__pycache__/
.clang-format
.licenses/
/.git/


@ -1,11 +0,0 @@
# Shellcheck configuration file for ESP32 Arduino core
# Optional checks. https://github.com/koalaman/shellcheck/wiki/optional
enable=add-default-case,deprecate-which,avoid-nullary-conditions
# Enable search for external sources
external-sources=true
# Search folder for sourced files.
# Set to the folder where the original script is located.
source-path=SCRIPTDIR

.vale.ini

@ -1,118 +0,0 @@
###################
### Vale Config ###
###################
# This is a Vale linter configuration file.
# - Repo: arduino-esp32
# - Based on Default config: v0-1-1
# It lists all necessary parameters to configure Vale for your project.
# For official documentation on all config settings, see
# https://vale.sh/docs/topics/config
##############
### Global ###
##############
# This section lists core settings applying to Vale itself.
# Specify path to external resources (e.g., styles and vocab files).
# The path value may be absolute or relative to this configuration file.
StylesPath = .vale/styles
# Specify the minimum alert severity that Vale will report.
MinAlertLevel = error # "suggestion", "warning", or "error"
# Specify vocabulary for special treatment.
# Create a folder in <StylesPath>/Vocab/<name>/and add its name here
# The folder should contain two files:
# - accept.txt -- lists words with accepted case-sensitive spelling
# - reject.txt -- lists words whose occurrences throw an error
# Vocab = Espressif
# Specify the packages to import into your project.
# A package is a zip file containing a number of rules (style) written in YAML.
# For a list of official packages, see Package Hub at https://vale.sh/hub/
# For official documentation on packages, see
# https://vale.sh/docs/topics/packages/
# Before linting, navigate to your project and run `vale sync` to download
# the official packages specified below.
# Packages = Package1, Package2, \
# https://example.com/path/to/package/Package.zip
Packages = Google, Microsoft, RedHat, \
https://dl.espressif.com/dl/esp-vale-config/Espressif-latest.zip
###############
### Formats ###
###############
# This section enables association of "unknown" formats with the ones
# supported by Vale. For official documentation on supported formats, see
# https://vale.sh/docs/topics/scoping/
[formats]
# For example, treat MDX files as Markdown files.
# mdx = md
################################
### Format-specific settings ###
################################
# This section lists the settings that apply to specific file formats
# based on their glob pattern.
# Settings provided under a more specific glob pattern,
# such as [*.{md,txt}] will override those in [*].
[*.{md,rst}]
# Enable styles to activate all rules included in them.
# BasedOnStyles = Style1, Style2
BasedOnStyles = Vale, Espressif-latest
### Deactivate individual rules ###
### in enabled styles.
# Style1.Rule1 = NO
Vale.Repetition = NO
Vale.Spelling = NO
Espressif-latest.Admonitions = NO
Espressif-latest.Contractions = NO
Espressif-latest.Monospace = NO
### Change default severity level ###
### of an activated rule.
# Choose between "suggestion", "warning", or "error".
# Style1.Rule2 = error
### Activate individual rules ###
### in non-enabled styles stored in <StylesPath>.
# Style1.Rule = YES
Google.Gender = YES
Google.GenderBias = YES
Google.Slang = YES
Google.Spacing = YES
Microsoft.DateNumbers = YES
Microsoft.Ellipses = YES
Microsoft.FirstPerson = YES
Microsoft.Hyphens = YES
Microsoft.Ordinal = YES
Microsoft.OxfordComma = YES
Microsoft.Percentages = YES
Microsoft.RangeTime = YES
Microsoft.Semicolon = YES
Microsoft.SentenceLength = YES
Microsoft.Suspended = YES
Microsoft.Units = YES
Microsoft.URLFormat = YES
Microsoft.We = YES
Microsoft.Wordiness = YES
RedHat.Contractions = YES
RedHat.RepeatedWords = YES


@ -5,8 +5,8 @@
# export ARDUINO_SKIP_IDF_VERSION_CHECK=1 # export ARDUINO_SKIP_IDF_VERSION_CHECK=1
# idf.py build # idf.py build
set(min_supported_idf_version "5.3.0") set(min_supported_idf_version "5.1.0")
set(max_supported_idf_version "5.5.99") set(max_supported_idf_version "5.1.99")
set(idf_version "${IDF_VERSION_MAJOR}.${IDF_VERSION_MINOR}.${IDF_VERSION_PATCH}") set(idf_version "${IDF_VERSION_MAJOR}.${IDF_VERSION_MINOR}.${IDF_VERSION_PATCH}")
if ("${idf_version}" AND NOT "$ENV{ARDUINO_SKIP_IDF_VERSION_CHECK}") if ("${idf_version}" AND NOT "$ENV{ARDUINO_SKIP_IDF_VERSION_CHECK}")
@ -25,7 +25,6 @@ endif()
set(CORE_SRCS set(CORE_SRCS
cores/esp32/base64.cpp cores/esp32/base64.cpp
cores/esp32/cbuf.cpp cores/esp32/cbuf.cpp
cores/esp32/ColorFormat.c
cores/esp32/chip-debug-report.cpp cores/esp32/chip-debug-report.cpp
cores/esp32/esp32-hal-adc.c cores/esp32/esp32-hal-adc.c
cores/esp32/esp32-hal-bt.c cores/esp32/esp32-hal-bt.c
@ -33,7 +32,6 @@ set(CORE_SRCS
cores/esp32/esp32-hal-dac.c cores/esp32/esp32-hal-dac.c
cores/esp32/esp32-hal-gpio.c cores/esp32/esp32-hal-gpio.c
cores/esp32/esp32-hal-i2c.c cores/esp32/esp32-hal-i2c.c
cores/esp32/esp32-hal-i2c-ng.c
cores/esp32/esp32-hal-i2c-slave.c cores/esp32/esp32-hal-i2c-slave.c
cores/esp32/esp32-hal-ledc.c cores/esp32/esp32-hal-ledc.c
cores/esp32/esp32-hal-matrix.c cores/esp32/esp32-hal-matrix.c
@ -47,18 +45,15 @@ set(CORE_SRCS
cores/esp32/esp32-hal-timer.c cores/esp32/esp32-hal-timer.c
cores/esp32/esp32-hal-tinyusb.c cores/esp32/esp32-hal-tinyusb.c
cores/esp32/esp32-hal-touch.c cores/esp32/esp32-hal-touch.c
cores/esp32/esp32-hal-touch-ng.c
cores/esp32/esp32-hal-uart.c cores/esp32/esp32-hal-uart.c
cores/esp32/esp32-hal-rmt.c cores/esp32/esp32-hal-rmt.c
cores/esp32/Esp.cpp cores/esp32/Esp.cpp
cores/esp32/freertos_stats.cpp
cores/esp32/FunctionalInterrupt.cpp cores/esp32/FunctionalInterrupt.cpp
cores/esp32/HardwareSerial.cpp cores/esp32/HardwareSerial.cpp
cores/esp32/HEXBuilder.cpp cores/esp32/HEXBuilder.cpp
cores/esp32/IPAddress.cpp cores/esp32/IPAddress.cpp
cores/esp32/libb64/cdecode.c cores/esp32/libb64/cdecode.c
cores/esp32/libb64/cencode.c cores/esp32/libb64/cencode.c
cores/esp32/MacAddress.cpp
cores/esp32/main.cpp cores/esp32/main.cpp
cores/esp32/MD5Builder.cpp cores/esp32/MD5Builder.cpp
cores/esp32/Print.cpp cores/esp32/Print.cpp
@ -87,7 +82,6 @@ set(ARDUINO_ALL_LIBRARIES
DNSServer DNSServer
EEPROM EEPROM
ESP_I2S ESP_I2S
ESP_NOW
ESP_SR ESP_SR
ESPmDNS ESPmDNS
Ethernet Ethernet
@ -97,11 +91,7 @@ set(ARDUINO_ALL_LIBRARIES
HTTPUpdate HTTPUpdate
Insights Insights
LittleFS LittleFS
Matter
NetBIOS NetBIOS
Network
OpenThread
PPP
Preferences Preferences
RainMaker RainMaker
SD_MMC SD_MMC
@ -113,14 +103,14 @@ set(ARDUINO_ALL_LIBRARIES
Update Update
USB USB
WebServer WebServer
NetworkClientSecure WiFiClientSecure
WiFi WiFi
WiFiProv WiFiProv
Wire Wire
Zigbee
) )
set(ARDUINO_LIBRARY_ArduinoOTA_SRCS libraries/ArduinoOTA/src/ArduinoOTA.cpp) set(ARDUINO_LIBRARY_ArduinoOTA_SRCS libraries/ArduinoOTA/src/ArduinoOTA.cpp)
set(ARDUINO_LIBRARY_ArduinoOTA_REQUIRES esp_https_ota)
set(ARDUINO_LIBRARY_AsyncUDP_SRCS libraries/AsyncUDP/src/AsyncUDP.cpp) set(ARDUINO_LIBRARY_AsyncUDP_SRCS libraries/AsyncUDP/src/AsyncUDP.cpp)
@ -136,10 +126,6 @@ set(ARDUINO_LIBRARY_EEPROM_SRCS libraries/EEPROM/src/EEPROM.cpp)
set(ARDUINO_LIBRARY_ESP_I2S_SRCS libraries/ESP_I2S/src/ESP_I2S.cpp) set(ARDUINO_LIBRARY_ESP_I2S_SRCS libraries/ESP_I2S/src/ESP_I2S.cpp)
set(ARDUINO_LIBRARY_ESP_NOW_SRCS
libraries/ESP_NOW/src/ESP32_NOW.cpp
libraries/ESP_NOW/src/ESP32_NOW_Serial.cpp)
set(ARDUINO_LIBRARY_ESP_SR_SRCS set(ARDUINO_LIBRARY_ESP_SR_SRCS
libraries/ESP_SR/src/ESP_SR.cpp libraries/ESP_SR/src/ESP_SR.cpp
libraries/ESP_SR/src/esp32-hal-sr.c) libraries/ESP_SR/src/esp32-hal-sr.c)
@ -164,33 +150,6 @@ set(ARDUINO_LIBRARY_LittleFS_SRCS libraries/LittleFS/src/LittleFS.cpp)
set(ARDUINO_LIBRARY_NetBIOS_SRCS libraries/NetBIOS/src/NetBIOS.cpp) set(ARDUINO_LIBRARY_NetBIOS_SRCS libraries/NetBIOS/src/NetBIOS.cpp)
set(ARDUINO_LIBRARY_OpenThread_SRCS
libraries/OpenThread/src/OThread.cpp
libraries/OpenThread/src/OThreadCLI.cpp
libraries/OpenThread/src/OThreadCLI_Util.cpp)
set(ARDUINO_LIBRARY_Matter_SRCS
libraries/Matter/src/MatterEndpoints/MatterGenericSwitch.cpp
libraries/Matter/src/MatterEndpoints/MatterOnOffLight.cpp
libraries/Matter/src/MatterEndpoints/MatterDimmableLight.cpp
libraries/Matter/src/MatterEndpoints/MatterColorTemperatureLight.cpp
libraries/Matter/src/MatterEndpoints/MatterColorLight.cpp
libraries/Matter/src/MatterEndpoints/MatterEnhancedColorLight.cpp
libraries/Matter/src/MatterEndpoints/MatterFan.cpp
libraries/Matter/src/MatterEndpoints/MatterTemperatureSensor.cpp
libraries/Matter/src/MatterEndpoints/MatterHumiditySensor.cpp
libraries/Matter/src/MatterEndpoints/MatterContactSensor.cpp
libraries/Matter/src/MatterEndpoints/MatterPressureSensor.cpp
libraries/Matter/src/MatterEndpoints/MatterOccupancySensor.cpp
libraries/Matter/src/MatterEndpoints/MatterOnOffPlugin.cpp
libraries/Matter/src/MatterEndpoints/MatterThermostat.cpp
libraries/Matter/src/Matter.cpp
libraries/Matter/src/MatterEndPoint.cpp)
set(ARDUINO_LIBRARY_PPP_SRCS
libraries/PPP/src/PPP.cpp
libraries/PPP/src/ppp.c)
set(ARDUINO_LIBRARY_Preferences_SRCS libraries/Preferences/src/Preferences.cpp) set(ARDUINO_LIBRARY_Preferences_SRCS libraries/Preferences/src/Preferences.cpp)
set(ARDUINO_LIBRARY_RainMaker_SRCS set(ARDUINO_LIBRARY_RainMaker_SRCS
@ -227,16 +186,6 @@ set(ARDUINO_LIBRARY_USB_SRCS
libraries/USB/src/USBMIDI.cpp libraries/USB/src/USBMIDI.cpp
libraries/USB/src/USBHIDMouse.cpp libraries/USB/src/USBHIDMouse.cpp
libraries/USB/src/USBHIDKeyboard.cpp libraries/USB/src/USBHIDKeyboard.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_da_DK.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_de_DE.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_en_US.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_es_ES.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_fr_FR.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_hu_HU.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_it_IT.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_pt_BR.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_pt_PT.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_sv_SE.cpp
libraries/USB/src/USBHIDGamepad.cpp libraries/USB/src/USBHIDGamepad.cpp
libraries/USB/src/USBHIDConsumerControl.cpp libraries/USB/src/USBHIDConsumerControl.cpp
libraries/USB/src/USBHIDSystemControl.cpp libraries/USB/src/USBHIDSystemControl.cpp
@ -246,71 +195,28 @@ set(ARDUINO_LIBRARY_USB_SRCS
set(ARDUINO_LIBRARY_WebServer_SRCS set(ARDUINO_LIBRARY_WebServer_SRCS
libraries/WebServer/src/WebServer.cpp libraries/WebServer/src/WebServer.cpp
libraries/WebServer/src/Parsing.cpp libraries/WebServer/src/Parsing.cpp
libraries/WebServer/src/detail/mimetable.cpp libraries/WebServer/src/detail/mimetable.cpp)
libraries/WebServer/src/middleware/MiddlewareChain.cpp
libraries/WebServer/src/middleware/AuthenticationMiddleware.cpp
libraries/WebServer/src/middleware/CorsMiddleware.cpp
libraries/WebServer/src/middleware/LoggingMiddleware.cpp)
set(ARDUINO_LIBRARY_NetworkClientSecure_SRCS set(ARDUINO_LIBRARY_WiFiClientSecure_SRCS
libraries/NetworkClientSecure/src/ssl_client.cpp libraries/WiFiClientSecure/src/ssl_client.cpp
libraries/NetworkClientSecure/src/NetworkClientSecure.cpp) libraries/WiFiClientSecure/src/WiFiClientSecure.cpp)
set(ARDUINO_LIBRARY_Network_SRCS
libraries/Network/src/NetworkInterface.cpp
libraries/Network/src/NetworkEvents.cpp
libraries/Network/src/NetworkManager.cpp
libraries/Network/src/NetworkClient.cpp
libraries/Network/src/NetworkServer.cpp
libraries/Network/src/NetworkUdp.cpp)
set(ARDUINO_LIBRARY_WiFi_SRCS set(ARDUINO_LIBRARY_WiFi_SRCS
libraries/WiFi/src/WiFiAP.cpp libraries/WiFi/src/WiFiAP.cpp
libraries/WiFi/src/WiFiClient.cpp
libraries/WiFi/src/WiFi.cpp libraries/WiFi/src/WiFi.cpp
libraries/WiFi/src/WiFiGeneric.cpp libraries/WiFi/src/WiFiGeneric.cpp
libraries/WiFi/src/WiFiMulti.cpp libraries/WiFi/src/WiFiMulti.cpp
libraries/WiFi/src/WiFiScan.cpp libraries/WiFi/src/WiFiScan.cpp
libraries/WiFi/src/WiFiServer.cpp
libraries/WiFi/src/WiFiSTA.cpp libraries/WiFi/src/WiFiSTA.cpp
libraries/WiFi/src/STA.cpp libraries/WiFi/src/WiFiUdp.cpp)
libraries/WiFi/src/AP.cpp)
set(ARDUINO_LIBRARY_WiFiProv_SRCS libraries/WiFiProv/src/WiFiProv.cpp) set(ARDUINO_LIBRARY_WiFiProv_SRCS libraries/WiFiProv/src/WiFiProv.cpp)
set(ARDUINO_LIBRARY_Wire_SRCS libraries/Wire/src/Wire.cpp) set(ARDUINO_LIBRARY_Wire_SRCS libraries/Wire/src/Wire.cpp)
set(ARDUINO_LIBRARY_Zigbee_SRCS
libraries/Zigbee/src/ZigbeeCore.cpp
libraries/Zigbee/src/ZigbeeEP.cpp
libraries/Zigbee/src/ZigbeeHandlers.cpp
libraries/Zigbee/src/ep/ZigbeeColorDimmableLight.cpp
libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.cpp
libraries/Zigbee/src/ep/ZigbeeDimmableLight.cpp
libraries/Zigbee/src/ep/ZigbeeLight.cpp
libraries/Zigbee/src/ep/ZigbeeSwitch.cpp
libraries/Zigbee/src/ep/ZigbeeTempSensor.cpp
libraries/Zigbee/src/ep/ZigbeeThermostat.cpp
libraries/Zigbee/src/ep/ZigbeeFlowSensor.cpp
libraries/Zigbee/src/ep/ZigbeePressureSensor.cpp
libraries/Zigbee/src/ep/ZigbeeOccupancySensor.cpp
libraries/Zigbee/src/ep/ZigbeeCarbonDioxideSensor.cpp
libraries/Zigbee/src/ep/ZigbeeContactSwitch.cpp
libraries/Zigbee/src/ep/ZigbeeDoorWindowHandle.cpp
libraries/Zigbee/src/ep/ZigbeeWindowCovering.cpp
libraries/Zigbee/src/ep/ZigbeeVibrationSensor.cpp
libraries/Zigbee/src/ep/ZigbeeAnalog.cpp
libraries/Zigbee/src/ep/ZigbeeRangeExtender.cpp
libraries/Zigbee/src/ep/ZigbeeGateway.cpp
libraries/Zigbee/src/ep/ZigbeeWindSpeedSensor.cpp
libraries/Zigbee/src/ep/ZigbeeIlluminanceSensor.cpp
libraries/Zigbee/src/ep/ZigbeePM25Sensor.cpp
libraries/Zigbee/src/ep/ZigbeeElectricalMeasurement.cpp
libraries/Zigbee/src/ep/ZigbeeBinary.cpp
libraries/Zigbee/src/ep/ZigbeePowerOutlet.cpp
libraries/Zigbee/src/ep/ZigbeeFanControl.cpp
)
set(ARDUINO_LIBRARY_BLE_SRCS set(ARDUINO_LIBRARY_BLE_SRCS
libraries/BLE/src/BLE2901.cpp
libraries/BLE/src/BLE2902.cpp libraries/BLE/src/BLE2902.cpp
libraries/BLE/src/BLE2904.cpp libraries/BLE/src/BLE2904.cpp
libraries/BLE/src/BLEAddress.cpp libraries/BLE/src/BLEAddress.cpp
@ -362,20 +268,8 @@ endforeach()
set(includedirs variants/${CONFIG_ARDUINO_VARIANT}/ cores/esp32/ ${ARDUINO_LIBRARIES_INCLUDEDIRS}) set(includedirs variants/${CONFIG_ARDUINO_VARIANT}/ cores/esp32/ ${ARDUINO_LIBRARIES_INCLUDEDIRS})
set(srcs ${CORE_SRCS} ${ARDUINO_LIBRARIES_SRCS}) set(srcs ${CORE_SRCS} ${ARDUINO_LIBRARIES_SRCS})
set(priv_includes cores/esp32/libb64) set(priv_includes cores/esp32/libb64)
set(requires spi_flash esp_partition mbedtls wpa_supplicant esp_adc esp_eth http_parser esp_ringbuf esp_driver_gptimer esp_driver_usb_serial_jtag driver esp_http_client esp_https_ota) set(requires spi_flash esp_partition mbedtls wifi_provisioning wpa_supplicant esp_adc esp_eth http_parser)
set(priv_requires fatfs nvs_flash app_update spiffs bootloader_support bt esp_hid usb esp_psram ${ARDUINO_LIBRARIES_REQUIRES}) set(priv_requires fatfs nvs_flash app_update spiffs bootloader_support bt esp_hid ${ARDUINO_LIBRARIES_REQUIRES})
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_OpenThread)
#if(CONFIG_SOC_IEEE802154_SUPPORTED) # Does not work!
#if(CONFIG_OPENTHREAD_ENABLED) # Does not work!
if(IDF_TARGET STREQUAL "esp32c6" OR IDF_TARGET STREQUAL "esp32h2" OR IDF_TARGET STREQUAL "esp32c5") # Sadly only this works
list(APPEND requires openthread)
endif()
endif()
if(IDF_TARGET STREQUAL "esp32p4")
list(APPEND requires esp_driver_touch_sens)
endif()
idf_component_register(INCLUDE_DIRS ${includedirs} PRIV_INCLUDE_DIRS ${priv_includes} SRCS ${srcs} REQUIRES ${requires} PRIV_REQUIRES ${priv_requires}) idf_component_register(INCLUDE_DIRS ${includedirs} PRIV_INCLUDE_DIRS ${priv_includes} SRCS ${srcs} REQUIRES ${requires} PRIV_REQUIRES ${priv_requires})
@ -393,7 +287,7 @@ target_compile_options(${COMPONENT_TARGET} PUBLIC
-DARDUINO_ARCH_ESP32 -DARDUINO_ARCH_ESP32
-DARDUINO_BOARD="${idf_target_caps}_DEV" -DARDUINO_BOARD="${idf_target_caps}_DEV"
-DARDUINO_VARIANT="${CONFIG_ARDUINO_VARIANT}" -DARDUINO_VARIANT="${CONFIG_ARDUINO_VARIANT}"
-DESP32=ESP32) -DESP32)
if(CONFIG_AUTOSTART_ARDUINO) if(CONFIG_AUTOSTART_ARDUINO)
# in autostart mode, arduino-esp32 contains app_main() function and needs to # in autostart mode, arduino-esp32 contains app_main() function and needs to
@ -416,21 +310,9 @@ function(maybe_add_component component_name)
endif() endif()
endfunction() endfunction()
if(IDF_TARGET MATCHES "esp32s2|esp32s3|esp32p4" AND CONFIG_TINYUSB_ENABLED) if(IDF_TARGET MATCHES "esp32s2|esp32s3" AND CONFIG_TINYUSB_ENABLED)
maybe_add_component(arduino_tinyusb) maybe_add_component(arduino_tinyusb)
endif() endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_ArduinoOTA) if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_ArduinoOTA)
maybe_add_component(esp_https_ota) maybe_add_component(esp_https_ota)
endif() endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_ESP_SR)
maybe_add_component(espressif__esp_sr)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_Matter)
maybe_add_component(espressif__esp_matter)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_LittleFS)
maybe_add_component(joltwallet__littlefs)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_WiFiProv)
maybe_add_component(espressif__network_provisioning)
endif()


@ -5,7 +5,7 @@
We as members, contributors, and leaders pledge to make participation in our We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status, identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity nationality, personal appearance, race, religion, or sexual identity
and orientation. and orientation.


@ -209,7 +209,7 @@ config ARDUHAL_ESP_LOG
default "n" default "n"
help help
This option will redefine the ESP_LOGx macros to Arduino's log_x macros. This option will redefine the ESP_LOGx macros to Arduino's log_x macros.
To enable for your application, add the following after your includes: To enable for your application, add the follwing after your includes:
#ifdef ARDUINO_ARCH_ESP32 #ifdef ARDUINO_ARCH_ESP32
#include "esp32-hal-log.h" #include "esp32-hal-log.h"
#endif #endif
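A minimal sketch using this option might look like the example below (hedged: it assumes ARDUHAL_ESP_LOG is enabled in sdkconfig and that the configured core log level allows info output; the tag string is illustrative):

#include <Arduino.h>
#ifdef ARDUINO_ARCH_ESP32
#include "esp32-hal-log.h"  // with ARDUHAL_ESP_LOG set, ESP_LOGx routes to the Arduino log_x macros
#endif

static const char *TAG = "demo";  // illustrative tag

void setup() {
  Serial.begin(115200);
  ESP_LOGI(TAG, "free heap: %u bytes", ESP.getFreeHeap());  // emitted via log_i() when the option is active
}

void loop() {}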
@ -256,147 +256,22 @@ config ARDUINO_SELECTIVE_COMPILATION
bool "Include only specific Arduino libraries" bool "Include only specific Arduino libraries"
default n default n
config ARDUINO_SELECTIVE_SPI
bool "Enable SPI"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Wire
bool "Enable Wire"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ESP_SR
bool "Enable ESP-SR"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_EEPROM
bool "Enable EEPROM"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Preferences
bool "Enable Preferences"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Ticker
bool "Enable Ticker"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Update
bool "Enable Update"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Zigbee
bool "Enable Zigbee"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_FS
bool "Enable FS"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_SD
bool "Enable SD"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SD_MMC
bool "Enable SD_MMC"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SPIFFS
bool "Enable SPIFFS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_FFat
bool "Enable FFat"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_LittleFS
bool "Enable LittleFS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_Network
bool "Enable Networking"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Ethernet
bool "Enable Ethernet"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_PPP
bool "Enable PPP"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ArduinoOTA config ARDUINO_SELECTIVE_ArduinoOTA
bool "Enable ArduinoOTA" bool "Enable ArduinoOTA"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
select ARDUINO_SELECTIVE_ESPmDNS select ARDUINO_SELECTIVE_ESPmDNS
default y default y
config ARDUINO_SELECTIVE_AsyncUDP config ARDUINO_SELECTIVE_AsyncUDP
bool "Enable AsyncUDP" bool "Enable AsyncUDP"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network depends on ARDUINO_SELECTIVE_COMPILATION
default y default y
config ARDUINO_SELECTIVE_DNSServer config ARDUINO_SELECTIVE_AzureIoT
bool "Enable DNSServer" bool "Enable AzureIoT"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network depends on ARDUINO_SELECTIVE_COMPILATION
default y select ARDUINO_SELECTIVE_HTTPClient
config ARDUINO_SELECTIVE_ESPmDNS
bool "Enable ESPmDNS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_HTTPClient
bool "Enable HTTPClient"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
select ARDUINO_SELECTIVE_NetworkClientSecure
default y
config ARDUINO_SELECTIVE_Matter
bool "Enable Matter"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_NetBIOS
bool "Enable NetBIOS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_WebServer
bool "Enable WebServer"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
select ARDUINO_SELECTIVE_FS
config ARDUINO_SELECTIVE_WiFi
bool "Enable WiFi"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_NetworkClientSecure
bool "Enable NetworkClientSecure"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_WiFiProv
bool "Enable WiFiProv"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network && ARDUINO_SELECTIVE_WiFi
default y default y
config ARDUINO_SELECTIVE_BLE config ARDUINO_SELECTIVE_BLE
@ -409,24 +284,129 @@ config ARDUINO_SELECTIVE_BluetoothSerial
depends on ARDUINO_SELECTIVE_COMPILATION depends on ARDUINO_SELECTIVE_COMPILATION
default y default y
config ARDUINO_SELECTIVE_DNSServer
bool "Enable DNSServer"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_EEPROM
bool "Enable EEPROM"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ESP32
bool "Enable ESP32"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ESPmDNS
bool "Enable ESPmDNS"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_FFat
bool "Enable FFat"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_FS
bool "Enable FS"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_HTTPClient
bool "Enable HTTPClient"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
select ARDUINO_SELECTIVE_WiFiClientSecure
default y
config ARDUINO_SELECTIVE_LITTLEFS
bool "Enable LITTLEFS"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_NetBIOS
bool "Enable NetBIOS"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_Preferences
bool "Enable Preferences"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_SD
bool "Enable SD"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SD_MMC
bool "Enable SD_MMC"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SimpleBLE config ARDUINO_SELECTIVE_SimpleBLE
bool "Enable SimpleBLE" bool "Enable SimpleBLE"
depends on ARDUINO_SELECTIVE_COMPILATION depends on ARDUINO_SELECTIVE_COMPILATION
default y default y
config ARDUINO_SELECTIVE_RainMaker config ARDUINO_SELECTIVE_SPI
bool "Enable RainMaker" bool "Enable SPI"
depends on ARDUINO_SELECTIVE_COMPILATION depends on ARDUINO_SELECTIVE_COMPILATION
default y default y
config ARDUINO_SELECTIVE_OpenThread config ARDUINO_SELECTIVE_SPIFFS
bool "Enable OpenThread" bool "Enable SPIFFS"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_Ticker
bool "Enable Ticker"
depends on ARDUINO_SELECTIVE_COMPILATION depends on ARDUINO_SELECTIVE_COMPILATION
default y default y
config ARDUINO_SELECTIVE_Insights config ARDUINO_SELECTIVE_Update
bool "Enable Insights" bool "Enable Update"
depends on ARDUINO_SELECTIVE_COMPILATION depends on ARDUINO_SELECTIVE_COMPILATION
default y default y
config ARDUINO_SELECTIVE_WebServer
bool "Enable WebServer"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
select ARDUINO_SELECTIVE_FS
config ARDUINO_SELECTIVE_WiFi
bool "Enable WiFi"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_WiFiClientSecure
bool "Enable WiFiClientSecure"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_WiFiProv
bool "Enable WiFiProv"
depends on ARDUINO_SELECTIVE_COMPILATION
select ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_Wire
bool "Enable Wire"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
endmenu endmenu


@ -1,13 +1,8 @@
# Arduino core for the ESP32, ESP32-C3, ESP32-C6, ESP32-H2, ESP32-P4, ESP32-S2 and ESP32-S3. # Arduino core for the ESP32, ESP32-S2, ESP32-S3, ESP32-C3, ESP32-C6 and ESP32-H2
[![Build Status](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/push.yml?branch=master&event=push&label=Compilation%20Tests)](https://github.com/espressif/arduino-esp32/actions/workflows/push.yml?query=branch%3Amaster+event%3Apush) ![Build Status](https://github.com/espressif/arduino-esp32/workflows/ESP32%20Arduino%20CI/badge.svg) [![External Libraries Test](https://github.com/espressif/arduino-esp32/actions/workflows/lib.yml/badge.svg?branch=master&event=schedule)](https://github.com/espressif/arduino-esp32/actions/workflows/lib.yml?link=http://https://github.com/espressif/arduino-esp32/blob/master/LIBRARIES_TEST.md)
[![Verbose Build Status](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/push.yml?branch=master&event=schedule&label=Compilation%20Tests%20(Verbose))](https://github.com/espressif/arduino-esp32/actions/workflows/push.yml?query=branch%3Amaster+event%3Aschedule)
[![External Libraries Test](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/lib.yml?branch=master&event=schedule&label=External%20Libraries%20Test)](https://github.com/espressif/arduino-esp32/blob/gh-pages/LIBRARIES_TEST.md)
[![Runtime Tests](https://github.com/espressif/arduino-esp32/blob/gh-pages/runtime-tests-results/badge.svg)](https://github.com/espressif/arduino-esp32/blob/gh-pages/runtime-tests-results/RUNTIME_TESTS_REPORT.md)
### Need help or have a question? Join the chat at [Discord](https://discord.gg/8xY6e9crwv) or [open a new Discussion](https://github.com/espressif/arduino-esp32/discussions) ### Need help or have a question? Join the chat at [Gitter](https://gitter.im/espressif/arduino-esp32) or [open a new Discussion](https://github.com/espressif/arduino-esp32/discussions)
[![Discord invite](https://img.shields.io/discord/1327272229427216425?logo=discord&logoColor=white&logoSize=auto&label=Discord)](https://discord.gg/8xY6e9crwv)
## Contents ## Contents
@ -21,17 +16,9 @@
### Development Status ### Development Status
#### Latest Stable Release Latest Stable Release [![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/latest/) [![Release Date](https://img.shields.io/github/release-date/espressif/arduino-esp32.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/latest/) [![Downloads](https://img.shields.io/github/downloads/espressif/arduino-esp32/latest/total.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/latest/)
[![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32.svg)](https://github.com/espressif/arduino-esp32/releases/latest/) Latest Development Release [![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32/all.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/) [![Release Date](https://img.shields.io/github/release-date-pre/espressif/arduino-esp32.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/) [![Downloads](https://img.shields.io/github/downloads-pre/espressif/arduino-esp32/latest/total.svg?style=plastic)](https://github.com/espressif/arduino-esp32/releases/)
[![Release Date](https://img.shields.io/github/release-date/espressif/arduino-esp32.svg)](https://github.com/espressif/arduino-esp32/releases/latest/)
[![Downloads](https://img.shields.io/github/downloads/espressif/arduino-esp32/latest/total.svg)](https://github.com/espressif/arduino-esp32/releases/latest/)
#### Latest Development Release
[![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32/all.svg)](https://github.com/espressif/arduino-esp32/releases/)
[![Release Date](https://img.shields.io/github/release-date-pre/espressif/arduino-esp32.svg)](https://github.com/espressif/arduino-esp32/releases/)
[![Downloads](https://img.shields.io/github/downloads-pre/espressif/arduino-esp32/latest/total.svg)](https://github.com/espressif/arduino-esp32/releases/)
### Development Planning ### Development Planning
@ -67,17 +54,11 @@ Here are the ESP32 series supported by the Arduino-ESP32 project:
| **SoC** | **Stable** | **Development** | **Datasheet** | | **SoC** | **Stable** | **Development** | **Datasheet** |
|----------|:----------:|:---------------:|:-------------------------------------------------------------------------------------------------:| |----------|:----------:|:---------------:|:-------------------------------------------------------------------------------------------------:|
| ESP32 | Yes | Yes | [ESP32](https://www.espressif.com/sites/default/files/documentation/esp32_datasheet_en.pdf) | | ESP32 | Yes | Yes | [ESP32](https://www.espressif.com/sites/default/files/documentation/esp32_datasheet_en.pdf) |
| ESP32-C3 | Yes | Yes | [ESP32-C3](https://www.espressif.com/sites/default/files/documentation/esp32-c3_datasheet_en.pdf) |
| ESP32-C6 | Yes | Yes | [ESP32-C6](https://www.espressif.com/sites/default/files/documentation/esp32-c6_datasheet_en.pdf) |
| ESP32-H2 | Yes | Yes | [ESP32-H2](https://www.espressif.com/sites/default/files/documentation/esp32-h2_datasheet_en.pdf) |
| ESP32-P4 | Yes | Yes | [ESP32-P4](https://www.espressif.com/sites/default/files/documentation/esp32-p4_datasheet_en.pdf) |
| ESP32-S2 | Yes | Yes | [ESP32-S2](https://www.espressif.com/sites/default/files/documentation/esp32-s2_datasheet_en.pdf) | | ESP32-S2 | Yes | Yes | [ESP32-S2](https://www.espressif.com/sites/default/files/documentation/esp32-s2_datasheet_en.pdf) |
| ESP32-C3 | Yes | Yes | [ESP32-C3](https://www.espressif.com/sites/default/files/documentation/esp32-c3_datasheet_en.pdf) |
| ESP32-S3 | Yes | Yes | [ESP32-S3](https://www.espressif.com/sites/default/files/documentation/esp32-s3_datasheet_en.pdf) | | ESP32-S3 | Yes | Yes | [ESP32-S3](https://www.espressif.com/sites/default/files/documentation/esp32-s3_datasheet_en.pdf) |
| ESP32-C6 | No | Yes | [ESP32-C6](https://www.espressif.com/sites/default/files/documentation/esp32-c6_datasheet_en.pdf) |
> [!NOTE] | ESP32-H2 | No | Yes | [ESP32-H2](https://www.espressif.com/sites/default/files/documentation/esp32-h2_datasheet_en.pdf) |
> ESP32-C2 is also supported by Arduino-ESP32 but requires using Arduino as an ESP-IDF component or rebuilding the static libraries.
> For more information, see the [Arduino as an ESP-IDF component documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/esp-idf_component.html) or the
> [Lib Builder documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/lib_builder.html), respectively.
For more details visit the [supported chips](https://docs.espressif.com/projects/arduino-esp32/en/latest/getting_started.html#supported-soc-s) documentation page. For more details visit the [supported chips](https://docs.espressif.com/projects/arduino-esp32/en/latest/getting_started.html#supported-soc-s) documentation page.
@ -87,7 +68,7 @@ You can use [EspExceptionDecoder](https://github.com/me-no-dev/EspExceptionDecod
### Issue/Bug report template ### Issue/Bug report template
Before reporting an issue, make sure you've searched for similar one that was already created. Also make sure to go through all the issues labeled as [Type: For reference](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue+label%3A%22Type%3A+For+reference%22+). Before reporting an issue, make sure you've searched for similar one that was already created. Also make sure to go through all the issues labelled as [Type: For reference](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue+label%3A%22Type%3A+For+reference%22+).
Finally, if you are sure no one else had the issue, follow the **Issue template** or **Feature request template** while reporting any [new Issue](https://github.com/espressif/arduino-esp32/issues/new/choose). Finally, if you are sure no one else had the issue, follow the **Issue template** or **Feature request template** while reporting any [new Issue](https://github.com/espressif/arduino-esp32/issues/new/choose).

boards.txt

File diff suppressed because it is too large


@ -33,6 +33,7 @@
#include "freertos/FreeRTOS.h" #include "freertos/FreeRTOS.h"
#include "freertos/task.h" #include "freertos/task.h"
#include "freertos/semphr.h" #include "freertos/semphr.h"
#include "esp32-hal.h"
#include "esp8266-compat.h" #include "esp8266-compat.h"
#include "soc/gpio_reg.h" #include "soc/gpio_reg.h"
@ -40,9 +41,6 @@
#include "binary.h" #include "binary.h"
#include "extra_attr.h" #include "extra_attr.h"
#include "pins_arduino.h"
#include "esp32-hal.h"
#define PI 3.1415926535897932384626433832795 #define PI 3.1415926535897932384626433832795
#define HALF_PI 1.5707963267948966192313216916398 #define HALF_PI 1.5707963267948966192313216916398
#define TWO_PI 6.283185307179586476925286766559 #define TWO_PI 6.283185307179586476925286766559
@ -103,10 +101,7 @@
// avr-libc defines _NOP() since 1.6.2 // avr-libc defines _NOP() since 1.6.2
#ifndef _NOP #ifndef _NOP
#define _NOP() \ #define _NOP() do { __asm__ volatile ("nop"); } while (0)
do { \
__asm__ volatile("nop"); \
} while (0)
#endif #endif
#define bit(b) (1UL << (b)) #define bit(b) (1UL << (b))
@ -144,7 +139,7 @@
#endif #endif
#define EXTERNAL_NUM_INTERRUPTS NUM_DIGITAL_PINS // All GPIOs #define EXTERNAL_NUM_INTERRUPTS NUM_DIGITAL_PINS // All GPIOs
#define analogInputToDigitalPin(p) (((p)<NUM_ANALOG_INPUTS)?(analogChannelToDigitalPin(p)):-1) #define analogInputToDigitalPin(p) (((p)<NUM_ANALOG_INPUTS)?(analogChannelToDigitalPin(p)):-1)
#define digitalPinToInterrupt(p) ((((uint8_t)digitalPinToGPIONumber(p)) < NUM_DIGITAL_PINS) ? (p) : NOT_AN_INTERRUPT) #define digitalPinToInterrupt(p) ((((uint8_t)digitalPinToGPIONumber(p))<NUM_DIGITAL_PINS)?digitalPinToGPIONumber(p):NOT_AN_INTERRUPT)
#define digitalPinHasPWM(p) (((uint8_t)digitalPinToGPIONumber(p))<NUM_DIGITAL_PINS) #define digitalPinHasPWM(p) (((uint8_t)digitalPinToGPIONumber(p))<NUM_DIGITAL_PINS)
typedef bool boolean; typedef bool boolean;
@ -181,7 +176,7 @@ void initArduino(void);
unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout); unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout);
unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout); unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout);
uint8_t shiftIn(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder); // codespell:ignore shiftin uint8_t shiftIn(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder);
void shiftOut(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder, uint8_t val); void shiftOut(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder, uint8_t val);
#ifdef __cplusplus #ifdef __cplusplus
@ -201,7 +196,6 @@ void shiftOut(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder, uint8_t val);
#include "Udp.h" #include "Udp.h"
#include "HardwareSerial.h" #include "HardwareSerial.h"
#include "Esp.h" #include "Esp.h"
#include "freertos_stats.h"
// Use float-compatible stl abs() and round(), we don't use Arduino macros to avoid issues with the C++ libraries // Use float-compatible stl abs() and round(), we don't use Arduino macros to avoid issues with the C++ libraries
using std::abs; using std::abs;
@ -217,33 +211,23 @@ uint16_t makeWord(uint8_t h, uint8_t l);
#define word(...) makeWord(__VA_ARGS__) #define word(...) makeWord(__VA_ARGS__)
size_t getArduinoLoopTaskStackSize(void); size_t getArduinoLoopTaskStackSize(void);
#define SET_LOOP_TASK_STACK_SIZE(sz) \ #define SET_LOOP_TASK_STACK_SIZE(sz) size_t getArduinoLoopTaskStackSize() { return sz;}
size_t getArduinoLoopTaskStackSize() { \
return sz; \
}
bool shouldPrintChipDebugReport(void); bool shouldPrintChipDebugReport(void);
#define ENABLE_CHIP_DEBUG_REPORT \ #define ENABLE_CHIP_DEBUG_REPORT bool shouldPrintChipDebugReport(void){return true;}
bool shouldPrintChipDebugReport(void) { \
return true; \
}
// allows user to bypass esp_spiram_test() // allows user to bypass esp_spiram_test()
bool esp_psram_extram_test(void); bool esp_psram_extram_test(void);
#define BYPASS_SPIRAM_TEST(bypass) \ #define BYPASS_SPIRAM_TEST(bypass) bool testSPIRAM(void) { if (bypass) return true; else return esp_psram_extram_test(); }
bool testSPIRAM(void) { \
if (bypass) \
return true; \
else \
return esp_psram_extram_test(); \
}
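These three macros are intended to be placed once at file scope in a sketch, where they expand into the override functions declared above; a brief sketch-level example (the stack size and bypass values are illustrative, and it assumes the core's default weak implementations are the ones being overridden):

#include <Arduino.h>

SET_LOOP_TASK_STACK_SIZE(16 * 1024);  // run loop() with a 16 KB stack (example value)
ENABLE_CHIP_DEBUG_REPORT              // print the chip debug report during startup
BYPASS_SPIRAM_TEST(true)              // skip esp_psram_extram_test() at boot

void setup() {}
void loop() {}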
unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L); unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L);
unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L); unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L);
extern "C" bool getLocalTime(struct tm * info, uint32_t ms = 5000); extern "C" bool getLocalTime(struct tm * info, uint32_t ms = 5000);
extern "C" void configTime(long gmtOffset_sec, int daylightOffset_sec, const char *server1, const char *server2 = nullptr, const char *server3 = nullptr); extern "C" void configTime(long gmtOffset_sec, int daylightOffset_sec,
extern "C" void configTzTime(const char *tz, const char *server1, const char *server2 = nullptr, const char *server3 = nullptr); const char* server1, const char* server2 = nullptr, const char* server3 = nullptr);
extern "C" void configTzTime(const char* tz,
const char* server1, const char* server2 = nullptr, const char* server3 = nullptr);
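getLocalTime(), configTime() and configTzTime() are typically combined for SNTP time sync; a hedged example follows (the TZ string and NTP server names are illustrative, and a working network connection is assumed but not shown):

#include <Arduino.h>
#include <time.h>

void setup() {
  Serial.begin(115200);
  // Assumes Wi-Fi (or Ethernet) is already connected at this point.
  configTzTime("CET-1CEST,M3.5.0,M10.5.0/3", "pool.ntp.org", "time.nist.gov");
  struct tm timeinfo;
  if (getLocalTime(&timeinfo)) {  // waits up to the default 5000 ms for a valid time
    char buf[32];
    strftime(buf, sizeof(buf), "%Y-%m-%d %H:%M:%S", &timeinfo);
    Serial.println(buf);
  }
}

void loop() {}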
void setToneChannel(uint8_t channel = 0); void setToneChannel(uint8_t channel = 0);
void tone(uint8_t _pin, unsigned int frequency, unsigned long duration = 0); void tone(uint8_t _pin, unsigned int frequency, unsigned long duration = 0);
@ -251,7 +235,7 @@ void noTone(uint8_t _pin);
#endif /* __cplusplus */ #endif /* __cplusplus */
// must be applied last as it overrides some of the above #include "pins_arduino.h"
#include "io_pin_remap.h" #include "io_pin_remap.h"
#endif /* _ESP32_CORE_ARDUINO_H_ */ #endif /* _ESP32_CORE_ARDUINO_H_ */


@ -23,7 +23,8 @@
#include "Stream.h" #include "Stream.h"
#include "IPAddress.h" #include "IPAddress.h"
class Client : public Stream { class Client: public Stream
{
public: public:
virtual int connect(IPAddress ip, uint16_t port) =0; virtual int connect(IPAddress ip, uint16_t port) =0;
virtual int connect(const char *host, uint16_t port) =0; virtual int connect(const char *host, uint16_t port) =0;
@ -37,9 +38,9 @@ public:
virtual void stop() = 0; virtual void stop() = 0;
virtual uint8_t connected() = 0; virtual uint8_t connected() = 0;
virtual operator bool() = 0; virtual operator bool() = 0;
protected: protected:
uint8_t *rawIPAddress(IPAddress &addr) { uint8_t* rawIPAddress(IPAddress& addr)
{
return addr.raw_address(); return addr.raw_address();
} }
}; };
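Client is the abstract transport interface that concrete classes (TCP, TLS, and so on) implement; a small hedged sketch of polymorphic use, limited to the members declared above (the helper name is made up):

// Hypothetical helper: works with any concrete Client implementation.
bool probe(Client &c, const char *host, uint16_t port) {
  if (!c.connect(host, port)) {  // pure virtual, supplied by the concrete class
    return false;
  }
  bool up = c.connected();
  c.stop();                      // release the connection
  return up;
}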


@ -1,281 +0,0 @@
/*
*
* Copyright (c) 2021 Project CHIP Authors
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ColorFormat.h"
#include <math.h>
// define a clamp macro to substitute the std::clamp macro which is available from C++17 onwards
#define clamp(a, min, max) ((a) < (min) ? (min) : ((a) > (max) ? (max) : (a)))
const espHsvColor_t HSV_BLACK = {0, 0, 0};
const espHsvColor_t HSV_WHITE = {0, 0, 254};
const espHsvColor_t HSV_RED = {0, 254, 254};
const espHsvColor_t HSV_YELLOW = {42, 254, 254};
const espHsvColor_t HSV_GREEN = {84, 254, 254};
const espHsvColor_t HSV_CYAN = {127, 254, 254};
const espHsvColor_t HSV_BLUE = {169, 254, 254};
const espHsvColor_t HSV_MAGENTA = {211, 254, 254};
const espRgbColor_t RGB_BLACK = {0, 0, 0};
const espRgbColor_t RGB_WHITE = {255, 255, 255};
const espRgbColor_t RGB_RED = {255, 0, 0};
const espRgbColor_t RGB_YELLOW = {255, 255, 0};
const espRgbColor_t RGB_GREEN = {0, 255, 0};
const espRgbColor_t RGB_CYAN = {0, 255, 255};
const espRgbColor_t RGB_BLUE = {0, 0, 255};
const espRgbColor_t RGB_MAGENTA = {255, 0, 255};
// main color temperature values
const espCtColor_t COOL_WHITE_COLOR_TEMPERATURE = {142};
const espCtColor_t DAYLIGHT_WHITE_COLOR_TEMPERATURE = {181};
const espCtColor_t WHITE_COLOR_TEMPERATURE = {250};
const espCtColor_t SOFT_WHITE_COLOR_TEMPERATURE = {370};
const espCtColor_t WARM_WHITE_COLOR_TEMPERATURE = {454};
espRgbColor_t espHsvToRgbColor(uint16_t h, uint8_t s, uint8_t v) {
espHsvColor_t hsv = {h, s, v};
return espHsvColorToRgbColor(hsv);
}
espRgbColor_t espHsvColorToRgbColor(espHsvColor_t hsv) {
espRgbColor_t rgb;
uint8_t region, p, q, t;
uint32_t h, s, v, remainder;
if (hsv.s == 0) {
rgb.r = rgb.g = rgb.b = hsv.v;
} else {
h = hsv.h;
s = hsv.s;
v = hsv.v;
region = h / 43;
remainder = (h - (region * 43)) * 6;
p = (v * (255 - s)) >> 8;
q = (v * (255 - ((s * remainder) >> 8))) >> 8;
t = (v * (255 - ((s * (255 - remainder)) >> 8))) >> 8;
switch (region) {
case 0: rgb.r = v, rgb.g = t, rgb.b = p; break;
case 1: rgb.r = q, rgb.g = v, rgb.b = p; break;
case 2: rgb.r = p, rgb.g = v, rgb.b = t; break;
case 3: rgb.r = p, rgb.g = q, rgb.b = v; break;
case 4: rgb.r = t, rgb.g = p, rgb.b = v; break;
case 5:
default: rgb.r = v, rgb.g = p, rgb.b = q; break;
}
}
return rgb;
}
espHsvColor_t espRgbToHsvColor(uint8_t r, uint8_t g, uint8_t b) {
espRgbColor_t rgb = {r, g, b};
return espRgbColorToHsvColor(rgb);
}
espHsvColor_t espRgbColorToHsvColor(espRgbColor_t rgb) {
espHsvColor_t hsv;
uint8_t rgbMin, rgbMax;
rgbMin = rgb.r < rgb.g ? (rgb.r < rgb.b ? rgb.r : rgb.b) : (rgb.g < rgb.b ? rgb.g : rgb.b);
rgbMax = rgb.r > rgb.g ? (rgb.r > rgb.b ? rgb.r : rgb.b) : (rgb.g > rgb.b ? rgb.g : rgb.b);
hsv.v = rgbMax;
if (hsv.v == 0) {
hsv.h = 0;
hsv.s = 0;
return hsv;
}
hsv.s = 255 * (rgbMax - rgbMin) / hsv.v;
if (hsv.s == 0) {
hsv.h = 0;
return hsv;
}
if (rgbMax == rgb.r) {
hsv.h = 0 + 43 * (rgb.g - rgb.b) / (rgbMax - rgbMin);
} else if (rgbMax == rgb.g) {
hsv.h = 85 + 43 * (rgb.b - rgb.r) / (rgbMax - rgbMin);
} else {
hsv.h = 171 + 43 * (rgb.r - rgb.g) / (rgbMax - rgbMin);
}
return hsv;
}
espRgbColor_t espXYColorToRgbColor(uint8_t Level, espXyColor_t xy) {
return espXYToRgbColor(Level, xy.x, xy.y, true);
}
espRgbColor_t espXYToRgbColor(uint8_t Level, uint16_t current_X, uint16_t current_Y, bool addXYZScaling) {
// convert xyY color space to RGB
// https://www.easyrgb.com/en/math.php
// https://en.wikipedia.org/wiki/SRGB
// refer https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
// The current_X/current_Y attribute contains the current value of the normalized chromaticity value of x/y.
// The value of x/y shall be related to the current_X/current_Y attribute by the relationship
// x = current_X/65536
// y = current_Y/65536
// z = 1-x-y
espRgbColor_t rgb;
float x, y, z;
float X, Y, Z;
float r, g, b;
x = ((float)current_X) / 65535.0f;
y = ((float)current_Y) / 65535.0f;
z = 1.0f - x - y;
// Calculate XYZ values
// Y - given brightness in 0 - 1 range
Y = ((float)Level) / 254.0f;
X = (Y / y) * x;
Z = (Y / y) * z;
// X, Y and Z input refer to a D65/2° standard illuminant.
// sR, sG and sB (standard RGB) output range = 0 ÷ 255
// convert XYZ to RGB - CIE XYZ to sRGB
if (addXYZScaling) {
X = X / 100.0f;
Y = Y / 100.0f;
Z = Z / 100.0f;
}
r = (X * 3.2406f) - (Y * 1.5372f) - (Z * 0.4986f);
g = -(X * 0.9689f) + (Y * 1.8758f) + (Z * 0.0415f);
b = (X * 0.0557f) - (Y * 0.2040f) + (Z * 1.0570f);
// apply gamma 2.2 correction
r = (r <= 0.0031308f ? 12.92f * r : (1.055f) * pow(r, (1.0f / 2.4f)) - 0.055f);
g = (g <= 0.0031308f ? 12.92f * g : (1.055f) * pow(g, (1.0f / 2.4f)) - 0.055f);
b = (b <= 0.0031308f ? 12.92f * b : (1.055f) * pow(b, (1.0f / 2.4f)) - 0.055f);
// Round off
r = clamp(r, 0, 1);
g = clamp(g, 0, 1);
b = clamp(b, 0, 1);
// these rgb values are in the range of 0 to 1, convert to limit of HW specific LED
rgb.r = (uint8_t)(r * 255);
rgb.g = (uint8_t)(g * 255);
rgb.b = (uint8_t)(b * 255);
return rgb;
}
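As the comments above note, current_X and current_Y carry the CIE x and y chromaticity scaled into a 16-bit range; an illustrative call for the D65 white point (x about 0.3127, y about 0.3290) at full brightness:

// Illustrative only; the exact channel values depend on the scaling, gamma and clamping steps above.
espRgbColor_t white = espXYToRgbColor(254, (uint16_t)(0.3127f * 65535), (uint16_t)(0.3290f * 65535), true);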
espXyColor_t espRgbToXYColor(uint8_t r, uint8_t g, uint8_t b) {
espRgbColor_t rgb = {r, g, b};
return espRgbColorToXYColor(rgb);
}
espXyColor_t espRgbColorToXYColor(espRgbColor_t rgb) {
// convert RGB to xy color space
// https://www.easyrgb.com/en/math.php
// https://en.wikipedia.org/wiki/SRGB
// refer https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
espXyColor_t xy;
float r, g, b;
float X, Y, Z;
float x, y;
r = ((float)rgb.r) / 255.0f;
g = ((float)rgb.g) / 255.0f;
b = ((float)rgb.b) / 255.0f;
// convert RGB to XYZ - sRGB to CIE XYZ
r = (r <= 0.04045f ? r / 12.92f : pow((r + 0.055f) / 1.055f, 2.4f));
g = (g <= 0.04045f ? g / 12.92f : pow((g + 0.055f) / 1.055f, 2.4f));
b = (b <= 0.04045f ? b / 12.92f : pow((b + 0.055f) / 1.055f, 2.4f));
// https://gist.github.com/popcorn245/30afa0f98eea1c2fd34d
X = r * 0.649926f + g * 0.103455f + b * 0.197109f;
Y = r * 0.234327f + g * 0.743075f + b * 0.022598f;
Z = r * 0.0000000f + g * 0.053077f + b * 1.035763f;
// sR, sG and sB (standard RGB) input range = 0 ÷ 255
// X, Y and Z output refer to a D65/2° standard illuminant.
X = r * 0.4124564f + g * 0.3575761f + b * 0.1804375f;
Y = r * 0.2126729f + g * 0.7151522f + b * 0.0721750f;
Z = r * 0.0193339f + g * 0.1191920f + b * 0.9503041f;
// Calculate xy values
x = X / (X + Y + Z);
y = Y / (X + Y + Z);
// convert to 0-65535 range
xy.x = (uint16_t)(x * 65535);
xy.y = (uint16_t)(y * 65535);
return xy;
}
espRgbColor_t espCTToRgbColor(uint16_t ct) {
espCtColor_t ctColor = {ct};
return espCTColorToRgbColor(ctColor);
}
espRgbColor_t espCTColorToRgbColor(espCtColor_t ct) {
espRgbColor_t rgb = {0, 0, 0};
float r, g, b;
if (ct.ctMireds == 0) {
return rgb;
}
// Algorithm credits to Tanner Helland: https://tannerhelland.com/2012/09/18/convert-temperature-rgb-algorithm-code.html
// Convert Mireds to centiKelvins. k = 1,000,000/mired
float ctCentiKelvin = 10000 / ct.ctMireds;
// Red
if (ctCentiKelvin <= 66) {
r = 255;
} else {
r = 329.698727446f * pow(ctCentiKelvin - 60, -0.1332047592f);
}
// Green
if (ctCentiKelvin <= 66) {
g = 99.4708025861f * log(ctCentiKelvin) - 161.1195681661f;
} else {
g = 288.1221695283f * pow(ctCentiKelvin - 60, -0.0755148492f);
}
// Blue
if (ctCentiKelvin >= 66) {
b = 255;
} else {
if (ctCentiKelvin <= 19) {
b = 0;
} else {
b = 138.5177312231 * log(ctCentiKelvin - 10) - 305.0447927307;
}
}
rgb.r = (uint8_t)clamp(r, 0, 255);
rgb.g = (uint8_t)clamp(g, 0, 255);
rgb.b = (uint8_t)clamp(b, 0, 255);
return rgb;
}
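For reference, typical calls into this API using the constants defined at the top of the file (the comments state the rough expected output, not exact channel values):

espRgbColor_t red  = espHsvColorToRgbColor(HSV_RED);                      // {0, 254, 254} -> near-pure red
espRgbColor_t warm = espCTColorToRgbColor(WARM_WHITE_COLOR_TEMPERATURE);  // 454 mireds (about 2200 K) -> warm white
espRgbColor_t mag  = espHsvToRgbColor(211, 254, 254);                     // same input as HSV_MAGENTA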


@ -1,71 +0,0 @@
/*
*
* Copyright (c) 2021 Project CHIP Authors
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <stdint.h>
#include <stdbool.h>
#ifdef __cplusplus
extern "C" {
#endif
struct RgbColor_t {
uint8_t r;
uint8_t g;
uint8_t b;
};
struct HsvColor_t {
uint16_t h;
uint8_t s;
uint8_t v;
};
struct XyColor_t {
uint16_t x;
uint16_t y;
};
struct CtColor_t {
uint16_t ctMireds;
};
typedef struct RgbColor_t espRgbColor_t;
typedef struct HsvColor_t espHsvColor_t;
typedef struct XyColor_t espXyColor_t;
typedef struct CtColor_t espCtColor_t;
espRgbColor_t espXYToRgbColor(uint8_t Level, uint16_t current_X, uint16_t current_Y, bool addXYZScaling);
espRgbColor_t espXYColorToRgb(uint8_t Level, espXyColor_t xy);
espXyColor_t espRgbColorToXYColor(espRgbColor_t rgb);
espXyColor_t espRgbToXYColor(uint8_t r, uint8_t g, uint8_t b);
espRgbColor_t espHsvColorToRgbColor(espHsvColor_t hsv);
espRgbColor_t espHsvToRgbColor(uint16_t h, uint8_t s, uint8_t v);
espRgbColor_t espCTColorToRgbColor(espCtColor_t ct);
espRgbColor_t espCTToRgbColor(uint16_t ct);
espHsvColor_t espRgbColorToHsvColor(espRgbColor_t rgb);
espHsvColor_t espRgbToHsvColor(uint8_t r, uint8_t g, uint8_t b);
extern const espHsvColor_t HSV_BLACK, HSV_WHITE, HSV_RED, HSV_YELLOW, HSV_GREEN, HSV_CYAN, HSV_BLUE, HSV_MAGENTA;
extern const espCtColor_t COOL_WHITE_COLOR_TEMPERATURE, DAYLIGHT_WHITE_COLOR_TEMPERATURE, WHITE_COLOR_TEMPERATURE, SOFT_WHITE_COLOR_TEMPERATURE,
WARM_WHITE_COLOR_TEMPERATURE;
extern const espRgbColor_t RGB_BLACK, RGB_WHITE, RGB_RED, RGB_YELLOW, RGB_GREEN, RGB_CYAN, RGB_BLUE, RGB_MAGENTA;
#ifdef __cplusplus
}
#endif


@ -21,7 +21,6 @@
#include "Esp.h" #include "Esp.h"
#include "esp_sleep.h" #include "esp_sleep.h"
#include "spi_flash_mmap.h" #include "spi_flash_mmap.h"
#include "esp_idf_version.h"
#include <memory> #include <memory>
#include <soc/soc.h> #include <soc/soc.h>
#include <esp_partition.h> #include <esp_partition.h>
@ -61,12 +60,6 @@ extern "C" {
#elif CONFIG_IDF_TARGET_ESP32H2 #elif CONFIG_IDF_TARGET_ESP32H2
#include "esp32h2/rom/spi_flash.h" #include "esp32h2/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32h2 is located at 0x0000 #define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32h2 is located at 0x0000
#elif CONFIG_IDF_TARGET_ESP32P4
#include "esp32p4/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x2000 // Esp32p4 is located at 0x2000
#elif CONFIG_IDF_TARGET_ESP32C5
#include "esp32c5/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x2000 // Esp32c5 is located at 0x2000
#else #else
#error Target CONFIG_IDF_TARGET is not supported #error Target CONFIG_IDF_TARGET is not supported
#endif #endif
@ -78,10 +71,9 @@ extern "C" {
// REG_SPI_BASE is not defined for S3/C3 ?? // REG_SPI_BASE is not defined for S3/C3 ??
#if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3 #if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
#ifdef REG_SPI_BASE #ifndef REG_SPI_BASE
#undef REG_SPI_BASE
#endif // REG_SPI_BASE
#define REG_SPI_BASE(i) (DR_REG_SPI1_BASE + (((i)>1) ? (((i)* 0x1000) + 0x20000) : (((~(i)) & 1)* 0x1000 ))) #define REG_SPI_BASE(i) (DR_REG_SPI1_BASE + (((i)>1) ? (((i)* 0x1000) + 0x20000) : (((~(i)) & 1)* 0x1000 )))
#endif // REG_SPI_BASE
#endif // TARGET #endif // TARGET
/** /**
@ -91,90 +83,110 @@ extern "C" {
* uint32_t = test = 10_MHz; // --> 10000000 * uint32_t = test = 10_MHz; // --> 10000000
*/ */
unsigned long long operator"" _kHz(unsigned long long x) { unsigned long long operator"" _kHz(unsigned long long x)
{
return x * 1000; return x * 1000;
} }
unsigned long long operator"" _MHz(unsigned long long x) { unsigned long long operator"" _MHz(unsigned long long x)
{
return x * 1000 * 1000; return x * 1000 * 1000;
} }
unsigned long long operator"" _GHz(unsigned long long x) { unsigned long long operator"" _GHz(unsigned long long x)
{
return x * 1000 * 1000 * 1000; return x * 1000 * 1000 * 1000;
} }
unsigned long long operator"" _kBit(unsigned long long x) { unsigned long long operator"" _kBit(unsigned long long x)
{
return x * 1024; return x * 1024;
} }
unsigned long long operator"" _MBit(unsigned long long x) { unsigned long long operator"" _MBit(unsigned long long x)
{
return x * 1024 * 1024; return x * 1024 * 1024;
} }
unsigned long long operator"" _GBit(unsigned long long x) { unsigned long long operator"" _GBit(unsigned long long x)
{
return x * 1024 * 1024 * 1024; return x * 1024 * 1024 * 1024;
} }
unsigned long long operator"" _kB(unsigned long long x) { unsigned long long operator"" _kB(unsigned long long x)
{
return x * 1024; return x * 1024;
} }
unsigned long long operator"" _MB(unsigned long long x) { unsigned long long operator"" _MB(unsigned long long x)
{
return x * 1024 * 1024; return x * 1024 * 1024;
} }
unsigned long long operator"" _GB(unsigned long long x) { unsigned long long operator"" _GB(unsigned long long x)
{
return x * 1024 * 1024 * 1024; return x * 1024 * 1024 * 1024;
} }
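The literals above make size and frequency constants self-documenting; a couple of illustrative uses (they return unsigned long long, so assigning to uint32_t narrows implicitly):

uint32_t apb_clk  = 80_MHz;  // 80000000
uint32_t cache_sz = 32_kB;   // 32768
unsigned long long flash_sz = 4_MB;  // 4194304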
EspClass ESP; EspClass ESP;
void EspClass::deepSleep(uint64_t time_us) { void EspClass::deepSleep(uint64_t time_us)
{
esp_deep_sleep(time_us); esp_deep_sleep(time_us);
} }
void EspClass::restart(void) { void EspClass::restart(void)
{
esp_restart(); esp_restart();
} }
uint32_t EspClass::getHeapSize(void) { uint32_t EspClass::getHeapSize(void)
{
return heap_caps_get_total_size(MALLOC_CAP_INTERNAL); return heap_caps_get_total_size(MALLOC_CAP_INTERNAL);
} }
uint32_t EspClass::getFreeHeap(void) { uint32_t EspClass::getFreeHeap(void)
{
return heap_caps_get_free_size(MALLOC_CAP_INTERNAL); return heap_caps_get_free_size(MALLOC_CAP_INTERNAL);
} }
uint32_t EspClass::getMinFreeHeap(void) { uint32_t EspClass::getMinFreeHeap(void)
{
return heap_caps_get_minimum_free_size(MALLOC_CAP_INTERNAL); return heap_caps_get_minimum_free_size(MALLOC_CAP_INTERNAL);
} }
uint32_t EspClass::getMaxAllocHeap(void) { uint32_t EspClass::getMaxAllocHeap(void)
{
return heap_caps_get_largest_free_block(MALLOC_CAP_INTERNAL); return heap_caps_get_largest_free_block(MALLOC_CAP_INTERNAL);
} }
uint32_t EspClass::getPsramSize(void) { uint32_t EspClass::getPsramSize(void)
{
if(psramFound()){ if(psramFound()){
return heap_caps_get_total_size(MALLOC_CAP_SPIRAM); return heap_caps_get_total_size(MALLOC_CAP_SPIRAM);
} }
return 0; return 0;
} }
uint32_t EspClass::getFreePsram(void) { uint32_t EspClass::getFreePsram(void)
{
if(psramFound()){ if(psramFound()){
return heap_caps_get_free_size(MALLOC_CAP_SPIRAM); return heap_caps_get_free_size(MALLOC_CAP_SPIRAM);
} }
return 0; return 0;
} }
uint32_t EspClass::getMinFreePsram(void) { uint32_t EspClass::getMinFreePsram(void)
{
if(psramFound()){ if(psramFound()){
return heap_caps_get_minimum_free_size(MALLOC_CAP_SPIRAM); return heap_caps_get_minimum_free_size(MALLOC_CAP_SPIRAM);
} }
return 0; return 0;
} }
uint32_t EspClass::getMaxAllocPsram(void) { uint32_t EspClass::getMaxAllocPsram(void)
{
if(psramFound()){ if(psramFound()){
return heap_caps_get_largest_free_block(MALLOC_CAP_SPIRAM); return heap_caps_get_largest_free_block(MALLOC_CAP_SPIRAM);
} }
@@ -184,9 +196,7 @@ uint32_t EspClass::getMaxAllocPsram(void) {
static uint32_t sketchSize(sketchSize_t response) {
  esp_image_metadata_t data;
  const esp_partition_t *running = esp_ota_get_running_partition();
  if (!running) {
    return 0;
  }
  const esp_partition_pos_t running_pos = {
    .offset = running->address,
    .size = running->size,
@@ -204,7 +214,8 @@ uint32_t EspClass::getSketchSize() {
  return sketchSize(SKETCH_SIZE_TOTAL);
}

String EspClass::getSketchMD5() {
  static String result;
  if (result.length()) {
    return result;
@@ -257,43 +268,53 @@ uint32_t EspClass::getFreeSketchSpace() {
  return _partition->size;
}

uint16_t EspClass::getChipRevision(void) {
  esp_chip_info_t chip_info;
  esp_chip_info(&chip_info);
  return chip_info.revision;
}

const char *EspClass::getChipModel(void) {
#if CONFIG_IDF_TARGET_ESP32
  uint32_t chip_ver = REG_GET_FIELD(EFUSE_BLK0_RDATA3_REG, EFUSE_RD_CHIP_PACKAGE);
  uint32_t pkg_ver = chip_ver & 0x7;
  switch (pkg_ver) {
    case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDQ6:
      if ((getChipRevision() / 100) == 3) {
        return "ESP32-D0WDQ6-V3";
      } else {
        return "ESP32-D0WDQ6";
      }
    case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDQ5:
      if ((getChipRevision() / 100) == 3) {
        return "ESP32-D0WD-V3";
      } else {
        return "ESP32-D0WD";
      }
    case EFUSE_RD_CHIP_VER_PKG_ESP32D2WDQ5: return "ESP32-D2WD";
    case EFUSE_RD_CHIP_VER_PKG_ESP32U4WDH: return "ESP32-U4WDH";
    case EFUSE_RD_CHIP_VER_PKG_ESP32PICOD4: return "ESP32-PICO-D4";
    case EFUSE_RD_CHIP_VER_PKG_ESP32PICOV302: return "ESP32-PICO-V3-02";
    case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDR2V3: return "ESP32-D0WDR2-V3";
    default: return "Unknown";
  }
#elif CONFIG_IDF_TARGET_ESP32S2
  uint32_t pkg_ver = REG_GET_FIELD(EFUSE_RD_MAC_SPI_SYS_3_REG, EFUSE_PKG_VERSION);
  switch (pkg_ver) {
    case 0: return "ESP32-S2";
    case 1: return "ESP32-S2FH16";
    case 2: return "ESP32-S2FH32";
    default: return "ESP32-S2 (Unknown)";
  }
#else
  esp_chip_info_t chip_info;
@@ -304,43 +325,43 @@ const char *EspClass::getChipModel(void) {
    case CHIP_ESP32C2: return "ESP32-C2";
    case CHIP_ESP32C6: return "ESP32-C6";
    case CHIP_ESP32H2: return "ESP32-H2";
    case CHIP_ESP32P4: return "ESP32-P4";
#if ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 5, 0)
    case CHIP_ESP32C5: return "ESP32-C5";
    case CHIP_ESP32C61: return "ESP32-C61";
    case CHIP_ESP32H21: return "ESP32-H21";
#endif
    default: return "UNKNOWN";
  }
#endif
}
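A short identification sketch using the two accessors above. On recent cores the revision value appears to be encoded as major * 100 + minor (which is why the code above checks `/ 100 == 3`); if that does not hold on a given IDF version, just print the raw value instead:

#include <Arduino.h>

void setup() {
  Serial.begin(115200);
  uint16_t rev = ESP.getChipRevision();  // e.g. 301 for a v3.1 silicon revision (assumed encoding)
  Serial.printf("Chip: %s, revision v%u.%u\n", ESP.getChipModel(), rev / 100, rev % 100);
}

void loop() {}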
uint8_t EspClass::getChipCores(void) {
  esp_chip_info_t chip_info;
  esp_chip_info(&chip_info);
  return chip_info.cores;
}

const char *EspClass::getSdkVersion(void) {
  return esp_get_idf_version();
}

const char *EspClass::getCoreVersion(void) {
  return ESP_ARDUINO_VERSION_STR;
}

uint32_t ESP_getFlashChipId(void) {
  uint32_t id = g_rom_flashchip.device_id;
  id = ((id & 0xff) << 16) | ((id >> 16) & 0xff) | (id & 0xff00);
  return id;
}

uint32_t EspClass::getFlashChipSize(void) {
  uint32_t id = (ESP_getFlashChipId() >> 16) & 0xFF;
  return 2 << (id - 1);
}
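Worked example of the capacity decode above: a JEDEC capacity byte of 0x16 (22 decimal) gives 2 << (22 - 1) = 4194304 bytes, i.e. a 4 MB part. The byte value below is illustrative only:

#include <Arduino.h>

void setup() {
  Serial.begin(115200);
  uint32_t capacity_byte = 0x16;  // illustrative value, as found on many 4 MB parts
  Serial.printf("Decoded flash size: %u bytes\n", 2u << (capacity_byte - 1));  // prints 4194304
}

void loop() {}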
uint32_t EspClass::getFlashChipSpeed(void) {
  esp_image_header_t fhdr;
  if (esp_flash_read(esp_flash_default_chip, (void *)&fhdr, ESP_FLASH_IMAGE_BASE, sizeof(esp_image_header_t)) && fhdr.magic != ESP_IMAGE_HEADER_MAGIC) {
    return 0;
@@ -348,9 +369,8 @@ uint32_t EspClass::getFlashChipSpeed(void) {
  return magicFlashChipSpeed(fhdr.spi_speed);
}

// FIXME for P4
#if !defined(CONFIG_IDF_TARGET_ESP32P4)
FlashMode_t EspClass::getFlashChipMode(void) {
#if CONFIG_IDF_TARGET_ESP32S2
  uint32_t spi_ctrl = REG_READ(PERIPHS_SPI_FLASH_CTRL);
#else
@@ -376,9 +396,9 @@ FlashMode_t EspClass::getFlashChipMode(void) {
  }
  return (FM_DOUT);
}
#endif  // if !defined(CONFIG_IDF_TARGET_ESP32P4)
uint32_t EspClass::magicFlashChipSize(uint8_t byte) {
  /*
    FLASH_SIZES = {
      "1MB": 0x00,
@@ -405,7 +425,8 @@ uint32_t EspClass::magicFlashChipSize(uint8_t byte) {
  }
}

uint32_t EspClass::magicFlashChipSpeed(uint8_t byte) {
#if CONFIG_IDF_TARGET_ESP32C2
  /*
    FLASH_FREQUENCY = {
@@ -424,6 +445,7 @@ uint32_t EspClass::magicFlashChipSpeed(uint8_t byte) {
    return 0;
  }
#elif CONFIG_IDF_TARGET_ESP32C6
  /*
    FLASH_FREQUENCY = {
@@ -458,6 +480,7 @@ uint32_t EspClass::magicFlashChipSpeed(uint8_t byte) {
    return 0;
  }
#else
  /*
    FLASH_FREQUENCY = {
@@ -478,7 +501,9 @@ uint32_t EspClass::magicFlashChipSpeed(uint8_t byte) {
#endif
}

FlashMode_t EspClass::magicFlashChipMode(uint8_t byte) {
  FlashMode_t mode = (FlashMode_t)byte;
  if (mode > FM_SLOW_READ) {
    mode = FM_UNKNOWN;
@@ -486,32 +511,39 @@ FlashMode_t EspClass::magicFlashChipMode(uint8_t byte) {
  return mode;
}

bool EspClass::flashEraseSector(uint32_t sector) {
  return esp_flash_erase_region(esp_flash_default_chip, sector * SPI_FLASH_SEC_SIZE, SPI_FLASH_SEC_SIZE) == ESP_OK;
}

// Warning: These functions do not work with encrypted flash
bool EspClass::flashWrite(uint32_t offset, uint32_t *data, size_t size) {
  return esp_flash_write(esp_flash_default_chip, (const void *)data, offset, size) == ESP_OK;
}

bool EspClass::flashRead(uint32_t offset, uint32_t *data, size_t size) {
  return esp_flash_read(esp_flash_default_chip, (void *)data, offset, size) == ESP_OK;
}

bool EspClass::partitionEraseRange(const esp_partition_t *partition, uint32_t offset, size_t size) {
  return esp_partition_erase_range(partition, offset, size) == ESP_OK;
}

bool EspClass::partitionWrite(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size) {
  return esp_partition_write(partition, offset, data, size) == ESP_OK;
}

bool EspClass::partitionRead(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size) {
  return esp_partition_read(partition, offset, data, size) == ESP_OK;
}

uint64_t EspClass::getEfuseMac(void) {
  uint64_t _chipmacid = 0LL;
  esp_efuse_mac_get_default((uint8_t *)(&_chipmacid));
  return _chipmacid;
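A small sketch formatting the 64-bit eFuse value above as a MAC address string. The byte order assumes the little-endian packing produced by esp_efuse_mac_get_default() into the 64-bit variable:

#include <Arduino.h>

void setup() {
  Serial.begin(115200);
  uint64_t mac = ESP.getEfuseMac();  // six factory-programmed bytes, LSB first in the 64-bit value
  Serial.printf("eFuse MAC: %02X:%02X:%02X:%02X:%02X:%02X\n",
                (uint8_t)mac, (uint8_t)(mac >> 8), (uint8_t)(mac >> 16),
                (uint8_t)(mac >> 24), (uint8_t)(mac >> 32), (uint8_t)(mac >> 40));
}

void loop() {}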


@@ -26,7 +26,7 @@
#include "esp_cpu.h"

/**
 * AVR macros for WDT management
 */
typedef enum {
  WDTO_0MS = 0,  //!< WDTO_0MS
@@ -42,6 +42,7 @@ typedef enum {
  WDTO_8S = 8000  //!< WDTO_8S
} WDTO_t;

typedef enum {
  FM_QIO = 0x00,
  FM_QOUT = 0x01,
@@ -57,7 +58,8 @@ typedef enum {
  SKETCH_SIZE_FREE = 1
} sketchSize_t;

class EspClass {
public:
  EspClass() {}
  ~EspClass() {}
@@ -78,9 +80,7 @@ public:
  uint16_t getChipRevision();
  const char *getChipModel();
  uint8_t getChipCores();
  uint32_t getCpuFreqMHz() {
    return getCpuFrequencyMhz();
  }
  inline uint32_t getCycleCount() __attribute__((always_inline));
  const char *getSdkVersion();  //version of ESP-IDF
@@ -109,9 +109,11 @@ public:
  bool partitionRead(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size);
  uint64_t getEfuseMac();
};

uint32_t ARDUINO_ISR_ATTR EspClass::getCycleCount() {
  return (uint32_t)esp_cpu_get_cycle_count();
}
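Since getCycleCount() is a 32-bit CPU cycle counter, it suits short interval measurements; a rough sketch (the counter wraps roughly every 18 s at 240 MHz, so only time short spans):

#include <Arduino.h>

void setup() {
  Serial.begin(115200);
}

void loop() {
  uint32_t start = ESP.getCycleCount();
  delayMicroseconds(10);
  uint32_t cycles = ESP.getCycleCount() - start;  // unsigned subtraction stays valid across a single wrap
  Serial.printf("~10 us took %u cycles at %u MHz\n", cycles, ESP.getCpuFreqMHz());
  delay(1000);
}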


@@ -19,8 +19,8 @@
#include "esp_partition.h"
#include "esp_ota_ops.h"
#include "esp_image_format.h"
#include "pins_arduino.h"
#include "esp32-hal.h"
#include "firmware_msc_fat.h"
#include "spi_flash_mmap.h"
@@ -310,8 +310,7 @@ static int32_t msc_write(uint32_t lba, uint32_t offset, uint8_t *buffer, uint32_
      return 0;
    }
  } else if (msc_update_state == MSC_UPDATE_RUNNING) {
    if (msc_update_entry && msc_update_entry->file_size && msc_update_bytes_written < msc_update_entry->file_size
        && (msc_update_bytes_written + bufsize) >= msc_update_entry->file_size) {
      bufsize = msc_update_entry->file_size - msc_update_bytes_written;
    }
    if (msc_update_write(msc_ota_partition, ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset, buffer, bufsize) == ESP_OK) {


@@ -11,24 +11,34 @@
typedef void (*voidFuncPtr)(void);
typedef void (*voidFuncPtrArg)(void *);

extern "C" {
extern void __attachInterruptFunctionalArg(uint8_t pin, voidFuncPtrArg userFunc, void *arg, int intr_type, bool functional);
}

void ARDUINO_ISR_ATTR interruptFunctional(void *arg) {
  InterruptArgStructure *localArg = (InterruptArgStructure *)arg;
  if (localArg->interruptFunction) {
    localArg->interruptFunction();
  }
}

void attachInterrupt(uint8_t pin, std::function<void(void)> intRoutine, int mode) {
  // use the local interrupt routine which takes the ArgStructure as argument
  __attachInterruptFunctionalArg(pin, (voidFuncPtrArg)interruptFunctional, new InterruptArgStructure{intRoutine}, mode, true);
}

extern "C" {
void cleanupFunctional(void *arg) {
  delete (InterruptArgStructure *)arg;
}
}
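Sketch-level illustration of the std::function overload above. The button pin is hypothetical, and a capturing lambda is used so the std::function overload is selected rather than the plain function-pointer variant:

#include <Arduino.h>
#include <FunctionalInterrupt.h>

constexpr uint8_t BUTTON_PIN = 0;  // hypothetical pin, adjust for the board
volatile uint32_t presses = 0;

void setup() {
  Serial.begin(115200);
  pinMode(BUTTON_PIN, INPUT_PULLUP);
  volatile uint32_t *count = &presses;  // captured by value below
  attachInterrupt(BUTTON_PIN, [count]() { (*count)++; }, FALLING);
}

void loop() {
  Serial.printf("presses: %u\n", presses);
  delay(1000);
}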


@@ -20,11 +20,11 @@
#include <Arduino.h>
#include <HEXBuilder.h>

static uint8_t hex_char_to_byte(uint8_t c) {
  return (c >= 'a' && c <= 'f') ? (c - ((uint8_t)'a' - 0xa))
         : (c >= 'A' && c <= 'F') ? (c - ((uint8_t)'A' - 0xA))
         : (c >= '0' && c <= '9') ? (c - (uint8_t)'0')
                                  : 0x10;  // unknown char is 16
}

size_t HEXBuilder::hex2bytes(unsigned char *out, size_t maxlen, String &in) {
@@ -36,19 +36,16 @@ size_t HEXBuilder::hex2bytes(unsigned char *out, size_t maxlen, const char *in)
  for (; *in; in++) {
    uint8_t c = hex_char_to_byte(*in);
    // Silently skip anything unknown.
    if (c > 15) {
      continue;
    }

    if (len & 1) {
      if (len / 2 < maxlen) {
        out[len / 2] |= c;
      }
    } else {
      if (len / 2 < maxlen) {
        out[len / 2] = c << 4;
      }
    }
    len++;
  }
  return (len + 1) / 2;
@@ -66,9 +63,7 @@ size_t HEXBuilder::bytes2hex(char *out, size_t maxlen, const unsigned char *in,
String HEXBuilder::bytes2hex(const unsigned char *in, size_t len) {
  size_t maxlen = len * 2 + 1;
  char *out = (char *)malloc(maxlen);
  if (!out) {
    return String();
  }
  bytes2hex(out, maxlen, in, len);
  String ret = String(out);
  free(out);
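Usage sketch for the two conversions above, assuming HEXBuilder's static helpers as declared in HEXBuilder.h; the round trip should print the same bytes back (lower-case hex in the current implementation):

#include <Arduino.h>
#include <HEXBuilder.h>

void setup() {
  Serial.begin(115200);
  unsigned char raw[4] = {0};
  size_t n = HEXBuilder::hex2bytes(raw, sizeof(raw), "DEADBEEF");  // n == 4, raw = {0xDE, 0xAD, 0xBE, 0xEF}
  Serial.println(HEXBuilder::bytes2hex(raw, n));                   // expected to print "deadbeef"
}

void loop() {}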


@@ -1,4 +1,4 @@
// Copyright 2015-2024 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -24,7 +24,6 @@
#include "esp_intr_alloc.h"
#include "soc/periph_defs.h"
#include "soc/io_mux_reg.h"
#include "soc/usb_serial_jtag_struct.h"
#pragma GCC diagnostic ignored "-Wvolatile"
#include "hal/usb_serial_jtag_ll.h"
#pragma GCC diagnostic warning "-Wvolatile"
@@ -36,31 +35,30 @@ static RingbufHandle_t tx_ring_buf = NULL;
static QueueHandle_t rx_queue = NULL;
static uint8_t rx_data_buf[64] = {0};
static intr_handle_t intr_handle = NULL;
static SemaphoreHandle_t tx_lock = NULL;
static volatile bool connected = false;

// SOF in ISR causes problems for uploading firmware
//static volatile unsigned long lastSOF_ms;
//static volatile uint8_t SOF_TIMEOUT;

// timeout has no effect when USB CDC is unplugged
static uint32_t tx_timeout_ms = 100;

static esp_event_loop_handle_t arduino_hw_cdc_event_loop_handle = NULL;
static esp_err_t
arduino_hw_cdc_event_post(esp_event_base_t event_base, int32_t event_id, void *event_data, size_t event_data_size, BaseType_t *task_unblocked) {
  if (arduino_hw_cdc_event_loop_handle == NULL) {
    return ESP_FAIL;
  }
  return esp_event_isr_post_to(arduino_hw_cdc_event_loop_handle, event_base, event_id, event_data, event_data_size, task_unblocked);
}

static esp_err_t
arduino_hw_cdc_event_handler_register_with(esp_event_base_t event_base, int32_t event_id, esp_event_handler_t event_handler, void *event_handler_arg) {
  if (!arduino_hw_cdc_event_loop_handle) {
    esp_event_loop_args_t event_task_args = {
      .queue_size = 5, .task_name = "arduino_hw_cdc_events", .task_priority = 5, .task_stack_size = 2048, .task_core_id = tskNO_AFFINITY
    };
    if (esp_event_loop_create(&event_task_args, &arduino_hw_cdc_event_loop_handle) != ESP_OK) {
      log_e("esp_event_loop_create failed");
@@ -80,29 +78,31 @@ static void hw_cdc_isr_handler(void *arg) {
  if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY) {
    // Interrupt tells us the host picked up the data we sent.
    if (!HWCDC::isPlugged()) {
      connected = false;
      usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
      // USB is unplugged, nothing to be done here
      return;
    } else {
      connected = true;
    }
    if (tx_ring_buf != NULL && usb_serial_jtag_ll_txfifo_writable() == 1) {
      // We disable the interrupt here so that the interrupt won't be triggered if there is no data to send.
      usb_serial_jtag_ll_disable_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
      size_t queued_size = 0;
      uint8_t *queued_buff = (uint8_t *)xRingbufferReceiveUpToFromISR(tx_ring_buf, &queued_size, 64);
      // If the hardware fifo is available, write in it. Otherwise, do nothing.
      if (queued_buff != NULL) {  //Although tx_queued_bytes may be larger than 0. We may have interrupt before xRingbufferSend() was called.
        //Copy the queued buffer into the TX FIFO
        usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
        usb_serial_jtag_ll_write_txfifo(queued_buff, queued_size);
        usb_serial_jtag_ll_txfifo_flush();
        vRingbufferReturnItemFromISR(tx_ring_buf, queued_buff, &xTaskWoken);
        if (connected) {
          usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
        }
        //send event?
        //ets_printf("TX:%u\n", queued_size);
        event.tx.len = queued_size;
@@ -114,7 +114,7 @@ static void hw_cdc_isr_handler(void *arg) {
  }

  if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT) {
    // read rx buffer(max length is 64), and send available data to ringbuffer.
    // Ensure the rx buffer size is larger than RX_MAX_SIZE.
    usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT);
    uint32_t rx_fifo_len = usb_serial_jtag_ll_read_rxfifo(rx_data_buf, 64);
@@ -124,149 +124,45 @@ static void hw_cdc_isr_handler(void *arg) {
        break;
      }
    }
    event.rx.len = i;
    arduino_hw_cdc_event_post(ARDUINO_HW_CDC_EVENTS, ARDUINO_HW_CDC_RX_EVENT, &event, sizeof(arduino_hw_cdc_event_data_t), &xTaskWoken);
    connected = true;
  }

  if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_BUS_RESET) {
    usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_BUS_RESET);
    arduino_hw_cdc_event_post(ARDUINO_HW_CDC_EVENTS, ARDUINO_HW_CDC_BUS_RESET_EVENT, &event, sizeof(arduino_hw_cdc_event_data_t), &xTaskWoken);
    connected = false;
  }

  // SOF ISR is causing esptool to be unable to upload firmware to the board
  // if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_SOF) {
  //   usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SOF);
  //   lastSOF_ms = millis();
  // }

  if (xTaskWoken == pdTRUE) {
    portYIELD_FROM_ISR();
  }
}
// Moved to header file as inline function. Kept just as future reference.
//inline bool HWCDC::isPlugged(void) {
// SOF ISR is causing esptool to be unable to upload firmware to the board
// Timer test for SOF seems to work when uploading firmware
//  return usb_serial_jtag_is_connected();//(lastSOF_ms + SOF_TIMEOUT) >= millis();
//}

bool HWCDC::isCDC_Connected() {
  static bool running = false;

  // USB may be unplugged
  if (!isPlugged()) {
    connected = false;
    running = false;
    // SOF in ISR causes problems for uploading firmware
    //SOF_TIMEOUT = 5; // SOF timeout when unplugged
    return false;
  }
  //else {
  //  SOF_TIMEOUT = 50; // SOF timeout when plugged
  //}

  if (connected) {
    running = false;
    return true;
  }

  if (running == false && !connected) {  // enables it only once!
    usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
  }
  // this will feed CDC TX FIFO to trigger IN_EMPTY
  usb_serial_jtag_ll_txfifo_flush();
  running = true;
  return false;
}
static void flushTXBuffer(const uint8_t *buffer, size_t size) {
  if (!tx_ring_buf) {
    return;
  }
  UBaseType_t uxItemsWaiting = 0;
  vRingbufferGetInfo(tx_ring_buf, NULL, NULL, NULL, NULL, &uxItemsWaiting);
  size_t freeSpace = xRingbufferGetCurFreeSize(tx_ring_buf);
  size_t ringbufferLength = freeSpace + uxItemsWaiting;

  if (buffer == NULL) {
    // just flush the whole ring buffer and exit - used by HWCDC::flush()
    size_t queued_size = 0;
    uint8_t *queued_buff = (uint8_t *)xRingbufferReceiveUpTo(tx_ring_buf, &queued_size, 0, ringbufferLength);
    if (queued_size && queued_buff != NULL) {
      vRingbufferReturnItem(tx_ring_buf, (void *)queued_buff);
    }
    return;
  }
  if (size == 0) {
    return;  // nothing to do
  }
  if (freeSpace >= size) {
    // there is enough space, just add the data to the ring buffer
    if (xRingbufferSend(tx_ring_buf, (void *)buffer, size, 0) != pdTRUE) {
      return;
    }
  } else {
    // how many byte should be flushed to make space for the new data
    size_t to_flush = size - freeSpace;
    if (to_flush > ringbufferLength) {
      to_flush = ringbufferLength;
    }
    size_t queued_size = 0;
    uint8_t *queued_buff = (uint8_t *)xRingbufferReceiveUpTo(tx_ring_buf, &queued_size, 0, to_flush);
    if (queued_size && queued_buff != NULL) {
      vRingbufferReturnItem(tx_ring_buf, (void *)queued_buff);
    }
    // now add the new data that fits into the ring buffer
    uint8_t *bptr = (uint8_t *)buffer;
    if (size >= ringbufferLength) {
      size = ringbufferLength;
      bptr = (uint8_t *)buffer + (size - ringbufferLength);
    }
    if (xRingbufferSend(tx_ring_buf, (void *)bptr, size, 0) != pdTRUE) {
      return;
    }
  }
  // flushes CDC FIFO
  usb_serial_jtag_ll_txfifo_flush();
}
static void ARDUINO_ISR_ATTR cdc0_write_char(char c) {
  if (tx_ring_buf == NULL) {
    return;
  }
  if (!HWCDC::isConnected()) {
    // just pop/push RingBuffer and apply FIFO policy
    flushTXBuffer((const uint8_t *)&c, 1);
    return;
  }
  if (xPortInIsrContext()) {
    xRingbufferSendFromISR(tx_ring_buf, (void *)(&c), 1, NULL);
  } else {
    xRingbufferSend(tx_ring_buf, (void *)(&c), 1, tx_timeout_ms / portTICK_PERIOD_MS);
  }
  usb_serial_jtag_ll_txfifo_flush();
  usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}

HWCDC::HWCDC() {
  perimanSetBusDeinit(ESP32_BUS_TYPE_USB_DM, HWCDC::deinit);
  perimanSetBusDeinit(ESP32_BUS_TYPE_USB_DP, HWCDC::deinit);
  // SOF in ISR causes problems for uploading firmware
  // lastSOF_ms = 0;
  // SOF_TIMEOUT = 5;
}

HWCDC::~HWCDC() {
  end();
}

// It should return <true> just when USB is plugged and CDC is connected.
HWCDC::operator bool() const {
  return HWCDC::isCDC_Connected();
}
void HWCDC::onEvent(esp_event_handler_t callback) {
@@ -277,30 +173,30 @@ void HWCDC::onEvent(arduino_hw_cdc_event_t event, esp_event_handler_t callback)
  arduino_hw_cdc_event_handler_register_with(ARDUINO_HW_CDC_EVENTS, event, callback, this);
}

bool HWCDC::deinit(void *busptr) {
  // avoid any recursion issue with Peripheral Manager perimanSetPinBus() call
  static bool running = false;
  if (running) {
    return true;
  }
  running = true;
  // Setting USB D+ D- pins
  bool retCode = true;
  retCode &= perimanClearPinBus(USB_INT_PHY0_DM_GPIO_NUM);
  retCode &= perimanClearPinBus(USB_INT_PHY0_DP_GPIO_NUM);
  if (retCode) {
    // Force the host to re-enumerate (BUS_RESET)
    pinMode(USB_INT_PHY0_DM_GPIO_NUM, OUTPUT_OPEN_DRAIN);
    pinMode(USB_INT_PHY0_DP_GPIO_NUM, OUTPUT_OPEN_DRAIN);
    digitalWrite(USB_INT_PHY0_DM_GPIO_NUM, LOW);
    digitalWrite(USB_INT_PHY0_DP_GPIO_NUM, LOW);
  }
  // release the flag
  running = false;
  return retCode;
}
void HWCDC::begin(unsigned long baud) {
  if (tx_lock == NULL) {
    tx_lock = xSemaphoreCreateMutex();
  }
@@ -316,53 +212,29 @@ void HWCDC::begin(unsigned long baud) {
      log_e("HW CDC TX Buffer error");
    }
  }

  // the HW Serial pins needs to be first deinited in order to allow `if(Serial)` to work :-(
  // But this is also causing terminal to hang, so they are disabled
  // deinit(NULL);
  // delay(10);  // USB Host has to enumerate it again

  // Peripheral Manager setting for USB D+ D- pins
  uint8_t pin = USB_INT_PHY0_DM_GPIO_NUM;
  if (!perimanSetPinBus(pin, ESP32_BUS_TYPE_USB_DM, (void *)this, -1, -1)) {
    goto err;
  }
  pin = USB_INT_PHY0_DP_GPIO_NUM;
  if (!perimanSetPinBus(pin, ESP32_BUS_TYPE_USB_DP, (void *)this, -1, -1)) {
    goto err;
  }

  // Configure PHY
  // USB_Serial_JTAG use internal PHY
  USB_SERIAL_JTAG.conf0.phy_sel = 0;
  // Disable software control USB D+ D- pullup pulldown (Device FS: dp_pullup = 1)
  USB_SERIAL_JTAG.conf0.pad_pull_override = 0;
  // Enable USB D+ pullup
  USB_SERIAL_JTAG.conf0.dp_pullup = 1;
  // Enable USB pad function
  USB_SERIAL_JTAG.conf0.usb_pad_enable = 1;

  usb_serial_jtag_ll_disable_intr_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
  usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
  usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY | USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT | USB_SERIAL_JTAG_INTR_BUS_RESET);
  // SOF ISR is causing esptool to be unable to upload firmware to the board
  // usb_serial_jtag_ll_ena_intr_mask(
  //   USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY | USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT | USB_SERIAL_JTAG_INTR_BUS_RESET | USB_SERIAL_JTAG_INTR_SOF
  // );
  if (!intr_handle && esp_intr_alloc(ETS_USB_SERIAL_JTAG_INTR_SOURCE, 0, hw_cdc_isr_handler, NULL, &intr_handle) != ESP_OK) {
    isr_log_e("HW USB CDC failed to init interrupts");
    end();
    return;
  }
  return;

err:
  log_e("Serial JTAG Pin %u can't be set into Peripheral Manager.", pin);
  end();
}
void HWCDC::end() {
  //Disable/clear/free tx/rx interrupt.
  usb_serial_jtag_ll_disable_intr_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
  usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
  esp_intr_free(intr_handle);
  intr_handle = NULL;
  if (tx_lock != NULL) {
@@ -376,12 +248,13 @@ void HWCDC::end() {
    arduino_hw_cdc_event_loop_handle = NULL;
  }
  HWCDC::deinit(this);
  setDebugOutput(false);
  connected = false;
}

void HWCDC::setTxTimeoutMs(uint32_t timeout) {
  tx_timeout_ms = timeout;
}

/*
@@ -403,7 +276,8 @@ size_t HWCDC::setTxBufferSize(size_t tx_queue_len) {
  return tx_queue_len;
}
int HWCDC::availableForWrite(void) {
  if (tx_ring_buf == NULL || tx_lock == NULL) {
    return 0;
  }
@@ -415,17 +289,15 @@ int HWCDC::availableForWrite(void) {
  return a;
}

size_t HWCDC::write(const uint8_t *buffer, size_t size) {
  if (buffer == NULL || size == 0 || tx_ring_buf == NULL || tx_lock == NULL) {
    return 0;
  }
  if (xSemaphoreTake(tx_lock, tx_timeout_ms / portTICK_PERIOD_MS) != pdPASS) {
    return 0;
  }
  if (!isCDC_Connected()) {
    // just pop/push RingBuffer and apply FIFO policy
    flushTXBuffer(buffer, size);
  } else {
    size_t space = xRingbufferGetCurFreeSize(tx_ring_buf);
    size_t to_send = size, so_far = 0;
@@ -433,101 +305,55 @@ size_t HWCDC::write(const uint8_t *buffer, size_t size) {
      space = size;
    }
    // Non-Blocking method, Sending data to ringbuffer, and handle the data in ISR.
    if (space > 0 && xRingbufferSend(tx_ring_buf, (void *)(buffer), space, 0) != pdTRUE) {
      size = 0;
    } else {
      to_send -= space;
      so_far += space;
      // Now trigger the ISR to read data from the ring buffer.
      usb_serial_jtag_ll_txfifo_flush();
      if (connected) {
        usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
      }
      // tracks CDC transmission progress to avoid hanging if CDC is unplugged while still sending data
      size_t last_toSend = to_send;
      uint32_t tries = tx_timeout_ms;  // waits 1ms per sending data attempt, in case CDC is unplugged
      while (connected && to_send) {
        space = xRingbufferGetCurFreeSize(tx_ring_buf);
        if (space > to_send) {
          space = to_send;
        }
        // Blocking method, Sending data to ringbuffer, and handle the data in ISR.
        if (xRingbufferSend(tx_ring_buf, (void *)(buffer + so_far), space, tx_timeout_ms / portTICK_PERIOD_MS) != pdTRUE) {
          size = so_far;
          log_w("write failed due to ring buffer full - timeout");
          break;
        }
        so_far += space;
        to_send -= space;
        // Now trigger the ISR to read data from the ring buffer.
        usb_serial_jtag_ll_txfifo_flush();
        if (connected) {
          usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
        }
        if (last_toSend == to_send) {
          // no progress in sending data... USB CDC is probably unplugged
          tries--;
          delay(1);
        } else {
          last_toSend = to_send;
          tries = tx_timeout_ms;  // reset the timeout
        }
        if (tries == 0) {  // CDC isn't connected anymore...
          size = so_far;
          log_w("write failed due to waiting USB Host - timeout");
          connected = false;
        }
      }
    }
    // CDC was disconnected while sending data ==> flush the TX buffer keeping the last data
    if (to_send && !usb_serial_jtag_ll_txfifo_writable()) {
      connected = false;
      flushTXBuffer(buffer + so_far, to_send);
    }
  }
  xSemaphoreGive(tx_lock);
  return size;
}
size_t HWCDC::write(uint8_t c) {
  return write(&c, 1);
}

void HWCDC::flush(void) {
  if (tx_ring_buf == NULL || tx_lock == NULL) {
    return;
  }
  if (xSemaphoreTake(tx_lock, tx_timeout_ms / portTICK_PERIOD_MS) != pdPASS) {
    return;
  }
  if (!isCDC_Connected()) {
    flushTXBuffer(NULL, 0);
  } else {
    UBaseType_t uxItemsWaiting = 0;
    vRingbufferGetInfo(tx_ring_buf, NULL, NULL, NULL, NULL, &uxItemsWaiting);
    if (uxItemsWaiting) {
      // Now trigger the ISR to read data from the ring buffer.
      usb_serial_jtag_ll_txfifo_flush();
      if (connected) {
        usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
      }
    }
    uint32_t tries = tx_timeout_ms;  // waits 1ms per ISR sending data attempt, in case CDC is unplugged
    while (connected && tries && uxItemsWaiting) {
      delay(1);
      UBaseType_t lastUxItemsWaiting = uxItemsWaiting;
      vRingbufferGetInfo(tx_ring_buf, NULL, NULL, NULL, NULL, &uxItemsWaiting);
      if (lastUxItemsWaiting == uxItemsWaiting) {
        tries--;
      }
      if (connected) {
        usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
      }
    }
    if (tries == 0) {  // CDC isn't connected anymore...
      connected = false;
      flushTXBuffer(NULL, 0);  // flushes all TX Buffer
    }
  }
  xSemaphoreGive(tx_lock);
}
@@ -551,14 +377,16 @@ size_t HWCDC::setRxBufferSize(size_t rx_queue_len) {
  return rx_queue_len;
}

int HWCDC::available(void) {
  if (rx_queue == NULL) {
    return -1;
  }
  return uxQueueMessagesWaiting(rx_queue);
}

int HWCDC::peek(void) {
  if (rx_queue == NULL) {
    return -1;
  }
@@ -569,7 +397,8 @@ int HWCDC::peek(void) {
  return -1;
}

int HWCDC::read(void) {
  if (rx_queue == NULL) {
    return -1;
  }
@@ -580,7 +409,8 @@ int HWCDC::read(void) {
  return -1;
}

size_t HWCDC::read(uint8_t *buffer, size_t size) {
  if (rx_queue == NULL) {
    return -1;
  }
@@ -596,17 +426,17 @@ size_t HWCDC::read(uint8_t *buffer, size_t size) {
 * DEBUG
 */

void HWCDC::setDebugOutput(bool en) {
  if (en) {
    uartSetDebug(NULL);
    ets_install_putc2((void (*)(char)) & cdc0_write_char);
  } else {
    ets_install_putc2(NULL);
  }
  ets_install_putc1(NULL);  // closes UART log output
}

#if ARDUINO_USB_MODE && ARDUINO_USB_CDC_ON_BOOT  // Hardware JTAG CDC selected
// USBSerial is always available to be used
HWCDC HWCDCSerial;
#endif
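A minimal sketch against the connection logic above, assuming a board where the USB-Serial/JTAG port is mapped to Serial (ARDUINO_USB_MODE=1 and ARDUINO_USB_CDC_ON_BOOT=1); the baud rate argument is accepted but ignored by this peripheral:

#include <Arduino.h>

void setup() {
  Serial.begin(115200);      // baud is ignored for the USB-Serial/JTAG port
  Serial.setTxTimeoutMs(0);  // don't block writes while no host is reading
  while (!Serial) {          // operator bool() above: true only when a CDC host is actually connected
    delay(10);
  }
  Serial.println("USB-Serial/JTAG host connected");
}

void loop() {}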


@@ -1,4 +1,4 @@
// Copyright 2015-2024 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -21,7 +21,6 @@
#include <inttypes.h>
#include "esp_event.h"
#include "Stream.h"
#include "driver/usb_serial_jtag.h"

ESP_EVENT_DECLARE_BASE(ARDUINO_HW_CDC_EVENTS);
@@ -43,10 +42,10 @@ typedef union {
  } tx;
} arduino_hw_cdc_event_data_t;

class HWCDC : public Stream {
private:
  static bool deinit(void *busptr);
  static bool isCDC_Connected();

public:
  HWCDC();
@@ -70,44 +69,40 @@ public:
  size_t write(const uint8_t *buffer, size_t size);
  void flush(void);

  inline static bool isPlugged(void) {
    // SOF ISR is causing esptool to be unable to upload firmware to the board
    // Using IDF 5.1 helper function because it is based on Timer check instead of ISR
    return usb_serial_jtag_is_connected();
  }

  inline static bool isConnected(void) {
    return isCDC_Connected();
  }

  inline size_t read(char *buffer, size_t size) {
    return read((uint8_t *)buffer, size);
  }
  inline size_t write(const char *buffer, size_t size) {
    return write((uint8_t *)buffer, size);
  }
  inline size_t write(const char *s) {
    return write((uint8_t *)s, strlen(s));
  }
  inline size_t write(unsigned long n) {
    return write((uint8_t)n);
  }
  inline size_t write(long n) {
    return write((uint8_t)n);
  }
  inline size_t write(unsigned int n) {
    return write((uint8_t)n);
  }
  inline size_t write(int n) {
    return write((uint8_t)n);
  }
  operator bool() const;
  void setDebugOutput(bool);
  uint32_t baudRate() {
    return 115200;
  }
};

#if ARDUINO_USB_MODE && ARDUINO_USB_CDC_ON_BOOT  // Hardware JTAG CDC selected
#ifndef HWCDC_SERIAL_IS_DEFINED
#define HWCDC_SERIAL_IS_DEFINED 1
#endif


@@ -20,9 +20,9 @@
#include <inttypes.h>
#include "Stream.h"
#include <functional>

class HardwareI2C : public Stream {
public:
  virtual bool begin() = 0;
  virtual bool begin(uint8_t address) = 0;
@@ -37,7 +37,6 @@ public:
  virtual size_t requestFrom(uint8_t address, size_t len, bool stopBit) = 0;
  virtual size_t requestFrom(uint8_t address, size_t len) = 0;

  // Update base class to use std::function
  virtual void onReceive(const std::function<void(int)> &) = 0;
  virtual void onRequest(const std::function<void()> &) = 0;
};
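Sketch-level illustration of the std::function-based callbacks, assuming TwoWire (the global Wire object) implements this interface; the slave address 0x42 is arbitrary:

#include <Arduino.h>
#include <Wire.h>

void setup() {
  Wire.begin(0x42);  // join the I2C bus as a slave at an arbitrary address
  Wire.onReceive([](int numBytes) {
    while (Wire.available()) {
      Wire.read();  // drain the received bytes
    }
  });
  Wire.onRequest([]() {
    Wire.write(0x00);  // reply with a single placeholder byte
  });
}

void loop() {}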


@@ -11,34 +11,31 @@
#include "driver/uart.h"
#include "freertos/queue.h"

#if (SOC_UART_LP_NUM >= 1)
#define UART_HW_FIFO_LEN(uart_num) ((uart_num < SOC_UART_HP_NUM) ? SOC_UART_FIFO_LEN : SOC_LP_UART_FIFO_LEN)
#else
#define UART_HW_FIFO_LEN(uart_num) SOC_UART_FIFO_LEN
#endif

void serialEvent(void) __attribute__((weak));

#if SOC_UART_NUM > 1
void serialEvent1(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 1 */

#if SOC_UART_NUM > 2
void serialEvent2(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 2 */

#if SOC_UART_NUM > 3
void serialEvent3(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 3 */

#if SOC_UART_NUM > 4
void serialEvent4(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 4 */

#if SOC_UART_NUM > 5
void serialEvent5(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 5 */
#if !defined(NO_GLOBAL_INSTANCES) && !defined(NO_GLOBAL_SERIAL)
// There is always Serial0 for UART0
HardwareSerial Serial0(0);
@@ -48,83 +45,58 @@ HardwareSerial Serial1(1);
#if SOC_UART_NUM > 2
HardwareSerial Serial2(2);
#endif
#if SOC_UART_NUM > 3
HardwareSerial Serial3(3);
#endif
#if SOC_UART_NUM > 4
HardwareSerial Serial4(4);
#endif
#if (SOC_UART_NUM > 5)
HardwareSerial Serial5(5);
#endif

#if HWCDC_SERIAL_IS_DEFINED == 1  // Hardware JTAG CDC Event
extern void HWCDCSerialEvent(void) __attribute__((weak));
#endif

#if USB_SERIAL_IS_DEFINED == 1  // Native USB CDC Event
// Used by Hardware Serial for USB CDC events
extern void USBSerialEvent(void) __attribute__((weak));
#endif
void serialEventRun(void) {
#if HWCDC_SERIAL_IS_DEFINED == 1  // Hardware JTAG CDC Event
  if (HWCDCSerialEvent && HWCDCSerial.available()) {
    HWCDCSerialEvent();
  }
#endif
#if USB_SERIAL_IS_DEFINED == 1  // Native USB CDC Event
  if (USBSerialEvent && USBSerial.available()) {
    USBSerialEvent();
  }
#endif
  // UART0 is default serialEvent()
  if (serialEvent && Serial0.available()) {
    serialEvent();
  }
#if SOC_UART_NUM > 1
  if (serialEvent1 && Serial1.available()) {
    serialEvent1();
  }
#endif
#if SOC_UART_NUM > 2
  if (serialEvent2 && Serial2.available()) {
    serialEvent2();
  }
#endif
#if SOC_UART_NUM > 3
  if (serialEvent3 && Serial3.available()) {
    serialEvent3();
  }
#endif
#if SOC_UART_NUM > 4
  if (serialEvent4 && Serial4.available()) {
    serialEvent4();
  }
#endif
#if SOC_UART_NUM > 5
  if (serialEvent5 && Serial5.available()) {
    serialEvent5();
  }
#endif
}
#endif
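Sketch-side illustration of the weak hooks above: defining serialEvent1() is enough for serialEventRun() to call it between loop() iterations whenever UART1 has pending data (pins and baud rates are illustrative):

#include <Arduino.h>

void serialEvent1() {  // overrides the weak declaration, called from serialEventRun()
  while (Serial1.available()) {
    Serial.write(Serial1.read());
  }
}

void setup() {
  Serial.begin(115200);
  Serial1.begin(9600, SERIAL_8N1, 16, 17);  // RX=16, TX=17 are illustrative pins
}

void loop() {}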
#if !CONFIG_DISABLE_HAL_LOCKS
#define HSERIAL_MUTEX_LOCK() \
  do {                       \
  } while (xSemaphoreTake(_lock, portMAX_DELAY) != pdPASS)
#define HSERIAL_MUTEX_UNLOCK() xSemaphoreGive(_lock)
#else
#define HSERIAL_MUTEX_LOCK()
#define HSERIAL_MUTEX_UNLOCK()
#endif

HardwareSerial::HardwareSerial(uint8_t uart_nr)
  : _uart_nr(uart_nr), _uart(NULL), _rxBufferSize(256), _txBufferSize(0), _onReceiveCB(NULL), _onReceiveErrorCB(NULL), _onReceiveTimeout(false), _rxTimeout(1),
    _rxFIFOFull(0), _eventTask(NULL)
#if !CONFIG_DISABLE_HAL_LOCKS
    ,
    _lock(NULL)
#endif
{
#if !CONFIG_DISABLE_HAL_LOCKS
@@ -140,7 +112,8 @@ HardwareSerial::HardwareSerial(uint8_t uart_nr)
  uart_init_PeriMan();
}
HardwareSerial::~HardwareSerial() {
  end();  // explicit Full UART termination
#if !CONFIG_DISABLE_HAL_LOCKS
  if (_lock != NULL) {
@@ -149,25 +122,26 @@ HardwareSerial::~HardwareSerial() {
#endif
}

void HardwareSerial::_createEventTask(void *args) {
  // Creating UART event Task
  xTaskCreateUniversal(
    _uartEventTask, "uart_event_task", ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE, this, ARDUINO_SERIAL_EVENT_TASK_PRIORITY, &_eventTask,
    ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
  );
  if (_eventTask == NULL) {
    log_e(" -- UART%d Event Task not Created!", _uart_nr);
  }
}

void HardwareSerial::_destroyEventTask(void) {
  if (_eventTask != NULL) {
    vTaskDelete(_eventTask);
    _eventTask = NULL;
  }
}
void HardwareSerial::onReceiveError(OnReceiveErrorCb function) {
  HSERIAL_MUTEX_LOCK();
  // function may be NULL to cancel onReceive() from its respective task
  _onReceiveErrorCB = function;
@@ -178,7 +152,8 @@ void HardwareSerial::onReceiveError(OnReceiveErrorCb function) {
  HSERIAL_MUTEX_UNLOCK();
}

void HardwareSerial::onReceive(OnReceiveCb function, bool onlyOnTimeout) {
  HSERIAL_MUTEX_LOCK();
  // function may be NULL to cancel onReceive() from its respective task
  _onReceiveCB = function;
@@ -190,8 +165,7 @@ void HardwareSerial::onReceive(OnReceiveCb function, bool onlyOnTimeout) {
  // in case that onReceive() shall work only with RX Timeout, FIFO shall be high
  // this is a work around for an IDF issue with events and low FIFO Full value (< 3)
  // Not valid for the LP UART
  if (_onReceiveTimeout && _uart_nr < SOC_UART_HP_NUM) {
    uartSetRxFIFOFull(_uart, 120);
    log_w("OnReceive is set to Timeout only, thus FIFO Full is now 120 bytes.");
  }
@@ -209,34 +183,31 @@ void HardwareSerial::onReceive(OnReceiveCb function, bool onlyOnTimeout) {
// A low value of FIFO Full bytes will consume more CPU time within the ISR
// A high value of FIFO Full bytes will make the application wait longer to have bytes available for the sketch in a streaming scenario
// Both RX FIFO Full and RX Timeout may affect when onReceive() will be called
bool HardwareSerial::setRxFIFOFull(uint8_t fifoBytes) {
  HSERIAL_MUTEX_LOCK();
  // in case that onReceive() shall work only with RX Timeout, FIFO shall be high
  // this is a work around for an IDF issue with events and low FIFO Full value (< 3)
  // Not valid for the LP UART
  if (_onReceiveCB != NULL && _onReceiveTimeout && _uart_nr < SOC_UART_HP_NUM) {
    fifoBytes = 120;
    log_w("OnReceive is set to Timeout only, thus FIFO Full is now 120 bytes.");
  }
  bool retCode = uartSetRxFIFOFull(_uart, fifoBytes);  // Set new FIFO Full threshold
  if (fifoBytes > 0 && fifoBytes < UART_HW_FIFO_LEN(_uart_nr) - 1) {
    _rxFIFOFull = fifoBytes;
  }
  HSERIAL_MUTEX_UNLOCK();
  return retCode;
}
// timeout is calculates in time to receive UART symbols at the UART baudrate. // timout is calculates in time to receive UART symbols at the UART baudrate.
// the estimation is about 11 bits per symbol (SERIAL_8N1) // the estimation is about 11 bits per symbol (SERIAL_8N1)
bool HardwareSerial::setRxTimeout(uint8_t symbols_timeout) { bool HardwareSerial::setRxTimeout(uint8_t symbols_timeout)
{
HSERIAL_MUTEX_LOCK(); HSERIAL_MUTEX_LOCK();
// Zero disables timeout, thus, onReceive callback will only be called when RX FIFO reaches 120 bytes // Zero disables timeout, thus, onReceive callback will only be called when RX FIFO reaches 120 bytes
// Any non-zero value will activate onReceive callback based on UART baudrate with about 11 bits per symbol // Any non-zero value will activate onReceive callback based on UART baudrate with about 11 bits per symbol
_rxTimeout = symbols_timeout; _rxTimeout = symbols_timeout;
if (!symbols_timeout) { if (!symbols_timeout) _onReceiveTimeout = false; // only when RX timeout is disabled, we also must disable this flag
_onReceiveTimeout = false; // only when RX timeout is disabled, we also must disable this flag
}
bool retCode = uartSetRxTimeout(_uart, _rxTimeout); // Set new timeout bool retCode = uartSetRxTimeout(_uart, _rxTimeout); // Set new timeout
@ -244,7 +215,8 @@ bool HardwareSerial::setRxTimeout(uint8_t symbols_timeout) {
return retCode; return retCode;
} }
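The two knobs above work together: onReceive() decides whether the callback fires on RX timeout only, setRxTimeout() sets how many idle symbol times count as a timeout, and setRxFIFOFull() sets the byte threshold for the FIFO-full path. A minimal usage sketch, assuming an ESP32 target where Serial1 and these methods are available (pin numbers are placeholders):

// Illustrative only: deliver received data to the callback after the line
// has been idle for ~2 symbol times.
void onRxChunk() {
  while (Serial1.available()) {
    Serial.write(Serial1.read());   // the whole chunk is ready to read at once
  }
}

void setup() {
  Serial.begin(115200);
  Serial1.begin(115200, SERIAL_8N1, 16, 17);  // RX/TX pins are an assumption
  Serial1.setRxTimeout(2);                    // ~2 symbol times of idle line
  Serial1.setRxFIFOFull(64);                  // forced to 120 in timeout-only mode
  Serial1.onReceive(onRxChunk, true);         // true = callback on RX timeout only
}

void loop() {}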
void HardwareSerial::eventQueueReset() { void HardwareSerial::eventQueueReset()
{
QueueHandle_t uartEventQueue = NULL; QueueHandle_t uartEventQueue = NULL;
if (_uart == NULL) { if (_uart == NULL) {
return; return;
@ -255,7 +227,8 @@ void HardwareSerial::eventQueueReset() {
} }
} }
void HardwareSerial::_uartEventTask(void *args) { void HardwareSerial::_uartEventTask(void *args)
{
HardwareSerial *uart = (HardwareSerial *)args; HardwareSerial *uart = (HardwareSerial *)args;
uart_event_t event; uart_event_t event;
QueueHandle_t uartEventQueue = NULL; QueueHandle_t uartEventQueue = NULL;
@ -267,9 +240,9 @@ void HardwareSerial::_uartEventTask(void *args) {
hardwareSerial_error_t currentErr = UART_NO_ERROR; hardwareSerial_error_t currentErr = UART_NO_ERROR;
switch(event.type) { switch(event.type) {
case UART_DATA: case UART_DATA:
if (uart->_onReceiveCB && uart->available() > 0 && ((uart->_onReceiveTimeout && event.timeout_flag) || !uart->_onReceiveTimeout)) { if(uart->_onReceiveCB && uart->available() > 0 &&
((uart->_onReceiveTimeout && event.timeout_flag) || !uart->_onReceiveTimeout) )
uart->_onReceiveCB(); uart->_onReceiveCB();
}
break; break;
case UART_FIFO_OVF: case UART_FIFO_OVF:
log_w("UART%d FIFO Overflow. Consider adding Hardware Flow Control to your Application.", uart->_uart_nr); log_w("UART%d FIFO Overflow. Consider adding Hardware Flow Control to your Application.", uart->_uart_nr);
@ -280,23 +253,23 @@ void HardwareSerial::_uartEventTask(void *args) {
currentErr = UART_BUFFER_FULL_ERROR; currentErr = UART_BUFFER_FULL_ERROR;
break; break;
case UART_BREAK: case UART_BREAK:
log_v("UART%d RX break.", uart->_uart_nr); log_w("UART%d RX break.", uart->_uart_nr);
currentErr = UART_BREAK_ERROR; currentErr = UART_BREAK_ERROR;
break; break;
case UART_PARITY_ERR: case UART_PARITY_ERR:
log_v("UART%d parity error.", uart->_uart_nr); log_w("UART%d parity error.", uart->_uart_nr);
currentErr = UART_PARITY_ERROR; currentErr = UART_PARITY_ERROR;
break; break;
case UART_FRAME_ERR: case UART_FRAME_ERR:
log_v("UART%d frame error.", uart->_uart_nr); log_w("UART%d frame error.", uart->_uart_nr);
currentErr = UART_FRAME_ERROR; currentErr = UART_FRAME_ERROR;
break; break;
default: log_v("UART%d unknown event type %d.", uart->_uart_nr, event.type); break; default:
log_w("UART%d unknown event type %d.", uart->_uart_nr, event.type);
break;
} }
if (currentErr != UART_NO_ERROR) { if (currentErr != UART_NO_ERROR) {
if (uart->_onReceiveErrorCB) { if(uart->_onReceiveErrorCB) uart->_onReceiveErrorCB(currentErr);
uart->_onReceiveErrorCB(currentErr);
}
} }
} }
} }
@ -304,7 +277,8 @@ void HardwareSerial::_uartEventTask(void *args) {
vTaskDelete(NULL); vTaskDelete(NULL);
} }
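Every error branch in the event task above reduces to one hardwareSerial_error_t value handed to the user callback. A hedged example of registering such a handler, using only the error codes visible in this file:

// Illustrative only: log UART errors reported by the event task.
void onSerialError(hardwareSerial_error_t err) {
  switch (err) {
    case UART_BREAK_ERROR:       Serial.println("UART1: RX break");       break;
    case UART_BUFFER_FULL_ERROR: Serial.println("UART1: RX buffer full"); break;
    case UART_PARITY_ERROR:      Serial.println("UART1: parity error");   break;
    case UART_FRAME_ERROR:       Serial.println("UART1: frame error");    break;
    default:                     Serial.printf("UART1: error %d\n", err); break;
  }
}

void setup() {
  Serial.begin(115200);
  Serial1.begin(115200);
  Serial1.onReceiveError(onSerialError);
}

void loop() {}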
void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, int8_t txPin, bool invert, unsigned long timeout_ms, uint8_t rxfifo_full_thrhd) { void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, int8_t txPin, bool invert, unsigned long timeout_ms, uint8_t rxfifo_full_thrhd)
{
if(_uart_nr >= SOC_UART_NUM) { if(_uart_nr >= SOC_UART_NUM) {
log_e("Serial number is invalid, please use a number from 0 to %u", SOC_UART_NUM - 1); log_e("Serial number is invalid, please use a number from 0 to %u", SOC_UART_NUM - 1);
return; return;
@ -317,15 +291,6 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
} }
#endif #endif
// map logical pins to GPIO numbers
rxPin = digitalPinToGPIONumber(rxPin);
txPin = digitalPinToGPIONumber(txPin);
int8_t _rxPin = uart_get_RxPin(_uart_nr);
int8_t _txPin = uart_get_TxPin(_uart_nr);
rxPin = rxPin < 0 ? _rxPin : rxPin;
txPin = txPin < 0 ? _txPin : txPin;
HSERIAL_MUTEX_LOCK(); HSERIAL_MUTEX_LOCK();
// First Time or after end() --> set default Pins // First Time or after end() --> set default Pins
if (!uartIsDriverInstalled(_uart)) { if (!uartIsDriverInstalled(_uart)) {
@ -340,7 +305,7 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
txPin = _txPin < 0 ? (int8_t)SOC_TX0 : _txPin; txPin = _txPin < 0 ? (int8_t)SOC_TX0 : _txPin;
} }
break; break;
#if SOC_UART_HP_NUM > 1 #if SOC_UART_NUM > 1 // may save some flash bytes...
case UART_NUM_1: case UART_NUM_1:
if (rxPin < 0 && txPin < 0) { if (rxPin < 0 && txPin < 0) {
// do not change RX1/TX1 if it has already been set before // do not change RX1/TX1 if it has already been set before
@ -348,73 +313,26 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
txPin = _txPin < 0 ? (int8_t)TX1 : _txPin; txPin = _txPin < 0 ? (int8_t)TX1 : _txPin;
} }
break; break;
#endif // UART_NUM_1 #endif
#if SOC_UART_HP_NUM > 2 #if SOC_UART_NUM > 2 // may save some flash bytes...
case UART_NUM_2: case UART_NUM_2:
if (rxPin < 0 && txPin < 0) { if (rxPin < 0 && txPin < 0) {
// do not change RX2/TX2 if it has already been set before // do not change RX2/TX2 if it has already been set before
#ifdef RX2
rxPin = _rxPin < 0 ? (int8_t)RX2 : _rxPin; rxPin = _rxPin < 0 ? (int8_t)RX2 : _rxPin;
#endif
#ifdef TX2
txPin = _txPin < 0 ? (int8_t)TX2 : _txPin; txPin = _txPin < 0 ? (int8_t)TX2 : _txPin;
#endif
} }
break; break;
#endif // UART_NUM_2
#if SOC_UART_HP_NUM > 3
case UART_NUM_3:
if (rxPin < 0 && txPin < 0) {
// do not change RX3/TX3 if it has already been set before
#ifdef RX3
rxPin = _rxPin < 0 ? (int8_t)RX3 : _rxPin;
#endif #endif
#ifdef TX3
txPin = _txPin < 0 ? (int8_t)TX3 : _txPin;
#endif
}
break;
#endif // UART_NUM_3
#if SOC_UART_HP_NUM > 4
case UART_NUM_4:
if (rxPin < 0 && txPin < 0) {
// do not change RX4/TX4 if it has already been set before
#ifdef RX4
rxPin = _rxPin < 0 ? (int8_t)RX4 : _rxPin;
#endif
#ifdef TX4
txPin = _txPin < 0 ? (int8_t)TX4 : _txPin;
#endif
}
break;
#endif // UART_NUM_4
#if (SOC_UART_LP_NUM >= 1)
case LP_UART_NUM_0:
if (rxPin < 0 && txPin < 0) {
// do not change RX0_LP/TX0_LP if it has already been set before
#ifdef LP_RX0
rxPin = _rxPin < 0 ? (int8_t)LP_RX0 : _rxPin;
#endif
#ifdef LP_TX0
txPin = _txPin < 0 ? (int8_t)LP_TX0 : _txPin;
#endif
}
break;
#endif // LP_UART_NUM_0
} }
} }
// if no RX/TX pins are defined, it will not start the UART driver // map logical pins to GPIO numbers
if (rxPin < 0 && txPin < 0) { rxPin = digitalPinToGPIONumber(rxPin);
log_e("No RX/TX pins defined. Please set RX/TX pins."); txPin = digitalPinToGPIONumber(txPin);
HSERIAL_MUTEX_UNLOCK();
return;
}
// IDF UART driver keeps Pin setting on restarting. Negative Pin number will keep it unmodified. // IDF UART driver keeps Pin setting on restarting. Negative Pin number will keep it unmodified.
// it will detach previous UART attached pins // it will detach previous UART attached pins
// indicates that uartbegin() has to initialize a new IDF driver // indicates that uartbegin() has to initilize a new IDF driver
if (_testUartBegin(_uart_nr, baud ? baud : 9600, config, rxPin, txPin, _rxBufferSize, _txBufferSize, invert, rxfifo_full_thrhd)) { if (_testUartBegin(_uart_nr, baud ? baud : 9600, config, rxPin, txPin, _rxBufferSize, _txBufferSize, invert, rxfifo_full_thrhd)) {
_destroyEventTask(); // when IDF uart driver must be restarted, _eventTask must finish too _destroyEventTask(); // when IDF uart driver must be restarted, _eventTask must finish too
} }
@ -465,8 +383,7 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
if (!_rxFIFOFull) { // it has not been changed before calling begin() if (!_rxFIFOFull) { // it has not been changed before calling begin()
// set a default FIFO Full value for the IDF driver // set a default FIFO Full value for the IDF driver
uint8_t fifoFull = 1; uint8_t fifoFull = 1;
// if baud rate is higher than 57600 or onReceive() is set, it will set FIFO Full to 120 bytes, except for LP UART if (baud > 57600 || (_onReceiveCB != NULL && _onReceiveTimeout)) {
if (_uart_nr < SOC_UART_HP_NUM && (baud > 57600 || (_onReceiveCB != NULL && _onReceiveTimeout))) {
fifoFull = 120; fifoFull = 120;
} }
uartSetRxFIFOFull(_uart, fifoFull); uartSetRxFIFOFull(_uart, fifoFull);
@ -476,11 +393,13 @@ void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, in
HSERIAL_MUTEX_UNLOCK(); HSERIAL_MUTEX_UNLOCK();
} }
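In short, begin() resolves negative pin arguments to the board defaults (RX1/TX1, RX2/TX2 and, on the newer side, RX3/TX3, RX4/TX4 and LP_RX0/LP_TX0) and, on the newer side, refuses to start only when no pin can be resolved at all. Both call styles, as a sketch (explicit GPIO numbers are placeholders):

void setup() {
  // Use the board's default UART1 pin mapping (negative pins keep defaults):
  Serial1.begin(115200);

  // ...or restart it on explicit pins; pick GPIOs that are free on your board:
  Serial1.end();
  Serial1.begin(115200, SERIAL_8N1, /*rxPin=*/18, /*txPin=*/19);
}

void loop() {}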
void HardwareSerial::updateBaudRate(unsigned long baud) { void HardwareSerial::updateBaudRate(unsigned long baud)
{
uartSetBaudRate(_uart, baud); uartSetBaudRate(_uart, baud);
} }
void HardwareSerial::end() { void HardwareSerial::end()
{
// default Serial.end() will completely disable HardwareSerial, // default Serial.end() will completely disable HardwareSerial,
// including any tasks or debug message channel (log_x()) - but not for IDF log messages! // including any tasks or debug message channel (log_x()) - but not for IDF log messages!
_onReceiveCB = NULL; _onReceiveCB = NULL;
@ -494,16 +413,11 @@ void HardwareSerial::end() {
_uart = NULL; _uart = NULL;
} }
void HardwareSerial::setDebugOutput(bool en) { void HardwareSerial::setDebugOutput(bool en)
{
if(_uart == 0) { if(_uart == 0) {
return; return;
} }
#if (SOC_UART_LP_NUM >= 1)
if (_uart_nr >= SOC_UART_HP_NUM) {
log_e("LP UART does not support Debug Output.");
return;
}
#endif
if(en) { if(en) {
uartSetDebug(_uart); uartSetDebug(_uart);
} else { } else {
@ -513,21 +427,25 @@ void HardwareSerial::setDebugOutput(bool en) {
} }
} }
int HardwareSerial::available(void) { int HardwareSerial::available(void)
{
return uartAvailable(_uart); return uartAvailable(_uart);
} }
int HardwareSerial::availableForWrite(void) { int HardwareSerial::availableForWrite(void)
{
return uartAvailableForWrite(_uart); return uartAvailableForWrite(_uart);
} }
int HardwareSerial::peek(void) { int HardwareSerial::peek(void)
{
if (available()) { if (available()) {
return uartPeek(_uart); return uartPeek(_uart);
} }
return -1; return -1;
} }
int HardwareSerial::read(void) { int HardwareSerial::read(void)
{
uint8_t c = 0; uint8_t c = 0;
if (uartReadBytes(_uart, &c, 1, 0) == 1) { if (uartReadBytes(_uart, &c, 1, 0) == 1) {
return c; return c;
@ -540,47 +458,57 @@ int HardwareSerial::read(void) {
// terminates if size characters have been read, or no further bytes are pending // terminates if size characters have been read, or no further bytes are pending
// returns the number of characters placed in the buffer // returns the number of characters placed in the buffer
// the buffer is NOT null terminated. // the buffer is NOT null terminated.
size_t HardwareSerial::read(uint8_t *buffer, size_t size) { size_t HardwareSerial::read(uint8_t *buffer, size_t size)
{
return uartReadBytes(_uart, buffer, size, 0); return uartReadBytes(_uart, buffer, size, 0);
} }
// Overrides Stream::readBytes() to be faster using IDF // Overrides Stream::readBytes() to be faster using IDF
size_t HardwareSerial::readBytes(uint8_t *buffer, size_t length) { size_t HardwareSerial::readBytes(uint8_t *buffer, size_t length)
{
return uartReadBytes(_uart, buffer, length, (uint32_t)getTimeout()); return uartReadBytes(_uart, buffer, length, (uint32_t)getTimeout());
} }
void HardwareSerial::flush(void) { void HardwareSerial::flush(void)
{
uartFlush(_uart); uartFlush(_uart);
} }
void HardwareSerial::flush(bool txOnly) { void HardwareSerial::flush(bool txOnly)
{
uartFlushTxOnly(_uart, txOnly); uartFlushTxOnly(_uart, txOnly);
} }
size_t HardwareSerial::write(uint8_t c) { size_t HardwareSerial::write(uint8_t c)
{
uartWrite(_uart, c); uartWrite(_uart, c);
return 1; return 1;
} }
size_t HardwareSerial::write(const uint8_t *buffer, size_t size) { size_t HardwareSerial::write(const uint8_t *buffer, size_t size)
{
uartWriteBuf(_uart, buffer, size); uartWriteBuf(_uart, buffer, size);
return size; return size;
} }
uint32_t HardwareSerial::baudRate() { uint32_t HardwareSerial::baudRate()
{
return uartGetBaudRate(_uart); return uartGetBaudRate(_uart);
} }
HardwareSerial::operator bool() const { HardwareSerial::operator bool() const
{
return uartIsDriverInstalled(_uart); return uartIsDriverInstalled(_uart);
} }
void HardwareSerial::setRxInvert(bool invert) { void HardwareSerial::setRxInvert(bool invert)
{
uartSetRxInvert(_uart, invert); uartSetRxInvert(_uart, invert);
} }
// negative Pin value will keep it unmodified // negative Pin value will keep it unmodified
// can be called after or before begin() // can be called after or before begin()
bool HardwareSerial::setPins(int8_t rxPin, int8_t txPin, int8_t ctsPin, int8_t rtsPin) { bool HardwareSerial::setPins(int8_t rxPin, int8_t txPin, int8_t ctsPin, int8_t rtsPin)
{
// map logical pins to GPIO numbers // map logical pins to GPIO numbers
rxPin = digitalPinToGPIONumber(rxPin); rxPin = digitalPinToGPIONumber(rxPin);
txPin = digitalPinToGPIONumber(txPin); txPin = digitalPinToGPIONumber(txPin);
@ -595,7 +523,8 @@ bool HardwareSerial::setPins(int8_t rxPin, int8_t txPin, int8_t ctsPin, int8_t r
// must use setAllPins() in order to set RTS/CTS pins // must use setAllPins() in order to set RTS/CTS pins
// SerialHwFlowCtrl = UART_HW_FLOWCTRL_DISABLE, UART_HW_FLOWCTRL_RTS, // SerialHwFlowCtrl = UART_HW_FLOWCTRL_DISABLE, UART_HW_FLOWCTRL_RTS,
// UART_HW_FLOWCTRL_CTS, UART_HW_FLOWCTRL_CTS_RTS // UART_HW_FLOWCTRL_CTS, UART_HW_FLOWCTRL_CTS_RTS
bool HardwareSerial::setHwFlowCtrlMode(SerialHwFlowCtrl mode, uint8_t threshold) { bool HardwareSerial::setHwFlowCtrlMode(SerialHwFlowCtrl mode, uint8_t threshold)
{
return uartSetHwFlowCtrlMode(_uart, mode, threshold); return uartSetHwFlowCtrlMode(_uart, mode, threshold);
} }
@ -603,62 +532,40 @@ bool HardwareSerial::setHwFlowCtrlMode(SerialHwFlowCtrl mode, uint8_t threshold)
// HwFlowCtrl must be disabled and RTS pin set // HwFlowCtrl must be disabled and RTS pin set
// SerialMode = UART_MODE_UART, UART_MODE_RS485_HALF_DUPLEX, UART_MODE_IRDA, // SerialMode = UART_MODE_UART, UART_MODE_RS485_HALF_DUPLEX, UART_MODE_IRDA,
// or testing mode: UART_MODE_RS485_COLLISION_DETECT, UART_MODE_RS485_APP_CTRL // or testing mode: UART_MODE_RS485_COLLISION_DETECT, UART_MODE_RS485_APP_CTRL
bool HardwareSerial::setMode(SerialMode mode) { bool HardwareSerial::setMode(SerialMode mode)
{
return uartSetMode(_uart, mode); return uartSetMode(_uart, mode);
} }
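As the comments state, RS485 half-duplex needs an RTS pin (set via setPins() or setAllPins()) and hardware flow control disabled before setMode() is called. A hedged sketch of that sequence (GPIO numbers are placeholders):

void setup() {
  Serial.begin(115200);
  Serial1.begin(9600, SERIAL_8N1, 18, 19);                // RX, TX (placeholders)
  Serial1.setPins(-1, -1, /*ctsPin=*/-1, /*rtsPin=*/21);  // add RTS, keep RX/TX as-is
  Serial1.setHwFlowCtrlMode(UART_HW_FLOWCTRL_DISABLE, 64);
  if (!Serial1.setMode(UART_MODE_RS485_HALF_DUPLEX)) {
    Serial.println("setMode(RS485) failed");
  }
}

void loop() {}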
// Sets the UART Clock Source based on the compatible SoC options
// This method must be called before starting UART using begin(), otherwise it won't have any effect.
// Clock Source Options are:
// UART_CLK_SRC_DEFAULT :: any SoC - it will set whatever IDF defines as the default UART Clock Source
// UART_CLK_SRC_APB :: ESP32, ESP32-S2, ESP32-C3 and ESP32-S3
// UART_CLK_SRC_PLL :: ESP32-C2, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2 and ESP32-P4
// UART_CLK_SRC_XTAL :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_RTC :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_REF_TICK :: ESP32 and ESP32-S2
// Note: CLK_SRC_PLL Freq depends on the SoC - ESP32-C2 has 40MHz, ESP32-H2 has 48MHz and ESP32-C5, C6, C61 and P4 has 80MHz
// Note: ESP32-C6, C61, ESP32-P4 and ESP32-C5 have LP UART that will use only RTC_FAST or XTAL/2 as Clock Source
bool HardwareSerial::setClockSource(SerialClkSrc clkSrc) {
if (_uart) {
log_e("No Clock Source change was done. This function must be called before beginning UART%d.", _uart_nr);
return false;
}
return uartSetClockSource(_uart_nr, (uart_sclk_t)clkSrc);
}
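Because the clock source is latched when the driver starts, setClockSource() (present on the newer side of this diff only) is useful only right before begin(). A minimal sketch, assuming a core and SoC where UART_CLK_SRC_XTAL is one of the supported options:

void setup() {
  Serial1.setClockSource(UART_CLK_SRC_XTAL);  // must run before begin(), see note above
  Serial1.begin(115200);
}

void loop() {}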
// minimum total RX Buffer size is the UART FIFO space (128 bytes for most SoC) + 1. IDF imposition.
// LP UART has FIFO of 16 bytes
size_t HardwareSerial::setRxBufferSize(size_t new_size) { size_t HardwareSerial::setRxBufferSize(size_t new_size) {
if (_uart) { if (_uart) {
log_e("RX Buffer can't be resized when Serial is already running. Set it before calling begin()."); log_e("RX Buffer can't be resized when Serial is already running.\n");
return 0; return 0;
} }
uint8_t FIFOLen = UART_HW_FIFO_LEN(_uart_nr);
// Valid value is higher than the FIFO length if (new_size <= SOC_UART_FIFO_LEN) {
if (new_size <= FIFOLen) { log_e("RX Buffer must be higher than %d.\n", SOC_UART_FIFO_LEN); // ESP32, S2, S3 and C3 means higher than 128
new_size = FIFOLen + 1; return 0;
log_w("RX Buffer set to minimum value: %d.", new_size);
} }
_rxBufferSize = new_size; _rxBufferSize = new_size;
return _rxBufferSize; return _rxBufferSize;
} }
// minimum total TX Buffer size is the UART FIFO space (128 bytes for most SoC) + 1.
// LP UART has FIFO of 16 bytes
size_t HardwareSerial::setTxBufferSize(size_t new_size) { size_t HardwareSerial::setTxBufferSize(size_t new_size) {
if (_uart) { if (_uart) {
log_e("TX Buffer can't be resized when Serial is already running. Set it before calling begin()."); log_e("TX Buffer can't be resized when Serial is already running.\n");
return 0; return 0;
} }
uint8_t FIFOLen = UART_HW_FIFO_LEN(_uart_nr);
// Valid values are zero or higher than the FIFO length if (new_size <= SOC_UART_FIFO_LEN) {
if (new_size > 0 && new_size <= FIFOLen) { log_e("TX Buffer must be higher than %d.\n", SOC_UART_FIFO_LEN); // ESP32, S2, S3 and C3 means higher than 128
new_size = FIFOLen + 1; return 0;
log_w("TX Buffer set to minimum value: %d.", new_size);
} }
// if new_size is higher than SOC_UART_FIFO_LEN, TX Ringbuffer will be active and it will be used to report back "availableToWrite()"
_txBufferSize = new_size; _txBufferSize = new_size;
return new_size; return _txBufferSize;
} }
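Both setters above only work before the driver is installed; afterwards they log an error and return 0. On the older side a value at or below the FIFO length is rejected, on the newer side it is bumped to FIFO length + 1. Illustrative usage:

void setup() {
  // Must be called before begin():
  Serial1.setRxBufferSize(1024);  // larger than the 128-byte HP UART FIFO
  Serial1.setTxBufferSize(512);   // 0 (newer side) would disable the TX ring buffer
  Serial1.begin(115200);
}

void loop() {}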
@ -96,51 +96,17 @@ typedef enum {
UART_PARITY_ERROR UART_PARITY_ERROR
} hardwareSerial_error_t; } hardwareSerial_error_t;
typedef enum {
UART_CLK_SRC_DEFAULT = UART_SCLK_DEFAULT,
#if SOC_UART_SUPPORT_APB_CLK
UART_CLK_SRC_APB = UART_SCLK_APB,
#endif
#if SOC_UART_SUPPORT_PLL_F40M_CLK
UART_CLK_SRC_PLL = UART_SCLK_PLL_F40M,
#elif SOC_UART_SUPPORT_PLL_F80M_CLK
UART_CLK_SRC_PLL = UART_SCLK_PLL_F80M,
#elif CONFIG_IDF_TARGET_ESP32H2
UART_CLK_SRC_PLL = UART_SCLK_PLL_F48M,
#endif
#if SOC_UART_SUPPORT_XTAL_CLK
UART_CLK_SRC_XTAL = UART_SCLK_XTAL,
#endif
#if SOC_UART_SUPPORT_RTC_CLK
UART_CLK_SRC_RTC = UART_SCLK_RTC,
#endif
#if SOC_UART_SUPPORT_REF_TICK
UART_CLK_SRC_REF_TICK = UART_SCLK_REF_TICK,
#endif
} SerialClkSrc;
#ifndef ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE #ifndef ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#ifndef CONFIG_ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#define ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE 2048 #define ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE 2048
#else
#define ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE CONFIG_ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#endif
#endif #endif
#ifndef ARDUINO_SERIAL_EVENT_TASK_PRIORITY #ifndef ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#ifndef CONFIG_ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#define ARDUINO_SERIAL_EVENT_TASK_PRIORITY (configMAX_PRIORITIES-1) #define ARDUINO_SERIAL_EVENT_TASK_PRIORITY (configMAX_PRIORITIES-1)
#else
#define ARDUINO_SERIAL_EVENT_TASK_PRIORITY CONFIG_ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#endif
#endif #endif
#ifndef ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE #ifndef ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#ifndef CONFIG_ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#define ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE -1 #define ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE -1
#else
#define ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE CONFIG_ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#endif
#endif #endif
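Since each macro is wrapped in #ifndef (and, on the newer side, falls back to the matching CONFIG_ARDUINO_SERIAL_EVENT_TASK_* Kconfig value), the event-task stack size, priority and core can also be overridden with plain compile-time defines. For example, assuming a build system that lets you pass global -D flags (PlatformIO build_flags, arduino-cli build properties, or similar):

// Hypothetical build flags; names match the macros above, values are examples only.
//   -DARDUINO_SERIAL_EVENT_TASK_STACK_SIZE=4096
//   -DARDUINO_SERIAL_EVENT_TASK_PRIORITY=10
//   -DARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE=1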
// UART0 pins are defined by default by the bootloader. // UART0 pins are defined by default by the bootloader.
@ -160,10 +126,6 @@ typedef enum {
#define SOC_RX0 (gpio_num_t)17 #define SOC_RX0 (gpio_num_t)17
#elif CONFIG_IDF_TARGET_ESP32H2 #elif CONFIG_IDF_TARGET_ESP32H2
#define SOC_RX0 (gpio_num_t)23 #define SOC_RX0 (gpio_num_t)23
#elif CONFIG_IDF_TARGET_ESP32P4
#define SOC_RX0 (gpio_num_t)38
#elif CONFIG_IDF_TARGET_ESP32C5
#define SOC_RX0 (gpio_num_t)12
#endif #endif
#endif #endif
@ -180,16 +142,12 @@ typedef enum {
#define SOC_TX0 (gpio_num_t)16 #define SOC_TX0 (gpio_num_t)16
#elif CONFIG_IDF_TARGET_ESP32H2 #elif CONFIG_IDF_TARGET_ESP32H2
#define SOC_TX0 (gpio_num_t)24 #define SOC_TX0 (gpio_num_t)24
#elif CONFIG_IDF_TARGET_ESP32P4
#define SOC_TX0 (gpio_num_t)37
#elif CONFIG_IDF_TARGET_ESP32C5
#define SOC_TX0 (gpio_num_t)11
#endif #endif
#endif #endif
// Default pins for UART1 are arbitrary, and defined here for convenience. // Default pins for UART1 are arbitrary, and defined here for convenience.
#if SOC_UART_HP_NUM > 1 #if SOC_UART_NUM > 1
#ifndef RX1 #ifndef RX1
#if CONFIG_IDF_TARGET_ESP32 #if CONFIG_IDF_TARGET_ESP32
#define RX1 (gpio_num_t)26 #define RX1 (gpio_num_t)26
@ -205,10 +163,6 @@ typedef enum {
#define RX1 (gpio_num_t)4 #define RX1 (gpio_num_t)4
#elif CONFIG_IDF_TARGET_ESP32H2 #elif CONFIG_IDF_TARGET_ESP32H2
#define RX1 (gpio_num_t)0 #define RX1 (gpio_num_t)0
#elif CONFIG_IDF_TARGET_ESP32P4
#define RX1 (gpio_num_t)11
#elif CONFIG_IDF_TARGET_ESP32C5
#define RX1 (gpio_num_t)4
#endif #endif
#endif #endif
@ -227,17 +181,13 @@ typedef enum {
#define TX1 (gpio_num_t)5 #define TX1 (gpio_num_t)5
#elif CONFIG_IDF_TARGET_ESP32H2 #elif CONFIG_IDF_TARGET_ESP32H2
#define TX1 (gpio_num_t)1 #define TX1 (gpio_num_t)1
#elif CONFIG_IDF_TARGET_ESP32P4
#define TX1 (gpio_num_t)10
#elif CONFIG_IDF_TARGET_ESP32C5
#define TX1 (gpio_num_t)5
#endif #endif
#endif #endif
#endif /* SOC_UART_HP_NUM > 1 */ #endif /* SOC_UART_NUM > 1 */
// Default pins for UART2 are arbitrary, and defined here for convenience. // Default pins for UART2 are arbitrary, and defined here for convenience.
#if SOC_UART_HP_NUM > 2 #if SOC_UART_NUM > 2
#ifndef RX2 #ifndef RX2
#if CONFIG_IDF_TARGET_ESP32 #if CONFIG_IDF_TARGET_ESP32
#define RX2 (gpio_num_t)4 #define RX2 (gpio_num_t)4
@ -253,22 +203,13 @@ typedef enum {
#define TX2 (gpio_num_t)20 #define TX2 (gpio_num_t)20
#endif #endif
#endif #endif
#endif /* SOC_UART_HP_NUM > 2 */ #endif /* SOC_UART_NUM > 2 */
#if SOC_UART_LP_NUM >= 1
#ifndef LP_RX0
#define LP_RX0 (gpio_num_t) LP_U0RXD_GPIO_NUM
#endif
#ifndef LP_TX0
#define LP_TX0 (gpio_num_t) LP_U0TXD_GPIO_NUM
#endif
#endif /* SOC_UART_LP_NUM >= 1 */
typedef std::function<void(void)> OnReceiveCb; typedef std::function<void(void)> OnReceiveCb;
typedef std::function<void(hardwareSerial_error_t)> OnReceiveErrorCb; typedef std::function<void(hardwareSerial_error_t)> OnReceiveErrorCb;
class HardwareSerial : public Stream { class HardwareSerial: public Stream
{
public: public:
HardwareSerial(uint8_t uart_nr); HardwareSerial(uint8_t uart_nr);
~HardwareSerial(); ~HardwareSerial();
@ -289,7 +230,7 @@ public:
// onReceive will setup a callback that will be called whenever an UART interruption occurs (UART_INTR_RXFIFO_FULL or UART_INTR_RXFIFO_TOUT) // onReceive will setup a callback that will be called whenever an UART interruption occurs (UART_INTR_RXFIFO_FULL or UART_INTR_RXFIFO_TOUT)
// UART_INTR_RXFIFO_FULL interrupt triggers at UART_FULL_THRESH_DEFAULT bytes received (defined as 120 bytes by default in IDF) // UART_INTR_RXFIFO_FULL interrupt triggers at UART_FULL_THRESH_DEFAULT bytes received (defined as 120 bytes by default in IDF)
// UART_INTR_RXFIFO_TOUT interrupt triggers at UART_TOUT_THRESH_DEFAULT symbols passed without any reception (defined as 10 symbols by default in IDF) // UART_INTR_RXFIFO_TOUT interrupt triggers at UART_TOUT_THRESH_DEFAULT symbols passed without any reception (defined as 10 symbos by default in IDF)
// onlyOnTimeout parameter will define how onReceive will behave: // onlyOnTimeout parameter will define how onReceive will behave:
// Default: true -- The callback will only be called when RX Timeout happens. // Default: true -- The callback will only be called when RX Timeout happens.
// Whole stream of bytes will be ready for being read on the callback function at once. // Whole stream of bytes will be ready for being read on the callback function at once.
@ -302,7 +243,7 @@ public:
// onReceive will be called on error events (see hardwareSerial_error_t) // onReceive will be called on error events (see hardwareSerial_error_t)
void onReceiveError(OnReceiveErrorCb function); void onReceiveError(OnReceiveErrorCb function);
// eventQueueReset clears all events in the queue (the events that trigger onReceive and onReceiveError) - maybe useful in some use cases // eventQueueReset clears all events in the queue (the events that trigger onReceive and onReceiveError) - maybe usefull in some use cases
void eventQueueReset(); void eventQueueReset();
// When pins are changed, it will detach the previous ones // When pins are changed, it will detach the previous ones
@ -310,10 +251,7 @@ public:
// timeout_ms is used in baudrate detection (ESP32, ESP32S2 only) // timeout_ms is used in baudrate detection (ESP32, ESP32S2 only)
// invert will invert RX/TX polarity // invert will invert RX/TX polarity
// rxfifo_full_thrhd is the UART Flow Control Threshold in the UART FIFO (max 127) // rxfifo_full_thrhd is the UART Flow Control Threshold in the UART FIFO (max 127)
void begin( void begin(unsigned long baud, uint32_t config=SERIAL_8N1, int8_t rxPin=-1, int8_t txPin=-1, bool invert=false, unsigned long timeout_ms = 20000UL, uint8_t rxfifo_full_thrhd = 112);
unsigned long baud, uint32_t config = SERIAL_8N1, int8_t rxPin = -1, int8_t txPin = -1, bool invert = false, unsigned long timeout_ms = 20000UL,
uint8_t rxfifo_full_thrhd = 120
);
void end(void); void end(void);
void updateBaudRate(unsigned long baud); void updateBaudRate(unsigned long baud);
int available(void); int available(void);
@ -321,34 +259,42 @@ public:
int peek(void); int peek(void);
int read(void); int read(void);
size_t read(uint8_t *buffer, size_t size); size_t read(uint8_t *buffer, size_t size);
inline size_t read(char *buffer, size_t size) { inline size_t read(char * buffer, size_t size)
{
return read((uint8_t*) buffer, size); return read((uint8_t*) buffer, size);
} }
// Overrides Stream::readBytes() to be faster using IDF // Overrides Stream::readBytes() to be faster using IDF
size_t readBytes(uint8_t *buffer, size_t length); size_t readBytes(uint8_t *buffer, size_t length);
size_t readBytes(char *buffer, size_t length) { size_t readBytes(char *buffer, size_t length)
{
return readBytes((uint8_t *) buffer, length); return readBytes((uint8_t *) buffer, length);
} }
void flush(void); void flush(void);
void flush( bool txOnly); void flush( bool txOnly);
size_t write(uint8_t); size_t write(uint8_t);
size_t write(const uint8_t *buffer, size_t size); size_t write(const uint8_t *buffer, size_t size);
inline size_t write(const char *buffer, size_t size) { inline size_t write(const char * buffer, size_t size)
{
return write((uint8_t*) buffer, size); return write((uint8_t*) buffer, size);
} }
inline size_t write(const char *s) { inline size_t write(const char * s)
{
return write((uint8_t*) s, strlen(s)); return write((uint8_t*) s, strlen(s));
} }
inline size_t write(unsigned long n) { inline size_t write(unsigned long n)
{
return write((uint8_t) n); return write((uint8_t) n);
} }
inline size_t write(long n) { inline size_t write(long n)
{
return write((uint8_t) n); return write((uint8_t) n);
} }
inline size_t write(unsigned int n) { inline size_t write(unsigned int n)
{
return write((uint8_t) n); return write((uint8_t) n);
} }
inline size_t write(int n) { inline size_t write(int n)
{
return write((uint8_t) n); return write((uint8_t) n);
} }
uint32_t baudRate(); uint32_t baudRate();
@ -375,17 +321,6 @@ public:
// UART_MODE_RS485_COLLISION_DETECT = 0x03 mode: RS485 collision detection UART mode (used for test purposes) // UART_MODE_RS485_COLLISION_DETECT = 0x03 mode: RS485 collision detection UART mode (used for test purposes)
// UART_MODE_RS485_APP_CTRL = 0x04 mode: application control RS485 UART mode (used for test purposes) // UART_MODE_RS485_APP_CTRL = 0x04 mode: application control RS485 UART mode (used for test purposes)
bool setMode(SerialMode mode); bool setMode(SerialMode mode);
// Used to set the UART clock source mode. It must be set before calling begin(), otherwise it won't have any effect.
// Not all clock source are available to every SoC. The compatible option are listed here:
// UART_CLK_SRC_DEFAULT :: any SoC - it will set whatever IDF defines as the default UART Clock Source
// UART_CLK_SRC_APB :: ESP32, ESP32-S2, ESP32-C3 and ESP32-S3
// UART_CLK_SRC_PLL :: ESP32-C2, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2 and ESP32-P4
// UART_CLK_SRC_XTAL :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_RTC :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_REF_TICK :: ESP32 and ESP32-S2
// Note: CLK_SRC_PLL Freq depends on the SoC - ESP32-C2 has 40MHz, ESP32-H2 has 48MHz and ESP32-C5, C6, C61 and P4 has 80MHz
// Note: ESP32-C6, C61, ESP32-P4 and ESP32-C5 have LP UART that will use only RTC_FAST or XTAL/2 as Clock Source
bool setClockSource(SerialClkSrc clkSrc);
size_t setRxBufferSize(size_t new_size); size_t setRxBufferSize(size_t new_size);
size_t setTxBufferSize(size_t new_size); size_t setTxBufferSize(size_t new_size);
@ -435,15 +370,6 @@ extern HardwareSerial Serial1;
#if SOC_UART_NUM > 2 #if SOC_UART_NUM > 2
extern HardwareSerial Serial2; extern HardwareSerial Serial2;
#endif #endif
#if SOC_UART_NUM > 3
extern HardwareSerial Serial3;
#endif
#if SOC_UART_NUM > 4
extern HardwareSerial Serial4;
#endif
#if SOC_UART_NUM > 5
extern HardwareSerial Serial5;
#endif
#endif //!defined(NO_GLOBAL_INSTANCES) && !defined(NO_GLOBAL_SERIAL) #endif //!defined(NO_GLOBAL_INSTANCES) && !defined(NO_GLOBAL_SERIAL)
#endif // HardwareSerial_h #endif // HardwareSerial_h
@ -20,21 +20,25 @@
#include "HEXBuilder.h" #include "HEXBuilder.h"
class HashBuilder : public HEXBuilder { class HashBuilder : public HEXBuilder
{
public: public:
virtual ~HashBuilder() {} virtual ~HashBuilder() {}
virtual void begin() = 0; virtual void begin() = 0;
virtual void add(const uint8_t* data, size_t len) = 0; virtual void add(const uint8_t* data, size_t len) = 0;
virtual void add(const char *data) { virtual void add(const char* data)
{
add((const uint8_t*)data, strlen(data)); add((const uint8_t*)data, strlen(data));
} }
virtual void add(String data) { virtual void add(String data)
{
add(data.c_str()); add(data.c_str());
} }
virtual void addHexString(const char* data) = 0; virtual void addHexString(const char* data) = 0;
virtual void addHexString(String data) { virtual void addHexString(String data)
{
addHexString(data.c_str()); addHexString(data.c_str());
} }
@ -22,19 +22,17 @@
#include "lwip/netif.h" #include "lwip/netif.h"
#include "StreamString.h" #include "StreamString.h"
#ifndef CONFIG_LWIP_IPV6
#define IP6_NO_ZONE 0
#endif
IPAddress::IPAddress() : IPAddress(IPv4) {} IPAddress::IPAddress() : IPAddress(IPv4) {}
IPAddress::IPAddress(IPType ip_type) { IPAddress::IPAddress(IPType ip_type)
{
_type = ip_type; _type = ip_type;
_zone = IP6_NO_ZONE; _zone = IP6_NO_ZONE;
memset(_address.bytes, 0, sizeof(_address.bytes)); memset(_address.bytes, 0, sizeof(_address.bytes));
} }
IPAddress::IPAddress(uint8_t first_octet, uint8_t second_octet, uint8_t third_octet, uint8_t fourth_octet) { IPAddress::IPAddress(uint8_t first_octet, uint8_t second_octet, uint8_t third_octet, uint8_t fourth_octet)
{
_type = IPv4; _type = IPv4;
_zone = IP6_NO_ZONE; _zone = IP6_NO_ZONE;
memset(_address.bytes, 0, sizeof(_address.bytes)); memset(_address.bytes, 0, sizeof(_address.bytes));
@ -44,10 +42,7 @@ IPAddress::IPAddress(uint8_t first_octet, uint8_t second_octet, uint8_t third_oc
_address.bytes[IPADDRESS_V4_BYTES_INDEX + 3] = fourth_octet; _address.bytes[IPADDRESS_V4_BYTES_INDEX + 3] = fourth_octet;
} }
IPAddress::IPAddress( IPAddress::IPAddress(uint8_t o1, uint8_t o2, uint8_t o3, uint8_t o4, uint8_t o5, uint8_t o6, uint8_t o7, uint8_t o8, uint8_t o9, uint8_t o10, uint8_t o11, uint8_t o12, uint8_t o13, uint8_t o14, uint8_t o15, uint8_t o16, uint8_t z) {
uint8_t o1, uint8_t o2, uint8_t o3, uint8_t o4, uint8_t o5, uint8_t o6, uint8_t o7, uint8_t o8, uint8_t o9, uint8_t o10, uint8_t o11, uint8_t o12,
uint8_t o13, uint8_t o14, uint8_t o15, uint8_t o16, uint8_t z
) {
_type = IPv6; _type = IPv6;
_address.bytes[0] = o1; _address.bytes[0] = o1;
_address.bytes[1] = o2; _address.bytes[1] = o2;
@ -68,7 +63,8 @@ IPAddress::IPAddress(
_zone = z; _zone = z;
} }
IPAddress::IPAddress(uint32_t address) { IPAddress::IPAddress(uint32_t address)
{
// IPv4 only // IPv4 only
_type = IPv4; _type = IPv4;
_zone = IP6_NO_ZONE; _zone = IP6_NO_ZONE;
@ -86,7 +82,8 @@ IPAddress::IPAddress(uint32_t address) {
IPAddress::IPAddress(const uint8_t *address) : IPAddress(IPv4, address) {} IPAddress::IPAddress(const uint8_t *address) : IPAddress(IPv4, address) {}
IPAddress::IPAddress(IPType ip_type, const uint8_t *address, uint8_t z) { IPAddress::IPAddress(IPType ip_type, const uint8_t *address, uint8_t z)
{
_type = ip_type; _type = ip_type;
if (ip_type == IPv4) { if (ip_type == IPv4) {
memset(_address.bytes, 0, sizeof(_address.bytes)); memset(_address.bytes, 0, sizeof(_address.bytes));
@ -98,15 +95,18 @@ IPAddress::IPAddress(IPType ip_type, const uint8_t *address, uint8_t z) {
} }
} }
IPAddress::IPAddress(const char *address) { IPAddress::IPAddress(const char *address)
{
fromString(address); fromString(address);
} }
IPAddress::IPAddress(const IPAddress &address) { IPAddress::IPAddress(const IPAddress& address)
{
*this = address; *this = address;
} }
String IPAddress::toString(bool includeZone) const { String IPAddress::toString(bool includeZone) const
{
StreamString s; StreamString s;
printTo(s, includeZone); printTo(s, includeZone);
return String(s); return String(s);
@ -119,22 +119,27 @@ bool IPAddress::fromString(const char *address) {
return true; return true;
} }
bool IPAddress::fromString4(const char *address) { bool IPAddress::fromString4(const char *address)
{
// TODO: add support for "a", "a.b", "a.b.c" formats // TODO: add support for "a", "a.b", "a.b.c" formats
int16_t acc = -1; // Accumulator int16_t acc = -1; // Accumulator
uint8_t dots = 0; uint8_t dots = 0;
memset(_address.bytes, 0, sizeof(_address.bytes)); memset(_address.bytes, 0, sizeof(_address.bytes));
while (*address) { while (*address)
{
char c = *address++; char c = *address++;
if (c >= '0' && c <= '9') { if (c >= '0' && c <= '9')
{
acc = (acc < 0) ? (c - '0') : acc * 10 + (c - '0'); acc = (acc < 0) ? (c - '0') : acc * 10 + (c - '0');
if (acc > 255) { if (acc > 255) {
// Value out of [0..255] range // Value out of [0..255] range
return false; return false;
} }
} else if (c == '.') { }
else if (c == '.')
{
if (dots == 3) { if (dots == 3) {
// Too many dots (there must be 3 dots) // Too many dots (there must be 3 dots)
return false; return false;
@ -145,7 +150,9 @@ bool IPAddress::fromString4(const char *address) {
} }
_address.bytes[IPADDRESS_V4_BYTES_INDEX + dots++] = acc; _address.bytes[IPADDRESS_V4_BYTES_INDEX + dots++] = acc;
acc = -1; acc = -1;
} else { }
else
{
// Invalid char // Invalid char
return false; return false;
} }
@ -168,18 +175,18 @@ bool IPAddress::fromString6(const char *address) {
uint32_t acc = 0; // Accumulator uint32_t acc = 0; // Accumulator
int colons = 0, double_colons = -1; int colons = 0, double_colons = -1;
while (*address) { while (*address)
{
char c = tolower(*address++); char c = tolower(*address++);
if (isalnum(c) && c <= 'f') { if (isalnum(c) && c <= 'f') {
if (c >= 'a') { if (c >= 'a')
c -= 'a' - '0' - 10; c -= 'a' - '0' - 10;
}
acc = acc * 16 + (c - '0'); acc = acc * 16 + (c - '0');
if (acc > 0xffff) { if (acc > 0xffff)
// Value out of range // Value out of range
return false; return false;
} }
} else if (c == ':') { else if (c == ':') {
if (*address == ':') { if (*address == ':') {
if (double_colons >= 0) { if (double_colons >= 0) {
// :: allowed once // :: allowed once
@ -196,30 +203,24 @@ bool IPAddress::fromString6(const char *address) {
// can't end with a single colon // can't end with a single colon
return false; return false;
} }
if (colons == 7) { if (colons == 7)
// too many separators // too many separators
return false; return false;
}
_address.bytes[colons * 2] = acc >> 8; _address.bytes[colons * 2] = acc >> 8;
_address.bytes[colons * 2 + 1] = acc & 0xff; _address.bytes[colons * 2 + 1] = acc & 0xff;
colons++; colons++;
acc = 0; acc = 0;
} else if (c == '%') {
// netif_index_to_name crashes on latest esp-idf
// _zone = netif_name_to_index(address);
// in the interim, we parse the suffix as a zone number
while ((*address != '\0') && (!isdigit(*address))) { // skip all non-digit after '%'
address++;
} }
_zone = atol(address) + 1; // increase by one by convention, so we can have zone '0' else if (c == '%') {
_zone = netif_name_to_index(address);
while(*address != '\0'){ while(*address != '\0'){
address++; address++;
} }
} else { }
else
// Invalid char // Invalid char
return false; return false;
} }
}
if (double_colons == -1 && colons != 7) { if (double_colons == -1 && colons != 7) {
// Too few separators // Too few separators
@ -234,19 +235,18 @@ bool IPAddress::fromString6(const char *address) {
colons++; colons++;
if (double_colons != -1) { if (double_colons != -1) {
for (int i = colons * 2 - double_colons * 2 - 1; i >= 0; i--) { for (int i = colons * 2 - double_colons * 2 - 1; i >= 0; i--)
_address.bytes[16 - colons * 2 + double_colons * 2 + i] = _address.bytes[double_colons * 2 + i]; _address.bytes[16 - colons * 2 + double_colons * 2 + i] = _address.bytes[double_colons * 2 + i];
} for (int i = double_colons * 2; i < 16 - colons * 2 + double_colons * 2; i++)
for (int i = double_colons * 2; i < 16 - colons * 2 + double_colons * 2; i++) {
_address.bytes[i] = 0; _address.bytes[i] = 0;
} }
}
_type = IPv6; _type = IPv6;
return true; return true;
} }
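Together, fromString4() and fromString6() let the public fromString() accept dotted-quad IPv4 and colon-separated IPv6 (including '::' compression and, per the code above, a '%' zone suffix). A small illustrative fragment, assuming it runs inside an Arduino sketch where Serial is already started:

void demoParse() {
  IPAddress a, b;
  bool ok4 = a.fromString("192.168.4.1");  // IPv4 path (fromString4)
  bool ok6 = b.fromString("fe80::1");      // IPv6 path with '::' compression
  Serial.printf("%d %s\n", ok4, a.toString().c_str());
  Serial.printf("%d %s\n", ok6, b.toString().c_str());
}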
IPAddress &IPAddress::operator=(const uint8_t *address) { IPAddress& IPAddress::operator=(const uint8_t *address)
{
// IPv4 only conversion from byte pointer // IPv4 only conversion from byte pointer
_type = IPv4; _type = IPv4;
memset(_address.bytes, 0, sizeof(_address.bytes)); memset(_address.bytes, 0, sizeof(_address.bytes));
@ -254,12 +254,14 @@ IPAddress &IPAddress::operator=(const uint8_t *address) {
return *this; return *this;
} }
IPAddress &IPAddress::operator=(const char *address) { IPAddress& IPAddress::operator=(const char *address)
{
fromString(address); fromString(address);
return *this; return *this;
} }
IPAddress &IPAddress::operator=(uint32_t address) { IPAddress& IPAddress::operator=(uint32_t address)
{
// IPv4 conversion // IPv4 conversion
// See note on conversion/comparison and uint32_t // See note on conversion/comparison and uint32_t
_type = IPv4; _type = IPv4;
@ -276,10 +278,12 @@ IPAddress &IPAddress::operator=(const IPAddress &address) {
} }
bool IPAddress::operator==(const IPAddress& addr) const { bool IPAddress::operator==(const IPAddress& addr) const {
return (addr._type == _type) && (_type == IPType::IPv4 ? addr._address.dword[IPADDRESS_V4_DWORD_INDEX] == _address.dword[IPADDRESS_V4_DWORD_INDEX] : memcmp(addr._address.bytes, _address.bytes, sizeof(_address.bytes)) == 0); return (addr._type == _type)
&& (memcmp(addr._address.bytes, _address.bytes, sizeof(_address.bytes)) == 0);
} }
bool IPAddress::operator==(const uint8_t *addr) const { bool IPAddress::operator==(const uint8_t* addr) const
{
// IPv4 only comparison to byte pointer // IPv4 only comparison to byte pointer
// Can't support IPv6 as we know our type, but not the length of the pointer // Can't support IPv6 as we know our type, but not the length of the pointer
return _type == IPv4 && memcmp(addr, &_address.bytes[IPADDRESS_V4_BYTES_INDEX], sizeof(uint32_t)) == 0; return _type == IPv4 && memcmp(addr, &_address.bytes[IPADDRESS_V4_BYTES_INDEX], sizeof(uint32_t)) == 0;
@ -299,11 +303,13 @@ uint8_t &IPAddress::operator[](int index) {
return _address.bytes[index]; return _address.bytes[index];
} }
size_t IPAddress::printTo(Print &p) const { size_t IPAddress::printTo(Print& p) const
{
return printTo(p, false); return printTo(p, false);
} }
size_t IPAddress::printTo(Print &p, bool includeZone) const { size_t IPAddress::printTo(Print& p, bool includeZone) const
{
size_t n = 0; size_t n = 0;
if (_type == IPv6) { if (_type == IPv6) {
@ -354,31 +360,19 @@ size_t IPAddress::printTo(Print &p, bool includeZone) const {
n += p.print(':'); n += p.print(':');
} }
} }
// add a zone if zone-id is non-zero (causes exception on recent IDF builds) // add a zone if zone-id is non-zero
// if (_zone > 0 && includeZone) {
// n += p.print('%');
// char if_name[NETIF_NAMESIZE];
// netif_index_to_name(_zone, if_name);
// n += p.print(if_name);
// }
// In the interim, we just output the index number
if(_zone > 0 && includeZone){ if(_zone > 0 && includeZone){
n += p.print('%'); n += p.print('%');
// look for the interface name char if_name[NETIF_NAMESIZE];
for (netif *intf = netif_list; intf != nullptr; intf = intf->next) { netif_index_to_name(_zone, if_name);
if (_zone - 1 == intf->num) { n += p.print(if_name);
n += p.print(intf->name[0]);
n += p.print(intf->name[1]);
break;
}
}
n += p.print(_zone - 1);
} }
return n; return n;
} }
// IPv4 // IPv4
for (int i = 0; i < 3; i++) { for (int i =0; i < 3; i++)
{
n += p.print(_address.bytes[IPADDRESS_V4_BYTES_INDEX + i], DEC); n += p.print(_address.bytes[IPADDRESS_V4_BYTES_INDEX + i], DEC);
n += p.print('.'); n += p.print('.');
} }
@ -391,7 +385,6 @@ IPAddress::IPAddress(const ip_addr_t *addr) {
} }
void IPAddress::to_ip_addr_t(ip_addr_t* addr) const { void IPAddress::to_ip_addr_t(ip_addr_t* addr) const {
#if CONFIG_LWIP_IPV6
if(_type == IPv6){ if(_type == IPv6){
addr->type = IPADDR_TYPE_V6; addr->type = IPADDR_TYPE_V6;
addr->u_addr.ip6.addr[0] = _address.dword[0]; addr->u_addr.ip6.addr[0] = _address.dword[0];
@ -405,13 +398,9 @@ void IPAddress::to_ip_addr_t(ip_addr_t *addr) const {
addr->type = IPADDR_TYPE_V4; addr->type = IPADDR_TYPE_V4;
addr->u_addr.ip4.addr = _address.dword[IPADDRESS_V4_DWORD_INDEX]; addr->u_addr.ip4.addr = _address.dword[IPADDRESS_V4_DWORD_INDEX];
} }
#else
addr->addr = _address.dword[IPADDRESS_V4_DWORD_INDEX];
#endif
} }
IPAddress& IPAddress::from_ip_addr_t(const ip_addr_t* addr){ IPAddress& IPAddress::from_ip_addr_t(const ip_addr_t* addr){
#if CONFIG_LWIP_IPV6
if(addr->type == IPADDR_TYPE_V6){ if(addr->type == IPADDR_TYPE_V6){
_type = IPv6; _type = IPv6;
_address.dword[0] = addr->u_addr.ip6.addr[0]; _address.dword[0] = addr->u_addr.ip6.addr[0];
@ -422,21 +411,12 @@ IPAddress &IPAddress::from_ip_addr_t(const ip_addr_t *addr) {
_zone = addr->u_addr.ip6.zone; _zone = addr->u_addr.ip6.zone;
#endif /* LWIP_IPV6_SCOPES */ #endif /* LWIP_IPV6_SCOPES */
} else { } else {
#endif
_type = IPv4; _type = IPv4;
memset(_address.bytes, 0, sizeof(_address.bytes));
#if CONFIG_LWIP_IPV6
_address.dword[IPADDRESS_V4_DWORD_INDEX] = addr->u_addr.ip4.addr; _address.dword[IPADDRESS_V4_DWORD_INDEX] = addr->u_addr.ip4.addr;
#else
_address.dword[IPADDRESS_V4_DWORD_INDEX] = addr->addr;
#endif
#if CONFIG_LWIP_IPV6
} }
#endif
return *this; return *this;
} }
#if CONFIG_LWIP_IPV6
esp_ip6_addr_type_t IPAddress::addr_type() const { esp_ip6_addr_type_t IPAddress::addr_type() const {
if(_type != IPv6){ if(_type != IPv6){
return ESP_IP6_ADDR_IS_UNKNOWN; return ESP_IP6_ADDR_IS_UNKNOWN;
@ -445,9 +425,6 @@ esp_ip6_addr_type_t IPAddress::addr_type() const {
to_ip_addr_t(&addr); to_ip_addr_t(&addr);
return esp_netif_ip6_get_addr_type((esp_ip6_addr_t*)(&(addr.u_addr.ip6))); return esp_netif_ip6_get_addr_type((esp_ip6_addr_t*)(&(addr.u_addr.ip6)));
} }
#endif
#if CONFIG_LWIP_IPV6
const IPAddress IN6ADDR_ANY(IPv6); const IPAddress IN6ADDR_ANY(IPv6);
#endif
const IPAddress INADDR_NONE(0,0,0,0); const IPAddress INADDR_NONE(0,0,0,0);
@ -24,7 +24,6 @@
#include "WString.h" #include "WString.h"
#include "lwip/ip_addr.h" #include "lwip/ip_addr.h"
#include "esp_netif_ip_addr.h" #include "esp_netif_ip_addr.h"
#include "sdkconfig.h"
#define IPADDRESS_V4_BYTES_INDEX 12 #define IPADDRESS_V4_BYTES_INDEX 12
#define IPADDRESS_V4_DWORD_INDEX 3 #define IPADDRESS_V4_DWORD_INDEX 3
@ -49,9 +48,7 @@ private:
// to the internal structure rather than a copy of the address this function should only // to the internal structure rather than a copy of the address this function should only
// be used when you know that the usage of the returned uint8_t* will be transient and not // be used when you know that the usage of the returned uint8_t* will be transient and not
// stored. // stored.
uint8_t *raw_address() { uint8_t* raw_address() { return _type == IPv4 ? &_address.bytes[IPADDRESS_V4_BYTES_INDEX] : _address.bytes; }
return _type == IPv4 ? &_address.bytes[IPADDRESS_V4_BYTES_INDEX] : _address.bytes;
}
public: public:
// Constructors // Constructors
@ -60,10 +57,7 @@ public:
IPAddress(); IPAddress();
IPAddress(IPType ip_type); IPAddress(IPType ip_type);
IPAddress(uint8_t first_octet, uint8_t second_octet, uint8_t third_octet, uint8_t fourth_octet); IPAddress(uint8_t first_octet, uint8_t second_octet, uint8_t third_octet, uint8_t fourth_octet);
IPAddress( IPAddress(uint8_t o1, uint8_t o2, uint8_t o3, uint8_t o4, uint8_t o5, uint8_t o6, uint8_t o7, uint8_t o8, uint8_t o9, uint8_t o10, uint8_t o11, uint8_t o12, uint8_t o13, uint8_t o14, uint8_t o15, uint8_t o16, uint8_t z=0);
uint8_t o1, uint8_t o2, uint8_t o3, uint8_t o4, uint8_t o5, uint8_t o6, uint8_t o7, uint8_t o8, uint8_t o9, uint8_t o10, uint8_t o11, uint8_t o12,
uint8_t o13, uint8_t o14, uint8_t o15, uint8_t o16, uint8_t z = 0
);
// IPv4; see implementation note // IPv4; see implementation note
IPAddress(uint32_t address); IPAddress(uint32_t address);
// Default IPv4 // Default IPv4
@ -74,20 +68,14 @@ public:
IPAddress(const IPAddress& address); IPAddress(const IPAddress& address);
bool fromString(const char *address); bool fromString(const char *address);
bool fromString(const String &address) { bool fromString(const String &address) { return fromString(address.c_str()); }
return fromString(address.c_str());
}
// Overloaded cast operator to allow IPAddress objects to be used where a uint32_t is expected // Overloaded cast operator to allow IPAddress objects to be used where a uint32_t is expected
// NOTE: IPv4 only; see implementation note // NOTE: IPv4 only; see implementation note
operator uint32_t() const { operator uint32_t() const { return _type == IPv4 ? _address.dword[IPADDRESS_V4_DWORD_INDEX] : 0; };
return _type == IPv4 ? _address.dword[IPADDRESS_V4_DWORD_INDEX] : 0;
};
bool operator==(const IPAddress& addr) const; bool operator==(const IPAddress& addr) const;
bool operator!=(const IPAddress &addr) const { bool operator!=(const IPAddress& addr) const { return !(*this == addr); };
return !(*this == addr);
};
// NOTE: IPv4 only; we don't know the length of the pointer // NOTE: IPv4 only; we don't know the length of the pointer
bool operator==(const uint8_t* addr) const; bool operator==(const uint8_t* addr) const;
@ -96,7 +84,7 @@ public:
uint8_t operator[](int index) const; uint8_t operator[](int index) const;
uint8_t& operator[](int index); uint8_t& operator[](int index);
// Overloaded copy operators to allow initialization of IPAddress objects from other types // Overloaded copy operators to allow initialisation of IPAddress objects from other types
// NOTE: IPv4 only // NOTE: IPv4 only
IPAddress& operator=(const uint8_t *address); IPAddress& operator=(const uint8_t *address);
// NOTE: IPv4 only; see implementation note // NOTE: IPv4 only; see implementation note
@ -108,28 +96,19 @@ public:
virtual size_t printTo(Print& p) const; virtual size_t printTo(Print& p) const;
String toString(bool includeZone = false) const; String toString(bool includeZone = false) const;
IPType type() const { IPType type() const { return _type; }
return _type;
}
// Espressif LwIP conversions // Espressif LwIP conversions
IPAddress(const ip_addr_t *addr); IPAddress(const ip_addr_t *addr);
void to_ip_addr_t(ip_addr_t* addr) const; void to_ip_addr_t(ip_addr_t* addr) const;
IPAddress& from_ip_addr_t(const ip_addr_t* addr); IPAddress& from_ip_addr_t(const ip_addr_t* addr);
#if CONFIG_LWIP_IPV6
esp_ip6_addr_type_t addr_type() const; esp_ip6_addr_type_t addr_type() const;
#endif uint8_t zone() const { return (type() == IPv6)?_zone:0; }
uint8_t zone() const {
return (type() == IPv6) ? _zone : 0;
}
size_t printTo(Print& p, bool includeZone) const; size_t printTo(Print& p, bool includeZone) const;
friend class UDP; friend class UDP;
friend class Client; friend class Client;
friend class Server; friend class Server;
friend class EthernetClass;
friend class DhcpClass;
friend class DNSClient;
protected: protected:
bool fromString4(const char *address); bool fromString4(const char *address);
@ -21,16 +21,19 @@
#include <HEXBuilder.h> #include <HEXBuilder.h>
#include <MD5Builder.h> #include <MD5Builder.h>
void MD5Builder::begin(void) { void MD5Builder::begin(void)
{
memset(_buf, 0x00, ESP_ROM_MD5_DIGEST_LEN); memset(_buf, 0x00, ESP_ROM_MD5_DIGEST_LEN);
esp_rom_md5_init(&_ctx); esp_rom_md5_init(&_ctx);
} }
void MD5Builder::add(const uint8_t *data, size_t len) { void MD5Builder::add(const uint8_t * data, size_t len)
{
esp_rom_md5_update(&_ctx, data, len); esp_rom_md5_update(&_ctx, data, len);
} }
void MD5Builder::addHexString(const char *data) { void MD5Builder::addHexString(const char * data)
{
size_t len = strlen(data); size_t len = strlen(data);
uint8_t * tmp = (uint8_t*)malloc(len/2); uint8_t * tmp = (uint8_t*)malloc(len/2);
if(tmp == NULL) { if(tmp == NULL) {
@ -41,7 +44,8 @@ void MD5Builder::addHexString(const char *data) {
free(tmp); free(tmp);
} }
bool MD5Builder::addStream(Stream &stream, const size_t maxLen) { bool MD5Builder::addStream(Stream & stream, const size_t maxLen)
{
const int buf_size = 512; const int buf_size = 512;
int maxLengthLeft = maxLen; int maxLengthLeft = maxLen;
uint8_t * buf = (uint8_t*) malloc(buf_size); uint8_t * buf = (uint8_t*) malloc(buf_size);
@ -80,19 +84,23 @@ bool MD5Builder::addStream(Stream &stream, const size_t maxLen) {
return true; return true;
} }
void MD5Builder::calculate(void) { void MD5Builder::calculate(void)
{
esp_rom_md5_final(_buf, &_ctx); esp_rom_md5_final(_buf, &_ctx);
} }
void MD5Builder::getBytes(uint8_t *output) { void MD5Builder::getBytes(uint8_t * output)
{
memcpy(output, _buf, ESP_ROM_MD5_DIGEST_LEN); memcpy(output, _buf, ESP_ROM_MD5_DIGEST_LEN);
} }
void MD5Builder::getChars(char *output) { void MD5Builder::getChars(char * output)
{
bytes2hex(output, ESP_ROM_MD5_DIGEST_LEN*2+1, _buf, ESP_ROM_MD5_DIGEST_LEN); bytes2hex(output, ESP_ROM_MD5_DIGEST_LEN*2+1, _buf, ESP_ROM_MD5_DIGEST_LEN);
} }
String MD5Builder::toString(void) { String MD5Builder::toString(void)
{
char out[(ESP_ROM_MD5_DIGEST_LEN * 2) + 1]; char out[(ESP_ROM_MD5_DIGEST_LEN * 2) + 1];
getChars(out); getChars(out);
return String(out); return String(out);
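MD5Builder is the HashBuilder implementation used throughout the core; the usual cycle is begin(), one or more add() calls, calculate(), then toString() or getBytes(). A minimal hedged example:

#include <MD5Builder.h>

String md5Of(const String &payload) {
  MD5Builder md5;
  md5.begin();            // reset the ROM MD5 context
  md5.add(payload);       // may be called repeatedly to stream data in
  md5.calculate();        // finalize the digest
  return md5.toString();  // 32 hex characters
}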
@ -29,11 +29,11 @@
#include "HashBuilder.h" #include "HashBuilder.h"
class MD5Builder : public HashBuilder { class MD5Builder : public HashBuilder
{
private: private:
md5_context_t _ctx; md5_context_t _ctx;
uint8_t _buf[ESP_ROM_MD5_DIGEST_LEN]; uint8_t _buf[ESP_ROM_MD5_DIGEST_LEN];
public: public:
void begin(void) override; void begin(void) override;
@ -1,228 +0,0 @@
#include <MacAddress.h>
#include <stdio.h>
#include <Print.h>
//Default constructor, blank mac address.
MacAddress::MacAddress() : MacAddress(MAC6) {}
MacAddress::MacAddress(MACType mac_type) {
_type = mac_type;
memset(_mac.bytes, 0, sizeof(_mac.bytes));
}
MacAddress::MacAddress(MACType mac_type, uint64_t mac) {
_type = mac_type;
_mac.val = mac;
}
MacAddress::MacAddress(MACType mac_type, const uint8_t *macbytearray) {
_type = mac_type;
memset(_mac.bytes, 0, sizeof(_mac.bytes));
if (_type == MAC6) {
memcpy(_mac.bytes, macbytearray, 6);
} else {
memcpy(_mac.bytes, macbytearray, 8);
}
}
MacAddress::MacAddress(const char *macstr) {
fromString(macstr);
}
MacAddress::MacAddress(const String &macstr) {
fromString(macstr.c_str());
}
MacAddress::MacAddress(uint8_t b1, uint8_t b2, uint8_t b3, uint8_t b4, uint8_t b5, uint8_t b6) {
_type = MAC6;
memset(_mac.bytes, 0, sizeof(_mac.bytes));
_mac.bytes[0] = b1;
_mac.bytes[1] = b2;
_mac.bytes[2] = b3;
_mac.bytes[3] = b4;
_mac.bytes[4] = b5;
_mac.bytes[5] = b6;
}
MacAddress::MacAddress(uint8_t b1, uint8_t b2, uint8_t b3, uint8_t b4, uint8_t b5, uint8_t b6, uint8_t b7, uint8_t b8) {
_type = MAC8;
_mac.bytes[0] = b1;
_mac.bytes[1] = b2;
_mac.bytes[2] = b3;
_mac.bytes[3] = b4;
_mac.bytes[4] = b5;
_mac.bytes[5] = b6;
_mac.bytes[6] = b7;
_mac.bytes[7] = b8;
}
//Parse user entered string into MAC address
bool MacAddress::fromString(const char *buf) {
if (strlen(buf) == 17) {
return fromString6(buf);
} else if (strlen(buf) == 23) {
return fromString8(buf);
}
return false;
}
//Parse user entered string into MAC address
bool MacAddress::fromString6(const char *buf) {
char cs[18]; // 17 + 1 for null terminator
char *token;
char *next; //Unused but required
int i;
strncpy(cs, buf, sizeof(cs) - 1); //strtok modifies the buffer: copy to working buffer.
for (i = 0; i < 6; i++) {
token = strtok((i == 0) ? cs : NULL, ":"); //Find first or next token
if (!token) { //No more tokens found
return false;
}
_mac.bytes[i] = strtol(token, &next, 16);
}
_type = MAC6;
return true;
}
bool MacAddress::fromString8(const char *buf) {
char cs[24]; // 23 + 1 for null terminator
char *token;
char *next; //Unused but required
int i;
strncpy(cs, buf, sizeof(cs) - 1); //strtok modifies the buffer: copy to working buffer.
for (i = 0; i < 8; i++) {
token = strtok((i == 0) ? cs : NULL, ":"); //Find first or next token
if (!token) { //No more tokens found
return false;
}
_mac.bytes[i] = strtol(token, &next, 16);
}
_type = MAC8;
return true;
}
//Copy MAC into byte array
void MacAddress::toBytes(uint8_t *buf) {
if (_type == MAC6) {
memcpy(buf, _mac.bytes, 6);
} else {
memcpy(buf, _mac.bytes, sizeof(_mac.bytes));
}
}
//Print MAC address into a C string.
//MAC: Buffer must be at least 18 chars
int MacAddress::toString(char *buf) {
if (_type == MAC6) {
return sprintf(buf, "%02X:%02X:%02X:%02X:%02X:%02X", _mac.bytes[0], _mac.bytes[1], _mac.bytes[2], _mac.bytes[3], _mac.bytes[4], _mac.bytes[5]);
} else {
return sprintf(
buf, "%02X:%02X:%02X:%02X:%02X:%02X:%02X:%02X", _mac.bytes[0], _mac.bytes[1], _mac.bytes[2], _mac.bytes[3], _mac.bytes[4], _mac.bytes[5], _mac.bytes[6],
_mac.bytes[7]
);
}
}
String MacAddress::toString() const {
uint8_t bytes = (_type == MAC6) ? 18 : 24;
char buf[bytes];
if (_type == MAC6) {
snprintf(buf, sizeof(buf), "%02X:%02X:%02X:%02X:%02X:%02X", _mac.bytes[0], _mac.bytes[1], _mac.bytes[2], _mac.bytes[3], _mac.bytes[4], _mac.bytes[5]);
} else {
snprintf(
buf, sizeof(buf), "%02X:%02X:%02X:%02X:%02X:%02X:%02X:%02X", _mac.bytes[0], _mac.bytes[1], _mac.bytes[2], _mac.bytes[3], _mac.bytes[4], _mac.bytes[5],
_mac.bytes[6], _mac.bytes[7]
);
}
return String(buf);
}
uint64_t MacAddress::Value() {
return _mac.val;
}
//Allow getting individual octets of the address. e.g. uint8_t b0 = ma[0];
uint8_t MacAddress::operator[](int index) const {
index = EnforceIndexBounds(index);
return _mac.bytes[index];
}
//Allow setting individual octets of the address. e.g. ma[2] = 255;
uint8_t &MacAddress::operator[](int index) {
index = EnforceIndexBounds(index);
return _mac.bytes[index];
}
//Overloaded copy operator: init MacAddress object from byte array
MacAddress &MacAddress::operator=(const uint8_t *macbytearray) {
// 6-bytes MacAddress only
_type = MAC6;
memset(_mac.bytes, 0, sizeof(_mac.bytes));
memcpy(_mac.bytes, macbytearray, 6);
return *this;
}
//Overloaded copy operator: init MacAddress object from uint64_t
MacAddress &MacAddress::operator=(uint64_t macval) {
// 6-bytes MacAddress only
_type = MAC6;
_mac.val = macval;
return *this;
}
//Compare class to byte array
bool MacAddress::operator==(const uint8_t *macbytearray) const {
return !memcmp(_mac.bytes, macbytearray, 6);
}
//Allow comparing value of two classes
bool MacAddress::operator==(const MacAddress &mac2) const {
return _mac.val == mac2._mac.val;
}
//Type converter object to uint64_t [same as .Value()]
MacAddress::operator uint64_t() const {
return _mac.val;
}
//Type converter object to read only pointer to mac bytes. e.g. const uint8_t *ip_8 = ma;
MacAddress::operator const uint8_t *() const {
return _mac.bytes;
}
//Type converter object to read only pointer to mac value. e.g. const uint32_t *ip_64 = ma;
MacAddress::operator const uint64_t *() const {
return &_mac.val;
}
size_t MacAddress::printTo(Print &p) const {
uint8_t bytes = (_type == MAC6) ? 6 : 8;
size_t n = 0;
for (int i = 0; i < bytes; i++) {
if (i) {
n += p.print(':');
}
n += p.printf("%02X", _mac.bytes[i]);
}
return n;
}
//Bounds checking
int MacAddress::EnforceIndexBounds(int i) const {
if (i < 0) {
return 0;
}
if (_type == MAC6) {
if (i >= 6) {
return 5;
}
} else {
if (i >= 8) {
return 7;
}
}
return i;
}
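//Illustrative sketch: because MacAddress implements printTo(), it can be handed directly
//to any Print-derived output. Serial is assumed to be available and initialized elsewhere.
static void macPrintSketch(const MacAddress &ma) {
  Serial.print("MAC: ");
  Serial.println(ma);  //routed through printTo(), prints colon-separated hex octets
}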

View file

@ -1,104 +0,0 @@
//-----------------------------------------------------------------------------
// MacAddress.h - class to make it easier to handle BSSID and MAC addresses.
//
// Copyright 2022 David McCurley
// Modified by Espressif Systems 2024
//
// Licensed under the Apache License, Version 2.0 (the "License").
// You may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//-----------------------------------------------------------------------------
#ifndef MacAddress_h
#define MacAddress_h
#include <stdint.h>
#include <WString.h>
#include <Printable.h>
enum MACType {
MAC6,
MAC8
};
// A class to make it easier to handle and pass around MAC addresses, supporting both 6-byte and 8-byte MAC addresses.
class MacAddress : public Printable {
private:
union {
uint8_t bytes[8];
uint64_t val;
} _mac;
MACType _type;
public:
//Default MAC6
MacAddress();
MacAddress(MACType mac_type);
MacAddress(uint8_t b1, uint8_t b2, uint8_t b3, uint8_t b4, uint8_t b5, uint8_t b6);
MacAddress(uint8_t b1, uint8_t b2, uint8_t b3, uint8_t b4, uint8_t b5, uint8_t b6, uint8_t b7, uint8_t b8);
MacAddress(MACType mac_type, uint64_t mac);
MacAddress(MACType mac_type, const uint8_t *macbytearray);
//Default MAC6
MacAddress(uint64_t mac) : MacAddress(MAC6, mac) {}
MacAddress(const uint8_t *macbytearray) : MacAddress(MAC6, macbytearray) {}
MacAddress(const char *macstr);
MacAddress(const String &macstr);
virtual ~MacAddress() {}
bool fromString(const char *buf);
bool fromString(const String &macstr) {
return fromString(macstr.c_str());
}
void toBytes(uint8_t *buf);
int toString(char *buf);
String toString() const;
uint64_t Value();
uint8_t operator[](int index) const;
uint8_t &operator[](int index);
//MAC6 only
MacAddress &operator=(const uint8_t *macbytearray);
MacAddress &operator=(uint64_t macval);
bool operator==(const uint8_t *macbytearray) const;
bool operator==(const MacAddress &mac2) const;
operator uint64_t() const;
operator const uint8_t *() const;
operator const uint64_t *() const;
virtual size_t printTo(Print &p) const;
// future use in Arduino Networking
/*
friend class EthernetClass;
friend class UDP;
friend class Client;
friend class Server;
friend class DhcpClass;
friend class DNSClient;
*/
protected:
bool fromString6(const char *buf);
bool fromString8(const char *buf);
private:
int EnforceIndexBounds(int i) const;
};
#endif
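//Illustrative construction sketch based on the constructors declared above; the byte
//values and names are arbitrary examples.
static void macConstructionSketch() {
  const uint8_t raw6[6] = {0xDE, 0xAD, 0xBE, 0xEF, 0x00, 0x01};
  MacAddress a(raw6);                                //byte-array constructor, defaults to MAC6
  MacAddress b(MAC8, 0x0123456789ABCDEFULL);         //8-byte address from a packed value
  MacAddress c(0xDE, 0xAD, 0xBE, 0xEF, 0x00, 0x01);  //per-octet MAC6 constructor
  (void)a; (void)b; (void)c;
}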

View file

@ -35,7 +35,8 @@ extern "C" {
// Public Methods ////////////////////////////////////////////////////////////// // Public Methods //////////////////////////////////////////////////////////////
/* default implementation: may be overridden */ /* default implementation: may be overridden */
size_t Print::write(const uint8_t *buffer, size_t size) { size_t Print::write(const uint8_t *buffer, size_t size)
{
size_t n = 0; size_t n = 0;
while(size--) { while(size--) {
n += write(*buffer++); n += write(*buffer++);
@ -43,7 +44,8 @@ size_t Print::write(const uint8_t *buffer, size_t size) {
return n; return n;
} }
size_t Print::vprintf(const char *format, va_list arg) { size_t Print::vprintf(const char *format, va_list arg)
{
char loc_buf[64]; char loc_buf[64];
char * temp = loc_buf; char * temp = loc_buf;
va_list copy; va_list copy;
@ -54,7 +56,7 @@ size_t Print::vprintf(const char *format, va_list arg) {
va_end(arg); va_end(arg);
return 0; return 0;
} }
if (len >= (int)sizeof(loc_buf)) { // comparison of same sign type for the compiler if(len >= (int)sizeof(loc_buf)){ // comparation of same sign type for the compiler
temp = (char*) malloc(len+1); temp = (char*) malloc(len+1);
if(temp == NULL) { if(temp == NULL) {
va_end(arg); va_end(arg);
@ -70,7 +72,8 @@ size_t Print::vprintf(const char *format, va_list arg) {
return len; return len;
} }
size_t Print::printf(const __FlashStringHelper *ifsh, ...) { size_t Print::printf(const __FlashStringHelper *ifsh, ...)
{
va_list arg; va_list arg;
va_start(arg, ifsh); va_start(arg, ifsh);
const char * format = (reinterpret_cast<const char *>(ifsh)); const char * format = (reinterpret_cast<const char *>(ifsh));
@ -79,7 +82,8 @@ size_t Print::printf(const __FlashStringHelper *ifsh, ...) {
return ret; return ret;
} }
size_t Print::printf(const char *format, ...) { size_t Print::printf(const char *format, ...)
{
va_list arg; va_list arg;
va_start(arg, format); va_start(arg, format);
size_t ret = vprintf(format, arg); size_t ret = vprintf(format, arg);
@ -87,31 +91,38 @@ size_t Print::printf(const char *format, ...) {
return ret; return ret;
} }
size_t Print::print(const String &s) { size_t Print::print(const String &s)
{
return write(s.c_str(), s.length()); return write(s.c_str(), s.length());
} }
size_t Print::print(const char str[]) { size_t Print::print(const char str[])
{
return write(str); return write(str);
} }
size_t Print::print(char c) { size_t Print::print(char c)
{
return write(c); return write(c);
} }
size_t Print::print(unsigned char b, int base) { size_t Print::print(unsigned char b, int base)
{
return print((unsigned long) b, base); return print((unsigned long) b, base);
} }
size_t Print::print(int n, int base) { size_t Print::print(int n, int base)
{
return print((long) n, base); return print((long) n, base);
} }
size_t Print::print(unsigned int n, int base) { size_t Print::print(unsigned int n, int base)
{
return print((unsigned long) n, base); return print((unsigned long) n, base);
} }
size_t Print::print(long n, int base) { size_t Print::print(long n, int base)
{
int t = 0; int t = 0;
if (base == 10 && n < 0) { if (base == 10 && n < 0) {
t = print('-'); t = print('-');
@ -120,7 +131,8 @@ size_t Print::print(long n, int base) {
return printNumber(static_cast<unsigned long>(n), base) + t; return printNumber(static_cast<unsigned long>(n), base) + t;
} }
size_t Print::print(unsigned long n, int base) { size_t Print::print(unsigned long n, int base)
{
if(base == 0) { if(base == 0) {
return write(n); return write(n);
} else { } else {
@ -128,7 +140,8 @@ size_t Print::print(unsigned long n, int base) {
} }
} }
size_t Print::print(long long n, int base) { size_t Print::print(long long n, int base)
{
int t = 0; int t = 0;
if (base == 10 && n < 0) { if (base == 10 && n < 0) {
t = print('-'); t = print('-');
@ -137,7 +150,8 @@ size_t Print::print(long long n, int base) {
return printNumber(static_cast<unsigned long long>(n), base) + t; return printNumber(static_cast<unsigned long long>(n), base) + t;
} }
size_t Print::print(unsigned long long n, int base) { size_t Print::print(unsigned long long n, int base)
{
if (base == 0) { if (base == 0) {
return write(n); return write(n);
} else { } else {
@ -145,15 +159,18 @@ size_t Print::print(unsigned long long n, int base) {
} }
} }
size_t Print::print(double n, int digits) { size_t Print::print(double n, int digits)
{
return printFloat(n, digits); return printFloat(n, digits);
} }
size_t Print::print(const Printable &x) { size_t Print::print(const Printable& x)
{
return x.printTo(*this); return x.printTo(*this);
} }
size_t Print::print(struct tm *timeinfo, const char *format) { size_t Print::print(struct tm * timeinfo, const char * format)
{
const char * f = format; const char * f = format;
if(!f){ if(!f){
f = "%c"; f = "%c";
@ -166,83 +183,97 @@ size_t Print::print(struct tm *timeinfo, const char *format) {
return print(buf); return print(buf);
} }
size_t Print::println(void) { size_t Print::println(void)
{
return print("\r\n"); return print("\r\n");
} }
size_t Print::println(const String &s) { size_t Print::println(const String &s)
{
size_t n = print(s); size_t n = print(s);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(const char c[]) { size_t Print::println(const char c[])
{
size_t n = print(c); size_t n = print(c);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(char c) { size_t Print::println(char c)
{
size_t n = print(c); size_t n = print(c);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(unsigned char b, int base) { size_t Print::println(unsigned char b, int base)
{
size_t n = print(b, base); size_t n = print(b, base);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(int num, int base) { size_t Print::println(int num, int base)
{
size_t n = print(num, base); size_t n = print(num, base);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(unsigned int num, int base) { size_t Print::println(unsigned int num, int base)
{
size_t n = print(num, base); size_t n = print(num, base);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(long num, int base) { size_t Print::println(long num, int base)
{
size_t n = print(num, base); size_t n = print(num, base);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(unsigned long num, int base) { size_t Print::println(unsigned long num, int base)
{
size_t n = print(num, base); size_t n = print(num, base);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(long long num, int base) { size_t Print::println(long long num, int base)
{
size_t n = print(num, base); size_t n = print(num, base);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(unsigned long long num, int base) { size_t Print::println(unsigned long long num, int base)
{
size_t n = print(num, base); size_t n = print(num, base);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(double num, int digits) { size_t Print::println(double num, int digits)
{
size_t n = print(num, digits); size_t n = print(num, digits);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(const Printable &x) { size_t Print::println(const Printable& x)
{
size_t n = print(x); size_t n = print(x);
n += println(); n += println();
return n; return n;
} }
size_t Print::println(struct tm *timeinfo, const char *format) { size_t Print::println(struct tm * timeinfo, const char * format)
{
size_t n = print(timeinfo, format); size_t n = print(timeinfo, format);
n += println(); n += println();
return n; return n;
@ -250,7 +281,8 @@ size_t Print::println(struct tm *timeinfo, const char *format) {
// Private Methods ///////////////////////////////////////////////////////////// // Private Methods /////////////////////////////////////////////////////////////
size_t Print::printNumber(unsigned long n, uint8_t base) { size_t Print::printNumber(unsigned long n, uint8_t base)
{
char buf[8 * sizeof(n) + 1]; // Assumes 8-bit chars plus zero byte. char buf[8 * sizeof(n) + 1]; // Assumes 8-bit chars plus zero byte.
char *str = &buf[sizeof(buf) - 1]; char *str = &buf[sizeof(buf) - 1];
@ -271,7 +303,8 @@ size_t Print::printNumber(unsigned long n, uint8_t base) {
return write(str); return write(str);
} }
size_t Print::printNumber(unsigned long long n, uint8_t base) { size_t Print::printNumber(unsigned long long n, uint8_t base)
{
char buf[8 * sizeof(n) + 1]; // Assumes 8-bit chars plus zero byte. char buf[8 * sizeof(n) + 1]; // Assumes 8-bit chars plus zero byte.
char* str = &buf[sizeof(buf) - 1]; char* str = &buf[sizeof(buf) - 1];
@ -293,7 +326,8 @@ size_t Print::printNumber(unsigned long long n, uint8_t base) {
return write(str); return write(str);
} }
size_t Print::printFloat(double number, uint8_t digits) { size_t Print::printFloat(double number, uint8_t digits)
{
size_t n = 0; size_t n = 0;
if(isnan(number)) { if(isnan(number)) {

View file

@ -32,37 +32,44 @@
#define OCT 8 #define OCT 8
#define BIN 2 #define BIN 2
class Print { class Print
{
private: private:
int write_error; int write_error;
size_t printNumber(unsigned long, uint8_t); size_t printNumber(unsigned long, uint8_t);
size_t printNumber(unsigned long long, uint8_t); size_t printNumber(unsigned long long, uint8_t);
size_t printFloat(double, uint8_t); size_t printFloat(double, uint8_t);
protected: protected:
void setWriteError(int err = 1) { void setWriteError(int err = 1)
{
write_error = err; write_error = err;
} }
public: public:
Print() : write_error(0) {} Print() :
write_error(0)
{
}
virtual ~Print() {} virtual ~Print() {}
int getWriteError() { int getWriteError()
{
return write_error; return write_error;
} }
void clearWriteError() { void clearWriteError()
{
setWriteError(0); setWriteError(0);
} }
virtual size_t write(uint8_t) = 0; virtual size_t write(uint8_t) = 0;
size_t write(const char *str) { size_t write(const char *str)
{
if(str == NULL) { if(str == NULL) {
return 0; return 0;
} }
return write((const uint8_t *) str, strlen(str)); return write((const uint8_t *) str, strlen(str));
} }
virtual size_t write(const uint8_t *buffer, size_t size); virtual size_t write(const uint8_t *buffer, size_t size);
size_t write(const char *buffer, size_t size) { size_t write(const char *buffer, size_t size)
{
return write((const uint8_t *) buffer, size); return write((const uint8_t *) buffer, size);
} }
@ -73,13 +80,9 @@ public:
// add availableForWrite to make compatible with Arduino Print.h // add availableForWrite to make compatible with Arduino Print.h
// default to zero, meaning "a single write may block" // default to zero, meaning "a single write may block"
// should be overridden by subclasses with buffering // should be overriden by subclasses with buffering
virtual int availableForWrite() { virtual int availableForWrite() { return 0; }
return 0; size_t print(const __FlashStringHelper *ifsh) { return print(reinterpret_cast<const char *>(ifsh)); }
}
size_t print(const __FlashStringHelper *ifsh) {
return print(reinterpret_cast<const char *>(ifsh));
}
size_t print(const String &); size_t print(const String &);
size_t print(const char[]); size_t print(const char[]);
size_t print(char); size_t print(char);
@ -94,9 +97,7 @@ public:
size_t print(const Printable&); size_t print(const Printable&);
size_t print(struct tm * timeinfo, const char * format = NULL); size_t print(struct tm * timeinfo, const char * format = NULL);
size_t println(const __FlashStringHelper *ifsh) { size_t println(const __FlashStringHelper *ifsh) { return println(reinterpret_cast<const char *>(ifsh)); }
return println(reinterpret_cast<const char *>(ifsh));
}
size_t println(const String &s); size_t println(const String &s);
size_t println(const char[]); size_t println(const char[]);
size_t println(char); size_t println(char);
@ -113,6 +114,7 @@ public:
size_t println(void); size_t println(void);
virtual void flush() { /* Empty implementation for backward compatibility */ } virtual void flush() { /* Empty implementation for backward compatibility */ }
}; };
#endif #endif
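//Illustrative sketch: a minimal Print subclass only needs to implement write(uint8_t);
//every print()/println() overload above routes through it. The class name is an example only.
class CountingPrint : public Print {
public:
  size_t count = 0;
  size_t write(uint8_t) override {
    count++;   //discard the byte, just count it
    return 1;  //report one byte "written"
  }
};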

View file

@ -30,10 +30,12 @@ class Print;
Print::print and Print::println methods. Print::print and Print::println methods.
*/ */
class Printable { class Printable
{
public: public:
virtual ~Printable() {} virtual ~Printable() {}
virtual size_t printTo(Print& p) const = 0; virtual size_t printTo(Print& p) const = 0;
}; };
#endif #endif

View file

@ -27,7 +27,12 @@
#ifndef GET_UINT32_BE #ifndef GET_UINT32_BE
#define GET_UINT32_BE(n,b,i) \ #define GET_UINT32_BE(n,b,i) \
{ (n) = ((uint32_t)(b)[(i)] << 24) | ((uint32_t)(b)[(i) + 1] << 16) | ((uint32_t)(b)[(i) + 2] << 8) | ((uint32_t)(b)[(i) + 3]); } { \
(n) = ((uint32_t) (b)[(i) ] << 24) \
| ((uint32_t) (b)[(i) + 1] << 16) \
| ((uint32_t) (b)[(i) + 2] << 8) \
| ((uint32_t) (b)[(i) + 3] ); \
}
#endif #endif
#ifndef PUT_UINT32_BE #ifndef PUT_UINT32_BE
@ -42,12 +47,18 @@
// Constants // Constants
static const uint8_t sha1_padding[64] = {0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, static const uint8_t sha1_padding[64] =
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; {
0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
// Private methods // Private methods
void SHA1Builder::process(const uint8_t *data) { void SHA1Builder::process(const uint8_t* data)
{
uint32_t temp, W[16], A, B, C, D, E; uint32_t temp, W[16], A, B, C, D, E;
GET_UINT32_BE(W[ 0], data, 0); GET_UINT32_BE(W[ 0], data, 0);
@ -69,12 +80,16 @@ void SHA1Builder::process(const uint8_t *data) {
#define sha1_S(x,n) ((x << n) | ((x & 0xFFFFFFFF) >> (32 - n))) #define sha1_S(x,n) ((x << n) | ((x & 0xFFFFFFFF) >> (32 - n)))
#define sha1_R(t) (temp = W[(t - 3) & 0x0F] ^ W[(t - 8) & 0x0F] ^ W[(t - 14) & 0x0F] ^ W[t & 0x0F], (W[t & 0x0F] = sha1_S(temp, 1))) #define sha1_R(t) \
( \
temp = W[(t - 3) & 0x0F] ^ W[(t - 8) & 0x0F] ^ \
W[(t - 14) & 0x0F] ^ W[ t & 0x0F], \
(W[t & 0x0F] = sha1_S(temp,1)) \
)
#define sha1_P(a,b,c,d,e,x) \ #define sha1_P(a,b,c,d,e,x) \
{ \ { \
e += sha1_S(a, 5) + sha1_F(b, c, d) + sha1_K + x; \ e += sha1_S(a,5) + sha1_F(b,c,d) + sha1_K + x; b = sha1_S(b,30); \
b = sha1_S(b, 30); \
} }
A = state[0]; A = state[0];
@ -200,7 +215,8 @@ void SHA1Builder::process(const uint8_t *data) {
// Public methods // Public methods
void SHA1Builder::begin(void) { void SHA1Builder::begin(void)
{
total[0] = 0; total[0] = 0;
total[1] = 0; total[1] = 0;
@ -214,11 +230,13 @@ void SHA1Builder::begin(void) {
memset(hash, 0x00, sizeof(hash)); memset(hash, 0x00, sizeof(hash));
} }
void SHA1Builder::add(const uint8_t *data, size_t len) { void SHA1Builder::add(const uint8_t* data, size_t len)
{
size_t fill; size_t fill;
uint32_t left; uint32_t left;
if (len == 0) { if(len == 0)
{
return; return;
} }
@ -228,11 +246,13 @@ void SHA1Builder::add(const uint8_t *data, size_t len) {
total[0] += (uint32_t) len; total[0] += (uint32_t) len;
total[0] &= 0xFFFFFFFF; total[0] &= 0xFFFFFFFF;
if (total[0] < (uint32_t)len) { if(total[0] < (uint32_t) len)
{
total[1]++; total[1]++;
} }
if (left && len >= fill) { if(left && len >= fill)
{
memcpy((void *) (buffer + left), data, fill); memcpy((void *) (buffer + left), data, fill);
process(buffer); process(buffer);
data += fill; data += fill;
@ -240,7 +260,8 @@ void SHA1Builder::add(const uint8_t *data, size_t len) {
left = 0; left = 0;
} }
while (len >= 64) { while(len >= 64)
{
process(data); process(data);
data += 64; data += 64;
len -= 64; len -= 64;
@ -251,7 +272,8 @@ void SHA1Builder::add(const uint8_t *data, size_t len) {
} }
} }
void SHA1Builder::addHexString(const char *data) { void SHA1Builder::addHexString(const char * data)
{
uint16_t len = strlen(data); uint16_t len = strlen(data);
uint8_t * tmp = (uint8_t*)malloc(len/2); uint8_t * tmp = (uint8_t*)malloc(len/2);
if(tmp == NULL) { if(tmp == NULL) {
@ -262,7 +284,8 @@ void SHA1Builder::addHexString(const char *data) {
free(tmp); free(tmp);
} }
bool SHA1Builder::addStream(Stream &stream, const size_t maxLen) { bool SHA1Builder::addStream(Stream & stream, const size_t maxLen)
{
const int buf_size = 512; const int buf_size = 512;
int maxLengthLeft = maxLen; int maxLengthLeft = maxLen;
uint8_t * buf = (uint8_t*) malloc(buf_size); uint8_t * buf = (uint8_t*) malloc(buf_size);
@ -301,7 +324,8 @@ bool SHA1Builder::addStream(Stream &stream, const size_t maxLen) {
return true; return true;
} }
void SHA1Builder::calculate(void) { void SHA1Builder::calculate(void)
{
uint32_t last, padn; uint32_t last, padn;
uint32_t high, low; uint32_t high, low;
uint8_t msglen[8]; uint8_t msglen[8];
@ -325,15 +349,18 @@ void SHA1Builder::calculate(void) {
PUT_UINT32_BE(state[4], hash, 16); PUT_UINT32_BE(state[4], hash, 16);
} }
void SHA1Builder::getBytes(uint8_t *output) { void SHA1Builder::getBytes(uint8_t * output)
{
memcpy(output, hash, SHA1_HASH_SIZE); memcpy(output, hash, SHA1_HASH_SIZE);
} }
void SHA1Builder::getChars(char *output) { void SHA1Builder::getChars(char * output)
{
bytes2hex(output, SHA1_HASH_SIZE*2+1, hash, SHA1_HASH_SIZE); bytes2hex(output, SHA1_HASH_SIZE*2+1, hash, SHA1_HASH_SIZE);
} }
String SHA1Builder::toString(void) { String SHA1Builder::toString(void)
{
char out[(SHA1_HASH_SIZE * 2) + 1]; char out[(SHA1_HASH_SIZE * 2) + 1];
getChars(out); getChars(out);
return String(out); return String(out);
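//Illustrative sketch of the SHA1Builder call sequence shown above; the function name
//is an example only.
static String sha1OfBuffer(const uint8_t *data, size_t len) {
  SHA1Builder sha;
  sha.begin();            //reset totals, state and buffer
  sha.add(data, len);     //feed the data; may be called repeatedly
  sha.calculate();        //apply padding and the length trailer
  return sha.toString();  //40-character hex digest as a String
}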

View file

@ -22,7 +22,8 @@
#define SHA1_HASH_SIZE 20 #define SHA1_HASH_SIZE 20
class SHA1Builder : public HashBuilder { class SHA1Builder : public HashBuilder
{
private: private:
uint32_t total[2]; /* number of bytes processed */ uint32_t total[2]; /* number of bytes processed */
uint32_t state[5]; /* intermediate digest state */ uint32_t state[5]; /* intermediate digest state */

View file

@ -22,9 +22,10 @@
#include "Print.h" #include "Print.h"
class Server : public Print { class Server: public Print
{
public: public:
virtual void begin() = 0; virtual void begin(uint16_t port=0) =0;
}; };
#endif #endif

View file

@ -18,17 +18,18 @@
Created July 2011 Created July 2011
parsing functions based on TextFinder library by Michael Margolis parsing functions based on TextFinder library by Michael Margolis
findMulti/findUntil routines written by Jim Leonard/Xuth
*/ */
#include "Arduino.h" #include "Arduino.h"
#include "Stream.h" #include "Stream.h"
#include "esp32-hal.h"
#define PARSE_TIMEOUT 1000 // default number of milli-seconds to wait #define PARSE_TIMEOUT 1000 // default number of milli-seconds to wait
#define NO_SKIP_CHAR 1 // a magic char not found in a valid ASCII numeric field
// private method to read stream with timeout // private method to read stream with timeout
int Stream::timedRead() { int Stream::timedRead()
{
int c; int c;
_startMillis = millis(); _startMillis = millis();
do { do {
@ -41,7 +42,8 @@ int Stream::timedRead() {
} }
// private method to peek stream with timeout // private method to peek stream with timeout
int Stream::timedPeek() { int Stream::timedPeek()
{
int c; int c;
_startMillis = millis(); _startMillis = millis();
do { do {
@ -55,26 +57,19 @@ int Stream::timedPeek() {
// returns peek of the next digit in the stream or -1 if timeout // returns peek of the next digit in the stream or -1 if timeout
// discards non-numeric characters // discards non-numeric characters
int Stream::peekNextDigit(LookaheadMode lookahead, bool detectDecimal) { int Stream::peekNextDigit()
{
int c; int c;
while(1) { while(1) {
c = timedPeek(); c = timedPeek();
if(c < 0) {
if (c < 0 || c == '-' || (c >= '0' && c <= '9') || (detectDecimal && c == '.')) { return c; // timeout
}
if(c == '-') {
return c; return c;
} }
if(c >= '0' && c <= '9') {
switch (lookahead) { return c;
case SKIP_NONE: return -1; // Fail code.
case SKIP_WHITESPACE:
switch (c) {
case ' ':
case '\t':
case '\r':
case '\n': break;
default: return -1; // Fail code.
}
case SKIP_ALL: break;
} }
read(); // discard non-numeric read(); // discard non-numeric
} }
@ -87,205 +82,78 @@ void Stream::setTimeout(unsigned long timeout) // sets the maximum number of mi
{ {
_timeout = timeout; _timeout = timeout;
} }
unsigned long Stream::getTimeout(void) {
return _timeout;
}
// find returns true if the target string is found // find returns true if the target string is found
bool Stream::find(const char *target) { bool Stream::find(const char *target)
{
return findUntil(target, strlen(target), NULL, 0); return findUntil(target, strlen(target), NULL, 0);
} }
// reads data from the stream until the target string of given length is found // reads data from the stream until the target string of given length is found
// returns true if target string is found, false if timed out // returns true if target string is found, false if timed out
bool Stream::find(const char *target, size_t length) { bool Stream::find(const char *target, size_t length)
{
return findUntil(target, length, NULL, 0); return findUntil(target, length, NULL, 0);
} }
// as find but search ends if the terminator string is found // as find but search ends if the terminator string is found
bool Stream::findUntil(const char *target, const char *terminator) { bool Stream::findUntil(const char *target, const char *terminator)
{
return findUntil(target, strlen(target), terminator, strlen(terminator)); return findUntil(target, strlen(target), terminator, strlen(terminator));
} }
// reads data from the stream until the target string of the given length is found // reads data from the stream until the target string of the given length is found
// search terminated if the terminator string is found // search terminated if the terminator string is found
// returns true if target string is found, false if terminated or timed out // returns true if target string is found, false if terminated or timed out
bool Stream::findUntil(const char *target, size_t targetLen, const char *terminator, size_t termLen) { bool Stream::findUntil(const char *target, size_t targetLen, const char *terminator, size_t termLen)
{
if (terminator == NULL) { if (terminator == NULL) {
MultiTarget t[1] = {{target, targetLen, 0}}; MultiTarget t[1] = {{target, targetLen, 0}};
return findMulti(t, 1) == 0; return findMulti(t, 1) == 0 ? true : false;
} else { } else {
MultiTarget t[2] = {{target, targetLen, 0}, {terminator, termLen, 0}}; MultiTarget t[2] = {{target, targetLen, 0}, {terminator, termLen, 0}};
return findMulti(t, 2) == 0; return findMulti(t, 2) == 0 ? true : false;
} }
} }
// returns the first valid (long) integer value from the current position.
// lookahead determines how parseInt looks ahead in the stream.
// See LookaheadMode enumeration at the top of the file.
// Lookahead is terminated by the first character that is not a valid part of an integer.
// Once parsing commences, 'ignore' will be skipped in the stream.
long Stream::parseInt(LookaheadMode lookahead, char ignore) {
bool isNegative = false;
long value = 0;
int c;
c = peekNextDigit(lookahead, false);
// ignore non numeric leading characters
if (c < 0) {
return 0; // zero returned if timeout
}
do {
if ((char)c == ignore)
; // ignore this character
else if (c == '-') {
isNegative = true;
} else if (c >= '0' && c <= '9') { // is c a digit?
value = value * 10 + c - '0';
}
read(); // consume the character we got with peek
c = timedPeek();
} while ((c >= '0' && c <= '9') || (char)c == ignore);
if (isNegative) {
value = -value;
}
return value;
}
// as parseInt but returns a floating point value
float Stream::parseFloat(LookaheadMode lookahead, char ignore) {
bool isNegative = false;
bool isFraction = false;
double value = 0.0;
int c;
double fraction = 1.0;
c = peekNextDigit(lookahead, true);
// ignore non numeric leading characters
if (c < 0) {
return 0; // zero returned if timeout
}
do {
if ((char)c == ignore)
; // ignore
else if (c == '-') {
isNegative = true;
} else if (c == '.') {
isFraction = true;
} else if (c >= '0' && c <= '9') { // is c a digit?
if (isFraction) {
fraction *= 0.1;
value = value + fraction * (c - '0');
} else {
value = value * 10 + c - '0';
}
}
read(); // consume the character we got with peek
c = timedPeek();
} while ((c >= '0' && c <= '9') || (c == '.' && !isFraction) || (char)c == ignore);
if (isNegative) {
value = -value;
}
return value;
}
// read characters from stream into buffer
// terminates if length characters have been read, or timeout (see setTimeout)
// returns the number of characters placed in the buffer
// the buffer is NOT null terminated.
//
size_t Stream::readBytes(char *buffer, size_t length) {
size_t count = 0;
while (count < length) {
int c = timedRead();
if (c < 0) {
break;
}
*buffer++ = (char)c;
count++;
}
return count;
}
// as readBytes with terminator character
// terminates if length characters have been read, timeout, or if the terminator character detected
// returns the number of characters placed in the buffer (0 means no valid data found)
size_t Stream::readBytesUntil(char terminator, char *buffer, size_t length) {
size_t index = 0;
while (index < length) {
int c = timedRead();
if (c < 0 || (char)c == terminator) {
break;
}
*buffer++ = (char)c;
index++;
}
return index; // return number of characters, not including null terminator
}
String Stream::readString() {
String ret;
int c = timedRead();
while (c >= 0) {
ret += (char)c;
c = timedRead();
}
return ret;
}
String Stream::readStringUntil(char terminator) {
String ret;
int c = timedRead();
while (c >= 0 && (char)c != terminator) {
ret += (char)c;
c = timedRead();
}
return ret;
}
int Stream::findMulti( struct Stream::MultiTarget *targets, int tCount) { int Stream::findMulti( struct Stream::MultiTarget *targets, int tCount) {
// any zero length target string automatically matches and would make // any zero length target string automatically matches and would make
// a mess of the rest of the algorithm. // a mess of the rest of the algorithm.
for (struct MultiTarget *t = targets; t < targets+tCount; ++t) { for (struct MultiTarget *t = targets; t < targets+tCount; ++t) {
if (t->len <= 0) { if (t->len <= 0)
return t - targets; return t - targets;
} }
}
while (1) { while (1) {
int c = timedRead(); int c = timedRead();
if (c < 0) { if (c < 0)
return -1; return -1;
}
for (struct MultiTarget *t = targets; t < targets+tCount; ++t) { for (struct MultiTarget *t = targets; t < targets+tCount; ++t) {
// the simple case is if we match, deal with that first. // the simple case is if we match, deal with that first.
if ((char)c == t->str[t->index]) { if (c == t->str[t->index]) {
if (++t->index == t->len) { if (++t->index == t->len)
return t - targets; return t - targets;
} else { else
continue; continue;
} }
}
// if not we need to walk back and see if we could have matched further // if not we need to walk back and see if we could have matched further
// down the stream (ie '1112' doesn't match the first position in '11112' // down the stream (ie '1112' doesn't match the first position in '11112'
// but it will match the second position so we can't just reset the current // but it will match the second position so we can't just reset the current
// index to 0 when we find a mismatch. // index to 0 when we find a mismatch.
if (t->index == 0) { if (t->index == 0)
continue; continue;
}
int origIndex = t->index; int origIndex = t->index;
do { do {
--t->index; --t->index;
// first check if current char works against the new current index // first check if current char works against the new current index
if ((char)c != t->str[t->index]) { if (c != t->str[t->index])
continue; continue;
}
// if it's the only char then we're good, nothing more to check // if it's the only char then we're good, nothing more to check
if (t->index == 0) { if (t->index == 0) {
@ -297,10 +165,9 @@ int Stream::findMulti(struct Stream::MultiTarget *targets, int tCount) {
int diff = origIndex - t->index; int diff = origIndex - t->index;
size_t i; size_t i;
for (i = 0; i < t->index; ++i) { for (i = 0; i < t->index; ++i) {
if (t->str[i] != t->str[i + diff]) { if (t->str[i] != t->str[i + diff])
break; break;
} }
}
// if we successfully got through the previous loop then our current // if we successfully got through the previous loop then our current
// index is good. // index is good.
@ -316,3 +183,155 @@ int Stream::findMulti(struct Stream::MultiTarget *targets, int tCount) {
// unreachable // unreachable
return -1; return -1;
} }
// returns the first valid (long) integer value from the current position.
// initial characters that are not digits (or the minus sign) are skipped
// function is terminated by the first character that is not a digit.
long Stream::parseInt()
{
return parseInt(NO_SKIP_CHAR); // terminate on first non-digit character (or timeout)
}
// as above but a given skipChar is ignored
// this allows format characters (typically commas) in values to be ignored
long Stream::parseInt(char skipChar)
{
boolean isNegative = false;
long value = 0;
int c;
c = peekNextDigit();
// ignore non numeric leading characters
if(c < 0) {
return 0; // zero returned if timeout
}
do {
if(c == skipChar) {
} // ignore this charactor
else if(c == '-') {
isNegative = true;
} else if(c >= '0' && c <= '9') { // is c a digit?
value = value * 10 + c - '0';
}
read(); // consume the character we got with peek
c = timedPeek();
} while((c >= '0' && c <= '9') || c == skipChar);
if(isNegative) {
value = -value;
}
return value;
}
// as parseInt but returns a floating point value
float Stream::parseFloat()
{
return parseFloat(NO_SKIP_CHAR);
}
// as above but the given skipChar is ignored
// this allows format characters (typically commas) in values to be ignored
float Stream::parseFloat(char skipChar)
{
boolean isNegative = false;
boolean isFraction = false;
long value = 0;
int c;
float fraction = 1.0;
c = peekNextDigit();
// ignore non numeric leading characters
if(c < 0) {
return 0; // zero returned if timeout
}
do {
if(c == skipChar) {
} // ignore
else if(c == '-') {
isNegative = true;
} else if(c == '.') {
isFraction = true;
} else if(c >= '0' && c <= '9') { // is c a digit?
value = value * 10 + c - '0';
if(isFraction) {
fraction *= 0.1f;
}
}
read(); // consume the character we got with peek
c = timedPeek();
} while((c >= '0' && c <= '9') || c == '.' || c == skipChar);
if(isNegative) {
value = -value;
}
if(isFraction) {
return value * fraction;
} else {
return value;
}
}
// read characters from stream into buffer
// terminates if length characters have been read, or timeout (see setTimeout)
// returns the number of characters placed in the buffer
// the buffer is NOT null terminated.
//
size_t Stream::readBytes(char *buffer, size_t length)
{
size_t count = 0;
while(count < length) {
int c = timedRead();
if(c < 0) {
break;
}
*buffer++ = (char) c;
count++;
}
return count;
}
// as readBytes with terminator character
// terminates if length characters have been read, timeout, or if the terminator character detected
// returns the number of characters placed in the buffer (0 means no valid data found)
size_t Stream::readBytesUntil(char terminator, char *buffer, size_t length)
{
if(length < 1) {
return 0;
}
size_t index = 0;
while(index < length) {
int c = timedRead();
if(c < 0 || c == terminator) {
break;
}
*buffer++ = (char) c;
index++;
}
return index; // return number of characters, not including null terminator
}
String Stream::readString()
{
String ret;
int c = timedRead();
while(c >= 0) {
ret += (char) c;
c = timedRead();
}
return ret;
}
String Stream::readStringUntil(char terminator)
{
String ret;
int c = timedRead();
while(c >= 0 && c != terminator) {
ret += (char) c;
c = timedRead();
}
return ret;
}
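//Illustrative sketch of the Stream parsing helpers above, using Serial as a concrete
//Stream; Serial is assumed to be initialized elsewhere and the values read are examples.
static void streamParsingSketch() {
  Serial.setTimeout(500);                      //wait at most 500 ms per timed read
  long id = Serial.parseInt();                 //first integer found in the input
  float value = Serial.parseFloat();           //first float after that
  String rest = Serial.readStringUntil('\n');  //remainder of the line
  (void)id; (void)value; (void)rest;
}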

View file

@ -19,101 +19,95 @@
parsing functions based on TextFinder library by Michael Margolis parsing functions based on TextFinder library by Michael Margolis
*/ */
#pragma once #ifndef Stream_h
#define Stream_h
#include <inttypes.h> #include <inttypes.h>
#include "Print.h" #include "Print.h"
// compatibility macros for testing // compatability macros for testing
/* /*
#define getInt() parseInt() #define getInt() parseInt()
#define getInt(ignore) parseInt(ignore) #define getInt(skipChar) parseInt(skipchar)
#define getFloat() parseFloat() #define getFloat() parseFloat()
#define getFloat(ignore) parseFloat(ignore) #define getFloat(skipChar) parseFloat(skipChar)
#define getString( pre_string, post_string, buffer, length) #define getString( pre_string, post_string, buffer, length)
readBytesBetween( pre_string, terminator, buffer, length) readBytesBetween( pre_string, terminator, buffer, length)
*/ */
// This enumeration provides the lookahead options for parseInt(), parseFloat() class Stream: public Print
// The rules set out here are used until either the first valid character is found {
// or a time out occurs due to lack of input.
enum LookaheadMode {
SKIP_ALL, // All invalid characters are ignored.
SKIP_NONE, // Nothing is skipped, and the stream is not touched unless the first waiting character is valid.
SKIP_WHITESPACE // Only tabs, spaces, line feeds & carriage returns are skipped.
};
#define NO_IGNORE_CHAR '\x01' // a char not found in a valid ASCII numeric field
class Stream : public Print {
protected: protected:
unsigned long _timeout; // number of milliseconds to wait for the next char before aborting timed read unsigned long _timeout; // number of milliseconds to wait for the next char before aborting timed read
unsigned long _startMillis; // used for timeout measurement unsigned long _startMillis; // used for timeout measurement
int timedRead(); // private method to read stream with timeout int timedRead(); // private method to read stream with timeout
int timedPeek(); // private method to peek stream with timeout int timedPeek(); // private method to peek stream with timeout
int peekNextDigit(LookaheadMode lookahead, bool detectDecimal); // returns the next numeric digit in the stream or -1 if timeout int peekNextDigit(); // returns the next numeric digit in the stream or -1 if timeout
public: public:
virtual int available() = 0; virtual int available() = 0;
virtual int read() = 0; virtual int read() = 0;
virtual int peek() = 0; virtual int peek() = 0;
Stream() { Stream():_startMillis(0)
{
_timeout = 1000; _timeout = 1000;
} }
virtual ~Stream() {}
// parsing methods // parsing methods
void setTimeout(unsigned long timeout); // sets maximum milliseconds to wait for stream data, default is 1 second void setTimeout(unsigned long timeout); // sets maximum milliseconds to wait for stream data, default is 1 second
unsigned long getTimeout(void) { unsigned long getTimeout(void);
return _timeout;
}
bool find(const char *target); // reads data from the stream until the target string is found bool find(const char *target); // reads data from the stream until the target string is found
bool find(const uint8_t *target) { bool find(uint8_t *target)
return find((const char *)target); {
return find((char *) target);
} }
// returns true if target string is found, false if timed out (see setTimeout) // returns true if target string is found, false if timed out (see setTimeout)
bool find(const char *target, size_t length); // reads data from the stream until the target string of given length is found bool find(const char *target, size_t length); // reads data from the stream until the target string of given length is found
bool find(const uint8_t *target, size_t length) { bool find(const uint8_t *target, size_t length)
return find((const char *)target, length); {
return find((char *) target, length);
} }
// returns true if target string is found, false if timed out // returns true if target string is found, false if timed out
bool find(char target) { bool find(char target)
{
return find (&target, 1); return find (&target, 1);
} }
bool findUntil(const char *target, const char *terminator); // as find but search ends if the terminator string is found bool findUntil(const char *target, const char *terminator); // as find but search ends if the terminator string is found
bool findUntil(const uint8_t *target, const char *terminator) { bool findUntil(const uint8_t *target, const char *terminator)
return findUntil((const char *)target, terminator); {
return findUntil((char *) target, terminator);
} }
bool findUntil(const char *target, size_t targetLen, const char *terminate, size_t termLen); // as above but search ends if the terminate string is found bool findUntil(const char *target, size_t targetLen, const char *terminate, size_t termLen); // as above but search ends if the terminate string is found
bool findUntil(const uint8_t *target, size_t targetLen, const char *terminate, size_t termLen) { bool findUntil(const uint8_t *target, size_t targetLen, const char *terminate, size_t termLen)
return findUntil((const char *)target, targetLen, terminate, termLen); {
return findUntil((char *) target, targetLen, terminate, termLen);
} }
long parseInt(LookaheadMode lookahead = SKIP_ALL, char ignore = NO_IGNORE_CHAR); long parseInt(); // returns the first valid (long) integer value from the current position.
// returns the first valid (long) integer value from the current position. // initial characters that are not digits (or the minus sign) are skipped
// lookahead determines how parseInt looks ahead in the stream. // integer is terminated by the first character that is not a digit.
// See LookaheadMode enumeration at the top of the file.
// Lookahead is terminated by the first character that is not a valid part of an integer.
// Once parsing commences, 'ignore' will be skipped in the stream.
float parseFloat(LookaheadMode lookahead = SKIP_ALL, char ignore = NO_IGNORE_CHAR); float parseFloat(); // float version of parseInt
// float version of parseInt
virtual size_t readBytes(char *buffer, size_t length); // read chars from stream into buffer virtual size_t readBytes(char *buffer, size_t length); // read chars from stream into buffer
virtual size_t readBytes(uint8_t *buffer, size_t length) { virtual size_t readBytes(uint8_t *buffer, size_t length)
{
return readBytes((char *) buffer, length); return readBytes((char *) buffer, length);
} }
// terminates if length characters have been read or timeout (see setTimeout) // terminates if length characters have been read or timeout (see setTimeout)
// returns the number of characters placed in the buffer (0 means no valid data found) // returns the number of characters placed in the buffer (0 means no valid data found)
size_t readBytesUntil(char terminator, char *buffer, size_t length); // as readBytes with terminator character size_t readBytesUntil(char terminator, char *buffer, size_t length); // as readBytes with terminator character
size_t readBytesUntil(char terminator, uint8_t *buffer, size_t length) { size_t readBytesUntil(char terminator, uint8_t *buffer, size_t length)
{
return readBytesUntil(terminator, (char *) buffer, length); return readBytesUntil(terminator, (char *) buffer, length);
} }
// terminates if length characters have been read, timeout, or if the terminator character detected // terminates if length characters have been read, timeout, or if the terminator character detected
@ -124,15 +118,11 @@ public:
String readStringUntil(char terminator); String readStringUntil(char terminator);
protected: protected:
long parseInt(char ignore) { long parseInt(char skipChar); // as above but the given skipChar is ignored
return parseInt(SKIP_ALL, ignore); // as above but the given skipChar is ignored
} // this allows format characters (typically commas) in values to be ignored
float parseFloat(char ignore) {
return parseFloat(SKIP_ALL, ignore); float parseFloat(char skipChar); // as above but the given skipChar is ignored
}
// These overload exists for compatibility with any class that has derived
// Stream and used parseFloat/Int with a custom ignore character. To keep
// the public API simple, these overload remains protected.
struct MultiTarget { struct MultiTarget {
const char *str; // string you're searching for const char *str; // string you're searching for
@ -143,6 +133,7 @@ protected:
// This allows you to search for an arbitrary number of strings. // This allows you to search for an arbitrary number of strings.
// Returns index of the target that is found first or -1 if timeout occurs. // Returns index of the target that is found first or -1 if timeout occurs.
int findMulti(struct MultiTarget *targets, int tCount); int findMulti(struct MultiTarget *targets, int tCount);
}; };
#undef NO_IGNORE_CHAR #endif

View file

@ -49,6 +49,7 @@ int StreamString::read() {
char c = charAt(0); char c = charAt(0);
remove(0, 1); remove(0, 1);
return c; return c;
} }
return -1; return -1;
} }
@ -61,4 +62,6 @@ int StreamString::peek() {
return -1; return -1;
} }
void StreamString::flush() {} void StreamString::flush() {
}

Some files were not shown because too many files have changed in this diff.