Compare commits

...

No commits in common. "master" and "ci-wippersnapper" have entirely different histories.

2031 changed files with 4847 additions and 586430 deletions


@@ -1,246 +0,0 @@
# Clang format version: 18.1.3
---
BasedOnStyle: LLVM
AccessModifierOffset: -2
AlignAfterOpenBracket: BlockIndent
AlignArrayOfStructures: None
AlignConsecutiveAssignments:
Enabled: false
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: true
AlignConsecutiveBitFields:
Enabled: true
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: false
AlignConsecutiveDeclarations:
Enabled: false
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: false
AlignConsecutiveMacros:
Enabled: true
AcrossEmptyLines: false
AcrossComments: false
AlignCompound: false
AlignFunctionPointers: false
PadOperators: false
AlignConsecutiveShortCaseStatements:
Enabled: true
AcrossEmptyLines: false
AcrossComments: false
AlignCaseColons: false
AlignEscapedNewlines: Left
AlignOperands: Align
AlignTrailingComments:
Kind: Always
OverEmptyLines: 0
AllowAllArgumentsOnNextLine: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowBreakBeforeNoexceptSpecifier: Never
AllowShortBlocksOnASingleLine: Empty
AllowShortCaseLabelsOnASingleLine: true
AllowShortCompoundRequirementOnASingleLine: true
AllowShortEnumsOnASingleLine: false
AllowShortFunctionsOnASingleLine: Empty
AllowShortIfStatementsOnASingleLine: Never
AllowShortLambdasOnASingleLine: Empty
AllowShortLoopsOnASingleLine: true
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: MultiLine
AttributeMacros:
- __capability
BinPackArguments: true
BinPackParameters: true
BitFieldColonSpacing: Both
BraceWrapping:
AfterCaseLabel: true
AfterClass: false
AfterControlStatement: Never
AfterEnum: false
AfterFunction: false
AfterNamespace: false
AfterObjCDeclaration: false
AfterStruct: false
AfterUnion: false
AfterExternBlock: false
BeforeCatch: false
BeforeElse: false
BeforeLambdaBody: false
BeforeWhile: false
IndentBraces: false
SplitEmptyFunction: false
SplitEmptyRecord: true
SplitEmptyNamespace: true
BreakAdjacentStringLiterals: true
BreakAfterAttributes: Always
BreakAfterJavaFieldAnnotations: false
BreakArrays: false
BreakBeforeBinaryOperators: NonAssignment
BreakBeforeBraces: Custom
BreakBeforeConceptDeclarations: Always
BreakBeforeInlineASMColon: OnlyMultiline
BreakBeforeTernaryOperators: true
BreakConstructorInitializers: BeforeColon
BreakInheritanceList: BeforeColon
BreakStringLiterals: true
ColumnLimit: 160
CommentPragmas: ""
CompactNamespaces: false
ConstructorInitializerIndentWidth: 2
ContinuationIndentWidth: 2
Cpp11BracedListStyle: true
DerivePointerAlignment: false
DisableFormat: false
EmptyLineAfterAccessModifier: Never
EmptyLineBeforeAccessModifier: LogicalBlock
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros:
- foreach
- Q_FOREACH
- BOOST_FOREACH
IfMacros:
- KJ_IF_MAYBE
IncludeBlocks: Preserve
IncludeCategories:
- Regex: ^"(llvm|llvm-c|clang|clang-c)/
Priority: 2
SortPriority: 0
CaseSensitive: false
- Regex: ^(<|"(gtest|gmock|isl|json)/)
Priority: 3
SortPriority: 0
CaseSensitive: false
- Regex: .*
Priority: 1
SortPriority: 0
CaseSensitive: false
IncludeIsMainRegex: ""
IncludeIsMainSourceRegex: ""
IndentAccessModifiers: false
IndentCaseBlocks: false
IndentCaseLabels: true
IndentExternBlock: NoIndent
IndentGotoLabels: false
IndentPPDirectives: None
IndentRequiresClause: false
IndentWidth: 2
IndentWrappedFunctionNames: true
InsertBraces: true
InsertNewlineAtEOF: true
InsertTrailingCommas: None
IntegerLiteralSeparator:
Binary: 0
BinaryMinDigits: 0
Decimal: 0
DecimalMinDigits: 0
Hex: 0
HexMinDigits: 0
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtEOF: false
KeepEmptyLinesAtTheStartOfBlocks: true
LambdaBodyIndentation: Signature
Language: Cpp
LineEnding: LF
MacroBlockBegin: ""
MacroBlockEnd: ""
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCBinPackProtocolList: Auto
ObjCBlockIndentWidth: 2
ObjCBreakBeforeNestedBlockParam: true
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: true
PPIndentWidth: -1
PackConstructorInitializers: BinPack
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 19
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakOpenParenthesis: 0
PenaltyBreakScopeResolution: 500
PenaltyBreakString: 1000
PenaltyBreakTemplateDeclaration: 10
PenaltyExcessCharacter: 1000000
PenaltyIndentedWhitespace: 0
PenaltyReturnTypeOnItsOwnLine: 60
PointerAlignment: Right
QualifierAlignment: Leave
ReferenceAlignment: Pointer
ReflowComments: false
RemoveBracesLLVM: false
RemoveParentheses: Leave
RemoveSemicolon: false
RequiresClausePosition: OwnLine
RequiresExpressionIndentation: OuterScope
SeparateDefinitionBlocks: Leave
ShortNamespaceLines: 1
SkipMacroDefinitionBody: false
SortIncludes: Never
SortJavaStaticImport: Before
SortUsingDeclarations: LexicographicNumeric
SpaceAfterCStyleCast: false
SpaceAfterLogicalNot: false
SpaceAfterTemplateKeyword: false
SpaceAroundPointerQualifiers: Default
SpaceBeforeAssignmentOperators: true
SpaceBeforeCaseColon: false
SpaceBeforeCpp11BracedList: false
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpaceBeforeJsonColon: false
SpaceBeforeParens: ControlStatements
SpaceBeforeParensOptions:
AfterControlStatements: true
AfterForeachMacros: true
AfterFunctionDeclarationName: false
AfterFunctionDefinitionName: false
AfterIfMacros: true
AfterOverloadedOperator: true
AfterPlacementOperator: true
AfterRequiresInClause: false
AfterRequiresInExpression: false
BeforeNonEmptyParentheses: false
SpaceBeforeRangeBasedForLoopColon: true
SpaceBeforeSquareBrackets: false
SpaceInEmptyBlock: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: Never
SpacesInContainerLiterals: false
SpacesInLineCommentPrefix:
Minimum: 1
Maximum: -1
SpacesInParens: Never
SpacesInParensOptions:
InConditionalStatements: false
InCStyleCasts: false
InEmptyParentheses: false
Other: false
SpacesInSquareBrackets: false
Standard: Auto
StatementAttributeLikeMacros:
- Q_EMIT
StatementMacros:
- Q_UNUSED
- QT_REQUIRE_VERSION
TabWidth: 2
UseTab: Never
VerilogBreakBetweenInstancePorts: true
WhitespaceSensitiveMacros:
- BOOST_PP_STRINGIZE
- CF_SWIFT_NAME
- NS_SWIFT_NAME
- PP_STRINGIZE
- STRINGIZE
BracedInitializerIndentWidth: 2


@@ -1,8 +0,0 @@
[codespell]
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/spell-check/.codespellrc
# In the event of a false positive, add the problematic word, in all lowercase, to a comma-separated list here:
ignore-words-list = ba,licence,ot,dout,als,exten,emac
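# Hypothetical example: if codespell ever flags an intentional token such as "ans", append it to the list above, e.g. "ignore-words-list = ba,licence,ot,dout,als,exten,emac,ans".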
skip = ./.git,./.licenses,__pycache__,.clang-format,.codespellrc,.editorconfig,.flake8,.prettierignore,.yamllint.yml,.gitignore,boards.txt,platform.txt,programmers.txt
builtin = clear,informal,en-GB_to_en-US
check-filenames =
check-hidden =


@@ -1,60 +0,0 @@
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/general/.editorconfig
# See: https://editorconfig.org/
# The formatting style defined in this file is the official standardized style to be used in all Arduino Tooling
# projects and should not be modified.
# Note: indent style for each file type is defined even when it matches the universal config in order to make it clear
# that this type has an official style.
[*]
charset = utf-8
end_of_line = lf
indent_size = 2
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
[*.{adoc,asc,asciidoc}]
indent_size = 2
indent_style = space
[*.{bash,sh}]
indent_size = 4
indent_style = space
[*.{c,cc,cp,cpp,cxx,h,hh,hpp,hxx,ii,inl,ino,ixx,pde,tpl,tpp,txx}]
indent_size = 2
indent_style = space
[*.{go,mod}]
indent_style = tab
[*.java]
indent_size = 2
indent_style = space
[*.{js,jsx,json,jsonc,json5,ts,tsx}]
indent_size = 2
indent_style = space
[*.{md,mdx,mkdn,mdown,markdown}]
indent_size = unset
indent_style = space
[*.proto]
indent_size = 2
indent_style = space
[*.py]
indent_size = 4
indent_style = space
[*.svg]
indent_size = 2
indent_style = space
[*.{yaml,yml}]
indent_size = 2
indent_style = space
[{.gitconfig,.gitmodules}]
indent_style = tab

.flake8

@@ -1,10 +0,0 @@
# Source: https://github.com/arduino/tooling-project-assets/blob/main/workflow-templates/assets/check-python/.flake8
# See: https://flake8.pycqa.org/en/latest/user/configuration.html
[flake8]
doctests = True
# W503 and W504 are mutually exclusive. PEP 8 recommends line break before.
ignore = W503,E203
max-complexity = 20
max-line-length = 120
select = E,W,F,C,N

.github/CODEOWNERS

@@ -1,81 +0,0 @@
# CODEOWNERS for ESP32 Arduino Core
# This file is used to specify the code owners for the ESP32 Arduino Core.
# Read more about CODEOWNERS:
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners
# Note that order matters. The last matching pattern will be used.
# The default owners are the active developers of the ESP32 Arduino Core.
# Refrain from using @espressif/arduino-esp32 to avoid spamming non-developers with review requests.
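# For example, a file under /tests/ matches both the "*" rule below and the later "/tests/" rule;
# because the last matching pattern wins, only @lucasssvaz and @P-R-O-C-H-Y are requested for review.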
* @espressif/arduino-devs
# CI
/.github/ @lucasssvaz @me-no-dev @P-R-O-C-H-Y
/.github/codeql/ @lucasssvaz
/.gitlab/ @lucasssvaz
/tests/ @lucasssvaz @P-R-O-C-H-Y
# Tools
/tools/ @me-no-dev
/tools/pre-commit/ @lucasssvaz
/tools/add_lib.sh @P-R-O-C-H-Y
# Pre-commit
/.* @lucasssvaz # Files in root directory that start with a dot.
# Git Files
/.gitignore @espressif/arduino-devs
/.gitmodules @espressif/arduino-devs
# Documentation
/docs/ @pedrominatel
/.github/ISSUE_TEMPLATE/ @pedrominatel
/.github/PULL_REQUEST_TEMPLATE.md @pedrominatel
/.readthedocs.yaml @pedrominatel
/*.md @pedrominatel
# Boards
/variants/ @P-R-O-C-H-Y
/boards.txt @P-R-O-C-H-Y
# Arduino as Component
/idf_component_examples/ @SuGlider
/idf_component.yml @SuGlider @me-no-dev
/CMakeLists.txt @SuGlider @me-no-dev
/Kconfig.projbuild @SuGlider @me-no-dev
# Build System
/package.json @me-no-dev
/platform.txt @me-no-dev
/programmers.txt @me-no-dev
/package/ @me-no-dev
# Libraries
/libraries/ArduinoOTA/ @me-no-dev
/libraries/AsyncUDP/ @me-no-dev
/libraries/BLE/ @lucasssvaz @SuGlider
/libraries/ESP_I2S/ @me-no-dev
/libraries/ESP_NOW/ @P-R-O-C-H-Y @lucasssvaz
/libraries/ESP_SR/ @me-no-dev
/libraries/ESPmDNS/ @me-no-dev
/libraries/Ethernet/ @me-no-dev
/libraries/Matter/ @SuGlider
/libraries/NetBIOS/ @me-no-dev
/libraries/Network/ @me-no-dev
/libraries/OpenThread/ @SuGlider
/libraries/PPP/ @me-no-dev
/libraries/SPI/ @me-no-dev
/libraries/Update/ @me-no-dev
/libraries/USB/ @SuGlider @me-no-dev
/libraries/WiFi/ @me-no-dev
/libraries/WiFiProv/ @me-no-dev
/libraries/Wire/ @me-no-dev
/libraries/Zigbee/ @P-R-O-C-H-Y
# CI JSON
# Keep this after other libraries and tests to avoid being overridden.
**/ci.json @lucasssvaz
# The CODEOWNERS file should be owned by the developers of the ESP32 Arduino Core.
# Leave this entry as the last one to avoid being overridden.
/.github/CODEOWNERS @espressif/arduino-devs

.github/ISSUE_TEMPLATE.md

@@ -0,0 +1,46 @@
Thank you for opening an issue on an Adafruit Arduino library repository. To
improve the speed of resolution please review the following guidelines and
common troubleshooting steps below before creating the issue:
- **Do not use GitHub issues for troubleshooting projects and issues.** Instead use
the forums at http://forums.adafruit.com to ask questions and troubleshoot why
something isn't working as expected. In many cases the problem is a common issue
for which you will more quickly receive help from the forum community. GitHub issues
are meant for known defects in the code. If you don't know if there is a defect
in the code then start with troubleshooting on the forum first.
- **If following a tutorial or guide be sure you didn't miss a step.** Carefully
check all of the steps and commands to run have been followed. Consult the
forum if you're unsure or have questions about steps in a guide/tutorial.
- **For Arduino projects check these very common issues to ensure they don't apply**:
- For uploading sketches or communicating with the board make sure you're using
a **USB data cable** and **not** a **USB charge-only cable**. It is sometimes
very hard to tell the difference between a data and charge cable! Try using the
cable with other devices or swapping to another cable to confirm it is not
the problem.
- **Be sure you are supplying adequate power to the board.** Check the specs of
your board and plug in an external power supply. In many cases just
plugging a board into your computer is not enough to power it and other
peripherals.
- **Double check all soldering joints and connections.** Flakey connections
cause many mysterious problems. See the [guide to excellent soldering](https://learn.adafruit.com/adafruit-guide-excellent-soldering/tools) for examples of good solder joints.
- **Ensure you are using an official Arduino or Adafruit board.** We can't
guarantee that clone boards will have the same functionality or work as expected
with this code, and we don't support them.
If you're sure this issue is a defect in the code and checked the steps above
please fill in the following fields to provide enough troubleshooting information.
You may delete the guideline and text above to just leave the following details:
- Arduino board: **INSERT ARDUINO BOARD NAME/TYPE HERE**
- Arduino IDE version (found in Arduino -> About Arduino menu): **INSERT ARDUINO
VERSION HERE**
- List the steps to reproduce the problem below (if possible attach a sketch or
copy the sketch code in too): **LIST REPRO STEPS BELOW**


@@ -1,63 +0,0 @@
name: Feature request
description: Suggest an idea for this project
labels: ["Type: Feature request"]
body:
- type: markdown
attributes:
value: |
* Please note that we can only process feature requests reported in English to ensure effective communication and support. Feature requests written in other languages will be closed, with a request to rewrite them in English.
* We welcome any ideas or feature requests! It is helpful if you can explain exactly why the feature would be useful.
* There are usually some outstanding feature requests in the [existing issues list](https://github.com/espressif/arduino-esp32/issues?q=is%3Aopen+is%3Aissue+label%3A%22Type%3A+Feature+request%22), feel free to add comments to them.
* If you would like to contribute, please read the [contributions guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/contributing.html).
- type: input
id: Area
attributes:
label: Related area
description: Please briefly explain the area of your Feature Request.
placeholder: eg. Board support, specific Peripheral, BT, Wifi...
validations:
required: true
- type: input
id: HW
attributes:
label: Hardware specification
description: Please provide if your proposal depends on specific Hardware.
placeholder: eg. Support for ESP32 DevKitC, ESP32-C3 DevKitM...
validations:
required: true
- type: textarea
id: problem-related
attributes:
label: Is your feature request related to a problem?
description: Please provide a clear and concise description of what the problem is. Add relevant issue link.
placeholder: ex. I'm facing the issue/missing function...
validations:
required: true
- type: textarea
id: solution
attributes:
label: Describe the solution you'd like
description: Please provide a clear and concise description of what you want to happen.
placeholder: ex. When using this function...
validations:
required: true
- type: textarea
id: alternatives
attributes:
label: Describe alternatives you've considered
description: Please provide a clear and concise description of any alternative solutions or features you've considered.
placeholder: ex. Choosing other approach wouldn't work, because...
- type: textarea
id: context
attributes:
label: Additional context
description: Please add any other context or screenshots about the feature request here.
placeholder: ex. This would work only when ...
- type: checkboxes
id: confirmation
attributes:
label: I have checked the existing list of Feature requests and the Contribution Guide
description: You agree to check all the resources above before opening a new Feature request.
options:
- label: I confirm I have checked the existing list of Feature requests and the Contribution Guide.
required: true


@@ -1,175 +0,0 @@
name: Issue report
description: Report any problem here
labels: ["Status: Awaiting triage"]
body:
- type: markdown
attributes:
value: |
* Please note that we can only process issues reported in English to ensure effective communication and support. Issues written in other languages will be closed, with a request to rewrite them in English.
* Before reporting a new issue please check and search in [List of existing issues](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue)
* Please check [Online Documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/index.html)
* Take a look at the [Troubleshooting guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/troubleshooting.html)
* If still experiencing the issue, please provide as many details as possible below about your hardware, computer setup and code.
- type: input
id: Board
attributes:
label: Board
description: On which Board does this issue occur?
placeholder: eg. ESP32 Dev Module, ESP32-S2, LilyGo TTGO LoRa32...
validations:
required: true
- type: textarea
id: devboard
attributes:
label: Device Description
description: What development board or other hardware is the chip attached to?
placeholder: ex. DevKitC, plain module on breadboard, etc. If your hardware is custom or unusual, please attach a photo.
validations:
required: true
- type: textarea
id: other-hw
attributes:
label: Hardware Configuration
description: Is anything else attached to the development board?
placeholder: ex. GPIO 18 & 19 are connected to I2C devices.
validations:
required: true
- type: dropdown
id: version
attributes:
label: Version
description: What version of Arduino ESP32 are you running? If possible, consider updating to the latest version.
options:
- latest stable Release (if not listed below)
- latest development Release Candidate (RC-X)
- latest master (checkout manually)
- v3.3.0
- v3.2.1
- v3.2.0
- v3.1.3
- v3.1.2
- v3.1.1
- v3.1.0
- v3.0.7
- v3.0.6
- v3.0.5
- v3.0.4
- v3.0.3
- v3.0.2
- v3.0.1
- v3.0.0
- v2.0.17
- v2.0.16
- v2.0.15
- v2.0.14
- v2.0.13
- v2.0.12
- v2.0.11
- v2.0.10
- v2.0.9
- v2.0.8
- v2.0.7
- v2.0.6
- v2.0.5
- v2.0.4
- v2.0.3
- v2.0.2
- v2.0.1
- v2.0.0
- v1.0.6
- other
validations:
required: true
- type: dropdown
id: type
attributes:
label: Type
description: How would you define the type of the issue? Please select from the types below.
options:
- Task
- Bug
- Question
validations:
required: true
- type: input
id: IDE
attributes:
label: IDE Name
description: What IDE are you using?
placeholder: eg. Arduino IDE, VSCode, Sloeber...
validations:
required: true
- type: input
id: os
attributes:
label: Operating System
description: On which OS does this issue occur?
placeholder: ex. macOS 12.1, Windows 10...
validations:
required: true
- type: input
id: Flash
attributes:
label: Flash frequency
description: What flash frequency is used?
placeholder: eg. 40Mhz
validations:
required: true
- type: dropdown
id: PSRAM
attributes:
label: PSRAM enabled
description: Is PSRAM enabled?
options:
- "yes"
- "no"
validations:
required: true
- type: input
id: Upload
attributes:
label: Upload speed
description: What upload speed is used?
placeholder: eg. 115200
validations:
required: true
- type: textarea
id: Description
attributes:
label: Description
description: Please describe your problem here and expected behavior
placeholder: ex. Can't connect/weird behavior/wrong function/missing parameter..
validations:
required: true
- type: textarea
id: sketch
attributes:
label: Sketch
description: Please provide full minimal sketch/code which can be run to reproduce your issue
placeholder: ex. Related part of the code to replicate the issue
render: cpp
validations:
required: true
- type: textarea
id: Debug
attributes:
label: Debug Message
description: Please provide a debug message or error message. If you have a Guru Meditation Error or Backtrace, please decode it with [ExceptionDecoder](https://github.com/me-no-dev/EspExceptionDecoder)
placeholder: Enable Core debug level - Debug on tools menu of Arduino IDE, then put the serial output here.
render: plain
validations:
required: true
- type: textarea
id: other-remarks
attributes:
label: Other Steps to Reproduce
description: Is there any other information you can think of which will help us reproduce this problem? Any additional info can be added as well.
placeholder: ex. I also tried on other OS, HW...it works correctly on that setup.
- type: checkboxes
id: confirmation
attributes:
label: I have checked existing issues, online documentation and the Troubleshooting Guide
description: You agree to check all the resources above before opening a new issue.
options:
- label: I confirm I have checked existing issues, online documentation and Troubleshooting guide.
required: true


@@ -1,5 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: Arduino Core for Espressif Discord Server
url: https://discord.gg/8xY6e9crwv
about: Community Discord server for questions and help


@@ -1,24 +1,26 @@
*By completing this PR sufficiently, you help us to review this Pull Request quicker and also help improve the quality of Release Notes*
Thank you for creating a pull request to contribute to Adafruit's GitHub code!
Before you open the request please review the following guidelines and tips to
help it be more easily integrated:
### Checklist
1. [ ] Please provide specific title of the PR describing the change, including the component name (*eg. „Update of Documentation link on Readme.md“*)
2. [ ] Please provide related links (*eg. Issue which will be closed by this Pull Request*)
3. [ ] Please **update relevant Documentation** if applicable
4. [ ] Please check [Contributing guide](https://docs.espressif.com/projects/arduino-esp32/en/latest/contributing.html)
5. [ ] Please **confirm option to "Allow edits and access to secrets by maintainers"** when opening a Pull Request
- **Describe the scope of your change--i.e. what the change does and what parts
of the code were modified.** This will help us understand any risks of integrating
the code.
*This entire section above can be deleted if all items are checked.*
- **Describe any known limitations with your change.** For example if the change
doesn't apply to a supported platform of the library please mention it.
-----------
## Description of Change
Please describe your proposed Pull Request and its impact.
- **Please run any tests or examples that can exercise your modified code.** We
strive to not break users of the code and running tests/examples helps with this
process.
## Tests scenarios
Please describe on what Hardware and Software combinations you have tested this Pull Request and how.
Thank you again for contributing! We will try to test and integrate the change
as soon as we can, but be aware we have many GitHub repositories to manage and
can't immediately respond to every request. There is no need to bump or check in
on a pull request (it will clutter the discussion of the request).
(*eg. I have tested my Pull Request on Arduino-esp32 core v2.0.2 with ESP32 and ESP32-S2 Board with this scenario*)
Also don't be worried if the request is closed or not integrated--sometimes the
priorities of Adafruit's GitHub code (education, ease of use) might not match the
priorities of the pull request. Don't fret, the open source community thrives on
forks and GitHub makes it easy to keep your changes in a forked repo.
## Related links
Please provide links to related issue, PRs etc.
(*eg. Closes #number of issue*)
After reviewing the guidelines above you can delete this text from the pull request.


@@ -1,26 +0,0 @@
name: "CodeQL config"
packs:
- trailofbits/cpp-queries
- githubsecuritylab/codeql-cpp-queries
- githubsecuritylab/codeql-python-queries
queries:
- uses: security-extended
- uses: security-and-quality
query-filters:
- exclude:
query path:
- /^experimental\/.*/
- exclude:
tags contain:
- experimental
- exclude:
problem.severity:
- recommendation
- exclude:
id: tob/cpp/use-of-legacy-algorithm
paths-ignore:
- tests/**


@@ -1,78 +0,0 @@
[CmdletBinding()]
param (
[Parameter()]
[String]
$Path
)
function FindSignTool {
$SignTool = "signtool.exe"
if (Get-Command $SignTool -ErrorAction SilentlyContinue) {
return $SignTool
}
$SignTool = "${env:ProgramFiles(x86)}\Windows Kits\10\bin\x64\signtool.exe"
if (Test-Path -Path $SignTool -PathType Leaf) {
return $SignTool
}
$SignTool = "${env:ProgramFiles(x86)}\Windows Kits\10\bin\x86\signtool.exe"
if (Test-Path -Path $SignTool -PathType Leaf) {
return $SignTool
}
$sdkVers = "10.0.22000.0", "10.0.20348.0", "10.0.19041.0", "10.0.17763.0"
Foreach ($ver in $sdkVers)
{
$SignTool = "${env:ProgramFiles(x86)}\Windows Kits\10\bin\${ver}\x64\signtool.exe"
if (Test-Path -Path $SignTool -PathType Leaf) {
return $SignTool
}
}
"signtool.exe not found"
Exit 1
}
function SignEsptool {
param(
[Parameter()]
[String]
$Path
)
$SignTool = FindSignTool
"Using: $SignTool"
$CertificateFile = [system.io.path]::GetTempPath() + "certificate.pfx"
if ($null -eq $env:CERTIFICATE) {
"CERTIFICATE variable not set, unable to sign the file"
Exit 1
}
if ("" -eq $env:CERTIFICATE) {
"CERTIFICATE variable is empty, unable to sign the file"
Exit 1
}
$SignParameters = @("sign", "/tr", 'http://timestamp.digicert.com', "/td", "SHA256", "/f", $CertificateFile, "/fd", "SHA256")
if ($env:CERTIFICATE_PASSWORD) {
"CERTIFICATE_PASSWORD detected, using the password"
$SignParameters += "/p"
$SignParameters += $env:CERTIFICATE_PASSWORD
}
$SignParameters += $Path
[byte[]]$CertificateBytes = [convert]::FromBase64String($env:CERTIFICATE)
[IO.File]::WriteAllBytes($CertificateFile, $CertificateBytes)
&$SignTool $SignParameters
if (0 -eq $LASTEXITCODE) {
Remove-Item $CertificateFile
} else {
Remove-Item $CertificateFile
"Signing failed"
Exit 1
}
}
SignEsptool ${Path}

Binary file not shown (removed image, 115 KiB).


@@ -1,28 +0,0 @@
#!/bin/bash
#
# This script is used in the CI workflow. It checks that all non-example source files in libraries/ and cores/ are listed in
# CMakeLists.txt for the CMake-based IDF component.
#
# If you see an error running this script, edit CMakeLists.txt and add any new source files into your PR
#
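# Hypothetical example: a new cores/esp32/foo.c present in the repo but missing from CMakeLists.txt
# would appear in the diff below as a line prefixed with "-", and the script would exit with status 1.
#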
set -e
# pull all submodules
git submodule update --init --recursive
# find all source files in repo
REPO_SRCS=$(find cores/esp32/ libraries/ -name 'examples' -prune -o -name '*.c' -print -o -name '*.cpp' -print | sort)
# find all source files named in CMakeLists.txt COMPONENT_SRCS
CMAKE_SRCS=$(cmake --trace-expand -P CMakeLists.txt 2>&1 | grep set\(srcs | cut -d'(' -f3 | sed 's/ )//' | sed 's/srcs //' | tr ' ;' '\n' | sort)
if ! diff -u0 --label "Repo Files" --label "srcs" <(echo "$REPO_SRCS") <(echo "$CMAKE_SRCS"); then
echo "Source files in repo (-) and source files in CMakeLists.txt (+) don't match"
echo "Edit CMakeLists.txt as appropriate to add/remove source files from COMPONENT_SRCS"
exit 1
fi
echo "CMakeLists.txt and repo source files match"
exit 0


@@ -1,39 +0,0 @@
#!/bin/bash
# Get all boards
boards_array=()
boards_list=$(grep '.tarch=' boards.txt)
while read -r line; do
board_name=$(echo "$line" | cut -d '.' -f1 | cut -d '#' -f1)
# skip esp32c2 as we don't build libs for it
if [ "$board_name" == "esp32c2" ]; then
echo "Skipping 'espressif:esp32:$board_name'"
continue
fi
boards_array+=("espressif:esp32:$board_name")
echo "Added 'espressif:esp32:$board_name' to array"
done <<< "$boards_list"
# Create JSON like string with all boards found and pass it to env variable
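# Hypothetical result written to $GITHUB_ENV: FQBNS=["espressif:esp32:esp32","espressif:esp32:esp32s3"]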
board_count=${#boards_array[@]}
echo "Boards found: $board_count"
echo "BOARD-COUNT=$board_count" >> "$GITHUB_ENV"
if [ "$board_count" -gt 0 ]; then
json_matrix='['
for board in "${boards_array[@]}"; do
json_matrix+='"'$board'"'
if [ "$board_count" -gt 1 ]; then
json_matrix+=","
fi
board_count=$((board_count - 1))
done
json_matrix+=']'
echo "$json_matrix"
echo "FQBNS=${json_matrix}" >> "$GITHUB_ENV"
else
echo "FQBNS=" >> "$GITHUB_ENV"
fi


@@ -1,62 +0,0 @@
#!/bin/bash
# Get inputs from command
owner_repository=$1
base_ref=$2
# Download the boards.txt file from the base branch
curl -L -o boards_base.txt https://raw.githubusercontent.com/"$owner_repository"/"$base_ref"/boards.txt
# Compare boards.txt file in the repo with the modified file from PR
diff=$(diff -u boards_base.txt boards.txt)
# Check if the diff is empty
if [ -z "$diff" ]; then
echo "No changes in boards.txt file"
echo "FQBNS="
exit 0
fi
# Extract added or modified lines (lines starting with '+' or '-')
modified_lines=$(echo "$diff" | grep -E '^[+-][^+-]')
# Print the modified lines for debugging
echo "Modified lines:"
echo "$modified_lines"
boards_array=()
previous_board=""
# Extract board names from the modified lines, and add them to the boards_array
while read -r line; do
board_name=$(echo "$line" | cut -d '.' -f1 | cut -d '#' -f1)
# remove + or - from the board name at the beginning
board_name=${board_name#[-+]}
if [ "$board_name" != "" ] && [ "$board_name" != "+" ] && [ "$board_name" != "-" ] && [ "$board_name" != "esp32_family" ]; then
if [ "$board_name" != "$previous_board" ]; then
boards_array+=("espressif:esp32:$board_name")
previous_board="$board_name"
echo "Added 'espressif:esp32:$board_name' to array"
fi
fi
done <<< "$modified_lines"
# Create JSON like string with all boards found and pass it to env variable
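# Hypothetical result for two changed boards: FQBNS={"fqbn": ["espressif:esp32:esp32s3","espressif:esp32:esp32c6"]}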
board_count=${#boards_array[@]}
if [ "$board_count" -gt 0 ]; then
json_matrix='{"fqbn": ['
for board in "${boards_array[@]}"; do
json_matrix+='"'$board'"'
if [ "$board_count" -gt 1 ]; then
json_matrix+=","
fi
board_count=$((board_count - 1))
done
json_matrix+=']}'
echo "$json_matrix"
echo "FQBNS=${json_matrix}" >> "$GITHUB_ENV"
else
echo "FQBNS=" >> "$GITHUB_ENV"
fi


@@ -1,51 +0,0 @@
#!/bin/bash
OSBITS=$(uname -m)
if [[ "$OSTYPE" == "linux"* ]]; then
export OS_IS_LINUX="1"
if [[ "$OSBITS" == "i686" ]]; then
OS_NAME="linux32"
elif [[ "$OSBITS" == "x86_64" ]]; then
OS_NAME="linux64"
elif [[ "$OSBITS" == "armv7l" || "$OSBITS" == "aarch64" ]]; then
OS_NAME="linuxarm"
else
OS_NAME="$OSTYPE-$OSBITS"
echo "Unknown OS '$OS_NAME'"
exit 1
fi
elif [[ "$OSTYPE" == "darwin"* ]]; then
export OS_IS_MACOS="1"
OS_NAME="macosx"
elif [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]] || [[ "$OSTYPE" == "win32" ]]; then
export OS_IS_WINDOWS="1"
OS_NAME="windows"
else
OS_NAME="$OSTYPE-$OSBITS"
echo "Unknown OS '$OS_NAME'"
exit 1
fi
export OS_NAME
if [ "$OS_IS_MACOS" == "1" ]; then
export ARDUINO_IDE_PATH="$HOME/bin"
export ARDUINO_USR_PATH="$HOME/Documents/Arduino"
elif [ "$OS_IS_WINDOWS" == "1" ]; then
export ARDUINO_IDE_PATH="$HOME/bin"
export ARDUINO_USR_PATH="$HOME/Documents/Arduino"
else
export ARDUINO_IDE_PATH="$HOME/bin"
export ARDUINO_USR_PATH="$HOME/Arduino"
fi
if [ ! -d "$ARDUINO_IDE_PATH" ] || [ ! -f "$ARDUINO_IDE_PATH/arduino-cli" ]; then
echo "Installing Arduino CLI on $OS_NAME ..."
mkdir -p "$ARDUINO_IDE_PATH"
if [ "$OS_IS_WINDOWS" == "1" ]; then
curl -fsSL https://downloads.arduino.cc/arduino-cli/arduino-cli_latest_Windows_64bit.zip -o arduino-cli.zip
unzip -q arduino-cli.zip -d "$ARDUINO_IDE_PATH"
rm arduino-cli.zip
else
curl -fsSL https://raw.githubusercontent.com/arduino/arduino-cli/master/install.sh | BINDIR="$ARDUINO_IDE_PATH" sh
fi
fi


@@ -1,40 +0,0 @@
#!/bin/bash
export ARDUINO_ESP32_PATH="$ARDUINO_USR_PATH/hardware/espressif/esp32"
if [ ! -d "$ARDUINO_ESP32_PATH" ]; then
echo "Installing ESP32 Arduino Core ..."
script_init_path="$PWD"
mkdir -p "$ARDUINO_USR_PATH/hardware/espressif"
cd "$ARDUINO_USR_PATH/hardware/espressif" || exit
echo "Installing Python Serial ..."
pip install pyserial > /dev/null
if [ "$OS_IS_WINDOWS" == "1" ]; then
echo "Installing Python Requests ..."
pip install requests > /dev/null
fi
if [ -n "$GITHUB_REPOSITORY" ]; then
echo "Linking Core..."
ln -s "$GITHUB_WORKSPACE" esp32
else
echo "Cloning Core Repository..."
git clone https://github.com/espressif/arduino-esp32.git esp32 > /dev/null 2>&1
fi
#echo "Updating Submodules ..."
cd esp32 || exit
#git submodule update --init --recursive > /dev/null 2>&1
echo "Installing Platform Tools ..."
if [ "$OS_IS_WINDOWS" == "1" ]; then
cd tools && ./get.exe
else
cd tools && python get.py
fi
cd "$script_init_path" || exit
echo "ESP32 Arduino has been installed in '$ARDUINO_ESP32_PATH'"
echo ""
fi


@@ -1,79 +0,0 @@
#!/bin/bash
#OSTYPE: 'linux-gnu', ARCH: 'x86_64' => linux64
#OSTYPE: 'msys', ARCH: 'x86_64' => win32
#OSTYPE: 'darwin18', ARCH: 'i386' => macos
OSBITS=$(uname -m)
if [[ "$OSTYPE" == "linux"* ]]; then
export OS_IS_LINUX="1"
ARCHIVE_FORMAT="tar.xz"
if [[ "$OSBITS" == "i686" ]]; then
OS_NAME="linux32"
elif [[ "$OSBITS" == "x86_64" ]]; then
OS_NAME="linux64"
elif [[ "$OSBITS" == "armv7l" || "$OSBITS" == "aarch64" ]]; then
OS_NAME="linuxarm"
else
OS_NAME="$OSTYPE-$OSBITS"
echo "Unknown OS '$OS_NAME'"
exit 1
fi
elif [[ "$OSTYPE" == "darwin"* ]]; then
export OS_IS_MACOS="1"
ARCHIVE_FORMAT="zip"
OS_NAME="macosx"
elif [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]] || [[ "$OSTYPE" == "win32" ]]; then
export OS_IS_WINDOWS="1"
ARCHIVE_FORMAT="zip"
OS_NAME="windows"
else
OS_NAME="$OSTYPE-$OSBITS"
echo "Unknown OS '$OS_NAME'"
exit 1
fi
export OS_NAME
if [ "$OS_IS_MACOS" == "1" ]; then
export ARDUINO_IDE_PATH="/Applications/Arduino.app/Contents/Java"
export ARDUINO_USR_PATH="$HOME/Documents/Arduino"
elif [ "$OS_IS_WINDOWS" == "1" ]; then
export ARDUINO_IDE_PATH="$HOME/arduino_ide"
export ARDUINO_USR_PATH="$HOME/Documents/Arduino"
else
export ARDUINO_IDE_PATH="$HOME/arduino_ide"
export ARDUINO_USR_PATH="$HOME/Arduino"
fi
# Updated as of Nov 3rd 2020
ARDUINO_IDE_URL="https://github.com/espressif/arduino-esp32/releases/download/1.0.4/arduino-nightly-"
# Currently not working
#ARDUINO_IDE_URL="https://www.arduino.cc/download.php?f=/arduino-nightly-"
if [ ! -d "$ARDUINO_IDE_PATH" ]; then
echo "Installing Arduino IDE on $OS_NAME ..."
echo "Downloading '$ARDUINO_IDE_URL$OS_NAME.$ARCHIVE_FORMAT' to 'arduino.$ARCHIVE_FORMAT' ..."
if [ "$OS_IS_LINUX" == "1" ]; then
wget -O "arduino.$ARCHIVE_FORMAT" "$ARDUINO_IDE_URL$OS_NAME.$ARCHIVE_FORMAT" > /dev/null 2>&1
echo "Extracting 'arduino.$ARCHIVE_FORMAT' ..."
tar xf "arduino.$ARCHIVE_FORMAT" > /dev/null
mv arduino-nightly "$ARDUINO_IDE_PATH"
else
curl -o "arduino.$ARCHIVE_FORMAT" -L "$ARDUINO_IDE_URL$OS_NAME.$ARCHIVE_FORMAT" > /dev/null 2>&1
echo "Extracting 'arduino.$ARCHIVE_FORMAT' ..."
unzip "arduino.$ARCHIVE_FORMAT" > /dev/null
if [ "$OS_IS_MACOS" == "1" ]; then
mv "Arduino.app" "/Applications/Arduino.app"
else
mv arduino-nightly "$ARDUINO_IDE_PATH"
fi
fi
rm -rf "arduino.$ARCHIVE_FORMAT"
mkdir -p "$ARDUINO_USR_PATH/libraries"
mkdir -p "$ARDUINO_USR_PATH/hardware"
echo "Arduino IDE Installed in '$ARDUINO_IDE_PATH'"
echo ""
fi


@@ -1,98 +0,0 @@
#!/usr/bin/env python
# This script merges two Arduino Board Manager package json files.
# Usage:
# python merge_packages.py package_esp8266com_index.json version/new/package_esp8266com_index.json
# Written by Ivan Grokhotkov, 2015
# Updated by lucasssvaz to handle Chinese version sorting, 2025
#
from __future__ import print_function
# from distutils.version import LooseVersion
from packaging.version import Version
import re
import json
import sys
def load_package(filename):
pkg = json.load(open(filename))["packages"][0]
print("Loaded package {0} from {1}".format(pkg["name"], filename), file=sys.stderr)
print("{0} platform(s), {1} tools".format(len(pkg["platforms"]), len(pkg["tools"])), file=sys.stderr)
return pkg
def merge_objects(versions, obj):
for o in obj:
name = o["name"].encode("ascii")
ver = o["version"].encode("ascii")
if name not in versions:
print("found new object, {0}".format(name), file=sys.stderr)
versions[name] = {}
if ver not in versions[name]:
print("found new version {0} for object {1}".format(ver, name), file=sys.stderr)
versions[name][ver] = o
return versions
# Normalize ESP release version string (x.x.x) by adding '-rc<MAXINT>' (x.x.x-rc9223372036854775807)
# to ensure a release (REL) sorts above any RC. The CN version will be sorted after the official version if they happen
# to be mixed (normally, CN and non-CN versions should not be mixed)
# Dummy approach, functional anyway for current ESP package versioning
# (unlike NormalizedVersion/LooseVersion/StrictVersion & similar crap)
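# Illustrative (hypothetical) inputs/outputs of the normalization below:
#   "3.0.5"     -> "3.0.5-rc9223372036854775807"   (plain release sorts above any RC)
#   "3.0.5-rc1" -> "3.0.5-rc1"                      (already four parts, left unchanged)
#   "3.0.5-cn"  -> "3.0.5-rc4611686018427387903"    (CN build sorts after the official release)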
def pkgVersionNormalized(versionString):
verStr = str(versionString).replace("-cn", "")
verParts = re.split(r"\.|-rc|-alpha", verStr, flags=re.IGNORECASE)
if len(verParts) == 3:
if "-cn" in str(versionString):
verStr = verStr + "-rc" + str(sys.maxsize // 2)
else:
verStr = verStr + "-rc" + str(sys.maxsize)
elif len(verParts) != 4:
print("pkgVersionNormalized WARNING: unexpected version format: {0})".format(verStr), file=sys.stderr)
return verStr
def main(args):
if len(args) < 3:
print("Usage: {0} <package1> <package2>".format(args[0]), file=sys.stderr)
return 1
tools = {}
platforms = {}
pkg1 = load_package(args[1])
tools = merge_objects(tools, pkg1["tools"])
platforms = merge_objects(platforms, pkg1["platforms"])
pkg2 = load_package(args[2])
tools = merge_objects(tools, pkg2["tools"])
platforms = merge_objects(platforms, pkg2["platforms"])
pkg1["tools"] = []
pkg1["platforms"] = []
for name in tools:
for version in tools[name]:
print("Adding tool {0}-{1}".format(name, version), file=sys.stderr)
pkg1["tools"].append(tools[name][version])
for name in platforms:
for version in platforms[name]:
print("Adding platform {0}-{1}".format(name, version), file=sys.stderr)
pkg1["platforms"].append(platforms[name][version])
# pkg1["platforms"] = sorted(
# pkg1["platforms"], key=lambda k: LooseVersion(pkgVersionNormalized(k["version"])), reverse=True
# )
pkg1["platforms"] = sorted(
pkg1["platforms"], key=lambda k: Version(pkgVersionNormalized(k["version"])), reverse=True
)
json.dump({"packages": [pkg1]}, sys.stdout, indent=2)
if __name__ == "__main__":
sys.exit(main(sys.argv))


@@ -1,160 +0,0 @@
#!/bin/bash
set -e
function get_file_size {
local file="$1"
if [[ "$OSTYPE" == "darwin"* ]]; then
eval "$(stat -s "$file")"
local res="$?"
echo "${st_size:?}"
return $res
else
stat --printf="%s" "$file"
return $?
fi
}
#git_remove_from_pages <file>
function git_remove_from_pages {
local path=$1
local info
local type
local sha
local message
info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages")
type=$(echo "$info" | jq -r '.type')
if [ ! "$type" == "file" ]; then
if [ ! "$type" == "null" ]; then
echo "Wrong type '$type'"
else
echo "File is not on Pages"
fi
return 0
fi
sha=$(echo "$info" | jq -r '.sha')
message="Deleting "$(basename "$path")
local json="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"sha\":\"$sha\"}"
echo "$json" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X DELETE --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
}
function git_upload_to_pages {
local path=$1
local src=$2
if [ ! -f "$src" ]; then
>&2 echo "Input is not a file! Aborting..."
return 1
fi
local info
local type
local message
local sha=""
local content=""
info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages")
type=$(echo "$info" | jq -r '.type')
message=$(basename "$path")
if [ "$type" == "file" ]; then
sha=$(echo "$info" | jq -r '.sha')
sha=",\"sha\":\"$sha\""
message="Updating $message"
elif [ ! "$type" == "null" ]; then
>&2 echo "Wrong type '$type'"
return 1
else
message="Creating $message"
fi
content=$(base64 -i "$src")
data="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"content\":\"$content\"$sha}"
echo "$data" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X PUT --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
}
function git_safe_upload_to_pages {
local path=$1
local file="$2"
local name
local size
local upload_res
name=$(basename "$file")
size=$(get_file_size "$file")
if ! upload_res=$(git_upload_to_pages "$path" "$file"); then
>&2 echo "ERROR: Failed to upload '$name' ($?)"
return 1
fi
up_size=$(echo "$upload_res" | jq -r '.content.size')
if [ "$up_size" -ne "$size" ]; then
>&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
#git_delete_asset
return 1
fi
echo "$upload_res" | jq -r '.content.download_url'
return $?
}
git_safe_upload_to_pages "index.md" "README.md"
# At some point GitHub stopped providing a list of edited files,
# but we also stopped having documentation in md format,
# so we can skip this portion safely and update just the index
# EVENT_JSON=`cat $GITHUB_EVENT_PATH`
# echo "GITHUB_EVENT_PATH: $GITHUB_EVENT_PATH"
# echo "EVENT_JSON: $EVENT_JSON"
# pages_added=`echo "$EVENT_JSON" | jq -r '.commits[].added[]'`
# echo "added: $pages_added"
# pages_modified=`echo "$EVENT_JSON" | jq -r '.commits[].modified[]'`
# echo "modified: $pages_modified"
# pages_removed=`echo "$EVENT_JSON" | jq -r '.commits[].removed[]'`
# echo "removed: $pages_removed"
# for page in $pages_added; do
# if [[ $page != "README.md" && $page != "docs/"* ]]; then
# continue
# fi
# echo "Adding '$page' to pages ..."
# if [[ $page == "README.md" ]]; then
# git_safe_upload_to_pages "index.md" "README.md"
# else
# git_safe_upload_to_pages "$page" "$page"
# fi
# done
# for page in $pages_modified; do
# if [[ $page != "README.md" && $page != "docs/"* ]]; then
# continue
# fi
# echo "Modifying '$page' ..."
# if [[ $page == "README.md" ]]; then
# git_safe_upload_to_pages "index.md" "README.md"
# else
# git_safe_upload_to_pages "$page" "$page"
# fi
# done
# for page in $pages_removed; do
# if [[ $page != "README.md" && $page != "docs/"* ]]; then
# continue
# fi
# echo "Removing '$page' from pages ..."
# if [[ $page == "README.md" ]]; then
# git_remove_from_pages "README.md" > /dev/null
# else
# git_remove_from_pages "$page" > /dev/null
# fi
# done
echo
echo "DONE!"


@@ -1,33 +0,0 @@
#!/bin/bash
set -e
CHECK_REQUIREMENTS="./components/arduino-esp32/.github/scripts/sketch_utils.sh check_requirements"
# Export IDF environment
. ${IDF_PATH}/export.sh
# Find all examples in ./components/arduino-esp32/idf_component_examples
idf_component_examples=$(find ./components/arduino-esp32/idf_component_examples -mindepth 1 -maxdepth 1 -type d)
for example in $idf_component_examples; do
if [ -f "$example"/ci.json ]; then
# If the target is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$IDF_TARGET" '.targets[$target]' "$example"/ci.json)
if [[ "$is_target" == "false" ]]; then
printf "\n\033[93mSkipping %s for target %s\033[0m\n\n" "$example" "$IDF_TARGET"
continue
fi
fi
idf.py -C "$example" set-target "$IDF_TARGET"
has_requirements=$(${CHECK_REQUIREMENTS} "$example" "$example/sdkconfig")
if [ "$has_requirements" -eq 0 ]; then
printf "\n\033[93m%s does not meet the requirements for %s. Skipping...\033[0m\n\n" "$example" "$IDF_TARGET"
continue
fi
printf "\n\033[95mBuilding %s\033[0m\n\n" "$example"
idf.py -C "$example" -DEXTRA_COMPONENT_DIRS="$PWD/components" build
done


@@ -1,107 +0,0 @@
#!/bin/bash
set -e
export ARDUINO_BUILD_DIR="$HOME/.arduino/build.tmp"
function build {
local target=$1
local chunk_index=$2
local chunks_cnt=$3
local build_log=$4
local log_level=${5:-none}
local sketches_file=$6
shift 6
local sketches=("$@")
local BUILD_SKETCH="${SCRIPTS_DIR}/sketch_utils.sh build"
local BUILD_SKETCHES="${SCRIPTS_DIR}/sketch_utils.sh chunk_build"
local args=("-ai" "$ARDUINO_IDE_PATH" "-au" "$ARDUINO_USR_PATH" "-t" "$target")
if [ "$OS_IS_LINUX" == "1" ]; then
args+=("-p" "$ARDUINO_ESP32_PATH/libraries" "-i" "$chunk_index" "-m" "$chunks_cnt" "-d" "$log_level")
if [ -n "$sketches_file" ]; then
args+=("-f" "$sketches_file")
fi
if [ "$build_log" -eq 1 ]; then
args+=("-l" "$build_log")
fi
${BUILD_SKETCHES} "${args[@]}"
else
for sketch in "${sketches[@]}"; do
local sargs=("${args[@]}")
local ctags_version
local preprocessor_version
sargs+=("-s" "$(dirname "$sketch")")
if [ "$OS_IS_WINDOWS" == "1" ] && [ -d "$ARDUINO_IDE_PATH/tools-builder" ]; then
ctags_version=$(ls "$ARDUINO_IDE_PATH/tools-builder/ctags/")
preprocessor_version=$(ls "$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/")
sargs+=(
"-prefs=runtime.tools.ctags.path=$ARDUINO_IDE_PATH/tools-builder/ctags/$ctags_version"
"-prefs=runtime.tools.arduino-preprocessor.path=$ARDUINO_IDE_PATH/tools-builder/arduino-preprocessor/$preprocessor_version"
)
fi
${BUILD_SKETCH} "${sargs[@]}"
done
fi
}
if [ -z "$GITHUB_WORKSPACE" ]; then
export GITHUB_WORKSPACE="$PWD"
export GITHUB_REPOSITORY="espressif/arduino-esp32"
fi
CHUNK_INDEX=$1
CHUNKS_CNT=$2
BUILD_LOG=$3
LOG_LEVEL=$4
SKETCHES_FILE=$5
if [ "$#" -lt 2 ] || [ "$CHUNKS_CNT" -le 0 ]; then
CHUNK_INDEX=0
CHUNKS_CNT=1
elif [ "$CHUNK_INDEX" -gt "$CHUNKS_CNT" ] && [ "$CHUNKS_CNT" -ge 2 ]; then
CHUNK_INDEX=$CHUNKS_CNT
fi
if [ -z "$BUILD_LOG" ] || [ "$BUILD_LOG" -le 0 ]; then
BUILD_LOG=0
fi
#echo "Updating submodules ..."
#git -C "$GITHUB_WORKSPACE" submodule update --init --recursive > /dev/null 2>&1
SCRIPTS_DIR="./.github/scripts"
source "${SCRIPTS_DIR}/install-arduino-cli.sh"
source "${SCRIPTS_DIR}/install-arduino-core-esp32.sh"
SKETCHES_ESP32=(
"$ARDUINO_ESP32_PATH/libraries/NetworkClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino"
"$ARDUINO_ESP32_PATH/libraries/BLE/examples/Server/Server.ino"
"$ARDUINO_ESP32_PATH/libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino"
"$ARDUINO_ESP32_PATH/libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino"
)
#create sizes_file
sizes_file="$GITHUB_WORKSPACE/cli_compile_$CHUNK_INDEX.json"
if [ "$BUILD_LOG" -eq 1 ]; then
#create sizes_file and echo start of JSON array with "boards" key
echo "{\"boards\": [" > "$sizes_file"
fi
#build sketches for different targets
build "esp32c5" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32p4" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32s3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32s2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32c3" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32c6" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32h2" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
build "esp32" "$CHUNK_INDEX" "$CHUNKS_CNT" "$BUILD_LOG" "$LOG_LEVEL" "$SKETCHES_FILE" "${SKETCHES_ESP32[@]}"
if [ "$BUILD_LOG" -eq 1 ]; then
#remove last comma from the last JSON object
sed -i '$ s/,$//' "$sizes_file"
#echo end of JSON array
echo "]}" >> "$sizes_file"
fi


@@ -1,477 +0,0 @@
#!/bin/bash
# Disable shellcheck warning about using 'cat' to read a file.
# Disable shellcheck warning about using individual redirections for each command.
# Disable shellcheck warning about $? uses.
# shellcheck disable=SC2002,SC2129,SC2181,SC2319
if [ ! "$GITHUB_EVENT_NAME" == "release" ]; then
echo "Wrong event '$GITHUB_EVENT_NAME'!"
exit 1
fi
EVENT_JSON=$(cat "$GITHUB_EVENT_PATH")
action=$(echo "$EVENT_JSON" | jq -r '.action')
if [ ! "$action" == "published" ]; then
echo "Wrong action '$action'. Exiting now..."
exit 0
fi
draft=$(echo "$EVENT_JSON" | jq -r '.release.draft')
if [ "$draft" == "true" ]; then
echo "It's a draft release. Exiting now..."
exit 0
fi
RELEASE_PRE=$(echo "$EVENT_JSON" | jq -r '.release.prerelease')
RELEASE_TAG=$(echo "$EVENT_JSON" | jq -r '.release.tag_name')
RELEASE_BRANCH=$(echo "$EVENT_JSON" | jq -r '.release.target_commitish')
RELEASE_ID=$(echo "$EVENT_JSON" | jq -r '.release.id')
SCRIPTS_DIR="./.github/scripts"
OUTPUT_DIR="$GITHUB_WORKSPACE/build"
PACKAGE_NAME="esp32-$RELEASE_TAG"
PACKAGE_JSON_MERGE="$GITHUB_WORKSPACE/.github/scripts/merge_packages.py"
PACKAGE_JSON_TEMPLATE="$GITHUB_WORKSPACE/package/package_esp32_index.template.json"
PACKAGE_JSON_DEV="package_esp32_dev_index.json"
PACKAGE_JSON_REL="package_esp32_index.json"
PACKAGE_JSON_DEV_CN="package_esp32_dev_index_cn.json"
PACKAGE_JSON_REL_CN="package_esp32_index_cn.json"
echo "Event: $GITHUB_EVENT_NAME, Repo: $GITHUB_REPOSITORY, Path: $GITHUB_WORKSPACE, Ref: $GITHUB_REF"
echo "Action: $action, Branch: $RELEASE_BRANCH, ID: $RELEASE_ID"
echo "Tag: $RELEASE_TAG, Draft: $draft, Pre-Release: $RELEASE_PRE"
# Try extracting something like a JSON with a "boards" array/element and "vendor" fields
BOARDS=$(echo "$RELEASE_BODY" | grep -Pzo '(?s){.*}' | jq -r '.boards[]? // .boards? // empty' | xargs echo -n 2>/dev/null)
VENDOR=$(echo "$RELEASE_BODY" | grep -Pzo '(?s){.*}' | jq -r '.vendor? // empty' | xargs echo -n 2>/dev/null)
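# Hypothetical release body snippet that the two extractions above would match:
#   {"boards": ["esp32s3", "esp32c6"], "vendor": "Espressif"}
# yielding BOARDS="esp32s3 esp32c6" and VENDOR="Espressif".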
if [ -n "${BOARDS}" ]; then
echo "Releasing board(s): $BOARDS"
fi
if [ -n "${VENDOR}" ]; then
echo "Setting packager: $VENDOR"
fi
function get_file_size {
local file="$1"
if [[ "$OSTYPE" == "darwin"* ]]; then
eval "$(stat -s "$file")"
local res="$?"
echo "${st_size:?}"
return $res
else
stat --printf="%s" "$file"
return $?
fi
}
function git_upload_asset {
local name
name=$(basename "$1")
# local mime=$(file -b --mime-type "$1")
curl -k -X POST -sH "Authorization: token $GITHUB_TOKEN" -H "Content-Type: application/octet-stream" --data-binary @"$1" "https://uploads.github.com/repos/$GITHUB_REPOSITORY/releases/$RELEASE_ID/assets?name=$name"
}
function git_safe_upload_asset {
local file="$1"
local name
local size
local upload_res
name=$(basename "$file")
size=$(get_file_size "$file")
if ! upload_res=$(git_upload_asset "$file"); then
>&2 echo "ERROR: Failed to upload '$name' ($?)"
return 1
fi
up_size=$(echo "$upload_res" | jq -r '.size')
if [ "$up_size" -ne "$size" ]; then
>&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
#git_delete_asset
return 1
fi
echo "$upload_res" | jq -r '.browser_download_url'
return $?
}
function git_upload_to_pages {
local path=$1
local src=$2
if [ ! -f "$src" ]; then
>&2 echo "Input is not a file! Aborting..."
return 1
fi
local info
local type
local message
local sha=""
local content=""
info=$(curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.object+json" -X GET "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path?ref=gh-pages")
type=$(echo "$info" | jq -r '.type')
message=$(basename "$path")
if [ "$type" == "file" ]; then
sha=$(echo "$info" | jq -r '.sha')
sha=",\"sha\":\"$sha\""
message="Updating $message"
elif [ ! "$type" == "null" ]; then
>&2 echo "Wrong type '$type'"
return 1
else
message="Creating $message"
fi
content=$(base64 -i "$src")
data="{\"branch\":\"gh-pages\",\"message\":\"$message\",\"content\":\"$content\"$sha}"
echo "$data" | curl -s -k -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3.raw+json" -X PUT --data @- "https://api.github.com/repos/$GITHUB_REPOSITORY/contents/$path"
}
function git_safe_upload_to_pages {
local path=$1
local file="$2"
local name
local size
local upload_res
name=$(basename "$file")
size=$(get_file_size "$file")
if ! upload_res=$(git_upload_to_pages "$path" "$file"); then
>&2 echo "ERROR: Failed to upload '$name' ($?)"
return 1
fi
up_size=$(echo "$upload_res" | jq -r '.content.size')
if [ "$up_size" -ne "$size" ]; then
>&2 echo "ERROR: Uploaded size does not match! $up_size != $size"
#git_delete_asset
return 1
fi
echo "$upload_res" | jq -r '.content.download_url'
return $?
}
function merge_package_json {
local jsonLink=$1
local jsonOut=$2
local old_json=$OUTPUT_DIR/oldJson.json
local merged_json=$OUTPUT_DIR/mergedJson.json
local error_code=0
echo "Downloading previous JSON $jsonLink ..."
curl -L -o "$old_json" "https://github.com/$GITHUB_REPOSITORY/releases/download/$jsonLink?access_token=$GITHUB_TOKEN" 2>/dev/null
error_code=$?
if [ $error_code -ne 0 ]; then
echo "ERROR: Download Failed! $error_code"
exit 1
fi
echo "Creating new JSON ..."
set +e
stdbuf -oL python "$PACKAGE_JSON_MERGE" "$jsonOut" "$old_json" > "$merged_json"
set -e
set -v
if [ ! -s "$merged_json" ]; then
rm -f "$merged_json"
echo "Nothing to merge"
else
rm -f "$jsonOut"
mv "$merged_json" "$jsonOut"
echo "JSON data successfully merged"
fi
rm -f "$old_json"
set +v
}
set -e
##
## PACKAGE ZIP
##
mkdir -p "$OUTPUT_DIR"
PKG_DIR="$OUTPUT_DIR/$PACKAGE_NAME"
PACKAGE_ZIP="$PACKAGE_NAME.zip"
echo "Updating submodules ..."
git -C "$GITHUB_WORKSPACE" submodule update --init --recursive > /dev/null 2>&1
mkdir -p "$PKG_DIR/tools"
# Copy all core files to the package folder
echo "Copying files for packaging ..."
if [ -z "${BOARDS}" ]; then
# Copy all variants
cp -f "$GITHUB_WORKSPACE/boards.txt" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/variants" "$PKG_DIR/"
else
# Remove all entries not starting with any board code or "menu." from boards.txt
cat "$GITHUB_WORKSPACE/boards.txt" | grep "^menu\." > "$PKG_DIR/boards.txt"
for board in ${BOARDS} ; do
cat "$GITHUB_WORKSPACE/boards.txt" | grep "^${board}\." >> "$PKG_DIR/boards.txt"
done
# Copy only relevant variant files
mkdir "$PKG_DIR/variants/"
board_list=$(cat "${PKG_DIR}"/boards.txt | grep "\.variant=" | cut -d= -f2)
while IFS= read -r variant; do
cp -Rf "$GITHUB_WORKSPACE/variants/${variant}" "$PKG_DIR/variants/"
done <<< "$board_list"
fi
cp -f "$GITHUB_WORKSPACE/CMakeLists.txt" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/idf_component.yml" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/Kconfig.projbuild" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/package.json" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/programmers.txt" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/cores" "$PKG_DIR/"
cp -Rf "$GITHUB_WORKSPACE/libraries" "$PKG_DIR/"
cp -f "$GITHUB_WORKSPACE/tools/espota.exe" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/espota.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/gen_esp32part.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/gen_esp32part.exe" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/gen_insights_package.py" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/gen_insights_package.exe" "$PKG_DIR/tools/"
cp -Rf "$GITHUB_WORKSPACE/tools/partitions" "$PKG_DIR/tools/"
cp -Rf "$GITHUB_WORKSPACE/tools/ide-debug" "$PKG_DIR/tools/"
cp -f "$GITHUB_WORKSPACE/tools/pioarduino-build.py" "$PKG_DIR/tools/"
# Remove unnecessary files in the package folder
echo "Cleaning up folders ..."
find "$PKG_DIR" -name '*.DS_Store' -exec rm -f {} \;
find "$PKG_DIR" -name '*.git*' -type f -delete
##
## TEMP WORKAROUND FOR RV32 LONG PATH ON WINDOWS
##
RVTC_NAME="riscv32-esp-elf-gcc"
RVTC_NEW_NAME="esp-rv32"
X32TC_NAME="xtensa-esp-elf-gcc"
X32TC_NEW_NAME="esp-x32"
# Replace tools locations in platform.txt
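# For instance, a reference such as {runtime.platform.path}/tools/esptool is rewritten below to
# {runtime.tools.esptool_py.path}, so the tool is resolved through its own runtime.tools entry.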
echo "Generating platform.txt..."
cat "$GITHUB_WORKSPACE/platform.txt" | \
sed "s/version=.*/version=$RELEASE_TAG/g" | \
sed 's/tools\.esp32-arduino-libs\.path\.windows=.*//g' | \
sed 's/{runtime\.platform\.path}.tools.esp32-arduino-libs/\{runtime.tools.esp32-arduino-libs.path\}/g' | \
sed 's/{runtime\.platform\.path}.tools.xtensa-esp-elf-gdb/\{runtime.tools.xtensa-esp-elf-gdb.path\}/g' | \
sed "s/{runtime\.platform\.path}.tools.xtensa-esp-elf/\\{runtime.tools.$X32TC_NEW_NAME.path\\}/g" | \
sed 's/{runtime\.platform\.path}.tools.riscv32-esp-elf-gdb/\{runtime.tools.riscv32-esp-elf-gdb.path\}/g' | \
sed "s/{runtime\.platform\.path}.tools.riscv32-esp-elf/\\{runtime.tools.$RVTC_NEW_NAME.path\\}/g" | \
sed 's/{runtime\.platform\.path}.tools.esptool/\{runtime.tools.esptool_py.path\}/g' | \
sed 's/{runtime\.platform\.path}.tools.openocd-esp32/\{runtime.tools.openocd-esp32.path\}/g' > "$PKG_DIR/platform.txt"
if [ -n "${VENDOR}" ]; then
# Append vendor name to platform.txt to create a separate section
sed -i "/^name=.*/s/$/ ($VENDOR)/" "$PKG_DIR/platform.txt"
fi
# Add header with version information
echo "Generating core_version.h ..."
ver_define=$(echo "$RELEASE_TAG" | tr "[:lower:].\055" "[:upper:]_")
ver_hex=$(git -C "$GITHUB_WORKSPACE" rev-parse --short=8 HEAD 2>/dev/null)
echo \#define ARDUINO_ESP32_GIT_VER 0x"$ver_hex" > "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_GIT_DESC "$(git -C "$GITHUB_WORKSPACE" describe --tags 2>/dev/null)" >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE_"$ver_define" >> "$PKG_DIR/cores/esp32/core_version.h"
echo \#define ARDUINO_ESP32_RELEASE \""$ver_define"\" >> "$PKG_DIR/cores/esp32/core_version.h"
# Compress package folder
echo "Creating ZIP ..."
pushd "$OUTPUT_DIR" >/dev/null
zip -qr "$PACKAGE_ZIP" "$PACKAGE_NAME"
zip_status=$?
if [ $zip_status -ne 0 ]; then
echo "ERROR: Failed to create $PACKAGE_ZIP ($zip_status)"
exit 1
fi
# Calculate SHA-256
echo "Calculating SHA sum ..."
PACKAGE_PATH="$OUTPUT_DIR/$PACKAGE_ZIP"
PACKAGE_SHA=$(shasum -a 256 "$PACKAGE_ZIP" | cut -f 1 -d ' ')
PACKAGE_SIZE=$(get_file_size "$PACKAGE_ZIP")
popd >/dev/null
rm -rf "$PKG_DIR"
echo "'$PACKAGE_ZIP' Created! Size: $PACKAGE_SIZE, SHA-256: $PACKAGE_SHA"
echo
# Upload package to release page
echo "Uploading package to release page ..."
PACKAGE_URL=$(git_safe_upload_asset "$PACKAGE_PATH")
echo "Package Uploaded"
echo "Download URL: $PACKAGE_URL"
echo
##
## TEMP WORKAROUND FOR RV32 LONG PATH ON WINDOWS
##
RVTC_VERSION=$(cat "$PACKAGE_JSON_TEMPLATE" | jq -r ".packages[0].platforms[0].toolsDependencies[] | select(.name == \"$RVTC_NAME\") | .version" | cut -d '_' -f 2)
# RVTC_VERSION=`date -j -f '%Y%m%d' "$RVTC_VERSION" '+%y%m'` # MacOS
RVTC_VERSION=$(date -d "$RVTC_VERSION" '+%y%m')
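# Example (illustrative): a toolchain version such as "esp-14.2.0_20241119" yields
# "20241119" after the cut, which the date conversion turns into "2411".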
rvtc_jq_arg="\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$RVTC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$RVTC_NAME\")).name = \"$RVTC_NEW_NAME\" |\
(.packages[0].tools[] | select(.name==\"$RVTC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].tools[] | select(.name==\"$RVTC_NAME\")).name = \"$RVTC_NEW_NAME\" |\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$X32TC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].platforms[0].toolsDependencies[] | select(.name==\"$X32TC_NAME\")).name = \"$X32TC_NEW_NAME\" |\
(.packages[0].tools[] | select(.name==\"$X32TC_NAME\")).version = \"$RVTC_VERSION\" |\
(.packages[0].tools[] | select(.name==\"$X32TC_NAME\")).name = \"$X32TC_NEW_NAME\""
cat "$PACKAGE_JSON_TEMPLATE" | jq "$rvtc_jq_arg" > "$OUTPUT_DIR/package-rvfix.json"
PACKAGE_JSON_TEMPLATE="$OUTPUT_DIR/package-rvfix.json"
##
## PACKAGE JSON
##
# Construct JQ argument with package data
jq_arg=".packages[0].platforms[0].version = \"$RELEASE_TAG\" | \
.packages[0].platforms[0].url = \"$PACKAGE_URL\" |\
.packages[0].platforms[0].archiveFileName = \"$PACKAGE_ZIP\" |\
.packages[0].platforms[0].size = \"$PACKAGE_SIZE\" |\
.packages[0].platforms[0].checksum = \"SHA-256:$PACKAGE_SHA\""
# Generate package JSONs
echo "Generating $PACKAGE_JSON_DEV ..."
cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_DEV"
# On macOS, the stock sed won't skip the first match; use gsed there instead.
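# Example (illustrative): every github.com asset URL after the first one, e.g.
#   https://github.com/espressif/arduino-esp32/releases/download/<tag>/<asset>
# is rewritten to
#   https://dl.espressif.cn/github_assets/espressif/arduino-esp32/releases/download/<tag>/<asset>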
sed '0,/github\.com\//!s|github\.com/|dl.espressif.cn/github_assets/|g' "$OUTPUT_DIR/$PACKAGE_JSON_DEV" > "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN"
python "$SCRIPTS_DIR/release_append_cn.py" "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN"
if [ "$RELEASE_PRE" == "false" ]; then
echo "Generating $PACKAGE_JSON_REL ..."
cat "$PACKAGE_JSON_TEMPLATE" | jq "$jq_arg" > "$OUTPUT_DIR/$PACKAGE_JSON_REL"
# On macOS, the stock sed won't skip the first match; use gsed there instead.
sed '0,/github\.com\//!s|github\.com/|dl.espressif.cn/github_assets/|g' "$OUTPUT_DIR/$PACKAGE_JSON_REL" > "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN"
python "$SCRIPTS_DIR/release_append_cn.py" "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN"
fi
# Figure out the last release or pre-release
echo "Getting previous releases ..."
releasesJson=$(curl -sH "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" 2>/dev/null)
curl_status=$?
if [ $curl_status -ne 0 ]; then
echo "ERROR: Get Releases Failed! ($curl_status)"
exit 1
fi
set +e
prev_release=$(echo "$releasesJson" | jq -e -r ". | map(select(.draft == false and .prerelease == false)) | sort_by(.published_at | - fromdateiso8601) | .[0].tag_name")
prev_any_release=$(echo "$releasesJson" | jq -e -r ". | map(select(.draft == false)) | sort_by(.published_at | - fromdateiso8601) | .[0].tag_name")
shopt -s nocasematch
if [ "$prev_release" == "$RELEASE_TAG" ]; then
prev_release=$(echo "$releasesJson" | jq -e -r ". | map(select(.draft == false and .prerelease == false)) | sort_by(.published_at | - fromdateiso8601) | .[1].tag_name")
fi
if [ "$prev_any_release" == "$RELEASE_TAG" ]; then
prev_any_release=$(echo "$releasesJson" | jq -e -r ". | map(select(.draft == false)) | sort_by(.published_at | - fromdateiso8601) | .[1].tag_name")
fi
shopt -u nocasematch
set -e
echo "Previous Release: $prev_release"
echo "Previous (any)release: $prev_any_release"
echo
# Merge package JSONs with previous releases
if [ -n "$prev_any_release" ] && [ "$prev_any_release" != "null" ]; then
echo "Merging with JSON from $prev_any_release ..."
merge_package_json "$prev_any_release/$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV"
merge_package_json "$prev_any_release/$PACKAGE_JSON_DEV_CN" "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN"
fi
if [ "$RELEASE_PRE" == "false" ]; then
if [ -n "$prev_release" ] && [ "$prev_release" != "null" ]; then
echo "Merging with JSON from $prev_release ..."
merge_package_json "$prev_release/$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL"
merge_package_json "$prev_release/$PACKAGE_JSON_REL_CN" "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN"
fi
fi
# Test the package JSONs
echo "Installing arduino-cli ..."
export PATH="/home/runner/bin:$PATH"
source "${SCRIPTS_DIR}/install-arduino-cli.sh"
# For the Chinese mirror, we can't test the package JSONs as the Chinese mirror might not be updated yet.
echo "Testing $PACKAGE_JSON_DEV install ..."
echo "Installing esp32 ..."
arduino-cli core install esp32:esp32 --additional-urls "file://$OUTPUT_DIR/$PACKAGE_JSON_DEV"
install_status=$?
if [ $install_status -ne 0 ]; then
echo "ERROR: Failed to install esp32 ($install_status)"
exit 1
fi
echo "Compiling example ..."
arduino-cli compile --fqbn esp32:esp32:esp32 "$GITHUB_WORKSPACE"/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino
compile_status=$?
if [ $compile_status -ne 0 ]; then
echo "ERROR: Failed to compile example ($compile_status)"
exit 1
fi
echo "Uninstalling esp32 ..."
arduino-cli core uninstall esp32:esp32
uninstall_status=$?
if [ $uninstall_status -ne 0 ]; then
echo "ERROR: Failed to uninstall esp32 ($uninstall_status)"
exit 1
fi
echo "Test successful!"
if [ "$RELEASE_PRE" == "false" ]; then
echo "Testing $PACKAGE_JSON_REL install ..."
echo "Installing esp32 ..."
arduino-cli core install esp32:esp32 --additional-urls "file://$OUTPUT_DIR/$PACKAGE_JSON_REL"
install_status=$?
if [ $install_status -ne 0 ]; then
echo "ERROR: Failed to install esp32 ($install_status)"
exit 1
fi
echo "Compiling example ..."
arduino-cli compile --fqbn esp32:esp32:esp32 "$GITHUB_WORKSPACE"/libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino
compile_status=$?
if [ $compile_status -ne 0 ]; then
echo "ERROR: Failed to compile example ($compile_status)"
exit 1
fi
echo "Uninstalling esp32 ..."
arduino-cli core uninstall esp32:esp32
uninstall_status=$?
if [ $uninstall_status -ne 0 ]; then
echo "ERROR: Failed to uninstall esp32 ($uninstall_status)"
exit 1
fi
echo "Test successful!"
fi
# Upload package JSONs
echo "Uploading $PACKAGE_JSON_DEV ..."
echo "Download URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV")"
echo "Pages URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_DEV" "$OUTPUT_DIR/$PACKAGE_JSON_DEV")"
echo "Download CN URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN")"
echo "Pages CN URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_DEV_CN" "$OUTPUT_DIR/$PACKAGE_JSON_DEV_CN")"
echo
if [ "$RELEASE_PRE" == "false" ]; then
echo "Uploading $PACKAGE_JSON_REL ..."
echo "Download URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL")"
echo "Pages URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_REL" "$OUTPUT_DIR/$PACKAGE_JSON_REL")"
echo "Download CN URL: $(git_safe_upload_asset "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN")"
echo "Pages CN URL: $(git_safe_upload_to_pages "$PACKAGE_JSON_REL_CN" "$OUTPUT_DIR/$PACKAGE_JSON_REL_CN")"
echo
fi
set +e
##
## DONE
##
echo "DONE!"

View file

@ -1,57 +0,0 @@
#!/usr/bin/env python3
# Arduino IDE provides by default a package file for the ESP32. This causes version conflicts
# when the user tries to use the JSON file with the Chinese mirrors.
#
# The downside is that the Arduino IDE will always warn the user that updates are available as it
# will consider the version from the Chinese mirrors as a pre-release version.
#
# This script is used to append "-cn" to all versions in the package_esp32_index_cn.json file so that
# the user can select the Chinese mirrors without conflicts.
#
# If Arduino ever stops providing the package_esp32_index.json file by default,
# this script can be removed and the tags reverted.
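# Example (illustrative): a tool dependency entry such as
#   {"packager": "esp32", "name": "esptool_py", "version": "5.0.0"}
# becomes
#   {"packager": "esp32", "name": "esptool_py", "version": "5.0.0-cn"}
# while entries belonging to other packagers are left untouched.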
import json
def append_cn_to_versions(obj):
if isinstance(obj, dict):
# Skip tools that are not from the esp32 package
packager = obj.get("packager")
if packager is not None and packager != "esp32":
return
for key, value in obj.items():
if key == "version" and isinstance(value, str):
if not value.endswith("-cn"):
obj[key] = value + "-cn"
else:
append_cn_to_versions(value)
elif isinstance(obj, list):
for item in obj:
append_cn_to_versions(item)
def process_json_file(input_path, output_path=None):
with open(input_path, "r", encoding="utf-8") as f:
data = json.load(f)
append_cn_to_versions(data)
if output_path is None:
output_path = input_path
with open(output_path, "w", encoding="utf-8") as f:
json.dump(data, f, indent=2)
print(f"Updated JSON written to {output_path}")
if __name__ == "__main__":
import sys
if len(sys.argv) < 2:
print("Usage: python release_append_cn.py input.json [output.json]")
else:
input_file = sys.argv[1]
output_file = sys.argv[2] if len(sys.argv) > 2 else None
process_json_file(input_file, output_file)

View file

@ -1,84 +0,0 @@
#!/bin/bash
build_all=false
chunks_count=0
if [[ $CORE_CHANGED == 'true' ]] || [[ $IS_PR != 'true' ]]; then
echo "Core files changed or not a PR. Building all."
build_all=true
chunks_count=$MAX_CHUNKS
elif [[ $LIB_CHANGED == 'true' ]]; then
echo "Libraries changed. Building only affected sketches."
if [[ $NETWORKING_CHANGED == 'true' ]]; then
echo "Networking libraries changed. Building networking related sketches."
networking_sketches="$(find libraries/WiFi -name '*.ino') "
networking_sketches+="$(find libraries/Ethernet -name '*.ino') "
networking_sketches+="$(find libraries/PPP -name '*.ino') "
networking_sketches+="$(find libraries/NetworkClientSecure -name '*.ino') "
networking_sketches+="$(find libraries/WebServer -name '*.ino') "
fi
if [[ $FS_CHANGED == 'true' ]]; then
echo "FS libraries changed. Building FS related sketches."
fs_sketches="$(find libraries/SD -name '*.ino') "
fs_sketches+="$(find libraries/SD_MMC -name '*.ino') "
fs_sketches+="$(find libraries/SPIFFS -name '*.ino') "
fs_sketches+="$(find libraries/LittleFS -name '*.ino') "
fs_sketches+="$(find libraries/FFat -name '*.ino') "
fi
sketches="$networking_sketches $fs_sketches"
for file in $LIB_FILES; do
lib=$(echo "$file" | awk -F "/" '{print $1"/"$2}')
if [[ "$file" == *.ino ]]; then
# If file ends with .ino, add it to the list of sketches
echo "Sketch found: $file"
sketches+="$file "
elif [[ "$file" == "$lib/src/"* ]]; then
# If file is inside the src directory, find all sketches in the lib/examples directory
echo "Library src file found: $file"
if [[ -d $lib/examples ]]; then
lib_sketches=$(find "$lib"/examples -name '*.ino')
sketches+="$lib_sketches "
echo "Library sketches: $lib_sketches"
fi
else
# If the file is in an example folder but is not a sketch, find all sketches in that directory
echo "File in example folder found: $file"
sketch=$(find "$(dirname "$file")" -name '*.ino')
sketches+="$sketch "
echo "Sketch in example folder: $sketch"
fi
echo ""
done
fi
if [[ -n $sketches ]]; then
# Remove duplicates
sketches=$(echo "$sketches" | tr ' ' '\n' | sort | uniq)
for sketch in $sketches; do
echo "$sketch" >> sketches_found.txt
chunks_count=$((chunks_count+1))
done
echo "Number of sketches found: $chunks_count"
echo "Sketches:"
echo "$sketches"
if [[ $chunks_count -gt $MAX_CHUNKS ]]; then
echo "More sketches than the allowed number of chunks found. Limiting to $MAX_CHUNKS chunks."
chunks_count=$MAX_CHUNKS
fi
fi
chunks='["0"'
for i in $(seq 1 $(( chunks_count - 1 )) ); do
chunks+=",\"$i\""
done
chunks+="]"
{
echo "build_all=$build_all"
echo "build_libraries=$BUILD_LIBRARIES"
echo "build_static_sketches=$BUILD_STATIC_SKETCHES"
echo "build_idf=$BUILD_IDF"
echo "chunk_count=$chunks_count"
echo "chunks=$chunks"
} >> "$GITHUB_OUTPUT"

View file

@ -1,613 +0,0 @@
#!/bin/bash
if [ -d "$ARDUINO_ESP32_PATH/tools/esp32-arduino-libs" ]; then
SDKCONFIG_DIR="$ARDUINO_ESP32_PATH/tools/esp32-arduino-libs"
elif [ -d "$GITHUB_WORKSPACE/tools/esp32-arduino-libs" ]; then
SDKCONFIG_DIR="$GITHUB_WORKSPACE/tools/esp32-arduino-libs"
else
SDKCONFIG_DIR="tools/esp32-arduino-libs"
fi
function check_requirements { # check_requirements <sketchdir> <sdkconfig_path>
local sketchdir=$1
local sdkconfig_path=$2
local has_requirements=1
local requirements
local requirements_or
if [ ! -f "$sdkconfig_path" ] || [ ! -f "$sketchdir/ci.json" ]; then
echo "WARNING: sdkconfig or ci.json not found. Assuming requirements are met." 1>&2
# Return 1 on error to force the sketch to be built and fail. This way the
# CI will fail and the user will know that the sketch has a problem.
else
# Check if the sketch requires any configuration options (AND)
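# Example (illustrative): a ci.json entry like "requires": ["CONFIG_SOC_WIFI_SUPPORTED=y"]
# is satisfied only if a line starting with CONFIG_SOC_WIFI_SUPPORTED=y exists in the sdkconfig.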
requirements=$(jq -r '.requires[]? // empty' "$sketchdir/ci.json")
if [[ "$requirements" != "null" && "$requirements" != "" ]]; then
for requirement in $requirements; do
requirement=$(echo "$requirement" | xargs)
found_line=$(grep -E "^$requirement" "$sdkconfig_path")
if [[ "$found_line" == "" ]]; then
has_requirements=0
fi
done
fi
# Check if the sketch requires any configuration options (OR)
requirements_or=$(jq -r '.requires_any[]? // empty' "$sketchdir/ci.json")
if [[ "$requirements_or" != "null" && "$requirements_or" != "" ]]; then
local found=false
for requirement in $requirements_or; do
requirement=$(echo "$requirement" | xargs)
found_line=$(grep -E "^$requirement" "$sdkconfig_path")
if [[ "$found_line" != "" ]]; then
found=true
break
fi
done
if [[ "$found" == "false" ]]; then
has_requirements=0
fi
fi
fi
echo $has_requirements
}
function build_sketch { # build_sketch <ide_path> <user_path> <path-to-ino> [extra-options]
while [ -n "$1" ]; do
case "$1" in
-ai )
shift
ide_path=$1
;;
-au )
shift
user_path=$1
;;
-t )
shift
target=$1
;;
-fqbn )
shift
fqbn=$1
;;
-o )
shift
options=$1
;;
-s )
shift
sketchdir=$1
;;
-i )
shift
chunk_index=$1
;;
-l )
shift
log_compilation=$1
;;
-d )
shift
debug_level="DebugLevel=$1"
;;
* )
break
;;
esac
shift
done
xtra_opts=("$@")
len=0
if [ -z "$sketchdir" ]; then
echo "ERROR: Sketch directory not provided"
echo "$USAGE"
exit 1
fi
# No FQBN was passed, try to get it from other options
if [ -z "$fqbn" ]; then
if [ -z "$target" ]; then
echo "ERROR: Unspecified chip"
echo "$USAGE"
exit 1
fi
# The options are either stored in the test directory, for per-test
# customization, or passed as parameters. Command-line options take
# precedence. Note that the following logic also falls back to the default
# parameters if no arguments were passed and no file was found.
if [ -z "$options" ] && [ -f "$sketchdir"/ci.json ]; then
# The config file could contain multiple FQBNs for one chip. If
# that's the case we build one time for every FQBN.
len=$(jq -r --arg target "$target" '.fqbn[$target] | length' "$sketchdir"/ci.json)
if [ "$len" -gt 0 ]; then
fqbn=$(jq -r --arg target "$target" '.fqbn[$target] | sort' "$sketchdir"/ci.json)
fi
fi
if [ -n "$options" ] || [ "$len" -eq 0 ]; then
# Since we are passing options, we will end up with only one FQBN to
# build.
len=1
if [ -f "$sketchdir"/ci.json ]; then
fqbn_append=$(jq -r '.fqbn_append' "$sketchdir"/ci.json)
if [ "$fqbn_append" == "null" ]; then
fqbn_append=""
fi
fi
# Default FQBN options if none were passed in the command line.
# Replace any double commas with a single one and strip leading and
# trailing commas.
esp32_opts=$(echo "PSRAM=enabled,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32s2_opts=$(echo "PSRAM=enabled,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32s3_opts=$(echo "PSRAM=opi,USBMode=default,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32c3_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32c6_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32h2_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32p4_opts=$(echo "PSRAM=enabled,USBMode=default,$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
esp32c5_opts=$(echo "$debug_level,$fqbn_append" | sed 's/^,*//;s/,*$//;s/,\{2,\}/,/g')
# Select the common part of the FQBN based on the target. The rest will be
# appended depending on the passed options.
opt=""
case "$target" in
"esp32")
[ -n "${options:-$esp32_opts}" ] && opt=":${options:-$esp32_opts}"
fqbn="espressif:esp32:esp32$opt"
;;
"esp32s2")
[ -n "${options:-$esp32s2_opts}" ] && opt=":${options:-$esp32s2_opts}"
fqbn="espressif:esp32:esp32s2$opt"
;;
"esp32c3")
[ -n "${options:-$esp32c3_opts}" ] && opt=":${options:-$esp32c3_opts}"
fqbn="espressif:esp32:esp32c3$opt"
;;
"esp32s3")
[ -n "${options:-$esp32s3_opts}" ] && opt=":${options:-$esp32s3_opts}"
fqbn="espressif:esp32:esp32s3$opt"
;;
"esp32c6")
[ -n "${options:-$esp32c6_opts}" ] && opt=":${options:-$esp32c6_opts}"
fqbn="espressif:esp32:esp32c6$opt"
;;
"esp32h2")
[ -n "${options:-$esp32h2_opts}" ] && opt=":${options:-$esp32h2_opts}"
fqbn="espressif:esp32:esp32h2$opt"
;;
"esp32p4")
[ -n "${options:-$esp32p4_opts}" ] && opt=":${options:-$esp32p4_opts}"
fqbn="espressif:esp32:esp32p4$opt"
;;
"esp32c5")
[ -n "${options:-$esp32c5_opts}" ] && opt=":${options:-$esp32c5_opts}"
fqbn="espressif:esp32:esp32c5$opt"
;;
*)
echo "ERROR: Invalid chip: $target"
exit 1
;;
esac
# Make it look like a JSON array.
fqbn="[\"$fqbn\"]"
fi
else
# An FQBN was passed. Make it look like a JSON array.
len=1
fqbn="[\"$fqbn\"]"
fi
if [ -z "$fqbn" ]; then
echo "No FQBN passed or invalid chip: $target"
exit 1
fi
# The directory that will hold all the artifacts (the build directory) is
# determined as follows:
# 1. Taken from the env variable ARDUINO_BUILD_DIR, if set.
# 2. Created at the sketch level as "build" in the case of a single
# configuration test.
# 3. Created at the sketch level as "buildX", where X is the index of the
# configuration being built, in case of a multi-configuration test.
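# In practice (see the assignments below), these land under
# "$HOME/.arduino/tests/<target>/<sketchname>/build.tmp" or "build<X>.tmp".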
sketchname=$(basename "$sketchdir")
local has_requirements
if [ -f "$sketchdir"/ci.json ]; then
# If the target is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json)
if [[ "$is_target" == "false" ]]; then
echo "Skipping $sketchname for target $target"
exit 0
fi
has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig")
if [ "$has_requirements" == "0" ]; then
echo "Target $target does not meet the requirements for $sketchname. Skipping."
exit 0
fi
fi
ARDUINO_CACHE_DIR="$HOME/.arduino/cache.tmp"
if [ -n "$ARDUINO_BUILD_DIR" ]; then
build_dir="$ARDUINO_BUILD_DIR"
elif [ "$len" -eq 1 ]; then
# build_dir="$sketchdir/build"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build.tmp"
fi
output_file="$HOME/.arduino/cli_compile_output.txt"
sizes_file="$GITHUB_WORKSPACE/cli_compile_$chunk_index.json"
mkdir -p "$ARDUINO_CACHE_DIR"
for i in $(seq 0 $((len - 1))); do
if [ "$len" -ne 1 ]; then
# build_dir="$sketchdir/build$i"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build$i.tmp"
fi
rm -rf "$build_dir"
mkdir -p "$build_dir"
currfqbn=$(echo "$fqbn" | jq -r --argjson i "$i" '.[$i]')
if [ -f "$ide_path/arduino-cli" ]; then
echo "Building $sketchname with arduino-cli and FQBN=$currfqbn"
curroptions=$(echo "$currfqbn" | cut -d':' -f4)
currfqbn=$(echo "$currfqbn" | cut -d':' -f1-3)
"$ide_path"/arduino-cli compile \
--fqbn "$currfqbn" \
--board-options "$curroptions" \
--warnings "all" \
--build-property "compiler.warning_flags.all=-Wall -Werror=all -Wextra" \
--build-path "$build_dir" \
"${xtra_opts[@]}" "${sketchdir}" \
2>&1 | tee "$output_file"
exit_status=${PIPESTATUS[0]}
if [ "$exit_status" -ne 0 ]; then
echo "ERROR: Compilation failed with error code $exit_status"
exit "$exit_status"
fi
if [ -n "$log_compilation" ]; then
#Extract the program storage space and dynamic memory usage in bytes and percentage in separate variables from the output, just the value without the string
flash_bytes=$(grep -oE 'Sketch uses ([0-9]+) bytes' "$output_file" | awk '{print $3}')
flash_percentage=$(grep -oE 'Sketch uses ([0-9]+) bytes \(([0-9]+)%\)' "$output_file" | awk '{print $5}' | tr -d '(%)')
ram_bytes=$(grep -oE 'Global variables use ([0-9]+) bytes' "$output_file" | awk '{print $4}')
ram_percentage=$(grep -oE 'Global variables use ([0-9]+) bytes \(([0-9]+)%\)' "$output_file" | awk '{print $6}' | tr -d '(%)')
# Extract the directory path excluding the filename
directory_path=$(dirname "$sketch")
# Define the constant part
constant_part="/home/runner/Arduino/hardware/espressif/esp32/libraries/"
# Extract the desired substring
lib_sketch_name="${directory_path#"$constant_part"}"
#append json file where key is fqbn, sketch name, sizes -> extracted values
echo "{\"name\": \"$lib_sketch_name\",
\"sizes\": [{
\"flash_bytes\": $flash_bytes,
\"flash_percentage\": $flash_percentage,
\"ram_bytes\": $ram_bytes,
\"ram_percentage\": $ram_percentage
}]
}," >> "$sizes_file"
fi
elif [ -f "$ide_path/arduino-builder" ]; then
echo "Building $sketchname with arduino-builder and FQBN=$currfqbn"
echo "Build path = $build_dir"
"$ide_path"/arduino-builder -compile -logger=human -core-api-version=10810 \
-fqbn=\""$currfqbn"\" \
-warnings="all" \
-tools "$ide_path/tools-builder" \
-hardware "$user_path/hardware" \
-libraries "$user_path/libraries" \
-build-cache "$ARDUINO_CACHE_DIR" \
-build-path "$build_dir" \
"${xtra_opts[@]}" "${sketchdir}/${sketchname}.ino"
exit_status=$?
if [ $exit_status -ne 0 ]; then
echo "ERROR: Compilation failed with error code $exit_status"
exit $exit_status
fi
# $ide_path/arduino-builder -compile -logger=human -core-api-version=10810 \
# -fqbn=\"$currfqbn\" \
# -warnings="all" \
# -tools "$ide_path/tools-builder" \
# -tools "$ide_path/tools" \
# -built-in-libraries "$ide_path/libraries" \
# -hardware "$ide_path/hardware" \
# -hardware "$user_path/hardware" \
# -libraries "$user_path/libraries" \
# -build-cache "$ARDUINO_CACHE_DIR" \
# -build-path "$build_dir" \
# $xtra_opts "${sketchdir}/${sketchname}.ino"
fi
done
unset fqbn
unset xtra_opts
unset options
}
function count_sketches { # count_sketches <path> [target] [ignore-requirements] [file]
local path=$1
local target=$2
local ignore_requirements=$3
local file=$4
local sketches
if [ $# -lt 1 ]; then
echo "ERROR: Illegal number of parameters"
echo "USAGE: ${0} count <path> [target]"
fi
rm -rf sketches.txt
touch sketches.txt
if [ ! -d "$path" ]; then
return 0
fi
if [ -f "$file" ]; then
sketches=$(cat "$file")
else
sketches=$(find "$path" -name '*.ino' | sort)
fi
local sketchnum=0
for sketch in $sketches; do
local sketchdir
local sketchdirname
local sketchname
local has_requirements
sketchdir=$(dirname "$sketch")
sketchdirname=$(basename "$sketchdir")
sketchname=$(basename "$sketch")
if [[ "$sketchdirname.ino" != "$sketchname" ]]; then
continue
elif [[ -n $target ]] && [[ -f $sketchdir/ci.json ]]; then
# If the target is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json)
if [[ "$is_target" == "false" ]]; then
continue
fi
if [ "$ignore_requirements" != "1" ]; then
has_requirements=$(check_requirements "$sketchdir" "$SDKCONFIG_DIR/$target/sdkconfig")
if [ "$has_requirements" == "0" ]; then
continue
fi
fi
fi
echo "$sketch" >> sketches.txt
sketchnum=$((sketchnum + 1))
done
return $sketchnum
}
function build_sketches { # build_sketches <ide_path> <user_path> <target> <path> <chunk> <total-chunks> [extra-options]
local args=()
while [ -n "$1" ]; do
case $1 in
-ai )
shift
ide_path=$1
;;
-au )
shift
user_path=$1
;;
-t )
shift
target=$1
args+=("-t" "$target")
;;
-fqbn )
shift
fqbn=$1
args+=("-fqbn" "$fqbn")
;;
-p )
shift
path=$1
;;
-i )
shift
chunk_index=$1
;;
-m )
shift
chunk_max=$1
;;
-l )
shift
log_compilation=$1
;;
-f )
shift
sketches_file=$1
;;
-d )
shift
debug_level="$1"
args+=("-d" "$debug_level")
;;
* )
break
;;
esac
shift
done
local xtra_opts=("$@")
if [ -z "$chunk_index" ] || [ -z "$chunk_max" ]; then
echo "ERROR: Invalid chunk parameters"
echo "$USAGE"
exit 1
fi
if [ "$chunk_max" -le 0 ]; then
echo "ERROR: Chunks count must be positive number"
return 1
fi
if [ "$chunk_index" -gt "$chunk_max" ] && [ "$chunk_max" -ge 2 ]; then
chunk_index=$chunk_max
fi
set +e
if [ -n "$sketches_file" ]; then
count_sketches "$path" "$target" "0" "$sketches_file"
local sketchcount=$?
else
count_sketches "$path" "$target"
local sketchcount=$?
fi
set -e
local sketches
sketches=$(cat sketches.txt)
rm -rf sketches.txt
local chunk_size
local all_chunks
chunk_size=$(( sketchcount / chunk_max ))
all_chunks=$(( chunk_max * chunk_size ))
if [ "$all_chunks" -lt "$sketchcount" ]; then
chunk_size=$(( chunk_size + 1 ))
fi
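# Example (illustrative): 100 sketches split over 15 chunks gives chunk_size=7
# (100/15 rounds down to 6, 15*6=90 < 100, so the size is bumped to 7).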
local start_index=0
local end_index=0
if [ "$chunk_index" -ge "$chunk_max" ]; then
start_index=$chunk_index
end_index=$sketchcount
else
start_index=$(( chunk_index * chunk_size ))
if [ "$sketchcount" -le "$start_index" ]; then
echo "No sketches to build for $target in this chunk"
return 0
fi
end_index=$(( $(( chunk_index + 1 )) * chunk_size ))
if [ "$end_index" -gt "$sketchcount" ]; then
end_index=$sketchcount
fi
fi
local start_num
start_num=$(( start_index + 1 ))
echo "Found $sketchcount Sketches for target '$target'";
echo "Chunk Index : $chunk_index"
echo "Chunk Count : $chunk_max"
echo "Chunk Size : $chunk_size"
echo "Start Sketch: $start_num"
echo "End Sketch : $end_index"
#if fqbn is not passed then set it to default for compilation log
if [ -z "$fqbn" ]; then
log_fqbn="espressif:esp32:$target"
else
log_fqbn=$fqbn
fi
sizes_file="$GITHUB_WORKSPACE/cli_compile_$chunk_index.json"
if [ -n "$log_compilation" ]; then
#echo board,target and start of sketches to sizes_file json
echo "{ \"board\": \"$log_fqbn\",
\"target\": \"$target\",
\"sketches\": [" >> "$sizes_file"
fi
local sketchnum=0
args+=("-ai" "$ide_path" "-au" "$user_path" "-i" "$chunk_index")
if [ -n "$log_compilation" ]; then
args+=("-l" "$log_compilation")
fi
for sketch in $sketches; do
local sketchdir
local sketchdirname
sketchdir=$(dirname "$sketch")
sketchdirname=$(basename "$sketchdir")
sketchnum=$((sketchnum + 1))
if [ "$sketchnum" -le "$start_index" ] \
|| [ "$sketchnum" -gt "$end_index" ]; then
continue
fi
echo ""
echo "Building Sketch Index $sketchnum - $sketchdirname"
build_sketch "${args[@]}" -s "$sketchdir" "${xtra_opts[@]}"
local result=$?
if [ $result -ne 0 ]; then
return $result
fi
done
if [ -n "$log_compilation" ]; then
#remove last comma from json
if [ "$i" -eq $((len - 1)) ]; then
sed -i '$ s/.$//' "$sizes_file"
fi
#echo end of sketches sizes_file json
echo "]" >> "$sizes_file"
#echo end of board sizes_file json
echo "}," >> "$sizes_file"
fi
return 0
}
USAGE="
USAGE: ${0} [command] [options]
Available commands:
count: Count sketches.
build: Build a sketch.
chunk_build: Build a chunk of sketches.
check_requirements: Check if target meets sketch requirements.
"
cmd=$1
shift
if [ -z "$cmd" ]; then
echo "ERROR: No command supplied"
echo "$USAGE"
exit 2
fi
case "$cmd" in
"count") count_sketches "$@"
;;
"build") build_sketch "$@"
;;
"chunk_build") build_sketches "$@"
;;
"check_requirements") check_requirements "$@"
;;
*)
echo "ERROR: Unrecognized command"
echo "$USAGE"
exit 2
esac

View file

@ -1,80 +0,0 @@
#!/bin/bash
USAGE="
USAGE:
${0} -c -type <test_type> <chunk_build_opts>
Example: ${0} -c -type validation -t esp32 -i 0 -m 15
${0} -s sketch_name <build_opts>
Example: ${0} -s hello_world -t esp32
${0} -clean
Remove build and test generated files
"
function clean {
rm -rf tests/.pytest_cache
find tests/ -type d -name 'build*' -exec rm -rf "{}" \+
find tests/ -type d -name '__pycache__' -exec rm -rf "{}" \+
find tests/ -name '*.xml' -exec rm -rf "{}" \+
find tests/ -name 'result_*.json' -exec rm -rf "{}" \+
}
SCRIPTS_DIR="./.github/scripts"
BUILD_CMD=""
chunk_build=0
while [ -n "$1" ]; do
case $1 in
-c )
chunk_build=1
;;
-s )
shift
sketch=$1
;;
-h )
echo "$USAGE"
exit 0
;;
-type )
shift
test_type=$1
;;
-clean )
clean
exit 0
;;
* )
break
;;
esac
shift
done
source "${SCRIPTS_DIR}/install-arduino-cli.sh"
source "${SCRIPTS_DIR}/install-arduino-core-esp32.sh"
args=("-ai" "$ARDUINO_IDE_PATH" "-au" "$ARDUINO_USR_PATH")
if [[ $test_type == "all" ]] || [[ -z $test_type ]]; then
if [ -n "$sketch" ]; then
tmp_sketch_path=$(find tests -name "$sketch".ino)
test_type=$(basename "$(dirname "$(dirname "$tmp_sketch_path")")")
echo "Sketch $sketch test type: $test_type"
test_folder="$PWD/tests/$test_type"
else
test_folder="$PWD/tests"
fi
else
test_folder="$PWD/tests/$test_type"
fi
if [ $chunk_build -eq 1 ]; then
BUILD_CMD="${SCRIPTS_DIR}/sketch_utils.sh chunk_build"
args+=("-p" "$test_folder" "-i" "0" "-m" "1")
else
BUILD_CMD="${SCRIPTS_DIR}/sketch_utils.sh build"
args+=("-s" "$test_folder/$sketch")
fi
${BUILD_CMD} "${args[@]}" "$@"

View file

@ -1,28 +0,0 @@
#!/bin/bash
build_types="'validation'"
hw_types="'validation'"
wokwi_types="'validation'"
qemu_types="'validation'"
if [[ $IS_PR != 'true' ]] || [[ $PERFORMANCE_ENABLED == 'true' ]]; then
build_types+=",'performance'"
hw_types+=",'performance'"
#wokwi_types+=",'performance'"
#qemu_types+=",'performance'"
fi
targets="'esp32','esp32s2','esp32s3','esp32c3','esp32c6','esp32h2','esp32p4'"
mkdir -p info
echo "[$wokwi_types]" > info/wokwi_types.txt
echo "[$targets]" > info/targets.txt
{
echo "build-types=[$build_types]"
echo "hw-types=[$hw_types]"
echo "wokwi-types=[$wokwi_types]"
echo "qemu-types=[$qemu_types]"
echo "targets=[$targets]"
} >> "$GITHUB_OUTPUT"

View file

@ -1,290 +0,0 @@
#!/bin/bash
function run_test {
local target=$1
local sketch=$2
local options=$3
local erase_flash=$4
local sketchdir
local sketchname
local result=0
local error=0
local sdkconfig_path
local extra_args
local test_type
sketchdir=$(dirname "$sketch")
sketchname=$(basename "$sketchdir")
test_type=$(basename "$(dirname "$sketchdir")")
if [ "$options" -eq 0 ] && [ -f "$sketchdir"/ci.json ]; then
len=$(jq -r --arg target "$target" '.fqbn[$target] | length' "$sketchdir"/ci.json)
if [ "$len" -eq 0 ]; then
len=1
fi
else
len=1
fi
if [ "$len" -eq 1 ]; then
sdkconfig_path="$HOME/.arduino/tests/$target/$sketchname/build.tmp/sdkconfig"
else
sdkconfig_path="$HOME/.arduino/tests/$target/$sketchname/build0.tmp/sdkconfig"
fi
if [ -f "$sketchdir"/ci.json ]; then
# If the target or platform is listed as false, skip the sketch. Otherwise, include it.
is_target=$(jq -r --arg target "$target" '.targets[$target]' "$sketchdir"/ci.json)
selected_platform=$(jq -r --arg platform "$platform" '.platforms[$platform]' "$sketchdir"/ci.json)
if [[ $is_target == "false" ]] || [[ $selected_platform == "false" ]]; then
printf "\033[93mSkipping %s test for %s, platform: %s\033[0m\n" "$sketchname" "$target" "$platform"
printf "\n\n\n"
return 0
fi
fi
if [ ! -f "$sdkconfig_path" ]; then
printf "\033[93mSketch %s build not found in %s\nMight be due to missing target requirements or build failure\033[0m\n" "$(dirname "$sdkconfig_path")" "$sketchname"
printf "\n\n\n"
return 0
fi
local compiled_target
compiled_target=$(grep -E "CONFIG_IDF_TARGET=" "$sdkconfig_path" | cut -d'"' -f2)
if [ "$compiled_target" != "$target" ]; then
printf "\033[91mError: Sketch %s compiled for %s, expected %s\033[0m\n" "$sketchname" "$compiled_target" "$target"
printf "\n\n\n"
return 1
fi
if [ "$len" -eq 1 ]; then
# build_dir="$sketchdir/build"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build.tmp"
report_file="$sketchdir/$target/$sketchname.xml"
fi
for i in $(seq 0 $((len - 1))); do
fqbn="Default"
if [ "$len" -ne 1 ]; then
fqbn=$(jq -r --arg target "$target" --argjson i "$i" '.fqbn[$target] | sort | .[$i]' "$sketchdir"/ci.json)
elif [ -f "$sketchdir"/ci.json ]; then
has_fqbn=$(jq -r --arg target "$target" '.fqbn[$target]' "$sketchdir"/ci.json)
if [ "$has_fqbn" != "null" ]; then
fqbn=$(jq -r --arg target "$target" '.fqbn[$target] | .[0]' "$sketchdir"/ci.json)
fi
fi
printf "\033[95mRunning test: %s -- Config: %s\033[0m\n" "$sketchname" "$fqbn"
if [ "$erase_flash" -eq 1 ]; then
esptool.py -c "$target" erase_flash
fi
if [ "$len" -ne 1 ]; then
# build_dir="$sketchdir/build$i"
build_dir="$HOME/.arduino/tests/$target/$sketchname/build$i.tmp"
report_file="$sketchdir/$target/$sketchname$i.xml"
fi
if [ $platform == "wokwi" ]; then
extra_args=("--target" "$target" "--embedded-services" "arduino,wokwi" "--wokwi-timeout=$wokwi_timeout")
if [[ -f "$sketchdir/scenario.yaml" ]]; then
extra_args+=("--wokwi-scenario" "$sketchdir/scenario.yaml")
fi
if [[ -f "$sketchdir/diagram.$target.json" ]]; then
extra_args+=("--wokwi-diagram" "$sketchdir/diagram.$target.json")
fi
elif [ $platform == "qemu" ]; then
PATH=$HOME/qemu/bin:$PATH
extra_args=("--embedded-services" "qemu" "--qemu-image-path" "$build_dir/$sketchname.ino.merged.bin")
if [ "$target" == "esp32" ] || [ "$target" == "esp32s3" ]; then
extra_args+=("--qemu-prog-path" "qemu-system-xtensa" "--qemu-cli-args=\"-machine $target -m 4M -nographic\"")
elif [ "$target" == "esp32c3" ]; then
extra_args+=("--qemu-prog-path" "qemu-system-riscv32" "--qemu-cli-args=\"-machine $target -icount 3 -nographic\"")
else
printf "\033[91mUnsupported QEMU target: %s\033[0m\n" "$target"
exit 1
fi
else
extra_args=("--embedded-services" "esp,arduino")
fi
rm "$sketchdir"/diagram.json 2>/dev/null || true
result=0
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" -o junit_suite_name=%s_%s_%s_%s%s %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "$test_type" "$platform" "$target" "$sketchname" "$i" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" -o junit_suite_name=${test_type}_${platform}_${target}_${sketchname}${i} ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\n"
if [ $result -ne 0 ]; then
result=0
printf "\033[95mRetrying test: %s -- Config: %s\033[0m\n" "$sketchname" "$i"
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" -o junit_suite_name=%s_%s_%s_%s%s %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "$test_type" "$platform" "$target" "$sketchname" "$i" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" -o junit_suite_name=${test_type}_${platform}_${target}_${sketchname}${i} ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\n"
if [ $result -ne 0 ]; then
printf "\033[91mFailed test: %s -- Config: %s\033[0m\n\n" "$sketchname" "$i"
error=$result
fi
fi
done
return $error
}
SCRIPTS_DIR="./.github/scripts"
COUNT_SKETCHES="${SCRIPTS_DIR}/sketch_utils.sh count"
platform="hardware"
wokwi_timeout=60000
chunk_run=0
options=0
erase=0
while [ -n "$1" ]; do
case $1 in
-c )
chunk_run=1
;;
-Q )
if [ ! -d "$QEMU_PATH" ]; then
echo "QEMU path $QEMU_PATH does not exist"
exit 1
fi
platform="qemu"
;;
-W )
shift
wokwi_timeout=$1
if [[ -z $WOKWI_CLI_TOKEN ]]; then
echo "Wokwi CLI token is not set"
exit 1
fi
platform="wokwi"
;;
-o )
options=1
;;
-s )
shift
sketch=$1
;;
-t )
shift
target=$1
;;
-i )
shift
chunk_index=$1
;;
-m )
shift
chunk_max=$1
;;
-e )
erase=1
;;
-h )
echo "$USAGE"
exit 0
;;
-type )
shift
test_type=$1
;;
* )
break
;;
esac
shift
done
if [ ! $platform == "qemu" ]; then
source "${SCRIPTS_DIR}/install-arduino-ide.sh"
fi
# If sketch is provided and test type is not, test type is inferred from the sketch path
if [[ $test_type == "all" ]] || [[ -z $test_type ]]; then
if [ -n "$sketch" ]; then
tmp_sketch_path=$(find tests -name "$sketch".ino)
test_type=$(basename "$(dirname "$(dirname "$tmp_sketch_path")")")
echo "Sketch $sketch test type: $test_type"
test_folder="$PWD/tests/$test_type"
else
test_folder="$PWD/tests"
fi
else
test_folder="$PWD/tests/$test_type"
fi
if [ $chunk_run -eq 0 ]; then
if [ -z "$sketch" ]; then
echo "ERROR: Sketch name is required for single test run"
exit 1
fi
run_test "$target" "$test_folder"/"$sketch"/"$sketch".ino $options $erase
exit $?
else
if [ "$chunk_max" -le 0 ]; then
echo "ERROR: Chunks count must be positive number"
exit 1
fi
if [ "$chunk_index" -ge "$chunk_max" ] && [ "$chunk_max" -ge 2 ]; then
echo "ERROR: Chunk index must be less than chunks count"
exit 1
fi
set +e
# Ignore requirements as we don't have the libs. The requirements will be checked in the run_test function
${COUNT_SKETCHES} "$test_folder" "$target" "1"
sketchcount=$?
set -e
sketches=$(cat sketches.txt)
rm -rf sketches.txt
chunk_size=$(( sketchcount / chunk_max ))
all_chunks=$(( chunk_max * chunk_size ))
if [ "$all_chunks" -lt "$sketchcount" ]; then
chunk_size=$(( chunk_size + 1 ))
fi
start_index=0
end_index=0
if [ "$chunk_index" -ge "$chunk_max" ]; then
start_index=$chunk_index
end_index=$sketchcount
else
start_index=$(( chunk_index * chunk_size ))
if [ "$sketchcount" -le "$start_index" ]; then
exit 0
fi
end_index=$(( $(( chunk_index + 1 )) * chunk_size ))
if [ "$end_index" -gt "$sketchcount" ]; then
end_index=$sketchcount
fi
fi
sketchnum=0
error=0
for sketch in $sketches; do
sketchnum=$((sketchnum + 1))
if [ "$sketchnum" -le "$start_index" ] \
|| [ "$sketchnum" -gt "$end_index" ]; then
continue
fi
printf "\033[95mSketch Index %s\033[0m\n" "$((sketchnum - 1))"
exit_code=0
run_test "$target" "$sketch" $options $erase || exit_code=$?
if [ $exit_code -ne 0 ]; then
error=$exit_code
fi
done
exit $error
fi

View file

@ -1,67 +0,0 @@
#!/bin/bash
# Disable shellcheck warning about using 'cat' to read a file.
# shellcheck disable=SC2002
# For reference: add tools for all boards by replacing one line in each board
# "[board].upload.tool=esptool_py" to "[board].upload.tool=esptool_py\n[board].upload.tool.default=esptool_py\n[board].upload.tool.network=esp_ota"
#cat boards.txt | sed "s/\([a-zA-Z0-9_\-]*\)\.upload\.tool\=esptool_py/\1\.upload\.tool\=esptool_py\\n\1\.upload\.tool\.default\=esptool_py\\n\1\.upload\.tool\.network\=esp_ota/"
if [ ! $# -eq 3 ]; then
echo "Bad number of arguments: $#" >&2
echo "usage: $0 <major> <minor> <patch>" >&2
exit 1
fi
re='^[0-9]+$'
if [[ ! $1 =~ $re ]] || [[ ! $2 =~ $re ]] || [[ ! $3 =~ $re ]] ; then
echo "error: Not a valid version: $1.$2.$3" >&2
echo "usage: $0 <major> <minor> <patch>" >&2
exit 1
fi
ESP_ARDUINO_VERSION_MAJOR="$1"
ESP_ARDUINO_VERSION_MINOR="$2"
ESP_ARDUINO_VERSION_PATCH="$3"
ESP_ARDUINO_VERSION="$ESP_ARDUINO_VERSION_MAJOR.$ESP_ARDUINO_VERSION_MINOR.$ESP_ARDUINO_VERSION_PATCH"
# Get ESP-IDF version from push.yml (this way we can ensure that the version is correct even if the local libs are not up to date)
ESP_IDF_VERSION=$(grep "idf_ver:" .github/workflows/push.yml | sed 's/.*release-v\([^"]*\).*/\1/')
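# Example (illustrative): a push.yml line such as idf_ver: "release-v5.5" yields ESP_IDF_VERSION=5.5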
if [ -z "$ESP_IDF_VERSION" ]; then
echo "Error: ESP-IDF version not found in push.yml" >&2
exit 1
fi
echo "New Arduino Version: $ESP_ARDUINO_VERSION"
echo "ESP-IDF Version: $ESP_IDF_VERSION"
echo "Updating platform.txt..."
cat platform.txt | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > __platform.txt && mv __platform.txt platform.txt
echo "Updating package.json..."
cat package.json | sed "s/.*\"version\":.*/ \"version\": \"$ESP_ARDUINO_VERSION\",/g" > __package.json && mv __package.json package.json
echo "Updating docs/conf_common.py..."
cat docs/conf_common.py | \
sed "s/.. |version| replace:: .*/.. |version| replace:: $ESP_ARDUINO_VERSION/g" | \
sed "s/.. |idf_version| replace:: .*/.. |idf_version| replace:: $ESP_IDF_VERSION/g" > docs/__conf_common.py && mv docs/__conf_common.py docs/conf_common.py
echo "Updating .gitlab/workflows/common.yml..."
cat .gitlab/workflows/common.yml | \
sed "s/ESP_IDF_VERSION:.*/ESP_IDF_VERSION: \"$ESP_IDF_VERSION\"/g" | \
sed "s/ESP_ARDUINO_VERSION:.*/ESP_ARDUINO_VERSION: \"$ESP_ARDUINO_VERSION\"/g" > .gitlab/workflows/__common.yml && mv .gitlab/workflows/__common.yml .gitlab/workflows/common.yml
echo "Updating cores/esp32/esp_arduino_version.h..."
cat cores/esp32/esp_arduino_version.h | \
sed "s/#define ESP_ARDUINO_VERSION_MAJOR.*/#define ESP_ARDUINO_VERSION_MAJOR $ESP_ARDUINO_VERSION_MAJOR/g" | \
sed "s/#define ESP_ARDUINO_VERSION_MINOR.*/#define ESP_ARDUINO_VERSION_MINOR $ESP_ARDUINO_VERSION_MINOR/g" | \
sed "s/#define ESP_ARDUINO_VERSION_PATCH.*/#define ESP_ARDUINO_VERSION_PATCH $ESP_ARDUINO_VERSION_PATCH/g" > __esp_arduino_version.h && mv __esp_arduino_version.h cores/esp32/esp_arduino_version.h
libraries=$(find libraries -maxdepth 1 -mindepth 1 -type d -exec basename {} \;)
for lib in $libraries; do
if [ -f "libraries/$lib/library.properties" ]; then
echo "Updating Library $lib..."
cat "libraries/$lib/library.properties" | sed "s/version=.*/version=$ESP_ARDUINO_VERSION/g" > "libraries/$lib/__library.properties" && mv "libraries/$lib/__library.properties" "libraries/$lib/library.properties"
fi
done
exit 0

View file

@ -1,236 +0,0 @@
#!/usr/bin/env python3
# This script is used to re-package the esptool if needed and update the JSON file
# for the Arduino ESP32 platform.
#
# The script has only been tested on macOS.
#
# For regular esptool releases, the generated packages already contain the correct permissions,
# extensions and are uploaded to the GitHub release assets. In this case, the script will only
# update the JSON file with the information from the GitHub release.
#
# The script can be used in two modes:
# 1. Local build: The build artifacts must be already downloaded and extracted in the base_folder.
# This is useful for esptool versions that are not yet released and that are grabbed from the
# GitHub build artifacts.
# 2. Release build: The script will get the release information from GitHub and update the JSON file.
# This is useful for esptool versions that are already released and that are uploaded to the
# GitHub release assets.
#
# For local build, the artifacts must be already downloaded and extracted in the base_folder
# set with the -l option.
# For example, a base folder "esptool" should contain the following folders extracted directly
# from the GitHub build artifacts:
# esptool/esptool-linux-aarch64
# esptool/esptool-linux-amd64
# esptool/esptool-linux-armv7
# esptool/esptool-macos-amd64
# esptool/esptool-macos-arm64
# esptool/esptool-windows-amd64
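# Example usage (illustrative; the script file name is assumed):
#   python repack_esptool.py 5.0.0                # release mode: read asset info from the GitHub release
#   python repack_esptool.py 5.0.dev1 -l esptool  # local mode: archive artifacts found under ./esptool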
import argparse
import json
import os
import shutil
import stat
import tarfile
import zipfile
import hashlib
import requests
from pathlib import Path
def compute_sha256(filepath):
sha256 = hashlib.sha256()
with open(filepath, "rb") as f:
for block in iter(lambda: f.read(4096), b""):
sha256.update(block)
return f"SHA-256:{sha256.hexdigest()}"
def get_file_size(filepath):
return os.path.getsize(filepath)
def update_json_for_host(tmp_json_path, version, host, url, archiveFileName, checksum, size):
with open(tmp_json_path) as f:
data = json.load(f)
for pkg in data.get("packages", []):
for tool in pkg.get("tools", []):
if tool.get("name") == "esptool_py":
tool["version"] = version
if url is None:
# If the URL is not set, we need to find the old URL and update it
for system in tool.get("systems", []):
if system.get("host") == host:
url = system.get("url").replace(system.get("archiveFileName"), archiveFileName)
break
else:
print(f"No old URL found for host {host}. Using empty URL.")
url = ""
# Preserve existing systems order and update or append the new system
systems = tool.get("systems", [])
system_updated = False
for i, system in enumerate(systems):
if system.get("host") == host:
systems[i] = {
"host": host,
"url": url,
"archiveFileName": archiveFileName,
"checksum": checksum,
"size": str(size),
}
system_updated = True
break
if not system_updated:
systems.append({
"host": host,
"url": url,
"archiveFileName": archiveFileName,
"checksum": checksum,
"size": str(size),
})
tool["systems"] = systems
with open(tmp_json_path, "w") as f:
json.dump(data, f, indent=2, sort_keys=False, ensure_ascii=False)
f.write("\n")
def update_tools_dependencies(tmp_json_path, version):
with open(tmp_json_path) as f:
data = json.load(f)
for pkg in data.get("packages", []):
for platform in pkg.get("platforms", []):
for dep in platform.get("toolsDependencies", []):
if dep.get("name") == "esptool_py":
dep["version"] = version
with open(tmp_json_path, "w") as f:
json.dump(data, f, indent=2, sort_keys=False, ensure_ascii=False)
f.write("\n")
def create_archives(version, base_folder):
archive_files = []
for dirpath in Path(base_folder).glob("esptool-*"):
if not dirpath.is_dir():
continue
base = dirpath.name[len("esptool-"):]
if "windows" in dirpath.name:
zipfile_name = f"esptool-v{version}-{base}.zip"
print(f"Creating {zipfile_name} from {dirpath} ...")
with zipfile.ZipFile(zipfile_name, "w", zipfile.ZIP_DEFLATED) as zipf:
for root, _, files in os.walk(dirpath):
for file in files:
full_path = os.path.join(root, file)
zipf.write(full_path, os.path.relpath(full_path, start=dirpath))
archive_files.append(zipfile_name)
else:
tarfile_name = f"esptool-v{version}-{base}.tar.gz"
print(f"Creating {tarfile_name} from {dirpath} ...")
for root, dirs, files in os.walk(dirpath):
for name in dirs + files:
os.chmod(os.path.join(root, name), stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
stat.S_IRGRP | stat.S_IXGRP |
stat.S_IROTH | stat.S_IXOTH)
with tarfile.open(tarfile_name, "w:gz") as tar:
tar.add(dirpath, arcname=dirpath.name)
archive_files.append(tarfile_name)
return archive_files
def determine_hosts(archive_name):
if "linux-amd64" in archive_name:
return ["x86_64-pc-linux-gnu"]
elif "linux-armv7" in archive_name:
return ["arm-linux-gnueabihf"]
elif "linux-aarch64" in archive_name:
return ["aarch64-linux-gnu"]
elif "macos-amd64" in archive_name:
return ["x86_64-apple-darwin"]
elif "macos-arm64" in archive_name:
return ["arm64-apple-darwin"]
elif "windows-amd64" in archive_name:
return ["x86_64-mingw32", "i686-mingw32"]
else:
return []
def update_json_from_local_build(tmp_json_path, version, base_folder, archive_files):
for archive in archive_files:
print(f"Processing archive: {archive}")
hosts = determine_hosts(archive)
if not hosts:
print(f"Skipping unknown archive type: {archive}")
continue
archive_path = Path(archive)
checksum = compute_sha256(archive_path)
size = get_file_size(archive_path)
for host in hosts:
update_json_for_host(tmp_json_path, version, host, None, archive_path.name, checksum, size)
def update_json_from_release(tmp_json_path, version, release_info):
assets = release_info.get("assets", [])
for asset in assets:
if (asset.get("name").endswith(".tar.gz") or asset.get("name").endswith(".zip")) and "esptool" in asset.get("name"):
asset_fname = asset.get("name")
print(f"Processing asset: {asset_fname}")
hosts = determine_hosts(asset_fname)
if not hosts:
print(f"Skipping unknown archive type: {asset_fname}")
continue
asset_url = asset.get("browser_download_url")
asset_checksum = asset.get("digest").replace("sha256:", "SHA-256:")
asset_size = asset.get("size")
if asset_checksum is None:
asset_checksum = ""
print(f"Asset {asset_fname} has no checksum. Please set the checksum in the JSON file.")
for host in hosts:
update_json_for_host(tmp_json_path, version, host, asset_url, asset_fname, asset_checksum, asset_size)
def get_release_info(version):
url = f"https://api.github.com/repos/espressif/esptool/releases/tags/v{version}"
response = requests.get(url)
response.raise_for_status()
return response.json()
def main():
parser = argparse.ArgumentParser(description="Repack esptool and update JSON metadata.")
parser.add_argument("version", help="Version of the esptool (e.g. 5.0.dev1)")
parser.add_argument("-l", "--local", dest="base_folder", help="Enable local build mode and set the base folder with unpacked artifacts")
args = parser.parse_args()
script_dir = Path(__file__).resolve().parent
json_path = (script_dir / "../../package/package_esp32_index.template.json").resolve()
tmp_json_path = Path(str(json_path) + ".tmp")
shutil.copy(json_path, tmp_json_path)
local_build = args.base_folder is not None
if local_build:
os.chdir(args.base_folder)
os.environ['COPYFILE_DISABLE'] = 'true' # this disables including resource forks in tar files on macOS
# Clear any existing archive files
for file in Path(args.base_folder).glob("esptool-*.*"):
file.unlink()
archive_files = create_archives(args.version, args.base_folder)
update_json_from_local_build(tmp_json_path, args.version, args.base_folder, archive_files)
else:
release_info = get_release_info(args.version)
update_json_from_release(tmp_json_path, args.version, release_info)
print(f"Updating esptool version fields to {args.version}")
update_tools_dependencies(tmp_json_path, args.version)
shutil.move(tmp_json_path, json_path)
print(f"Done. JSON updated at {json_path}")
if __name__ == "__main__":
main()

View file

@ -1,12 +0,0 @@
#!/bin/bash
CHANGED_FILES=$1
echo "Pushing '$CHANGED_FILES' as github-actions[bot]"
git config --global github.user "github-actions[bot]"
git config --global user.name "github-actions[bot]"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
for tool in $CHANGED_FILES; do
git add tools/"$tool".exe
done
git commit -m "change(tools): Push generated binaries to PR"
git push

View file

@ -1,88 +0,0 @@
name: Boards Test - Remote trigger
# The workflow will run on remote dispatch with event-type set to "test-boards"
on:
repository_dispatch:
types: [test-boards]
jobs:
find-boards:
runs-on: ubuntu-latest
outputs:
fqbns: ${{ env.FQBNS }}
board-count: ${{ env.BOARD-COUNT }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.client_payload.branch }}
- name: Get boards fqbns
run: bash .github/scripts/find_all_boards.sh
setup-chunks:
needs: find-boards
runs-on: ubuntu-latest
if: needs.find-boards.outputs.fqbns != ''
outputs:
test-chunks: ${{ steps['set-test-chunks'].outputs['test-chunks'] }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.client_payload.branch }}
- run: npm install
- name: Setup jq
uses: dcarbone/install-jq-action@e397bd87438d72198f81efd21f876461183d383a # v3.0.1
- id: set-test-chunks
name: Set Chunks
run: echo "test-chunks<<EOF" >> $GITHUB_OUTPUT
echo "$( jq -nc '${{ needs.find-boards.outputs.fqbns }} | [_nwise( ${{ needs.find-boards.outputs.board-count }}/15 | ceil)]')" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
test-boards:
needs: setup-chunks
runs-on: ubuntu-latest
env:
REPOSITORY: |
- source-path: '.'
name: "espressif:esp32"
strategy:
fail-fast: false
matrix:
chunk: ${{ fromJSON(needs.setup-chunks.outputs['test-chunks']) }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.client_payload.branch }}
- name: Echo FQBNS to file
run: echo "$FQBN" > fqbns.json
env:
FQBN: ${{ toJSON(matrix.chunk) }}
- name: Compile sketch
uses: P-R-O-C-H-Y/compile-sketches@a62f069b92dc8f5053da4ac439ea6d1950cf6379 # main
with:
platforms: |
${{ env.REPOSITORY }}
multiple-fqbn: true
multiple-fqbn-path: "fqbns.json"
use-json-file: false
enable-deltas-report: false
enable-warnings-report: false
cli-compile-flags: |
- --warnings="all"
sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"

View file

@ -1,88 +0,0 @@
name: Boards Test
# The workflow will run on schedule and labeled pull requests
on:
pull_request:
paths:
- "boards.txt"
- "libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
- ".github/workflows/boards.yml"
env:
# It's convenient to set variables for values used multiple times in the workflow
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
jobs:
find-boards:
runs-on: ubuntu-latest
outputs:
fqbns: ${{ env.FQBNS }}
steps:
# This step makes the contents of the repository available to the workflow
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup jq
uses: dcarbone/install-jq-action@e397bd87438d72198f81efd21f876461183d383a # v3.0.1
- name: Get board name
run: bash .github/scripts/find_new_boards.sh ${{ github.repository }} ${{github.base_ref}}
test-boards:
needs: find-boards
runs-on: ubuntu-latest
if: needs.find-boards.outputs.fqbns != ''
env:
REPOSITORY: |
- source-path: '.'
name: "espressif:esp32"
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.find-boards.outputs.fqbns) }}
steps:
# This step makes the contents of the repository available to the workflow
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Check if build.board is uppercase
run: |
board_name=$(echo ${{ matrix.fqbn }} | awk -F':' '{print $NF}')
if grep -q "^$board_name.build.board=[A-Z0-9_]*$" boards.txt; then
echo "$board_name.build.board is valid.";
else
echo "Error: $board_name.build.board is not uppercase!";
exit 1;
fi
- name: Get libs cache
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
path: |
./tools/dist
./tools/esp32-arduino-libs
./tools/esptool
./tools/mk*
./tools/openocd-esp32
./tools/riscv32-*
./tools/xtensa-*
- name: Compile sketch
uses: P-R-O-C-H-Y/compile-sketches@a62f069b92dc8f5053da4ac439ea6d1950cf6379 # main
with:
platforms: |
${{ env.REPOSITORY }}
fqbn: ${{ matrix.fqbn }}
use-json-file: false
enable-deltas-report: false
enable-warnings-report: false
cli-compile-flags: |
- --warnings="all"
exit-on-fail: true
sketch-paths: "- ./libraries/ESP32/examples/CI/CIBoardsTest/CIBoardsTest.ino"
verbose: true

View file

@ -1,151 +0,0 @@
name: Build Python Tools
on:
pull_request:
paths:
- ".github/workflows/build_py_tools.yml"
- "tools/get.py"
- "tools/espota.py"
- "tools/gen_esp32part.py"
- "tools/gen_insights_package.py"
jobs:
find-changed-tools:
name: Check if tools have been changed
runs-on: ubuntu-latest
outputs:
any_changed: ${{ steps.verify-changed-files.outputs.any_changed }}
all_changed_files: ${{ steps.verify-changed-files.outputs.all_changed_files }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 2
ref: ${{ github.event.pull_request.head.ref }}
- name: Check if checkout failed
if: failure()
run: |
echo "Checkout failed."
echo "Make sure you are using a branch inside the repository and not a fork."
- name: Verify Python Tools Changed
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
id: verify-changed-files
with:
fetch_depth: "2"
since_last_remote_commit: "true"
files: |
tools/get.py
tools/espota.py
tools/gen_esp32part.py
tools/gen_insights_package.py
- name: List all changed files
shell: bash
run: |
for file in ${{ steps.verify-changed-files.outputs.all_changed_files }}; do
echo "$file was changed"
done
build-pytools-binaries:
name: Build python tools binaries for ${{ matrix.os }}
runs-on: ${{ matrix.os }}
needs: find-changed-tools
if: needs.find-changed-tools.outputs.any_changed == 'true'
strategy:
fail-fast: false
matrix:
os: [windows-latest, macos-latest, ubuntu-latest, ubuntu-24.04-arm]
include:
- os: windows-latest
TARGET: win64
EXTEN: .exe
SEPARATOR: ";"
- os: macos-latest
TARGET: macos
SEPARATOR: ":"
- os: ubuntu-latest
TARGET: linux-amd64
SEPARATOR: ":"
- os: ubuntu-24.04-arm
TARGET: arm
SEPARATOR: ":"
env:
DISTPATH: pytools-${{ matrix.TARGET }}
PIP_EXTRA_INDEX_URL: "https://dl.espressif.com/pypi"
steps:
- name: List changed tools
shell: bash
run: |
CHANGED_FILES=()
for file in ${{ needs.find-changed-tools.outputs.all_changed_files }}; do
file="${file#*\/}"
file="${file%\.*}"
CHANGED_FILES+=("$file")
done
CHANGED_FILES="${CHANGED_FILES[@]}"
echo "CHANGED_TOOLS=$CHANGED_FILES" >> "$GITHUB_ENV"
for tool in $CHANGED_FILES; do
echo "tool $tool was changed"
done
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
token: ${{ secrets.TOOLS_UPLOAD_PAT }}
ref: ${{ github.event.pull_request.head.ref }}
- name: Set up Python 3.8
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pyinstaller requests
- name: Build with PyInstaller
shell: bash
run: |
for tool in ${{ env.CHANGED_TOOLS }}; do
pyinstaller --distpath ./${{ env.DISTPATH }} -F --icon=.github/pytools/espressif.ico tools/$tool.py
done
- name: Sign binaries
if: matrix.os == 'windows-latest'
env:
CERTIFICATE: ${{ secrets.CERTIFICATE }}
CERTIFICATE_PASSWORD: ${{ secrets.CERTIFICATE_PASSWORD }}
shell: pwsh
run: |
$data = Write-Output ${{ env.CHANGED_TOOLS }}
foreach ( $node in $data )
{
./.github/pytools/Sign-File.ps1 -Path ./${{ env.DISTPATH }}/$node.exe
}
- name: Test binaries
shell: bash
run: |
for tool in ${{ env.CHANGED_TOOLS }}; do
./${{ env.DISTPATH }}/$tool${{ matrix.EXTEN }} -h
done
- name: Push binary to tools
if: matrix.os == 'windows-latest'
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
shell: bash
run: |
for tool in ${{ env.CHANGED_TOOLS }}; do
cp -f ./${{ env.DISTPATH }}/$tool.exe tools/$tool.exe
done
bash .github/scripts/upload_py_tools.sh "${{ env.CHANGED_TOOLS }}"
- name: Archive artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: ${{ env.DISTPATH }}
path: ${{ env.DISTPATH }}

View file

@ -1,31 +0,0 @@
name: CodeQL Actions Analysis
on:
workflow_dispatch:
push:
branches:
- master
pull_request:
paths:
- ".github/workflows/*.yml"
- ".github/workflows/*.yaml"
jobs:
codeql-analysis:
name: CodeQL Actions Analysis
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: actions
config-file: ./.github/codeql/codeql-config.yml
- name: Run CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: "Analysis: Actions"

View file

@ -1,30 +0,0 @@
name: CodeQL Python Analysis
on:
workflow_dispatch:
push:
branches:
- master
pull_request:
paths:
- "**/*.py"
jobs:
codeql-analysis:
name: CodeQL Python Analysis
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: python
config-file: ./.github/codeql/codeql-config.yml
- name: Run CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: "Analysis: Python"

View file

@ -1,28 +0,0 @@
name: DangerJS Pull Request linter
on:
pull_request_target:
types: [opened, edited, reopened, synchronize]
permissions:
pull-requests: write
contents: write
jobs:
pull-request-style-linter:
runs-on: ubuntu-latest
steps:
- name: Check out PR head
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: DangerJS pull request linter
uses: espressif/shared-github-dangerjs@fb17367fd3e8ff7412603b8e946d9b19ffdb2d7f # v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
instructions-cla-link: "https://cla-assistant.io/espressif/arduino-esp32"
instructions-contributions-file: "docs/en/contributing.rst"
rule-max-commits: "false"
rule-target-branch: "false"
commit-messages-min-summary-length: "10"

View file

@ -1,48 +0,0 @@
name: Documentation Build and Deploy CI
on:
push:
branches:
- master
- release/v2.x
paths:
- "docs/**"
- ".github/workflows/docs_build.yml"
pull_request:
paths:
- "docs/**"
- ".github/workflows/docs_build.yml"
jobs:
build-docs:
name: Build ESP-Docs
runs-on: ubuntu-22.04
defaults:
run:
shell: bash
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
submodules: true
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
cache-dependency-path: docs/requirements.txt
cache: "pip"
python-version: "3.10"
- name: Build
run: |
sudo apt update
sudo apt install python3-pip python3-setuptools
# GitHub CI installs pip3 and setuptools outside the path.
# Update the path to include them and run.
cd ./docs
PATH=/home/runner/.local/bin:$PATH pip3 install -r requirements.txt --prefer-binary
PATH=/home/runner/.local/bin:$PATH SPHINXOPTS="-W" build-docs -l en
- name: Archive Docs
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: docs
path: docs

View file

@ -1,62 +0,0 @@
name: Documentation Build and Production Deploy CI
on:
workflow_run:
workflows: ["ESP32 Arduino Release"]
types:
- completed
push:
branches:
- release/v2.x
- master
paths:
- "docs/**"
- ".github/workflows/docs_deploy.yml"
jobs:
deploy-prod-docs:
name: Deploy Documentation on Production
runs-on: ubuntu-22.04
defaults:
run:
shell: bash
steps:
- name: Check if release workflow is successful
if: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.conclusion != 'success' }}
run: |
echo "Release workflow failed. Exiting..."
exit 1
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
submodules: true
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
cache-dependency-path: docs/requirements.txt
cache: "pip"
python-version: "3.10"
- name: Deploy Documentation
env:
# Deploy to production server
# DOCS_BUILD_DIR: "./docs/_build/"
DOCS_DEPLOY_PRIVATEKEY: ${{ secrets.DOCS_KEY }}
DOCS_DEPLOY_PATH: ${{ secrets.DOCS_PATH }}
DOCS_DEPLOY_SERVER: ${{ secrets.DOCS_SERVER }}
DOCS_DEPLOY_SERVER_USER: ${{ secrets.DOCS_USER }}
DOCS_DEPLOY_URL_BASE: ${{ secrets.DOCS_URL }}
run: |
sudo apt update
sudo apt install python3-pip python3-setuptools
source ./docs/utils.sh
add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER
export GIT_VER=$(git describe --always)
echo "PIP install requirements..."
pip3 install --user -r ./docs/requirements.txt
echo "Building the Docs..."
cd ./docs && build-docs -l en
echo "Deploy the Docs..."
export DOCS_BUILD_DIR=$GITHUB_WORKSPACE/docs/
cd $GITHUB_WORKSPACE/docs
deploy-docs

View file

@ -1,24 +0,0 @@
name: GitHub Pages CI
on:
push:
branches:
- master
- pages
paths:
- "README.md"
- ".github/scripts/on-pages.sh"
- ".github/workflows/gh-pages.yml"
jobs:
build-pages:
name: Build GitHub Pages
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Copy Files
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: bash ./.github/scripts/on-pages.sh

19
.github/workflows/githubci.yml vendored Normal file
View file

@ -0,0 +1,19 @@
name: GitHub Arduino Library CI
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: pre-install
run: bash ./actions_install.sh
- name: test platforms
run: |
python3 build_platform.py uno leonardo mega2560 zero esp8266 esp32 pico_rp2040 feather_m4_express

View file

@ -1,111 +0,0 @@
[
{
"name": "Adafruit NeoPixel",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/Adafruit_NeoPixel/examples/strandtest/strandtest.ino"
]
},
{
"name": "ArduinoBLE",
"exclude_targets": [
"esp32s2",
"esp32p4"
],
"sketch_path": [
"~/Arduino/libraries/ArduinoBLE/examples/Central/Scan/Scan.ino"
]
},
{
"name": "ESP32Servo",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/ESP32Servo/examples/Multiple-Servo-Example-Arduino/Multiple-Servo-Example-Arduino.ino"
]
},
{
"source-url": "https://github.com/ESP32Async/ESPAsyncWebServer.git",
"required-libs": [
{"source-url": "https://github.com/ESP32Async/AsyncTCP.git"}
],
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/ESPAsyncWebServer/examples/Auth/Auth.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/CORS/CORS.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/CaptivePortal/CaptivePortal.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/CatchAllHandler/CatchAllHandler.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ChunkResponse/ChunkResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ChunkRetryResponse/ChunkRetryResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/EndBegin/EndBegin.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Filters/Filters.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/FlashResponse/FlashResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/HeaderManipulation/HeaderManipulation.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Headers/Headers.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Json/Json.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Logging/Logging.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/MessagePack/MessagePack.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Middleware/Middleware.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Params/Params.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/PartitionDownloader/PartitionDownloader.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/PerfTests/PerfTests.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/RateLimit/RateLimit.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Redirect/Redirect.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/RequestContinuation/RequestContinuation.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/RequestContinuationComplete/RequestContinuationComplete.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ResumableDownload/ResumableDownload.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Rewrite/Rewrite.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ServerSentEvents/ServerSentEvents.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/ServerState/ServerState.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/SkipServerMiddleware/SkipServerMiddleware.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/SlowChunkResponse/SlowChunkResponse.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/StaticFile/StaticFile.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Templates/Templates.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/Upload/Upload.ino",
"~/Arduino/libraries/ESPAsyncWebServer/examples/WebSocket/WebSocket.ino"
]
},
{
"name": "EthernetESP32",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/EthernetESP32/examples/LegacyEthernetTest/LegacyEthernetTest.ino",
"~/Arduino/libraries/EthernetESP32/examples/TwoEthernets/TwoEthernets.ino"
]
},
{
"name": "FastLED",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/FastLED/examples/Blink/Blink.ino"
]
},
{
"name": "IRremote",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/IRremote/examples/SendDemo/SendDemo.ino"
]
},
{
"name": "MFRC522",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/MFRC522/examples/ReadUidMultiReader/ReadUidMultiReader.ino"
]
},
{
"name": "WS2812FX",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/WS2812FX/examples/ws2812fx_spi/ws2812fx_spi.ino"
]
},
{
"name": "ZACwire for TSic",
"exclude_targets": [],
"sketch_path": [
"~/Arduino/libraries/ZACwire_for_TSic/examples/ReadingTwoTSICs/ReadingTwoTSICs.ino",
"~/Arduino/libraries/ZACwire_for_TSic/examples/ReadSingleTSIC206/ReadSingleTSIC206.ino"
]
}
]

View file

@ -1,145 +0,0 @@
name: External Libraries Test
# The workflow runs on a schedule and on labeled pull requests
on:
pull_request:
types: [opened, reopened, synchronize, labeled]
# Schedule weekly builds every Sunday at 04:00 UTC
schedule:
- cron: "0 4 * * SUN"
concurrency:
group: libs-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
# It's convenient to set variables for values used multiple times in the workflow
SKETCHES_REPORTS_PATH: libraries-report
SKETCHES_REPORTS_ARTIFACT_NAME: libraries-report
RESULT_LIBRARY_TEST_FILE: LIBRARIES_TEST.md
JSON_LIBRARY_LIST_FILE: .github/workflows/lib.json
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
jobs:
compile-sketch:
if: |
contains(github.event.pull_request.labels.*.name, 'lib_test') ||
(github.event_name == 'schedule' && github.repository == 'espressif/arduino-esp32')
runs-on: ubuntu-latest
env:
REPOSITORY: |
- source-path: '.'
name: "espressif:esp32"
strategy:
matrix:
target:
- esp32
- esp32s2
- esp32c3
- esp32s3
- esp32c6
- esp32h2
- esp32p4
include:
- target: esp32
fqbn: espressif:esp32:esp32
- target: esp32s2
fqbn: espressif:esp32:esp32s2
- target: esp32c3
fqbn: espressif:esp32:esp32c3
- target: esp32s3
fqbn: espressif:esp32:esp32s3
- target: esp32c6
fqbn: espressif:esp32:esp32c6
- target: esp32h2
fqbn: espressif:esp32:esp32h2
- target: esp32p4
fqbn: espressif:esp32:esp32p4
steps:
# This step makes the contents of the repository available to the workflow
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Compile sketch
uses: P-R-O-C-H-Y/compile-sketches@a62f069b92dc8f5053da4ac439ea6d1950cf6379 # main
with:
platforms: |
${{ env.REPOSITORY }}
target: ${{ matrix.target }}
fqbn: ${{ matrix.fqbn }}
use-json-file: true
json-path: ${{ env.JSON_LIBRARY_LIST_FILE }}
enable-deltas-report: true
sketches-report-path: ${{ env.SKETCHES_REPORTS_PATH }}
enable-warnings-report: true
cli-compile-flags: |
- --warnings="all"
- name: Upload artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}-${{ matrix.target }}
path: ${{ env.SKETCHES_REPORTS_PATH }}
report-to-file:
needs: compile-sketch # Wait for the compile job to finish to get the data for the report
if: github.event_name == 'schedule' # Only run the job when the workflow is triggered by a schedule
runs-on: ubuntu-latest
steps:
# Check out repository
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
token: ${{ env.GITHUB_TOKEN }}
fetch-depth: "0"
- name: Switch branch
run: git checkout remotes/origin/gh-pages
# This step is needed to get the size data produced by the compile jobs
- name: Download sketches reports artifact
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
pattern: ${{ env.SKETCHES_REPORTS_ARTIFACT_NAME }}-*
merge-multiple: true
path: ${{ env.SKETCHES_REPORTS_PATH }}
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@4a79caa6dcc3579024293638b97156106edc588e # main
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
destination-file: ${{ env.RESULT_LIBRARY_TEST_FILE }}
- name: Append file with action URL
run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_LIBRARY_TEST_FILE }}
- name: Push to github repo
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add ${{ env.RESULT_LIBRARY_TEST_FILE }}
git commit -m "Generated External Libraries Test Results"
git push origin HEAD:gh-pages
# Upload PR number as artifact
upload-pr-number:
name: Upload PR number
if: (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'lib_test'))
runs-on: ubuntu-latest
steps:
- name: Save the PR number in an artifact
shell: bash
env:
PR_NUM: ${{ github.event.number }}
run: echo $PR_NUM > pr_num.txt
- name: Upload PR number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr_number
path: ./pr_num.txt
overwrite: true

View file

@ -1,64 +0,0 @@
# This needs to be in a separate workflow because it requires higher permissions than the calling workflow
name: Report Pre-commit Check Status
on:
workflow_run:
workflows: [Pre-commit hooks]
types:
- completed
permissions:
statuses: write
jobs:
report-success:
name: Report pre-commit success
if: github.event.workflow_run.conclusion == 'success'
runs-on: ubuntu-latest
steps:
- name: Report success
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Pre-commit checks',
description: 'Pre-commit checks successful',
owner: owner,
repo: repo,
sha: sha,
state: 'success',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}'
})).data;
core.info(`${name} is ${state}`);
report-pending:
name: Report pre-commit pending
if: github.event.workflow_run.conclusion != 'success'
runs-on: ubuntu-latest
steps:
- name: Report pending
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Pre-commit checks',
description: 'The pre-commit checks need to be successful before merging',
owner: owner,
repo: repo,
sha: sha,
state: 'pending',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}'
})).data;
core.info(`${name} is ${state}`);

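As the comment at the top of this file notes, commit-status reporting lives in its own workflow_run-triggered workflow so that it can hold statuses: write while the pre-commit workflow itself stays unprivileged. A condensed sketch of that pattern, using only triggers, permissions and API calls that already appear in the file above (the workflow, job and status names are illustrative):

name: Report status for an unprivileged workflow
on:
  workflow_run:
    workflows: [Pre-commit hooks]
    types:
      - completed
permissions:
  statuses: write
jobs:
  report:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/github-script@v7
        with:
          script: |
            // Post the outcome back onto the commit that triggered the monitored workflow
            await github.rest.repos.createCommitStatus({
              owner: context.repo.owner,
              repo: context.repo.repo,
              sha: context.payload.workflow_run.head_sha,
              state: "success",
              context: "Pre-commit checks",
            });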
View file

@ -1,80 +0,0 @@
name: Pre-commit hooks
on:
workflow_dispatch:
push:
branches:
- master
pull_request:
types: [opened, reopened, synchronize, labeled]
concurrency:
group: pre-commit-${{github.event.pull_request.number || github.ref}}
cancel-in-progress: true
jobs:
lint:
if: |
github.event_name != 'pull_request' ||
contains(github.event.pull_request.labels.*.name, 'Status: Pending Merge') ||
contains(github.event.pull_request.labels.*.name, 'Re-trigger Pre-commit Hooks')
name: Check if fixes are needed
runs-on: ubuntu-latest
steps:
- name: Checkout latest commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 2
- name: Remove Label
if: contains(github.event.pull_request.labels.*.name, 'Re-trigger Pre-commit Hooks')
run: gh pr edit ${{ github.event.number }} --remove-label 'Re-trigger Pre-commit Hooks'
env:
GH_TOKEN: ${{ github.token }}
- name: Set up Python 3
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
cache-dependency-path: tools/pre-commit/requirements.txt
cache: "pip"
python-version: "3.x"
- name: Get Python version hash
run: |
echo "Using $(python -VV)"
echo "PY_HASH=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
- name: Restore pre-commit cache
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
id: restore-cache
with:
path: |
~/.cache/pre-commit
key: pre-commit-${{ env.PY_HASH }}-${{ hashFiles('.pre-commit-config.yaml', '.github/workflows/pre-commit.yml', 'tools/pre-commit/requirements.txt') }}
- name: Install python dependencies
run: python -m pip install -r tools/pre-commit/requirements.txt
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
- name: Run pre-commit hooks in changed files
run: pre-commit run --color=always --show-diff-on-failure --files ${{ steps.changed-files.outputs.all_changed_files }}
- name: Save pre-commit cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: ${{ always() && steps.restore-cache.outputs.cache-hit != 'true' }}
continue-on-error: true
with:
path: |
~/.cache/pre-commit
key: ${{ steps.restore-cache.outputs.cache-primary-key }}
- name: Push changes using pre-commit-ci-lite
uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0
# Only push changes in PRs
if: ${{ always() && github.event_name == 'pull_request' }}
with:
msg: "ci(pre-commit): Apply automatic fixes"

View file

@ -1,56 +0,0 @@
name: External Libraries Results
on:
workflow_run:
workflows: [External Libraries Test]
types:
- completed
workflow_dispatch:
env:
# It's convenient to set variables for values used multiple times in the workflow
SKETCHES_REPORTS_PATH: artifacts/libraries-report
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
jobs:
lib-test-results:
name: External Libraries Test Results
runs-on: ubuntu-latest
if: |
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
steps:
- name: Download and Extract Artifacts
run: |
mkdir -p artifacts && cd artifacts
mkdir -p libraries-report
mkdir -p workflows
artifacts_url=${{ github.event.workflow_run.artifacts_url }}
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do
IFS=$'\t' read name url <<< "$artifact"
gh api $url > "$name.zip"
unzip -j "$name.zip" -d "temp_$name"
if [[ "$name" == "pr_number" ]]; then
mv "temp_$name"/* workflows
else
mv "temp_$name"/* libraries-report
fi
rm -r "temp_$name"
done
echo "Contents of parent directory:"
ls -R ..
- name: Read the pr_num file
id: pr_num_reader
uses: juliangruber/read-file-action@b549046febe0fe86f8cb4f93c24e284433f9ab58 # v1.1.7
with:
path: ./artifacts/workflows/pr_num.txt
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@256d1f13e4195cd7fd436d2f959e6dc4d5e4b406 # libs
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
github-token: ${{ env.GITHUB_TOKEN }}
pr-number: "${{ steps.pr_num_reader.outputs.content }}"

View file

@ -1,52 +0,0 @@
name: Sizes Results (master-v2.x)
on:
workflow_dispatch:
env:
# It's convenient to set variables for values used multiple times in the workflow
SKETCHES_REPORTS_PATH: artifacts/sizes-report
RESULT_SIZES_TEST_FILE: SIZES_TEST.md
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
jobs:
sizes-test-results:
name: Sizes Comparison Results
runs-on: ubuntu-latest
steps:
- name: Checkout gh-pages branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: gh-pages
- name: Create folder structure
run: |
mkdir -p artifacts && cd artifacts
mkdir -p sizes-report
mkdir -p sizes-report/master
mkdir -p sizes-report/pr
# master folder is a base for comparison
# pr folder is for comparison with master
- name: Download JSON file
run: |
mv master_cli_compile/*.json artifacts/sizes-report/pr/
mv v2.x_cli_compile/*.json artifacts/sizes-report/master/
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@2043188c68f483a7b50527c4eacf609d05bb67a5 # sizes_v2
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
github-token: ${{ env.GITHUB_TOKEN }}
destination-file: ${{ env.RESULT_SIZES_TEST_FILE }}
- name: Append file with action URL
run: echo "/ [GitHub Action Link](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}})" >> ${{ env.RESULT_SIZES_TEST_FILE }}
- name: Push to github repo
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add ${{ env.RESULT_SIZES_TEST_FILE }}
git commit -m "Generated Sizes Results (master-v2.x)"
git push origin HEAD:gh-pages

View file

@ -1,73 +0,0 @@
name: Sizes Results
on:
workflow_run:
workflows: [Compilation Tests]
types:
- completed
workflow_dispatch:
env:
# It's convenient to set variables for values used multiple times in the workflow
SKETCHES_REPORTS_PATH: artifacts/sizes-report
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
jobs:
sizes-test-results:
name: Sizes Comparison Results
runs-on: ubuntu-latest
if: |
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.conclusion == 'success'
steps:
- name: Checkout gh-pages branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: gh-pages
- name: Create folder structure
run: |
mkdir -p artifacts && cd artifacts
mkdir -p sizes-report
mkdir -p sizes-report/master
mkdir -p sizes-report/pr
- name: Download JSON file
run: |
mv master_cli_compile/*.json artifacts/sizes-report/master/
- name: Download and Extract Artifacts
run: |
cd artifacts
artifacts_url=${{ github.event.workflow_run.artifacts_url }}
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do
IFS=$'\t' read name url <<< "$artifact"
# Only process pr_number and pr_cli_compile artifacts
if [[ "$name" == "pr_number" || "$name" =~ ^pr_cli_compile_[0-9]+$ ]]; then
gh api $url > "$name.zip"
unzip -o -j "$name.zip" -d "temp_$name"
if [[ "$name" == "pr_number" ]]; then
mv "temp_$name"/* sizes-report
elif [[ "$name" =~ ^pr_cli_compile_[0-9]+$ ]]; then
mv "temp_$name"/* sizes-report/pr
fi
rm -r "temp_$name"
fi
done
echo "Contents of parent directory:"
ls -R ..
- name: Read the pr_num file
id: pr_num_reader
uses: juliangruber/read-file-action@b549046febe0fe86f8cb4f93c24e284433f9ab58 # v1.1.7
with:
path: ./artifacts/sizes-report/pr_num.txt
- name: Report results
uses: P-R-O-C-H-Y/report-size-deltas@bea91d2c99ca80c88a883b39b1c4012f00ec3d09 # sizes_v2
with:
sketches-reports-source: ${{ env.SKETCHES_REPORTS_PATH }}
github-token: ${{ env.GITHUB_TOKEN }}
pr-number: "${{ steps.pr_num_reader.outputs.content }}"

View file

@ -1,343 +0,0 @@
name: Compilation Tests
on:
workflow_dispatch:
inputs:
log_level:
description: "Log level"
default: "none"
type: "choice"
required: true
options:
- "none"
- "error"
- "warn"
- "info"
- "debug"
- "verbose"
schedule:
# Every Sunday at 2:00 UTC run a build with verbose log level
- cron: "0 2 * * SUN"
push:
branches:
- master
- release/*
pull_request:
paths:
- "cores/**"
- "libraries/**"
- "!libraries/**.md"
- "!libraries/**.txt"
- "!libraries/**.properties"
- "!libraries/**.py"
- "package/**"
- "idf_component_examples/**"
- "tools/**.py"
- "platform.txt"
- "programmers.txt"
- "idf_component.yml"
- "Kconfig.projbuild"
- "package.json"
- "CMakeLists.txt"
- ".github/workflows/push.yml"
- ".github/scripts/**"
- "!.github/scripts/find_*"
- "!.github/scripts/on-release.sh"
- "!.github/scripts/tests_*"
- "!.github/scripts/upload_*"
- "variants/esp32/**/*"
- "variants/esp32c3/**/*"
- "variants/esp32c5/**/*"
- "variants/esp32c6/**/*"
- "variants/esp32h2/**/*"
- "variants/esp32p4/**/*"
- "variants/esp32s2/**/*"
- "variants/esp32s3/**/*"
concurrency:
group: build-${{github.event.pull_request.number || github.ref}}
cancel-in-progress: true
env:
MAX_CHUNKS: 15
jobs:
cmake-check:
name: Check cmake file
runs-on: ubuntu-latest
if: ${{ !(github.event_name == 'pull_request' && startsWith(github.head_ref, 'release/')) }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- run: bash ./.github/scripts/check-cmakelists.sh
gen-chunks:
name: Generate chunks
runs-on: ubuntu-latest
if: ${{ !(github.event_name == 'pull_request' && startsWith(github.head_ref, 'release/')) }}
outputs:
build_all: ${{ steps.set-chunks.outputs.build_all }}
build_libraries: ${{ steps.set-chunks.outputs.build_libraries }}
build_static_sketches: ${{ steps.set-chunks.outputs.build_static_sketches }}
build_idf: ${{ steps.set-chunks.outputs.build_idf }}
chunk_count: ${{ steps.set-chunks.outputs.chunk_count }}
chunks: ${{ steps.set-chunks.outputs.chunks }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 2
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@2f7c5bfce28377bc069a65ba478de0a74aa0ca32 # v46.0.1
with:
files_yaml: |
core:
- '.github/**'
- 'cores/**'
- 'package/**'
- 'tools/**'
- 'platform.txt'
- 'programmers.txt'
- "variants/esp32/**/*"
- "variants/esp32c3/**/*"
- "variants/esp32c6/**/*"
- "variants/esp32h2/**/*"
- "variants/esp32p4/**/*"
- "variants/esp32s2/**/*"
- "variants/esp32s3/**/*"
libraries:
- 'libraries/**/examples/**'
- 'libraries/**/src/**'
networking:
- 'libraries/Network/src/**'
fs:
- 'libraries/FS/src/**'
static_sketches:
- 'libraries/NetworkClientSecure/examples/WiFiClientSecure/WiFiClientSecure.ino'
- 'libraries/BLE/examples/Server/Server.ino'
- 'libraries/ESP32/examples/Camera/CameraWebServer/CameraWebServer.ino'
- 'libraries/Insights/examples/MinimalDiagnostics/MinimalDiagnostics.ino'
- 'libraries/NetworkClientSecure/src/**'
- 'libraries/BLE/src/**'
- 'libraries/Insights/src/**'
idf:
- 'idf_component.yml'
- 'Kconfig.projbuild'
- 'CMakeLists.txt'
- "idf_component_examples/**"
- name: Set chunks
id: set-chunks
env:
LIB_FILES: ${{ steps.changed-files.outputs.libraries_all_changed_files }}
IS_PR: ${{ github.event_name == 'pull_request' }}
MAX_CHUNKS: ${{ env.MAX_CHUNKS }}
BUILD_IDF: ${{ steps.changed-files.outputs.idf_any_changed == 'true' }}
BUILD_LIBRARIES: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
BUILD_STATIC_SKETCHES: ${{ steps.changed-files.outputs.static_sketches_any_changed == 'true' }}
FS_CHANGED: ${{ steps.changed-files.outputs.fs_any_changed == 'true' }}
NETWORKING_CHANGED: ${{ steps.changed-files.outputs.networking_any_changed == 'true' }}
CORE_CHANGED: ${{ steps.changed-files.outputs.core_any_changed == 'true' }}
LIB_CHANGED: ${{ steps.changed-files.outputs.libraries_any_changed == 'true' }}
run: |
bash ./.github/scripts/set_push_chunks.sh
- name: Upload sketches found
if: ${{ steps.set-chunks.outputs.build_all == 'false' && steps.set-chunks.outputs.build_libraries == 'true' }}
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: sketches_found
path: sketches_found.txt
overwrite: true
if-no-files-found: error
# Ubuntu
build-arduino-linux:
name: Arduino ${{ matrix.chunk }} on ubuntu-latest
if: ${{ needs.gen-chunks.outputs.build_all == 'true' || needs.gen-chunks.outputs.build_libraries == 'true' }}
needs: gen-chunks
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
chunk: ${{ fromJson(needs.gen-chunks.outputs.chunks) }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
python-version: "3.x"
- name: Get libs cache
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
path: |
./tools/dist
./tools/esp32-arduino-libs
./tools/esptool
./tools/mk*
./tools/openocd-esp32
./tools/riscv32-*
./tools/xtensa-*
- name: Set Log Level
run: |
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo "LOG_LEVEL=${{ github.event.inputs.log_level }}" >> $GITHUB_ENV
elif [ "${{ github.event_name }}" == "schedule" ]; then
echo "LOG_LEVEL=verbose" >> $GITHUB_ENV
else
echo "LOG_LEVEL=none" >> $GITHUB_ENV
fi
- name: Build all sketches
if: ${{ needs.gen-chunks.outputs.build_all == 'true' }}
run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ env.MAX_CHUNKS }} 1 ${{ env.LOG_LEVEL }}
- name: Download sketches found
if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
name: sketches_found
- name: Build selected sketches
if: ${{ needs.gen-chunks.outputs.build_all == 'false' && needs.gen-chunks.outputs.build_libraries == 'true' }}
run: bash ./.github/scripts/on-push.sh ${{ matrix.chunk }} ${{ needs.gen-chunks.outputs.chunk_count }} 1 ${{ env.LOG_LEVEL }} sketches_found.txt
# Upload cli compile json as artifact
- name: Upload cli compile json
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr_cli_compile_${{ matrix.chunk }}
path: cli_compile_${{ matrix.chunk }}.json
overwrite: true
# Windows and macOS
build-arduino-win-mac:
name: Arduino on ${{ matrix.os }}
needs: gen-chunks
if: ${{ needs.gen-chunks.outputs.build_all == 'true' || needs.gen-chunks.outputs.build_static_sketches == 'true' }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-latest, macOS-latest]
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
python-version: "3.x"
- name: Build Sketches
run: bash ./.github/scripts/on-push.sh
build-esp-idf-component:
name: Build with ESP-IDF ${{ matrix.idf_ver }} for ${{ matrix.idf_target }}
needs: gen-chunks
if: |
needs.gen-chunks.outputs.build_all == 'true' ||
needs.gen-chunks.outputs.build_libraries == 'true' ||
needs.gen-chunks.outputs.build_idf == 'true'
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
# The version names here correspond to the versions of espressif/idf Docker image.
# See https://hub.docker.com/r/espressif/idf/tags and
# https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-guides/tools/idf-docker-image.html
# for details.
idf_ver: ["release-v5.3","release-v5.4","release-v5.5"]
idf_target:
[
"esp32",
"esp32s2",
"esp32s3",
"esp32c2",
"esp32c3",
"esp32c6",
"esp32h2",
"esp32p4"
]
container: espressif/idf:${{ matrix.idf_ver }}
steps:
- name: Check out arduino-esp32 as a component
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
submodules: recursive
path: components/arduino-esp32
- name: Setup jq
uses: dcarbone/install-jq-action@e397bd87438d72198f81efd21f876461183d383a # v3.0.1
- name: Build
env:
IDF_TARGET: ${{ matrix.idf_target }}
shell: bash
run: |
chmod a+x ./components/arduino-esp32/.github/scripts/*
./components/arduino-esp32/.github/scripts/on-push-idf.sh
- name: Upload generated sdkconfig files for debugging
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: sdkconfig-${{ matrix.idf_ver }}-${{ matrix.idf_target }}
path: ./components/arduino-esp32/idf_component_examples/**/sdkconfig
# Save artifacts to gh-pages
save-master-artifacts:
name: Save master artifacts
needs: build-arduino-linux
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
runs-on: ubuntu-latest
steps:
# Check out repository
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
token: ${{secrets.GITHUB_TOKEN}}
fetch-depth: "0"
- name: Switch branch
run: git checkout remotes/origin/gh-pages
- name: Download sketches reports artifact
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
pattern: pr_cli_compile_*
merge-multiple: true
path: master_cli_compile
- name: List files in the directory
run: ls -R
- name: Commit json files to gh-pages if on master
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
continue-on-error: true
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add --all
git commit -m "Updated cli compile json files"
git push origin HEAD:gh-pages
# Upload PR number as artifact
upload-pr-number:
name: Upload PR number
if: ${{ github.event_name == 'pull_request' && !startsWith(github.head_ref, 'release/') }}
runs-on: ubuntu-latest
steps:
- name: Save the PR number in an artifact
shell: bash
env:
PR_NUM: ${{ github.event.number }}
run: echo $PR_NUM > pr_num.txt
- name: Upload PR number
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: pr_number
path: ./pr_num.txt
overwrite: true

View file

@ -1,32 +0,0 @@
name: ESP32 Arduino Release
on:
release:
types: published
jobs:
build:
name: Publish Release
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
with:
python-version: "3.x"
- name: Install packaging
run: pip install packaging
- name: Install pyserial
run: pip install pyserial
- name: Build Release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: bash ./.github/scripts/on-release.sh

View file

@ -1,123 +0,0 @@
# This file is used to run the runtime tests for the Arduino core for ESP32.
# The tests are run on real hardware and on the Wokwi and QEMU emulators.
# The QEMU tests are disabled for now as they are redundant with most of the Wokwi tests.
# As the Wokwi tests require access to secrets, they are run in a separate workflow.
# We need to ensure that the artifacts from previous tests in the chain are propagated for publishing the results.
# This is the current trigger sequence for the tests:
# tests.yml -> tests_wokwi.yml -> tests_results.yml
# ⌙> tests_build.yml
# ⌙> tests_hw.yml
# ⌙> tests_qemu.yml
name: Runtime Tests
on:
workflow_dispatch:
pull_request:
types: [opened, reopened, closed, synchronize, labeled, unlabeled]
paths:
- ".github/workflows/tests*"
- ".github/scripts/*.sh"
- "!.github/scripts/check-cmakelists.sh"
- "!.github/scripts/find_*"
- "!.github/scripts/on-*.sh"
- "!.github/scripts/set_push_chunks.sh"
- "!.github/scripts/update-version.sh"
- "!.github/scripts/upload_py_tools.sh"
- "tests/**"
- "cores/**"
- "libraries/*/src/**.cpp"
- "libraries/*/src/**.h"
- "libraries/*/src/**.c"
- "package/**"
schedule:
- cron: "0 2 * * *"
concurrency:
group: tests-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
push-event-file:
name: Push event file
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: event_file
path: ${{ github.event_path }}
gen-matrix:
name: Generate matrix
runs-on: ubuntu-latest
outputs:
build-types: ${{ steps.set-matrix.outputs.build-types }}
hw-types: ${{ steps.set-matrix.outputs.hw-types }}
wokwi-types: ${{ steps.set-matrix.outputs.wokwi-types }}
qemu-types: ${{ steps.set-matrix.outputs.qemu-types }}
targets: ${{ steps.set-matrix.outputs.targets }}
env:
IS_PR: ${{ github.event.pull_request.number != null }}
PERFORMANCE_ENABLED: ${{ contains(github.event.pull_request.labels.*.name, 'perf_test') }}
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
sparse-checkout: .github/scripts/tests_matrix.sh
- name: Set matrix
id: set-matrix
run: bash .github/scripts/tests_matrix.sh
- name: Upload
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: matrix_info
path: info/*
call-build-tests:
name: Build
uses: ./.github/workflows/tests_build.yml
needs: gen-matrix
strategy:
matrix:
type: ${{ fromJson(needs.gen-matrix.outputs.build-types) }}
chip: ${{ fromJson(needs.gen-matrix.outputs.targets) }}
with:
type: ${{ matrix.type }}
chip: ${{ matrix.chip }}
call-hardware-tests:
name: Hardware
uses: ./.github/workflows/tests_hw.yml
needs: [gen-matrix, call-build-tests]
if: |
github.repository == 'espressif/arduino-esp32' &&
(github.event_name != 'pull_request' ||
contains(github.event.pull_request.labels.*.name, 'hil_test'))
strategy:
fail-fast: false
matrix:
type: ${{ fromJson(needs.gen-matrix.outputs.hw-types) }}
chip: ${{ fromJson(needs.gen-matrix.outputs.targets) }}
with:
type: ${{ matrix.type }}
chip: ${{ matrix.chip }}
# This job is disabled for now
call-qemu-tests:
name: QEMU
uses: ./.github/workflows/tests_qemu.yml
needs: [gen-matrix, call-build-tests]
if: false
strategy:
fail-fast: false
matrix:
type: ${{ fromJson(needs.gen-matrix.outputs.qemu-types) }}
chip: ["esp32", "esp32c3"]
with:
type: ${{ matrix.type }}
chip: ${{ matrix.chip }}
# Wokwi tests run in a separate follow-up workflow because they need access to secrets

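The trigger chain sketched in the header comment of this file is built on workflow_run events: tests_wokwi.yml fires when "Runtime Tests" completes, and tests_results.yml fires when "Wokwi tests" completes, each pulling the previous run's artifacts forward by run id. A minimal illustration of one such link, assembled only from triggers and inputs that appear in the workflows further down in this diff (the job name is illustrative):

on:
  workflow_run:
    workflows: ["Runtime Tests"]
    types:
      - completed
jobs:
  pick-up-artifacts:
    runs-on: ubuntu-latest
    steps:
      # Artifacts of the triggering run are addressed by its run id
      - uses: actions/download-artifact@v4
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          run-id: ${{ github.event.workflow_run.id }}
          name: matrix_info
          path: artifacts/matrix_info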
View file

@ -1,90 +0,0 @@
name: Build tests
on:
workflow_call:
inputs:
type:
type: string
description: "Type of tests to build"
required: true
chip:
type: string
description: "Chip to build tests for"
required: true
jobs:
build-tests:
name: Build ${{ inputs.type }} tests for ${{ inputs.chip }}
runs-on: ubuntu-latest
env:
id: ${{ github.event.pull_request.number || github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}-${{ inputs.chip }}-${{ inputs.type }}
steps:
- name: Check if already built
id: cache-build-binaries
if: github.event.pull_request.number != null
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-bin
path: |
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.bin
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.elf
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.json
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/sdkconfig
- name: Evaluate if tests should be built
id: check-build
run: |
cache_exists=${{ steps.cache-build-binaries.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already built, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
- name: Checkout user repository
if: ${{ steps.check-build.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Get libs cache
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: ${{ steps.check-build.outputs.enabled == 'true' }}
with:
key: libs-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('package/package_esp32_index.template.json', 'tools/get.py') }}
path: |
./tools/dist
./tools/esp32-arduino-libs
./tools/esptool
./tools/mk*
./tools/openocd-esp32
./tools/riscv32-*
./tools/xtensa-*
- name: Build sketches
if: ${{ steps.check-build.outputs.enabled == 'true' }}
run: |
bash .github/scripts/tests_build.sh -c -type ${{ inputs.type }} -t ${{ inputs.chip }}
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} binaries as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-build.outputs.enabled == 'true' && github.event.pull_request.number != null
with:
key: tests-${{ env.id }}-bin
path: |
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.bin
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.elf
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.json
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/sdkconfig
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} binaries as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: tests-bin-${{ inputs.chip }}-${{ inputs.type }}
overwrite: true
path: |
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.bin
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.elf
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/*.json
~/.arduino/tests/${{ inputs.chip }}/**/build*.tmp/sdkconfig

View file

@ -1,118 +0,0 @@
name: Hardware tests
on:
workflow_call:
inputs:
type:
type: string
description: "Type of tests to run"
required: true
chip:
type: string
description: "Chip to run tests for"
required: true
env:
DEBIAN_FRONTEND: noninteractive
defaults:
run:
shell: bash
jobs:
hardware-test:
name: Hardware ${{ inputs.chip }} ${{ inputs.type }} tests
runs-on: ["arduino", "${{ inputs.chip }}"]
env:
id: ${{ github.event.pull_request.number || github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}-${{ inputs.chip }}-${{ inputs.type }}
container:
image: python:3.10.1-bullseye
options: --privileged --device-cgroup-rule="c 188:* rmw" --device-cgroup-rule="c 166:* rmw"
steps:
- name: Clean workspace
run: |
rm -rf ./*
rm -rf ~/.arduino/tests
- name: Check if already passed
id: cache-results
if: github.event.pull_request.number != null
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-results-hw
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Evaluate if tests should be run
id: check-tests
run: |
cache_exists=${{ steps.cache-results.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already ran, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
- name: Checkout user repository
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
sparse-checkout: |
*
# setup-python currently only works on ubuntu images
# - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
# if: ${{ steps.check-tests.outputs.enabled == 'true' }}
# with:
# cache-dependency-path: tests/requirements.txt
# cache: 'pip'
# python-version: '3.10.1'
- name: Install dependencies
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
apt update
apt install -y jq
- name: Get binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
name: tests-bin-${{ inputs.chip }}-${{ inputs.type }}
path: |
~/.arduino/tests/${{ inputs.chip }}
- name: List binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
ls -laR ~/.arduino/tests
- name: Run Tests
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
bash .github/scripts/tests_run.sh -c -type ${{ inputs.type }} -t ${{ inputs.chip }} -i 0 -m 1 -e
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} hardware results as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-tests.outputs.enabled == 'true' && github.event.pull_request.number != null
with:
key: tests-${{ env.id }}-results-hw
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} hardware results as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: tests-results-hw-${{ inputs.chip }}-${{ inputs.type }}
overwrite: true
path: |
tests/**/*.xml
tests/**/result_*.json

View file

@ -1,143 +0,0 @@
name: QEMU tests
on:
workflow_call:
inputs:
chip:
required: true
type: string
type:
required: true
type: string
jobs:
qemu-test:
name: QEMU ${{ inputs.chip }} ${{ inputs.type }} tests
env:
id: ${{ github.event.pull_request.number || github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}-${{ inputs.chip }}-${{ inputs.type }}
QEMU_INSTALL_PATH: "$HOME"
runs-on: ubuntu-latest
steps:
- name: Check if already passed
id: get-cache-results
if: github.event.pull_request.number != null
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-results-qemu
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Evaluate if tests should be run
id: check-tests
run: |
cache_exists=${{ steps.get-cache-results.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already ran, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
- name: Checkout user repository
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
persist-credentials: false
sparse-checkout-cone-mode: false
sparse-checkout: |
/*
!.github
# To avoid giving unknown scripts elevated permissions, download them from the master branch
- name: Get CI scripts from master
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
mkdir -p .github
cd .github
curl https://codeload.github.com/${{ github.repository }}/tar.gz/master | tar -xz --strip=2 arduino-esp32-master/.github
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
cache-dependency-path: tests/requirements.txt
cache: "pip"
python-version: "3.x"
- name: Install Python dependencies
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
- name: Install APT dependencies
uses: awalsh128/cache-apt-pkgs-action@5902b33ae29014e6ca012c5d8025d4346556bd40 # v1.4.3
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
packages: libpixman-1-0 libnuma1 libglib2.0-0 libslirp0 libsdl2-2.0-0
version: 1.0
- name: Get QEMU version
uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
id: get-qemu-version
with:
token: ${{secrets.GITHUB_TOKEN}}
owner: espressif
repo: qemu
excludes: prerelease, draft
- name: Cache QEMU
id: cache-qemu
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
path: |
~/qemu
key: qemu-${{ steps.get-qemu-version.outputs.release }}-${{ hashFiles('.github/workflows/tests_qemu.yml') }}
- name: Download QEMU
if: ${{ steps.cache-qemu.outputs.cache-hit != 'true' && steps.check-tests.outputs.enabled == 'true' }}
run: |
cd ${{ env.QEMU_INSTALL_PATH }}
underscore_release=$(echo ${{ steps.get-qemu-version.outputs.release }} | sed 's/\-/_/g')
curl -L https://github.com/espressif/qemu/releases/download/${{ steps.get-qemu-version.outputs.release }}/qemu-riscv32-softmmu-${underscore_release}-x86_64-linux-gnu.tar.xz > qemu-riscv32.tar.xz
curl -L https://github.com/espressif/qemu/releases/download/${{ steps.get-qemu-version.outputs.release }}/qemu-xtensa-softmmu-${underscore_release}-x86_64-linux-gnu.tar.xz > qemu-xtensa.tar.xz
tar -xf qemu-riscv32.tar.xz
tar -xf qemu-xtensa.tar.xz
rm qemu-*
echo "QEMU_PATH=${{ env.QEMU_INSTALL_PATH }}/qemu" >> $GITHUB_ENV
- name: Get binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
name: tests-bin-${{ inputs.chip }}-${{ inputs.type }}
path: |
~/.arduino/tests/${{ inputs.chip }}
- name: Run Tests
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: QEMU_PATH="${{ env.QEMU_INSTALL_PATH }}" bash .github/scripts/tests_run.sh -c -type ${{inputs.type}} -t ${{inputs.chip}} -i 0 -m 1 -Q
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} QEMU results as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-tests.outputs.enabled == 'true' && github.event.pull_request.number != null
with:
key: tests-${{ env.id }}-results-qemu
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Upload ${{ inputs.chip }} ${{ inputs.type }} QEMU results as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: tests-results-qemu-${{ inputs.chip }}-${{ inputs.type }}
overwrite: true
path: |
tests/**/*.xml
tests/**/result_*.json

View file

@ -1,195 +0,0 @@
name: Publish and clean test results
on:
workflow_run:
workflows: ["Wokwi tests"]
types:
- completed
# No permissions by default
permissions: { contents: read }
jobs:
unit-test-results:
name: Unit Test Results
if: |
github.event.workflow_run.conclusion == 'success' ||
github.event.workflow_run.conclusion == 'failure' ||
github.event.workflow_run.conclusion == 'timed_out'
runs-on: ubuntu-latest
permissions:
actions: write
statuses: write
checks: write
pull-requests: write
contents: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: gh-pages
- name: Download and Extract Artifacts
uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9
with:
run_id: ${{ github.event.workflow_run.id }}
path: ./artifacts
- name: Get original info
run: |
original_event=$(cat ./artifacts/parent-artifacts/event.txt)
original_action=$(cat ./artifacts/parent-artifacts/action.txt)
original_sha=$(cat ./artifacts/parent-artifacts/sha.txt)
original_ref=$(cat ./artifacts/parent-artifacts/ref.txt)
original_conclusion=$(cat ./artifacts/parent-artifacts/conclusion.txt)
original_run_id=$(cat ./artifacts/parent-artifacts/run_id.txt)
# Sanitize the values to avoid security issues
# Event: Allow alphabetical characters and underscores
original_event=$(echo "$original_event" | tr -cd '[:alpha:]_')
# Action: Allow alphabetical characters and underscores
original_action=$(echo "$original_action" | tr -cd '[:alpha:]_')
# SHA: Allow alphanumeric characters
original_sha=$(echo "$original_sha" | tr -cd '[:alnum:]')
# Ref: Allow alphanumeric characters, slashes, underscores, dots, and dashes
original_ref=$(echo "$original_ref" | tr -cd '[:alnum:]/_.-')
# Conclusion: Allow alphabetical characters and underscores
original_conclusion=$(echo "$original_conclusion" | tr -cd '[:alpha:]_')
# Run ID: Allow numeric characters
original_run_id=$(echo "$original_run_id" | tr -cd '[:digit:]')
echo "original_event=$original_event" >> $GITHUB_ENV
echo "original_action=$original_action" >> $GITHUB_ENV
echo "original_sha=$original_sha" >> $GITHUB_ENV
echo "original_ref=$original_ref" >> $GITHUB_ENV
echo "original_conclusion=$original_conclusion" >> $GITHUB_ENV
echo "original_run_id=$original_run_id" >> $GITHUB_ENV
echo "original_event = $original_event"
echo "original_action = $original_action"
echo "original_sha = $original_sha"
echo "original_ref = $original_ref"
echo "original_conclusion = $original_conclusion"
echo "original_run_id = $original_run_id"
- name: Print links to other runs
run: |
echo "Build, Hardware and QEMU tests: https://github.com/${{ github.repository }}/actions/runs/${{ env.original_run_id }}"
echo "Wokwi tests: https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}"
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@170bf24d20d201b842d7a52403b73ed297e6645b # v2.18.0
with:
commit: ${{ env.original_sha }}
event_file: ./artifacts/parent-artifacts/event_file/event.json
event_name: ${{ env.original_event }}
files: ./artifacts/**/*.xml
action_fail: true
compare_to_earlier_commit: false
json_file: ./unity_results.json
json_suite_details: true
- name: Upload JSON
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: ${{ always() }}
with:
name: unity_results
overwrite: true
path: |
./unity_results.json
- name: Fail if tests failed
if: ${{ env.original_conclusion == 'failure' || env.original_conclusion == 'timed_out' || github.event.workflow_run.conclusion == 'failure' || github.event.workflow_run.conclusion == 'timed_out' }}
run: exit 1
- name: Clean up caches
if: always()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const ref = process.env.original_ref;
const key_prefix = 'tests-' + ref + '-';
if (process.env.original_event == 'pull_request' && process.env.original_action != 'closed') {
console.log('Skipping cache cleanup for open PR');
return;
}
await github.paginate(github.rest.actions.getActionsCacheList, {
owner: context.repo.owner,
repo: context.repo.repo,
per_page: 100,
key: key_prefix
}).then(caches => {
if (caches) {
for (const cache of caches) {
console.log(`Deleting cache: ${cache.key}`);
github.rest.actions.deleteActionsCacheById({
owner: context.repo.owner,
repo: context.repo.repo,
cache_id: cache.id
});
}
}
});
- name: Report conclusion
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = process.env.original_sha;
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: `Runtime Tests / Report results (${process.env.original_event} -> workflow_run -> workflow_run)`,
owner: owner,
repo: repo,
sha: sha,
state: '${{ job.status }}',
description: '${{ job.status }}' == 'success' ? 'Runtime tests successful' : 'Runtime tests failed',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
- name: Generate report
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
env:
REPORT_FILE: ./runtime-tests-results/RUNTIME_TESTS_REPORT.md
WOKWI_RUN_ID: ${{ github.event.workflow_run.id }}
BUILD_RUN_ID: ${{ env.original_run_id }}
IS_FAILING: ${{ env.original_conclusion == 'failure' || env.original_conclusion == 'timed_out' || github.event.workflow_run.conclusion == 'failure' || github.event.workflow_run.conclusion == 'timed_out' || job.status == 'failure' }}
run: |
rm -rf artifacts $REPORT_FILE
mv -f ./unity_results.json ./runtime-tests-results/unity_results.json
touch $REPORT_FILE
python3 ./runtime-tests-results/table_generator.py ./runtime-tests-results/unity_results.json >> $REPORT_FILE
- name: Generate badge
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
uses: jaywcjlove/generated-badges@0e078ae4d4bab3777ea4f137de496ab44688f5ad # v1.0.13
with:
label: Runtime Tests
status: ${{ job.status == 'success' && 'passing' || 'failing' }}
output: runtime-tests-results/badge.svg
color: ${{ job.status == 'success' && 'green' || 'red' }}
style: flat
- name: Push badge
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
if [[ `git status --porcelain` ]]; then
git add --all
git commit -m "Updated runtime tests report"
git push origin HEAD:gh-pages
fi

View file

@ -1,326 +0,0 @@
name: Wokwi tests
on:
workflow_run:
workflows: ["Runtime Tests"]
types:
- completed
# No permissions by default
permissions: { contents: read }
env:
WOKWI_TIMEOUT: 600000 # Milliseconds
jobs:
get-artifacts:
name: Get required artifacts
runs-on: ubuntu-latest
permissions:
actions: read
statuses: write
outputs:
pr_num: ${{ steps.set-ref.outputs.pr_num }}
ref: ${{ steps.set-ref.outputs.ref }}
base: ${{ steps.set-ref.outputs.base }}
targets: ${{ steps.set-ref.outputs.targets }}
types: ${{ steps.set-ref.outputs.types }}
steps:
- name: Report pending
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (Get artifacts) (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: 'pending',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
- name: Download and extract event file
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
name: event_file
path: artifacts/event_file
- name: Download and extract matrix info
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
name: matrix_info
path: artifacts/matrix_info
- name: Try to read PR number
id: set-ref
run: |
pr_num=$(jq -r '.pull_request.number' artifacts/event_file/event.json | tr -cd "[:digit:]")
if [ -z "$pr_num" ] || [ "$pr_num" == "null" ]; then
pr_num=""
fi
ref=$pr_num
if [ -z "$ref" ] || [ "$ref" == "null" ]; then
ref=${{ github.ref }}
fi
action=$(jq -r '.action' artifacts/event_file/event.json | tr -cd "[:alpha:]_")
if [ "$action" == "null" ]; then
action=""
fi
base=$(jq -r '.pull_request.base.ref' artifacts/event_file/event.json | tr -cd "[:alnum:]/_.-")
if [ -z "$base" ] || [ "$base" == "null" ]; then
base=${{ github.ref }}
fi
types=$(cat artifacts/matrix_info/wokwi_types.txt | tr -cd "[:alpha:],[]'")
targets=$(cat artifacts/matrix_info/targets.txt | tr -cd "[:alnum:],[]'")
echo "base = $base"
echo "targets = $targets"
echo "types = $types"
echo "pr_num = $pr_num"
printf "$ref" >> artifacts/ref.txt
printf "Ref = "
cat artifacts/ref.txt
printf "${{ github.event.workflow_run.event }}" >> artifacts/event.txt
printf "\nEvent name = "
cat artifacts/event.txt
printf "${{ github.event.workflow_run.head_sha || github.sha }}" >> artifacts/sha.txt
printf "\nHead SHA = "
cat artifacts/sha.txt
printf "$action" >> artifacts/action.txt
printf "\nAction = "
cat artifacts/action.txt
printf "${{ github.event.workflow_run.id }}" >> artifacts/run_id.txt
printf "\nRun ID = "
cat artifacts/run_id.txt
if [ -z "$ref" ] || [ "$ref" == "null" ]; then
echo "Failed to get PR number or ref"
exit 1
fi
conclusion="${{ github.event.workflow_run.conclusion }}"
printf "$conclusion" >> artifacts/conclusion.txt
printf "\nConclusion = "
cat artifacts/conclusion.txt
echo "pr_num=$pr_num" >> $GITHUB_OUTPUT
echo "base=$base" >> $GITHUB_OUTPUT
echo "targets=$targets" >> $GITHUB_OUTPUT
echo "types=$types" >> $GITHUB_OUTPUT
echo "ref=$ref" >> $GITHUB_OUTPUT
- name: Download and extract parent hardware results
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
continue-on-error: true
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
pattern: tests-results-hw-*
merge-multiple: true
path: artifacts/results/hw
- name: Download and extract parent QEMU results
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
continue-on-error: true
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
pattern: tests-results-qemu-*
merge-multiple: true
path: artifacts/results/qemu
- name: Upload parent artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: parent-artifacts
path: artifacts
if-no-files-found: error
- name: Report conclusion
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (Get artifacts) (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: '${{ job.status }}',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
wokwi-test:
name: Wokwi ${{ matrix.chip }} ${{ matrix.type }} tests
if: |
github.event.workflow_run.conclusion == 'success' ||
github.event.workflow_run.conclusion == 'failure' ||
github.event.workflow_run.conclusion == 'timed_out'
runs-on: ubuntu-latest
needs: get-artifacts
env:
id: ${{ needs.get-artifacts.outputs.ref }}-${{ github.event.workflow_run.head_sha || github.sha }}-${{ matrix.chip }}-${{ matrix.type }}
permissions:
actions: read
statuses: write
strategy:
fail-fast: false
matrix:
type: ${{ fromJson(needs.get-artifacts.outputs.types) }}
chip: ${{ fromJson(needs.get-artifacts.outputs.targets) }}
steps:
- name: Report pending
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (${{ matrix.type }}, ${{ matrix.chip }}) / Wokwi ${{ matrix.chip }} ${{ matrix.type }} tests (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: 'pending',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);
- name: Check if already passed
id: get-cache-results
if: needs.get-artifacts.outputs.pr_num
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
key: tests-${{ env.id }}-results-wokwi
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Evaluate if tests should be run
id: check-tests
run: |
cache_exists=${{ steps.get-cache-results.outputs.cache-hit == 'true' }}
enabled=true
if [[ $cache_exists == 'true' ]]; then
echo "Already ran, skipping"
enabled=false
fi
echo "enabled=$enabled" >> $GITHUB_OUTPUT
# Note that changes to the workflows and tests will only be picked up after the PR is merged
# DO NOT CHECKOUT THE USER'S REPOSITORY IN THIS WORKFLOW. IT HAS HIGH SECURITY RISKS.
- name: Checkout repository
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ needs.get-artifacts.outputs.base || github.ref }}
- uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.0.4
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
with:
cache-dependency-path: tests/requirements.txt
cache: "pip"
python-version: "3.x"
- name: Install dependencies
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: |
pip install -U pip
pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi
- name: Install Wokwi CLI
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
run: curl -L https://wokwi.com/ci/install.sh | sh
- name: Wokwi CI Server
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: wokwi/wokwi-ci-server-action@a6fabb5a49e080158c7a1d121ea5b789536a82c3 # v1
- name: Get binaries
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
name: tests-bin-${{ matrix.chip }}-${{ matrix.type }}
path: |
~/.arduino/tests/${{ matrix.chip }}
- name: Run Tests
if: ${{ steps.check-tests.outputs.enabled == 'true' }}
env:
WOKWI_CLI_TOKEN: ${{ secrets.WOKWI_CLI_TOKEN }}
run: |
bash .github/scripts/tests_run.sh -c -type ${{ matrix.type }} -t ${{ matrix.chip }} -i 0 -m 1 -W ${{ env.WOKWI_TIMEOUT }}
- name: Upload ${{ matrix.chip }} ${{ matrix.type }} Wokwi results as cache
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
if: steps.check-tests.outputs.enabled == 'true' && needs.get-artifacts.outputs.pr_num
with:
key: tests-${{ env.id }}-results-wokwi
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Upload ${{ matrix.chip }} ${{ matrix.type }} Wokwi results as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: tests-results-wokwi-${{ matrix.chip }}-${{ matrix.type }}
overwrite: true
path: |
tests/**/*.xml
tests/**/result_*.json
- name: Report conclusion
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
const owner = '${{ github.repository_owner }}';
const repo = '${{ github.repository }}'.split('/')[1];
const sha = '${{ github.event.workflow_run.head_sha }}';
core.debug(`owner: ${owner}`);
core.debug(`repo: ${repo}`);
core.debug(`sha: ${sha}`);
const { context: name, state } = (await github.rest.repos.createCommitStatus({
context: 'Runtime Tests / Wokwi (${{ matrix.type }}, ${{ matrix.chip }}) / Wokwi ${{ matrix.chip }} ${{ matrix.type }} tests (${{ github.event.workflow_run.event }} -> workflow_run)',
owner: owner,
repo: repo,
sha: sha,
state: '${{ job.status }}',
target_url: 'https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}'
})).data;
core.info(`${name} is ${state}`);

View file

@ -1,59 +0,0 @@
name: Push components to https://components.espressif.com
on:
workflow_dispatch:
inputs:
tag:
description: 'Version to push to the component registry'
required: true
git_ref:
description: 'Git ref with the source to push to the component registry'
required: true
workflow_run:
workflows: ["ESP32 Arduino Release"]
types:
- completed
permissions:
contents: read
jobs:
upload_components:
runs-on: ubuntu-latest
steps:
- name: Get the release tag
env:
head_branch: ${{ inputs.tag || github.event.workflow_run.head_branch }}
run: |
if [ "${{ github.event.workflow_run.conclusion }}" != "success" ] && [ "${{ github.event_name }}" == "workflow_run" ]; then
echo "Release workflow failed. Exiting..."
exit 1
fi
# Read and sanitize the branch/tag name
branch=$(echo "$head_branch" | tr -cd '[:alnum:]/_.-')
if [[ $branch == refs/tags/* ]]; then
tag="${branch#refs/tags/}"
elif [[ $branch =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$ ]]; then
tag=$branch
else
echo "Tag not found in $branch. Exiting..."
exit 1
fi
echo "Tag: $tag"
echo "RELEASE_TAG=$tag" >> $GITHUB_ENV
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ inputs.git_ref || env.RELEASE_TAG }}
submodules: "recursive"
- name: Upload components to the component registry
uses: espressif/upload-components-ci-action@b78a19fa5424714997596d3ecffa634aef8ae20b # v1.0.5
with:
name: arduino-esp32
version: ${{ env.RELEASE_TAG }}
namespace: espressif
api_token: ${{ secrets.IDF_COMPONENT_API_TOKEN }}

57
.gitignore vendored
View file

@ -1,57 +1,2 @@
tools/esp32-arduino-libs
tools/xtensa-esp-elf
tools/xtensa-esp32-elf
tools/xtensa-esp32s2-elf
tools/xtensa-esp32s3-elf
tools/xtensa-esp-elf-gdb
tools/riscv32-esp-elf
tools/riscv32-esp-elf-gdb
tools/dist
tools/esptool
tools/esptool.exe
tools/mkspiffs
tools/mklittlefs
tools/mkfatfs.exe
tools/openocd-esp32
# Ignore editor backup files and macOS system metadata
.DS_Store
.*.swp
.*.swo
*~
# Ignore build folder
/build
# Ignore files built by Visual Studio/Visual Micro
[Dd]ebug/
[Rr]elease/
.vs/
__vm/
*.vcxproj*
.vscode/
platform.sloeber.txt
boards.sloeber.txt
# Ignore docs build (Sphinx)
docs/build
docs/source/_build
__pycache__/
_build/
# Test log files
*.log
debug.cfg
debug.svd
debug_custom.json
libraries/Insights/examples/*/*.ino.zip
# Vale Style
.vale/styles/*
!.vale/styles/Vocab/
.vale/styles/Vocab/*
!.vale/styles/Vocab/Espressif/
# Ignore Lib Builder Docker run scripts
/run.sh
/run.ps1
.idea

View file

@ -1,25 +0,0 @@
workflow:
rules:
# Disable non-protected, push-triggered pipelines
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^\d+\.\d+(\.\d+)?($|-)/ && $CI_PIPELINE_SOURCE == "push"'
when: never
# when running merged result pipelines, CI_COMMIT_SHA represents the temp commit it created.
# Please use PIPELINE_COMMIT_SHA at all places that require a commit sha of the original commit.
- if: $CI_OPEN_MERGE_REQUESTS != null
variables:
PIPELINE_COMMIT_SHA: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
IS_MR_PIPELINE: 1
- if: $CI_OPEN_MERGE_REQUESTS == null
variables:
PIPELINE_COMMIT_SHA: $CI_COMMIT_SHA
IS_MR_PIPELINE: 0
- if: '$CI_PIPELINE_SOURCE == "schedule"'
variables:
IS_SCHEDULED_RUN: "true"
- when: always
# Place the default settings in `.gitlab/workflows/common.yml` instead
include:
- ".gitlab/workflows/common.yml"
- ".gitlab/workflows/sample.yml"

View file

@ -1,26 +0,0 @@
#####################
# Default Variables #
#####################
stages:
- pre_check
- build
- test
- result
variables:
ESP_IDF_VERSION: "5.5"
ESP_ARDUINO_VERSION: "3.3.0"
#############
# `default` #
#############
default:
retry:
max: 2
when:
# In case of a runner failure we could hop to another one, or a network error could go away.
- runner_system_failure
# Job execution timeout may be caused by a network issue.
- job_execution_timeout

View file

@ -1,6 +0,0 @@
hello-world:
stage: test
script:
- echo "Hello, World from GitLab CI!"
rules:
- if: $CI_PIPELINE_SOURCE == "push"

View file

@ -1,110 +0,0 @@
exclude: |
(?x)(
^\.github\/|
^tests\/performance\/coremark\/.*\.[ch]$|
^tests\/performance\/superpi\/.*\.(cpp|h)$|
LICENSE\.md$
)
default_language_version:
# force all unspecified python hooks to run python3
python: python3
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: "cef0300fd0fc4d2a87a85fa2093c6b283ea36f4b" # v5.0.0
hooks:
# Generic checks
- id: check-case-conflict
- id: check-symlinks
- id: debug-statements
- id: destroyed-symlinks
- id: detect-private-key
- id: end-of-file-fixer
exclude: ^.*\.(bin|BIN)$
- id: mixed-line-ending
args: [--fix=lf]
- id: trailing-whitespace
args: [--markdown-linebreak-ext=md]
# JSON formatting
- id: pretty-format-json
stages: [manual]
args: [--autofix]
types_or: [json]
exclude: |
(?x)(
diagram\..*\.json$|
package\.json$|
^package\/.*$
)
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: "f6446549e5e97ec9665b9b03e75b87b445857f9a" # v18.1.3
hooks:
# C/C++ formatting
- id: clang-format
types_or: [c, c++]
exclude: ^.*\/build_opt\.h$
- repo: https://github.com/psf/black-pre-commit-mirror
rev: "a4920527036bb9a3f3e6055d595849d67d0da066" # 25.1.0
hooks:
# Python formatting
- id: black
types_or: [python]
args: [--line-length=120] # From the Arduino code style; added as an argument rather than creating a new config file.
- repo: https://github.com/PyCQA/flake8
rev: "16f5f28a384f0781bebb37a08aa45e65b9526c50" # 7.2.0
hooks:
# Python linting
- id: flake8
types_or: [python]
additional_dependencies:
- flake8-bugbear
- flake8-comprehensions
- flake8-simplify
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "ffb6a759a979008c0e6dff86e39f4745a2d9eac4" # v3.1.0
hooks:
# YAML formatting
- id: prettier
types_or: [yaml]
- repo: https://github.com/codespell-project/codespell
rev: "63c8f8312b7559622c0d82815639671ae42132ac" # v2.4.1
hooks:
# Spell checking
- id: codespell
exclude: ^.*\.(svd|SVD)$
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: "a23f6b85d0fdd5bb9d564e2579e678033debbdff" # v0.10.0.1
hooks:
# Bash linting
- id: shellcheck
types: [shell]
- repo: https://github.com/openstack/bashate
rev: "fbd7c2534c2701351c603ff700ddf08202430a31" # 2.1.1
hooks:
# Bash formatting
- id: bashate
types: [shell]
args: ["-i", "E006"] # Ignore E006: Line too long
- repo: https://github.com/errata-ai/vale
rev: "dc4c47923788a413fb5677de6e3370d514aecb78" # v3.11.2
hooks:
# Sync vale styles and lint markdown and reStructuredText
- id: vale
name: vale-sync
language_version: "1.23.2"
pass_filenames: false
args: [sync]
types_or: [markdown, rst]
- id: vale
language_version: "1.23.2"
types_or: [markdown, rst]

View file

@ -1,4 +0,0 @@
__pycache__/
.clang-format
.licenses/
/.git/

View file

@ -1,20 +0,0 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Set the version of Python and other tools you might need
build:
os: ubuntu-22.04
tools:
python: "3.12"
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/source/conf.py
python:
install:
- requirements: docs/requirements.txt

View file

@ -1,11 +0,0 @@
# Shellcheck configuration file for ESP32 Arduino core
# Optional checks. https://github.com/koalaman/shellcheck/wiki/optional
enable=add-default-case,deprecate-which,avoid-nullary-conditions
# Enable search for external sources
external-sources=true
# Search folder for sourced files.
# Set to the folder where the original script is located.
source-path=SCRIPTDIR

29
.travis.yml Normal file
View file

@ -0,0 +1,29 @@
language: python
dist: focal
cache:
directories:
- ~/arduino_ide
git:
depth: false
quiet: true
before_install:
- source $TRAVIS_BUILD_DIR/install.sh
script:
- build_platform nrf52840
- build_platform esp32
- build_platform uno
- build_platform leonardo
- build_platform zero
- build_platform esp8266
- build_platform m4
notifications:
email:
on_success: change
on_failure: change

118
.vale.ini
View file

@ -1,118 +0,0 @@
###################
### Vale Config ###
###################
# This is a Vale linter configuration file.
# - Repo: arduino-esp32
# - Based on Default config: v0-1-1
# It lists all necessary parameters to configure Vale for your project.
# For official documentation on all config settings, see
# https://vale.sh/docs/topics/config
##############
### Global ###
##############
# This section lists core settings applying to Vale itself.
# Specify path to external resources (e.g., styles and vocab files).
# The path value may be absolute or relative to this configuration file.
StylesPath = .vale/styles
# Specify the minimum alert severity that Vale will report.
MinAlertLevel = error # "suggestion", "warning", or "error"
# Specify vocabulary for special treatment.
# Create a folder in <StylesPath>/Vocab/<name>/ and add its name here
# The folder should contain two files:
# - accept.txt -- lists words with accepted case-sensitive spelling
# - reject.txt -- lists words whose occurrences throw an error
# Vocab = Espressif
# Specify the packages to import into your project.
# A package is a zip file containing a number of rules (style) written in YAML.
# For a list of official packages, see Package Hub at https://vale.sh/hub/
# For official documentation on packages, see
# https://vale.sh/docs/topics/packages/
# Before linting, navigate to your project and run `vale sync` to download
# the official packages specified below.
# Packages = Package1, Package2, \
# https://example.com/path/to/package/Package.zip
Packages = Google, Microsoft, RedHat, \
https://dl.espressif.com/dl/esp-vale-config/Espressif-latest.zip
###############
### Formats ###
###############
# This section enables association of "unknown" formats with the ones
# supported by Vale. For official documentation on supported formats, see
# https://vale.sh/docs/topics/scoping/
[formats]
# For example, treat MDX files as Markdown files.
# mdx = md
################################
### Format-specific settings ###
################################
# This section lists the settings that apply to specific file formats
# based on their glob pattern.
# Settings provided under a more specific glob pattern,
# such as [*.{md,txt}] will override those in [*].
[*.{md,rst}]
# Enable styles to activate all rules included in them.
# BasedOnStyles = Style1, Style2
BasedOnStyles = Vale, Espressif-latest
### Deactivate individual rules ###
### in enabled styles.
# Style1.Rule1 = NO
Vale.Repetition = NO
Vale.Spelling = NO
Espressif-latest.Admonitions = NO
Espressif-latest.Contractions = NO
Espressif-latest.Monospace = NO
### Change default severity level ###
### of an activated rule.
# Choose between "suggestion", "warning", or "error".
# Style1.Rule2 = error
### Activate individual rules ###
### in non-enabled styles stored in <StylesPath>.
# Style1.Rule = YES
Google.Gender = YES
Google.GenderBias = YES
Google.Slang = YES
Google.Spacing = YES
Microsoft.DateNumbers = YES
Microsoft.Ellipses = YES
Microsoft.FirstPerson = YES
Microsoft.Hyphens = YES
Microsoft.Ordinal = YES
Microsoft.OxfordComma = YES
Microsoft.Percentages = YES
Microsoft.RangeTime = YES
Microsoft.Semicolon = YES
Microsoft.SentenceLength = YES
Microsoft.Suspended = YES
Microsoft.Units = YES
Microsoft.URLFormat = YES
Microsoft.We = YES
Microsoft.Wordiness = YES
RedHat.Contractions = YES
RedHat.RepeatedWords = YES

View file

@ -1,436 +0,0 @@
# Check ESP-IDF version and error out if it is not in the supported range.
#
# Note for arduino-esp32 developers: to bypass the version check locally,
# set ARDUINO_SKIP_IDF_VERSION_CHECK environment variable to 1. For example:
# export ARDUINO_SKIP_IDF_VERSION_CHECK=1
# idf.py build
set(min_supported_idf_version "5.3.0")
set(max_supported_idf_version "5.5.99")
set(idf_version "${IDF_VERSION_MAJOR}.${IDF_VERSION_MINOR}.${IDF_VERSION_PATCH}")
if ("${idf_version}" AND NOT "$ENV{ARDUINO_SKIP_IDF_VERSION_CHECK}")
if (idf_version VERSION_LESS min_supported_idf_version)
message(FATAL_ERROR "Arduino-esp32 can be used with ESP-IDF versions "
"between ${min_supported_idf_version} and ${max_supported_idf_version}, "
"but an older version is detected: ${idf_version}.")
endif()
if (idf_version VERSION_GREATER max_supported_idf_version)
message(FATAL_ERROR "Arduino-esp32 can be used with ESP-IDF versions "
"between ${min_supported_idf_version} and ${max_supported_idf_version}, "
"but a newer version is detected: ${idf_version}.")
endif()
endif()
set(CORE_SRCS
cores/esp32/base64.cpp
cores/esp32/cbuf.cpp
cores/esp32/ColorFormat.c
cores/esp32/chip-debug-report.cpp
cores/esp32/esp32-hal-adc.c
cores/esp32/esp32-hal-bt.c
cores/esp32/esp32-hal-cpu.c
cores/esp32/esp32-hal-dac.c
cores/esp32/esp32-hal-gpio.c
cores/esp32/esp32-hal-i2c.c
cores/esp32/esp32-hal-i2c-ng.c
cores/esp32/esp32-hal-i2c-slave.c
cores/esp32/esp32-hal-ledc.c
cores/esp32/esp32-hal-matrix.c
cores/esp32/esp32-hal-misc.c
cores/esp32/esp32-hal-periman.c
cores/esp32/esp32-hal-psram.c
cores/esp32/esp32-hal-rgb-led.c
cores/esp32/esp32-hal-sigmadelta.c
cores/esp32/esp32-hal-spi.c
cores/esp32/esp32-hal-time.c
cores/esp32/esp32-hal-timer.c
cores/esp32/esp32-hal-tinyusb.c
cores/esp32/esp32-hal-touch.c
cores/esp32/esp32-hal-touch-ng.c
cores/esp32/esp32-hal-uart.c
cores/esp32/esp32-hal-rmt.c
cores/esp32/Esp.cpp
cores/esp32/freertos_stats.cpp
cores/esp32/FunctionalInterrupt.cpp
cores/esp32/HardwareSerial.cpp
cores/esp32/HEXBuilder.cpp
cores/esp32/IPAddress.cpp
cores/esp32/libb64/cdecode.c
cores/esp32/libb64/cencode.c
cores/esp32/MacAddress.cpp
cores/esp32/main.cpp
cores/esp32/MD5Builder.cpp
cores/esp32/Print.cpp
cores/esp32/SHA1Builder.cpp
cores/esp32/stdlib_noniso.c
cores/esp32/Stream.cpp
cores/esp32/StreamString.cpp
cores/esp32/Tone.cpp
cores/esp32/HWCDC.cpp
cores/esp32/USB.cpp
cores/esp32/USBCDC.cpp
cores/esp32/USBMSC.cpp
cores/esp32/FirmwareMSC.cpp
cores/esp32/firmware_msc_fat.c
cores/esp32/wiring_pulse.c
cores/esp32/wiring_shift.c
cores/esp32/WMath.cpp
cores/esp32/WString.cpp
)
set(ARDUINO_ALL_LIBRARIES
ArduinoOTA
AsyncUDP
BLE
BluetoothSerial
DNSServer
EEPROM
ESP_I2S
ESP_NOW
ESP_SR
ESPmDNS
Ethernet
FFat
FS
HTTPClient
HTTPUpdate
Insights
LittleFS
Matter
NetBIOS
Network
OpenThread
PPP
Preferences
RainMaker
SD_MMC
SD
SimpleBLE
SPIFFS
SPI
Ticker
Update
USB
WebServer
NetworkClientSecure
WiFi
WiFiProv
Wire
Zigbee
)
set(ARDUINO_LIBRARY_ArduinoOTA_SRCS libraries/ArduinoOTA/src/ArduinoOTA.cpp)
set(ARDUINO_LIBRARY_AsyncUDP_SRCS libraries/AsyncUDP/src/AsyncUDP.cpp)
set(ARDUINO_LIBRARY_BluetoothSerial_SRCS
libraries/BluetoothSerial/src/BluetoothSerial.cpp
libraries/BluetoothSerial/src/BTAddress.cpp
libraries/BluetoothSerial/src/BTAdvertisedDeviceSet.cpp
libraries/BluetoothSerial/src/BTScanResultsSet.cpp)
set(ARDUINO_LIBRARY_DNSServer_SRCS libraries/DNSServer/src/DNSServer.cpp)
set(ARDUINO_LIBRARY_EEPROM_SRCS libraries/EEPROM/src/EEPROM.cpp)
set(ARDUINO_LIBRARY_ESP_I2S_SRCS libraries/ESP_I2S/src/ESP_I2S.cpp)
set(ARDUINO_LIBRARY_ESP_NOW_SRCS
libraries/ESP_NOW/src/ESP32_NOW.cpp
libraries/ESP_NOW/src/ESP32_NOW_Serial.cpp)
set(ARDUINO_LIBRARY_ESP_SR_SRCS
libraries/ESP_SR/src/ESP_SR.cpp
libraries/ESP_SR/src/esp32-hal-sr.c)
set(ARDUINO_LIBRARY_ESPmDNS_SRCS libraries/ESPmDNS/src/ESPmDNS.cpp)
set(ARDUINO_LIBRARY_Ethernet_SRCS libraries/Ethernet/src/ETH.cpp)
set(ARDUINO_LIBRARY_FFat_SRCS libraries/FFat/src/FFat.cpp)
set(ARDUINO_LIBRARY_FS_SRCS
libraries/FS/src/FS.cpp
libraries/FS/src/vfs_api.cpp)
set(ARDUINO_LIBRARY_HTTPClient_SRCS libraries/HTTPClient/src/HTTPClient.cpp)
set(ARDUINO_LIBRARY_HTTPUpdate_SRCS libraries/HTTPUpdate/src/HTTPUpdate.cpp)
set(ARDUINO_LIBRARY_Insights_SRCS libraries/Insights/src/Insights.cpp)
set(ARDUINO_LIBRARY_LittleFS_SRCS libraries/LittleFS/src/LittleFS.cpp)
set(ARDUINO_LIBRARY_NetBIOS_SRCS libraries/NetBIOS/src/NetBIOS.cpp)
set(ARDUINO_LIBRARY_OpenThread_SRCS
libraries/OpenThread/src/OThread.cpp
libraries/OpenThread/src/OThreadCLI.cpp
libraries/OpenThread/src/OThreadCLI_Util.cpp)
set(ARDUINO_LIBRARY_Matter_SRCS
libraries/Matter/src/MatterEndpoints/MatterGenericSwitch.cpp
libraries/Matter/src/MatterEndpoints/MatterOnOffLight.cpp
libraries/Matter/src/MatterEndpoints/MatterDimmableLight.cpp
libraries/Matter/src/MatterEndpoints/MatterColorTemperatureLight.cpp
libraries/Matter/src/MatterEndpoints/MatterColorLight.cpp
libraries/Matter/src/MatterEndpoints/MatterEnhancedColorLight.cpp
libraries/Matter/src/MatterEndpoints/MatterFan.cpp
libraries/Matter/src/MatterEndpoints/MatterTemperatureSensor.cpp
libraries/Matter/src/MatterEndpoints/MatterHumiditySensor.cpp
libraries/Matter/src/MatterEndpoints/MatterContactSensor.cpp
libraries/Matter/src/MatterEndpoints/MatterPressureSensor.cpp
libraries/Matter/src/MatterEndpoints/MatterOccupancySensor.cpp
libraries/Matter/src/MatterEndpoints/MatterOnOffPlugin.cpp
libraries/Matter/src/MatterEndpoints/MatterThermostat.cpp
libraries/Matter/src/Matter.cpp
libraries/Matter/src/MatterEndPoint.cpp)
set(ARDUINO_LIBRARY_PPP_SRCS
libraries/PPP/src/PPP.cpp
libraries/PPP/src/ppp.c)
set(ARDUINO_LIBRARY_Preferences_SRCS libraries/Preferences/src/Preferences.cpp)
set(ARDUINO_LIBRARY_RainMaker_SRCS
libraries/RainMaker/src/RMaker.cpp
libraries/RainMaker/src/RMakerNode.cpp
libraries/RainMaker/src/RMakerParam.cpp
libraries/RainMaker/src/RMakerDevice.cpp
libraries/RainMaker/src/RMakerType.cpp
libraries/RainMaker/src/RMakerQR.cpp
libraries/RainMaker/src/RMakerUtils.cpp
libraries/RainMaker/src/AppInsights.cpp)
set(ARDUINO_LIBRARY_SD_MMC_SRCS libraries/SD_MMC/src/SD_MMC.cpp)
set(ARDUINO_LIBRARY_SD_SRCS
libraries/SD/src/SD.cpp
libraries/SD/src/sd_diskio.cpp
libraries/SD/src/sd_diskio_crc.c)
set(ARDUINO_LIBRARY_SimpleBLE_SRCS libraries/SimpleBLE/src/SimpleBLE.cpp)
set(ARDUINO_LIBRARY_SPIFFS_SRCS libraries/SPIFFS/src/SPIFFS.cpp)
set(ARDUINO_LIBRARY_SPI_SRCS libraries/SPI/src/SPI.cpp)
set(ARDUINO_LIBRARY_Ticker_SRCS libraries/Ticker/src/Ticker.cpp)
set(ARDUINO_LIBRARY_Update_SRCS
libraries/Update/src/Updater.cpp
libraries/Update/src/HttpsOTAUpdate.cpp)
set(ARDUINO_LIBRARY_USB_SRCS
libraries/USB/src/USBHID.cpp
libraries/USB/src/USBMIDI.cpp
libraries/USB/src/USBHIDMouse.cpp
libraries/USB/src/USBHIDKeyboard.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_da_DK.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_de_DE.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_en_US.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_es_ES.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_fr_FR.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_hu_HU.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_it_IT.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_pt_BR.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_pt_PT.cpp
libraries/USB/src/keyboardLayout/KeyboardLayout_sv_SE.cpp
libraries/USB/src/USBHIDGamepad.cpp
libraries/USB/src/USBHIDConsumerControl.cpp
libraries/USB/src/USBHIDSystemControl.cpp
libraries/USB/src/USBHIDVendor.cpp
libraries/USB/src/USBVendor.cpp)
set(ARDUINO_LIBRARY_WebServer_SRCS
libraries/WebServer/src/WebServer.cpp
libraries/WebServer/src/Parsing.cpp
libraries/WebServer/src/detail/mimetable.cpp
libraries/WebServer/src/middleware/MiddlewareChain.cpp
libraries/WebServer/src/middleware/AuthenticationMiddleware.cpp
libraries/WebServer/src/middleware/CorsMiddleware.cpp
libraries/WebServer/src/middleware/LoggingMiddleware.cpp)
set(ARDUINO_LIBRARY_NetworkClientSecure_SRCS
libraries/NetworkClientSecure/src/ssl_client.cpp
libraries/NetworkClientSecure/src/NetworkClientSecure.cpp)
set(ARDUINO_LIBRARY_Network_SRCS
libraries/Network/src/NetworkInterface.cpp
libraries/Network/src/NetworkEvents.cpp
libraries/Network/src/NetworkManager.cpp
libraries/Network/src/NetworkClient.cpp
libraries/Network/src/NetworkServer.cpp
libraries/Network/src/NetworkUdp.cpp)
set(ARDUINO_LIBRARY_WiFi_SRCS
libraries/WiFi/src/WiFiAP.cpp
libraries/WiFi/src/WiFi.cpp
libraries/WiFi/src/WiFiGeneric.cpp
libraries/WiFi/src/WiFiMulti.cpp
libraries/WiFi/src/WiFiScan.cpp
libraries/WiFi/src/WiFiSTA.cpp
libraries/WiFi/src/STA.cpp
libraries/WiFi/src/AP.cpp)
set(ARDUINO_LIBRARY_WiFiProv_SRCS libraries/WiFiProv/src/WiFiProv.cpp)
set(ARDUINO_LIBRARY_Wire_SRCS libraries/Wire/src/Wire.cpp)
set(ARDUINO_LIBRARY_Zigbee_SRCS
libraries/Zigbee/src/ZigbeeCore.cpp
libraries/Zigbee/src/ZigbeeEP.cpp
libraries/Zigbee/src/ZigbeeHandlers.cpp
libraries/Zigbee/src/ep/ZigbeeColorDimmableLight.cpp
libraries/Zigbee/src/ep/ZigbeeColorDimmerSwitch.cpp
libraries/Zigbee/src/ep/ZigbeeDimmableLight.cpp
libraries/Zigbee/src/ep/ZigbeeLight.cpp
libraries/Zigbee/src/ep/ZigbeeSwitch.cpp
libraries/Zigbee/src/ep/ZigbeeTempSensor.cpp
libraries/Zigbee/src/ep/ZigbeeThermostat.cpp
libraries/Zigbee/src/ep/ZigbeeFlowSensor.cpp
libraries/Zigbee/src/ep/ZigbeePressureSensor.cpp
libraries/Zigbee/src/ep/ZigbeeOccupancySensor.cpp
libraries/Zigbee/src/ep/ZigbeeCarbonDioxideSensor.cpp
libraries/Zigbee/src/ep/ZigbeeContactSwitch.cpp
libraries/Zigbee/src/ep/ZigbeeDoorWindowHandle.cpp
libraries/Zigbee/src/ep/ZigbeeWindowCovering.cpp
libraries/Zigbee/src/ep/ZigbeeVibrationSensor.cpp
libraries/Zigbee/src/ep/ZigbeeAnalog.cpp
libraries/Zigbee/src/ep/ZigbeeRangeExtender.cpp
libraries/Zigbee/src/ep/ZigbeeGateway.cpp
libraries/Zigbee/src/ep/ZigbeeWindSpeedSensor.cpp
libraries/Zigbee/src/ep/ZigbeeIlluminanceSensor.cpp
libraries/Zigbee/src/ep/ZigbeePM25Sensor.cpp
libraries/Zigbee/src/ep/ZigbeeElectricalMeasurement.cpp
libraries/Zigbee/src/ep/ZigbeeBinary.cpp
libraries/Zigbee/src/ep/ZigbeePowerOutlet.cpp
libraries/Zigbee/src/ep/ZigbeeFanControl.cpp
)
set(ARDUINO_LIBRARY_BLE_SRCS
libraries/BLE/src/BLE2901.cpp
libraries/BLE/src/BLE2902.cpp
libraries/BLE/src/BLE2904.cpp
libraries/BLE/src/BLEAddress.cpp
libraries/BLE/src/BLEAdvertisedDevice.cpp
libraries/BLE/src/BLEAdvertising.cpp
libraries/BLE/src/BLEBeacon.cpp
libraries/BLE/src/BLECharacteristic.cpp
libraries/BLE/src/BLECharacteristicMap.cpp
libraries/BLE/src/BLEClient.cpp
libraries/BLE/src/BLEDescriptor.cpp
libraries/BLE/src/BLEDescriptorMap.cpp
libraries/BLE/src/BLEDevice.cpp
libraries/BLE/src/BLEEddystoneTLM.cpp
libraries/BLE/src/BLEEddystoneURL.cpp
libraries/BLE/src/BLEExceptions.cpp
libraries/BLE/src/BLEHIDDevice.cpp
libraries/BLE/src/BLERemoteCharacteristic.cpp
libraries/BLE/src/BLERemoteDescriptor.cpp
libraries/BLE/src/BLERemoteService.cpp
libraries/BLE/src/BLEScan.cpp
libraries/BLE/src/BLESecurity.cpp
libraries/BLE/src/BLEServer.cpp
libraries/BLE/src/BLEService.cpp
libraries/BLE/src/BLEServiceMap.cpp
libraries/BLE/src/BLEUtils.cpp
libraries/BLE/src/BLEUUID.cpp
libraries/BLE/src/BLEValue.cpp
libraries/BLE/src/FreeRTOS.cpp
libraries/BLE/src/GeneralUtils.cpp
)
set(ARDUINO_LIBRARIES_SRCS)
set(ARDUINO_LIBRARIES_REQUIRES)
set(ARDUINO_LIBRARIES_INCLUDEDIRS)
foreach(libname IN LISTS ARDUINO_ALL_LIBRARIES)
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_${libname})
if(ARDUINO_LIBRARY_${libname}_SRCS)
list(APPEND ARDUINO_LIBRARIES_SRCS ${ARDUINO_LIBRARY_${libname}_SRCS})
endif()
if(ARDUINO_LIBRARY_${libname}_REQUIRES)
list(APPEND ARDUINO_LIBRARIES_REQUIRES ${ARDUINO_LIBRARY_${libname}_REQUIRES})
endif()
if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/libraries/${libname}/src)
list(APPEND ARDUINO_LIBRARIES_INCLUDEDIRS libraries/${libname}/src)
endif()
endif()
endforeach()
set(includedirs variants/${CONFIG_ARDUINO_VARIANT}/ cores/esp32/ ${ARDUINO_LIBRARIES_INCLUDEDIRS})
set(srcs ${CORE_SRCS} ${ARDUINO_LIBRARIES_SRCS})
set(priv_includes cores/esp32/libb64)
set(requires spi_flash esp_partition mbedtls wpa_supplicant esp_adc esp_eth http_parser esp_ringbuf esp_driver_gptimer esp_driver_usb_serial_jtag driver esp_http_client esp_https_ota)
set(priv_requires fatfs nvs_flash app_update spiffs bootloader_support bt esp_hid usb esp_psram ${ARDUINO_LIBRARIES_REQUIRES})
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_OpenThread)
#if(CONFIG_SOC_IEEE802154_SUPPORTED) # Does not work!
#if(CONFIG_OPENTHREAD_ENABLED) # Does not work!
if(IDF_TARGET STREQUAL "esp32c6" OR IDF_TARGET STREQUAL "esp32h2" OR IDF_TARGET STREQUAL "esp32c5") # Sadly only this works
list(APPEND requires openthread)
endif()
endif()
if(IDF_TARGET STREQUAL "esp32p4")
list(APPEND requires esp_driver_touch_sens)
endif()
idf_component_register(INCLUDE_DIRS ${includedirs} PRIV_INCLUDE_DIRS ${priv_includes} SRCS ${srcs} REQUIRES ${requires} PRIV_REQUIRES ${priv_requires})
if(NOT CONFIG_FREERTOS_HZ EQUAL 1000 AND NOT "$ENV{ARDUINO_SKIP_TICK_CHECK}")
# See delay() in cores/esp32/esp32-hal-misc.c.
message(FATAL_ERROR "esp32-arduino requires CONFIG_FREERTOS_HZ=1000 "
"(currently ${CONFIG_FREERTOS_HZ})")
endif()
string(TOUPPER ${CONFIG_ARDUINO_VARIANT} idf_target_caps)
string(REPLACE "-" "_" idf_target_for_macro "${idf_target_caps}")
target_compile_options(${COMPONENT_TARGET} PUBLIC
-DARDUINO=10812
-DARDUINO_${idf_target_for_macro}_DEV
-DARDUINO_ARCH_ESP32
-DARDUINO_BOARD="${idf_target_caps}_DEV"
-DARDUINO_VARIANT="${CONFIG_ARDUINO_VARIANT}"
-DESP32=ESP32)
if(CONFIG_AUTOSTART_ARDUINO)
# in autostart mode, arduino-esp32 contains the app_main() function and needs to
# reference setup() and loop() in the main component. If we add the main
# component to priv_requires then we create a large circular dependency
# (arduino-esp32 -> main -> arduino-esp32) and can get linker errors, so
# instead we add setup() and loop() to the undefined symbols list so the
# linker will always include them.
#
# (As they are C++ symbols, we need to add the C++ mangled names.)
target_link_libraries(${COMPONENT_LIB} INTERFACE "-u _Z5setupv -u _Z4loopv")
endif()
# This function adds a dependency on the given component if the component is included in the build.
function(maybe_add_component component_name)
idf_build_get_property(components BUILD_COMPONENTS)
if (${component_name} IN_LIST components)
idf_component_get_property(lib_name ${component_name} COMPONENT_LIB)
target_link_libraries(${COMPONENT_LIB} PUBLIC ${lib_name})
endif()
endfunction()
if(IDF_TARGET MATCHES "esp32s2|esp32s3|esp32p4" AND CONFIG_TINYUSB_ENABLED)
maybe_add_component(arduino_tinyusb)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_ArduinoOTA)
maybe_add_component(esp_https_ota)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_ESP_SR)
maybe_add_component(espressif__esp_sr)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_Matter)
maybe_add_component(espressif__esp_matter)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_LittleFS)
maybe_add_component(joltwallet__littlefs)
endif()
if(NOT CONFIG_ARDUINO_SELECTIVE_COMPILATION OR CONFIG_ARDUINO_SELECTIVE_WiFiProv)
maybe_add_component(espressif__network_provisioning)
endif()
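
As a side note on the autostart comment above (the -u _Z5setupv / -u _Z4loopv undefined-symbol references): a minimal sketch of the main component those symbols resolve against is shown below. The main/main.cpp path and the blink logic are illustrative assumptions, not part of this repository.

// main/main.cpp -- hypothetical main component of an ESP-IDF project that
// uses arduino-esp32 as a component with CONFIG_AUTOSTART_ARDUINO=y.
// app_main() is provided by arduino-esp32; the linker pulls in setup() and
// loop() through the undefined-symbol references added above, so only these
// two functions need to be defined here.
#include <Arduino.h>

void setup() {
  Serial.begin(115200);          // regular Arduino-style initialization
  pinMode(LED_BUILTIN, OUTPUT);  // LED_BUILTIN availability depends on the selected variant
}

void loop() {
  digitalWrite(LED_BUILTIN, HIGH);
  delay(500);
  digitalWrite(LED_BUILTIN, LOW);
  delay(500);
}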

View file

@ -1,128 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
vojtech.bartoska@espressif.com.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

2458
Doxyfile.default Normal file

File diff suppressed because it is too large

View file

@ -1,432 +0,0 @@
menu "Arduino Configuration"
config ARDUINO_VARIANT
string "Arduino target variant (board)"
default IDF_TARGET
help
The name of a target variant (e.g., a specific board) in the variants/
folder, e.g. "heltec_wifi_lora_32_V2". The name is case sensitive.
Specifying a variant name different from the target enables additional
customization, for example the definition of GPIO pins.
config ENABLE_ARDUINO_DEPENDS
bool
select LWIP_SO_RCVBUF
select ETHERNET
select WIFI_ENABLED
select ESP32_PHY_CALIBRATION_AND_DATA_STORAGE if IDF_TARGET_ESP32
select MEMMAP_SMP
default "y"
config AUTOSTART_ARDUINO
bool "Autostart Arduino setup and loop on boot"
default "n"
help
Enabling this option will implement app_main and start Arduino.
All you need to implement in your main.cpp is setup() and loop(),
and to include Arduino.h.
If disabled, you can call initArduino() to run any preparations
required by the framework.
choice ARDUINO_RUNNING_CORE
bool "Core on which Arduino's setup() and loop() are running"
default ARDUINO_RUN_CORE0 if FREERTOS_UNICORE
default ARDUINO_RUN_CORE1 if !FREERTOS_UNICORE
help
Select on which core Arduino's setup() and loop() functions run
config ARDUINO_RUN_CORE0
bool "CORE 0"
config ARDUINO_RUN_CORE1
bool "CORE 1"
depends on !FREERTOS_UNICORE
config ARDUINO_RUN_NO_AFFINITY
bool "BOTH"
depends on !FREERTOS_UNICORE
endchoice
config ARDUINO_RUNNING_CORE
int
default 0 if ARDUINO_RUN_CORE0
default 1 if ARDUINO_RUN_CORE1
default -1 if ARDUINO_RUN_NO_AFFINITY
config ARDUINO_LOOP_STACK_SIZE
int "Loop thread stack size"
default 8192
help
Amount of stack available for the Arduino task.
choice ARDUINO_EVENT_RUNNING_CORE
bool "Core on which Arduino's event handler is running"
default ARDUINO_EVENT_RUN_CORE0 if FREERTOS_UNICORE
default ARDUINO_EVENT_RUN_CORE1 if !FREERTOS_UNICORE
help
Select on which core Arduino's WiFi.onEvent() runs
config ARDUINO_EVENT_RUN_CORE0
bool "CORE 0"
config ARDUINO_EVENT_RUN_CORE1
bool "CORE 1"
depends on !FREERTOS_UNICORE
config ARDUINO_EVENT_RUN_NO_AFFINITY
bool "BOTH"
depends on !FREERTOS_UNICORE
endchoice
config ARDUINO_EVENT_RUNNING_CORE
int
default 0 if ARDUINO_EVENT_RUN_CORE0
default 1 if ARDUINO_EVENT_RUN_CORE1
default -1 if ARDUINO_EVENT_RUN_NO_AFFINITY
choice ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
bool "Core on which Arduino's Serial Event task is running"
default ARDUINO_SERIAL_EVENT_RUN_CORE0 if FREERTOS_UNICORE
default ARDUINO_SERIAL_EVENT_RUN_NO_AFFINITY if !FREERTOS_UNICORE
help
Select on which core Arduino's Serial Event task runs
config ARDUINO_SERIAL_EVENT_RUN_CORE0
bool "CORE 0"
config ARDUINO_SERIAL_EVENT_RUN_CORE1
bool "CORE 1"
depends on !FREERTOS_UNICORE
config ARDUINO_SERIAL_EVENT_RUN_NO_AFFINITY
bool "BOTH"
depends on !FREERTOS_UNICORE
endchoice
config ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
int
default 0 if ARDUINO_SERIAL_EVENT_RUN_CORE0
default 1 if ARDUINO_SERIAL_EVENT_RUN_CORE1
default -1 if ARDUINO_SERIAL_EVENT_RUN_NO_AFFINITY
config ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
int "Serial Event task stack size"
default 2048
help
Amount of stack available for the Serial Event task.
config ARDUINO_SERIAL_EVENT_TASK_PRIORITY
int "Priority of the Serial Event task"
default 24
help
Select at what priority you want the Serial Event task to run.
choice ARDUINO_UDP_RUNNING_CORE
bool "Core on which Arduino's UDP is running"
default ARDUINO_UDP_RUN_CORE0
help
Select on which core Arduino's UDP runs
config ARDUINO_UDP_RUN_CORE0
bool "CORE 0"
config ARDUINO_UDP_RUN_CORE1
bool "CORE 1"
depends on !FREERTOS_UNICORE
config ARDUINO_UDP_RUN_NO_AFFINITY
bool "BOTH"
depends on !FREERTOS_UNICORE
endchoice
config ARDUINO_UDP_RUNNING_CORE
int
default 0 if ARDUINO_UDP_RUN_CORE0
default 1 if ARDUINO_UDP_RUN_CORE1
default -1 if ARDUINO_UDP_RUN_NO_AFFINITY
config ARDUINO_UDP_TASK_PRIORITY
int "Priority of the UDP task"
default 3
help
Select at what priority you want the UDP task to run.
config ARDUINO_ISR_IRAM
bool "Run interrupts in IRAM"
default "n"
help
Enabling this option will attach all interrupts with the IRAM flag.
It will also make some HAL functions, like digitalRead/Write and more,
be loaded into IRAM for access inside ISRs.
Beware that this is a very dangerous setting. Enable it only if you
are fully aware of the consequences.
config DISABLE_HAL_LOCKS
bool "Disable mutex locks for HAL"
default "n"
help
Enabling this option will run all hardware abstraction without locks.
While communication with external hardware will be faster, you need to
make sure that the same bus is never used from another thread
or interrupt at the same time. This option is best used with Arduino enabled
and code implemented only in setup/loop and Arduino callbacks.
menu "Debug Log Configuration"
choice ARDUHAL_LOG_DEFAULT_LEVEL
bool "Default log level"
default ARDUHAL_LOG_DEFAULT_LEVEL_ERROR
help
Specify how much output to see in logs by default.
config ARDUHAL_LOG_DEFAULT_LEVEL_NONE
bool "No output"
config ARDUHAL_LOG_DEFAULT_LEVEL_ERROR
bool "Error"
config ARDUHAL_LOG_DEFAULT_LEVEL_WARN
bool "Warning"
config ARDUHAL_LOG_DEFAULT_LEVEL_INFO
bool "Info"
config ARDUHAL_LOG_DEFAULT_LEVEL_DEBUG
bool "Debug"
config ARDUHAL_LOG_DEFAULT_LEVEL_VERBOSE
bool "Verbose"
endchoice
config ARDUHAL_LOG_DEFAULT_LEVEL
int
default 0 if ARDUHAL_LOG_DEFAULT_LEVEL_NONE
default 1 if ARDUHAL_LOG_DEFAULT_LEVEL_ERROR
default 2 if ARDUHAL_LOG_DEFAULT_LEVEL_WARN
default 3 if ARDUHAL_LOG_DEFAULT_LEVEL_INFO
default 4 if ARDUHAL_LOG_DEFAULT_LEVEL_DEBUG
default 5 if ARDUHAL_LOG_DEFAULT_LEVEL_VERBOSE
config ARDUHAL_LOG_COLORS
bool "Use ANSI terminal colors in log output"
default "n"
help
Enable ANSI terminal color codes in log output.
In order to view these, your terminal program must support ANSI color codes.
config ARDUHAL_ESP_LOG
bool "Forward ESP_LOGx to Arduino log output"
default "n"
help
This option will redefine the ESP_LOGx macros to Arduino's log_x macros.
To enable for your application, add the following after your includes:
#ifdef ARDUINO_ARCH_ESP32
#include "esp32-hal-log.h"
#endif
endmenu
choice ARDUHAL_PARTITION_SCHEME
bool "Used partition scheme"
default ARDUHAL_PARTITION_SCHEME_DEFAULT
help
Specify which partition scheme should be used.
config ARDUHAL_PARTITION_SCHEME_DEFAULT
bool "Default"
config ARDUHAL_PARTITION_SCHEME_MINIMAL
bool "Minimal (for 2MB FLASH)"
config ARDUHAL_PARTITION_SCHEME_NO_OTA
bool "No OTA (for large apps)"
config ARDUHAL_PARTITION_SCHEME_HUGE_APP
bool "Huge App (for very large apps)"
config ARDUHAL_PARTITION_SCHEME_MIN_SPIFFS
bool "Minimal SPIFFS (for large apps with OTA)"
endchoice
config ARDUHAL_PARTITION_SCHEME
string
default "default" if ARDUHAL_PARTITION_SCHEME_DEFAULT
default "minimal" if ARDUHAL_PARTITION_SCHEME_MINIMAL
default "no_ota" if ARDUHAL_PARTITION_SCHEME_NO_OTA
default "huge_app" if ARDUHAL_PARTITION_SCHEME_HUGE_APP
default "min_spiffs" if ARDUHAL_PARTITION_SCHEME_MIN_SPIFFS
config AUTOCONNECT_WIFI
bool "Autoconnect WiFi on boot"
default "n"
depends on AUTOSTART_ARDUINO
select ARDUINO_SELECTIVE_WiFi
help
If enabled, WiFi will connect to the last used SSID (if station was enabled);
otherwise, the connection will be started only after calling WiFi.begin(ssid, password).
config ARDUINO_SELECTIVE_COMPILATION
bool "Include only specific Arduino libraries"
default n
config ARDUINO_SELECTIVE_SPI
bool "Enable SPI"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Wire
bool "Enable Wire"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ESP_SR
bool "Enable ESP-SR"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_EEPROM
bool "Enable EEPROM"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Preferences
bool "Enable Preferences"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Ticker
bool "Enable Ticker"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Update
bool "Enable Update"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Zigbee
bool "Enable Zigbee"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_FS
bool "Enable FS"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_SD
bool "Enable SD"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SD_MMC
bool "Enable SD_MMC"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_SPIFFS
bool "Enable SPIFFS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_FFat
bool "Enable FFat"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_LittleFS
bool "Enable LittleFS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_FS
default y
config ARDUINO_SELECTIVE_Network
bool "Enable Networking"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Ethernet
bool "Enable Ethernet"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_PPP
bool "Enable PPP"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_ArduinoOTA
bool "Enable ArduinoOTA"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
select ARDUINO_SELECTIVE_ESPmDNS
default y
config ARDUINO_SELECTIVE_AsyncUDP
bool "Enable AsyncUDP"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_DNSServer
bool "Enable DNSServer"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_ESPmDNS
bool "Enable ESPmDNS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_HTTPClient
bool "Enable HTTPClient"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
select ARDUINO_SELECTIVE_NetworkClientSecure
default y
config ARDUINO_SELECTIVE_Matter
bool "Enable Matter"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_NetBIOS
bool "Enable NetBIOS"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_WebServer
bool "Enable WebServer"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
select ARDUINO_SELECTIVE_FS
config ARDUINO_SELECTIVE_WiFi
bool "Enable WiFi"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_NetworkClientSecure
bool "Enable NetworkClientSecure"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network
default y
config ARDUINO_SELECTIVE_WiFiProv
bool "Enable WiFiProv"
depends on ARDUINO_SELECTIVE_COMPILATION && ARDUINO_SELECTIVE_Network && ARDUINO_SELECTIVE_WiFi
default y
config ARDUINO_SELECTIVE_BLE
bool "Enable BLE"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_BluetoothSerial
bool "Enable BluetoothSerial"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_SimpleBLE
bool "Enable SimpleBLE"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_RainMaker
bool "Enable RainMaker"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_OpenThread
bool "Enable OpenThread"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
config ARDUINO_SELECTIVE_Insights
bool "Enable Insights"
depends on ARDUINO_SELECTIVE_COMPILATION
default y
endmenu
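
For the non-autostart path mentioned in the AUTOSTART_ARDUINO help above, where the application calls initArduino() itself, a rough sketch could look like the following; the single-loop app_main() structure is an illustrative assumption, not something prescribed by this Kconfig.

// Hypothetical main/main.cpp with CONFIG_AUTOSTART_ARDUINO disabled:
// the application provides app_main() itself and calls initArduino()
// before using any Arduino APIs.
#include <Arduino.h>

extern "C" void app_main() {
  initArduino();              // run the preparations required by the framework
  Serial.begin(115200);
  while (true) {
    Serial.println("Arduino running as an ESP-IDF component");
    delay(1000);              // delay() relies on CONFIG_FREERTOS_HZ=1000, as checked in CMakeLists.txt
  }
}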

22
LICENSE Normal file
View file

@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015 Adafruit Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -1,503 +0,0 @@
### GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
### Preamble
The licenses for most software are designed to take away your freedom
to share and change it. By contrast, the GNU General Public Licenses
are intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users.
This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations
below.
When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.
To protect each distributor, we want to make it very clear that there
is no warranty for the free library. Also, if the library is modified
by someone else and passed on, the recipients should know that what
they have is not the original version, so that the original author's
reputation will not be affected by problems that might be introduced
by others.
Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using a
shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it
becomes a de-facto standard. To achieve this, non-free programs must
be allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
### TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
**0.** This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License"). Each
licensee is addressed as "you".
A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control
compilation and installation of the library.
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does and
what the program that uses the Library does.
**1.** You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a
fee.
**2.** You may modify your copy or copies of the Library or any
portion of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
- **a)** The modified work must itself be a software library.
- **b)** You must cause the files modified to carry prominent
notices stating that you changed the files and the date of
any change.
- **c)** You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.
- **d)** If a facility in the modified Library refers to a function
or a table of data to be supplied by an application program that
uses the facility, other than as an argument passed when the
facility is invoked, then you must make a good faith effort to
ensure that, in the event an application does not supply such
function or table, the facility still operates, and performs
whatever part of its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of
the application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.
In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
**3.** You may opt to apply the terms of the ordinary GNU General
Public License instead of this License to a given copy of the Library.
To do this, you must alter all the notices that refer to this License,
so that they refer to the ordinary GNU General Public License, version
2, instead of to this License. (If a newer version than version 2 of
the ordinary GNU General Public License has appeared, then you can
specify that version instead if you wish.) Do not make any other
change in these notices.
Once this change is made in a given copy, it is irreversible for that
copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.
This option is useful when you wish to copy part of the code of the
Library into a program that is not a library.
**4.** You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy from
a designated place, then offering equivalent access to copy the source
code from the same place satisfies the requirement to distribute the
source code, even though third parties are not compelled to copy the
source along with the object code.
**5.** A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a work,
in isolation, is not a derivative work of the Library, and therefore
falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License. Section
6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data structure
layouts and accessors, and small macros and small inline functions
(ten lines or less in length), then the use of the object file is
unrestricted, regardless of whether it is legally a derivative work.
(Executables containing this object code plus portions of the Library
will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.
**6.** As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a work
containing portions of the Library, and distribute that work under
terms of your choice, provided that the terms permit modification of
the work for the customer's own use and reverse engineering for
debugging such modifications.
You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:
- **a)** Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood that
the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)
- **b)** Use a suitable shared library mechanism for linking with
the Library. A suitable mechanism is one that (1) uses at run time
a copy of the library already present on the user's computer
system, rather than copying library functions into the executable,
and (2) will operate properly with a modified version of the
library, if the user installs one, as long as the modified version
is interface-compatible with the version that the work was
made with.
- **c)** Accompany the work with a written offer, valid for at least
three years, to give the same user the materials specified in
Subsection 6a, above, for a charge no more than the cost of
performing this distribution.
- **d)** If distribution of the work is made by offering access to
copy from a designated place, offer equivalent access to copy the
above specified materials from the same place.
- **e)** Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.
It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.
**7.** You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:
- **a)** Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other
library facilities. This must be distributed under the terms of
the Sections above.
- **b)** Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.
**8.** You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.
**9.** You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
**10.** Each time you redistribute the Library (or any work based on
the Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.
**11.** If, as a consequence of a court judgment or allegation of
patent infringement or for any other reason (not limited to patent
issues), conditions are imposed on you (whether by court order,
agreement or otherwise) that contradict the conditions of this
License, they do not excuse you from the conditions of this License.
If you cannot distribute so as to satisfy simultaneously your
obligations under this License and any other pertinent obligations,
then as a consequence you may not distribute the Library at all. For
example, if a patent license would not permit royalty-free
redistribution of the Library by all those who receive copies directly
or indirectly through you, then the only way you could satisfy both it
and this License would be to refrain entirely from distribution of the
Library.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply, and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
**12.** If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
**13.** The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time. Such
new versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.
**14.** If you wish to incorporate parts of the Library into other
free programs whose distribution conditions are incompatible with
these, write to the author to ask for permission. For software which
is copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
**NO WARRANTY**
**15.** BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
**16.** IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.
### END OF TERMS AND CONDITIONS
### How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms
of the ordinary General Public License).
To apply these terms, attach the following notices to the library. It
is safest to attach them to the start of each source file to most
effectively convey the exclusion of warranty; and each file should
have at least the "copyright" line and a pointer to where the full
notice is found.
one line to give the library's name and an idea of what it does.
Copyright (C) year name of author
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Also add information on how to contact you by electronic and paper
mail.
You should also get your employer (if you work as a programmer) or
your school, if any, to sign a "copyright disclaimer" for the library,
if necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in
the library `Frob' (a library for tweaking knobs) written
by James Random Hacker.
signature of Ty Coon, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!

131
README.md
View file

@ -1,105 +1,78 @@
# Arduino core for the ESP32, ESP32-C3, ESP32-C6, ESP32-H2, ESP32-P4, ESP32-S2 and ESP32-S3.
# Arduino CI Scripts
[![Build Status](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/push.yml?branch=master&event=push&label=Compilation%20Tests)](https://github.com/espressif/arduino-esp32/actions/workflows/push.yml?query=branch%3Amaster+event%3Apush)
[![Verbose Build Status](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/push.yml?branch=master&event=schedule&label=Compilation%20Tests%20(Verbose))](https://github.com/espressif/arduino-esp32/actions/workflows/push.yml?query=branch%3Amaster+event%3Aschedule)
[![External Libraries Test](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/lib.yml?branch=master&event=schedule&label=External%20Libraries%20Test)](https://github.com/espressif/arduino-esp32/blob/gh-pages/LIBRARIES_TEST.md)
[![Runtime Tests](https://github.com/espressif/arduino-esp32/blob/gh-pages/runtime-tests-results/badge.svg)](https://github.com/espressif/arduino-esp32/blob/gh-pages/runtime-tests-results/RUNTIME_TESTS_REPORT.md)
This repo contains various scripts and tools related to running continuous integration (CI) checks on Arduino Library Repos. The operations include:
### Need help or have a question? Join the chat at [Discord](https://discord.gg/8xY6e9crwv) or [open a new Discussion](https://github.com/espressif/arduino-esp32/discussions)
* checking formatting using [clang-format](https://clang.llvm.org/docs/ClangFormat.html),
* generating documentation from source comments using [Doxygen](https://www.doxygen.nl/), and
* building each example in the library for selected targets.
[![Discord invite](https://img.shields.io/discord/1327272229427216425?logo=discord&logoColor=white&logoSize=auto&label=Discord)](https://discord.gg/8xY6e9crwv)
There is an associated guide available here:
https://learn.adafruit.com/the-well-automated-arduino-library/
## Contents
## Adding GitHub Actions to Repo
- [Development Status](#development-status)
- [Development Planning](#development-planning)
- [Documentation](#documentation)
- [Supported Chips](#supported-chips)
- [Decoding exceptions](#decoding-exceptions)
- [Issue/Bug report template](#issuebug-report-template)
- [Contributing](#contributing)
To run these continuous integration checks on each push, pull request, or [repository dispatch](https://docs.github.com/en/rest/repos/repos?apiVersion=2022-11-28#create-a-repository-dispatch-event) event using [GitHub Actions](https://github.com/features/actions):
### Development Status
* Create a folder named `.github/workflows` in the root of the repo.
* Copy `example_actions.yml` into the above directory and rename it `githubci.yml`.
* Edit `githubci.yml` and change `PRETTYNAME` to the library repo name. Optionally, delete or comment out any steps (using the `#` character) that you don't want to include.
* Here's an example: [Adafruit_BME280_Library](https://github.com/adafruit/Adafruit_BME280_Library/blob/master/.github/workflows/githubci.yml)
#### Latest Stable Release
## Controlling Test Behavior
[![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32.svg)](https://github.com/espressif/arduino-esp32/releases/latest/)
[![Release Date](https://img.shields.io/github/release-date/espressif/arduino-esp32.svg)](https://github.com/espressif/arduino-esp32/releases/latest/)
[![Downloads](https://img.shields.io/github/downloads/espressif/arduino-esp32/latest/total.svg)](https://github.com/espressif/arduino-esp32/releases/latest/)
The `build_platform.py` script is used to test each `.ino` example in the repo for selected build platforms. The [`ALL_PLATFORMS`](ci-arduino/blob/master/build_platform.py#L54) dictionary contains a listing of all available platforms and selected platform groups. By default, `main_platforms` is used. To select a specific platform or group, replace `main_platforms` in [`githubci.yml`](example_actions.yml) with the group or platform name.
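For a quick look at what a group expands to, you can inspect the dictionary from Python. This is only an illustrative sketch (it assumes `all_platforms.py` from this repo is importable), not part of the CI scripts themselves:

```python
# Illustrative only: list the FQBN behind each board key in a platform group.
from all_platforms import ALL_PLATFORMS

for board in ALL_PLATFORMS["main_platforms"]:  # a tuple of board keys
    entry = ALL_PLATFORMS[board]               # [fqbn, uf2_family, manual core repo/branch]
    print(board, "->", entry[0])
```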
#### Latest Development Release
Additionally, [UF2 files](https://github.com/microsoft/uf2) of the compiled sketches can be generated for supported platforms.
[![Release Version](https://img.shields.io/github/release/espressif/arduino-esp32/all.svg)](https://github.com/espressif/arduino-esp32/releases/)
[![Release Date](https://img.shields.io/github/release-date-pre/espressif/arduino-esp32.svg)](https://github.com/espressif/arduino-esp32/releases/)
[![Downloads](https://img.shields.io/github/downloads-pre/espressif/arduino-esp32/latest/total.svg)](https://github.com/espressif/arduino-esp32/releases/)
### Fine tuning test selection
### Development Planning
The script behavior can be controlled using special filenames:
Our Development is fully tracked on this public **[Roadmap 🎉](https://github.com/orgs/espressif/projects/3)**
* `.PLATFORM_ID.test.skip` - Skip the specified platform. All others are tested.
* `.PLATFORM_ID.test.only` - Test the specified platform. All others are skipped.
* `.PLATFORM_ID.generate` - Generate UF2 of sketch for specified platform (if supported).
For even more information you can join our **[Monthly Community Meetings 🔔](https://github.com/espressif/arduino-esp32/discussions/categories/monthly-community-meetings).**
These are just empty files placed in an example folder. Replace `PLATFORM_ID` in the name with a key from [`ALL_PLATFORMS`](ci-arduino/blob/master/build_platform.py#L54). For example, `metro_m0` comes from the following line in `build_platform.py`:
### Documentation
```python
"metro_m0" : ["adafruit:samd:adafruit_metro_m0", "0x68ed2b88", None],
```
You can use the [Arduino-ESP32 Online Documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/) to get all information about this project.
You can use several `.PLATFORM_ID.test.skip` or `.PLATFORM_ID.test.only` files to exclude or include multiple platforms; a short sketch for creating these marker files follows the list below. For example:
---
* To **skip** testing on ESP8266, add a file named `.esp8266.test.skip`
* To test **only** the Arduino UNO, add a file named `.uno.test.only`
* To skip all and test **nothing**, add a file named `.none.test.only`
* To generate UF2s for PyPortal, add a file named `.pyportal.generate`
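If you want to create these marker files programmatically rather than by hand, a minimal sketch (the example folder name is hypothetical) could look like this:

```python
# Illustrative only: drop empty marker files into an example folder so that
# build_platform.py skips, or generates UF2s for, specific platforms.
from pathlib import Path

example = Path("examples/blink")              # hypothetical example folder
(example / ".esp8266.test.skip").touch()      # skip ESP8266 for this example
(example / ".pyportal.generate").touch()      # also produce a UF2 for PyPortal
```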
**Migration guide from version 2.x to 3.x is available [here](https://docs.espressif.com/projects/arduino-esp32/en/latest/migration_guides/2.x_to_3.0.html).**
### Dependencies
---
Any library dependencies listed in [`library.properties`](https://arduino.github.io/arduino-cli/0.19/library-specification/#libraryproperties-file-format) are automatically installed before the tests start. To install additional dependencies (e.g., those required for some examples but not the library itself) using [`arduino-cli`](https://arduino.github.io/arduino-cli/0.19/commands/arduino-cli_lib_install/), you can add extra steps to the `githubci.yml` file. For example:
**APIs compatibility with ESP8266 and Arduino-CORE (Arduino.cc) is explained [here](https://docs.espressif.com/projects/arduino-esp32/en/latest/libraries.html#apis).**
```yaml
- name: Set configuration
run: arduino-cli config set library.enable_unsafe_install true
---
- name: Install test dependencies
run: arduino-cli lib install --git-url https://github.com/arduino-libraries/Servo --git-url https://github.com/arduino-libraries/Ethernet
```
* [Getting Started](https://docs.espressif.com/projects/arduino-esp32/en/latest/getting_started.html)
* [Installing (Windows, Linux and macOS)](https://docs.espressif.com/projects/arduino-esp32/en/latest/installing.html)
* [Libraries](https://docs.espressif.com/projects/arduino-esp32/en/latest/libraries.html)
* [Arduino as an ESP-IDF component](https://docs.espressif.com/projects/arduino-esp32/en/latest/esp-idf_component.html)
* [FAQ](https://docs.espressif.com/projects/arduino-esp32/en/latest/faq.html)
* [Troubleshooting](https://docs.espressif.com/projects/arduino-esp32/en/latest/troubleshooting.html)
Note: you only need to enable the [`enable_unsafe_install`](https://arduino.github.io/arduino-cli/0.32/configuration/#configuration-keys) option if you want to install libraries by URL; it isn't necessary when installing them by name.
### Supported Chips
## Formatting Check with Clang
Here are the ESP32 series supported by the Arduino-ESP32 project:
The `run-clang-format.py` script is used to run [clang-format](https://clang.llvm.org/docs/ClangFormat.html) and check file formatting.
See [the guide](https://learn.adafruit.com/the-well-automated-arduino-library/formatting-with-clang-format) for details on installing `clang-format` to run formatting locally.
Even a single extra whitespace character can cause the formatting check to fail in CI.
You can typically just let clang do its thing and edit files in place using:
| **SoC** | **Stable** | **Development** | **Datasheet** |
|----------|:----------:|:---------------:|:-------------------------------------------------------------------------------------------------:|
| ESP32 | Yes | Yes | [ESP32](https://www.espressif.com/sites/default/files/documentation/esp32_datasheet_en.pdf) |
| ESP32-C3 | Yes | Yes | [ESP32-C3](https://www.espressif.com/sites/default/files/documentation/esp32-c3_datasheet_en.pdf) |
| ESP32-C6 | Yes | Yes | [ESP32-C6](https://www.espressif.com/sites/default/files/documentation/esp32-c6_datasheet_en.pdf) |
| ESP32-H2 | Yes | Yes | [ESP32-H2](https://www.espressif.com/sites/default/files/documentation/esp32-h2_datasheet_en.pdf) |
| ESP32-P4 | Yes | Yes | [ESP32-P4](https://www.espressif.com/sites/default/files/documentation/esp32-p4_datasheet_en.pdf) |
| ESP32-S2 | Yes | Yes | [ESP32-S2](https://www.espressif.com/sites/default/files/documentation/esp32-s2_datasheet_en.pdf) |
| ESP32-S3 | Yes | Yes | [ESP32-S3](https://www.espressif.com/sites/default/files/documentation/esp32-s3_datasheet_en.pdf) |
```sh
clang-format -i File_To_Format.cpp
```
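If you want to see which files would change without editing anything, recent clang-format releases (10 and newer) also support a dry-run mode. The following is a small illustrative sketch, not the `run-clang-format.py` script the CI actually uses:

```python
# Illustrative only: report C/C++ sources that differ from clang-format's output.
import subprocess
from pathlib import Path

for pattern in ("*.c", "*.cpp", "*.h"):
    for src in Path(".").rglob(pattern):
        # --dry-run -Werror makes clang-format exit non-zero instead of editing the file.
        result = subprocess.run(["clang-format", "--dry-run", "-Werror", str(src)])
        if result.returncode != 0:
            print(src, "needs formatting")
```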
> [!NOTE]
> ESP32-C2 is also supported by Arduino-ESP32 but requires using Arduino as an ESP-IDF component or rebuilding the static libraries.
> For more information, see the [Arduino as an ESP-IDF component documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/esp-idf_component.html) or the
> [Lib Builder documentation](https://docs.espressif.com/projects/arduino-esp32/en/latest/lib_builder.html), respectively.
## Documentation with Doxygen
For more details visit the [supported chips](https://docs.espressif.com/projects/arduino-esp32/en/latest/getting_started.html#supported-soc-s) documentation page.
### Decoding exceptions
You can use [EspExceptionDecoder](https://github.com/me-no-dev/EspExceptionDecoder) to get meaningful call trace.
### Issue/Bug report template
Before reporting an issue, make sure you've searched for similar one that was already created. Also make sure to go through all the issues labeled as [Type: For reference](https://github.com/espressif/arduino-esp32/issues?q=is%3Aissue+label%3A%22Type%3A+For+reference%22+).
Finally, if you are sure no one else had the issue, follow the **Issue template** or **Feature request template** while reporting any [new Issue](https://github.com/espressif/arduino-esp32/issues/new/choose).
### External libraries compilation test
We have set-up CI testing for external libraries for ESP32 Arduino core. You can check test results in the file [LIBRARIES_TEST](https://github.com/espressif/arduino-esp32/blob/gh-pages/LIBRARIES_TEST.md).
For more information and how to add your library to the test see [external library testing](https://docs.espressif.com/projects/arduino-esp32/en/latest/external_libraries_test.html) in the documentation.
### Contributing
We welcome contributions to the Arduino ESP32 project!
See [contributing](https://docs.espressif.com/projects/arduino-esp32/en/latest/contributing.html) in the documentation for more information on how to contribute to the project.
> We would like to have this repository in a polite and friendly atmosphere, so please be kind and respectful to others. For more details, look at [Code of Conduct](https://github.com/espressif/arduino-esp32/blob/master/CODE_OF_CONDUCT.md).
The `doxy_gen_and_deploy.sh` script uses [Doxygen](https://www.doxygen.nl/) to generate and deploy documentation
for the library. Any issues, like missing documentation, will cause the CI to fail.
See the [guide](https://learn.adafruit.com/the-well-automated-arduino-library/doxygen) for details on installing and running Doxygen locally. The guide also has some
[tips](https://learn.adafruit.com/the-well-automated-arduino-library/doxygen-tips) on basic usage of Doxygen markup within your code.

46
actions_install.sh Executable file
View file

@ -0,0 +1,46 @@
#!/bin/bash
set -e
pip3 install clint pyserial setuptools adafruit-nrfutil
# Only install stuff if it is really missing. This should never be executed,
# as the default image contains clang-format v10, v11 (default) and v12.
# https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#language-and-runtime
if [ ! -f /usr/bin/clang-format ]; then
  sudo gem install apt-spy2
  sudo apt-spy2 check
  sudo apt-spy2 fix --commit
  # after selecting a specific mirror, we need to run 'apt-get update'
  sudo apt-get -o Acquire::Retries=3 update
  sudo apt-get -o Acquire::Retries=3 install -y clang-format-8 libllvm8
  sudo ln -s /usr/bin/clang-format-8 /usr/bin/clang-format
fi
# make all our directories we need for files and libraries
mkdir ${HOME}/.arduino15
mkdir ${HOME}/.arduino15/packages
mkdir ${HOME}/Arduino
mkdir ${HOME}/Arduino/libraries
# install arduino IDE
export PATH=$PATH:$GITHUB_WORKSPACE/bin
echo $GITHUB_WORKSPACE/bin >> $GITHUB_PATH
curl -fsSL https://raw.githubusercontent.com/arduino/arduino-cli/master/install.sh | sh
arduino-cli config init > /dev/null
arduino-cli core update-index > /dev/null
# warn if this library repo does not have the arduino-library tag in its topics
case "$GITHUB_REPOSITORY" in
(*/ci-arduino|*/Adafruit_Learning_System_Guides) ;;
(*)
repo_topics=$(curl -f --request GET --url "https://api.github.com/repos/$GITHUB_REPOSITORY" || echo '{"topics":[]}')
repo_topics=$(echo $repo_topics | jq -r '.topics[]' )
if [[ ! $repo_topics =~ "arduino-library" ]]; then
echo "::warning::arduino-library is not found in this repo topics. Please add this tag in repo About"
fi
esac

168
all_platforms.py Normal file
View file

@ -0,0 +1,168 @@
# board: [ platform, uf2_family, manual core URL]
ALL_PLATFORMS={
# classic Arduino AVR
"uno" : ["arduino:avr:uno", None, None],
"leonardo" : ["arduino:avr:leonardo", None, None],
"mega2560" : ["arduino:avr:mega:cpu=atmega2560", None, None],
# Arduino SAMD
"zero" : ["arduino:samd:arduino_zero_native", "0x68ed2b88", None, None],
"cpx" : ["arduino:samd:adafruit_circuitplayground_m0", "0x68ed2b88", None],
# Espressif
"esp8266" : ["esp8266:esp8266:huzzah:eesz=4M3M,xtal=80", None, None],
"esp32" : ["espressif:esp32:featheresp32:FlashFreq=80", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"itsybitsy_esp32" : ["espressif:esp32:adafruit_itsybitsy_esp32:FlashFreq=80", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp8266" : ["esp8266:esp8266:huzzah:xtal=80,vt=flash,exception=disabled,stacksmash=disabled,ssl=all,mmu=3232,non32xfer=fast,eesz=4M2M,ip=lm2f,dbg=Disabled,lvl=None____,wipe=none,baud=115200", None, None],
"feather_esp32" : ["espressif:esp32:featheresp32:FlashFreq=80", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"wippersnapper_feather_esp32" : ["espressif:esp32:featheresp32:FlashFreq=80,PartitionScheme=min_spiffs", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32_v2" : ["espressif:esp32:adafruit_feather_esp32_v2", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"qtpy_esp32" : ["espressif:esp32:adafruit_qtpy_esp32_pico", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
## ESP32-C3/C6
"feather_esp32c6" : ["espressif:esp32:adafruit_feather_esp32c6:FlashMode=qio", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"wippersnapper_feather_esp32c6" : ["espressif:esp32:adafruit_feather_esp32c6:CDCOnBoot=cdc,CPUFreq=160,FlashFreq=80,FlashMode=qio,PartitionScheme=min_spiffs", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"wippersnapper_feather_esp32c6_debug" : ["espressif:esp32:adafruit_feather_esp32c6:CDCOnBoot=cdc,DebugLevel=verbose,CPUFreq=160,FlashFreq=80,FlashMode=qio,PartitionScheme=min_spiffs", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"qtpy_esp32c3" : ["espressif:esp32:adafruit_qtpy_esp32c3:FlashMode=qio", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"wippersnapper_qtpy_esp32c3" : ["espressif:esp32:adafruit_qtpy_esp32c3:FlashMode=qio,PartitionScheme=min_spiffs", None, "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
## ESP32-S2
"magtag" : ["espressif:esp32:adafruit_magtag29_esp32s2", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"funhouse" : ["espressif:esp32:adafruit_funhouse_esp32s2", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"funhouse_noota" : ["espressif:esp32:adafruit_funhouse_esp32s2:PartitionScheme=tinyuf2_noota", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"metroesp32s2" : ["espressif:esp32:adafruit_metro_esp32s2", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"metroesp32s2_debug" : ["espressif:esp32:adafruit_metro_esp32s2:DebugLevel=verbose", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"qtpy_esp32s2" : ["espressif:esp32:adafruit_qtpy_esp32s2", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s2" : ["espressif:esp32:adafruit_feather_esp32s2", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s2_debug" : ["espressif:esp32:adafruit_feather_esp32s2:DebugLevel=verbose", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s2_tft" : ["espressif:esp32:adafruit_feather_esp32s2_tft", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s2_tft_debug" : ["espressif:esp32:adafruit_feather_esp32s2_tft:DebugLevel=verbose", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s2_reverse_tft" : ["espressif:esp32:adafruit_feather_esp32s2_reversetft", "0xbfdd4eee", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
## ESP32-S3
"feather_esp32s3" : ["espressif:esp32:adafruit_feather_esp32s3_nopsram", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"wippersnapper_feather_esp32s3" : ["espressif:esp32:adafruit_feather_esp32s3_nopsram:PartitionScheme=tinyuf2_noota", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s3_debug" : ["espressif:esp32:adafruit_feather_esp32s3_nopsram:DebugLevel=verbose", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"wippersnapper_feather_esp32s3_4mbflash_2mbpsram" : ["espressif:esp32:adafruit_feather_esp32s3:PartitionScheme=tinyuf2_noota", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s3_4mbflash_2mbpsram" : ["espressif:esp32:adafruit_feather_esp32s3", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s3_4mbflash_2mbpsram_debug" : ["espressif:esp32:adafruit_feather_esp32s3:DebugLevel=verbose,PartitionScheme=tinyuf2_noota", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s3_tft" : ["espressif:esp32:adafruit_feather_esp32s3_tft", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"wippersnapper_feather_esp32s3_tft" : ["espressif:esp32:adafruit_feather_esp32s3_tft:PartitionScheme=tinyuf2_noota", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s3_tft_debug" : ["espressif:esp32:adafruit_feather_esp32s3_tft:DebugLevel=verbose,PartitionScheme=tinyuf2_noota", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s3_reverse_tft" : ["espressif:esp32:adafruit_feather_esp32s3_reversetft", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"feather_esp32s3_reverse_tft_debug" : ["espressif:esp32:adafruit_feather_esp32s3_reversetft:DebugLevel=verbose", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"wippersnapper_feather_esp32s3_reverse_tft" : ["espressif:esp32:adafruit_feather_esp32s3_reversetft:PartitionScheme=tinyuf2_noota", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"wippersnapper_feather_esp32s3_reverse_tft_debug" : ["espressif:esp32:adafruit_feather_esp32s3_reversetft:DebugLevel=verbose,PartitionScheme=tinyuf2_noota", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"matrixportal_s3" : ["esp32:esp32:adafruit_matrixportal_esp32s3", "0xc47e5767", None],
"metro_esp32s3" : ["espressif:esp32:adafruit_metro_esp32s3", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"metro_esp32s3_debug" : ["espressif:esp32:adafruit_metro_esp32s3:DebugLevel=verbose", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"pycamera_s3" : ["esp32:esp32:adafruit_camera_esp32s3", "0xc47e5767", None],
"qualia_s3_rgb666" : ["esp32:esp32:adafruit_qualia_s3_rgb666", "0xc47e5767", None],
"qtpy_esp32s3" : ["espressif:esp32:adafruit_qtpy_esp32s3_nopsram", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
"qtpy_esp32s3_n4r2" : ["espressif:esp32:adafruit_qtpy_esp32s3_n4r2", "0xc47e5767", "adafruit/wipper-bsp-3.0.6-idf-5-1-4"],
# Adafruit AVR
"trinket_3v" : ["adafruit:avr:trinket3", None, None],
"trinket_5v" : ["adafruit:avr:trinket5", None, None],
"protrinket_3v" : ["adafruit:avr:protrinket3", None, None],
"protrinket_5v" : ["adafruit:avr:protrinket5", None, None],
"gemma" : ["adafruit:avr:gemma", None, None],
"flora" : ["adafruit:avr:flora8", None, None],
"feather32u4" : ["adafruit:avr:feather32u4", None, None],
"cpc" : ["arduino:avr:circuitplay32u4cat", None, None],
# Adafruit SAMD
"gemma_m0" : ["adafruit:samd:adafruit_gemma_m0", "0x68ed2b88", None],
"trinket_m0" : ["adafruit:samd:adafruit_trinket_m0", "0x68ed2b88", None],
"feather_m0_express" : ["adafruit:samd:adafruit_feather_m0_express", "0x68ed2b88", None],
"feather_m0_express_tinyusb" : ["adafruit:samd:adafruit_feather_m0_express:usbstack=tinyusb", "0x68ed2b88", None],
"feather_m4_express" : ["adafruit:samd:adafruit_feather_m4:speed=120", "0x55114460", None],
"feather_m4_express_tinyusb" : ["adafruit:samd:adafruit_feather_m4:speed=120,usbstack=tinyusb", "0x55114460", None],
"feather_m4_can" : ["adafruit:samd:adafruit_feather_m4_can:speed=120", "0x55114460", None],
"feather_m4_can_tinyusb" : ["adafruit:samd:adafruit_feather_m4_can:speed=120,usbstack=tinyusb", "0x55114460", None],
"metro_m0" : ["adafruit:samd:adafruit_metro_m0", "0x68ed2b88", None],
"metro_m0_tinyusb" : ["adafruit:samd:adafruit_metro_m0:usbstack=tinyusb", "0x68ed2b88", None],
"metro_m4" : ["adafruit:samd:adafruit_metro_m4:speed=120", "0x55114460", None],
"metro_m4_tinyusb" : ["adafruit:samd:adafruit_metro_m4:speed=120,usbstack=tinyusb", "0x55114460", None],
"metro_m4_airliftlite" : ["adafruit:samd:adafruit_metro_m4_airliftlite:speed=120", "0x55114460", None],
"metro_m4_airliftlite_tinyusb" : ["adafruit:samd:adafruit_metro_m4_airliftlite:speed=120,usbstack=tinyusb", "0x55114460", None],
"pybadge" : ["adafruit:samd:adafruit_pybadge_m4:speed=120", "0x55114460", None],
"pybadge_tinyusb" : ["adafruit:samd:adafruit_pybadge_m4:speed=120,usbstack=tinyusb", "0x55114460", None],
"pygamer" : ["adafruit:samd:adafruit_pygamer_m4:speed=120", "0x55114460", None],
"pygamer_tinyusb" : ["adafruit:samd:adafruit_pygamer_m4:speed=120,usbstack=tinyusb", "0x55114460", None],
"hallowing_m0" : ["adafruit:samd:adafruit_hallowing", "0x68ed2b88", None],
"hallowing_m4" : ["adafruit:samd:adafruit_hallowing_m4:speed=120", "0x55114460", None],
"hallowing_m4_tinyusb" : ["adafruit:samd:adafruit_hallowing_m4:speed=120,usbstack=tinyusb", "0x55114460", None],
"neotrellis_m4" : ["adafruit:samd:adafruit_trellis_m4:speed=120", "0x55114460", None],
"monster_m4sk" : ["adafruit:samd:adafruit_monster_m4sk:speed=120", "0x55114460", None],
"monster_m4sk_tinyusb" : ["adafruit:samd:adafruit_monster_m4sk:speed=120,usbstack=tinyusb", "0x55114460", None],
"pyportal" : ["adafruit:samd:adafruit_pyportal_m4:speed=120", "0x55114460", None],
"pyportal_tinyusb" : ["adafruit:samd:adafruit_pyportal_m4:speed=120,usbstack=tinyusb", "0x55114460", None],
"pyportal_titano" : ["adafruit:samd:adafruit_pyportal_m4_titano:speed=120", "0x55114460", None],
"pyportal_titano_tinyusb" : ["adafruit:samd:adafruit_pyportal_m4_titano:speed=120,usbstack=tinyusb", "0x55114460", None],
"cpx_ada" : ["adafruit:samd:adafruit_circuitplayground_m0", "0x68ed2b88", None],
"grand_central" : ["adafruit:samd:adafruit_grandcentral_m4:speed=120", "0x55114460", None],
"grand_central_tinyusb" : ["adafruit:samd:adafruit_grandcentral_m4:speed=120,usbstack=tinyusb", "0x55114460", None],
"matrixportal" : ["adafruit:samd:adafruit_matrixportal_m4:speed=120", "0x55114460", None],
"matrixportal_tinyusb" : ["adafruit:samd:adafruit_matrixportal_m4:speed=120,usbstack=tinyusb", "0x55114460", None],
"neotrinkey_m0" : ["adafruit:samd:adafruit_neotrinkey_m0", "0x68ed2b88", None],
"rotarytrinkey_m0" : ["adafruit:samd:adafruit_rotarytrinkey_m0", "0x68ed2b88", None],
"neokeytrinkey_m0" : ["adafruit:samd:adafruit_neokeytrinkey_m0", "0x68ed2b88", None],
"slidetrinkey_m0" : ["adafruit:samd:adafruit_slidetrinkey_m0", "0x68ed2b88", None],
"proxlighttrinkey_m0" : ["adafruit:samd:adafruit_proxlighttrinkey_m0", "0x68ed2b88", None],
"trrstrinkey_m0" : ["adafruit:samd:adafruit_trrstrinkey_m0", "0x68ed2b88", None],
"thumbsticktrinkey_m0" : ["adafruit:samd:adafruit_thumbsticktrinkey_m0", "0x68ed2b88", None],
"sht4xtrinkey_m0" : ["adafruit:samd:adafruit_sht4xtrinkey_m0", "0x68ed2b88", None],
"pixeltrinkey_m0" : ["adafruit:samd:adafruit_pixeltrinkey_m0", "0x68ed2b88", None],
"X_m0" : ["adafruit:samd:adafruit_X_m0", "0x68ed2b88", None],
"qtpy_m0" : ["adafruit:samd:adafruit_qtpy_m0", "0x68ed2b88", None],
"qtpy_m0_tinyusb" : ["adafruit:samd:adafruit_qtpy_m0:usbstack=tinyusb", "0x68ed2b88", None],
# Arduino SAMD
"mkrwifi1010" : ["arduino:samd:mkrwifi1010", "0x8054", None],
"nano_33_iot" : ["arduino:samd:nano_33_iot", "0x8057", None],
# Arduino nRF
"microbit" : ["sandeepmistry:nRF5:BBCmicrobit:softdevice=s110", None, None],
# Adafruit nRF
"nrf52832" : ["adafruit:nrf52:feather52832:softdevice=s132v6,debug=l0", None, None],
"nrf52840" : ["adafruit:nrf52:feather52840:softdevice=s140v6,debug=l0", "0xada52840", None],
"cpb" : ["adafruit:nrf52:cplaynrf52840:softdevice=s140v6,debug=l0", "0xada52840", None],
"clue" : ["adafruit:nrf52:cluenrf52840:softdevice=s140v6,debug=l0", "0xada52840", None],
"ledglasses_nrf52840" : ["adafruit:nrf52:ledglasses_nrf52840:softdevice=s140v6,debug=l0", "0xada52840", None],
# RP2040 (Philhower)
"pico_rp2040" : ["rp2040:rp2040:rpipico:freq=125,flash=2097152_0", "0xe48bff56", None],
"pico_rp2040_tinyusb" : ["rp2040:rp2040:rpipico:flash=2097152_0,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
"pico_rp2040_tinyusb_host" : ["rp2040:rp2040:rpipico:flash=2097152_0,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb_host", "0xe48bff56", None],
"picow_rp2040" : ["rp2040:rp2040:rpipicow:flash=2097152_0,freq=125", "0xe48bff56", None],
"picow_rp2040_tinyusb" : ["rp2040:rp2040:rpipicow:flash=2097152_131072,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
"feather_rp2040" : ["rp2040:rp2040:adafruit_feather:freq=125,flash=8388608_0", "0xe48bff56", None],
"feather_rp2040_tinyusb" : ["rp2040:rp2040:adafruit_feather:flash=8388608_0,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
"feather_rp2040_adalogger" : ["rp2040:rp2040:adafruit_feather_adalogger:freq=125,flash=8388608_0", "0xe48bff56", None],
"feather_rp2040_adalogger_tinyusb" : ["rp2040:rp2040:adafruit_feather_adalogger:flash=8388608_0,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
"feather_rp2040_rfm" : ["rp2040:rp2040:adafruit_feather_rfm:freq=125,flash=8388608_0", "0xe48bff56", None],
"feather_rp2040_rfm_tinyusb" : ["rp2040:rp2040:adafruit_feather_rfm:flash=8388608_0,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
"feather_rp2040_dvi" : ["rp2040:rp2040:adafruit_feather_dvi:freq=125,flash=8388608_0", "0xe48bff56", None],
"feather_rp2040_dvi_tinyusb" : ["rp2040:rp2040:adafruit_feather_dvi:flash=8388608_0,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
"feather_rp2040_usbhost_tinyusb" : ["rp2040:rp2040:adafruit_feather_usb_host:flash=8388608_0,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
"qt2040_trinkey" : ["rp2040:rp2040:adafruit_trinkeyrp2040qt:freq=125,flash=8388608_0", "0xe48bff56", None],
"qt2040_trinkey_tinyusb" : ["rp2040:rp2040:adafruit_trinkeyrp2040qt:flash=8388608_0,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
"qt_py_rp2040": ["rp2040:rp2040:adafruit_qtpy:freq=125,flash=8388608_0", "0xe48bff56", None],
"qt_py_rp2040_tinyusb": ["rp2040:rp2040:adafruit_qtpy:flash=8388608_0,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
"itsybitsy_rp2040" : ["rp2040:rp2040:adafruit_itsybitsy:freq=125,flash=8388608_524288", "0xe48bff56", None],
"itsybitsy_rp2040_tinyusb" : ["rp2040:rp2040:adafruit_itsybitsy:flash=8388608_524288,freq=120,dbgport=Disabled,dbglvl=None,usbstack=tinyusb", "0xe48bff56", None],
# Attiny8xy, 16xy, 32xy (SpenceKonde)
"attiny3217" : ["megaTinyCore:megaavr:atxy7:chip=3217", None, None],
"attiny3216" : ["megaTinyCore:megaavr:atxy6:chip=3216", None, None],
"attiny1617" : ["megaTinyCore:megaavr:atxy7:chip=1617", None, None],
"attiny1616" : ["megaTinyCore:megaavr:atxy6:chip=1616", None, None],
"attiny1607" : ["megaTinyCore:megaavr:atxy7:chip=1607", None, None],
"attiny1606" : ["megaTinyCore:megaavr:atxy6:chip=1606", None, None],
"attiny817" : ["megaTinyCore:megaavr:atxy7:chip=817", None, None],
"attiny816" : ["megaTinyCore:megaavr:atxy6:chip=816", None, None],
"attiny807" : ["megaTinyCore:megaavr:atxy7:chip=807", None, None],
"attiny806" : ["megaTinyCore:megaavr:atxy6:chip=806", None, None],
# CH32v2 (openwch)
"CH32V20x_EVT": ["WCH:ch32v:CH32V20x_EVT", None, None],
# groupings
"main_platforms" : ("uno", "leonardo", "mega2560", "zero", "qtpy_m0",
"esp8266", "esp32", "metro_m4", "trinket_m0"),
"arcada_platforms" : ("pybadge", "pygamer", "hallowing_m4",
"cpb", "cpx_ada"),
"wippersnapper_platforms" : ("metro_m4_airliftlite_tinyusb", "pyportal_tinyusb"),
"rp2040_platforms" : ("pico_rp2040", "feather_rp2040")
}

1
assets/doxygen_badge.svg Normal file
View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="96" height="20"><linearGradient id="s" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="r"><rect width="96" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#r)"><rect width="37" height="20" fill="#555"/><rect x="37" width="59" height="20" fill="#007ec6"/><rect width="96" height="20" fill="url(#s)"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"><text x="195" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="270">Docs</text><text x="195" y="140" transform="scale(.1)" textLength="270">Docs</text><text x="655" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="490">Doxygen</text><text x="655" y="140" transform="scale(.1)" textLength="490">Doxygen</text></g></svg>


51575
boards.txt

File diff suppressed because it is too large Load diff

463
build_platform.py Executable file
View file

@ -0,0 +1,463 @@
import sys
import glob
import time
import os
import re
import shutil
import subprocess
import collections
from contextlib import contextmanager
from all_platforms import ALL_PLATFORMS
# optional --wall option causes the build to fail if there are any warnings
BUILD_WALL = False
BUILD_WARN = True
if "--wall" in sys.argv:
    BUILD_WALL = True
    sys.argv.remove("--wall")
if "--no_warn" in sys.argv:
    BUILD_WARN = False
    sys.argv.remove("--no_warn")
# optional timeout argument to extend build time
# for larger sketches or firmware builds
BUILD_TIMEOUT = False
if "--build_timeout" in sys.argv:
    BUILD_TIMEOUT = True
    popen_timeout = int(sys.argv[sys.argv.index("--build_timeout") + 1])
    sys.argv.pop(sys.argv.index("--build_timeout") + 1)
    sys.argv.remove("--build_timeout")
# figure out the build directory, then add its bin/ folder to PATH
BUILD_DIR = ''
try:
    # If we're on actions
    BUILD_DIR = os.environ["GITHUB_WORKSPACE"]
except KeyError:
    try:
        # If we're on travis
        BUILD_DIR = os.environ["TRAVIS_BUILD_DIR"]
    except KeyError:
        # If we're running on local machine
        BUILD_DIR = os.path.abspath(".")
os.environ["PATH"] += os.pathsep + BUILD_DIR + "/bin"
print("build dir:", BUILD_DIR)
IS_LEARNING_SYS = False
if "Adafruit_Learning_System_Guides" in BUILD_DIR:
    print("Found learning system repo")
    IS_LEARNING_SYS = True
    shutil.rmtree(BUILD_DIR + "/ci/examples/Blink")
elif "METROX-Examples-and-Project-Sketches" in BUILD_DIR:
    print("Found MetroX Examples Repo")
    IS_LEARNING_SYS = True
#os.system('pwd')
#os.system('ls -lA')
CROSS = u'\N{cross mark}'
CHECK = u'\N{check mark}'
BSP_URLS = (
    "https://adafruit.github.io/arduino-board-index/package_adafruit_index.json,"
    "http://arduino.esp8266.com/stable/package_esp8266com_index.json,"
    "https://raw.githubusercontent.com/espressif/arduino-esp32/gh-pages/package_esp32_dev_index.json,"
    "https://sandeepmistry.github.io/arduino-nRF5/package_nRF5_boards_index.json,"
    "https://github.com/earlephilhower/arduino-pico/releases/download/global/package_rp2040_index.json,"
    "https://drazzy.good-enough.cloud/package_drazzy.com_index.json,"
    "https://github.com/openwch/board_manager_files/raw/main/package_ch32v_index.json"
)
# global exit code
success = 0
class ColorPrint:
@staticmethod
def print_fail(message, end = '\n'):
sys.stdout.write('\x1b[1;31m' + message.strip() + '\x1b[0m' + end)
@staticmethod
def print_pass(message, end = '\n'):
sys.stdout.write('\x1b[1;32m' + message.strip() + '\x1b[0m' + end)
@staticmethod
def print_warn(message, end = '\n'):
sys.stdout.write('\x1b[1;33m' + message.strip() + '\x1b[0m' + end)
@staticmethod
def print_info(message, end = '\n'):
sys.stdout.write('\x1b[1;34m' + message.strip() + '\x1b[0m' + end)
@staticmethod
def print_bold(message, end = '\n'):
sys.stdout.write('\x1b[1;37m' + message.strip() + '\x1b[0m' + end)
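# Usage sketch: each helper writes one ANSI bold-colored line to stdout, e.g.
#   ColorPrint.print_warn("skipping")                   # yellow
#   ColorPrint.print_fail("FAILED to install " + fqbn)  # red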
def manually_install_esp32_bsp(repo_info):
print("Manually installing latest ESP32 BSP...")
# Assemble git url
repo_url = "git clone -b {0} https://github.com/{1}/arduino-esp32.git esp32".format(repo_info.split("/")[1], repo_info.split("/")[0])
# Locally clone repo (https://docs.espressif.com/projects/arduino-esp32/en/latest/installing.html#linux)
os.system("mkdir -p /home/runner/Arduino/hardware/espressif")
print("Cloning %s"%repo_url)
cmd = "cd /home/runner/Arduino/hardware/espressif && " + repo_url
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
r = proc.wait(timeout=1000)
out = proc.stdout.read()
err = proc.stderr.read()
if r != 0:
ColorPrint.print_fail("Failed to download ESP32 Arduino BSP!")
ColorPrint.print_fail(out.decode("utf-8"))
ColorPrint.print_fail(err.decode("utf-8"))
exit(-1)
print(out)
print("Cloned repository!")
print("Installing ESP32 Arduino BSP...")
cmd = "cd /home/runner/Arduino/hardware/espressif/esp32/tools && python3 get.py"
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
r = proc.wait(timeout=1000)
out = proc.stdout.read()
err = proc.stderr.read()
if r != 0:
ColorPrint.print_fail("Failed to install ESP32 Arduino BSP!")
ColorPrint.print_fail(out.decode("utf-8"))
ColorPrint.print_fail(err.decode("utf-8"))
exit(-1)
print(out)
print("Installed ESP32 BSP from source!")
def install_platform(fqbn, full_platform_name=None):
if os.path.exists("/home/runner/.arduino15/package_drazzy.json"):
print("Moving drazzy.json")
shutil.move("/home/runner/.arduino15/package_drazzy.json", "/home/runner/.arduino15/package_drazzy.com_index.json")
print("Installing", fqbn, end=" ")
if fqbn == "adafruit:avr": # we have a platform dep
install_platform("arduino:avr", full_platform_name)
if full_platform_name[2] is not None:
manually_install_esp32_bsp(full_platform_name[2]) # build esp32 bsp from desired source and branch
print(os.popen('arduino-cli core list | grep {}'.format(fqbn)).read(), end='')
return # bail out
for retry in range(0, 3):
print("arduino-cli core install "+fqbn+" --additional-urls "+BSP_URLS)
if os.system("arduino-cli core install "+fqbn+" --additional-urls "+BSP_URLS+" > /dev/null") == 0:
break
print("...retrying...", end=" ")
time.sleep(10) # wait 10 seconds then try again?
else:
# tried 3 times to no avail
ColorPrint.print_fail("FAILED to install "+fqbn)
exit(-1)
ColorPrint.print_pass(CHECK)
# print installed core version
print(os.popen('arduino-cli core list | grep {}'.format(fqbn)).read(), end='')
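# Call-site sketch: main() passes only the vendor:architecture portion of the
# FQBN, e.g. install_platform(":".join(fqbn.split(':', 2)[0:2]), ALL_PLATFORMS[platform]),
# since arduino-cli installs cores rather than individual boards.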
def run_or_die(cmd, error):
print(cmd)
attempt = 0
while attempt < 3:
if os.system(cmd) == 0:
return
attempt += 1
print('attempt {} failed, {} retries left'.format(attempt, 3-attempt))
time.sleep(5)
ColorPrint.print_fail(error)
exit(-1)
def is_library_installed(lib_name):
try:
installed_libs = subprocess.check_output(["arduino-cli", "lib", "list"]).decode("utf-8")
return any(re.match('^' + lib_name + r'\s*\d+\.', line) for line in installed_libs.split('\n'))
except subprocess.CalledProcessError as e:
print("Error checking installed libraries:", e)
return False
def install_library_deps():
print()
ColorPrint.print_info('#'*40)
print("INSTALLING ARDUINO LIBRARIES")
ColorPrint.print_info('#'*40)
run_or_die("arduino-cli core update-index --additional-urls "+BSP_URLS+
" > /dev/null", "FAILED to update core indices")
print()
# Install dependencies
our_name = None
try:
if IS_LEARNING_SYS:
libprop = open(BUILD_DIR+'/library.deps')
else:
libprop = open(BUILD_DIR+'/library.properties')
for line in libprop:
if line.startswith("name="):
our_name = line.replace("name=", "").strip()
if line.startswith("depends="):
deps = line.replace("depends=", "").split(",")
for dep in deps:
dep = dep.strip()
if not is_library_installed(dep):
print("Installing "+dep)
run_or_die('arduino-cli lib install "'+dep+'" > /dev/null',
"FAILED to install dependency "+dep)
else:
print("Skipping already installed lib: "+dep)
except OSError:
print("No library dep or properties found!")
pass # no library properties
# Delete the existing library if we somehow downloaded
# due to dependencies
if our_name:
run_or_die("arduino-cli lib uninstall \""+our_name+"\"", "Could not uninstall")
print("Libraries installed: ", glob.glob(os.environ['HOME']+'/Arduino/libraries/*'))
# link our library folder to the arduino libraries folder
if not IS_LEARNING_SYS:
try:
os.symlink(BUILD_DIR, os.environ['HOME']+'/Arduino/libraries/' + os.path.basename(BUILD_DIR))
except FileExistsError:
pass
################################ UF2 Utils.
def glob01(pattern):
result = glob.glob(pattern)
if len(result) > 1:
raise RuntimeError(f"Required pattern {pattern} to match at most 1 file, got {result}")
return result[0] if result else None
def glob1(pattern):
result = glob.glob(pattern)
if len(result) != 1:
raise RuntimeError(f"Required pattern {pattern} to match exactly 1 file, got {result}")
return result[0]
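# glob01() tolerates zero matches (returns None) while glob1() demands exactly one;
# generate_uf2() below uses glob01 for the optional .uf2 that arduino-cli may have
# already produced, and glob1 for the .hex/.bin artifact that must exist.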
def download_uf2_utils():
"""Downloads uf2conv tools if we don't already have them
"""
cmd = "wget -nc --no-check-certificate http://raw.githubusercontent.com/microsoft/uf2/master/utils/uf2families.json https://raw.githubusercontent.com/microsoft/uf2/master/utils/uf2conv.py"
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
r = proc.wait(timeout=60)
out = proc.stdout.read()
err = proc.stderr.read()
if r != 0:
ColorPrint.print_fail("Failed to download UF2 Utils!")
ColorPrint.print_fail(out.decode("utf-8"))
ColorPrint.print_fail(err.decode("utf-8"))
return False
return True
def generate_uf2(platform, fqbn, example_path):
"""Generates a .uf2 file from a .bin or .hex file.
:param str platform: The platform name.
:param str fqbn: The fully qualified board name.
:param str example_path: A path to the compiled .bin or .hex file.
"""
if not download_uf2_utils():
return None
cli_build_uf2_path = "build/*.*." + fqbn.split(':')[2] + "/*.uf2"
uf2_input_file = glob01(os.path.join(example_path, cli_build_uf2_path))
# Some platforms, like rp2040, directly generate a uf2 file, so no need to do it ourselves
if uf2_input_file is not None:
output_file = os.path.splitext(uf2_input_file)[0] + ".uf2"
ColorPrint.print_pass(CHECK)
ColorPrint.print_info("Used uf2 generated by arduino-cli")
return output_file
# Generate using a hex file for all platforms except for ESP32-S2, ESP32-S3 (exports as .bin files)
if not any (x in fqbn for x in ["esp32s2", "esp32s3"]):
cli_build_hex_path = "build/*.*." + fqbn.split(':')[2] + "/*.hex"
hex_input_file = glob1(os.path.join(example_path, cli_build_hex_path))
output_file = os.path.splitext(hex_input_file)[0] + ".uf2"
family_id = ALL_PLATFORMS[platform][1]
cmd = ['python3', 'uf2conv.py', hex_input_file, '-c', '-f', family_id, '-o', output_file]
else:
cli_build_path = "build/*.*." + fqbn.split(':')[2] + "/*.ino.bin"
input_file = glob1(os.path.join(example_path, cli_build_path))
output_file = os.path.splitext(input_file)[0] + ".uf2"
family_id = ALL_PLATFORMS[platform][1]
cmd = ['python3', 'uf2conv.py', input_file, '-c', '-f', family_id, '-b', "0x0000", '-o', output_file]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if BUILD_TIMEOUT:
r = proc.wait(timeout=popen_timeout)
else:
r = proc.wait(timeout=60)
out = proc.stdout.read()
err = proc.stderr.read()
if r == 0: # and not err: # we might get warnings that do not affect the result
ColorPrint.print_pass(CHECK)
ColorPrint.print_info(out.decode("utf-8"))
else:
ColorPrint.print_fail(CROSS)
ColorPrint.print_fail("\n\rERRCODE:", str(r))
ColorPrint.print_fail("\n\rOUTPUT: ", out.decode("utf-8"))
ColorPrint.print_fail("\n\rERROR: ", err.decode("utf-8"))
return None
return output_file
@contextmanager
def group_output(title):
sys.stdout.flush()
sys.stderr.flush()
print(f"::group::{title}")
try:
yield
finally:
sys.stdout.flush()
sys.stderr.flush()
print(f"::endgroup::")
sys.stdout.flush()
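# group_output() wraps a block's output in GitHub Actions log-folding markers
# (::group:: / ::endgroup::) so verbose compiler output is collapsed in the UI, e.g.
#   with group_output(f"{example} {fqbn} build output"):
#       ColorPrint.print_fail(err.decode("utf-8"))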
def test_examples_in_folder(platform, folderpath):
global success
fqbn = ALL_PLATFORMS[platform][0]
for example in sorted(os.listdir(folderpath)):
examplepath = folderpath+"/"+example
if os.path.isdir(examplepath):
test_examples_in_folder(platform, examplepath)
continue
if not examplepath.endswith(".ino"):
continue
print('\t'+example, end=' ')
# check if we should SKIP
skipfilename = folderpath+"/."+platform+".test.skip"
onlyfilename = folderpath+"/."+platform+".test.only"
# check if we should GENERATE UF2
gen_file_name = folderpath+"/."+platform+".generate"
# .skip.txt lists all skipped platforms, one per line
skip_txt = folderpath+"/.skip.txt"
is_skip = False
if os.path.exists(skipfilename):
is_skip = True
if os.path.exists(skip_txt):
with open(skip_txt) as f:
lines = f.readlines()
for line in lines:
if line.strip() == platform:
is_skip = True
break
if is_skip:
ColorPrint.print_warn("skipping")
continue
if glob.glob(folderpath+"/.*.test.only"):
platformname = glob.glob(folderpath+"/.*.test.only")[0].split('.')[1]
if platformname != "none" and platformname not in ALL_PLATFORMS:
# uh oh, this isn't a valid .test.only platform!
ColorPrint.print_fail(CROSS)
ColorPrint.print_fail("This example does not have a valid .platform.test.only file")
success = 1
continue
if not os.path.exists(onlyfilename):
ColorPrint.print_warn("skipping")
continue
if os.path.exists(gen_file_name):
ColorPrint.print_info("generating")
if BUILD_WARN:
if os.path.exists(gen_file_name):
cmd = ['arduino-cli', 'compile', '--warnings', 'all', '--fqbn', fqbn, '-e', folderpath]
else:
cmd = ['arduino-cli', 'compile', '--warnings', 'all', '--fqbn', fqbn, folderpath]
else:
cmd = ['arduino-cli', 'compile', '--warnings', 'none', '--export-binaries', '--fqbn', fqbn, folderpath]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
try:
if BUILD_TIMEOUT:
out, err = proc.communicate(timeout=popen_timeout)
else:
out, err = proc.communicate(timeout=120)
r = proc.returncode
except:
proc.kill()
out, err = proc.communicate()
r = 1
if r == 0 and not (err and BUILD_WALL == True):
ColorPrint.print_pass(CHECK)
if err:
# also print out warning message
with group_output(f"{example} {fqbn} build output"):
ColorPrint.print_fail(err.decode("utf-8"))
if os.path.exists(gen_file_name):
if ALL_PLATFORMS[platform][1] is None:
ColorPrint.print_info("Platform does not support UF2 files, skipping...")
else:
ColorPrint.print_info("Generating UF2...")
filename = generate_uf2(platform, fqbn, folderpath)
if filename is None:
success = 1 # failure
elif IS_LEARNING_SYS:
fqbnpath, uf2file = filename.split("/")[-2:]
os.makedirs(BUILD_DIR+"/build", exist_ok=True)
os.makedirs(BUILD_DIR+"/build/"+fqbnpath, exist_ok=True)
shutil.copy(filename, BUILD_DIR+"/build/"+fqbnpath+"-"+uf2file)
os.system("ls -lR "+BUILD_DIR+"/build")
else:
ColorPrint.print_fail(CROSS)
with group_output(f"{example} {fqbn} build output"):
ColorPrint.print_fail(out.decode("utf-8"))
ColorPrint.print_fail(err.decode("utf-8"))
success = 1
def main():
# Test platforms
platforms = []
# expand groups:
for arg in sys.argv[1:]:
platform = ALL_PLATFORMS.get(arg, None)
if isinstance(platform, list):
platforms.append(arg)
elif isinstance(platform, tuple):
for p in platform:
platforms.append(p)
else:
print("Unknown platform: ", arg)
exit(-1)
# Install libraries deps
install_library_deps()
for platform in platforms:
fqbn = ALL_PLATFORMS[platform][0]
print('#'*80)
ColorPrint.print_info("SWITCHING TO "+fqbn)
install_platform(":".join(fqbn.split(':', 2)[0:2]), ALL_PLATFORMS[platform]) # take only first two elements
print('#'*80)
if not IS_LEARNING_SYS:
test_examples_in_folder(platform, BUILD_DIR+"/examples")
else:
test_examples_in_folder(platform, BUILD_DIR)
if __name__ == "__main__":
main()
exit(success)
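# Example invocations (a sketch; arguments must be keys or group names in ALL_PLATFORMS):
#   python3 build_platform.py metro_m4_airliftlite_tinyusb --wall
#   python3 build_platform.py wippersnapper_platforms --build_timeout 600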


@ -1,257 +0,0 @@
/*
Arduino.h - Main include file for the Arduino SDK
Copyright (c) 2005-2013 Arduino Team. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef Arduino_h
#define Arduino_h
#include <stdbool.h>
#include <stdint.h>
#include <stdarg.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <inttypes.h>
#include "esp_arduino_version.h"
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "freertos/semphr.h"
#include "esp8266-compat.h"
#include "soc/gpio_reg.h"
#include "stdlib_noniso.h"
#include "binary.h"
#include "extra_attr.h"
#include "pins_arduino.h"
#include "esp32-hal.h"
#define PI 3.1415926535897932384626433832795
#define HALF_PI 1.5707963267948966192313216916398
#define TWO_PI 6.283185307179586476925286766559
#define DEG_TO_RAD 0.017453292519943295769236907684886
#define RAD_TO_DEG 57.295779513082320876798154814105
#define EULER 2.718281828459045235360287471352
#define SERIAL 0x0
#define DISPLAY 0x1
#define LSBFIRST 0
#define MSBFIRST 1
//Interrupt Modes
#define RISING 0x01
#define FALLING 0x02
#define CHANGE 0x03
#define ONLOW 0x04
#define ONHIGH 0x05
#define ONLOW_WE 0x0C
#define ONHIGH_WE 0x0D
#define DEFAULT 1
#define EXTERNAL 0
#ifndef __STRINGIFY
#define __STRINGIFY(a) #a
#endif
// can't define max() / min() because of conflicts with C++
#define _min(a, b) ((a) < (b) ? (a) : (b))
#define _max(a, b) ((a) > (b) ? (a) : (b))
#define _abs(x) ((x) > 0 ? (x) : -(x)) // abs() comes from STL
#define constrain(amt, low, high) ((amt) < (low) ? (low) : ((amt) > (high) ? (high) : (amt)))
#define _round(x) ((x) >= 0 ? (long)((x) + 0.5) : (long)((x) - 0.5)) // round() comes from STL
#define radians(deg) ((deg) * DEG_TO_RAD)
#define degrees(rad) ((rad) * RAD_TO_DEG)
#define sq(x) ((x) * (x))
// ESP32xx runs FreeRTOS... disabling interrupts can lead to issues, such as Watchdog Timeout
#define sei() portENABLE_INTERRUPTS()
#define cli() portDISABLE_INTERRUPTS()
#define interrupts() sei()
#define noInterrupts() cli()
#define clockCyclesPerMicrosecond() ((long int)getCpuFrequencyMhz())
#define clockCyclesToMicroseconds(a) ((a) / clockCyclesPerMicrosecond())
#define microsecondsToClockCycles(a) ((a) * clockCyclesPerMicrosecond())
#define lowByte(w) ((uint8_t)((w) & 0xff))
#define highByte(w) ((uint8_t)((w) >> 8))
#define bitRead(value, bit) (((value) >> (bit)) & 0x01)
#define bitSet(value, bit) ((value) |= (1UL << (bit)))
#define bitClear(value, bit) ((value) &= ~(1UL << (bit)))
#define bitToggle(value, bit) ((value) ^= (1UL << (bit)))
#define bitWrite(value, bit, bitvalue) ((bitvalue) ? bitSet(value, bit) : bitClear(value, bit))
// avr-libc defines _NOP() since 1.6.2
#ifndef _NOP
#define _NOP() \
do { \
__asm__ volatile("nop"); \
} while (0)
#endif
#define bit(b) (1UL << (b))
#define _BV(b) (1UL << (b))
#define digitalPinToTimer(pin) (0)
#define analogInPinToBit(P) (P)
#if SOC_GPIO_PIN_COUNT <= 32
#define digitalPinToPort(pin) (0)
#define digitalPinToBitMask(pin) (1UL << digitalPinToGPIONumber(pin))
#define portOutputRegister(port) ((volatile uint32_t *)GPIO_OUT_REG)
#define portInputRegister(port) ((volatile uint32_t *)GPIO_IN_REG)
#define portModeRegister(port) ((volatile uint32_t *)GPIO_ENABLE_REG)
#elif SOC_GPIO_PIN_COUNT <= 64
#define digitalPinToPort(pin) ((digitalPinToGPIONumber(pin) > 31) ? 1 : 0)
#define digitalPinToBitMask(pin) (1UL << (digitalPinToGPIONumber(pin) & 31))
#define portOutputRegister(port) ((volatile uint32_t *)((port) ? GPIO_OUT1_REG : GPIO_OUT_REG))
#define portInputRegister(port) ((volatile uint32_t *)((port) ? GPIO_IN1_REG : GPIO_IN_REG))
#define portModeRegister(port) ((volatile uint32_t *)((port) ? GPIO_ENABLE1_REG : GPIO_ENABLE_REG))
#else
#error SOC_GPIO_PIN_COUNT > 64 not implemented
#endif
#define NOT_A_PIN -1
#define NOT_A_PORT -1
#define NOT_AN_INTERRUPT -1
#define NOT_ON_TIMER 0
// some defines generic for all SoC moved from variants/board_name/pins_arduino.h
#define NUM_DIGITAL_PINS SOC_GPIO_PIN_COUNT // All GPIOs
#if SOC_ADC_PERIPH_NUM == 1
#define NUM_ANALOG_INPUTS (SOC_ADC_CHANNEL_NUM(0)) // Depends on the SoC (ESP32C6, ESP32H2, ESP32C2, ESP32P4)
#elif SOC_ADC_PERIPH_NUM == 2
#define NUM_ANALOG_INPUTS (SOC_ADC_CHANNEL_NUM(0) + SOC_ADC_CHANNEL_NUM(1)) // Depends on the SoC (ESP32, ESP32S2, ESP32S3, ESP32C3)
#endif
#define EXTERNAL_NUM_INTERRUPTS NUM_DIGITAL_PINS // All GPIOs
#define analogInputToDigitalPin(p) (((p) < NUM_ANALOG_INPUTS) ? (analogChannelToDigitalPin(p)) : -1)
#define digitalPinToInterrupt(p) ((((uint8_t)digitalPinToGPIONumber(p)) < NUM_DIGITAL_PINS) ? (p) : NOT_AN_INTERRUPT)
#define digitalPinHasPWM(p) (((uint8_t)digitalPinToGPIONumber(p)) < NUM_DIGITAL_PINS)
typedef bool boolean;
typedef uint8_t byte;
typedef unsigned int word;
#ifdef __cplusplus
void setup(void);
void loop(void);
// The default is using Real Hardware random number generator
// But when randomSeed() is called, it turns to Pseudo random
// generator, exactly as done in Arduino mainstream
long random(long);
long random(long, long);
// Calling randomSeed() will make random()
// using pseudo random like in Arduino
void randomSeed(unsigned long);
// Allow the Application to decide if the random generator
// will use Real Hardware random generation (true - default)
// or Pseudo random generation (false) as in Arduino MainStream
void useRealRandomGenerator(bool useRandomHW);
#endif
long map(long, long, long, long, long);
#ifdef __cplusplus
extern "C" {
#endif
void init(void);
void initVariant(void);
void initArduino(void);
unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout);
unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout);
uint8_t shiftIn(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder); // codespell:ignore shiftin
void shiftOut(uint8_t dataPin, uint8_t clockPin, uint8_t bitOrder, uint8_t val);
#ifdef __cplusplus
}
#include <algorithm>
#include <cmath>
#include "WCharacter.h"
#include "WString.h"
#include "Stream.h"
#include "Printable.h"
#include "Print.h"
#include "IPAddress.h"
#include "Client.h"
#include "Server.h"
#include "Udp.h"
#include "HardwareSerial.h"
#include "Esp.h"
#include "freertos_stats.h"
// Use float-compatible stl abs() and round(), we don't use Arduino macros to avoid issues with the C++ libraries
using std::abs;
using std::isinf;
using std::isnan;
using std::max;
using std::min;
using std::round;
uint16_t makeWord(uint16_t w);
uint16_t makeWord(uint8_t h, uint8_t l);
#define word(...) makeWord(__VA_ARGS__)
size_t getArduinoLoopTaskStackSize(void);
#define SET_LOOP_TASK_STACK_SIZE(sz) \
size_t getArduinoLoopTaskStackSize() { \
return sz; \
}
bool shouldPrintChipDebugReport(void);
#define ENABLE_CHIP_DEBUG_REPORT \
bool shouldPrintChipDebugReport(void) { \
return true; \
}
// allows user to bypass esp_spiram_test()
bool esp_psram_extram_test(void);
#define BYPASS_SPIRAM_TEST(bypass) \
bool testSPIRAM(void) { \
if (bypass) \
return true; \
else \
return esp_psram_extram_test(); \
}
unsigned long pulseIn(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L);
unsigned long pulseInLong(uint8_t pin, uint8_t state, unsigned long timeout = 1000000L);
extern "C" bool getLocalTime(struct tm *info, uint32_t ms = 5000);
extern "C" void configTime(long gmtOffset_sec, int daylightOffset_sec, const char *server1, const char *server2 = nullptr, const char *server3 = nullptr);
extern "C" void configTzTime(const char *tz, const char *server1, const char *server2 = nullptr, const char *server3 = nullptr);
void setToneChannel(uint8_t channel = 0);
void tone(uint8_t _pin, unsigned int frequency, unsigned long duration = 0);
void noTone(uint8_t _pin);
#endif /* __cplusplus */
// must be applied last as it overrides some of the above
#include "io_pin_remap.h"
#endif /* _ESP32_CORE_ARDUINO_H_ */


@ -1,47 +0,0 @@
/*
Client.h - Base class that provides Client
Copyright (c) 2011 Adrian McEwen. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef client_h
#define client_h
#include "Print.h"
#include "Stream.h"
#include "IPAddress.h"
class Client : public Stream {
public:
virtual int connect(IPAddress ip, uint16_t port) = 0;
virtual int connect(const char *host, uint16_t port) = 0;
virtual size_t write(uint8_t) = 0;
virtual size_t write(const uint8_t *buf, size_t size) = 0;
virtual int available() = 0;
virtual int read() = 0;
virtual int read(uint8_t *buf, size_t size) = 0;
virtual int peek() = 0;
virtual void flush() = 0;
virtual void stop() = 0;
virtual uint8_t connected() = 0;
virtual operator bool() = 0;
protected:
uint8_t *rawIPAddress(IPAddress &addr) {
return addr.raw_address();
}
};
#endif


@ -1,281 +0,0 @@
/*
*
* Copyright (c) 2021 Project CHIP Authors
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ColorFormat.h"
#include <math.h>
// define a clamp macro to substitute the std::clamp macro which is available from C++17 onwards
#define clamp(a, min, max) ((a) < (min) ? (min) : ((a) > (max) ? (max) : (a)))
const espHsvColor_t HSV_BLACK = {0, 0, 0};
const espHsvColor_t HSV_WHITE = {0, 0, 254};
const espHsvColor_t HSV_RED = {0, 254, 254};
const espHsvColor_t HSV_YELLOW = {42, 254, 254};
const espHsvColor_t HSV_GREEN = {84, 254, 254};
const espHsvColor_t HSV_CYAN = {127, 254, 254};
const espHsvColor_t HSV_BLUE = {169, 254, 254};
const espHsvColor_t HSV_MAGENTA = {211, 254, 254};
const espRgbColor_t RGB_BLACK = {0, 0, 0};
const espRgbColor_t RGB_WHITE = {255, 255, 255};
const espRgbColor_t RGB_RED = {255, 0, 0};
const espRgbColor_t RGB_YELLOW = {255, 255, 0};
const espRgbColor_t RGB_GREEN = {0, 255, 0};
const espRgbColor_t RGB_CYAN = {0, 255, 255};
const espRgbColor_t RGB_BLUE = {0, 0, 255};
const espRgbColor_t RGB_MAGENTA = {255, 0, 255};
// main color temperature values
const espCtColor_t COOL_WHITE_COLOR_TEMPERATURE = {142};
const espCtColor_t DAYLIGHT_WHITE_COLOR_TEMPERATURE = {181};
const espCtColor_t WHITE_COLOR_TEMPERATURE = {250};
const espCtColor_t SOFT_WHITE_COLOR_TEMPERATURE = {370};
const espCtColor_t WARM_WHITE_COLOR_TEMPERATURE = {454};
espRgbColor_t espHsvToRgbColor(uint16_t h, uint8_t s, uint8_t v) {
espHsvColor_t hsv = {h, s, v};
return espHsvColorToRgbColor(hsv);
}
espRgbColor_t espHsvColorToRgbColor(espHsvColor_t hsv) {
espRgbColor_t rgb;
uint8_t region, p, q, t;
uint32_t h, s, v, remainder;
if (hsv.s == 0) {
rgb.r = rgb.g = rgb.b = hsv.v;
} else {
h = hsv.h;
s = hsv.s;
v = hsv.v;
region = h / 43;
remainder = (h - (region * 43)) * 6;
p = (v * (255 - s)) >> 8;
q = (v * (255 - ((s * remainder) >> 8))) >> 8;
t = (v * (255 - ((s * (255 - remainder)) >> 8))) >> 8;
switch (region) {
case 0: rgb.r = v, rgb.g = t, rgb.b = p; break;
case 1: rgb.r = q, rgb.g = v, rgb.b = p; break;
case 2: rgb.r = p, rgb.g = v, rgb.b = t; break;
case 3: rgb.r = p, rgb.g = q, rgb.b = v; break;
case 4: rgb.r = t, rgb.g = p, rgb.b = v; break;
case 5:
default: rgb.r = v, rgb.g = p, rgb.b = q; break;
}
}
return rgb;
}
espHsvColor_t espRgbToHsvColor(uint8_t r, uint8_t g, uint8_t b) {
espRgbColor_t rgb = {r, g, b};
return espRgbColorToHsvColor(rgb);
}
espHsvColor_t espRgbColorToHsvColor(espRgbColor_t rgb) {
espHsvColor_t hsv;
uint8_t rgbMin, rgbMax;
rgbMin = rgb.r < rgb.g ? (rgb.r < rgb.b ? rgb.r : rgb.b) : (rgb.g < rgb.b ? rgb.g : rgb.b);
rgbMax = rgb.r > rgb.g ? (rgb.r > rgb.b ? rgb.r : rgb.b) : (rgb.g > rgb.b ? rgb.g : rgb.b);
hsv.v = rgbMax;
if (hsv.v == 0) {
hsv.h = 0;
hsv.s = 0;
return hsv;
}
hsv.s = 255 * (rgbMax - rgbMin) / hsv.v;
if (hsv.s == 0) {
hsv.h = 0;
return hsv;
}
if (rgbMax == rgb.r) {
hsv.h = 0 + 43 * (rgb.g - rgb.b) / (rgbMax - rgbMin);
} else if (rgbMax == rgb.g) {
hsv.h = 85 + 43 * (rgb.b - rgb.r) / (rgbMax - rgbMin);
} else {
hsv.h = 171 + 43 * (rgb.r - rgb.g) / (rgbMax - rgbMin);
}
return hsv;
}
espRgbColor_t espXYColorToRgbColor(uint8_t Level, espXyColor_t xy) {
return espXYToRgbColor(Level, xy.x, xy.y, true);
}
espRgbColor_t espXYToRgbColor(uint8_t Level, uint16_t current_X, uint16_t current_Y, bool addXYZScaling) {
// convert xyY color space to RGB
// https://www.easyrgb.com/en/math.php
// https://en.wikipedia.org/wiki/SRGB
// refer https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
// The current_X/current_Y attribute contains the current value of the normalized chromaticity value of x/y.
// The value of x/y shall be related to the current_X/current_Y attribute by the relationship
// x = current_X/65536
// y = current_Y/65536
// z = 1-x-y
espRgbColor_t rgb;
float x, y, z;
float X, Y, Z;
float r, g, b;
x = ((float)current_X) / 65535.0f;
y = ((float)current_Y) / 65535.0f;
z = 1.0f - x - y;
// Calculate XYZ values
// Y - given brightness in 0 - 1 range
Y = ((float)Level) / 254.0f;
X = (Y / y) * x;
Z = (Y / y) * z;
// X, Y and Z input refer to a D65/2° standard illuminant.
// sR, sG and sB (standard RGB) output range = 0 ÷ 255
// convert XYZ to RGB - CIE XYZ to sRGB
if (addXYZScaling) {
X = X / 100.0f;
Y = Y / 100.0f;
Z = Z / 100.0f;
}
r = (X * 3.2406f) - (Y * 1.5372f) - (Z * 0.4986f);
g = -(X * 0.9689f) + (Y * 1.8758f) + (Z * 0.0415f);
b = (X * 0.0557f) - (Y * 0.2040f) + (Z * 1.0570f);
// apply gamma 2.2 correction
r = (r <= 0.0031308f ? 12.92f * r : (1.055f) * pow(r, (1.0f / 2.4f)) - 0.055f);
g = (g <= 0.0031308f ? 12.92f * g : (1.055f) * pow(g, (1.0f / 2.4f)) - 0.055f);
b = (b <= 0.0031308f ? 12.92f * b : (1.055f) * pow(b, (1.0f / 2.4f)) - 0.055f);
// Round off
r = clamp(r, 0, 1);
g = clamp(g, 0, 1);
b = clamp(b, 0, 1);
// these rgb values are in the range of 0 to 1, convert to limit of HW specific LED
rgb.r = (uint8_t)(r * 255);
rgb.g = (uint8_t)(g * 255);
rgb.b = (uint8_t)(b * 255);
return rgb;
}
espXyColor_t espRgbToXYColor(uint8_t r, uint8_t g, uint8_t b) {
espRgbColor_t rgb = {r, g, b};
return espRgbColorToXYColor(rgb);
}
espXyColor_t espRgbColorToXYColor(espRgbColor_t rgb) {
// convert RGB to xy color space
// https://www.easyrgb.com/en/math.php
// https://en.wikipedia.org/wiki/SRGB
// refer https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
espXyColor_t xy;
float r, g, b;
float X, Y, Z;
float x, y;
r = ((float)rgb.r) / 255.0f;
g = ((float)rgb.g) / 255.0f;
b = ((float)rgb.b) / 255.0f;
// convert RGB to XYZ - sRGB to CIE XYZ
r = (r <= 0.04045f ? r / 12.92f : pow((r + 0.055f) / 1.055f, 2.4f));
g = (g <= 0.04045f ? g / 12.92f : pow((g + 0.055f) / 1.055f, 2.4f));
b = (b <= 0.04045f ? b / 12.92f : pow((b + 0.055f) / 1.055f, 2.4f));
// https://gist.github.com/popcorn245/30afa0f98eea1c2fd34d
X = r * 0.649926f + g * 0.103455f + b * 0.197109f;
Y = r * 0.234327f + g * 0.743075f + b * 0.022598f;
Z = r * 0.0000000f + g * 0.053077f + b * 1.035763f;
// sR, sG and sB (standard RGB) input range = 0 ÷ 255
// X, Y and Z output refer to a D65/2° standard illuminant.
X = r * 0.4124564f + g * 0.3575761f + b * 0.1804375f;
Y = r * 0.2126729f + g * 0.7151522f + b * 0.0721750f;
Z = r * 0.0193339f + g * 0.1191920f + b * 0.9503041f;
// Calculate xy values
x = X / (X + Y + Z);
y = Y / (X + Y + Z);
// convert to 0-65535 range
xy.x = (uint16_t)(x * 65535);
xy.y = (uint16_t)(y * 65535);
return xy;
}
espRgbColor_t espCTToRgbColor(uint16_t ct) {
espCtColor_t ctColor = {ct};
return espCTColorToRgbColor(ctColor);
}
espRgbColor_t espCTColorToRgbColor(espCtColor_t ct) {
espRgbColor_t rgb = {0, 0, 0};
float r, g, b;
if (ct.ctMireds == 0) {
return rgb;
}
// Algorithm credits to Tanner Helland: https://tannerhelland.com/2012/09/18/convert-temperature-rgb-algorithm-code.html
// Convert Mireds to centiKelvins. k = 1,000,000/mired
float ctCentiKelvin = 10000 / ct.ctMireds;
// Red
if (ctCentiKelvin <= 66) {
r = 255;
} else {
r = 329.698727446f * pow(ctCentiKelvin - 60, -0.1332047592f);
}
// Green
if (ctCentiKelvin <= 66) {
g = 99.4708025861f * log(ctCentiKelvin) - 161.1195681661f;
} else {
g = 288.1221695283f * pow(ctCentiKelvin - 60, -0.0755148492f);
}
// Blue
if (ctCentiKelvin >= 66) {
b = 255;
} else {
if (ctCentiKelvin <= 19) {
b = 0;
} else {
b = 138.5177312231 * log(ctCentiKelvin - 10) - 305.0447927307;
}
}
rgb.r = (uint8_t)clamp(r, 0, 255);
rgb.g = (uint8_t)clamp(g, 0, 255);
rgb.b = (uint8_t)clamp(b, 0, 255);
return rgb;
}


@ -1,71 +0,0 @@
/*
*
* Copyright (c) 2021 Project CHIP Authors
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <stdint.h>
#include <stdbool.h>
#ifdef __cplusplus
extern "C" {
#endif
struct RgbColor_t {
uint8_t r;
uint8_t g;
uint8_t b;
};
struct HsvColor_t {
uint16_t h;
uint8_t s;
uint8_t v;
};
struct XyColor_t {
uint16_t x;
uint16_t y;
};
struct CtColor_t {
uint16_t ctMireds;
};
typedef struct RgbColor_t espRgbColor_t;
typedef struct HsvColor_t espHsvColor_t;
typedef struct XyColor_t espXyColor_t;
typedef struct CtColor_t espCtColor_t;
espRgbColor_t espXYToRgbColor(uint8_t Level, uint16_t current_X, uint16_t current_Y, bool addXYZScaling);
espRgbColor_t espXYColorToRgb(uint8_t Level, espXyColor_t xy);
espXyColor_t espRgbColorToXYColor(espRgbColor_t rgb);
espXyColor_t espRgbToXYColor(uint8_t r, uint8_t g, uint8_t b);
espRgbColor_t espHsvColorToRgbColor(espHsvColor_t hsv);
espRgbColor_t espHsvToRgbColor(uint16_t h, uint8_t s, uint8_t v);
espRgbColor_t espCTColorToRgbColor(espCtColor_t ct);
espRgbColor_t espCTToRgbColor(uint16_t ct);
espHsvColor_t espRgbColorToHsvColor(espRgbColor_t rgb);
espHsvColor_t espRgbToHsvColor(uint8_t r, uint8_t g, uint8_t b);
extern const espHsvColor_t HSV_BLACK, HSV_WHITE, HSV_RED, HSV_YELLOW, HSV_GREEN, HSV_CYAN, HSV_BLUE, HSV_MAGENTA;
extern const espCtColor_t COOL_WHITE_COLOR_TEMPERATURE, DAYLIGHT_WHITE_COLOR_TEMPERATURE, WHITE_COLOR_TEMPERATURE, SOFT_WHITE_COLOR_TEMPERATURE,
WARM_WHITE_COLOR_TEMPERATURE;
extern const espRgbColor_t RGB_BLACK, RGB_WHITE, RGB_RED, RGB_YELLOW, RGB_GREEN, RGB_CYAN, RGB_BLUE, RGB_MAGENTA;
#ifdef __cplusplus
}
#endif


@ -1,518 +0,0 @@
/*
Esp.cpp - ESP31B-specific APIs
Copyright (c) 2015 Ivan Grokhotkov. All rights reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "Arduino.h"
#include "Esp.h"
#include "esp_sleep.h"
#include "spi_flash_mmap.h"
#include "esp_idf_version.h"
#include <memory>
#include <soc/soc.h>
#include <esp_partition.h>
extern "C" {
#include "esp_ota_ops.h"
#include "esp_image_format.h"
}
#include <MD5Builder.h>
#include "soc/spi_reg.h"
#include "esp_system.h"
#include "esp_chip_info.h"
#include "esp_mac.h"
#include "esp_flash.h"
#ifdef ESP_IDF_VERSION_MAJOR // IDF 4+
#if CONFIG_IDF_TARGET_ESP32 // ESP32/PICO-D4
#include "esp32/rom/spi_flash.h"
#include "soc/efuse_reg.h"
#define ESP_FLASH_IMAGE_BASE 0x1000 // Flash offset containing flash size and spi mode
#elif CONFIG_IDF_TARGET_ESP32S2
#include "esp32s2/rom/spi_flash.h"
#include "soc/efuse_reg.h"
#define ESP_FLASH_IMAGE_BASE 0x1000
#elif CONFIG_IDF_TARGET_ESP32S3
#include "esp32s3/rom/spi_flash.h"
#include "soc/efuse_reg.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32s3 is located at 0x0000
#elif CONFIG_IDF_TARGET_ESP32C2
#include "esp32c2/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32c2 is located at 0x0000
#elif CONFIG_IDF_TARGET_ESP32C3
#include "esp32c3/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32c3 is located at 0x0000
#elif CONFIG_IDF_TARGET_ESP32C6
#include "esp32c6/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32c6 is located at 0x0000
#elif CONFIG_IDF_TARGET_ESP32H2
#include "esp32h2/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x0000 // Esp32h2 is located at 0x0000
#elif CONFIG_IDF_TARGET_ESP32P4
#include "esp32p4/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x2000 // Esp32p4 is located at 0x2000
#elif CONFIG_IDF_TARGET_ESP32C5
#include "esp32c5/rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x2000 // Esp32c5 is located at 0x2000
#else
#error Target CONFIG_IDF_TARGET is not supported
#endif
#else // ESP32 Before IDF 4.0
#include "rom/spi_flash.h"
#define ESP_FLASH_IMAGE_BASE 0x1000
#endif
// REG_SPI_BASE is not defined for S3/C3 ??
#if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
#ifdef REG_SPI_BASE
#undef REG_SPI_BASE
#endif // REG_SPI_BASE
#define REG_SPI_BASE(i) (DR_REG_SPI1_BASE + (((i) > 1) ? (((i) * 0x1000) + 0x20000) : (((~(i)) & 1) * 0x1000)))
#endif // TARGET
/**
* User-defined Literals
* usage:
*
* uint32_t = test = 10_MHz; // --> 10000000
*/
unsigned long long operator"" _kHz(unsigned long long x) {
return x * 1000;
}
unsigned long long operator"" _MHz(unsigned long long x) {
return x * 1000 * 1000;
}
unsigned long long operator"" _GHz(unsigned long long x) {
return x * 1000 * 1000 * 1000;
}
unsigned long long operator"" _kBit(unsigned long long x) {
return x * 1024;
}
unsigned long long operator"" _MBit(unsigned long long x) {
return x * 1024 * 1024;
}
unsigned long long operator"" _GBit(unsigned long long x) {
return x * 1024 * 1024 * 1024;
}
unsigned long long operator"" _kB(unsigned long long x) {
return x * 1024;
}
unsigned long long operator"" _MB(unsigned long long x) {
return x * 1024 * 1024;
}
unsigned long long operator"" _GB(unsigned long long x) {
return x * 1024 * 1024 * 1024;
}
EspClass ESP;
void EspClass::deepSleep(uint64_t time_us) {
esp_deep_sleep(time_us);
}
void EspClass::restart(void) {
esp_restart();
}
uint32_t EspClass::getHeapSize(void) {
return heap_caps_get_total_size(MALLOC_CAP_INTERNAL);
}
uint32_t EspClass::getFreeHeap(void) {
return heap_caps_get_free_size(MALLOC_CAP_INTERNAL);
}
uint32_t EspClass::getMinFreeHeap(void) {
return heap_caps_get_minimum_free_size(MALLOC_CAP_INTERNAL);
}
uint32_t EspClass::getMaxAllocHeap(void) {
return heap_caps_get_largest_free_block(MALLOC_CAP_INTERNAL);
}
uint32_t EspClass::getPsramSize(void) {
if (psramFound()) {
return heap_caps_get_total_size(MALLOC_CAP_SPIRAM);
}
return 0;
}
uint32_t EspClass::getFreePsram(void) {
if (psramFound()) {
return heap_caps_get_free_size(MALLOC_CAP_SPIRAM);
}
return 0;
}
uint32_t EspClass::getMinFreePsram(void) {
if (psramFound()) {
return heap_caps_get_minimum_free_size(MALLOC_CAP_SPIRAM);
}
return 0;
}
uint32_t EspClass::getMaxAllocPsram(void) {
if (psramFound()) {
return heap_caps_get_largest_free_block(MALLOC_CAP_SPIRAM);
}
return 0;
}
static uint32_t sketchSize(sketchSize_t response) {
esp_image_metadata_t data;
const esp_partition_t *running = esp_ota_get_running_partition();
if (!running) {
return 0;
}
const esp_partition_pos_t running_pos = {
.offset = running->address,
.size = running->size,
};
data.start_addr = running_pos.offset;
esp_image_verify(ESP_IMAGE_VERIFY, &running_pos, &data);
if (response) {
return running_pos.size - data.image_len;
} else {
return data.image_len;
}
}
uint32_t EspClass::getSketchSize() {
return sketchSize(SKETCH_SIZE_TOTAL);
}
String EspClass::getSketchMD5() {
static String result;
if (result.length()) {
return result;
}
uint32_t lengthLeft = getSketchSize();
const esp_partition_t *running = esp_ota_get_running_partition();
if (!running) {
log_e("Partition could not be found");
return String();
}
const size_t bufSize = SPI_FLASH_SEC_SIZE;
uint8_t *pb = (uint8_t *)malloc(bufSize);
if (!pb) {
log_e("Not enough memory to allocate buffer");
return String();
}
uint32_t offset = 0;
MD5Builder md5;
md5.begin();
while (lengthLeft > 0) {
size_t readBytes = (lengthLeft < bufSize) ? lengthLeft : bufSize;
if (!ESP.flashRead(running->address + offset, (uint32_t *)pb, (readBytes + 3) & ~3)) {
free(pb);
log_e("Could not read buffer from flash");
return String();
}
md5.add(pb, readBytes);
lengthLeft -= readBytes;
offset += readBytes;
#if CONFIG_FREERTOS_UNICORE
delay(1); // Fix solo WDT
#endif
}
free(pb);
md5.calculate();
result = md5.toString();
return result;
}
uint32_t EspClass::getFreeSketchSpace() {
const esp_partition_t *_partition = esp_ota_get_next_update_partition(NULL);
if (!_partition) {
return 0;
}
return _partition->size;
}
uint16_t EspClass::getChipRevision(void) {
esp_chip_info_t chip_info;
esp_chip_info(&chip_info);
return chip_info.revision;
}
const char *EspClass::getChipModel(void) {
#if CONFIG_IDF_TARGET_ESP32
uint32_t chip_ver = REG_GET_FIELD(EFUSE_BLK0_RDATA3_REG, EFUSE_RD_CHIP_PACKAGE);
uint32_t pkg_ver = chip_ver & 0x7;
switch (pkg_ver) {
case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDQ6:
if ((getChipRevision() / 100) == 3) {
return "ESP32-D0WDQ6-V3";
} else {
return "ESP32-D0WDQ6";
}
case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDQ5:
if ((getChipRevision() / 100) == 3) {
return "ESP32-D0WD-V3";
} else {
return "ESP32-D0WD";
}
case EFUSE_RD_CHIP_VER_PKG_ESP32D2WDQ5: return "ESP32-D2WD";
case EFUSE_RD_CHIP_VER_PKG_ESP32U4WDH: return "ESP32-U4WDH";
case EFUSE_RD_CHIP_VER_PKG_ESP32PICOD4: return "ESP32-PICO-D4";
case EFUSE_RD_CHIP_VER_PKG_ESP32PICOV302: return "ESP32-PICO-V3-02";
case EFUSE_RD_CHIP_VER_PKG_ESP32D0WDR2V3: return "ESP32-D0WDR2-V3";
default: return "Unknown";
}
#elif CONFIG_IDF_TARGET_ESP32S2
uint32_t pkg_ver = REG_GET_FIELD(EFUSE_RD_MAC_SPI_SYS_3_REG, EFUSE_PKG_VERSION);
switch (pkg_ver) {
case 0: return "ESP32-S2";
case 1: return "ESP32-S2FH16";
case 2: return "ESP32-S2FH32";
default: return "ESP32-S2 (Unknown)";
}
#else
esp_chip_info_t chip_info;
esp_chip_info(&chip_info);
switch (chip_info.model) {
case CHIP_ESP32S3: return "ESP32-S3";
case CHIP_ESP32C3: return "ESP32-C3";
case CHIP_ESP32C2: return "ESP32-C2";
case CHIP_ESP32C6: return "ESP32-C6";
case CHIP_ESP32H2: return "ESP32-H2";
case CHIP_ESP32P4: return "ESP32-P4";
#if ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 5, 0)
case CHIP_ESP32C5: return "ESP32-C5";
case CHIP_ESP32C61: return "ESP32-C61";
case CHIP_ESP32H21: return "ESP32-H21";
#endif
default: return "UNKNOWN";
}
#endif
}
uint8_t EspClass::getChipCores(void) {
esp_chip_info_t chip_info;
esp_chip_info(&chip_info);
return chip_info.cores;
}
const char *EspClass::getSdkVersion(void) {
return esp_get_idf_version();
}
const char *EspClass::getCoreVersion(void) {
return ESP_ARDUINO_VERSION_STR;
}
uint32_t ESP_getFlashChipId(void) {
uint32_t id = g_rom_flashchip.device_id;
id = ((id & 0xff) << 16) | ((id >> 16) & 0xff) | (id & 0xff00);
return id;
}
uint32_t EspClass::getFlashChipSize(void) {
uint32_t id = (ESP_getFlashChipId() >> 16) & 0xFF;
return 2 << (id - 1);
}
uint32_t EspClass::getFlashChipSpeed(void) {
esp_image_header_t fhdr;
if (esp_flash_read(esp_flash_default_chip, (void *)&fhdr, ESP_FLASH_IMAGE_BASE, sizeof(esp_image_header_t)) && fhdr.magic != ESP_IMAGE_HEADER_MAGIC) {
return 0;
}
return magicFlashChipSpeed(fhdr.spi_speed);
}
// FIXME for P4
#if !defined(CONFIG_IDF_TARGET_ESP32P4)
FlashMode_t EspClass::getFlashChipMode(void) {
#if CONFIG_IDF_TARGET_ESP32S2
uint32_t spi_ctrl = REG_READ(PERIPHS_SPI_FLASH_CTRL);
#else
#if CONFIG_IDF_TARGET_ESP32H2 || CONFIG_IDF_TARGET_ESP32C2 || CONFIG_IDF_TARGET_ESP32C6
uint32_t spi_ctrl = REG_READ(DR_REG_SPI0_BASE + 0x8);
#else
uint32_t spi_ctrl = REG_READ(SPI_CTRL_REG(0));
#endif
#endif
/* Not all of the following constants are already defined in older versions of spi_reg.h, so do it manually for now*/
if (spi_ctrl & BIT(24)) { //SPI_FREAD_QIO
return (FM_QIO);
} else if (spi_ctrl & BIT(20)) { //SPI_FREAD_QUAD
return (FM_QOUT);
} else if (spi_ctrl & BIT(23)) { //SPI_FREAD_DIO
return (FM_DIO);
} else if (spi_ctrl & BIT(14)) { // SPI_FREAD_DUAL
return (FM_DOUT);
} else if (spi_ctrl & BIT(13)) { //SPI_FASTRD_MODE
return (FM_FAST_READ);
} else {
return (FM_SLOW_READ);
}
return (FM_DOUT);
}
#endif // if !defined(CONFIG_IDF_TARGET_ESP32P4)
uint32_t EspClass::magicFlashChipSize(uint8_t byte) {
/*
FLASH_SIZES = {
"1MB": 0x00,
"2MB": 0x10,
"4MB": 0x20,
"8MB": 0x30,
"16MB": 0x40,
"32MB": 0x50,
"64MB": 0x60,
"128MB": 0x70,
}
*/
switch (byte & 0x0F) {
case 0x0: return (1_MB); // 8 MBit (1MB)
case 0x1: return (2_MB); // 16 MBit (2MB)
case 0x2: return (4_MB); // 32 MBit (4MB)
case 0x3: return (8_MB); // 64 MBit (8MB)
case 0x4: return (16_MB); // 128 MBit (16MB)
case 0x5: return (32_MB); // 256 MBit (32MB)
case 0x6: return (64_MB); // 512 MBit (64MB)
case 0x7: return (128_MB); // 1 GBit (128MB)
default: // fail?
return 0;
}
}
uint32_t EspClass::magicFlashChipSpeed(uint8_t byte) {
#if CONFIG_IDF_TARGET_ESP32C2
/*
FLASH_FREQUENCY = {
"60m": 0xF,
"30m": 0x0,
"20m": 0x1,
"15m": 0x2,
}
*/
switch (byte & 0x0F) {
case 0xF: return (60_MHz);
case 0x0: return (30_MHz);
case 0x1: return (20_MHz);
case 0x2: return (15_MHz);
default: // fail?
return 0;
}
#elif CONFIG_IDF_TARGET_ESP32C6
/*
FLASH_FREQUENCY = {
"80m": 0x0, # workaround for wrong mspi HS div value in ROM
"40m": 0x0,
"20m": 0x2,
}
*/
switch (byte & 0x0F) {
case 0x0: return (80_MHz);
case 0x2: return (20_MHz);
default: // fail?
return 0;
}
#elif CONFIG_IDF_TARGET_ESP32H2
/*
FLASH_FREQUENCY = {
"48m": 0xF,
"24m": 0x0,
"16m": 0x1,
"12m": 0x2,
}
*/
switch (byte & 0x0F) {
case 0xF: return (48_MHz);
case 0x0: return (24_MHz);
case 0x1: return (16_MHz);
case 0x2: return (12_MHz);
default: // fail?
return 0;
}
#else
/*
FLASH_FREQUENCY = {
"80m": 0xF,
"40m": 0x0,
"26m": 0x1,
"20m": 0x2,
}
*/
switch (byte & 0x0F) {
case 0xF: return (80_MHz);
case 0x0: return (40_MHz);
case 0x1: return (26_MHz);
case 0x2: return (20_MHz);
default: // fail?
return 0;
}
#endif
}
FlashMode_t EspClass::magicFlashChipMode(uint8_t byte) {
FlashMode_t mode = (FlashMode_t)byte;
if (mode > FM_SLOW_READ) {
mode = FM_UNKNOWN;
}
return mode;
}
bool EspClass::flashEraseSector(uint32_t sector) {
return esp_flash_erase_region(esp_flash_default_chip, sector * SPI_FLASH_SEC_SIZE, SPI_FLASH_SEC_SIZE) == ESP_OK;
}
// Warning: These functions do not work with encrypted flash
bool EspClass::flashWrite(uint32_t offset, uint32_t *data, size_t size) {
return esp_flash_write(esp_flash_default_chip, (const void *)data, offset, size) == ESP_OK;
}
bool EspClass::flashRead(uint32_t offset, uint32_t *data, size_t size) {
return esp_flash_read(esp_flash_default_chip, (void *)data, offset, size) == ESP_OK;
}
bool EspClass::partitionEraseRange(const esp_partition_t *partition, uint32_t offset, size_t size) {
return esp_partition_erase_range(partition, offset, size) == ESP_OK;
}
bool EspClass::partitionWrite(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size) {
return esp_partition_write(partition, offset, data, size) == ESP_OK;
}
bool EspClass::partitionRead(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size) {
return esp_partition_read(partition, offset, data, size) == ESP_OK;
}
uint64_t EspClass::getEfuseMac(void) {
uint64_t _chipmacid = 0LL;
esp_efuse_mac_get_default((uint8_t *)(&_chipmacid));
return _chipmacid;
}


@ -1,120 +0,0 @@
/*
Esp.h - ESP31B-specific APIs
Copyright (c) 2015 Ivan Grokhotkov. All rights reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef ESP_H
#define ESP_H
#include <Arduino.h>
#include <esp_partition.h>
#include <hal/cpu_hal.h>
#include "esp_cpu.h"
/**
* AVR macros for WDT management
*/
typedef enum {
WDTO_0MS = 0, //!< WDTO_0MS
WDTO_15MS = 15, //!< WDTO_15MS
WDTO_30MS = 30, //!< WDTO_30MS
WDTO_60MS = 60, //!< WDTO_60MS
WDTO_120MS = 120, //!< WDTO_120MS
WDTO_250MS = 250, //!< WDTO_250MS
WDTO_500MS = 500, //!< WDTO_500MS
WDTO_1S = 1000, //!< WDTO_1S
WDTO_2S = 2000, //!< WDTO_2S
WDTO_4S = 4000, //!< WDTO_4S
WDTO_8S = 8000 //!< WDTO_8S
} WDTO_t;
typedef enum {
FM_QIO = 0x00,
FM_QOUT = 0x01,
FM_DIO = 0x02,
FM_DOUT = 0x03,
FM_FAST_READ = 0x04,
FM_SLOW_READ = 0x05,
FM_UNKNOWN = 0xff
} FlashMode_t;
typedef enum {
SKETCH_SIZE_TOTAL = 0,
SKETCH_SIZE_FREE = 1
} sketchSize_t;
class EspClass {
public:
EspClass() {}
~EspClass() {}
void restart();
//Internal RAM
uint32_t getHeapSize(); //total heap size
uint32_t getFreeHeap(); //available heap
uint32_t getMinFreeHeap(); //lowest level of free heap since boot
uint32_t getMaxAllocHeap(); //largest block of heap that can be allocated at once
//SPI RAM
uint32_t getPsramSize();
uint32_t getFreePsram();
uint32_t getMinFreePsram();
uint32_t getMaxAllocPsram();
uint16_t getChipRevision();
const char *getChipModel();
uint8_t getChipCores();
uint32_t getCpuFreqMHz() {
return getCpuFrequencyMhz();
}
inline uint32_t getCycleCount() __attribute__((always_inline));
const char *getSdkVersion(); //version of ESP-IDF
const char *getCoreVersion(); //version of this core
void deepSleep(uint64_t time_us);
uint32_t getFlashChipSize();
uint32_t getFlashChipSpeed();
FlashMode_t getFlashChipMode();
uint32_t magicFlashChipSize(uint8_t byte);
uint32_t magicFlashChipSpeed(uint8_t byte);
FlashMode_t magicFlashChipMode(uint8_t byte);
uint32_t getSketchSize();
String getSketchMD5();
uint32_t getFreeSketchSpace();
bool flashEraseSector(uint32_t sector);
bool flashWrite(uint32_t offset, uint32_t *data, size_t size);
bool flashRead(uint32_t offset, uint32_t *data, size_t size);
bool partitionEraseRange(const esp_partition_t *partition, uint32_t offset, size_t size);
bool partitionWrite(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size);
bool partitionRead(const esp_partition_t *partition, uint32_t offset, uint32_t *data, size_t size);
uint64_t getEfuseMac();
};
uint32_t ARDUINO_ISR_ATTR EspClass::getCycleCount() {
return (uint32_t)esp_cpu_get_cycle_count();
}
extern EspClass ESP;
#endif //ESP_H


@ -1,427 +0,0 @@
// Copyright 2015-2021 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "FirmwareMSC.h"
#if CONFIG_TINYUSB_MSC_ENABLED
#include <cstring>
#include "esp_partition.h"
#include "esp_ota_ops.h"
#include "esp_image_format.h"
#include "pins_arduino.h"
#include "esp32-hal.h"
#include "firmware_msc_fat.h"
#include "spi_flash_mmap.h"
#ifndef USB_FW_MSC_VENDOR_ID
#define USB_FW_MSC_VENDOR_ID "ESP32" //max 8 chars
#endif
#ifndef USB_FW_MSC_PRODUCT_ID
#define USB_FW_MSC_PRODUCT_ID "Firmware MSC" //max 16 chars
#endif
#ifndef USB_FW_MSC_PRODUCT_REVISION
#define USB_FW_MSC_PRODUCT_REVISION "1.0" //max 4 chars
#endif
#ifndef USB_FW_MSC_VOLUME_NAME
#define USB_FW_MSC_VOLUME_NAME "ESP32-FWMSC" //max 11 chars
#endif
#ifndef USB_FW_MSC_SERIAL_NUMBER
#define USB_FW_MSC_SERIAL_NUMBER 0x00000000
#endif
ESP_EVENT_DEFINE_BASE(ARDUINO_FIRMWARE_MSC_EVENTS);
esp_err_t arduino_usb_event_post(esp_event_base_t event_base, int32_t event_id, void *event_data, size_t event_data_size, TickType_t ticks_to_wait);
esp_err_t arduino_usb_event_handler_register_with(esp_event_base_t event_base, int32_t event_id, esp_event_handler_t event_handler, void *event_handler_arg);
//General Variables
static uint8_t *msc_ram_disk = NULL;
static fat_boot_sector_t *msc_boot = NULL;
static uint8_t *msc_table = NULL;
static uint16_t msc_table_sectors = 0;
static uint16_t msc_total_sectors = 0;
static bool mcs_is_fat16 = false;
//Firmware Read
static const esp_partition_t *msc_run_partition = NULL;
static uint16_t fw_start_sector = 0;
static uint16_t fw_end_sector = 0;
static size_t fw_size = 0;
static fat_dir_entry_t *fw_entry = NULL;
//Firmware Write
typedef enum {
MSC_UPDATE_IDLE,
MSC_UPDATE_STARTING,
MSC_UPDATE_RUNNING,
MSC_UPDATE_END
} msc_update_state_t;
static const esp_partition_t *msc_ota_partition = NULL;
static msc_update_state_t msc_update_state = MSC_UPDATE_IDLE;
static uint16_t msc_update_start_sector = 0;
static uint32_t msc_update_bytes_written = 0;
static fat_dir_entry_t *msc_update_entry = NULL;
static uint32_t get_firmware_size(const esp_partition_t *partition) {
esp_image_metadata_t data;
const esp_partition_pos_t running_pos = {
.offset = partition->address,
.size = partition->size,
};
data.start_addr = running_pos.offset;
esp_image_verify(ESP_IMAGE_VERIFY, &running_pos, &data);
return data.image_len;
}
//Get number of sectors required based on the size of the firmware and OTA partition
static size_t msc_update_get_required_disk_sectors() {
size_t data_sectors = 16;
size_t total_sectors = 0;
msc_run_partition = esp_ota_get_running_partition();
msc_ota_partition = esp_ota_get_next_update_partition(NULL);
if (msc_run_partition) {
fw_size = get_firmware_size(msc_run_partition);
data_sectors += FAT_SIZE_TO_SECTORS(fw_size);
log_d("APP size: %u (%u sectors)", fw_size, FAT_SIZE_TO_SECTORS(fw_size));
} else {
log_w("APP partition not found. Reading disabled");
}
if (msc_ota_partition) {
data_sectors += FAT_SIZE_TO_SECTORS(msc_ota_partition->size);
log_d("OTA size: %u (%u sectors)", msc_ota_partition->size, FAT_SIZE_TO_SECTORS(msc_ota_partition->size));
} else {
log_w("OTA partition not found. Writing disabled");
}
msc_table_sectors = fat_sectors_per_alloc_table(data_sectors, false);
total_sectors = data_sectors + msc_table_sectors + 2;
if (total_sectors > 0xFF4) {
log_d("USING FAT16");
mcs_is_fat16 = true;
total_sectors -= msc_table_sectors;
msc_table_sectors = fat_sectors_per_alloc_table(data_sectors, true);
total_sectors += msc_table_sectors;
} else {
log_d("USING FAT12");
mcs_is_fat16 = false;
}
log_d("FAT sector size: %u", DISK_SECTOR_SIZE);
log_d("FAT data sectors: %u", data_sectors);
log_d("FAT table sectors: %u", msc_table_sectors);
log_d("FAT total sectors: %u (%uKB)", total_sectors, (total_sectors * DISK_SECTOR_SIZE) / 1024);
return total_sectors;
}
//setup the ramdisk and add the firmware download file
static bool msc_update_setup_disk(const char *volume_label, uint32_t serial_number) {
msc_total_sectors = msc_update_get_required_disk_sectors();
uint8_t ram_sectors = msc_table_sectors + 2;
msc_ram_disk = (uint8_t *)calloc(ram_sectors, DISK_SECTOR_SIZE);
if (!msc_ram_disk) {
log_e("Failed to allocate RAM Disk: %u bytes", ram_sectors * DISK_SECTOR_SIZE);
return false;
}
fw_start_sector = ram_sectors;
fw_end_sector = fw_start_sector;
msc_boot = fat_add_boot_sector(msc_ram_disk, msc_total_sectors, msc_table_sectors, fat_file_system_type(mcs_is_fat16), volume_label, serial_number);
msc_table = fat_add_table(msc_ram_disk, msc_boot, mcs_is_fat16);
//fat_dir_entry_t * label = fat_add_label(msc_ram_disk, volume_label);
if (msc_run_partition) {
fw_entry = fat_add_root_file(msc_ram_disk, 0, "FIRMWARE", "BIN", fw_size, 2, mcs_is_fat16);
fw_end_sector = FAT_SIZE_TO_SECTORS(fw_size) + fw_start_sector;
}
return true;
}
static void msc_update_delete_disk() {
fw_entry = NULL;
fw_size = 0;
fw_end_sector = 0;
fw_start_sector = 0;
msc_table = NULL;
msc_boot = NULL;
msc_table_sectors = 0;
msc_total_sectors = 0;
msc_run_partition = NULL;
msc_ota_partition = NULL;
msc_update_state = MSC_UPDATE_IDLE;
msc_update_start_sector = 0;
msc_update_bytes_written = 0;
msc_update_entry = NULL;
free(msc_ram_disk);
msc_ram_disk = NULL;
}
//filter out entries to only include BINs in the root folder
static fat_dir_entry_t *msc_update_get_root_bin_entry(uint8_t index) {
fat_dir_entry_t *entry = (fat_dir_entry_t *)(msc_ram_disk + ((msc_boot->sectors_per_alloc_table + 1) * DISK_SECTOR_SIZE) + (index * sizeof(fat_dir_entry_t)));
fat_lfn_entry_t *lfn = (fat_lfn_entry_t *)entry;
//empty entry
if (entry->file_magic == 0) {
return NULL;
}
//long file name
if (lfn->attr == 0x0F && lfn->type == 0x00 && lfn->first_cluster == 0x0000) {
return NULL;
}
//only files marked as archives
if (entry->file_attr != FAT_FILE_ATTR_ARCHIVE) {
return NULL;
}
//deleted
if (entry->file_magic == 0xE5 || entry->file_magic == 0x05) {
return NULL;
}
//not bins
if (memcmp("BIN", entry->file_extension, 3)) {
return NULL;
}
return entry;
}
//get an empty bin (the host will add an entry for file about to be written with size of zero)
static fat_dir_entry_t *msc_update_find_new_bin() {
for (uint8_t i = 16; i;) {
i--;
fat_dir_entry_t *entry = msc_update_get_root_bin_entry(i);
if (entry && entry->file_size == 0) {
return entry;
}
}
return NULL;
}
//get a bin starting from particular sector
static fat_dir_entry_t *msc_update_find_bin(uint16_t sector) {
for (uint8_t i = 16; i;) {
i--;
fat_dir_entry_t *entry = msc_update_get_root_bin_entry(i);
if (entry && entry->data_start_sector == (sector - msc_boot->sectors_per_alloc_table)) {
return entry;
}
}
return NULL;
}
//write the new data and erase the flash blocks when necessary
static esp_err_t msc_update_write(const esp_partition_t *partition, uint32_t offset, void *data, size_t size) {
esp_err_t err = ESP_OK;
if ((offset & (SPI_FLASH_SEC_SIZE - 1)) == 0) {
err = esp_partition_erase_range(partition, offset, SPI_FLASH_SEC_SIZE);
log_v("ERASE[0x%08X]: %s", offset, (err != ESP_OK) ? "FAIL" : "OK");
if (err != ESP_OK) {
return err;
}
}
return esp_partition_write(partition, offset, data, size);
}
//called when error was encountered while updating
static void msc_update_error() {
log_e("UPDATE_ERROR: %u", msc_update_bytes_written);
arduino_firmware_msc_event_data_t p;
p.error.size = msc_update_bytes_written;
arduino_usb_event_post(ARDUINO_FIRMWARE_MSC_EVENTS, ARDUINO_FIRMWARE_MSC_ERROR_EVENT, &p, sizeof(arduino_firmware_msc_event_data_t), portMAX_DELAY);
msc_update_state = MSC_UPDATE_IDLE;
msc_update_entry = NULL;
msc_update_bytes_written = 0;
msc_update_start_sector = 0;
}
//called when all firmware bytes have been received
static void msc_update_end() {
log_d("UPDATE_END: %u", msc_update_entry->file_size);
msc_update_state = MSC_UPDATE_END;
size_t ota_size = get_firmware_size(msc_ota_partition);
if (ota_size != msc_update_entry->file_size) {
log_e("OTA SIZE MISMATCH %u != %u", ota_size, msc_update_entry->file_size);
msc_update_error();
return;
}
if (!ota_size || esp_ota_set_boot_partition(msc_ota_partition) != ESP_OK) {
log_e("ENABLING OTA PARTITION FAILED");
msc_update_error();
return;
}
arduino_firmware_msc_event_data_t p;
p.end.size = msc_update_entry->file_size;
arduino_usb_event_post(ARDUINO_FIRMWARE_MSC_EVENTS, ARDUINO_FIRMWARE_MSC_END_EVENT, &p, sizeof(arduino_firmware_msc_event_data_t), portMAX_DELAY);
}
static int32_t msc_write(uint32_t lba, uint32_t offset, uint8_t *buffer, uint32_t bufsize) {
//log_d("lba: %u, offset: %u, bufsize: %u", lba, offset, bufsize);
if (lba < fw_start_sector) {
//write to sectors that are in RAM
memcpy(msc_ram_disk + (lba * DISK_SECTOR_SIZE) + offset, buffer, bufsize);
if (msc_ota_partition && lba == (fw_start_sector - 1)) {
//monitor the root folder table
if (msc_update_state <= MSC_UPDATE_RUNNING) {
fat_dir_entry_t *update_entry = msc_update_find_new_bin();
if (update_entry) {
if (msc_update_entry) {
log_v("REPLACING ENTRY");
} else {
log_v("ASSIGNING ENTRY");
}
if (msc_update_state <= MSC_UPDATE_STARTING) {
msc_update_state = MSC_UPDATE_STARTING;
msc_update_bytes_written = 0;
msc_update_start_sector = 0;
}
msc_update_entry = update_entry;
} else if (msc_update_state == MSC_UPDATE_RUNNING) {
if (!msc_update_entry && msc_update_start_sector) {
msc_update_entry = msc_update_find_bin(msc_update_start_sector);
}
if (msc_update_entry && msc_update_bytes_written >= msc_update_entry->file_size) {
msc_update_end();
}
}
}
}
} else if (msc_ota_partition && lba >= msc_update_start_sector) {
//handle writes to the region where the new firmware will be uploaded
arduino_firmware_msc_event_data_t p;
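//editor's note: 0xE9 is the magic first byte of an ESP application image header, so it marks the start of the uploaded firmware binary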
if (msc_update_state <= MSC_UPDATE_STARTING && buffer[0] == 0xE9) {
msc_update_state = MSC_UPDATE_RUNNING;
msc_update_start_sector = lba;
msc_update_bytes_written = 0;
log_d("UPDATE_START: %u (0x%02X)", lba, lba - msc_boot->sectors_per_alloc_table);
arduino_usb_event_post(ARDUINO_FIRMWARE_MSC_EVENTS, ARDUINO_FIRMWARE_MSC_START_EVENT, &p, sizeof(arduino_firmware_msc_event_data_t), portMAX_DELAY);
if (msc_update_write(msc_ota_partition, ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset, buffer, bufsize) == ESP_OK) {
log_v("UPDATE_WRITE: %u %u", ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset, bufsize);
msc_update_bytes_written = ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset + bufsize;
p.write.offset = ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset;
p.write.size = bufsize;
arduino_usb_event_post(ARDUINO_FIRMWARE_MSC_EVENTS, ARDUINO_FIRMWARE_MSC_WRITE_EVENT, &p, sizeof(arduino_firmware_msc_event_data_t), portMAX_DELAY);
} else {
msc_update_error();
return 0;
}
} else if (msc_update_state == MSC_UPDATE_RUNNING) {
if (msc_update_entry && msc_update_entry->file_size && msc_update_bytes_written < msc_update_entry->file_size
&& (msc_update_bytes_written + bufsize) >= msc_update_entry->file_size) {
bufsize = msc_update_entry->file_size - msc_update_bytes_written;
}
if (msc_update_write(msc_ota_partition, ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset, buffer, bufsize) == ESP_OK) {
log_v("UPDATE_WRITE: %u %u", ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset, bufsize);
msc_update_bytes_written = ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset + bufsize;
p.write.offset = ((lba - msc_update_start_sector) * DISK_SECTOR_SIZE) + offset;
p.write.size = bufsize;
arduino_usb_event_post(ARDUINO_FIRMWARE_MSC_EVENTS, ARDUINO_FIRMWARE_MSC_WRITE_EVENT, &p, sizeof(arduino_firmware_msc_event_data_t), portMAX_DELAY);
if (msc_update_entry && msc_update_entry->file_size && msc_update_bytes_written >= msc_update_entry->file_size) {
msc_update_end();
}
} else {
msc_update_error();
return 0;
}
}
}
return bufsize;
}
static int32_t msc_read(uint32_t lba, uint32_t offset, void *buffer, uint32_t bufsize) {
//log_d("lba: %u, offset: %u, bufsize: %u", lba, offset, bufsize);
if (lba < fw_start_sector) {
memcpy(buffer, msc_ram_disk + (lba * DISK_SECTOR_SIZE) + offset, bufsize);
} else if (msc_run_partition && lba < fw_end_sector) {
//read the currently running firmware
if (esp_partition_read(msc_run_partition, ((lba - fw_start_sector) * DISK_SECTOR_SIZE) + offset, buffer, bufsize) != ESP_OK) {
return 0;
}
} else {
memset(buffer, 0, bufsize);
}
return bufsize;
}
static bool msc_start_stop(uint8_t power_condition, bool start, bool load_eject) {
//log_d("power: %u, start: %u, eject: %u", power_condition, start, load_eject);
arduino_firmware_msc_event_data_t p;
p.power.power_condition = power_condition;
p.power.start = start;
p.power.load_eject = load_eject;
arduino_usb_event_post(ARDUINO_FIRMWARE_MSC_EVENTS, ARDUINO_FIRMWARE_MSC_POWER_EVENT, &p, sizeof(arduino_firmware_msc_event_data_t), portMAX_DELAY);
return true;
}
static volatile TaskHandle_t msc_task_handle = NULL;
static void msc_task(void *pvParameters) {
for (;;) {
if (msc_update_state == MSC_UPDATE_END) {
delay(100);
esp_restart();
}
delay(100);
}
msc_task_handle = NULL;
vTaskDelete(NULL);
}
FirmwareMSC::FirmwareMSC() : msc() {}
FirmwareMSC::~FirmwareMSC() {
end();
}
bool FirmwareMSC::begin() {
if (msc_ram_disk) {
return true;
}
if (!msc_update_setup_disk(USB_FW_MSC_VOLUME_NAME, USB_FW_MSC_SERIAL_NUMBER)) {
return false;
}
if (!msc_task_handle) {
xTaskCreateUniversal(msc_task, "msc_disk", 1024, NULL, 2, (TaskHandle_t *)&msc_task_handle, 0);
if (!msc_task_handle) {
msc_update_delete_disk();
return false;
}
}
msc.vendorID(USB_FW_MSC_VENDOR_ID);
msc.productID(USB_FW_MSC_PRODUCT_ID);
msc.productRevision(USB_FW_MSC_PRODUCT_REVISION);
msc.onStartStop(msc_start_stop);
msc.onRead(msc_read);
msc.onWrite(msc_write);
msc.mediaPresent(true);
msc.begin(msc_boot->fat12_sector_num, DISK_SECTOR_SIZE);
return true;
}
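//Usage sketch (editor's illustration, not part of the original file): expose the firmware-update disk from a
//sketch, assuming ARDUINO_USB_MSC_ON_BOOT is enabled so that the MSC_Update instance declared below exists.
//
//  #include "FirmwareMSC.h"
//  #include "USB.h"
//  void setup() {
//    MSC_Update.begin(); // creates the FAT RAM disk and registers the MSC callbacks
//    USB.begin();        // starts the TinyUSB stack so the host can mount the disk
//  }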
void FirmwareMSC::end() {
msc.end();
if (msc_task_handle) {
vTaskDelete(msc_task_handle);
msc_task_handle = NULL;
}
msc_update_delete_disk();
}
void FirmwareMSC::onEvent(esp_event_handler_t callback) {
onEvent(ARDUINO_FIRMWARE_MSC_ANY_EVENT, callback);
}
void FirmwareMSC::onEvent(arduino_firmware_msc_event_t event, esp_event_handler_t callback) {
arduino_usb_event_handler_register_with(ARDUINO_FIRMWARE_MSC_EVENTS, event, callback, this);
}
#if ARDUINO_USB_MSC_ON_BOOT
FirmwareMSC MSC_Update;
#endif
#endif /* CONFIG_USB_MSC_ENABLED */

View file

@ -1,70 +0,0 @@
// Copyright 2015-2021 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <stdbool.h>
#include "USBMSC.h"
#if CONFIG_TINYUSB_MSC_ENABLED
#include "esp_event.h"
ESP_EVENT_DECLARE_BASE(ARDUINO_FIRMWARE_MSC_EVENTS);
typedef enum {
ARDUINO_FIRMWARE_MSC_ANY_EVENT = ESP_EVENT_ANY_ID,
ARDUINO_FIRMWARE_MSC_START_EVENT = 0,
ARDUINO_FIRMWARE_MSC_WRITE_EVENT,
ARDUINO_FIRMWARE_MSC_END_EVENT,
ARDUINO_FIRMWARE_MSC_ERROR_EVENT,
ARDUINO_FIRMWARE_MSC_POWER_EVENT,
ARDUINO_FIRMWARE_MSC_MAX_EVENT,
} arduino_firmware_msc_event_t;
typedef union {
struct {
size_t offset;
size_t size;
} write;
struct {
uint8_t power_condition;
bool start;
bool load_eject;
} power;
struct {
size_t size;
} end;
struct {
size_t size;
} error;
} arduino_firmware_msc_event_data_t;
class FirmwareMSC {
private:
USBMSC msc;
public:
FirmwareMSC();
~FirmwareMSC();
bool begin();
void end();
void onEvent(esp_event_handler_t callback);
void onEvent(arduino_firmware_msc_event_t event, esp_event_handler_t callback);
};
#if ARDUINO_USB_MSC_ON_BOOT
extern FirmwareMSC MSC_Update;
#endif
#endif /* CONFIG_TINYUSB_MSC_ENABLED */

View file

@ -1,34 +0,0 @@
/*
* FunctionalInterrupt.cpp
*
* Created on: 8 jul. 2018
* Author: Herman
*/
#include "FunctionalInterrupt.h"
#include "Arduino.h"
typedef void (*voidFuncPtr)(void);
typedef void (*voidFuncPtrArg)(void *);
extern "C" {
extern void __attachInterruptFunctionalArg(uint8_t pin, voidFuncPtrArg userFunc, void *arg, int intr_type, bool functional);
}
void ARDUINO_ISR_ATTR interruptFunctional(void *arg) {
InterruptArgStructure *localArg = (InterruptArgStructure *)arg;
if (localArg->interruptFunction) {
localArg->interruptFunction();
}
}
void attachInterrupt(uint8_t pin, std::function<void(void)> intRoutine, int mode) {
// use the local interrupt routine which takes the ArgStructure as argument
__attachInterruptFunctionalArg(pin, (voidFuncPtrArg)interruptFunctional, new InterruptArgStructure{intRoutine}, mode, true);
}
extern "C" {
void cleanupFunctional(void *arg) {
delete (InterruptArgStructure *)arg;
}
}
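// Usage sketch (editor's illustration, not part of the original file): the std::function overload above
// accepts a lambda as the interrupt routine; the pin number 4 and FALLING mode are arbitrary example values.
//
//   static volatile uint32_t presses = 0;
//   attachInterrupt(4, []() { presses++; }, FALLING);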

View file

@ -1,22 +0,0 @@
/*
* FunctionalInterrupt.h
*
* Created on: 8 jul. 2018
* Author: Herman
*/
#ifndef CORE_CORE_FUNCTIONALINTERRUPT_H_
#define CORE_CORE_FUNCTIONALINTERRUPT_H_
#include <functional>
#include <stdint.h>
struct InterruptArgStructure {
std::function<void(void)> interruptFunction;
};
// The extra set of parentheses here prevents macros defined
// in io_pin_remap.h from applying to this declaration.
void(attachInterrupt)(uint8_t pin, std::function<void(void)> intRoutine, int mode);
#endif /* CORE_CORE_FUNCTIONALINTERRUPT_H_ */

View file

@ -1,76 +0,0 @@
/*
Copyright (c) 2015 Hristo Gochkov. All rights reserved.
This file is part of the esp32 core for Arduino environment.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <Arduino.h>
#include <HEXBuilder.h>
static uint8_t hex_char_to_byte(uint8_t c) {
return (c >= 'a' && c <= 'f') ? (c - ((uint8_t)'a' - 0xa))
: (c >= 'A' && c <= 'F') ? (c - ((uint8_t)'A' - 0xA))
: (c >= '0' && c <= '9') ? (c - (uint8_t)'0')
: 0x10; // unknown char is 16
}
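// Worked example (editor's note): 'b' is 0x62 and ('a' - 0xa) is 0x57, so 'b' maps to 0x0B;
// '7' is 0x37 and '0' is 0x30, so '7' maps to 0x07; any other character returns 0x10 and is skipped by hex2bytes().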
size_t HEXBuilder::hex2bytes(unsigned char *out, size_t maxlen, String &in) {
return hex2bytes(out, maxlen, in.c_str());
}
size_t HEXBuilder::hex2bytes(unsigned char *out, size_t maxlen, const char *in) {
size_t len = 0;
for (; *in; in++) {
uint8_t c = hex_char_to_byte(*in);
// Silently skip anything unknown.
if (c > 15) {
continue;
}
if (len & 1) {
if (len / 2 < maxlen) {
out[len / 2] |= c;
}
} else {
if (len / 2 < maxlen) {
out[len / 2] = c << 4;
}
}
len++;
}
return (len + 1) / 2;
}
size_t HEXBuilder::bytes2hex(char *out, size_t maxlen, const unsigned char *in, size_t len) {
for (size_t i = 0; i < len; i++) {
if (i * 2 + 1 < maxlen) {
sprintf(out + (i * 2), "%02x", in[i]);
}
}
return len * 2 + 1;
}
String HEXBuilder::bytes2hex(const unsigned char *in, size_t len) {
size_t maxlen = len * 2 + 1;
char *out = (char *)malloc(maxlen);
if (!out) {
return String();
}
bytes2hex(out, maxlen, in, len);
String ret = String(out);
free(out);
return ret;
}
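// Usage sketch (editor's illustration, not part of the original file): a round trip through the two helpers
// defined above; the byte values are arbitrary.
//
//   uint8_t mac[6] = {0xDE, 0xAD, 0xBE, 0xEF, 0x00, 0x01};
//   String hex = HEXBuilder::bytes2hex(mac, sizeof(mac));      // "deadbeef0001"
//   uint8_t back[6];
//   size_t n = HEXBuilder::hex2bytes(back, sizeof(back), hex); // n == 6, back[] == mac[]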

View file

@ -1,34 +0,0 @@
/*
Copyright (c) 2015 Hristo Gochkov. All rights reserved.
This file is part of the esp32 core for Arduino environment.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef HEXBuilder_h
#define HEXBuilder_h
#include <WString.h>
#include <Stream.h>
class HEXBuilder {
public:
static size_t hex2bytes(unsigned char *out, size_t maxlen, String &in);
static size_t hex2bytes(unsigned char *out, size_t maxlen, const char *in);
static String bytes2hex(const unsigned char *in, size_t len);
static size_t bytes2hex(char *out, size_t maxlen, const unsigned char *in, size_t len);
};
#endif

View file

@ -1,614 +0,0 @@
// Copyright 2015-2024 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "USB.h"
#if SOC_USB_SERIAL_JTAG_SUPPORTED
#include "esp32-hal.h"
#include "esp32-hal-periman.h"
#include "HWCDC.h"
#include "freertos/FreeRTOS.h"
#include "freertos/semphr.h"
#include "freertos/queue.h"
#include "freertos/ringbuf.h"
#include "esp_intr_alloc.h"
#include "soc/periph_defs.h"
#include "soc/io_mux_reg.h"
#include "soc/usb_serial_jtag_struct.h"
#pragma GCC diagnostic ignored "-Wvolatile"
#include "hal/usb_serial_jtag_ll.h"
#pragma GCC diagnostic warning "-Wvolatile"
#include "rom/ets_sys.h"
ESP_EVENT_DEFINE_BASE(ARDUINO_HW_CDC_EVENTS);
static RingbufHandle_t tx_ring_buf = NULL;
static QueueHandle_t rx_queue = NULL;
static uint8_t rx_data_buf[64] = {0};
static intr_handle_t intr_handle = NULL;
static SemaphoreHandle_t tx_lock = NULL;
static volatile bool connected = false;
// SOF in ISR causes problems for uploading firmware
//static volatile unsigned long lastSOF_ms;
//static volatile uint8_t SOF_TIMEOUT;
// timeout has no effect when USB CDC is unplugged
static uint32_t tx_timeout_ms = 100;
static esp_event_loop_handle_t arduino_hw_cdc_event_loop_handle = NULL;
static esp_err_t
arduino_hw_cdc_event_post(esp_event_base_t event_base, int32_t event_id, void *event_data, size_t event_data_size, BaseType_t *task_unblocked) {
if (arduino_hw_cdc_event_loop_handle == NULL) {
return ESP_FAIL;
}
return esp_event_isr_post_to(arduino_hw_cdc_event_loop_handle, event_base, event_id, event_data, event_data_size, task_unblocked);
}
static esp_err_t
arduino_hw_cdc_event_handler_register_with(esp_event_base_t event_base, int32_t event_id, esp_event_handler_t event_handler, void *event_handler_arg) {
if (!arduino_hw_cdc_event_loop_handle) {
esp_event_loop_args_t event_task_args = {
.queue_size = 5, .task_name = "arduino_hw_cdc_events", .task_priority = 5, .task_stack_size = 2048, .task_core_id = tskNO_AFFINITY
};
if (esp_event_loop_create(&event_task_args, &arduino_hw_cdc_event_loop_handle) != ESP_OK) {
log_e("esp_event_loop_create failed");
}
}
if (arduino_hw_cdc_event_loop_handle == NULL) {
return ESP_FAIL;
}
return esp_event_handler_register_with(arduino_hw_cdc_event_loop_handle, event_base, event_id, event_handler, event_handler_arg);
}
static void hw_cdc_isr_handler(void *arg) {
portBASE_TYPE xTaskWoken = 0;
uint32_t usbjtag_intr_status = 0;
arduino_hw_cdc_event_data_t event = {0};
usbjtag_intr_status = usb_serial_jtag_ll_get_intsts_mask();
if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY) {
// Interrupt tells us the host picked up the data we sent.
if (!HWCDC::isPlugged()) {
connected = false;
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
// USB is unplugged, nothing to be done here
return;
} else {
connected = true;
}
if (tx_ring_buf != NULL && usb_serial_jtag_ll_txfifo_writable() == 1) {
// We disable the interrupt here so that the interrupt won't be triggered if there is no data to send.
usb_serial_jtag_ll_disable_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
size_t queued_size = 0;
uint8_t *queued_buff = (uint8_t *)xRingbufferReceiveUpToFromISR(tx_ring_buf, &queued_size, 64);
// If the hardware fifo is available, write in it. Otherwise, do nothing.
if (queued_buff != NULL) { //Although tx_queued_bytes may be larger than 0, we may get the interrupt before xRingbufferSend() was called.
//Copy the queued buffer into the TX FIFO
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
usb_serial_jtag_ll_write_txfifo(queued_buff, queued_size);
usb_serial_jtag_ll_txfifo_flush();
vRingbufferReturnItemFromISR(tx_ring_buf, queued_buff, &xTaskWoken);
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
//send event?
//ets_printf("TX:%u\n", queued_size);
event.tx.len = queued_size;
arduino_hw_cdc_event_post(ARDUINO_HW_CDC_EVENTS, ARDUINO_HW_CDC_TX_EVENT, &event, sizeof(arduino_hw_cdc_event_data_t), &xTaskWoken);
}
} else {
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
}
if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT) {
// read the RX buffer (max length is 64) and send the available data to the ring buffer.
// Ensure the rx buffer size is larger than RX_MAX_SIZE.
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT);
uint32_t rx_fifo_len = usb_serial_jtag_ll_read_rxfifo(rx_data_buf, 64);
uint32_t i = 0;
for (i = 0; i < rx_fifo_len; i++) {
if (rx_queue == NULL || !xQueueSendFromISR(rx_queue, rx_data_buf + i, &xTaskWoken)) {
break;
}
}
event.rx.len = i;
arduino_hw_cdc_event_post(ARDUINO_HW_CDC_EVENTS, ARDUINO_HW_CDC_RX_EVENT, &event, sizeof(arduino_hw_cdc_event_data_t), &xTaskWoken);
connected = true;
}
if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_BUS_RESET) {
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_BUS_RESET);
arduino_hw_cdc_event_post(ARDUINO_HW_CDC_EVENTS, ARDUINO_HW_CDC_BUS_RESET_EVENT, &event, sizeof(arduino_hw_cdc_event_data_t), &xTaskWoken);
connected = false;
}
// SOF ISR is causing esptool to be unable to upload firmware to the board
// if (usbjtag_intr_status & USB_SERIAL_JTAG_INTR_SOF) {
// usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_INTR_SOF);
// lastSOF_ms = millis();
// }
if (xTaskWoken == pdTRUE) {
portYIELD_FROM_ISR();
}
}
// Moved to header file as inline function. Kept just as future reference.
//inline bool HWCDC::isPlugged(void) {
// SOF ISR is causing esptool to be unable to upload firmware to the board
// Timer test for SOF seems to work when uploading firmware
// return usb_serial_jtag_is_connected();//(lastSOF_ms + SOF_TIMEOUT) >= millis();
//}
bool HWCDC::isCDC_Connected() {
static bool running = false;
// USB may be unplugged
if (!isPlugged()) {
connected = false;
running = false;
// SOF in ISR causes problems for uploading firmware
//SOF_TIMEOUT = 5; // SOF timeout when unplugged
return false;
}
//else {
// SOF_TIMEOUT = 50; // SOF timeout when plugged
//}
if (connected) {
running = false;
return true;
}
if (running == false && !connected) { // enables it only once!
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
// this will feed CDC TX FIFO to trigger IN_EMPTY
usb_serial_jtag_ll_txfifo_flush();
running = true;
return false;
}
static void flushTXBuffer(const uint8_t *buffer, size_t size) {
if (!tx_ring_buf) {
return;
}
UBaseType_t uxItemsWaiting = 0;
vRingbufferGetInfo(tx_ring_buf, NULL, NULL, NULL, NULL, &uxItemsWaiting);
size_t freeSpace = xRingbufferGetCurFreeSize(tx_ring_buf);
size_t ringbufferLength = freeSpace + uxItemsWaiting;
if (buffer == NULL) {
// just flush the whole ring buffer and exit - used by HWCDC::flush()
size_t queued_size = 0;
uint8_t *queued_buff = (uint8_t *)xRingbufferReceiveUpTo(tx_ring_buf, &queued_size, 0, ringbufferLength);
if (queued_size && queued_buff != NULL) {
vRingbufferReturnItem(tx_ring_buf, (void *)queued_buff);
}
return;
}
if (size == 0) {
return; // nothing to do
}
if (freeSpace >= size) {
// there is enough space, just add the data to the ring buffer
if (xRingbufferSend(tx_ring_buf, (void *)buffer, size, 0) != pdTRUE) {
return;
}
} else {
// how many bytes should be flushed to make space for the new data
size_t to_flush = size - freeSpace;
if (to_flush > ringbufferLength) {
to_flush = ringbufferLength;
}
size_t queued_size = 0;
uint8_t *queued_buff = (uint8_t *)xRingbufferReceiveUpTo(tx_ring_buf, &queued_size, 0, to_flush);
if (queued_size && queued_buff != NULL) {
vRingbufferReturnItem(tx_ring_buf, (void *)queued_buff);
}
// now add the new data that fits into the ring buffer
uint8_t *bptr = (uint8_t *)buffer;
if (size >= ringbufferLength) {
// keep only the newest bytes that fit, preserving the FIFO policy of dropping the oldest data first
bptr = (uint8_t *)buffer + (size - ringbufferLength);
size = ringbufferLength;
}
if (xRingbufferSend(tx_ring_buf, (void *)bptr, size, 0) != pdTRUE) {
return;
}
}
// flushes CDC FIFO
usb_serial_jtag_ll_txfifo_flush();
}
static void ARDUINO_ISR_ATTR cdc0_write_char(char c) {
if (tx_ring_buf == NULL) {
return;
}
if (!HWCDC::isConnected()) {
// just pop/push RingBuffer and apply FIFO policy
flushTXBuffer((const uint8_t *)&c, 1);
return;
}
if (xPortInIsrContext()) {
xRingbufferSendFromISR(tx_ring_buf, (void *)(&c), 1, NULL);
} else {
xRingbufferSend(tx_ring_buf, (void *)(&c), 1, tx_timeout_ms / portTICK_PERIOD_MS);
}
usb_serial_jtag_ll_txfifo_flush();
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
HWCDC::HWCDC() {
perimanSetBusDeinit(ESP32_BUS_TYPE_USB_DM, HWCDC::deinit);
perimanSetBusDeinit(ESP32_BUS_TYPE_USB_DP, HWCDC::deinit);
// SOF in ISR causes problems for uploading firmware
// lastSOF_ms = 0;
// SOF_TIMEOUT = 5;
}
HWCDC::~HWCDC() {
end();
}
// It should return <true> only when USB is plugged in and CDC is connected.
HWCDC::operator bool() const {
return HWCDC::isCDC_Connected();
}
void HWCDC::onEvent(esp_event_handler_t callback) {
onEvent(ARDUINO_HW_CDC_ANY_EVENT, callback);
}
void HWCDC::onEvent(arduino_hw_cdc_event_t event, esp_event_handler_t callback) {
arduino_hw_cdc_event_handler_register_with(ARDUINO_HW_CDC_EVENTS, event, callback, this);
}
bool HWCDC::deinit(void *busptr) {
// avoid any recursion issue with Peripheral Manager perimanSetPinBus() call
static bool running = false;
if (running) {
return true;
}
running = true;
// Setting USB D+ D- pins
bool retCode = true;
retCode &= perimanClearPinBus(USB_INT_PHY0_DM_GPIO_NUM);
retCode &= perimanClearPinBus(USB_INT_PHY0_DP_GPIO_NUM);
if (retCode) {
// Force the host to re-enumerate (BUS_RESET)
pinMode(USB_INT_PHY0_DM_GPIO_NUM, OUTPUT_OPEN_DRAIN);
pinMode(USB_INT_PHY0_DP_GPIO_NUM, OUTPUT_OPEN_DRAIN);
digitalWrite(USB_INT_PHY0_DM_GPIO_NUM, LOW);
digitalWrite(USB_INT_PHY0_DP_GPIO_NUM, LOW);
}
// release the flag
running = false;
return retCode;
}
void HWCDC::begin(unsigned long baud) {
if (tx_lock == NULL) {
tx_lock = xSemaphoreCreateMutex();
}
//RX Buffer default has 256 bytes if not preset
if (rx_queue == NULL) {
if (!setRxBufferSize(256)) {
log_e("HW CDC RX Buffer error");
}
}
//TX Buffer default has 256 bytes if not preset
if (tx_ring_buf == NULL) {
if (!setTxBufferSize(256)) {
log_e("HW CDC TX Buffer error");
}
}
// the HW Serial pins need to be deinited first in order to allow `if(Serial)` to work :-(
// But this also causes the terminal to hang, so they are disabled
// deinit(NULL);
// delay(10); // USB Host has to enumerate it again
// Peripheral Manager setting for USB D+ D- pins
uint8_t pin = USB_INT_PHY0_DM_GPIO_NUM;
if (!perimanSetPinBus(pin, ESP32_BUS_TYPE_USB_DM, (void *)this, -1, -1)) {
goto err;
}
pin = USB_INT_PHY0_DP_GPIO_NUM;
if (!perimanSetPinBus(pin, ESP32_BUS_TYPE_USB_DP, (void *)this, -1, -1)) {
goto err;
}
// Configure PHY
// USB_Serial_JTAG use internal PHY
USB_SERIAL_JTAG.conf0.phy_sel = 0;
// Disable software control USB D+ D- pullup pulldown (Device FS: dp_pullup = 1)
USB_SERIAL_JTAG.conf0.pad_pull_override = 0;
// Enable USB D+ pullup
USB_SERIAL_JTAG.conf0.dp_pullup = 1;
// Enable USB pad function
USB_SERIAL_JTAG.conf0.usb_pad_enable = 1;
usb_serial_jtag_ll_disable_intr_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY | USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT | USB_SERIAL_JTAG_INTR_BUS_RESET);
// SOF ISR is causing esptool to be unable to upload firmware to the board
// usb_serial_jtag_ll_ena_intr_mask(
// USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY | USB_SERIAL_JTAG_INTR_SERIAL_OUT_RECV_PKT | USB_SERIAL_JTAG_INTR_BUS_RESET | USB_SERIAL_JTAG_INTR_SOF
// );
if (!intr_handle && esp_intr_alloc(ETS_USB_SERIAL_JTAG_INTR_SOURCE, 0, hw_cdc_isr_handler, NULL, &intr_handle) != ESP_OK) {
isr_log_e("HW USB CDC failed to init interrupts");
end();
return;
}
return;
err:
log_e("Serial JTAG Pin %u can't be set into Peripheral Manager.", pin);
end();
}
void HWCDC::end() {
//Disable/clear/free tx/rx interrupt.
usb_serial_jtag_ll_disable_intr_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
usb_serial_jtag_ll_clr_intsts_mask(USB_SERIAL_JTAG_LL_INTR_MASK);
esp_intr_free(intr_handle);
intr_handle = NULL;
if (tx_lock != NULL) {
vSemaphoreDelete(tx_lock);
tx_lock = NULL;
}
setRxBufferSize(0);
setTxBufferSize(0);
if (arduino_hw_cdc_event_loop_handle) {
esp_event_loop_delete(arduino_hw_cdc_event_loop_handle);
arduino_hw_cdc_event_loop_handle = NULL;
}
HWCDC::deinit(this);
setDebugOutput(false);
connected = false;
}
void HWCDC::setTxTimeoutMs(uint32_t timeout) {
tx_timeout_ms = timeout;
}
/*
* WRITING
*/
size_t HWCDC::setTxBufferSize(size_t tx_queue_len) {
if (tx_ring_buf) {
vRingbufferDelete(tx_ring_buf);
tx_ring_buf = NULL;
}
if (!tx_queue_len) {
return 0;
}
tx_ring_buf = xRingbufferCreate(tx_queue_len, RINGBUF_TYPE_BYTEBUF);
if (!tx_ring_buf) {
return 0;
}
return tx_queue_len;
}
int HWCDC::availableForWrite(void) {
if (tx_ring_buf == NULL || tx_lock == NULL) {
return 0;
}
if (xSemaphoreTake(tx_lock, tx_timeout_ms / portTICK_PERIOD_MS) != pdPASS) {
return 0;
}
size_t a = xRingbufferGetCurFreeSize(tx_ring_buf);
xSemaphoreGive(tx_lock);
return a;
}
size_t HWCDC::write(const uint8_t *buffer, size_t size) {
if (buffer == NULL || size == 0 || tx_ring_buf == NULL || tx_lock == NULL) {
return 0;
}
if (xSemaphoreTake(tx_lock, tx_timeout_ms / portTICK_PERIOD_MS) != pdPASS) {
return 0;
}
if (!isCDC_Connected()) {
// just pop/push RingBuffer and apply FIFO policy
flushTXBuffer(buffer, size);
} else {
size_t space = xRingbufferGetCurFreeSize(tx_ring_buf);
size_t to_send = size, so_far = 0;
if (space > size) {
space = size;
}
// Non-blocking method: send data to the ring buffer and handle it in the ISR.
if (space > 0 && xRingbufferSend(tx_ring_buf, (void *)(buffer), space, 0) != pdTRUE) {
size = 0;
} else {
to_send -= space;
so_far += space;
// Now trigger the ISR to read data from the ring buffer.
usb_serial_jtag_ll_txfifo_flush();
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
// tracks CDC transmission progress to avoid hanging if CDC is unplugged while still sending data
size_t last_toSend = to_send;
uint32_t tries = tx_timeout_ms; // waits 1ms per sending data attempt, in case CDC is unplugged
while (connected && to_send) {
space = xRingbufferGetCurFreeSize(tx_ring_buf);
if (space > to_send) {
space = to_send;
}
// Blocking method: send data to the ring buffer and handle it in the ISR.
if (xRingbufferSend(tx_ring_buf, (void *)(buffer + so_far), space, tx_timeout_ms / portTICK_PERIOD_MS) != pdTRUE) {
size = so_far;
log_w("write failed due to ring buffer full - timeout");
break;
}
so_far += space;
to_send -= space;
// Now trigger the ISR to read data from the ring buffer.
usb_serial_jtag_ll_txfifo_flush();
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
if (last_toSend == to_send) {
// no progress in sending data... USB CDC is probably unplugged
tries--;
delay(1);
} else {
last_toSend = to_send;
tries = tx_timeout_ms; // reset the timeout
}
if (tries == 0) { // CDC isn't connected anymore...
size = so_far;
log_w("write failed due to waiting USB Host - timeout");
connected = false;
}
}
}
// CDC was disconnected while sending data ==> flush the TX buffer keeping the last data
if (to_send && !usb_serial_jtag_ll_txfifo_writable()) {
connected = false;
flushTXBuffer(buffer + so_far, to_send);
}
}
xSemaphoreGive(tx_lock);
return size;
}
size_t HWCDC::write(uint8_t c) {
return write(&c, 1);
}
void HWCDC::flush(void) {
if (tx_ring_buf == NULL || tx_lock == NULL) {
return;
}
if (xSemaphoreTake(tx_lock, tx_timeout_ms / portTICK_PERIOD_MS) != pdPASS) {
return;
}
if (!isCDC_Connected()) {
flushTXBuffer(NULL, 0);
} else {
UBaseType_t uxItemsWaiting = 0;
vRingbufferGetInfo(tx_ring_buf, NULL, NULL, NULL, NULL, &uxItemsWaiting);
if (uxItemsWaiting) {
// Now trigger the ISR to read data from the ring buffer.
usb_serial_jtag_ll_txfifo_flush();
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
}
uint32_t tries = tx_timeout_ms; // waits 1ms per ISR sending data attempt, in case CDC is unplugged
while (connected && tries && uxItemsWaiting) {
delay(1);
UBaseType_t lastUxItemsWaiting = uxItemsWaiting;
vRingbufferGetInfo(tx_ring_buf, NULL, NULL, NULL, NULL, &uxItemsWaiting);
if (lastUxItemsWaiting == uxItemsWaiting) {
tries--;
}
if (connected) {
usb_serial_jtag_ll_ena_intr_mask(USB_SERIAL_JTAG_INTR_SERIAL_IN_EMPTY);
}
}
if (tries == 0) { // CDC isn't connected anymore...
connected = false;
flushTXBuffer(NULL, 0); // flushes all TX Buffer
}
}
xSemaphoreGive(tx_lock);
}
/*
* READING
*/
size_t HWCDC::setRxBufferSize(size_t rx_queue_len) {
if (rx_queue) {
vQueueDelete(rx_queue);
rx_queue = NULL;
}
if (!rx_queue_len) {
return 0;
}
rx_queue = xQueueCreate(rx_queue_len, sizeof(uint8_t));
if (!rx_queue) {
return 0;
}
return rx_queue_len;
}
int HWCDC::available(void) {
if (rx_queue == NULL) {
return -1;
}
return uxQueueMessagesWaiting(rx_queue);
}
int HWCDC::peek(void) {
if (rx_queue == NULL) {
return -1;
}
uint8_t c;
if (xQueuePeek(rx_queue, &c, 0)) {
return c;
}
return -1;
}
int HWCDC::read(void) {
if (rx_queue == NULL) {
return -1;
}
uint8_t c = 0;
if (xQueueReceive(rx_queue, &c, 0)) {
return c;
}
return -1;
}
size_t HWCDC::read(uint8_t *buffer, size_t size) {
if (rx_queue == NULL) {
return -1;
}
uint8_t c = 0;
size_t count = 0;
while (count < size && xQueueReceive(rx_queue, &c, 0)) {
buffer[count++] = c;
}
return count;
}
/*
* DEBUG
*/
void HWCDC::setDebugOutput(bool en) {
if (en) {
uartSetDebug(NULL);
ets_install_putc2((void (*)(char)) & cdc0_write_char);
} else {
ets_install_putc2(NULL);
}
ets_install_putc1(NULL); // closes UART log output
}
#if ARDUINO_USB_MODE && ARDUINO_USB_CDC_ON_BOOT // Hardware JTAG CDC selected
// USBSerial is always available to be used
HWCDC HWCDCSerial;
#endif
#endif /* SOC_USB_SERIAL_JTAG_SUPPORTED */

View file

@ -1,118 +0,0 @@
// Copyright 2015-2024 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include "sdkconfig.h"
#include "soc/soc_caps.h"
#if SOC_USB_SERIAL_JTAG_SUPPORTED
#include <inttypes.h>
#include "esp_event.h"
#include "Stream.h"
#include "driver/usb_serial_jtag.h"
ESP_EVENT_DECLARE_BASE(ARDUINO_HW_CDC_EVENTS);
typedef enum {
ARDUINO_HW_CDC_ANY_EVENT = ESP_EVENT_ANY_ID,
ARDUINO_HW_CDC_CONNECTED_EVENT = 0,
ARDUINO_HW_CDC_BUS_RESET_EVENT,
ARDUINO_HW_CDC_RX_EVENT,
ARDUINO_HW_CDC_TX_EVENT,
ARDUINO_HW_CDC_MAX_EVENT,
} arduino_hw_cdc_event_t;
typedef union {
struct {
size_t len;
} rx;
struct {
size_t len;
} tx;
} arduino_hw_cdc_event_data_t;
class HWCDC : public Stream {
private:
static bool deinit(void *busptr);
static bool isCDC_Connected();
public:
HWCDC();
~HWCDC();
void onEvent(esp_event_handler_t callback);
void onEvent(arduino_hw_cdc_event_t event, esp_event_handler_t callback);
size_t setRxBufferSize(size_t);
size_t setTxBufferSize(size_t);
void setTxTimeoutMs(uint32_t timeout);
void begin(unsigned long baud = 0);
void end();
int available(void);
int availableForWrite(void);
int peek(void);
int read(void);
size_t read(uint8_t *buffer, size_t size);
size_t write(uint8_t);
size_t write(const uint8_t *buffer, size_t size);
void flush(void);
inline static bool isPlugged(void) {
// SOF ISR is causing esptool to be unable to upload firmware to the board
// Using IDF 5.1 helper function because it is based on Timer check instead of ISR
return usb_serial_jtag_is_connected();
}
inline static bool isConnected(void) {
return isCDC_Connected();
}
inline size_t read(char *buffer, size_t size) {
return read((uint8_t *)buffer, size);
}
inline size_t write(const char *buffer, size_t size) {
return write((uint8_t *)buffer, size);
}
inline size_t write(const char *s) {
return write((uint8_t *)s, strlen(s));
}
inline size_t write(unsigned long n) {
return write((uint8_t)n);
}
inline size_t write(long n) {
return write((uint8_t)n);
}
inline size_t write(unsigned int n) {
return write((uint8_t)n);
}
inline size_t write(int n) {
return write((uint8_t)n);
}
operator bool() const;
void setDebugOutput(bool);
uint32_t baudRate() {
return 115200;
}
};
#if ARDUINO_USB_MODE && ARDUINO_USB_CDC_ON_BOOT // Hardware JTAG CDC selected
#ifndef HWCDC_SERIAL_IS_DEFINED
#define HWCDC_SERIAL_IS_DEFINED 1
#endif
// HWCDCSerial is always available to be used
extern HWCDC HWCDCSerial;
#endif
#endif /* SOC_USB_SERIAL_JTAG_SUPPORTED */

View file

@ -1,43 +0,0 @@
/*
Copyright (c) 2016 Arduino LLC. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#pragma once
#include <inttypes.h>
#include "Stream.h"
#include <functional>
class HardwareI2C : public Stream {
public:
virtual bool begin() = 0;
virtual bool begin(uint8_t address) = 0;
virtual bool end() = 0;
virtual bool setClock(uint32_t freq) = 0;
virtual void beginTransmission(uint8_t address) = 0;
virtual uint8_t endTransmission(bool stopBit) = 0;
virtual uint8_t endTransmission(void) = 0;
virtual size_t requestFrom(uint8_t address, size_t len, bool stopBit) = 0;
virtual size_t requestFrom(uint8_t address, size_t len) = 0;
// Update base class to use std::function
virtual void onReceive(const std::function<void(int)> &) = 0;
virtual void onRequest(const std::function<void()> &) = 0;
};

View file

@ -1,664 +0,0 @@
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <ctime>
#include "pins_arduino.h"
#include "io_pin_remap.h"
#include "HardwareSerial.h"
#include "soc/soc_caps.h"
#include "driver/uart.h"
#include "freertos/queue.h"
#if (SOC_UART_LP_NUM >= 1)
#define UART_HW_FIFO_LEN(uart_num) ((uart_num < SOC_UART_HP_NUM) ? SOC_UART_FIFO_LEN : SOC_LP_UART_FIFO_LEN)
#else
#define UART_HW_FIFO_LEN(uart_num) SOC_UART_FIFO_LEN
#endif
void serialEvent(void) __attribute__((weak));
#if SOC_UART_NUM > 1
void serialEvent1(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 1 */
#if SOC_UART_NUM > 2
void serialEvent2(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 2 */
#if SOC_UART_NUM > 3
void serialEvent3(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 3 */
#if SOC_UART_NUM > 4
void serialEvent4(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 4 */
#if SOC_UART_NUM > 5
void serialEvent5(void) __attribute__((weak));
#endif /* SOC_UART_NUM > 5 */
#if !defined(NO_GLOBAL_INSTANCES) && !defined(NO_GLOBAL_SERIAL)
// There is always Serial0 for UART0
HardwareSerial Serial0(0);
#if SOC_UART_NUM > 1
HardwareSerial Serial1(1);
#endif
#if SOC_UART_NUM > 2
HardwareSerial Serial2(2);
#endif
#if SOC_UART_NUM > 3
HardwareSerial Serial3(3);
#endif
#if SOC_UART_NUM > 4
HardwareSerial Serial4(4);
#endif
#if (SOC_UART_NUM > 5)
HardwareSerial Serial5(5);
#endif
#if HWCDC_SERIAL_IS_DEFINED == 1 // Hardware JTAG CDC Event
extern void HWCDCSerialEvent(void) __attribute__((weak));
#endif
#if USB_SERIAL_IS_DEFINED == 1 // Native USB CDC Event
// Used by Hardware Serial for USB CDC events
extern void USBSerialEvent(void) __attribute__((weak));
#endif
void serialEventRun(void) {
#if HWCDC_SERIAL_IS_DEFINED == 1 // Hardware JTAG CDC Event
if (HWCDCSerialEvent && HWCDCSerial.available()) {
HWCDCSerialEvent();
}
#endif
#if USB_SERIAL_IS_DEFINED == 1 // Native USB CDC Event
if (USBSerialEvent && USBSerial.available()) {
USBSerialEvent();
}
#endif
// UART0 is default serialEvent()
if (serialEvent && Serial0.available()) {
serialEvent();
}
#if SOC_UART_NUM > 1
if (serialEvent1 && Serial1.available()) {
serialEvent1();
}
#endif
#if SOC_UART_NUM > 2
if (serialEvent2 && Serial2.available()) {
serialEvent2();
}
#endif
#if SOC_UART_NUM > 3
if (serialEvent3 && Serial3.available()) {
serialEvent3();
}
#endif
#if SOC_UART_NUM > 4
if (serialEvent4 && Serial4.available()) {
serialEvent4();
}
#endif
#if SOC_UART_NUM > 5
if (serialEvent5 && Serial5.available()) {
serialEvent5();
}
#endif
}
#endif
#if !CONFIG_DISABLE_HAL_LOCKS
#define HSERIAL_MUTEX_LOCK() \
do { \
} while (xSemaphoreTake(_lock, portMAX_DELAY) != pdPASS)
#define HSERIAL_MUTEX_UNLOCK() xSemaphoreGive(_lock)
#else
#define HSERIAL_MUTEX_LOCK()
#define HSERIAL_MUTEX_UNLOCK()
#endif
HardwareSerial::HardwareSerial(uint8_t uart_nr)
: _uart_nr(uart_nr), _uart(NULL), _rxBufferSize(256), _txBufferSize(0), _onReceiveCB(NULL), _onReceiveErrorCB(NULL), _onReceiveTimeout(false), _rxTimeout(1),
_rxFIFOFull(0), _eventTask(NULL)
#if !CONFIG_DISABLE_HAL_LOCKS
,
_lock(NULL)
#endif
{
#if !CONFIG_DISABLE_HAL_LOCKS
if (_lock == NULL) {
_lock = xSemaphoreCreateMutex();
if (_lock == NULL) {
log_e("xSemaphoreCreateMutex failed");
return;
}
}
#endif
// set deinit function in the Peripheral Manager
uart_init_PeriMan();
}
HardwareSerial::~HardwareSerial() {
end(); // explicit Full UART termination
#if !CONFIG_DISABLE_HAL_LOCKS
if (_lock != NULL) {
vSemaphoreDelete(_lock);
}
#endif
}
void HardwareSerial::_createEventTask(void *args) {
// Creating UART event Task
xTaskCreateUniversal(
_uartEventTask, "uart_event_task", ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE, this, ARDUINO_SERIAL_EVENT_TASK_PRIORITY, &_eventTask,
ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
);
if (_eventTask == NULL) {
log_e(" -- UART%d Event Task not Created!", _uart_nr);
}
}
void HardwareSerial::_destroyEventTask(void) {
if (_eventTask != NULL) {
vTaskDelete(_eventTask);
_eventTask = NULL;
}
}
void HardwareSerial::onReceiveError(OnReceiveErrorCb function) {
HSERIAL_MUTEX_LOCK();
// function may be NULL to cancel onReceive() from its respective task
_onReceiveErrorCB = function;
// this can be called after Serial.begin(), therefore it shall create the event task
if (function != NULL && _uart != NULL && _eventTask == NULL) {
_createEventTask(this);
}
HSERIAL_MUTEX_UNLOCK();
}
void HardwareSerial::onReceive(OnReceiveCb function, bool onlyOnTimeout) {
HSERIAL_MUTEX_LOCK();
// function may be NULL to cancel onReceive() from its respective task
_onReceiveCB = function;
// setting the callback to NULL will just disable it
if (_onReceiveCB != NULL) {
// When Rx timeout is Zero (disabled), the only remaining option is the callback firing when the FIFO reaches 120 bytes
_onReceiveTimeout = _rxTimeout > 0 ? onlyOnTimeout : false;
// in case onReceive() shall work only with RX Timeout, the FIFO Full threshold must be high
// this is a workaround for an IDF issue with events and a low FIFO Full value (< 3)
// Not valid for the LP UART
if (_onReceiveTimeout && _uart_nr < SOC_UART_HP_NUM) {
uartSetRxFIFOFull(_uart, 120);
log_w("OnReceive is set to Timeout only, thus FIFO Full is now 120 bytes.");
}
// this method can be called after Serial.begin(), therefore it shall create the event task
if (_uart != NULL && _eventTask == NULL) {
_createEventTask(this); // Create event task
}
}
HSERIAL_MUTEX_UNLOCK();
}
// This function allows the user to define how many bytes will trigger an Interrupt that will copy RX FIFO to the internal RX Ringbuffer
// ISR will also move data from FIFO to RX Ringbuffer after a RX Timeout defined in HardwareSerial::setRxTimeout(uint8_t symbols_timeout)
// A low value of FIFO Full bytes will consume more CPU time within the ISR
// A high value of FIFO Full bytes will make the application wait longer to have bytes available for the Sketch in a streaming scenario
// Both RX FIFO Full and RX Timeout may affect when onReceive() will be called
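// Usage sketch (editor's illustration, not part of the original file): trigger the RX interrupt for every
// received byte on a slow link; the pin names RX1/TX1 are placeholders for board-specific values.
//
//   Serial1.begin(9600, SERIAL_8N1, RX1, TX1);
//   Serial1.setRxFIFOFull(1); // copy each byte from the FIFO to the RX ring buffer as it arrives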
bool HardwareSerial::setRxFIFOFull(uint8_t fifoBytes) {
HSERIAL_MUTEX_LOCK();
// in case onReceive() shall work only with RX Timeout, the FIFO Full threshold must be high
// this is a workaround for an IDF issue with events and a low FIFO Full value (< 3)
// Not valid for the LP UART
if (_onReceiveCB != NULL && _onReceiveTimeout && _uart_nr < SOC_UART_HP_NUM) {
fifoBytes = 120;
log_w("OnReceive is set to Timeout only, thus FIFO Full is now 120 bytes.");
}
bool retCode = uartSetRxFIFOFull(_uart, fifoBytes); // Set new timeout
if (fifoBytes > 0 && fifoBytes < UART_HW_FIFO_LEN(_uart_nr) - 1) {
_rxFIFOFull = fifoBytes;
}
HSERIAL_MUTEX_UNLOCK();
return retCode;
}
// the timeout is calculated as the time needed to receive that many UART symbols at the current baudrate.
// the estimation is about 11 bits per symbol (SERIAL_8N1)
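// Worked example (editor's note): with SERIAL_8N1 at 115200 baud one symbol takes about 11 / 115200 s ≈ 95 us,
// so setRxTimeout(2) flags a timeout roughly 190 us after the line goes idle; at 9600 baud the same setting waits about 2.3 ms.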
bool HardwareSerial::setRxTimeout(uint8_t symbols_timeout) {
HSERIAL_MUTEX_LOCK();
// Zero disables timeout, thus, onReceive callback will only be called when RX FIFO reaches 120 bytes
// Any non-zero value will activate onReceive callback based on UART baudrate with about 11 bits per symbol
_rxTimeout = symbols_timeout;
if (!symbols_timeout) {
_onReceiveTimeout = false; // only when RX timeout is disabled, we also must disable this flag
}
bool retCode = uartSetRxTimeout(_uart, _rxTimeout); // Set new timeout
HSERIAL_MUTEX_UNLOCK();
return retCode;
}
void HardwareSerial::eventQueueReset() {
QueueHandle_t uartEventQueue = NULL;
if (_uart == NULL) {
return;
}
uartGetEventQueue(_uart, &uartEventQueue);
if (uartEventQueue != NULL) {
xQueueReset(uartEventQueue);
}
}
void HardwareSerial::_uartEventTask(void *args) {
HardwareSerial *uart = (HardwareSerial *)args;
uart_event_t event;
QueueHandle_t uartEventQueue = NULL;
uartGetEventQueue(uart->_uart, &uartEventQueue);
if (uartEventQueue != NULL) {
for (;;) {
//Waiting for UART event.
if (xQueueReceive(uartEventQueue, (void *)&event, (TickType_t)portMAX_DELAY)) {
hardwareSerial_error_t currentErr = UART_NO_ERROR;
switch (event.type) {
case UART_DATA:
if (uart->_onReceiveCB && uart->available() > 0 && ((uart->_onReceiveTimeout && event.timeout_flag) || !uart->_onReceiveTimeout)) {
uart->_onReceiveCB();
}
break;
case UART_FIFO_OVF:
log_w("UART%d FIFO Overflow. Consider adding Hardware Flow Control to your Application.", uart->_uart_nr);
currentErr = UART_FIFO_OVF_ERROR;
break;
case UART_BUFFER_FULL:
log_w("UART%d Buffer Full. Consider increasing your buffer size of your Application.", uart->_uart_nr);
currentErr = UART_BUFFER_FULL_ERROR;
break;
case UART_BREAK:
log_v("UART%d RX break.", uart->_uart_nr);
currentErr = UART_BREAK_ERROR;
break;
case UART_PARITY_ERR:
log_v("UART%d parity error.", uart->_uart_nr);
currentErr = UART_PARITY_ERROR;
break;
case UART_FRAME_ERR:
log_v("UART%d frame error.", uart->_uart_nr);
currentErr = UART_FRAME_ERROR;
break;
default: log_v("UART%d unknown event type %d.", uart->_uart_nr, event.type); break;
}
if (currentErr != UART_NO_ERROR) {
if (uart->_onReceiveErrorCB) {
uart->_onReceiveErrorCB(currentErr);
}
}
}
}
}
vTaskDelete(NULL);
}
void HardwareSerial::begin(unsigned long baud, uint32_t config, int8_t rxPin, int8_t txPin, bool invert, unsigned long timeout_ms, uint8_t rxfifo_full_thrhd) {
if (_uart_nr >= SOC_UART_NUM) {
log_e("Serial number is invalid, please use a number from 0 to %u", SOC_UART_NUM - 1);
return;
}
#if !CONFIG_DISABLE_HAL_LOCKS
if (_lock == NULL) {
log_e("MUTEX Lock failed. Can't begin.");
return;
}
#endif
// map logical pins to GPIO numbers
rxPin = digitalPinToGPIONumber(rxPin);
txPin = digitalPinToGPIONumber(txPin);
int8_t _rxPin = uart_get_RxPin(_uart_nr);
int8_t _txPin = uart_get_TxPin(_uart_nr);
rxPin = rxPin < 0 ? _rxPin : rxPin;
txPin = txPin < 0 ? _txPin : txPin;
HSERIAL_MUTEX_LOCK();
// First Time or after end() --> set default Pins
if (!uartIsDriverInstalled(_uart)) {
// get previously used RX/TX pins, if any.
int8_t _rxPin = uart_get_RxPin(_uart_nr);
int8_t _txPin = uart_get_TxPin(_uart_nr);
switch (_uart_nr) {
case UART_NUM_0:
if (rxPin < 0 && txPin < 0) {
// do not change RX0/TX0 if it has already been set before
rxPin = _rxPin < 0 ? (int8_t)SOC_RX0 : _rxPin;
txPin = _txPin < 0 ? (int8_t)SOC_TX0 : _txPin;
}
break;
#if SOC_UART_HP_NUM > 1
case UART_NUM_1:
if (rxPin < 0 && txPin < 0) {
// do not change RX1/TX1 if it has already been set before
rxPin = _rxPin < 0 ? (int8_t)RX1 : _rxPin;
txPin = _txPin < 0 ? (int8_t)TX1 : _txPin;
}
break;
#endif // UART_NUM_1
#if SOC_UART_HP_NUM > 2
case UART_NUM_2:
if (rxPin < 0 && txPin < 0) {
// do not change RX2/TX2 if it has already been set before
#ifdef RX2
rxPin = _rxPin < 0 ? (int8_t)RX2 : _rxPin;
#endif
#ifdef TX2
txPin = _txPin < 0 ? (int8_t)TX2 : _txPin;
#endif
}
break;
#endif // UART_NUM_2
#if SOC_UART_HP_NUM > 3
case UART_NUM_3:
if (rxPin < 0 && txPin < 0) {
// do not change RX3/TX3 if it has already been set before
#ifdef RX3
rxPin = _rxPin < 0 ? (int8_t)RX3 : _rxPin;
#endif
#ifdef TX3
txPin = _txPin < 0 ? (int8_t)TX3 : _txPin;
#endif
}
break;
#endif // UART_NUM_3
#if SOC_UART_HP_NUM > 4
case UART_NUM_4:
if (rxPin < 0 && txPin < 0) {
// do not change RX4/TX4 if it has already been set before
#ifdef RX4
rxPin = _rxPin < 0 ? (int8_t)RX4 : _rxPin;
#endif
#ifdef TX4
txPin = _txPin < 0 ? (int8_t)TX4 : _txPin;
#endif
}
break;
#endif // UART_NUM_4
#if (SOC_UART_LP_NUM >= 1)
case LP_UART_NUM_0:
if (rxPin < 0 && txPin < 0) {
// do not change RX0_LP/TX0_LP if it has already been set before
#ifdef LP_RX0
rxPin = _rxPin < 0 ? (int8_t)LP_RX0 : _rxPin;
#endif
#ifdef LP_TX0
txPin = _txPin < 0 ? (int8_t)LP_TX0 : _txPin;
#endif
}
break;
#endif // LP_UART_NUM_0
}
}
// if no RX/TX pins are defined, it will not start the UART driver
if (rxPin < 0 && txPin < 0) {
log_e("No RX/TX pins defined. Please set RX/TX pins.");
HSERIAL_MUTEX_UNLOCK();
return;
}
// _testUartBegin() indicates whether uartBegin() will have to initialize a new IDF driver with the new settings
if (_testUartBegin(_uart_nr, baud ? baud : 9600, config, rxPin, txPin, _rxBufferSize, _txBufferSize, invert, rxfifo_full_thrhd)) {
_destroyEventTask(); // when IDF uart driver must be restarted, _eventTask must finish too
}
// IDF UART driver keeps Pin setting on restarting. Negative Pin number will keep it unmodified.
// it will detach previous UART attached pins
_uart = uartBegin(_uart_nr, baud ? baud : 9600, config, rxPin, txPin, _rxBufferSize, _txBufferSize, invert, rxfifo_full_thrhd);
if (_uart == NULL) {
log_e("UART driver failed to start. Please check the logs.");
HSERIAL_MUTEX_UNLOCK();
return;
}
if (!baud) {
// using a baud rate of zero forces it to try to detect the current baud rate in place
uartStartDetectBaudrate(_uart);
time_t startMillis = millis();
unsigned long detectedBaudRate = 0;
while (millis() - startMillis < timeout_ms && !(detectedBaudRate = uartDetectBaudrate(_uart))) {
yield();
}
if (detectedBaudRate) {
delay(100); // Give some time...
_uart = uartBegin(_uart_nr, detectedBaudRate, config, rxPin, txPin, _rxBufferSize, _txBufferSize, invert, rxfifo_full_thrhd);
if (_uart == NULL) {
log_e("UART driver failed to start. Please check the logs.");
HSERIAL_MUTEX_UNLOCK();
return;
}
} else {
log_e("Could not detect baudrate. Serial data at the port must be present within the timeout for detection to be possible");
_uart = NULL;
}
}
// create a task to deal with Serial Events when, for example, calling begin() twice to change the baudrate,
// or when setting the callback before calling begin()
if (_uart != NULL && (_onReceiveCB != NULL || _onReceiveErrorCB != NULL) && _eventTask == NULL) {
_createEventTask(this);
}
// Set UART RX timeout
uartSetRxTimeout(_uart, _rxTimeout);
// Set UART FIFO Full depending on the baud rate.
// Lower baud rates will force it to emulate byte-by-byte reading
// Higher baud rates will keep IDF default of 120 bytes for FIFO FULL Interrupt
// It can also be changed by the application at any time
if (!_rxFIFOFull) { // it has not been changed before calling begin()
// set a default FIFO Full value for the IDF driver
uint8_t fifoFull = 1;
// if baud rate is higher than 57600 or onReceive() is set, it will set FIFO Full to 120 bytes, except for LP UART
if (_uart_nr < SOC_UART_HP_NUM && (baud > 57600 || (_onReceiveCB != NULL && _onReceiveTimeout))) {
fifoFull = 120;
}
uartSetRxFIFOFull(_uart, fifoFull);
_rxFIFOFull = fifoFull;
}
HSERIAL_MUTEX_UNLOCK();
}
void HardwareSerial::updateBaudRate(unsigned long baud) {
uartSetBaudRate(_uart, baud);
}
void HardwareSerial::end() {
// default Serial.end() will completely disable HardwareSerial,
// including any tasks or debug message channel (log_x()) - but not for IDF log messages!
_onReceiveCB = NULL;
_onReceiveErrorCB = NULL;
if (uartGetDebug() == _uart_nr) {
uartSetDebug(0);
}
_rxFIFOFull = 0;
uartEnd(_uart_nr); // fully detach all pins and delete the UART driver
_destroyEventTask(); // when IDF uart driver is deleted, _eventTask must finish too
_uart = NULL;
}
void HardwareSerial::setDebugOutput(bool en) {
if (_uart == 0) {
return;
}
#if (SOC_UART_LP_NUM >= 1)
if (_uart_nr >= SOC_UART_HP_NUM) {
log_e("LP UART does not support Debug Output.");
return;
}
#endif
if (en) {
uartSetDebug(_uart);
} else {
if (uartGetDebug() == _uart_nr) {
uartSetDebug(NULL);
}
}
}
int HardwareSerial::available(void) {
return uartAvailable(_uart);
}
int HardwareSerial::availableForWrite(void) {
return uartAvailableForWrite(_uart);
}
int HardwareSerial::peek(void) {
if (available()) {
return uartPeek(_uart);
}
return -1;
}
int HardwareSerial::read(void) {
uint8_t c = 0;
if (uartReadBytes(_uart, &c, 1, 0) == 1) {
return c;
} else {
return -1;
}
}
// read characters into buffer
// terminates if size characters have been read, or no further are pending
// returns the number of characters placed in the buffer
// the buffer is NOT null terminated.
size_t HardwareSerial::read(uint8_t *buffer, size_t size) {
return uartReadBytes(_uart, buffer, size, 0);
}
// Overrides Stream::readBytes() to be faster using IDF
size_t HardwareSerial::readBytes(uint8_t *buffer, size_t length) {
return uartReadBytes(_uart, buffer, length, (uint32_t)getTimeout());
}
void HardwareSerial::flush(void) {
uartFlush(_uart);
}
void HardwareSerial::flush(bool txOnly) {
uartFlushTxOnly(_uart, txOnly);
}
size_t HardwareSerial::write(uint8_t c) {
uartWrite(_uart, c);
return 1;
}
size_t HardwareSerial::write(const uint8_t *buffer, size_t size) {
uartWriteBuf(_uart, buffer, size);
return size;
}
uint32_t HardwareSerial::baudRate() {
return uartGetBaudRate(_uart);
}
HardwareSerial::operator bool() const {
return uartIsDriverInstalled(_uart);
}
void HardwareSerial::setRxInvert(bool invert) {
uartSetRxInvert(_uart, invert);
}
// negative Pin value will keep it unmodified
// can be called after or before begin()
bool HardwareSerial::setPins(int8_t rxPin, int8_t txPin, int8_t ctsPin, int8_t rtsPin) {
// map logical pins to GPIO numbers
rxPin = digitalPinToGPIONumber(rxPin);
txPin = digitalPinToGPIONumber(txPin);
ctsPin = digitalPinToGPIONumber(ctsPin);
rtsPin = digitalPinToGPIONumber(rtsPin);
// uartSetPins() checks if pins are valid and, if necessary, detaches the previous ones
return uartSetPins(_uart_nr, rxPin, txPin, ctsPin, rtsPin);
}
// Enables or disables Hardware Flow Control using RTS and/or CTS pins
// must use setAllPins() in order to set RTS/CTS pins
// SerialHwFlowCtrl = UART_HW_FLOWCTRL_DISABLE, UART_HW_FLOWCTRL_RTS,
// UART_HW_FLOWCTRL_CTS, UART_HW_FLOWCTRL_CTS_RTS
bool HardwareSerial::setHwFlowCtrlMode(SerialHwFlowCtrl mode, uint8_t threshold) {
return uartSetHwFlowCtrlMode(_uart, mode, threshold);
}
// Sets the uart mode in the esp32 uart for use with RS485 modes
// HwFlowCtrl must be disabled and RTS pin set
// SerialMode = UART_MODE_UART, UART_MODE_RS485_HALF_DUPLEX, UART_MODE_IRDA,
// or testing mode: UART_MODE_RS485_COLLISION_DETECT, UART_MODE_RS485_APP_CTRL
bool HardwareSerial::setMode(SerialMode mode) {
return uartSetMode(_uart, mode);
}
// Sets the UART Clock Source based on the compatible SoC options
// This method must be called before starting UART using begin(), otherwise it won't have any effect.
// Clock Source Options are:
// UART_CLK_SRC_DEFAULT :: any SoC - it will set whatever IDF defines as the default UART Clock Source
// UART_CLK_SRC_APB :: ESP32, ESP32-S2, ESP32-C3 and ESP32-S3
// UART_CLK_SRC_PLL :: ESP32-C2, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2 and ESP32-P4
// UART_CLK_SRC_XTAL :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_RTC :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_REF_TICK :: ESP32 and ESP32-S2
// Note: CLK_SRC_PLL Freq depends on the SoC - ESP32-C2 has 40MHz, ESP32-H2 has 48MHz and ESP32-C5, C6, C61 and P4 has 80MHz
// Note: ESP32-C6, C61, ESP32-P4 and ESP32-C5 have LP UART that will use only RTC_FAST or XTAL/2 as Clock Source
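// Usage sketch (editor's illustration, not part of the original file): pick the XTAL clock source on a SoC
// that supports it, before the port is started; the pin names RX1/TX1 are placeholders.
//
//   Serial1.setClockSource(UART_CLK_SRC_XTAL);
//   Serial1.begin(115200, SERIAL_8N1, RX1, TX1);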
bool HardwareSerial::setClockSource(SerialClkSrc clkSrc) {
if (_uart) {
log_e("No Clock Source change was done. This function must be called before beginning UART%d.", _uart_nr);
return false;
}
return uartSetClockSource(_uart_nr, (uart_sclk_t)clkSrc);
}
// minimum total RX Buffer size is the UART FIFO space (128 bytes for most SoC) + 1. IDF imposition.
// LP UART has FIFO of 16 bytes
size_t HardwareSerial::setRxBufferSize(size_t new_size) {
if (_uart) {
log_e("RX Buffer can't be resized when Serial is already running. Set it before calling begin().");
return 0;
}
uint8_t FIFOLen = UART_HW_FIFO_LEN(_uart_nr);
// Valid value is higher than the FIFO length
if (new_size <= FIFOLen) {
new_size = FIFOLen + 1;
log_w("RX Buffer set to minimum value: %d.", new_size);
}
_rxBufferSize = new_size;
return _rxBufferSize;
}
// minimum total TX Buffer size is the UART FIFO space (128 bytes for most SoC) + 1.
// LP UART has FIFO of 16 bytes
size_t HardwareSerial::setTxBufferSize(size_t new_size) {
if (_uart) {
log_e("TX Buffer can't be resized when Serial is already running. Set it before calling begin().");
return 0;
}
uint8_t FIFOLen = UART_HW_FIFO_LEN(_uart_nr);
// Valid values are zero or higher than the FIFO length
if (new_size > 0 && new_size <= FIFOLen) {
new_size = FIFOLen + 1;
log_w("TX Buffer set to minimum value: %d.", new_size);
}
// if new_size is higher than SOC_UART_FIFO_LEN, the TX Ringbuffer will be active and will be used to report back "availableForWrite()"
_txBufferSize = new_size;
return new_size;
}
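// Usage sketch (editor's illustration, not part of the original file): both buffers must be sized before
// begin(); requests at or below the FIFO length are bumped to FIFO length + 1 as described above.
//
//   Serial1.setRxBufferSize(1024);
//   Serial1.setTxBufferSize(512);
//   Serial1.begin(115200, SERIAL_8N1, RX1, TX1);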

View file

@ -1,449 +0,0 @@
/*
HardwareSerial.h - Hardware serial library for Wiring
Copyright (c) 2006 Nicholas Zambetti. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Modified 28 September 2010 by Mark Sproul
Modified 14 August 2012 by Alarus
Modified 3 December 2013 by Matthijs Kooijman
Modified 18 December 2014 by Ivan Grokhotkov (esp8266 platform support)
Modified 31 March 2015 by Markus Sattler (rewrite the code for UART0 + UART1 support in ESP8266)
Modified 25 April 2015 by Thomas Flayols (add configuration different from 8N1 in ESP8266)
Modified 13 October 2018 by Jeroen Döll (add baudrate detection)
Baudrate detection example usage (detection on Serial1):
void setup() {
Serial.begin(115200);
delay(100);
Serial.println();
Serial1.begin(0, SERIAL_8N1, -1, -1, true, 11000UL); // Passing 0 for baudrate to detect it, the last parameter is a timeout in ms
unsigned long detectedBaudRate = Serial1.baudRate();
if(detectedBaudRate) {
Serial.printf("Detected baudrate is %lu\n", detectedBaudRate);
} else {
Serial.println("No baudrate detected, Serial1 will not work!");
}
}
Pay attention: the baudrate returned by baudRate() may be rounded, e.g. 115200 returns 115201
*/
#ifndef HardwareSerial_h
#define HardwareSerial_h
#include <inttypes.h>
#include <functional>
#include "Stream.h"
#include "esp32-hal.h"
#include "soc/soc_caps.h"
#include "HWCDC.h"
#include "USBCDC.h"
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "freertos/semphr.h"
enum SerialConfig {
SERIAL_5N1 = 0x8000010,
SERIAL_6N1 = 0x8000014,
SERIAL_7N1 = 0x8000018,
SERIAL_8N1 = 0x800001c,
SERIAL_5N2 = 0x8000030,
SERIAL_6N2 = 0x8000034,
SERIAL_7N2 = 0x8000038,
SERIAL_8N2 = 0x800003c,
SERIAL_5E1 = 0x8000012,
SERIAL_6E1 = 0x8000016,
SERIAL_7E1 = 0x800001a,
SERIAL_8E1 = 0x800001e,
SERIAL_5E2 = 0x8000032,
SERIAL_6E2 = 0x8000036,
SERIAL_7E2 = 0x800003a,
SERIAL_8E2 = 0x800003e,
SERIAL_5O1 = 0x8000013,
SERIAL_6O1 = 0x8000017,
SERIAL_7O1 = 0x800001b,
SERIAL_8O1 = 0x800001f,
SERIAL_5O2 = 0x8000033,
SERIAL_6O2 = 0x8000037,
SERIAL_7O2 = 0x800003b,
SERIAL_8O2 = 0x800003f
};
typedef uart_mode_t SerialMode;
typedef uart_hw_flowcontrol_t SerialHwFlowCtrl;
typedef enum {
UART_NO_ERROR,
UART_BREAK_ERROR,
UART_BUFFER_FULL_ERROR,
UART_FIFO_OVF_ERROR,
UART_FRAME_ERROR,
UART_PARITY_ERROR
} hardwareSerial_error_t;
typedef enum {
UART_CLK_SRC_DEFAULT = UART_SCLK_DEFAULT,
#if SOC_UART_SUPPORT_APB_CLK
UART_CLK_SRC_APB = UART_SCLK_APB,
#endif
#if SOC_UART_SUPPORT_PLL_F40M_CLK
UART_CLK_SRC_PLL = UART_SCLK_PLL_F40M,
#elif SOC_UART_SUPPORT_PLL_F80M_CLK
UART_CLK_SRC_PLL = UART_SCLK_PLL_F80M,
#elif CONFIG_IDF_TARGET_ESP32H2
UART_CLK_SRC_PLL = UART_SCLK_PLL_F48M,
#endif
#if SOC_UART_SUPPORT_XTAL_CLK
UART_CLK_SRC_XTAL = UART_SCLK_XTAL,
#endif
#if SOC_UART_SUPPORT_RTC_CLK
UART_CLK_SRC_RTC = UART_SCLK_RTC,
#endif
#if SOC_UART_SUPPORT_REF_TICK
UART_CLK_SRC_REF_TICK = UART_SCLK_REF_TICK,
#endif
} SerialClkSrc;
#ifndef ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#ifndef CONFIG_ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#define ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE 2048
#else
#define ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE CONFIG_ARDUINO_SERIAL_EVENT_TASK_STACK_SIZE
#endif
#endif
#ifndef ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#ifndef CONFIG_ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#define ARDUINO_SERIAL_EVENT_TASK_PRIORITY (configMAX_PRIORITIES - 1)
#else
#define ARDUINO_SERIAL_EVENT_TASK_PRIORITY CONFIG_ARDUINO_SERIAL_EVENT_TASK_PRIORITY
#endif
#endif
#ifndef ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#ifndef CONFIG_ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#define ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE -1
#else
#define ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE CONFIG_ARDUINO_SERIAL_EVENT_TASK_RUNNING_CORE
#endif
#endif
// UART0 pins are defined by default by the bootloader.
// The definitions for SOC_* should not be changed unless the bootloader pins
// have changed and you know what you are doing.
#ifndef SOC_RX0
#if CONFIG_IDF_TARGET_ESP32
#define SOC_RX0 (gpio_num_t)3
#elif CONFIG_IDF_TARGET_ESP32S2 || CONFIG_IDF_TARGET_ESP32S3
#define SOC_RX0 (gpio_num_t)44
#elif CONFIG_IDF_TARGET_ESP32C2
#define SOC_RX0 (gpio_num_t)19
#elif CONFIG_IDF_TARGET_ESP32C3
#define SOC_RX0 (gpio_num_t)20
#elif CONFIG_IDF_TARGET_ESP32C6
#define SOC_RX0 (gpio_num_t)17
#elif CONFIG_IDF_TARGET_ESP32H2
#define SOC_RX0 (gpio_num_t)23
#elif CONFIG_IDF_TARGET_ESP32P4
#define SOC_RX0 (gpio_num_t)38
#elif CONFIG_IDF_TARGET_ESP32C5
#define SOC_RX0 (gpio_num_t)12
#endif
#endif
#ifndef SOC_TX0
#if CONFIG_IDF_TARGET_ESP32
#define SOC_TX0 (gpio_num_t)1
#elif CONFIG_IDF_TARGET_ESP32S2 || CONFIG_IDF_TARGET_ESP32S3
#define SOC_TX0 (gpio_num_t)43
#elif CONFIG_IDF_TARGET_ESP32C2
#define SOC_TX0 (gpio_num_t)20
#elif CONFIG_IDF_TARGET_ESP32C3
#define SOC_TX0 (gpio_num_t)21
#elif CONFIG_IDF_TARGET_ESP32C6
#define SOC_TX0 (gpio_num_t)16
#elif CONFIG_IDF_TARGET_ESP32H2
#define SOC_TX0 (gpio_num_t)24
#elif CONFIG_IDF_TARGET_ESP32P4
#define SOC_TX0 (gpio_num_t)37
#elif CONFIG_IDF_TARGET_ESP32C5
#define SOC_TX0 (gpio_num_t)11
#endif
#endif
// Default pins for UART1 are arbitrary, and defined here for convenience.
#if SOC_UART_HP_NUM > 1
#ifndef RX1
#if CONFIG_IDF_TARGET_ESP32
#define RX1 (gpio_num_t)26
#elif CONFIG_IDF_TARGET_ESP32S2
#define RX1 (gpio_num_t)4
#elif CONFIG_IDF_TARGET_ESP32C2
#define RX1 (gpio_num_t)10
#elif CONFIG_IDF_TARGET_ESP32C3
#define RX1 (gpio_num_t)18
#elif CONFIG_IDF_TARGET_ESP32S3
#define RX1 (gpio_num_t)15
#elif CONFIG_IDF_TARGET_ESP32C6
#define RX1 (gpio_num_t)4
#elif CONFIG_IDF_TARGET_ESP32H2
#define RX1 (gpio_num_t)0
#elif CONFIG_IDF_TARGET_ESP32P4
#define RX1 (gpio_num_t)11
#elif CONFIG_IDF_TARGET_ESP32C5
#define RX1 (gpio_num_t)4
#endif
#endif
#ifndef TX1
#if CONFIG_IDF_TARGET_ESP32
#define TX1 (gpio_num_t)27
#elif CONFIG_IDF_TARGET_ESP32S2
#define TX1 (gpio_num_t)5
#elif CONFIG_IDF_TARGET_ESP32C2
#define TX1 (gpio_num_t)18
#elif CONFIG_IDF_TARGET_ESP32C3
#define TX1 (gpio_num_t)19
#elif CONFIG_IDF_TARGET_ESP32S3
#define TX1 (gpio_num_t)16
#elif CONFIG_IDF_TARGET_ESP32C6
#define TX1 (gpio_num_t)5
#elif CONFIG_IDF_TARGET_ESP32H2
#define TX1 (gpio_num_t)1
#elif CONFIG_IDF_TARGET_ESP32P4
#define TX1 (gpio_num_t)10
#elif CONFIG_IDF_TARGET_ESP32C5
#define TX1 (gpio_num_t)5
#endif
#endif
#endif /* SOC_UART_HP_NUM > 1 */
// Default pins for UART2 are arbitrary, and defined here for convenience.
#if SOC_UART_HP_NUM > 2
#ifndef RX2
#if CONFIG_IDF_TARGET_ESP32
#define RX2 (gpio_num_t)4
#elif CONFIG_IDF_TARGET_ESP32S3
#define RX2 (gpio_num_t)19
#endif
#endif
#ifndef TX2
#if CONFIG_IDF_TARGET_ESP32
#define TX2 (gpio_num_t)25
#elif CONFIG_IDF_TARGET_ESP32S3
#define TX2 (gpio_num_t)20
#endif
#endif
#endif /* SOC_UART_HP_NUM > 2 */
#if SOC_UART_LP_NUM >= 1
#ifndef LP_RX0
#define LP_RX0 (gpio_num_t) LP_U0RXD_GPIO_NUM
#endif
#ifndef LP_TX0
#define LP_TX0 (gpio_num_t) LP_U0TXD_GPIO_NUM
#endif
#endif /* SOC_UART_LP_NUM >= 1 */
typedef std::function<void(void)> OnReceiveCb;
typedef std::function<void(hardwareSerial_error_t)> OnReceiveErrorCb;
class HardwareSerial : public Stream {
public:
HardwareSerial(uint8_t uart_nr);
~HardwareSerial();
// setRxTimeout sets the timeout after which onReceive callback will be called (after receiving data, it waits for this time of UART rx inactivity to call the callback fnc)
// param symbols_timeout defines a timeout threshold in uart symbol periods. Setting 0 symbol timeout disables the callback call by timeout.
// Maximum timeout setting is calculated automatically by IDF. If set above the maximum, it is ignored and an error is printed on Serial0 (check console).
// Examples: Maximum for 11 bits symbol is 92 (SERIAL_8N2, SERIAL_8E1, SERIAL_8O1, etc), Maximum for 10 bits symbol is 101 (SERIAL_8N1).
// For example, symbols_timeout=1 defines a timeout equal to the transmission time of one symbol (~11 bits) at the current baudrate.
// For a baudrate of 9600, SERIAL_8N1 (10 bit symbol) and symbols_timeout = 3, the timeout would be 3 / (9600 / 10) = 3.125 ms
bool setRxTimeout(uint8_t symbols_timeout);
// setRxFIFOFull(uint8_t fifoBytes) will set the number of bytes that will trigger UART_INTR_RXFIFO_FULL interrupt and fill up RxRingBuffer
// This affects functions such as Serial::available() and Serial.read() because, while receiving a UART data flow, the Serial internal
// RxRingBuffer will be filled only after this number of bytes has arrived or an RX Timeout happens.
// This parameter can be set to 1 in order to receive byte by byte, but it will also consume more CPU time as the ISR will be activated more often.
bool setRxFIFOFull(uint8_t fifoBytes);
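// Hypothetical RX tuning sketch (illustrative only), e.g. after Serial1.begin(115200):
//   Serial1.setRxFIFOFull(1);    // raise the RX interrupt on every received byte (higher CPU load)
//   Serial1.setRxTimeout(3);     // report an RX timeout after 3 symbol periods of line inactivity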
// onReceive will set up a callback that will be called whenever a UART interrupt occurs (UART_INTR_RXFIFO_FULL or UART_INTR_RXFIFO_TOUT)
// UART_INTR_RXFIFO_FULL interrupt triggers at UART_FULL_THRESH_DEFAULT bytes received (defined as 120 bytes by default in IDF)
// UART_INTR_RXFIFO_TOUT interrupt triggers at UART_TOUT_THRESH_DEFAULT symbols passed without any reception (defined as 10 symbols by default in IDF)
// The onlyOnTimeout parameter defines how onReceive behaves:
// true -- The callback will only be called when an RX Timeout happens.
//         The whole stream of bytes will be ready for reading in the callback function at once.
//         This option may lead to RX Overflow depending on the RX Buffer Size and the number of bytes received in the stream.
// false (default) -- The callback will be called when the FIFO reaches 120 bytes and also on RX Timeout.
//         The stream of incoming bytes will be "split" into blocks of 120 bytes on each callback.
//         This option avoids any sort of RX Overflow, but leaves the UART packet reassembly work to the Application.
void onReceive(OnReceiveCb function, bool onlyOnTimeout = false);
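// Hypothetical callback sketch (illustrative only):
//   Serial1.onReceive([]() {
//     while (Serial1.available()) {
//       Serial.write(Serial1.read());    // echo everything received on UART1 to the console
//     }
//   });                                  // onlyOnTimeout left at its default (false)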
// onReceiveError sets a callback that will be called on error events (see hardwareSerial_error_t)
void onReceiveError(OnReceiveErrorCb function);
// eventQueueReset clears all events in the queue (the events that trigger onReceive and onReceiveError) - may be useful in some use cases
void eventQueueReset();
// When pins are changed, it will detach the previous ones
// if pin is negative, it won't be set/changed and will be kept as is
// timeout_ms is used in baudrate detection (ESP32, ESP32S2 only)
// invert will invert RX/TX polarity
// rxfifo_full_thrhd is the UART Flow Control Threshold in the UART FIFO (max 127)
void begin(
unsigned long baud, uint32_t config = SERIAL_8N1, int8_t rxPin = -1, int8_t txPin = -1, bool invert = false, unsigned long timeout_ms = 20000UL,
uint8_t rxfifo_full_thrhd = 120
);
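// Hypothetical usage sketch (illustrative only; pin numbers are placeholders):
//   Serial1.begin(115200, SERIAL_8N1, 18, 17);    // UART1 on RX = 18 / TX = 17
//   Serial1.setPins(25, 26);                      // pins can be moved later; the previous ones are detached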
void end(void);
void updateBaudRate(unsigned long baud);
int available(void);
int availableForWrite(void);
int peek(void);
int read(void);
size_t read(uint8_t *buffer, size_t size);
inline size_t read(char *buffer, size_t size) {
return read((uint8_t *)buffer, size);
}
// Overrides Stream::readBytes() to be faster using IDF
size_t readBytes(uint8_t *buffer, size_t length);
size_t readBytes(char *buffer, size_t length) {
return readBytes((uint8_t *)buffer, length);
}
void flush(void);
void flush(bool txOnly);
size_t write(uint8_t);
size_t write(const uint8_t *buffer, size_t size);
inline size_t write(const char *buffer, size_t size) {
return write((uint8_t *)buffer, size);
}
inline size_t write(const char *s) {
return write((uint8_t *)s, strlen(s));
}
inline size_t write(unsigned long n) {
return write((uint8_t)n);
}
inline size_t write(long n) {
return write((uint8_t)n);
}
inline size_t write(unsigned int n) {
return write((uint8_t)n);
}
inline size_t write(int n) {
return write((uint8_t)n);
}
uint32_t baudRate();
operator bool() const;
void setDebugOutput(bool);
void setRxInvert(bool);
// Negative Pin Number will keep it unmodified, thus this function can set individual pins
// setPins() can be called after or before begin()
// When pins are changed, it will detach the previous ones
bool setPins(int8_t rxPin, int8_t txPin, int8_t ctsPin = -1, int8_t rtsPin = -1);
// Enables or disables Hardware Flow Control using RTS and/or CTS pins (these pins must be assigned with setPins() beforehand)
// UART_HW_FLOWCTRL_DISABLE = 0x0 disable hardware flow control
// UART_HW_FLOWCTRL_RTS = 0x1 enable RX hardware flow control (rts)
// UART_HW_FLOWCTRL_CTS = 0x2 enable TX hardware flow control (cts)
// UART_HW_FLOWCTRL_CTS_RTS = 0x3 enable hardware flow control
bool setHwFlowCtrlMode(SerialHwFlowCtrl mode = UART_HW_FLOWCTRL_CTS_RTS, uint8_t threshold = 64); // 64 is half FIFO Length
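// Hypothetical flow-control sketch (illustrative only; pin numbers are placeholders):
//   Serial1.setPins(-1, -1, 6, 7);                             // keep RX/TX, assign CTS = 6 and RTS = 7
//   Serial1.setHwFlowCtrlMode(UART_HW_FLOWCTRL_CTS_RTS, 64);   // threshold of 64 bytes (half of the HW FIFO)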
// Used to set RS485 modes such as UART_MODE_RS485_HALF_DUPLEX for Auto RTS function on ESP32
// UART_MODE_UART = 0x00 mode: regular UART mode
// UART_MODE_RS485_HALF_DUPLEX = 0x01 mode: half duplex RS485 UART mode control by RTS pin
// UART_MODE_IRDA = 0x02 mode: IRDA UART mode
// UART_MODE_RS485_COLLISION_DETECT = 0x03 mode: RS485 collision detection UART mode (used for test purposes)
// UART_MODE_RS485_APP_CTRL = 0x04 mode: application control RS485 UART mode (used for test purposes)
bool setMode(SerialMode mode);
// Used to set the UART clock source mode. It must be set before calling begin(), otherwise it won't have any effect.
// Not all clock sources are available to every SoC. The compatible options are listed here:
// UART_CLK_SRC_DEFAULT :: any SoC - it will set whatever IDF defines as the default UART Clock Source
// UART_CLK_SRC_APB :: ESP32, ESP32-S2, ESP32-C3 and ESP32-S3
// UART_CLK_SRC_PLL :: ESP32-C2, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2 and ESP32-P4
// UART_CLK_SRC_XTAL :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_RTC :: ESP32-C2, ESP32-C3, ESP32-C5, ESP32-C6, ESP32-C61, ESP32-H2, ESP32-S3 and ESP32-P4
// UART_CLK_SRC_REF_TICK :: ESP32 and ESP32-S2
// Note: CLK_SRC_PLL Freq depends on the SoC - ESP32-C2 has 40MHz, ESP32-H2 has 48MHz and ESP32-C5, C6, C61 and P4 have 80MHz
// Note: ESP32-C6, C61, ESP32-P4 and ESP32-C5 have an LP UART that will use only RTC_FAST or XTAL/2 as its Clock Source
bool setClockSource(SerialClkSrc clkSrc);
size_t setRxBufferSize(size_t new_size);
size_t setTxBufferSize(size_t new_size);
protected:
uint8_t _uart_nr;
uart_t *_uart;
size_t _rxBufferSize;
size_t _txBufferSize;
OnReceiveCb _onReceiveCB;
OnReceiveErrorCb _onReceiveErrorCB;
// _onReceive and _rxTimeout have to be consistent when timeout is disabled
bool _onReceiveTimeout;
uint8_t _rxTimeout, _rxFIFOFull;
TaskHandle_t _eventTask;
#if !CONFIG_DISABLE_HAL_LOCKS
SemaphoreHandle_t _lock;
#endif
void _createEventTask(void *args);
void _destroyEventTask(void);
static void _uartEventTask(void *args);
};
extern void serialEventRun(void) __attribute__((weak));
#if !defined(NO_GLOBAL_INSTANCES) && !defined(NO_GLOBAL_SERIAL)
#ifndef ARDUINO_USB_CDC_ON_BOOT
#define ARDUINO_USB_CDC_ON_BOOT 0
#endif
#if ARDUINO_USB_CDC_ON_BOOT //Serial used from Native_USB_CDC | HW_CDC_JTAG
#if ARDUINO_USB_MODE // Hardware CDC mode
// Arduino Serial is the HW JTAG CDC device
#define Serial HWCDCSerial
#else // !ARDUINO_USB_MODE -- Native USB Mode
// Arduino Serial is the Native USB CDC device
#define Serial USBSerial
#endif // ARDUINO_USB_MODE
#else // !ARDUINO_USB_CDC_ON_BOOT -- Serial is used from UART0
// if not using CDC on Boot, Arduino Serial is the UART0 device
#define Serial Serial0
#endif // ARDUINO_USB_CDC_ON_BOOT
// There is always Serial0 for UART0
extern HardwareSerial Serial0;
#if SOC_UART_NUM > 1
extern HardwareSerial Serial1;
#endif
#if SOC_UART_NUM > 2
extern HardwareSerial Serial2;
#endif
#if SOC_UART_NUM > 3
extern HardwareSerial Serial3;
#endif
#if SOC_UART_NUM > 4
extern HardwareSerial Serial4;
#endif
#if SOC_UART_NUM > 5
extern HardwareSerial Serial5;
#endif
#endif //!defined(NO_GLOBAL_INSTANCES) && !defined(NO_GLOBAL_SERIAL)
#endif // HardwareSerial_h


@ -1,48 +0,0 @@
// Copyright 2024 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef HashBuilder_h
#define HashBuilder_h
#include <WString.h>
#include <Stream.h>
#include "HEXBuilder.h"
class HashBuilder : public HEXBuilder {
public:
virtual ~HashBuilder() {}
virtual void begin() = 0;
virtual void add(const uint8_t *data, size_t len) = 0;
virtual void add(const char *data) {
add((const uint8_t *)data, strlen(data));
}
virtual void add(String data) {
add(data.c_str());
}
virtual void addHexString(const char *data) = 0;
virtual void addHexString(String data) {
addHexString(data.c_str());
}
virtual bool addStream(Stream &stream, const size_t maxLen) = 0;
virtual void calculate() = 0;
virtual void getBytes(uint8_t *output) = 0;
virtual void getChars(char *output) = 0;
virtual String toString() = 0;
};
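// Hypothetical usage sketch (illustrative only), assuming a concrete subclass such as MD5Builder:
//   MD5Builder md5;
//   md5.begin();
//   md5.add("hello world");
//   md5.calculate();
//   Serial.println(md5.toString());    // digest printed as a lowercase hex string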
#endif


@ -1,453 +0,0 @@
/*
IPAddress.cpp - Base class that provides IPAddress
Copyright (c) 2011 Adrian McEwen. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "IPAddress.h"
#include "Print.h"
#include "lwip/netif.h"
#include "StreamString.h"
#ifndef CONFIG_LWIP_IPV6
#define IP6_NO_ZONE 0
#endif
IPAddress::IPAddress() : IPAddress(IPv4) {}
IPAddress::IPAddress(IPType ip_type) {
_type = ip_type;
_zone = IP6_NO_ZONE;
memset(_address.bytes, 0, sizeof(_address.bytes));
}
IPAddress::IPAddress(uint8_t first_octet, uint8_t second_octet, uint8_t third_octet, uint8_t fourth_octet) {
_type = IPv4;
_zone = IP6_NO_ZONE;
memset(_address.bytes, 0, sizeof(_address.bytes));
_address.bytes[IPADDRESS_V4_BYTES_INDEX] = first_octet;
_address.bytes[IPADDRESS_V4_BYTES_INDEX + 1] = second_octet;
_address.bytes[IPADDRESS_V4_BYTES_INDEX + 2] = third_octet;
_address.bytes[IPADDRESS_V4_BYTES_INDEX + 3] = fourth_octet;
}
IPAddress::IPAddress(
uint8_t o1, uint8_t o2, uint8_t o3, uint8_t o4, uint8_t o5, uint8_t o6, uint8_t o7, uint8_t o8, uint8_t o9, uint8_t o10, uint8_t o11, uint8_t o12,
uint8_t o13, uint8_t o14, uint8_t o15, uint8_t o16, uint8_t z
) {
_type = IPv6;
_address.bytes[0] = o1;
_address.bytes[1] = o2;
_address.bytes[2] = o3;
_address.bytes[3] = o4;
_address.bytes[4] = o5;
_address.bytes[5] = o6;
_address.bytes[6] = o7;
_address.bytes[7] = o8;
_address.bytes[8] = o9;
_address.bytes[9] = o10;
_address.bytes[10] = o11;
_address.bytes[11] = o12;
_address.bytes[12] = o13;
_address.bytes[13] = o14;
_address.bytes[14] = o15;
_address.bytes[15] = o16;
_zone = z;
}
IPAddress::IPAddress(uint32_t address) {
// IPv4 only
_type = IPv4;
_zone = IP6_NO_ZONE;
memset(_address.bytes, 0, sizeof(_address.bytes));
_address.dword[IPADDRESS_V4_DWORD_INDEX] = address;
// NOTE on conversion/comparison and uint32_t:
// These conversions are host platform dependent.
// There is a defined integer representation of IPv4 addresses,
// based on network byte order (will be the value on big endian systems),
// e.g. http://2398766798 is the same as http://142.250.70.206,
// However on little-endian systems the octets 0x8E, 0xFA, 0x46, 0xCE,
// in that order, will form the integer (uint32_t) 3460758158.
}
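// Illustrative sketch of the note above (not part of the original file):
//   IPAddress a(142, 250, 70, 206);
//   IPAddress b((uint32_t)3460758158UL);    // 0xCE46FA8E: same octets in little-endian memory order
//   bool same = (a == b);                   // true on the little-endian ESP32 targets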
IPAddress::IPAddress(const uint8_t *address) : IPAddress(IPv4, address) {}
IPAddress::IPAddress(IPType ip_type, const uint8_t *address, uint8_t z) {
_type = ip_type;
if (ip_type == IPv4) {
memset(_address.bytes, 0, sizeof(_address.bytes));
memcpy(&_address.bytes[IPADDRESS_V4_BYTES_INDEX], address, sizeof(uint32_t));
_zone = 0;
} else {
memcpy(_address.bytes, address, sizeof(_address.bytes));
_zone = z;
}
}
IPAddress::IPAddress(const char *address) {
fromString(address);
}
IPAddress::IPAddress(const IPAddress &address) {
*this = address;
}
String IPAddress::toString(bool includeZone) const {
StreamString s;
printTo(s, includeZone);
return String(s);
}
bool IPAddress::fromString(const char *address) {
if (!fromString4(address)) {
return fromString6(address);
}
return true;
}
bool IPAddress::fromString4(const char *address) {
// TODO: add support for "a", "a.b", "a.b.c" formats
int16_t acc = -1; // Accumulator
uint8_t dots = 0;
memset(_address.bytes, 0, sizeof(_address.bytes));
while (*address) {
char c = *address++;
if (c >= '0' && c <= '9') {
acc = (acc < 0) ? (c - '0') : acc * 10 + (c - '0');
if (acc > 255) {
// Value out of [0..255] range
return false;
}
} else if (c == '.') {
if (dots == 3) {
// Too many dots (there must be 3 dots)
return false;
}
if (acc < 0) {
/* No value between dots, e.g. '1..' */
return false;
}
_address.bytes[IPADDRESS_V4_BYTES_INDEX + dots++] = acc;
acc = -1;
} else {
// Invalid char
return false;
}
}
if (dots != 3) {
// Too few dots (there must be 3 dots)
return false;
}
if (acc < 0) {
/* No value between dots, e.g. '1..' */
return false;
}
_address.bytes[IPADDRESS_V4_BYTES_INDEX + 3] = acc;
_type = IPv4;
return true;
}
bool IPAddress::fromString6(const char *address) {
uint32_t acc = 0; // Accumulator
int colons = 0, double_colons = -1;
while (*address) {
char c = tolower(*address++);
if (isalnum(c) && c <= 'f') {
if (c >= 'a') {
c -= 'a' - '0' - 10;
}
acc = acc * 16 + (c - '0');
if (acc > 0xffff) {
// Value out of range
return false;
}
} else if (c == ':') {
if (*address == ':') {
if (double_colons >= 0) {
// :: allowed once
return false;
}
if (*address != '\0' && *(address + 1) == ':') {
// ::: not allowed
return false;
}
// remember location
double_colons = colons + !!acc;
address++;
} else if (*address == '\0') {
// can't end with a single colon
return false;
}
if (colons == 7) {
// too many separators
return false;
}
_address.bytes[colons * 2] = acc >> 8;
_address.bytes[colons * 2 + 1] = acc & 0xff;
colons++;
acc = 0;
} else if (c == '%') {
// netif_index_to_name crashes on latest esp-idf
// _zone = netif_name_to_index(address);
// in the interim, we parse the suffix as a zone number
while ((*address != '\0') && (!isdigit(*address))) { // skip all non-digit after '%'
address++;
}
_zone = atol(address) + 1; // increase by one by convention, so we can have zone '0'
while (*address != '\0') {
address++;
}
} else {
// Invalid char
return false;
}
}
if (double_colons == -1 && colons != 7) {
// Too few separators
return false;
}
if (double_colons > -1 && colons > 6) {
// Too many segments (double colon must be at least one zero field)
return false;
}
_address.bytes[colons * 2] = acc >> 8;
_address.bytes[colons * 2 + 1] = acc & 0xff;
colons++;
if (double_colons != -1) {
for (int i = colons * 2 - double_colons * 2 - 1; i >= 0; i--) {
_address.bytes[16 - colons * 2 + double_colons * 2 + i] = _address.bytes[double_colons * 2 + i];
}
for (int i = double_colons * 2; i < 16 - colons * 2 + double_colons * 2; i++) {
_address.bytes[i] = 0;
}
}
_type = IPv6;
return true;
}
IPAddress &IPAddress::operator=(const uint8_t *address) {
// IPv4 only conversion from byte pointer
_type = IPv4;
memset(_address.bytes, 0, sizeof(_address.bytes));
memcpy(&_address.bytes[IPADDRESS_V4_BYTES_INDEX], address, sizeof(uint32_t));
return *this;
}
IPAddress &IPAddress::operator=(const char *address) {
fromString(address);
return *this;
}
IPAddress &IPAddress::operator=(uint32_t address) {
// IPv4 conversion
// See note on conversion/comparison and uint32_t
_type = IPv4;
memset(_address.bytes, 0, sizeof(_address.bytes));
_address.dword[IPADDRESS_V4_DWORD_INDEX] = address;
return *this;
}
IPAddress &IPAddress::operator=(const IPAddress &address) {
_type = address.type();
_zone = address.zone();
memcpy(_address.bytes, address._address.bytes, sizeof(_address.bytes));
return *this;
}
bool IPAddress::operator==(const IPAddress &addr) const {
return (addr._type == _type) && (_type == IPType::IPv4 ? addr._address.dword[IPADDRESS_V4_DWORD_INDEX] == _address.dword[IPADDRESS_V4_DWORD_INDEX] : memcmp(addr._address.bytes, _address.bytes, sizeof(_address.bytes)) == 0);
}
bool IPAddress::operator==(const uint8_t *addr) const {
// IPv4 only comparison to byte pointer
// Can't support IPv6 as we know our type, but not the length of the pointer
return _type == IPv4 && memcmp(addr, &_address.bytes[IPADDRESS_V4_BYTES_INDEX], sizeof(uint32_t)) == 0;
}
uint8_t IPAddress::operator[](int index) const {
if (_type == IPv4) {
return _address.bytes[IPADDRESS_V4_BYTES_INDEX + index];
}
return _address.bytes[index];
}
uint8_t &IPAddress::operator[](int index) {
if (_type == IPv4) {
return _address.bytes[IPADDRESS_V4_BYTES_INDEX + index];
}
return _address.bytes[index];
}
size_t IPAddress::printTo(Print &p) const {
return printTo(p, false);
}
size_t IPAddress::printTo(Print &p, bool includeZone) const {
size_t n = 0;
if (_type == IPv6) {
// IPv6 IETF canonical format: compress left-most longest run of two or more zero fields, lower case
int8_t longest_start = -1;
int8_t longest_length = 1;
int8_t current_start = -1;
int8_t current_length = 0;
for (int8_t f = 0; f < 8; f++) {
if (_address.bytes[f * 2] == 0 && _address.bytes[f * 2 + 1] == 0) {
if (current_start == -1) {
current_start = f;
current_length = 1;
} else {
current_length++;
}
if (current_length > longest_length) {
longest_start = current_start;
longest_length = current_length;
}
} else {
current_start = -1;
}
}
for (int f = 0; f < 8; f++) {
if (f < longest_start || f >= longest_start + longest_length) {
uint8_t c1 = _address.bytes[f * 2] >> 4;
uint8_t c2 = _address.bytes[f * 2] & 0xf;
uint8_t c3 = _address.bytes[f * 2 + 1] >> 4;
uint8_t c4 = _address.bytes[f * 2 + 1] & 0xf;
if (c1 > 0) {
n += p.print((char)(c1 < 10 ? '0' + c1 : 'a' + c1 - 10));
}
if (c1 > 0 || c2 > 0) {
n += p.print((char)(c2 < 10 ? '0' + c2 : 'a' + c2 - 10));
}
if (c1 > 0 || c2 > 0 || c3 > 0) {
n += p.print((char)(c3 < 10 ? '0' + c3 : 'a' + c3 - 10));
}
n += p.print((char)(c4 < 10 ? '0' + c4 : 'a' + c4 - 10));
if (f < 7) {
n += p.print(':');
}
} else if (f == longest_start) {
if (longest_start == 0) {
n += p.print(':');
}
n += p.print(':');
}
}
// add a zone if zone-id is non-zero (causes exception on recent IDF builds)
// if (_zone > 0 && includeZone) {
// n += p.print('%');
// char if_name[NETIF_NAMESIZE];
// netif_index_to_name(_zone, if_name);
// n += p.print(if_name);
// }
// In the interim, we just output the index number
if (_zone > 0 && includeZone) {
n += p.print('%');
// look for the interface name
for (netif *intf = netif_list; intf != nullptr; intf = intf->next) {
if (_zone - 1 == intf->num) {
n += p.print(intf->name[0]);
n += p.print(intf->name[1]);
break;
}
}
n += p.print(_zone - 1);
}
return n;
}
// IPv4
for (int i = 0; i < 3; i++) {
n += p.print(_address.bytes[IPADDRESS_V4_BYTES_INDEX + i], DEC);
n += p.print('.');
}
n += p.print(_address.bytes[IPADDRESS_V4_BYTES_INDEX + 3], DEC);
return n;
}
IPAddress::IPAddress(const ip_addr_t *addr) {
from_ip_addr_t(addr);
}
void IPAddress::to_ip_addr_t(ip_addr_t *addr) const {
#if CONFIG_LWIP_IPV6
if (_type == IPv6) {
addr->type = IPADDR_TYPE_V6;
addr->u_addr.ip6.addr[0] = _address.dword[0];
addr->u_addr.ip6.addr[1] = _address.dword[1];
addr->u_addr.ip6.addr[2] = _address.dword[2];
addr->u_addr.ip6.addr[3] = _address.dword[3];
#if LWIP_IPV6_SCOPES
addr->u_addr.ip6.zone = _zone;
#endif /* LWIP_IPV6_SCOPES */
} else {
addr->type = IPADDR_TYPE_V4;
addr->u_addr.ip4.addr = _address.dword[IPADDRESS_V4_DWORD_INDEX];
}
#else
addr->addr = _address.dword[IPADDRESS_V4_DWORD_INDEX];
#endif
}
IPAddress &IPAddress::from_ip_addr_t(const ip_addr_t *addr) {
#if CONFIG_LWIP_IPV6
if (addr->type == IPADDR_TYPE_V6) {
_type = IPv6;
_address.dword[0] = addr->u_addr.ip6.addr[0];
_address.dword[1] = addr->u_addr.ip6.addr[1];
_address.dword[2] = addr->u_addr.ip6.addr[2];
_address.dword[3] = addr->u_addr.ip6.addr[3];
#if LWIP_IPV6_SCOPES
_zone = addr->u_addr.ip6.zone;
#endif /* LWIP_IPV6_SCOPES */
} else {
#endif
_type = IPv4;
memset(_address.bytes, 0, sizeof(_address.bytes));
#if CONFIG_LWIP_IPV6
_address.dword[IPADDRESS_V4_DWORD_INDEX] = addr->u_addr.ip4.addr;
#else
_address.dword[IPADDRESS_V4_DWORD_INDEX] = addr->addr;
#endif
#if CONFIG_LWIP_IPV6
}
#endif
return *this;
}
#if CONFIG_LWIP_IPV6
esp_ip6_addr_type_t IPAddress::addr_type() const {
if (_type != IPv6) {
return ESP_IP6_ADDR_IS_UNKNOWN;
}
ip_addr_t addr;
to_ip_addr_t(&addr);
return esp_netif_ip6_get_addr_type((esp_ip6_addr_t *)(&(addr.u_addr.ip6)));
}
#endif
#if CONFIG_LWIP_IPV6
const IPAddress IN6ADDR_ANY(IPv6);
#endif
const IPAddress INADDR_NONE(0, 0, 0, 0);

Some files were not shown because too many files have changed in this diff.