ci: update ci to check all targets and idf

This commit is contained in:
zhouli 2021-08-13 20:27:56 +08:00
parent 1770f425f8
commit f26cfdcea2
17 changed files with 1675 additions and 211 deletions

View file

@ -30,28 +30,38 @@ before_script:
artifacts:
when: always
paths:
- $BUILD_PATH/*/*/build/*.bin
- $BUILD_PATH/*/*/build/*.elf
- $BUILD_PATH/*/*/build/*.map
- $BUILD_PATH/*/*/build/download.config
- $BUILD_PATH/*/*/build/bootloader/*.bin
- $LOG_PATH
- $BUILD_PATH/*/*/*/build/*.bin
- $BUILD_PATH/*/*/*/build/*/*.bin
- $BUILD_PATH/*/*/*/build/*.json
- $BUILD_PATH/*.json
- $LOG_PATH/*
expire_in: 1 week
variables:
IDF_CI_BUILD: "1"
LOG_PATH: "$CI_PROJECT_DIR/log_examples"
BUILD_PATH: "$CI_PROJECT_DIR/build_examples"
SIZE_INFO_LOCATION: "$CI_PROJECT_DIR/log_examples/size_info.txt"
PEDANTIC_CFLAGS: ""
PEDANTIC_CXXFLAGS: ""
script:
# it's not possible to build 100% out-of-tree and have the "artifacts"
# mechanism work, but this is the next best thing
- rm -rf ${BUILD_PATH}
- mkdir ${BUILD_PATH}
- mkdir -p ${LOG_PATH}
# build some of examples
- ${IOT_SOLUTION_PATH}/tools/ci/build_examples.sh "${CI_JOB_NAME}"
- python ${IOT_SOLUTION_PATH}/tools/ci/genarate_build_json.py
- ${IOT_SOLUTION_PATH}/tools/ci/build_examples.sh
build_examples_00:
build_examples_00_with_idf_v4.2:
<<: *build_examples_template
image: espressif/idf:release-v4.2
build_examples_00_with_idf_v4.3:
<<: *build_examples_template
image: espressif/idf:release-v4.3
build_examples_00_with_idf_master:
<<: *build_examples_template
image: espressif/idf:latest
build_docs:
stage: build

View file

@ -18,5 +18,5 @@ SCREEN_DIR = controller_driver/ili9341 \
controller_driver/ssd1322 \
controller_driver/ssd1963
COMPONENT_ADD_INCLUDEDIRS := . iface_driver $(SCREEN_DIR) screen_utility
COMPONENT_SRCDIRS := . iface_driver $(SCREEN_DIR) screen_utility
COMPONENT_ADD_INCLUDEDIRS := . interface_driver $(SCREEN_DIR) screen_utility
COMPONENT_SRCDIRS := . interface_driver $(SCREEN_DIR) screen_utility

View file

@ -14,9 +14,8 @@ endif
COMPONENT_ADD_INCLUDEDIRS := $(COMPONENT_SRCDIRS)
$(COMPONENT_SRCDIRS)/board.o:
@echo "-----------Board Info---------"
@echo "IDF_TARGET = $(IDF_TARGET)"
@echo "Board DIR = $(COMPONENT_SRCDIRS)"
@echo "---------Board Info End---------"
# $(COMPONENT_SRCDIRS)/board.o:
# @echo "-----------Board Info---------"
# @echo "IDF_TARGET = $(IDF_TARGET)"
# @echo "Board DIR = $(COMPONENT_SRCDIRS)"
# @echo "---------Board Info End---------"

View file

@ -7,14 +7,7 @@ PROJECT_NAME := lvgl_coffee
#If IOT_SOLUTION_PATH is not defined, use relative path as default value
IOT_SOLUTION_PATH ?= $(abspath $(shell pwd)/../../../)
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/general/param
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/bus
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/ginput/touch
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/lvgl_gui
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/gdisp
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/lcd_paint
include $(IOT_SOLUTION_PATH)/component.mk
include $(IDF_PATH)/make/project.mk

View file

@ -8,13 +8,6 @@ PROJECT_NAME := lvgl_example
#If IOT_SOLUTION_PATH is not defined, use relative path as default value
IOT_SOLUTION_PATH ?= $(abspath $(shell pwd)/../../../)
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/general/param
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/bus
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/ginput/touch
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/lvgl_gui
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/gdisp
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/lcd_paint
include $(IOT_SOLUTION_PATH)/component.mk
include $(IDF_PATH)/make/project.mk

View file

@ -7,12 +7,6 @@ PROJECT_NAME := lvgl_thermostat
#If IOT_SOLUTION_PATH is not defined, use relative path as default value
IOT_SOLUTION_PATH ?= $(abspath $(shell pwd)/../../../)
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/general/param
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/bus
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/ginput/touch
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/lvgl_gui
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/gdisp
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/lcd_paint
include $(IOT_SOLUTION_PATH)/component.mk
include $(IDF_PATH)/make/project.mk

View file

@ -8,12 +8,6 @@ PROJECT_NAME := lvgl_wificonfig
#If IOT_SOLUTION_PATH is not defined, use relative path as default value
IOT_SOLUTION_PATH ?= $(abspath $(shell pwd)/../../../)
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/general/param
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/bus
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/ginput/touch
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/lvgl_gui
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/gdisp
EXTRA_COMPONENT_DIRS += $(IOT_SOLUTION_PATH)/components/gui/lcd_paint
include $(IOT_SOLUTION_PATH)/component.mk
include $(IDF_PATH)/make/project.mk

162
tools/build_apps.py Executable file
View file

@ -0,0 +1,162 @@
#!/usr/bin/env python
# coding=utf-8
#
# ESP-IDF helper script to build multiple applications. Consumes the input of find_apps.py.
#
import argparse
import logging
import os.path
import re
import sys
from find_build_apps import BUILD_SYSTEMS, BuildError, BuildItem, setup_logging
from find_build_apps.common import SIZE_JSON_FN, rmdir
# This RE will match GCC errors and many other fatal build errors and warnings as well
LOG_ERROR_WARNING = re.compile(r'(error|warning):', re.IGNORECASE)
# Log this many trailing lines from a failed build log, also
LOG_DEBUG_LINES = 25


def main():  # type: () -> None
    """Read a build list (find_apps.py json lines), pick this job's slice, and build it.

    Exits 0 when there is nothing to do, 1 when any build fails.
    """
    parser = argparse.ArgumentParser(description='ESP-IDF app builder')
    parser.add_argument(
        '-v',
        '--verbose',
        action='count',
        help='Increase the logging level of the script. Can be specified multiple times.',
    )
    parser.add_argument(
        '--build-verbose',
        action='store_true',
        help='Enable verbose output from build system.',
    )
    parser.add_argument(
        '--log-file',
        type=argparse.FileType('w'),
        help='Write the script log to the specified file, instead of stderr',
    )
    parser.add_argument(
        '--parallel-count',
        default=1,
        type=int,
        help="Number of parallel build jobs. Note that this script doesn't start the jobs, " +
        'it needs to be executed multiple times with same value of --parallel-count and ' +
        'different values of --parallel-index.',
    )
    parser.add_argument(
        '--parallel-index',
        default=1,
        type=int,
        help='Index (1-based) of the job, out of the number specified by --parallel-count.',
    )
    parser.add_argument(
        '--format',
        default='json',
        choices=['json'],
        help='Format to read the list of builds',
    )
    parser.add_argument(
        '--dry-run',
        action='store_true',
        help="Don't actually build, only print the build commands",
    )
    parser.add_argument(
        '--keep-going',
        action='store_true',
        help="Don't exit immediately when a build fails.",
    )
    parser.add_argument(
        '--output-build-list',
        type=argparse.FileType('w'),
        help='If specified, the list of builds (with all the placeholders expanded) will be written to this file.',
    )
    parser.add_argument(
        '--size-info',
        type=argparse.FileType('a'),
        help='If specified, the test case name and size info json will be written to this file'
    )
    parser.add_argument(
        'build_list',
        type=argparse.FileType('r'),
        nargs='?',
        default=sys.stdin,
        help='Name of the file to read the list of builds from. If not specified, read from stdin.',
    )
    args = parser.parse_args()
    setup_logging(args)

    build_items = [BuildItem.from_json(line) for line in args.build_list]
    if not build_items:
        logging.warning('Empty build list')
        # BUGFIX: the original code constructed SystemExit(0) without raising it,
        # so an empty build list fell through into the slicing code below.
        raise SystemExit(0)

    # Split the full build list into per-job slices of equal size (last one may be short).
    num_builds = len(build_items)
    num_jobs = args.parallel_count
    job_index = args.parallel_index - 1  # convert to 0-based index
    num_builds_per_job = (num_builds + num_jobs - 1) // num_jobs
    min_job_index = num_builds_per_job * job_index
    if min_job_index >= num_builds:
        # BUGFIX: logging.warn is a deprecated alias; use logging.warning.
        logging.warning('Nothing to do for job {} (build total: {}, per job: {})'.format(
            job_index + 1, num_builds, num_builds_per_job))
        raise SystemExit(0)
    max_job_index = min(num_builds_per_job * (job_index + 1) - 1, num_builds - 1)
    logging.info('Total {} builds, max. {} builds per job, running builds {}-{}'.format(
        num_builds, num_builds_per_job, min_job_index + 1, max_job_index + 1))
    builds_for_current_job = build_items[min_job_index:max_job_index + 1]

    # Propagate the CLI options into each BuildItem before building.
    for i, build_info in enumerate(builds_for_current_job):
        index = i + min_job_index + 1
        build_info.index = index
        build_info.dry_run = args.dry_run
        build_info.verbose = args.build_verbose
        build_info.keep_going = args.keep_going
        logging.debug('  Build {}: {}'.format(index, repr(build_info)))
        if args.output_build_list:
            args.output_build_list.write(build_info.to_json_expanded() + '\n')

    failed_builds = []
    for build_info in builds_for_current_job:
        logging.info('Running build {}: {}'.format(build_info.index, repr(build_info)))
        build_system_class = BUILD_SYSTEMS[build_info.build_system]
        try:
            build_system_class.build(build_info)
        except BuildError as e:
            logging.error(str(e))
            if build_info.build_log_path:
                # Surface error/warning lines plus the log tail to ease CI debugging.
                log_filename = os.path.basename(build_info.build_log_path)
                with open(build_info.build_log_path, 'r') as f:
                    lines = [line.rstrip() for line in f.readlines() if line.rstrip()]  # non-empty lines
                    logging.debug('Error and warning lines from {}:'.format(log_filename))
                    for line in lines:
                        if LOG_ERROR_WARNING.search(line):
                            logging.warning('>>> {}'.format(line))
                    logging.debug('Last {} lines of {}:'.format(LOG_DEBUG_LINES, log_filename))
                    for line in lines[-LOG_DEBUG_LINES:]:
                        logging.debug('>>> {}'.format(line))
            if args.keep_going:
                failed_builds.append(build_info)
            else:
                raise SystemExit(1)
        else:
            if args.size_info:
                build_info.write_size_info(args.size_info)
            if not build_info.preserve:
                logging.info('Removing build directory {}'.format(build_info.build_path))
                # we only remove binaries here, log files are still needed by check_build_warnings.py
                rmdir(build_info.build_path, exclude_file_pattern=SIZE_JSON_FN)

    if failed_builds:
        # BUGFIX: message grammar ("build have" -> "builds have").
        logging.error('The following builds have failed:')
        for build in failed_builds:
            logging.error('    {}'.format(build))
        raise SystemExit(1)


if __name__ == '__main__':
    main()

View file

@ -1,198 +1,127 @@
#!/bin/bash
#!/usr/bin/env bash
#
# Build all examples from the examples directory, out of tree to
# ensure they can run when copied to a new directory.
# Find apps and build apps for example_test, custom_test, and unit_test
#
# Runs as part of CI process.
#
# Assumes PWD is IOT_SOLUTION_PATH directory, and will copy examples
# to individual subdirectories, one by one.
#
#
# Without arguments it just builds all examples
#
# With one argument <JOB_NAME> it builds part of the examples. This is a useful for
# parallel execution in CI.
# <JOB_NAME> must look like this:
# <some_text_label>_<num>
# It scans .gitlab-ci.yaml to count number of jobs which have name "<some_text_label>_<num>"
# It scans the filesystem to count all examples
# Based on this, it decides to run a set of examples.
#
# -----------------------------------------------------------------------------
# Safety settings (see https://gist.github.com/ilg-ul/383869cbb01f61a51c4d).
if [[ ! -z ${DEBUG} ]]
then
if [[ -n ${DEBUG_SHELL} ]]; then
set -x # Activate the expand mode if DEBUG is anything but empty.
fi
set -o errexit # Exit if command failed.
set -o pipefail # Exit if pipe failed.
set -o nounset # Exit if variable not set.
if [ -z ${CI_NODE_TOTAL} ]; then
CI_NODE_TOTAL=1
echo "Assuming CI_NODE_TOTAL=${CI_NODE_TOTAL}"
fi
# Remove the initial space and instead use '\n'.
IFS=$'\n\t'
if [ -z ${CI_NODE_INDEX} ]; then
# Gitlab uses a 1-based index
CI_NODE_INDEX=1
echo "Assuming CI_NODE_INDEX=${CI_NODE_INDEX}"
fi
set -o errexit # Exit if command failed.
set -o pipefail # Exit if pipe failed.
set -o nounset # Exit if variable not set.
# -----------------------------------------------------------------------------
function die() {
echo "${1:-"Unknown Error"}" 1>&2
exit 1
# Print the given message (or a default) to stderr and abort the script
# with a non-zero exit status.
# $1: error message (optional)
die() {
    echo "${1:-"Unknown Error"}" 1>&2
    exit 1
}
[ -z "${IOT_SOLUTION_PATH}" ] && die "IOT_SOLUTION_PATH is not set"
[ -z "${IDF_PATH}" ] && die "IDF_PATH is not set"
[ -z "${BUILD_PATH}" ] && die "BUILD_PATH is not set"
[ -z "${LOG_PATH}" ] && die "LOG_PATH is not set"
[ -d "${LOG_PATH}" ] || mkdir -p ${LOG_PATH}
echo "build_examples running in ${PWD}"
[ -d ${BUILD_PATH} ] || mkdir -p ${BUILD_PATH}
[ -d ${LOG_PATH} ] || mkdir -p ${LOG_PATH}
[ -f ${SIZE_INFO_LOCATION} ] && rm ${SIZE_INFO_LOCATION}
# only 0 or 1 arguments
[ $# -le 1 ] || die "Have to run as $(basename $0) [<JOB_NAME>]"
echo "${BUILD_PATH} ${LOG_PATH} ${SIZE_INFO_LOCATION}"
export BATCH_BUILD=1
export V=0 # only build verbose if there's an error
shopt -s lastpipe # Workaround for Bash to use variables in loops (http://mywiki.wooledge.org/BashFAQ/024)
function find_examples() {
LIST_OF_EXAMPLES=($(find ./examples -type d -name main | sort))
local INDEX=0
for FN in "${LIST_OF_EXAMPLES[@]}";
do
if [[ $FN =~ "build/" ]]
then
unset LIST_OF_EXAMPLES[INDEX]
fi
INDEX=$(( $INDEX + 1 ))
done
}
RESULT=0
FAILED_EXAMPLES=""
RESULT_ISSUES=22 # magic number result code for issues found
LOG_SUSPECTED=${LOG_PATH}/common_log.txt
touch ${LOG_SUSPECTED}
LIST_OF_EXAMPLES=[]
find_examples
NUM_OF_EXAMPLES=${#LIST_OF_EXAMPLES[@]} # count number of examples
[ -z ${NUM_OF_EXAMPLES} ] && die "NUM_OF_EXAMPLES is bad"
if [ $# -eq 0 ]
then
START_NUM=0
END_NUM=999
else
JOB_NAME=$1
# parse text prefix at the beginning of string 'some_your_text_NUM'
# (will be 'some_your_text' without last '_')
JOB_PATTERN=$( echo ${JOB_NAME} | sed -n -r 's/^(.*)_[0-9]+$/\1/p' )
[ -z ${JOB_PATTERN} ] && die "JOB_PATTERN is bad"
# parse number 'NUM' at the end of string 'some_your_text_NUM'
JOB_NUM=$( echo ${JOB_NAME} | sed -n -r 's/^.*_([0-9]+)$/\1/p' )
[ -z ${JOB_NUM} ] && die "JOB_NUM is bad"
# count number of the jobs
NUM_OF_JOBS=$( grep -c -E "^${JOB_PATTERN}_[0-9]+:$" "${IOT_SOLUTION_PATH}/.gitlab-ci.yml" )
[ -z ${NUM_OF_JOBS} ] && die "NUM_OF_JOBS is bad"
# separate intervals
#57 / 5 == 12
NUM_OF_EX_PER_JOB=$(( (${NUM_OF_EXAMPLES} + ${NUM_OF_JOBS} - 1) / ${NUM_OF_JOBS} ))
[ -z ${NUM_OF_EX_PER_JOB} ] && die "NUM_OF_EX_PER_JOB is bad"
# ex.: [0; 12); [12; 24); [24; 36); [36; 48); [48; 60)
START_NUM=$(( ${JOB_NUM} * ${NUM_OF_EX_PER_JOB} ))
[ -z ${START_NUM} ] && die "START_NUM is bad"
END_NUM=$(( (${JOB_NUM} + 1) * ${NUM_OF_EX_PER_JOB} ))
[ -z ${END_NUM} ] && die "END_NUM is bad"
export REALPATH=realpath
if [ "$(uname -s)" = "Darwin" ]; then
export REALPATH=grealpath
fi
# Convert LOG_PATH and BUILD_PATH to relative, to make the json file less verbose.
# BUILD_PATH=$(${REALPATH} --relative-to ${IDF_PATH} ${BUILD_PATH})
# LOG_PATH=$(${REALPATH} --relative-to ${IDF_PATH} ${LOG_PATH})
# ALL_BUILD_LIST_JSON="${BUILD_PATH}/list.json"
# JOB_BUILD_LIST_JSON="${BUILD_PATH}/list_job_${CI_NODE_INDEX}.json"
function build_example () {
local ID=$1
shift
local MAKE_FILE=$1
shift
# -----------------------------------------------------------------------------
# common variables, will specify special cases later
# WORK_DIR="--work-dir ${BUILD_PATH}/@f/@w/@t"
# BUILD_DIR="build"
# BUILD_LOG="${LOG_PATH}/@f_@w_@t.txt"
# CONFIG="--config sdkconfig.ci=default
# --config sdkconfig.ci.*=
# --config =default"
local EXAMPLE_DIR=$(dirname "${MAKE_FILE}")
local EXAMPLE_NAME=$(basename "${EXAMPLE_DIR}")
export EXTRA_CFLAGS="${PEDANTIC_CFLAGS}"
export EXTRA_CXXFLAGS="${PEDANTIC_CXXFLAGS}"
echo "Building ${EXAMPLE_NAME} as ${ID}..."
mkdir -p "${BUILD_PATH}/${ID}"
cp -r "${EXAMPLE_DIR}" "${BUILD_PATH}/${ID}"
pushd "${BUILD_PATH}/${ID}/${EXAMPLE_NAME}"
# be stricter in the CI build than the default IDF settings
export EXTRA_CFLAGS="-Werror -Werror=deprecated-declarations"
export EXTRA_CXXFLAGS=${EXTRA_CFLAGS}
# --config rules above explained:
# 1. If sdkconfig.ci exists, use it build the example with configuration name "default"
# 2. If sdkconfig.ci.* exists, use it to build the "*" configuration
# 3. If none of the above exist, build the default configuration under the name "default"
# --work-dir and --build-log above uses "placeholders" @x:
# - @f: full path to the test with slashes replaced with underscores
# - @w: wildcard used as config name
# - @t: target name
# so the workdir .../@f/@w/@t would expand to e.g. tools_test_apps_system_startup/default/esp32
# build non-verbose first
local BUILDLOG=${LOG_PATH}/example_${ID}_log.txt
echo " " > ${BUILDLOG}
# EXTRA_ARGS="
# -p ${IOT_SOLUTION_PATH}/examples
# --build-system ${BUILD_SYSTEM}
# --target ${IDF_TARGET}
# --recursive
# "
# make defconfig >>${BUILDLOG} 2>&1
# make all -j8 >>${BUILDLOG} 2>&1
# ( make print_flash_cmd | tail -n 1 >build/download.config ) >>${BUILDLOG} 2>&1 ||
# {
# RESULT=$?; FAILED_EXAMPLES+=" ${EXAMPLE_NAME}" ;
# }
# This part of the script produces the same result for all the build jobs.
#
# It may be moved to a separate stage (pre-build) later, then the build jobs
# will receive ${BUILD_LIST_JSON} file as an artifact.
#
# If changing the work-dir or build-dir, remember to update the "artifacts" in
# gitlab-ci configs, and IDFApp.py.
# rm -r build >/dev/null &&
# rm sdkconfig >/dev/null &&
idf.py fullclean
idf.py build >>${BUILDLOG} 2>&1
# ${IDF_PATH}/tools/find_apps.py \
# -vv \
# --format json \
# ${WORK_DIR} \
# --build-dir ${BUILD_DIR} \
# --build-log ${BUILD_LOG} \
# --output ${ALL_BUILD_LIST_JSON} \
# ${EXTRA_ARGS} \
# ${CONFIG}
cat ${BUILDLOG}
popd
# The part below is where the actual builds happen
# Build all apps listed in one list_*.json file via build_apps.py, then scan
# the produced logs for warnings with check_build_warnings.py.
# $1: path to a list_*.json file produced by the app-enumeration step
build_check() {
    # Strip directory and extension to derive a per-list job name.
    NAME=$(basename $1)
    NAME=${NAME%.*}
    JOB_BUILD_LIST_JSON="${BUILD_PATH}/list_job_${CI_NODE_INDEX}_${NAME}.json"
    ${IOT_SOLUTION_PATH}/tools/build_apps.py \
        -vv \
        --format json \
        --keep-going \
        --parallel-count ${CI_NODE_TOTAL} \
        --parallel-index ${CI_NODE_INDEX} \
        --output-build-list ${JOB_BUILD_LIST_JSON} \
        --size-info ${SIZE_INFO_LOCATION} \
        $1
    # NOTE(review): ${BUILDLOG} and ${LOG_SUSPECTED} are not defined anywhere in
    # this function (they belonged to the removed build_example() flow); under
    # `set -o nounset` this line would fail. Looks like diff leftover — TODO confirm
    # whether it should be deleted or given defined log paths.
    grep -i "error\|warning" "${BUILDLOG}" 2>&1 >> "${LOG_SUSPECTED}" || :
    # Check for build warnings
    ${IOT_SOLUTION_PATH}/tools/ci/check_build_warnings.py -vv ${JOB_BUILD_LIST_JSON}
}
EXAMPLE_NUM=0
if [[ $END_NUM -gt $NUM_OF_EXAMPLES ]]
then
END_NUM=$NUM_OF_EXAMPLES
fi
for FN in "${LIST_OF_EXAMPLES[@]}";
ALL_BUILD_LIST_JSON=$(ls ${BUILD_PATH})
for f in ${ALL_BUILD_LIST_JSON}
do
if [[ $EXAMPLE_NUM -lt $START_NUM || $EXAMPLE_NUM -ge $END_NUM ]]
then
EXAMPLE_NUM=$(( $EXAMPLE_NUM + 1 ))
continue
fi
TOTAL_NUM=$(( ${END_NUM} - ${START_NUM}))
INDEX=$(( ${EXAMPLE_NUM} - ${START_NUM} + 1))
echo -e "\033[1;34m>>> example [ ${EXAMPLE_NUM} ][ ${INDEX}/${TOTAL_NUM} ] - $FN\033[0m"
build_example "${EXAMPLE_NUM}" "${FN}"
EXAMPLE_NUM=$(( $EXAMPLE_NUM + 1 ))
echo -e "\033[1;34m>>> Start to build ${BUILD_PATH}/$f\033[0m"
build_check ${BUILD_PATH}/$f
done
# # show warnings
# echo -e "\nFound issues:"
# # Ignore the next messages:
# # files end with error: "error.o" or "error.c" or "error.h" or "error.d"
# # "-Werror" in compiler's command line
# # "reassigning to symbol" or "changes choice state" in sdkconfig
# sort -u "${LOG_SUSPECTED}" | \
# grep -v "error.[ochd]\|\ -Werror\|reassigning to symbol\|changes choice state" \
# && RESULT=$RESULT_ISSUES \
# || echo -e "\tNone"
# [ -z ${FAILED_EXAMPLES} ] || echo -e "\nThere are errors in the next examples: $FAILED_EXAMPLES"
# [ $RESULT -eq 0 ] || echo -e "\nFix all warnings and errors above to pass the test!"
# echo -e "\nReturn code = $RESULT"
# exit $RESULT

107
tools/ci/check_build_warnings.py Executable file
View file

@ -0,0 +1,107 @@
#!/usr/bin/env python
# coding=utf-8
#
# CI script to check build logs for warnings.
# Reads the list of builds, in the format produced by find_apps.py or build_apps.py, and finds warnings in the
# log files for every build.
# Exits with a non-zero exit code if any warning is found.
import argparse
import logging
import os
import re
import sys
try:
from find_build_apps import BuildItem, setup_logging
except ImportError:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from find_build_apps import BuildItem, setup_logging
# Matches "error" or "warning" followed by a non-word character (e.g. "error:").
WARNING_REGEX = re.compile(r'(?:error|warning)[^\w]', re.MULTILINE | re.IGNORECASE)

# Known-benign matches that must not be reported: object files whose names
# contain "error", -Werror compiler flags, sdkconfig notices, etc.
IGNORE_WARNS = [
    re.compile(r_str) for r_str in [
        r'library/error\.o',
        r'/.*error\S*\.o',
        r'.*error.*\.c\.obj',
        r'.*error.*\.cpp\.obj',
        r'.*error.*\.cxx\.obj',
        r'.*error.*\.cc\.obj',
        r'-Werror',
        r'error\.d',
        r'/.*error\S*.d',
        r'reassigning to symbol',
        r'changes choice state',
        r'crosstool_version_check\.cmake',
        r'CryptographyDeprecationWarning',
        r'Warning: \d+/\d+ app partitions are too small for binary'
    ]
]


def line_has_warnings(line):  # type: (str) -> bool
    """Return True if ``line`` contains an error/warning not covered by the ignore list."""
    if WARNING_REGEX.search(line) is None:
        return False
    # A candidate match is suppressed when any ignore pattern also occurs on the line.
    return not any(ignored.search(line) for ignored in IGNORE_WARNS)
def main():  # type: () -> None
    """Parse the build list, scan each build's log for warnings, exit 1 if any are found."""
    parser = argparse.ArgumentParser(description='ESP-IDF app builder')
    parser.add_argument(
        '-v',
        '--verbose',
        action='count',
        help='Increase the logging level of the script. Can be specified multiple times.',
    )
    parser.add_argument(
        '--log-file',
        type=argparse.FileType('w'),
        help='Write the script log to the specified file, instead of stderr',
    )
    parser.add_argument(
        'build_list',
        type=argparse.FileType('r'),
        nargs='?',
        default=sys.stdin,
        help='Name of the file to read the list of builds from. If not specified, read from stdin.',
    )
    args = parser.parse_args()
    setup_logging(args)

    build_items = [BuildItem.from_json(line) for line in args.build_list]
    if not build_items:
        logging.warning('Empty build list')
        # BUGFIX: the original constructed SystemExit(0) without raising it,
        # so the empty-list case fell through to the scanning loop below.
        raise SystemExit(0)

    found_warnings = 0
    for build_item in build_items:
        if not build_item.build_log_path:
            logging.debug('No log file for {}'.format(build_item.work_dir))
            continue
        with open(build_item.build_log_path, 'r') as log_file:
            for line_no, line in enumerate(log_file):
                if line_has_warnings(line):
                    logging.error('Issue in app {}, config {}:'.format(build_item.app_dir, build_item.config_name))
                    logging.error(line.rstrip('\n'))
                    logging.error('See {}:{} for details'.format(os.path.basename(build_item.build_log_path),
                                                                 line_no + 1))
                    found_warnings += 1
                    # Report at most one issue per log to keep CI output readable.
                    break
    if found_warnings:
        logging.error('Checked {} builds, found {} warnings'.format(len(build_items), found_warnings))
        raise SystemExit(1)
    logging.info('No warnings found')


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,102 @@
{
"examples": [{
"name": "wav_player",
"buildsystem": [
"cmake",
"make"
],
"targets": [
"esp32",
"esp32s2"
]
},
{
"name": "check_pedestrian_flow",
"buildsystem": [
"cmake",
"make"
],
"targets": [
"esp32",
"esp32s2",
"esp32s3",
"esp32c3"
]
},
{
"name": "lvgl_coffee",
"buildsystem": [
"cmake",
"make"
],
"targets": [
"esp32",
"esp32s2"
]
},
{
"name": "lvgl_example",
"buildsystem": [
"cmake",
"make"
],
"targets": [
"esp32",
"esp32s2"
]
},
{
"name": "lvgl_thermostat",
"buildsystem": [
"cmake",
"make"
],
"targets": [
"esp32",
"esp32s2"
]
},
{
"name": "lvgl_wificonfig",
"buildsystem": [
"cmake",
"make"
],
"targets": [
"esp32",
"esp32s2"
]
},
{
"name": "screen",
"buildsystem": [
"cmake",
"make"
],
"targets": [
"esp32",
"esp32s2"
]
},
{
"name": "sensor_control_led",
"buildsystem": [
"cmake"
],
"targets": [
"esp32",
"esp32s2"
]
},
{
"name": "sensor_hub_monitor",
"buildsystem": [
"cmake"
],
"targets": [
"esp32",
"esp32s2"
]
}
]
}

View file

@ -0,0 +1,200 @@
#!/usr/bin/env python
import json
import os
import pprint
import subprocess
import sys
# =============================================================================
# Service funcs
# =============================================================================
def get_idf_targets():
    """Query the installed ESP-IDF (`idf.py --version`) and return its supported chip targets.

    Falls back to ['esp32'] when the version string cannot be parsed.
    """
    import re  # local import: this module does not import re at top level
    args = [sys.executable, get_idf_path('tools/idf.py'), "--version"]
    output = subprocess.check_output(args).decode('utf-8')
    # BUGFIX: parse "vMAJOR.MINOR" with a regex instead of slicing single
    # characters out of output.split('.') — the old code broke as soon as a
    # version component had more than one digit.
    match = re.search(r'v(\d+)\.(\d+)', output)
    if match is None:
        print('- IDF Version not recognized, assuming esp32 only')
        return ['esp32']
    m = int(match.group(1))
    s = int(match.group(2))
    print('- IDF Version v%d.%d' % (m, s))
    # Compare as a tuple; the old `m * 10 + s` encoding collided for minor >= 10.
    # v4.4+ is treated as supporting all four targets (generalizes the old `v == 44`).
    if (m, s) >= (4, 4):
        TARGETS = ['esp32', 'esp32s2', 'esp32s3', 'esp32c3']
    elif (m, s) == (4, 3):
        TARGETS = ['esp32', 'esp32s2', 'esp32c3']
    elif (m, s) == (4, 2):
        TARGETS = ['esp32', 'esp32s2']
    else:
        TARGETS = ['esp32']
    return TARGETS
def _build_path(path, *paths):
return str(os.path.normpath(os.path.join(path, *paths)).replace('\\', '/'))
def _unify_paths(path_list):
return [_build_path(p) for p in path_list]
def _file2linelist(path):
with open(path) as f:
lines = [line.rstrip() for line in f]
return [str(line) for line in lines]
# =============================================================================
# Test funcs
# =============================================================================
def get_idf_path(path, *paths):
    """Build a normalized path rooted at the IDF_PATH environment variable."""
    root = os.getenv('IDF_PATH')
    return _build_path(root, path, *paths)


def get_iot_solution_path(path, *paths):
    """Build a normalized path rooted at the IOT_SOLUTION_PATH environment variable."""
    root = os.getenv('IOT_SOLUTION_PATH')
    return _build_path(root, path, *paths)
def get_json_name(target, build_system):
    """Return the app-list json path for a (target, build system) pair under BUILD_PATH."""
    build_root = os.getenv('BUILD_PATH')
    return '{}/list_{}_{}.json'.format(build_root, target, build_system)
def read_json_list_from_file(file):
    """Read newline-delimited JSON objects from ``file``.

    Blank lines are skipped. Returns a list of the decoded objects.
    """
    json_list = []
    # Use a context manager so the handle is closed even if json.loads raises;
    # iterate line by line instead of reading the whole file into memory.
    with open(file, 'r') as f_json:
        for line in f_json:
            line = line.strip()
            if line:
                json_list.append(json.loads(line))
    return json_list
def write_json_list_to_file(json_list, file):
    """Write each object in ``json_list`` to ``file``, one JSON document per line.

    Overwrites any existing content of ``file``.
    """
    # Context manager guarantees the handle is closed/flushed even on error.
    with open(file, 'w') as f_json:
        for entry in json_list:
            f_json.write(json.dumps(entry) + '\n')
def apply_targets(app_list, configs, target):
    """Filter ``app_list``, removing apps whose example_config entry excludes ``target``.

    :param app_list: list of dicts with an 'app_dir' key (find_apps.py output)
    :param configs: parsed example_config.json ({'examples': [{'name', 'targets', ...}]})
    :param target: chip target name, e.g. 'esp32'
    :return: new list; apps with no config entry are kept (treated as supporting all targets)
    """
    print('- Applying targets')
    app_list_res = list(app_list)
    for p in app_list:
        for app in configs['examples']:
            if os.path.basename(p['app_dir']) == app['name']:
                # Idiom fix: direct membership test replaces the manual
                # "support" flag loop of the original.
                if target not in app['targets']:
                    print('[%s] unsupport %s' % (app['name'], target))
                    app_list_res.remove(p)
    return app_list_res
def apply_buildsystem(app_list, configs, bs):
    """Filter ``app_list``, removing apps whose example_config entry excludes build system ``bs``.

    :param app_list: list of dicts with an 'app_dir' key (find_apps.py output)
    :param configs: parsed example_config.json ({'examples': [{'name', 'buildsystem', ...}]})
    :param bs: build system name, 'cmake' or 'make'
    :return: new list; apps with no config entry are kept (treated as supporting all build systems)
    """
    print('- Applying buildsystem')
    app_list_res = list(app_list)
    for p in app_list:
        for app in configs['examples']:
            if os.path.basename(p['app_dir']) == app['name']:
                # Idiom fix: direct membership test replaces the manual
                # "support" flag loop of the original.
                if bs not in app['buildsystem']:
                    print('[%s] unsupport %s' % (app['name'], bs))
                    app_list_res.remove(p)
    return app_list_res
def get_apps(target, build_system):
    """Run tools/find_apps.py for (target, build_system) and return the json list path it wrote.

    Raises subprocess.CalledProcessError if find_apps.py exits non-zero.
    """
    print('- Getting paths of apps for %s with %s' % (target, build_system))
    json_file = get_json_name(target, build_system)
    # @f/@w/@t placeholders: full app path / config wildcard / target name
    args = [
        sys.executable,
        get_iot_solution_path('tools/find_apps.py'),
        # '-vv',
        '--format',
        'json',
        '--work-dir',
        os.getenv('BUILD_PATH') + '/@f/@w/@t',
        '--build-dir',
        'build',
        '--build-log',
        os.getenv('LOG_PATH') + '/@f_@w_@t_' + build_system + '.txt',
        '-p',
        os.getenv('IOT_SOLUTION_PATH') + '/examples',
        '--recursive',
        '--target',
        target,
        '--output',
        json_file,
        '--build-system',
        build_system,
        '--config',
        'sdkconfig.ci=default',
        '--config',
        'sdkconfig.ci.*=',
        '--config',
        '=default'
    ]
    # Fix: the decoded stdout was assigned to an unused local; check_output is
    # kept because it raises on a non-zero exit status.
    subprocess.check_output(args)
    return json_file
def diff(first, second):
    """Return the elements present in exactly one of the two collections (symmetric difference)."""
    print('- Comparing...')
    left = set(first)
    right = set(second)
    return list(left - right) + list(right - left)
def generate_app_json(target, build_system, configs_json):
    # Enumerate apps with find_apps.py, then prune the resulting list down to
    # the apps example_config.json declares for this target and build system,
    # rewriting the json list file in place.
    json_file_path = get_apps(target, build_system)
    json_list = read_json_list_from_file(json_file_path)
    # apply target
    json_list = apply_targets(json_list, configs_json, target)
    # apply buildsystem
    json_list = apply_buildsystem(json_list, configs_json, build_system)
    write_json_list_to_file(json_list, json_file_path)
def main():
    """Generate filtered app-list json files for every supported target and build system."""
    # If the target is not specified in the example_config.json file,
    # it is considered that all targets and all build system are supported
    config_path = os.getenv('IOT_SOLUTION_PATH') + '/tools/ci/example_config.json'
    with open(config_path) as config_file:
        configs = json.load(config_file)
    for env_name in ('BUILD_PATH', 'LOG_PATH'):
        directory = os.getenv(env_name)
        if not os.path.exists(directory):
            os.mkdir(directory)
    targets = get_idf_targets()
    # make build system only support esp32
    generate_app_json("esp32", 'make', configs)
    for target in targets:
        generate_app_json(target, 'cmake', configs)
    print('[ DONE ]')


if __name__ == '__main__':
    main()

320
tools/find_apps.py Executable file
View file

@ -0,0 +1,320 @@
#!/usr/bin/env python
# coding=utf-8
#
# ESP-IDF helper script to enumerate the builds of multiple configurations of multiple apps.
# Produces the list of builds. The list can be consumed by build_apps.py, which performs the actual builds.
import argparse
import glob
import json
import logging
import os
import re
import sys
import typing
from find_build_apps import (BUILD_SYSTEM_CMAKE, BUILD_SYSTEMS, DEFAULT_TARGET, BuildItem, BuildSystem, ConfigRule,
config_rules_from_str, setup_logging)
# Helper functions
def dict_from_sdkconfig(path):
    """
    Parse the sdkconfig file at 'path', return name:value pairs as a dict
    """
    pattern = re.compile(r'^([^#=]+)=(.+)$')
    result = {}
    with open(path) as config_file:
        for raw_line in config_file:
            match = pattern.match(raw_line)
            if not match:
                continue
            value = match.group(2)
            # Strip surrounding double quotes from string values.
            if value.startswith('"') and value.endswith('"'):
                value = value[1:-1]
            result[match.group(1)] = value
    return result
# Main logic: enumerating apps and builds
def find_builds_for_app(app_path, work_dir, build_dir, build_log, target_arg,
                        build_system, config_rules, preserve_artifacts=True):
    # type: (str, str, str, str, str, str, typing.List[ConfigRule], bool) -> typing.List[BuildItem]
    """
    Find configurations (sdkconfig file fragments) for the given app, return them as BuildItem objects
    :param app_path: app directory (can be / usually will be a relative path)
    :param work_dir: directory where the app should be copied before building.
        May contain env. variables and placeholders.
    :param build_dir: directory where the build will be done, relative to the work_dir. May contain placeholders.
    :param build_log: path of the build log. May contain placeholders. May be None, in which case the log should go
        into stdout/stderr.
    :param target_arg: the value of IDF_TARGET passed to the script. Used to filter out configurations with
        a different CONFIG_IDF_TARGET value.
    :param build_system: name of the build system, index into BUILD_SYSTEMS dictionary
    :param config_rules: mapping of sdkconfig file name patterns to configuration names
    :param preserve_artifacts: determine if the built binary will be uploaded as artifacts.
    :return: list of BuildItems representing build configuration of the app
    """
    build_items = []  # type: typing.List[BuildItem]
    default_config_name = ''
    for rule in config_rules:
        # A rule with no file name only supplies the fallback config name used
        # when no sdkconfig fragment matches below.
        if not rule.file_name:
            default_config_name = rule.config_name
            continue
        sdkconfig_paths = glob.glob(os.path.join(app_path, rule.file_name))
        # Sorted for deterministic build ordering across runs.
        sdkconfig_paths = sorted(sdkconfig_paths)
        for sdkconfig_path in sdkconfig_paths:
            # Check if the sdkconfig file specifies IDF_TARGET, and if it is matches the --target argument.
            sdkconfig_dict = dict_from_sdkconfig(sdkconfig_path)
            target_from_config = sdkconfig_dict.get('CONFIG_IDF_TARGET')
            if target_from_config is not None and target_from_config != target_arg:
                logging.debug('Skipping sdkconfig {} which requires target {}'.format(
                    sdkconfig_path, target_from_config))
                continue
            # Figure out the config name
            config_name = rule.config_name or ''
            if '*' in rule.file_name:
                # convert glob pattern into a regex; the text matched by '*'
                # becomes the configuration name.
                regex_str = r'.*' + rule.file_name.replace('.', r'\.').replace('*', r'(.*)')
                groups = re.match(regex_str, sdkconfig_path)
                assert groups
                config_name = groups.group(1)
            # Store the fragment path relative to the app directory.
            sdkconfig_path = os.path.relpath(sdkconfig_path, app_path)
            logging.debug('Adding build: app {}, sdkconfig {}, config name "{}"'.format(
                app_path, sdkconfig_path, config_name))
            build_items.append(
                BuildItem(
                    app_path,
                    work_dir,
                    build_dir,
                    build_log,
                    target_arg,
                    sdkconfig_path,
                    config_name,
                    build_system,
                    preserve_artifacts,
                ))
    if not build_items:
        # No sdkconfig fragment matched any rule: build once with the default
        # sdkconfig under the fallback config name.
        logging.debug('Adding build: app {}, default sdkconfig, config name "{}"'.format(app_path, default_config_name))
        return [
            BuildItem(
                app_path,
                work_dir,
                build_dir,
                build_log,
                target_arg,
                None,
                default_config_name,
                build_system,
                preserve_artifacts,
            )
        ]
    return build_items
def find_apps(build_system_class, path, recursive, exclude_list, target):
    # type: (typing.Type[BuildSystem], str, bool, typing.List[str], str) -> typing.List[str]
    """
    Find app directories in path (possibly recursively), which contain apps for the given build system, compatible
    with the given target.
    :param build_system_class: class derived from BuildSystem, representing the build system in use
    :param path: path where to look for apps
    :param recursive: whether to recursively descend into nested directories if no app is found
    :param exclude_list: list of paths to be excluded from the recursive search
    :param target: desired value of IDF_TARGET; apps incompatible with the given target are skipped.
    :return: list of paths of the apps found
    """
    system_name = build_system_class.NAME
    logging.debug('Looking for {} apps in {}{}'.format(system_name, path, ' recursively' if recursive else ''))

    # Non-recursive mode: 'path' itself either is an app or it isn't.
    if not recursive:
        if exclude_list:
            logging.warning('--exclude option is ignored when used without --recursive')
        if build_system_class.is_app(path):
            return [path]
        logging.warning('Path {} specified without --recursive flag, but no {} app found there'.format(
            path, system_name))
        return []

    # Recursive mode: walk the tree, pruning excluded directories and app subtrees.
    found = []  # type: typing.List[str]
    for root, dirs, _ in os.walk(path, topdown=True):
        logging.debug('Entering {}'.format(root))
        if root in exclude_list:
            logging.debug('Skipping {} (excluded)'.format(root))
            del dirs[:]
            continue
        if not build_system_class.is_app(root):
            continue
        logging.debug('Found {} app in {}'.format(system_name, root))
        # Don't recurse into app subdirectories
        del dirs[:]
        targets = build_system_class.supported_targets(root)
        if targets and target in targets:
            found.append(root)
        elif targets:
            logging.debug('Skipping, app only supports targets: ' + ', '.join(targets))
        else:
            logging.debug('Skipping, app has no supported targets')
    return found
def main():
    """
    Command-line entry point.

    Finds IDF apps (either by scanning the given paths, or from a pre-computed
    --app-list file), determines the build configurations for each app, and
    writes the resulting list of builds as JSON, one BuildItem per line.
    """
    parser = argparse.ArgumentParser(description='Tool to generate build steps for IDF apps')
    parser.add_argument(
        '-v',
        '--verbose',
        action='count',
        help='Increase the logging level of the script. Can be specified multiple times.',
    )
    parser.add_argument(
        '--log-file',
        type=argparse.FileType('w'),
        help='Write the script log to the specified file, instead of stderr',
    )
    parser.add_argument(
        '--recursive',
        action='store_true',
        help='Look for apps in the specified directories recursively.',
    )
    parser.add_argument(
        '--build-system',
        choices=BUILD_SYSTEMS.keys()
    )
    parser.add_argument(
        '--work-dir',
        help='If set, the app is first copied into the specified directory, and then built.' +
             'If not set, the work directory is the directory of the app.',
    )
    parser.add_argument(
        '--config',
        action='append',
        help='Adds configurations (sdkconfig file names) to build. This can either be ' +
             'FILENAME[=NAME] or FILEPATTERN. FILENAME is the name of the sdkconfig file, ' +
             'relative to the project directory, to be used. Optional NAME can be specified, ' +
             'which can be used as a name of this configuration. FILEPATTERN is the name of ' +
             'the sdkconfig file, relative to the project directory, with at most one wildcard. ' +
             'The part captured by the wildcard is used as the name of the configuration.',
    )
    parser.add_argument(
        '--build-dir',
        help='If set, specifies the build directory name. Can expand placeholders. Can be either a ' +
             'name relative to the work directory, or an absolute path.',
    )
    parser.add_argument(
        '--build-log',
        help='If specified, the build log will be written to this file. Can expand placeholders.',
    )
    parser.add_argument('--target', help='Build apps for given target.')
    parser.add_argument(
        '--format',
        default='json',
        choices=['json'],
        help='Format to write the list of builds as',
    )
    parser.add_argument(
        '--exclude',
        action='append',
        help='Ignore specified directory (if --recursive is given). Can be used multiple times.',
    )
    parser.add_argument(
        '-o',
        '--output',
        type=argparse.FileType('w'),
        help='Output the list of builds to the specified file',
    )
    parser.add_argument(
        '--app-list',
        default=None,
        help='Scan tests results. Restrict the build/artifacts preservation behavior to apps need to be built. '
             'If the file does not exist, will build all apps and upload all artifacts.'
    )
    parser.add_argument(
        '-p', '--paths',
        nargs='+',
        help='One or more app paths.'
    )
    args = parser.parse_args()
    setup_logging(args)

    # Arguments Validation: --app-list is mutually exclusive with the scan options.
    if args.app_list:
        conflict_args = [args.recursive, args.build_system, args.target, args.exclude, args.paths]
        if any(conflict_args):
            raise ValueError('Conflict settings. "recursive", "build_system", "target", "exclude", "paths" should not '
                             'be specified with "app_list"')
        if not os.path.exists(args.app_list):
            raise OSError('File not found {}'.format(args.app_list))
    else:
        # If the build target is not set explicitly, get it from the environment or use the default one (esp32)
        if not args.target:
            env_target = os.environ.get('IDF_TARGET')
            if env_target:
                logging.info('--target argument not set, using IDF_TARGET={} from the environment'.format(env_target))
                args.target = env_target
            else:
                logging.info('--target argument not set, using IDF_TARGET={} as the default'.format(DEFAULT_TARGET))
                args.target = DEFAULT_TARGET
        if not args.build_system:
            logging.info('--build-system argument not set, using {} as the default'.format(BUILD_SYSTEM_CMAKE))
            args.build_system = BUILD_SYSTEM_CMAKE
        required_args = [args.build_system, args.target, args.paths]
        if not all(required_args):
            raise ValueError('If app_list not set, arguments "build_system", "target", "paths" are required.')

    # Prepare the list of app paths, try to read from the scan_tests result.
    # If the file exists, then follow the file's app_dir and build/artifacts behavior, won't do find_apps() again.
    # If the file not exists, will do find_apps() first, then build all apps and upload all artifacts.
    if args.app_list:
        # Use a context manager so the file handle is closed (original leaked it).
        with open(args.app_list) as app_list_file:
            apps = [json.loads(line) for line in app_list_file]
    else:
        app_dirs = []
        build_system_class = BUILD_SYSTEMS[args.build_system]
        for path in args.paths:
            app_dirs += find_apps(build_system_class, path, args.recursive, args.exclude or [], args.target)
        apps = [{'app_dir': app_dir, 'build': True, 'preserve': True} for app_dir in app_dirs]

    if not apps:
        logging.warning('No apps found')
        # BUG FIX: the original only constructed SystemExit(0) without raising it,
        # so execution continued with an empty app list instead of exiting.
        raise SystemExit(0)
    logging.info('Found {} apps'.format(len(apps)))

    apps.sort(key=lambda x: x['app_dir'])

    # Find compatible configurations of each app, collect them as BuildItems
    build_items = []  # type: typing.List[BuildItem]
    config_rules = config_rules_from_str(args.config or [])
    for app in apps:
        build_items += find_builds_for_app(
            app['app_dir'],
            args.work_dir,
            args.build_dir,
            args.build_log,
            # NOTE(review): entries loaded from --app-list are expected to carry
            # 'target' and 'build_system' keys — confirm against the scan output schema.
            args.target or app['target'],
            args.build_system or app['build_system'],
            config_rules,
            app['preserve'],
        )
    logging.info('Found {} builds'.format(len(build_items)))

    # Write out the BuildItems. Only JSON supported now (will add YAML later).
    if args.format != 'json':
        raise NotImplementedError()
    out = args.output or sys.stdout
    out.writelines([item.to_json() + '\n' for item in build_items])
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()

View file

@ -0,0 +1,24 @@
from .cmake import BUILD_SYSTEM_CMAKE, CMakeBuildSystem
from .common import (DEFAULT_TARGET, BuildError, BuildItem, BuildSystem, ConfigRule, config_rules_from_str,
setup_logging)
from .make import BUILD_SYSTEM_MAKE, MakeBuildSystem
# Mapping from the build system name (as accepted on the command line)
# to the class implementing it.
BUILD_SYSTEMS = {
    BUILD_SYSTEM_MAKE: MakeBuildSystem,
    BUILD_SYSTEM_CMAKE: CMakeBuildSystem,
}

# Public API re-exported by this package.
__all__ = [
    'BuildItem',
    'BuildSystem',
    'BuildError',
    'ConfigRule',
    'config_rules_from_str',
    'setup_logging',
    'DEFAULT_TARGET',
    'CMakeBuildSystem',
    'BUILD_SYSTEM_CMAKE',
    'MakeBuildSystem',
    'BUILD_SYSTEM_MAKE',
    'BUILD_SYSTEMS',
]

View file

@ -0,0 +1,99 @@
import logging
import os
import shutil
import subprocess
import sys
from .common import BuildError, BuildItem, BuildSystem
# Python 2 compatibility: 'typing' is only used for annotations in comments,
# so a missing module is tolerated.
try:
    from typing import Any, Optional
except ImportError:
    pass

BUILD_SYSTEM_CMAKE = 'cmake'
# Path of idf.py inside the ESP-IDF tree; requires IDF_PATH in the environment.
IDF_PY = os.path.join(os.environ['IDF_PATH'], 'tools', 'idf.py')

# While ESP-IDF component CMakeLists files can be identified by the presence of 'idf_component_register' string,
# there is no equivalent for the project CMakeLists files. This seems to be the best option...
CMAKE_PROJECT_LINE = r'include($ENV{IDF_PATH}/tools/cmake/project.cmake)'
class CMakeBuildSystem(BuildSystem):
    """Build system implementation driving idf.py (the CMake-based IDF build)."""

    NAME = BUILD_SYSTEM_CMAKE

    @classmethod
    def build(cls, build_item):  # type: (BuildItem) -> None
        """
        Build one app described by ``build_item`` with idf.py.

        Writes output either to the console or to build_item.build_log_path,
        copies the generated sdkconfig into the build directory, and records
        the idf_size.py JSON output path on success.
        :raises BuildError: if idf.py exits with a non-zero status
        """
        build_path, work_path, extra_cmakecache_items = cls.build_prepare(build_item)
        # Prepare the build arguments
        args = [
            sys.executable,
            IDF_PY,
            '-B',
            build_path,
            '-C',
            work_path,
            '-DIDF_TARGET=' + build_item.target,
        ]
        if extra_cmakecache_items:
            # Forward the test-related sdkconfig options via the CMake cache
            for key, val in extra_cmakecache_items.items():
                args.append('-D{}={}'.format(key, val))
            if 'TEST_EXCLUDE_COMPONENTS' in extra_cmakecache_items \
                    and 'TEST_COMPONENTS' not in extra_cmakecache_items:
                args.append('-DTESTS_ALL=1')
        if build_item.verbose:
            args.append('-v')
        if 'CONFIG_APP_BUILD_BOOTLOADER' in extra_cmakecache_items:
            # In case if secure_boot is enabled then for bootloader build need to add `bootloader` cmd
            args.append('bootloader')
        args.append('build')
        # IDIOM FIX: the original wrapped this join in a redundant format() call.
        cmdline = ' '.join(args)
        logging.info('Running {}'.format(cmdline))

        if build_item.dry_run:
            return

        log_file = None
        build_stdout = sys.stdout
        build_stderr = sys.stderr
        if build_item.build_log_path:
            logging.info('Writing build log to {}'.format(build_item.build_log_path))
            log_file = open(build_item.build_log_path, 'w')
            build_stdout = log_file
            build_stderr = log_file

        try:
            subprocess.check_call(args, stdout=build_stdout, stderr=build_stderr)
        except subprocess.CalledProcessError as e:
            raise BuildError('Build failed with exit code {}'.format(e.returncode))
        else:
            # Also save the sdkconfig file in the build directory
            shutil.copyfile(
                os.path.join(work_path, 'sdkconfig'),
                os.path.join(build_path, 'sdkconfig'),
            )
            build_item.size_json_fp = build_item.get_size_json_fp()
        finally:
            # Always release the log file handle, on success or failure
            if log_file:
                log_file.close()

    @staticmethod
    def _read_cmakelists(app_path):  # type: (str) -> Optional[str]
        """Return the content of app_path/CMakeLists.txt, or None if it doesn't exist."""
        cmakelists_path = os.path.join(app_path, 'CMakeLists.txt')
        if not os.path.exists(cmakelists_path):
            return None
        with open(cmakelists_path, 'r') as cmakelists_file:
            return cmakelists_file.read()

    @staticmethod
    def is_app(path):  # type: (str) -> bool
        """Return True if ``path`` contains a CMake-based IDF project (has the project.cmake include)."""
        cmakelists_file_content = CMakeBuildSystem._read_cmakelists(path)
        if not cmakelists_file_content:
            return False
        if CMAKE_PROJECT_LINE not in cmakelists_file_content:
            return False
        return True

    @classmethod
    def supported_targets(cls, app_path):  # type: (str) -> Any
        """Return the targets supported by the app, as parsed from its README."""
        return cls._supported_targets(app_path)

View file

@ -0,0 +1,466 @@
# coding=utf-8
import fnmatch
import json
import logging
import os
import re
import shutil
import subprocess
import sys
import typing
from abc import abstractmethod
from collections import namedtuple
from io import open
# Default IDF_TARGET used when neither --target nor the environment specifies one
DEFAULT_TARGET = 'esp32'

# Placeholders that may appear in work/build/log paths; expanded by BuildItem._expand()
TARGET_PLACEHOLDER = '@t'      # replaced with the build target (e.g. 'esp32')
WILDCARD_PLACEHOLDER = '@w'    # replaced with the config name (or removed along with one delimiter char)
NAME_PLACEHOLDER = '@n'        # replaced with the base name of the app directory
FULL_NAME_PLACEHOLDER = '@f'   # replaced with the full app path, path separators turned into '_'
INDEX_PLACEHOLDER = '@i'       # replaced with the build item's index (when set)

# Path of idf_size.py inside the ESP-IDF tree; requires IDF_PATH in the environment
IDF_SIZE_PY = os.path.join(os.environ['IDF_PATH'], 'tools', 'idf_size.py')
# File name for the idf_size.py JSON output inside the build directory
SIZE_JSON_FN = 'size.json'

# Matches one KEY=VALUE line of an sdkconfig file (quotes around VALUE are optional)
SDKCONFIG_LINE_REGEX = re.compile(r"^([^=]+)=\"?([^\"\n]*)\"?\n*$")

# If these keys are present in sdkconfig.defaults, they will be extracted and passed to CMake
SDKCONFIG_TEST_OPTS = [
    'EXCLUDE_COMPONENTS',
    'TEST_EXCLUDE_COMPONENTS',
    'TEST_COMPONENTS',
]

# These keys in sdkconfig.defaults are not propagated to the final sdkconfig file:
SDKCONFIG_IGNORE_OPTS = [
    'TEST_GROUPS'
]
# ConfigRule represents one --config argument of find_apps.py.
# file_name is the name of the sdkconfig file fragment, optionally with a single wildcard ('*' character).
# file_name can also be empty to indicate that the default configuration of the app should be used.
# config_name is the name of the corresponding build configuration, or None if the value of wildcard is to be used.
# For example:
#   filename='', config_name='default' — represents the default app configuration, and gives it a name 'default'
#   filename='sdkconfig.*', config_name=None - represents the set of configurations, names match the wildcard value
ConfigRule = namedtuple('ConfigRule', ['file_name', 'config_name'])


def config_rules_from_str(rule_strings):  # type: (typing.List[str]) -> typing.List[ConfigRule]
    """
    Helper function to convert strings like 'file_name=config_name' into ConfigRule objects
    :param rule_strings: list of rules as strings
    :return: list of ConfigRules
    """
    rules = []  # type: typing.List[ConfigRule]
    for rule_str in rule_strings:
        # BUG FIX: split on the first '=' only, so a config name containing '='
        # is preserved. The original used maxsplit=2, which produced 3 items for
        # 'a=b=c' and silently dropped the config name.
        items = rule_str.split('=', 1)
        rules.append(ConfigRule(items[0], items[1] if len(items) == 2 else None))
    return rules
def find_first_match(pattern, path):
    """Return the path of the first file under ``path`` whose name matches
    the glob ``pattern``, or None if there is no match."""
    for dirpath, _, filenames in os.walk(path):
        matches = fnmatch.filter(filenames, pattern)
        if matches:
            return os.path.join(dirpath, matches[0])
    return None
def rmdir(path, exclude_file_pattern=None):
    """Remove ``path`` recursively. If ``exclude_file_pattern`` is given,
    files matching it (and the directories that hold them) are kept."""
    if not exclude_file_pattern:
        shutil.rmtree(path, ignore_errors=True)
        return
    # Walk bottom-up so directories are emptied before we try to remove them.
    for root, subdirs, filenames in os.walk(path, topdown=False):
        for name in filenames:
            if not fnmatch.fnmatch(name, exclude_file_pattern):
                os.remove(os.path.join(root, name))
        for name in subdirs:
            try:
                os.rmdir(os.path.join(root, name))
            except OSError:
                # Directory still holds excluded files — leave it in place
                pass
class BuildItem(object):
    """
    Instance of this class represents one build of an application.
    The parameters which distinguish the build are passed to the constructor.
    """
    def __init__(
        self,
        app_path,
        work_dir,
        build_path,
        build_log_path,
        target,
        sdkconfig_path,
        config_name,
        build_system,
        preserve_artifacts,
    ):
        # These internal variables store the paths with environment variables and placeholders;
        # Public properties with similar names use the _expand method to get the actual paths.
        self._app_dir = app_path
        self._work_dir = work_dir
        self._build_dir = build_path
        self._build_log_path = build_log_path

        self.sdkconfig_path = sdkconfig_path
        self.config_name = config_name
        self.target = target
        self.build_system = build_system
        # Whether build artifacts of this item should be kept/uploaded
        self.preserve = preserve_artifacts

        self._app_name = os.path.basename(os.path.normpath(app_path))

        # Path of the idf_size.py JSON output; set by get_size_json_fp() after a build
        self.size_json_fp = None

        # Some miscellaneous build properties which are set later, at the build stage
        self.index = None
        self.verbose = False
        self.dry_run = False
        self.keep_going = False

        # work_path is where the build actually happens: the copy destination if
        # work_dir is set, otherwise the app directory itself.
        self.work_path = self.work_dir or self.app_dir
        # build_path resolution: default to '<work_path>/build', keep absolute
        # build dirs as-is, and resolve relative ones against work_path.
        if not self.build_dir:
            self.build_path = os.path.join(self.work_path, 'build')
        elif os.path.isabs(self.build_dir):
            self.build_path = self.build_dir
        else:
            self.build_path = os.path.normpath(os.path.join(self.work_path, self.build_dir))

    @property
    def app_dir(self):
        """
        :return: directory of the app
        """
        return self._expand(self._app_dir)

    @property
    def work_dir(self):
        """
        :return: directory where the app should be copied to, prior to the build. Can be None, which means that the app
        directory should be used.
        """
        return self._expand(self._work_dir)

    @property
    def build_dir(self):
        """
        :return: build directory, either relative to the work directory (if relative path is used) or absolute path.
        """
        return self._expand(self._build_dir)

    @property
    def build_log_path(self):
        """
        :return: path of the build log file
        """
        return self._expand(self._build_log_path)

    def __repr__(self):
        # Human-readable one-line description of this build, used in log messages
        return '({}) Build app {} for target {}, sdkconfig {} in {}'.format(
            self.build_system,
            self.app_dir,
            self.target,
            self.sdkconfig_path or '(default)',
            self.build_dir,
        )

    def to_json(self):  # type: () -> str
        """
        :return: JSON string representing this object
        """
        return self._to_json(self._app_dir, self._work_dir, self._build_dir, self._build_log_path)

    def to_json_expanded(self):  # type: () -> str
        """
        :return: JSON string representing this object, with all placeholders in paths expanded
        """
        return self._to_json(self.app_dir, self.work_dir, self.build_dir, self.build_log_path)

    def _to_json(self, app_dir, work_dir, build_dir, build_log_path):  # type: (str, str, str, str) -> str
        """
        Internal function, called by to_json and to_json_expanded
        """
        return json.dumps({
            'build_system': self.build_system,
            'app_dir': app_dir,
            'work_dir': work_dir,
            'build_dir': build_dir,
            'build_log_path': build_log_path,
            'sdkconfig': self.sdkconfig_path,
            'config': self.config_name,
            'target': self.target,
            'verbose': self.verbose,
            'preserve': self.preserve,
        })

    @staticmethod
    def from_json(json_str):  # type: (typing.Text) -> BuildItem
        """
        :return: Get the BuildItem from a JSON string
        """
        d = json.loads(str(json_str))
        result = BuildItem(
            app_path=d['app_dir'],
            work_dir=d['work_dir'],
            build_path=d['build_dir'],
            build_log_path=d['build_log_path'],
            sdkconfig_path=d['sdkconfig'],
            config_name=d['config'],
            target=d['target'],
            build_system=d['build_system'],
            preserve_artifacts=d['preserve']
        )
        # 'verbose' is not a constructor parameter, restore it separately
        result.verbose = d['verbose']
        return result

    def _expand(self, path):  # type: (str) -> str
        """
        Internal method, expands any of the placeholders in {app,work,build} paths.
        """
        if not path:
            return path

        # '@i' is only expanded once the index has been assigned at the build stage
        if self.index is not None:
            path = path.replace(INDEX_PLACEHOLDER, str(self.index))
        path = path.replace(TARGET_PLACEHOLDER, self.target)
        path = path.replace(NAME_PLACEHOLDER, self._app_name)
        if (FULL_NAME_PLACEHOLDER in path):  # to avoid recursion to the call to app_dir in the next line:
            path = path.replace(FULL_NAME_PLACEHOLDER, self.app_dir.replace(os.path.sep, '_'))
        wildcard_pos = path.find(WILDCARD_PLACEHOLDER)
        if wildcard_pos != -1:
            if self.config_name:
                # if config name is defined, put it in place of the placeholder
                path = path.replace(WILDCARD_PLACEHOLDER, self.config_name)
            else:
                # otherwise, remove the placeholder and one character on the left
                # (which is usually an underscore, dash, or other delimiter)
                left_of_wildcard = max(0, wildcard_pos - 1)
                right_of_wildcard = wildcard_pos + len(WILDCARD_PLACEHOLDER)
                path = path[0:left_of_wildcard] + path[right_of_wildcard:]
        # Finally, expand any environment variable references in the path
        path = os.path.expandvars(path)
        return path

    def get_size_json_fp(self):
        """
        Run idf_size.py on the build's .map file (if not already done) and
        return the path of the resulting JSON file in the build directory.
        :raises ValueError: if no .map file is found under the build directory
        """
        # Reuse a previously generated size JSON if it still exists
        if self.size_json_fp and os.path.exists(self.size_json_fp):
            return self.size_json_fp

        # Both directories must exist, i.e. the app must have been built already
        assert os.path.exists(self.build_path)
        assert os.path.exists(self.work_path)
        map_file = find_first_match('*.map', self.build_path)
        if not map_file:
            raise ValueError('.map file not found under "{}"'.format(self.build_path))

        size_json_fp = os.path.join(self.build_path, SIZE_JSON_FN)
        idf_size_args = [
            sys.executable,
            IDF_SIZE_PY,
            '--json',
            '-o', size_json_fp,
            map_file
        ]
        subprocess.check_call(idf_size_args)
        return size_json_fp

    def write_size_info(self, size_info_fs):
        """
        Append one JSON line describing this build's size-info file to the
        given open file object.
        :param size_info_fs: writable file object collecting size info records
        :raises OSError: if get_size_json_fp() hasn't been run for this build yet
        """
        if not self.size_json_fp or (not os.path.exists(self.size_json_fp)):
            raise OSError('Run get_size_json_fp() for app {} after built binary'.format(self.app_dir))
        size_info_dict = {
            'app_name': self._app_name,
            'config_name': self.config_name,
            'target': self.target,
            'path': self.size_json_fp,
        }
        size_info_fs.write(json.dumps(size_info_dict) + '\n')
class BuildSystem:
    """
    Class representing a build system.
    Derived classes implement the methods below.
    Objects of these classes aren't instantiated, instead the class (type object) is used.
    """
    NAME = 'undefined'
    # Matches the 'Supported Targets' header used in example README files
    SUPPORTED_TARGETS_REGEX = re.compile(r'Supported [Tt]argets((?:[ |]+(?:[0-9a-zA-Z\-]+))+)')

    # Maps the target spelling used in README files to the IDF_TARGET value
    FORMAL_TO_USUAL = {
        'ESP32': 'esp32',
        'ESP32-S2': 'esp32s2',
        'ESP32-S3': 'esp32s3',
        'ESP32-C3': 'esp32c3',
        'ESP32-H2': 'esp32h2',
        'Linux': 'linux',
    }

    @classmethod
    def build_prepare(cls, build_item):
        """
        Common pre-build step: prepare the work and build directories and
        generate the merged sdkconfig file for ``build_item``.

        :return: (build_path, work_path, extra_cmakecache_items) when cls.NAME
                 is 'cmake', otherwise (build_path, work_path)
        """
        app_path = build_item.app_dir
        work_path = build_item.work_path
        build_path = build_item.build_path

        # If building out of the app directory, refresh the copy of the app
        if work_path != app_path:
            if os.path.exists(work_path):
                logging.debug('Work directory {} exists, removing'.format(work_path))
                if not build_item.dry_run:
                    shutil.rmtree(work_path)
            logging.debug('Copying app from {} to {}'.format(app_path, work_path))
            if not build_item.dry_run:
                shutil.copytree(app_path, work_path)

        # Always start from a clean build directory
        if os.path.exists(build_path):
            logging.debug('Build directory {} exists, removing'.format(build_path))
            if not build_item.dry_run:
                shutil.rmtree(build_path)

        if not build_item.dry_run:
            os.makedirs(build_path)

        # Prepare the sdkconfig file, from the contents of sdkconfig.defaults (if exists) and the contents of
        # build_info.sdkconfig_path, i.e. the config-specific sdkconfig file.
        #
        # Note: the build system supports taking multiple sdkconfig.defaults files via SDKCONFIG_DEFAULTS
        # CMake variable. However here we do this manually to perform environment variable expansion in the
        # sdkconfig files.
        sdkconfig_defaults_list = ['sdkconfig.defaults', 'sdkconfig.defaults.' + build_item.target]
        if build_item.sdkconfig_path:
            sdkconfig_defaults_list.append(build_item.sdkconfig_path)

        sdkconfig_file = os.path.join(work_path, 'sdkconfig')
        if os.path.exists(sdkconfig_file):
            logging.debug('Removing sdkconfig file: {}'.format(sdkconfig_file))
            if not build_item.dry_run:
                os.unlink(sdkconfig_file)

        logging.debug('Creating sdkconfig file: {}'.format(sdkconfig_file))
        extra_cmakecache_items = {}
        if not build_item.dry_run:
            # Concatenate the sdkconfig fragments into one sdkconfig file
            with open(sdkconfig_file, 'w') as f_out:
                for sdkconfig_name in sdkconfig_defaults_list:
                    sdkconfig_path = os.path.join(work_path, sdkconfig_name)
                    if not sdkconfig_path or not os.path.exists(sdkconfig_path):
                        continue
                    logging.debug('Appending {} to sdkconfig'.format(sdkconfig_name))
                    with open(sdkconfig_path, 'r') as f_in:
                        for line in f_in:
                            if not line.endswith('\n'):
                                line += '\n'
                            if cls.NAME == 'cmake':
                                # For cmake builds, test-related options are pulled out of
                                # the sdkconfig and passed via the CMake cache instead.
                                m = SDKCONFIG_LINE_REGEX.match(line)
                                key = m.group(1) if m else None
                                if key in SDKCONFIG_TEST_OPTS:
                                    extra_cmakecache_items[key] = m.group(2)
                                    continue
                                if key in SDKCONFIG_IGNORE_OPTS:
                                    continue
                            # Expand environment variable references in the sdkconfig line
                            f_out.write(os.path.expandvars(line))
        else:
            # Dry run: only log which sdkconfig fragments would be considered
            for sdkconfig_name in sdkconfig_defaults_list:
                sdkconfig_path = os.path.join(app_path, sdkconfig_name)
                if not sdkconfig_path:
                    continue
                logging.debug('Considering sdkconfig {}'.format(sdkconfig_path))
                if not os.path.exists(sdkconfig_path):
                    continue
                logging.debug('Appending {} to sdkconfig'.format(sdkconfig_name))

        # The preparation of build is finished. Implement the build part in sub classes.
        if cls.NAME == 'cmake':
            return build_path, work_path, extra_cmakecache_items
        else:
            return build_path, work_path

    @staticmethod
    @abstractmethod
    def build(build_item):
        # Perform the build for one BuildItem; implemented by subclasses.
        pass

    @staticmethod
    @abstractmethod
    def is_app(path):
        # Return True if 'path' contains an app for this build system; implemented by subclasses.
        pass

    @staticmethod
    def _read_readme(app_path):
        """
        Return the content of the app's README.md/README.rst, also checking the
        parent directory (for sub-app layouts), or None if no README is found.
        """
        # Markdown supported targets should be:
        # e.g. | Supported Targets | ESP32 |
        #      | ----------------- | ----- |
        # reStructuredText supported targets should be:
        # e.g. ================= =====
        #      Supported Targets ESP32
        #      ================= =====
        def get_md_or_rst(app_path):
            # Prefer README.md; fall back to README.rst
            readme_path = os.path.join(app_path, 'README.md')
            if not os.path.exists(readme_path):
                readme_path = os.path.join(app_path, 'README.rst')
                if not os.path.exists(readme_path):
                    return None
            return readme_path

        readme_path = get_md_or_rst(app_path)
        # Handle sub apps situation, e.g. master-slave
        if not readme_path:
            readme_path = get_md_or_rst(os.path.dirname(app_path))
        if not readme_path:
            return None
        with open(readme_path, 'r', encoding='utf8') as readme_file:
            return readme_file.read()

    @classmethod
    def _supported_targets(cls, app_path):
        """
        Parse the 'Supported Targets' header of the app's README and return the
        corresponding IDF_TARGET values. Returns all known targets if there is
        no README or no such header.
        :raises NotImplementedError: if multiple headers are found, or a target
                name in the header is not in FORMAL_TO_USUAL
        """
        readme_file_content = BuildSystem._read_readme(app_path)
        if not readme_file_content:
            return cls.FORMAL_TO_USUAL.values()  # supports all targets if no readme found
        match = re.findall(BuildSystem.SUPPORTED_TARGETS_REGEX, readme_file_content)
        if not match:
            return cls.FORMAL_TO_USUAL.values()  # supports all targets if no such header in readme
        if len(match) > 1:
            raise NotImplementedError("Can't determine the value of SUPPORTED_TARGETS in {}".format(app_path))
        support_str = match[0].strip()

        targets = []
        # The captured header may use '|' (markdown table) and/or spaces as separators
        for part in support_str.split('|'):
            for inner in part.split(' '):
                inner = inner.strip()
                if not inner:
                    continue
                elif inner in cls.FORMAL_TO_USUAL:
                    targets.append(cls.FORMAL_TO_USUAL[inner])
                else:
                    raise NotImplementedError("Can't recognize value of target {} in {}, now we only support '{}'"
                                              .format(inner, app_path, ', '.join(cls.FORMAL_TO_USUAL.keys())))
        return targets

    @classmethod
    @abstractmethod
    def supported_targets(cls, app_path):
        # Return the list of targets the app supports; implemented by subclasses.
        pass
class BuildError(RuntimeError):
    """Raised by the build system classes when a build fails (non-zero exit code)."""
    pass
def setup_logging(args):
"""
Configure logging module according to the number of '--verbose'/'-v' arguments and the --log-file argument.
:param args: namespace obtained from argparse
"""
if not args.verbose:
log_level = logging.WARNING
elif args.verbose == 1:
log_level = logging.INFO
else:
log_level = logging.DEBUG
logging.basicConfig(
format='%(levelname)s: %(message)s',
stream=args.log_file or sys.stderr,
level=log_level,
)

View file

@ -0,0 +1,72 @@
import logging
import os
import shlex
import subprocess
import sys
from .common import BuildError, BuildSystem
# Line identifying a Makefile-based ESP-IDF project (counterpart of the
# CMake project line check):
MAKE_PROJECT_LINE = r'include $(IDF_PATH)/make/project.mk'

BUILD_SYSTEM_MAKE = 'make'

# Python 2/3 compatibility: 'basestring' only exists in Python 2.
try:
    string_type = basestring  # type: ignore
except NameError:
    string_type = str
class MakeBuildSystem(BuildSystem):
    """Build system implementation for the legacy GNU Make based ESP-IDF build."""

    NAME = BUILD_SYSTEM_MAKE

    @classmethod
    def build(cls, build_item):
        """
        Build one app described by ``build_item`` using 'make'.

        Runs the standard sequence of make commands in the work directory,
        writing output to the console or to build_item.build_log_path, and
        records the idf_size.py JSON output path on success.
        :raises BuildError: if any make command exits with a non-zero status
        """
        build_path, work_path = cls.build_prepare(build_item)
        commands = [
            'make clean',
            'make defconfig',
            'make all',
            # In case if secure_boot is enabled then for bootloader build need to add `bootloader` cmd
            'make bootloader',
            'make print_flash_cmd',
        ]

        log_file = None
        build_stdout = sys.stdout
        build_stderr = sys.stderr
        if build_item.build_log_path:
            logging.info('Writing build log to {}'.format(build_item.build_log_path))
            log_file = open(build_item.build_log_path, 'w')
            build_stdout = log_file
            build_stderr = log_file

        try:
            for cmd in commands:
                cmd = shlex.split(cmd) if isinstance(cmd, string_type) else cmd
                try:
                    subprocess.check_call(cmd, stdout=build_stdout, stderr=build_stderr, cwd=work_path)
                except subprocess.CalledProcessError as e:
                    raise BuildError('Build failed with exit code {}'.format(e.returncode))
        finally:
            # BUG FIX: the original only closed the log file on the failure path,
            # leaking the handle after a successful build.
            if log_file:
                log_file.close()

        build_item.size_json_fp = build_item.get_size_json_fp()

    @staticmethod
    def is_app(path):
        """Return True if ``path`` contains a Makefile-based IDF project (has the project.mk include)."""
        makefile_path = os.path.join(path, 'Makefile')
        if not os.path.exists(makefile_path):
            return False
        with open(makefile_path, 'r') as makefile:
            makefile_content = makefile.read()
        if MAKE_PROJECT_LINE not in makefile_content:
            return False
        return True

    @classmethod
    def supported_targets(cls, app_path):
        """The make-based build system only supports the esp32 target."""
        readme_supported_targets = cls._supported_targets(app_path)
        if readme_supported_targets and 'esp32' in readme_supported_targets:
            return ['esp32']
        else:
            return []