actions: twister/clang: cleanup test plan generation
Merged the three files used to generate the per-PR test plan from the changed files: two Python scripts and a shell script are now combined into a single Python script that generates the twister input file from the list of files changed by the PR. This removes a lot of old and obsolete code and simplifies things a bit: there is no longer any need for an intermediate script to call twister; we call it directly in the workflow and use the new test_plan script to generate the test plan. This also re-enables the recently disabled tag-based filtering, which had a bug; the bug is resolved in this new implementation.

On push events we now run twister without the --integration option to catch any issues in the main branch that were not caught in PRs. PRs continue to run with --integration enabled. The push event now runs on 15 builders due to the increased size.

Signed-off-by: Anas Nashif <anas.nashif@intel.com>
Parent: 113c6f249e
Commit: 0b2df8f41c
6 changed files with 389 additions and 605 deletions
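The flow described in the commit message reduces, per CI job, to two steps: generate testplan.csv from the files changed by the PR, then feed that plan to twister with --load-tests. The sketch below is a rough Python equivalent of the pull-request steps shown in the diffs that follow; it is not part of the commit, and the BASE_REF fallback and subset value are illustrative assumptions.

import os
import subprocess

base_ref = os.environ.get("BASE_REF", "main")   # github.base_ref in the workflow
subset = "1/15"                                 # ${{matrix.subset}}/${{ strategy.job-total }} (illustrative)

# 1. Generate testplan.csv from the files changed by the PR.
subprocess.check_call(["python3", "./scripts/ci/test_plan.py",
                       "-c", f"origin/{base_ref}..", "--pull-request"])

# 2. Run twister against the plan (TWISTER_COMMON + PR_OPTIONS from twister.yaml below).
if os.path.exists("testplan.csv") and os.path.getsize("testplan.csv") > 0:
    subprocess.check_call(["./scripts/twister", "--subset", subset,
                           "--load-tests", "testplan.csv",
                           "--inline-logs", "-v", "-N", "-M",
                           "--retry-failed", "3",
                           "--clobber-output", "--integration"])

The push variant drops --integration and the --load-tests argument, and the daily (schedule) variant builds everything with the --build-only --all options instead.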
.github/workflows/clang.yaml (vendored, 51 lines changed)

@@ -24,26 +24,30 @@ jobs:
ZEPHYR_SDK_INSTALL_DIR: /opt/toolchains/zephyr-sdk-0.13.1
CLANG_ROOT_DIR: /usr/lib/llvm-12
COMMIT_RANGE: ${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }}
BASE_REF: ${{ github.base_ref }}
outputs:
report_needed: ${{ steps.twister.outputs.report_needed }}
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.6.0
with:
access_token: ${{ github.token }}

- name: Update PATH for west
- name: Cleanup
run: |
echo "$HOME/.local/bin" >> $GITHUB_PATH
- name: checkout
# hotfix, until we have a better way to deal with existing data
rm -rf zephyr zephyr-testing

- name: Checkout
uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0

- name: west setup
- name: Environment Setup
run: |
pip3 install GitPython
echo "$HOME/.local/bin" >> $GITHUB_PATH
git config --global user.email "bot@zephyrproject.org"
git config --global user.name "Zephyr Bot"
git rebase origin/${BASE_REF}
git log --pretty=oneline | head -n 10
west init -l . || true
west config --global update.narrow true
# In some cases modules are left in a state where they can't be
# updated (i.e. when we cancel a job and the builder is killed),
# So first retry to update, if that does not work, remove all modules

@@ -57,6 +61,7 @@ jobs:
${CLANG_ROOT_DIR}/bin/clang --version
gcc --version
ls -la

- name: Prepare ccache timestamp/data
id: ccache_cache_timestamp
shell: cmake -P {0}

@@ -84,30 +89,20 @@ jobs:
- name: Run Tests with Twister
id: twister
run: |
git config --global user.email "bot@zephyrproject.org"
git config --global user.name "Zephyr Builder"
export ZEPHYR_BASE=${PWD}
export ZEPHYR_TOOLCHAIN_VARIANT=llvm

# check if we need to run a full twister or not based on files changed
SC=$(./scripts/ci/what_changed.py --commits ${COMMIT_RANGE})
# Get twister arguments based on the files changed
./scripts/ci/get_twister_opt.py --commits ${COMMIT_RANGE}
if [ "$SC" = "full" ]; then
# Full twister
python3 ./scripts/ci/test_plan.py --platform ${{ matrix.platform }} -c origin/${BASE_REF}..

# We can limit scope to just what has changed
if [ -s testplan.csv ]; then
echo "::set-output name=report_needed::1";
./scripts/twister --inline-logs -M -N -v -p ${{ matrix.platform }} --retry-failed 2
# Full twister but with options based on changes
./scripts/twister --inline-logs -M -N -v --load-tests testplan.csv --retry-failed 2
else
# We can limit scope to just what has changed
if [ -s modified_tests.args ]; then
# we are working with one platform at a time
sed -i '/--all/d' modified_tests.args
echo "::set-output name=report_needed::1";
# Full twister but with options based on changes
./scripts/twister --inline-logs -M -N -v -p ${{ matrix.platform }} +modified_tests.args --retry-failed 2
else
# if nothing is run, skip reporting step
echo "::set-output name=report_needed::0";
fi
# if nothing is run, skip reporting step
echo "::set-output name=report_needed::0";
fi

- name: ccache stats post
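The clang job above runs one platform per matrix entry and exposes a report_needed output so that the reporting step can be skipped when the generated plan is empty. A minimal sketch of that decision, assuming the same testplan.csv filename; it is not part of the commit.

import os

def report_needed(plan="testplan.csv"):
    # Mirrors the shell test '[ -s testplan.csv ]': the file exists and is non-empty.
    return os.path.exists(plan) and os.path.getsize(plan) > 0

# The job writes 1 or 0 into the step output consumed as steps.twister.outputs.report_needed.
print(f"::set-output name=report_needed::{1 if report_needed() else 0}")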
.github/workflows/twister.yaml (vendored, 76 lines changed)

@@ -30,34 +30,41 @@ jobs:
subset: ${{ steps.output-services.outputs.subset }}
size: ${{ steps.output-services.outputs.size }}
env:
MATRIX_SIZE: 10
MATRIX_SIZE: 15
DAILY_MATRIX_SIZE: 120
ZEPHYR_SDK_INSTALL_DIR: /opt/toolchains/zephyr-sdk-0.13.1
CLANG_ROOT_DIR: /usr/lib/llvm-12
TESTS_PER_BUILDER: 700
COMMIT_RANGE: ${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }}
BASE_REF: ${{ github.base_ref }}
steps:
- name: checkout
- name: Cleanup
run: |
# hotfix, until we have a better way to deal with existing data
rm -rf zephyr zephyr-testing

- name: Checkout
if: github.event_name == 'pull_request_target'
uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0

- name: west setup
- name: Environment Setup
if: github.event_name == 'pull_request_target'
run: |
pip3 install GitPython
git config --global user.email "bot@zephyrproject.org"
git config --global user.name "Zephyr Bot"
git rebase origin/${BASE_REF}
git log --pretty=oneline | head -n 10
west init -l . || true
west config --global update.narrow true
west update 2>&1 1> west.update.log || west update 2>&1 1> west.update.log
west forall -c 'git reset --hard HEAD'
# no need for west update here

- name: Generate Test Plan with Twister
if: github.event_name == 'pull_request_target'
id: test-plan
run: |
git config --global user.email "bot@zephyrproject.org"
git config --global user.name "Zephyr Bot"
export ZEPHYR_BASE=${PWD}
export ZEPHYR_TOOLCHAIN_VARIANT=zephyr
# temporary until we have all PRs rebased on top of this commit.

@@ -65,12 +72,10 @@ jobs:
echo "Your branch is not up to date, you need to rebase on top of latest HEAD of main branch"
exit 1
)
./scripts/ci/run_ci.sh -S -c -b ${{github.base_ref}} -r origin \
-p ${{github.event.pull_request.number}} -R ${COMMIT_RANGE}
# remove all tests to be skipped
grep -v skipped test_file.txt > no_skipped.txt
python3 ./scripts/ci/test_plan.py -c origin/${BASE_REF}.. --pull-request

# get number of tests
lines=$(wc -l < no_skipped.txt)
lines=$(wc -l < testplan.csv)
if [ "$lines" = 1 ]; then
# no tests, so we need 0 nodes
nodes=0

@@ -82,7 +87,7 @@ jobs:
fi
fi
echo "::set-output name=calculated_matrix_size::${nodes}";
rm test_file.txt no_skipped.txt
rm -f testplan.csv

- name: Determine matrix size
id: output-services

@@ -97,7 +102,7 @@ jobs:
elif [ "${{github.event_name}}" = "push" ]; then
subset="[$(seq -s',' 1 ${MATRIX_SIZE})]"
size=${MATRIX_SIZE}
else
elif [ "${{github.event_name}}" = "schedule" ]; then
subset="[$(seq -s',' 1 ${DAILY_MATRIX_SIZE})]"
size=${DAILY_MATRIX_SIZE}
fi

@@ -119,21 +124,35 @@ jobs:
env:
ZEPHYR_SDK_INSTALL_DIR: /opt/toolchains/zephyr-sdk-0.13.1
CLANG_ROOT_DIR: /usr/lib/llvm-12
DAILY_OPTIONS: ' --inline-logs -M -N --build-only --all --retry-failed 3 -v '
TWISTER_COMMON: ' --inline-logs -v -N -M --retry-failed 3 '
DAILY_OPTIONS: ' -M --build-only --all '
PR_OPTIONS: ' --clobber-output --integration '
PUSH_OPTIONS: ' --clobber-output -M '
COMMIT_RANGE: ${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }}
BASE_REF: ${{ github.base_ref }}
steps:
- name: Update PATH for west
- name: Cleanup
run: |
echo "$HOME/.local/bin" >> $GITHUB_PATH
# hotfix, until we have a better way to deal with existing data
rm -rf zephyr zephyr-testing

- name: checkout
- name: Checkout
uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0

- name: west setup
- name: Environment Setup
run: |
pip3 install GitPython
if [ "${{github.event_name}}" = "pull_request_target" ]; then
git config --global user.email "bot@zephyrproject.org"
git config --global user.name "Zephyr Builder"
git rebase origin/${BASE_REF}
git log --pretty=oneline | head -n 10
fi
echo "$HOME/.local/bin" >> $GITHUB_PATH

west init -l . || true
west config --global update.narrow true
west update 2>&1 1> west.update.log || west update 2>&1 1> west.update.log

@@ -179,26 +198,22 @@ jobs:
run: |
export ZEPHYR_BASE=${PWD}
export ZEPHYR_TOOLCHAIN_VARIANT=zephyr
./scripts/ci/run_ci.sh -c -b main -r origin -m ${{matrix.subset}} \
-M ${{ strategy.job-total }}
./scripts/twister --subset ${{matrix.subset}}/${{ strategy.job-total }} ${TWISTER_COMMON} ${PUSH_OPTIONS}

- if: github.event_name == 'pull_request_target'
name: Run Tests with Twister (Pull Request)
run: |
git config --global user.email "bot@zephyrproject.org"
git config --global user.name "Zephyr Builder"
export ZEPHYR_BASE=${PWD}
export ZEPHYR_TOOLCHAIN_VARIANT=zephyr
./scripts/ci/run_ci.sh -c -b ${{github.base_ref}} -r origin \
-m ${{matrix.subset}} -M ${{ strategy.job-total }} \
-p ${{github.event.pull_request.number}} -R ${COMMIT_RANGE}
python3 ./scripts/ci/test_plan.py -c origin/${BASE_REF}.. --pull-request
./scripts/twister --subset ${{matrix.subset}}/${{ strategy.job-total }} --load-tests testplan.csv ${TWISTER_COMMON} ${PR_OPTIONS}

- if: github.event_name == 'schedule'
name: Run Tests with Twister (Daily)
run: |
export ZEPHYR_BASE=${PWD}
export ZEPHYR_TOOLCHAIN_VARIANT=zephyr
./scripts/twister --subset ${{matrix.subset}}/${{ strategy.job-total }} ${DAILY_OPTIONS}
./scripts/twister --subset ${{matrix.subset}}/${{ strategy.job-total }} ${TWISTER_COMMON} ${DAILY_OPTIONS}

- name: ccache stats post
run: |

@@ -211,6 +226,11 @@ jobs:
name: Unit Test Results (Subset ${{ matrix.subset }})
path: twister-out/twister.xml

- name: Cleanup
if: always()
run: |
git clean -dxf

twister-test-results:
name: "Publish Unit Tests Results"
needs: twister-build
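The "Generate Test Plan with Twister" job above sizes the build matrix from the generated plan: TESTS_PER_BUILDER is 700, and a plan containing only the CSV header maps to zero nodes. The hunks shown do not include the arithmetic for the non-empty case, so the ceiling division below is an assumption used purely for illustration.

import math

TESTS_PER_BUILDER = 700   # from the job env above

def calculated_matrix_size(testplan_lines):
    # One line means only the CSV header, i.e. nothing to run.
    if testplan_lines <= 1:
        return 0
    # Assumed: one builder per TESTS_PER_BUILDER tests, rounded up.
    return math.ceil((testplan_lines - 1) / TESTS_PER_BUILDER)

print(calculated_matrix_size(1))     # 0 nodes
print(calculated_matrix_size(1401))  # 2 nodes under the assumed formula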
scripts/ci/get_twister_opt.py (deleted, 204 lines)

@@ -1,204 +0,0 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: Apache-2.0
# Copyright (c) 2021 Intel Corporation

# A script to generate twister options based on modified files.

import re, os
import sh
import argparse
import glob
import yaml

if "ZEPHYR_BASE" not in os.environ:
    exit("$ZEPHYR_BASE environment variable undefined.")

repository_path = os.environ['ZEPHYR_BASE']
sh_special_args = {
    '_tty_out': False,
    '_cwd': repository_path
}

def parse_args():
    parser = argparse.ArgumentParser(
        description="Generate twister argument files based on modified file")
    parser.add_argument('-c', '--commits', default=None,
                        help="Commit range in the form: a..b")
    return parser.parse_args()

def find_archs(files):
    # we match both arch/<arch>/* and include/arch/<arch> and skip common.
    # Some architectures like riscv require special handling, i.e. riscv
    # directory covers 2 architectures known to twister: riscv32 and riscv64.
    archs = set()

    for f in files:
        p = re.match(r"^arch\/([^/]+)\/", f)
        if not p:
            p = re.match(r"^include\/arch\/([^/]+)\/", f)
        if p:
            if p.group(1) != 'common':
                if p.group(1) == 'riscv':
                    archs.add('riscv32')
                    archs.add('riscv64')
                else:
                    archs.add(p.group(1))

    if archs:
        with open("modified_archs.args", "w") as fp:
            fp.write("-a\n%s" %("\n-a\n".join(archs)))

def find_boards(files):
    boards = set()
    all_boards = set()

    for f in files:
        if f.endswith(".rst") or f.endswith(".png") or f.endswith(".jpg"):
            continue
        p = re.match(r"^boards\/[^/]+\/([^/]+)\/", f)
        if p and p.groups():
            boards.add(p.group(1))

    for b in boards:
        suboards = glob.glob("boards/*/%s/*.yaml" %(b))
        for subboard in suboards:
            name = os.path.splitext(os.path.basename(subboard))[0]
            if name:
                all_boards.add(name)

    if all_boards:
        with open("modified_boards.args", "w") as fp:
            fp.write("-p\n%s" %("\n-p\n".join(all_boards)))

def find_tests(files):
    tests = set()
    for f in files:
        if f.endswith(".rst"):
            continue
        d = os.path.dirname(f)
        while d:
            if os.path.exists(os.path.join(d, "testcase.yaml")) or \
                    os.path.exists(os.path.join(d, "sample.yaml")):
                tests.add(d)
                break
            else:
                d = os.path.dirname(d)

    if tests:
        with open("modified_tests.args", "w") as fp:
            fp.write("-T\n%s\n--all" %("\n-T\n".join(tests)))

def _get_match_fn(globs, regexes):
    # Constructs a single regex that tests for matches against the globs in
    # 'globs' and the regexes in 'regexes'. Parts are joined with '|' (OR).
    # Returns the search() method of the compiled regex.
    #
    # Returns None if there are neither globs nor regexes, which should be
    # interpreted as no match.

    if not (globs or regexes):
        return None

    regex = ""

    if globs:
        glob_regexes = []
        for glob in globs:
            # Construct a regex equivalent to the glob
            glob_regex = glob.replace(".", "\\.").replace("*", "[^/]*") \
                             .replace("?", "[^/]")

            if not glob.endswith("/"):
                # Require a full match for globs that don't end in /
                glob_regex += "$"

            glob_regexes.append(glob_regex)

        # The glob regexes must anchor to the beginning of the path, since we
        # return search(). (?:) is a non-capturing group.
        regex += "^(?:{})".format("|".join(glob_regexes))

    if regexes:
        if regex:
            regex += "|"
        regex += "|".join(regexes)

    return re.compile(regex).search

class Tag:
    """
    Represents an entry for a tag in tags.yaml.

    These attributes are available:

    name:
        List of GitHub labels for the area. Empty if the area has no 'labels'
        key.

    description:
        Text from 'description' key, or None if the area has no 'description'
        key
    """
    def _contains(self, path):
        # Returns True if the area contains 'path', and False otherwise

        return self._match_fn and self._match_fn(path) and not \
            (self._exclude_match_fn and self._exclude_match_fn(path))

    def __repr__(self):
        return "<Tag {}>".format(self.name)

def find_tags(files):

    tag_cfg_file = os.path.join(repository_path, 'scripts', 'ci', 'tags.yaml')
    with open(tag_cfg_file, 'r') as ymlfile:
        tags_config = yaml.safe_load(ymlfile)

    tags = {}
    for t,x in tags_config.items():
        tag = Tag()
        tag.exclude = True
        tag.name = t

        # tag._match_fn(path) tests if the path matches files and/or
        # files-regex
        tag._match_fn = _get_match_fn(x.get("files"), x.get("files-regex"))

        # Like tag._match_fn(path), but for files-exclude and
        # files-regex-exclude
        tag._exclude_match_fn = \
            _get_match_fn(x.get("files-exclude"), x.get("files-regex-exclude"))

        tags[tag.name] = tag

    for f in files:
        for t in tags.values():
            if t._contains(f):
                t.exclude = False

    exclude_tags = set()
    for t in tags.values():
        if t.exclude:
            exclude_tags.add(t.name)

    if exclude_tags:
        with open("modified_tags.args", "w") as fp:
            fp.write("-e\n%s" %("\n-e\n".join(exclude_tags)))


if __name__ == "__main__":

    args = parse_args()
    if not args.commits:
        exit(1)

    # pylint does not like the 'sh' library
    # pylint: disable=too-many-function-args,unexpected-keyword-arg
    commit = sh.git("diff", "--name-only", args.commits, **sh_special_args)
    files = commit.split("\n")

    find_boards(files)
    find_archs(files)
    find_tests(files)
    # disabling for now due to #40235
    #find_tags(files)
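The deleted script above did not run any tests itself; it only wrote argparse-style response files (modified_archs.args, modified_boards.args, modified_tests.args, modified_tags.args) that run_ci.sh then handed back to twister with a '+' prefix. The sketch below illustrates that expansion, assuming the '+file' form is argparse's fromfile_prefix_chars convention; the board names are made up.

import argparse

# Recreate the kind of file get_twister_opt.py wrote above (board names made up).
with open("modified_boards.args", "w") as fp:
    fp.write("-p\n%s" % ("\n-p\n".join(["frdm_k64f", "nucleo_f401re"])))

# Assumed: twister accepts '+file' response files via argparse fromfile_prefix_chars,
# which is what '+modified_boards.args' in run_ci.sh relied on.
parser = argparse.ArgumentParser(fromfile_prefix_chars="+")
parser.add_argument("-p", "--platform", action="append", default=[])
args = parser.parse_args(["+modified_boards.args"])
print(args.platform)   # ['frdm_k64f', 'nucleo_f401re']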
scripts/ci/run_ci.sh (deleted, 290 lines)

@@ -1,290 +0,0 @@
#!/bin/bash
# Copyright (c) 2017 Linaro Limited
# Copyright (c) 2018 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Author: Anas Nashif
#
# This script is run in CI and support both pull request and commit modes. So
# it can be used also on commits to the tree or on tags.
# The following options are supports:
# -p  start the script for pull requests
# -m  matrix node number, for example 3 on a 5 node matrix
# -M  total number of nodes in the matrix
# -b  base branch
# -r  the remote to rebase on
#
# The script can be run locally using for example:
# ./scripts/ci/run_ci.sh -b main -r origin -l -R <commit range>

set -xe

twister_options=" --clobber-output --inline-logs -M -N -v --integration"

west_commands_results_file="./pytest_out/west_commands.xml"

matrix_builds=1
matrix=1

function handle_coverage() {
    # Upload to codecov.io only on merged builds or if CODECOV_IO variable
    # is set.
    if [ -n "${CODECOV_IO}" -o -z "${pull_request_nr}" ]; then
        # Capture data
        echo "Running lcov --capture ..."
        lcov --capture \
            --directory twister-out/native_posix/ \
            --directory twister-out/nrf52_bsim/ \
            --directory twister-out/unit_testing/ \
            --output-file lcov.pre.info -q --rc lcov_branch_coverage=1

        # Remove noise
        echo "Exclude data from coverage report..."
        lcov -q \
            --remove lcov.pre.info mylib.c \
            --remove lcov.pre.info tests/\* \
            --remove lcov.pre.info samples/\* \
            --remove lcov.pre.info ext/\* \
            --remove lcov.pre.info *generated* \
            -o lcov.info --rc lcov_branch_coverage=1

        # Cleanup
        rm lcov.pre.info
        rm -rf twister-out out-2nd-pass

        # Upload to codecov.io
        echo "Upload coverage reports to codecov.io"
        bash <(curl -s https://codecov.io/bash) -f "lcov.info" -X coveragepy -X fixes
        rm -f lcov.info
    fi

    rm -rf twister-out out-2nd-pass

}

function handle_compiler_cache() {
    # Give more details in case we fail because of compiler cache
    if [ -f "$HOME/.cache/zephyr/ToolchainCapabilityDatabase.cmake" ]; then
        echo "Dumping the capability database in case we are affected by #9992"
        cat $HOME/.cache/zephyr/ToolchainCapabilityDatabase.cmake
    fi
}

function on_complete() {
    source zephyr-env.sh
    if [ "$1" == "failure" ]; then
        handle_compiler_cache
    fi

    rm -rf ccache $HOME/.cache/zephyr

    if [ "$matrix" = "1" ]; then
        echo "Skip handling coverage data..."
        #handle_coverage
    else
        rm -rf twister-out out-2nd-pass
    fi
}

function build_test_file() {
    # cleanup
    rm -f test_file_boards.txt test_file_tests.txt test_file_archs.txt test_file_full.txt
    touch test_file_boards.txt test_file_tests.txt test_file_archs.txt test_file_full.txt

    twister_exclude_tag_opt=""

    # In a pull-request see if we have changed any tests or board definitions
    if [ -n "${pull_request_nr}" -o -n "${local_run}" ]; then
        ./scripts/zephyr_module.py --twister-out module_tests.args
        ./scripts/ci/get_twister_opt.py --commits ${commit_range}

        # disabled for now due to a bug
        #if [ -s modified_tags.args ]; then
        #    twister_exclude_tag_opt="+modified_tags.args"
        #fi

        if [ -s modified_boards.args ]; then
            ${twister} ${twister_options} ${twister_exclude_tag_opt} \
                +modified_boards.args \
                --save-tests test_file_boards.txt || exit 1
        fi
        if [ -s modified_tests.args ]; then
            ${twister} ${twister_options} ${twister_exclude_tag_opt} \
                +modified_tests.args \
                --save-tests test_file_tests.txt || exit 1
        fi
        if [ -s modified_archs.args ]; then
            ${twister} ${twister_options} ${twister_exclude_tag_opt} \
                +modified_archs.args \
                --save-tests test_file_archs.txt || exit 1
        fi
        rm -f modified_tests.args modified_boards.args modified_archs.args
    fi

    if [ "$SC" == "full" ]; then
        # Save list of tests to be run
        ${twister} ${twister_options} ${twister_exclude_tag_opt} \
            --save-tests test_file_full.txt || exit 1
    fi

    rm -f modified_tags.args

    # Remove headers from all files. We insert it into test_file.txt explicitly
    # so we treat all test_file*.txt files the same.
    tail -n +2 test_file_full.txt > test_file_full_in.txt
    tail -n +2 test_file_archs.txt > test_file_archs_in.txt
    tail -n +2 test_file_tests.txt > test_file_tests_in.txt
    tail -n +2 test_file_boards.txt > test_file_boards_in.txt

    echo -n "Full: "
    wc -l test_file_full.txt
    echo -n "Arch: "
    wc -l test_file_archs.txt
    echo -n "Tests: "
    wc -l test_file_tests.txt
    echo -n "Boards: "
    wc -l test_file_boards.txt

    echo "test,arch,platform,status,extra_args,handler,handler_time,ram_size,rom_size" \
        > test_file.txt
    cat test_file_full_in.txt test_file_archs_in.txt test_file_tests_in.txt \
        test_file_boards_in.txt >> test_file.txt

    echo -n "Total: "
    wc -l test_file.txt
}

function west_setup() {
    # West handling
    git_dir=$(basename $PWD)
    pushd ..
    if [ ! -d .west ]; then
        west init -l ${git_dir}
        west update 1> west.update.log || west update 1> west.update-2.log
        west forall -c 'git reset --hard HEAD'
    fi
    popd
}


while getopts ":p:m:b:r:M:cSfslR:" opt; do
    case $opt in
        c)
            echo "Execute CI" >&2
            main_ci=1
            ;;
        l)
            echo "Executing script locally" >&2
            local_run=1
            main_ci=1
            ;;
        s)
            echo "Success" >&2
            success=1
            ;;
        f)
            echo "Failure" >&2
            failure=1
            ;;
        p)
            echo "Testing a Pull Request: $OPTARG." >&2
            pull_request_nr=$OPTARG
            ;;
        m)
            echo "Running on Matrix $OPTARG" >&2
            matrix=$OPTARG
            ;;
        M)
            echo "Running a matrix of $OPTARG nodes" >&2
            matrix_builds=$OPTARG
            ;;
        b)
            echo "Base Branch: $OPTARG" >&2
            branch=$OPTARG
            ;;
        S)
            output_plan=1
            ;;
        r)
            echo "Remote: $OPTARG" >&2
            remote=$OPTARG
            ;;
        R)
            echo "Range: $OPTARG" >&2
            range=$OPTARG
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            ;;
    esac
done

if [ -n "$main_ci" ]; then

    west_setup

    if [ -z "$branch" ]; then
        echo "No base branch given"
        exit 1
    else
        commit_range=$remote/${branch}..HEAD
        echo "Commit range:" ${commit_range}
    fi

    if [ -n "$range" ]; then
        commit_range=$range
    fi

    source zephyr-env.sh
    twister="${ZEPHYR_BASE}/scripts/twister"

    # Possibly the only record of what exact version is being tested:
    short_git_log='git log -n 5 --oneline --decorate --abbrev=12 '

    # check what files have changed for PRs or local runs. If we are
    # building for a commit than we always do a "Full Run".
    if [ -n "${pull_request_nr}" -o -n "${local_run}" ]; then
        SC=`./scripts/ci/what_changed.py --commits ${commit_range}`
    else
        echo "Full Run"
        SC="full"
    fi

    if [ -n "$pull_request_nr" ]; then
        $short_git_log $commit_range
    fi
    $short_git_log

    build_test_file

    if [ -n "${output_plan}" ]; then
        exit 0
    fi

    echo "+++ run twister"

    # Run a subset of tests based on matrix size
    ${twister} ${twister_options} --load-tests test_file.txt \
        --subset ${matrix}/${matrix_builds} --retry-failed 3

    # Run module tests on matrix #1
    if [ "$matrix" = "1" -a "$SC" == "full" ]; then
        if [ -s module_tests.args ]; then
            ${twister} ${twister_options} \
                +module_tests.args --outdir module_tests
        fi
    fi

    # cleanup
    rm -f test_file*

elif [ -n "$failure" ]; then
    on_complete failure
elif [ -n "$success" ]; then
    on_complete
else
    echo "Nothing to do"
fi
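build_test_file() above assembled the old test_file.txt by concatenating several --save-tests outputs with tail -n +2 and re-inserting the CSV header by hand. The new test_plan.py (next file) performs the same merge in memory and also de-duplicates entries; below is a minimal sketch of that step, assuming partial CSVs that share the same header layout.

import csv

def merge_plans(partial_files, output_file="testplan.csv"):
    header = None
    seen, rows = set(), []
    for fname in partial_files:                  # e.g. the per-filter partial CSVs
        with open(fname, newline='') as fp:
            reader = csv.reader(fp)
            header = next(reader)                # same header in every partial file
            for row in reader:
                if row[3] == 'skipped':          # 'status' column
                    continue
                if tuple(row) not in seen:
                    seen.add(tuple(row))
                    rows.append(row)
    with open(output_file, 'w', newline='') as fp:
        writer = csv.writer(fp)
        writer.writerow(header)
        writer.writerows(rows)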
scripts/ci/test_plan.py (new executable file, 318 lines)

@@ -0,0 +1,318 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: Apache-2.0
# Copyright (c) 2021 Intel Corporation

# A script to generate twister options based on modified files.

import re, os
import sh
import argparse
import glob
import yaml
import json
import fnmatch
import subprocess
import csv
import logging
from git import Git, Repo

if "ZEPHYR_BASE" not in os.environ:
    exit("$ZEPHYR_BASE environment variable undefined.")

repository_path = os.environ['ZEPHYR_BASE']
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)

def _get_match_fn(globs, regexes):
    # Constructs a single regex that tests for matches against the globs in
    # 'globs' and the regexes in 'regexes'. Parts are joined with '|' (OR).
    # Returns the search() method of the compiled regex.
    #
    # Returns None if there are neither globs nor regexes, which should be
    # interpreted as no match.

    if not (globs or regexes):
        return None

    regex = ""

    if globs:
        glob_regexes = []
        for glob in globs:
            # Construct a regex equivalent to the glob
            glob_regex = glob.replace(".", "\\.").replace("*", "[^/]*") \
                             .replace("?", "[^/]")

            if not glob.endswith("/"):
                # Require a full match for globs that don't end in /
                glob_regex += "$"

            glob_regexes.append(glob_regex)

        # The glob regexes must anchor to the beginning of the path, since we
        # return search(). (?:) is a non-capturing group.
        regex += "^(?:{})".format("|".join(glob_regexes))

    if regexes:
        if regex:
            regex += "|"
        regex += "|".join(regexes)

    return re.compile(regex).search

class Tag:
    """
    Represents an entry for a tag in tags.yaml.

    These attributes are available:

    name:
        List of GitHub labels for the area. Empty if the area has no 'labels'
        key.

    description:
        Text from 'description' key, or None if the area has no 'description'
        key
    """
    def _contains(self, path):
        # Returns True if the area contains 'path', and False otherwise

        return self._match_fn and self._match_fn(path) and not \
            (self._exclude_match_fn and self._exclude_match_fn(path))

    def __repr__(self):
        return "<Tag {}>".format(self.name)

class Filters:
    def __init__(self, modified_files, pull_request=False, platforms=[]):
        self.modified_files = modified_files
        self.twister_options = []
        self.full_twister = False
        self.all_tests = []
        self.tag_options = []
        self.pull_request = pull_request
        self.platforms = platforms


    def process(self):
        self.find_tags()
        self.find_excludes()
        self.find_tests()
        self.find_archs()
        self.find_boards()

    def get_plan(self, options):
        fname = "_test_plan_partial.csv"
        cmd = ["scripts/twister", "-c"] + options + ["--save-tests", fname ]
        if self.pull_request:
            cmd.append("--integration")

        p = subprocess.call(cmd)
        with open(fname, newline='') as csvfile:
            csv_reader = csv.reader(csvfile, delimiter=',')
            header = next(csv_reader)
            for e in csv_reader:
                self.all_tests.append(e)
        if os.path.exists(fname):
            os.remove(fname)

    def find_archs(self):
        # we match both arch/<arch>/* and include/arch/<arch> and skip common.
        # Some architectures like riscv require special handling, i.e. riscv
        # directory covers 2 architectures known to twister: riscv32 and riscv64.
        archs = set()

        for f in self.modified_files:
            p = re.match(r"^arch\/([^/]+)\/", f)
            if not p:
                p = re.match(r"^include\/arch\/([^/]+)\/", f)
            if p:
                if p.group(1) != 'common':
                    if p.group(1) == 'riscv':
                        archs.add('riscv32')
                        archs.add('riscv64')
                    else:
                        archs.add(p.group(1))

        _options = []
        for arch in archs:
            _options.extend(["-a", arch ])

        if _options:
            logging.info(f'Potential architecture filters...')
            self.get_plan(_options)

    def find_boards(self):
        boards = set()
        all_boards = set()

        for f in self.modified_files:
            if f.endswith(".rst") or f.endswith(".png") or f.endswith(".jpg"):
                continue
            p = re.match(r"^boards\/[^/]+\/([^/]+)\/", f)
            if p and p.groups():
                boards.add(p.group(1))

        for b in boards:
            suboards = glob.glob("boards/*/%s/*.yaml" %(b))
            for subboard in suboards:
                name = os.path.splitext(os.path.basename(subboard))[0]
                if name:
                    all_boards.add(name)

        _options = []
        for board in all_boards:
            _options.extend(["-p", board ])

        if _options:
            logging.info(f'Potential board filters...')
            self.get_plan(_options)

    def find_tests(self):
        tests = set()
        for f in self.modified_files:
            if f.endswith(".rst"):
                continue
            d = os.path.dirname(f)
            while d:
                if os.path.exists(os.path.join(d, "testcase.yaml")) or \
                        os.path.exists(os.path.join(d, "sample.yaml")):
                    tests.add(d)
                    break
                else:
                    d = os.path.dirname(d)

        _options = []
        for t in tests:
            _options.extend(["-T", t ])

        if _options:
            logging.info(f'Potential test filters...')
            if self.platforms:
                for platform in self.platforms:
                    _options.extend(["-p", platform])
            else:
                _options.append("--all")
            self.get_plan(_options)

    def find_tags(self):

        tag_cfg_file = os.path.join(repository_path, 'scripts', 'ci', 'tags.yaml')
        with open(tag_cfg_file, 'r') as ymlfile:
            tags_config = yaml.safe_load(ymlfile)

        tags = {}
        for t,x in tags_config.items():
            tag = Tag()
            tag.exclude = True
            tag.name = t

            # tag._match_fn(path) tests if the path matches files and/or
            # files-regex
            tag._match_fn = _get_match_fn(x.get("files"), x.get("files-regex"))

            # Like tag._match_fn(path), but for files-exclude and
            # files-regex-exclude
            tag._exclude_match_fn = \
                _get_match_fn(x.get("files-exclude"), x.get("files-regex-exclude"))

            tags[tag.name] = tag

        for f in self.modified_files:
            for t in tags.values():
                if t._contains(f):
                    t.exclude = False

        exclude_tags = set()
        for t in tags.values():
            if t.exclude:
                exclude_tags.add(t.name)

        for tag in exclude_tags:
            self.tag_options.extend(["-e", tag ])

        if exclude_tags:
            logging.info(f'Potential tag based filters...')

    def find_excludes(self):
        with open("scripts/ci/twister_ignore.txt", "r") as twister_ignore:
            ignores = twister_ignore.read().splitlines()
            ignores = filter(lambda x: not x.startswith("#"), ignores)

        found = set()
        files = list(filter(lambda x: x, self.modified_files))

        for pattern in ignores:
            if pattern:
                found.update(fnmatch.filter(files, pattern))

        logging.debug(found)
        logging.debug(files)

        if sorted(files) != sorted(found):
            _options = []
            logging.info(f'Need to run full or partial twister...')
            self.full_twister = True
            if self.platforms:
                for platform in self.platforms:
                    _options.extend(["-p", platform])

            _options.extend(self.tag_options)
            self.get_plan(_options)
        else:
            logging.info(f'No twister needed or partial twister run only...')

def parse_args():
    parser = argparse.ArgumentParser(
        description="Generate twister argument files based on modified file")
    parser.add_argument('-c', '--commits', default=None,
                        help="Commit range in the form: a..b")
    parser.add_argument('-m', '--modified-files', default=None,
                        help="File with information about changed/deleted/added files.")
    parser.add_argument('-o', '--output-file', default="testplan.csv",
                        help="CSV file with the test plan to be passed to twister")
    parser.add_argument('-P', '--pull-request', action="store_true",
                        help="This is a pull request")
    parser.add_argument('-p', '--platform', action="append",
                        help="Limit this for a platform or a list of platforms.")

    return parser.parse_args()


if __name__ == "__main__":

    args = parse_args()
    if args.commits:
        repo = Repo(repository_path)
        commit = repo.git.diff("--name-only", args.commits)
        files = commit.split("\n")
    elif args.modified_files:
        with open(args.modified_files, "r") as fp:
            files = json.load(fp)

    print("Changed files:\n=========")
    print("\n".join(files))
    print("=========")

    f = Filters(files, args.pull_request, args.platform)
    f.process()

    # remove dupes and filtered cases
    dup_free = []
    dup_free_set = set()
    for x in f.all_tests:
        if x[3] == 'skipped':
            continue
        if tuple(x) not in dup_free_set:
            dup_free.append(x)
            dup_free_set.add(tuple(x))

    logging.info(f'Total tests to be run: {len(dup_free)}')
    header = ['test', 'arch', 'platform', 'status', 'extra_args', 'handler',
              'handler_time', 'ram_size', 'rom_size']

    # write plan
    if dup_free:
        with open(args.output_file, 'w', newline='') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow(header)
            writer.writerows(dup_free)
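A short usage sketch for _get_match_fn() from the new script above, which is what the re-enabled tag-based filtering relies on. It assumes ZEPHYR_BASE is set, that it is run from the zephyr tree, and that the script's imports (sh, GitPython, PyYAML) are installed; the globs and regexes are made-up stand-ins for tags.yaml entries.

import sys
sys.path.insert(0, "scripts/ci")
from test_plan import _get_match_fn

# The returned object is re.compile(...).search, so a truthy result means the
# path belongs to the tag's area and the tag cannot be excluded with -e.
match = _get_match_fn(["drivers/serial/", "boards/*/nrf52840dk_nrf52840/*"],
                      [r"^tests/drivers/uart/"])

print(bool(match("drivers/serial/uart_ns16550.c")))            # True: glob ending in /
print(bool(match("tests/drivers/uart/uart_basic/src/main.c"))) # True: regex
print(bool(match("kernel/sched.c")))                           # False: tag can be excluded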
scripts/ci/what_changed.py (deleted, 55 lines)

@@ -1,55 +0,0 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: Apache-2.0
# Copyright (c) 2020 Intel Corporation
# Check if full twister is needed.

import os
import sh
import argparse
import fnmatch


if "ZEPHYR_BASE" not in os.environ:
    exit("$ZEPHYR_BASE environment variable undefined.")

repository_path = os.environ['ZEPHYR_BASE']
sh_special_args = {
    '_tty_out': False,
    '_cwd': repository_path
}

def parse_args():
    parser = argparse.ArgumentParser(
        description="Check if change requires full twister")
    parser.add_argument('-c', '--commits', default=None,
                        help="Commit range in the form: a..b")
    return parser.parse_args()

def main():
    args = parse_args()
    if not args.commits:
        exit(1)

    # pylint does not like the 'sh' library
    # pylint: disable=too-many-function-args,unexpected-keyword-arg
    commit = sh.git("diff", "--name-only", args.commits, **sh_special_args)
    files = set()
    files.update(commit.split("\n"))

    with open("scripts/ci/twister_ignore.txt", "r") as sc_ignore:
        ignores = sc_ignore.read().splitlines()
        ignores = filter(lambda x: not x.startswith("#"), ignores)

    found = set()
    files = list(filter(lambda x: x, files))

    for pattern in ignores:
        if pattern:
            found.update(fnmatch.filter(files, pattern))

    if sorted(files) != sorted(found):
        print("full")


if __name__ == "__main__":
    main()
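The deleted what_changed.py above and find_excludes() in the new test_plan.py share the same ignore-list logic: a twister run is needed unless every changed file matches a pattern in scripts/ci/twister_ignore.txt. A self-contained sketch with made-up patterns and file lists:

import fnmatch

def needs_twister(changed_files, ignore_patterns):
    # Drop empty entries and comment lines, as both scripts do.
    files = [f for f in changed_files if f]
    patterns = [p for p in ignore_patterns if p and not p.startswith("#")]
    ignored = set()
    for pattern in patterns:
        ignored.update(fnmatch.filter(files, pattern))
    # A run is needed if at least one changed file is not covered by the ignore list.
    return sorted(files) != sorted(ignored)

print(needs_twister(["doc/README.rst"], ["doc/*"]))                    # False: docs only
print(needs_twister(["doc/README.rst", "kernel/sched.c"], ["doc/*"]))  # True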