scripts: twister: Unify Twister Statuses
The various status classes were unified into a single class, TwisterStatus. This change anticipates further streamlining of Twister's approach to status handling. Code guarding Twister's status properties was shortened to a value check only. QEMUOutputStatus was left separate, as doubts were raised about whether it should remain a status; keeping it separate makes its eventual removal easier.

Signed-off-by: Lukasz Mrugala <lukaszx.mrugala@intel.com>
parent 6f452e81f7
commit 212f48c146
18 changed files with 570 additions and 554 deletions
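For orientation, the following is a minimal sketch of what this change converges on, assembled from the hunks below rather than taken verbatim from any one file: a single TwisterStatus enum (the exact member list is assumed from the usages visible in this diff), plus a status property whose setter guards assignment with only a value check against that enum, as the Harness class does further down. StatusHolder is a placeholder name used purely for illustration.

from enum import Enum
import logging

logger = logging.getLogger("twister")


class TwisterStatus(str, Enum):
    """Single status enum replacing the per-entity status classes (sketch)."""

    def __str__(self):
        return str(self.value)

    NONE = None        # kept so "no status yet" keeps working as before
    BLOCK = 'blocked'
    STARTED = 'started'
    FILTER = 'filtered'
    ERROR = 'error'
    FAIL = 'failed'
    PASS = 'passed'
    SKIP = 'skipped'


class StatusHolder:
    """Sketch of the guarded status property used by Harness in this commit."""

    def __init__(self):
        self._status = TwisterStatus.NONE

    @property
    def status(self) -> TwisterStatus:
        return self._status

    @status.setter
    def status(self, value: TwisterStatus) -> None:
        # The guard is reduced to a value check: anything without an
        # equivalent TwisterStatus member is rejected and logged.
        try:
            key = value.name if isinstance(value, Enum) else value
            self._status = TwisterStatus[key]
        except KeyError:
            logger.warning(f'Assigned status "{value}" has no equivalent '
                           f'in TwisterStatus; assignment was ignored.')

Because TwisterStatus mixes in str, existing string comparisons and JSON serialization of statuses keep working, while illegal assignments are caught at the property boundary.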
@@ -23,7 +23,8 @@ from queue import Queue, Empty
from twisterlib.environment import ZEPHYR_BASE, strip_ansi_sequences
from twisterlib.error import TwisterException
from twisterlib.platform import Platform
from twisterlib.statuses import HarnessStatus, OutputStatus, TestCaseStatus, TestInstanceStatus
from twisterlib.statuses import TwisterStatus

sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/build_helpers"))
from domains import Domains
@@ -102,7 +103,7 @@ class Handler:
terminate_process(proc)
self.terminated = True

def _verify_ztest_suite_name(self, harness_state, detected_suite_names, handler_time):
def _verify_ztest_suite_name(self, harness_status, detected_suite_names, handler_time):
"""
If test suite names was found in test's C source code, then verify if
detected suite names from output correspond to expected suite names

@@ -112,7 +113,7 @@ class Handler:
logger.debug(f"Expected suite names:{expected_suite_names}")
logger.debug(f"Detected suite names:{detected_suite_names}")
if not expected_suite_names or \
not harness_state == HarnessStatus.PASS:
not harness_status == TwisterStatus.PASS:
return
if not detected_suite_names:
self._missing_suite_name(expected_suite_names, handler_time)
@ -128,10 +129,10 @@ class Handler:
|
|||
Change result of performed test if problem with missing or unpropper
|
||||
suite name was occurred.
|
||||
"""
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
self.instance.execution_time = handler_time
|
||||
for tc in self.instance.testcases:
|
||||
tc.status = TestCaseStatus.FAIL
|
||||
tc.status = TwisterStatus.FAIL
|
||||
self.instance.reason = f"Testsuite mismatch"
|
||||
logger.debug("Test suite names were not printed or some of them in " \
|
||||
"output do not correspond with expected: %s",
|
||||
|
|
@ -142,15 +143,15 @@ class Handler:
|
|||
# only for Ztest tests:
|
||||
harness_class_name = type(harness).__name__
|
||||
if self.suite_name_check and harness_class_name == "Test":
|
||||
self._verify_ztest_suite_name(harness.state, harness.detected_suite_names, handler_time)
|
||||
if self.instance.status == TestInstanceStatus.FAIL:
|
||||
self._verify_ztest_suite_name(harness.status, harness.detected_suite_names, handler_time)
|
||||
if self.instance.status == TwisterStatus.FAIL:
|
||||
return
|
||||
if not harness.matched_run_id and harness.run_id_exists:
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
self.instance.execution_time = handler_time
|
||||
self.instance.reason = "RunID mismatch"
|
||||
for tc in self.instance.testcases:
|
||||
tc.status = TestCaseStatus.FAIL
|
||||
tc.status = TwisterStatus.FAIL
|
||||
|
||||
self.instance.record(harness.recording)
|
||||
|
||||
|
|
@ -216,7 +217,7 @@ class BinaryHandler(Handler):
|
|||
log_out_fp.write(strip_ansi_sequences(line_decoded))
|
||||
log_out_fp.flush()
|
||||
harness.handle(stripped_line)
|
||||
if harness.state != HarnessStatus.NONE:
|
||||
if harness.status != TwisterStatus.NONE:
|
||||
if not timeout_extended or harness.capture_coverage:
|
||||
timeout_extended = True
|
||||
if harness.capture_coverage:
|
||||
|
|
@ -292,24 +293,24 @@ class BinaryHandler(Handler):
|
|||
|
||||
return env
|
||||
|
||||
def _update_instance_info(self, harness_state, handler_time):
|
||||
def _update_instance_info(self, harness_status, handler_time):
|
||||
self.instance.execution_time = handler_time
|
||||
if not self.terminated and self.returncode != 0:
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
if self.options.enable_valgrind and self.returncode == 2:
|
||||
self.instance.reason = "Valgrind error"
|
||||
else:
|
||||
# When a process is killed, the default handler returns 128 + SIGTERM
|
||||
# so in that case the return code itself is not meaningful
|
||||
self.instance.reason = "Failed"
|
||||
elif harness_state != HarnessStatus.NONE:
|
||||
self.instance.status = harness_state
|
||||
if harness_state == HarnessStatus.FAIL:
|
||||
elif harness_status != TwisterStatus.NONE:
|
||||
self.instance.status = harness_status
|
||||
if harness_status == TwisterStatus.FAIL:
|
||||
self.instance.reason = "Failed"
|
||||
else:
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
self.instance.reason = "Timeout"
|
||||
self.instance.add_missing_case_status(TestCaseStatus.BLOCK, "Timeout")
|
||||
self.instance.add_missing_case_status(TwisterStatus.BLOCK, "Timeout")
|
||||
|
||||
def handle(self, harness):
|
||||
robot_test = getattr(harness, "is_robot_test", False)
|
||||
|
|
@ -352,7 +353,7 @@ class BinaryHandler(Handler):
|
|||
if sys.stdout.isatty():
|
||||
subprocess.call(["stty", "sane"], stdin=sys.stdout)
|
||||
|
||||
self._update_instance_info(harness.state, handler_time)
|
||||
self._update_instance_info(harness.status, handler_time)
|
||||
|
||||
self._final_handle_actions(harness, handler_time)
|
||||
|
||||
|
|
@ -449,7 +450,7 @@ class DeviceHandler(Handler):
|
|||
log_out_fp.flush()
|
||||
harness.handle(sl.rstrip())
|
||||
|
||||
if harness.state != HarnessStatus.NONE:
|
||||
if harness.status != TwisterStatus.NONE:
|
||||
if not harness.capture_coverage:
|
||||
ser.close()
|
||||
break
|
||||
|
|
@ -561,19 +562,19 @@ class DeviceHandler(Handler):
|
|||
|
||||
return command
|
||||
|
||||
def _update_instance_info(self, harness_state, handler_time, flash_error):
|
||||
def _update_instance_info(self, harness_status, handler_time, flash_error):
|
||||
self.instance.execution_time = handler_time
|
||||
if harness_state != HarnessStatus.NONE:
|
||||
self.instance.status = harness_state
|
||||
if harness_state == HarnessStatus.FAIL:
|
||||
if harness_status != TwisterStatus.NONE:
|
||||
self.instance.status = harness_status
|
||||
if harness_status == TwisterStatus.FAIL:
|
||||
self.instance.reason = "Failed"
|
||||
self.instance.add_missing_case_status(TestCaseStatus.BLOCK, harness_state)
|
||||
self.instance.add_missing_case_status(TwisterStatus.BLOCK, harness_status)
|
||||
elif not flash_error:
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
self.instance.reason = "Timeout"
|
||||
|
||||
if self.instance.status in [TestInstanceStatus.ERROR, TestInstanceStatus.FAIL]:
|
||||
self.instance.add_missing_case_status(TestCaseStatus.BLOCK, self.instance.reason)
|
||||
if self.instance.status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
|
||||
self.instance.add_missing_case_status(TwisterStatus.BLOCK, self.instance.reason)
|
||||
|
||||
def _terminate_pty(self, ser_pty, ser_pty_process):
|
||||
logger.debug(f"Terminating serial-pty:'{ser_pty}'")
|
||||
|
|
@ -598,11 +599,11 @@ class DeviceHandler(Handler):
|
|||
timeout=max(flash_timeout, self.get_test_timeout())
|
||||
)
|
||||
except serial.SerialException as e:
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
self.instance.reason = "Serial Device Error"
|
||||
logger.error("Serial device error: %s" % (str(e)))
|
||||
|
||||
self.instance.add_missing_case_status(TestCaseStatus.BLOCK, "Serial Device Error")
|
||||
self.instance.add_missing_case_status(TwisterStatus.BLOCK, "Serial Device Error")
|
||||
if serial_pty and ser_pty_process:
|
||||
self._terminate_pty(serial_pty, ser_pty_process)
|
||||
|
||||
|
|
@ -622,7 +623,7 @@ class DeviceHandler(Handler):
|
|||
time.sleep(1)
|
||||
hardware = self.device_is_available(self.instance)
|
||||
except TwisterException as error:
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
self.instance.reason = str(error)
|
||||
logger.error(self.instance.reason)
|
||||
return hardware
|
||||
|
|
@ -715,7 +716,7 @@ class DeviceHandler(Handler):
|
|||
logger.debug(stdout.decode(errors="ignore"))
|
||||
|
||||
if proc.returncode != 0:
|
||||
self.instance.status = TestInstanceStatus.ERROR
|
||||
self.instance.status = TwisterStatus.ERROR
|
||||
self.instance.reason = "Device issue (Flash error?)"
|
||||
flash_error = True
|
||||
with open(d_log, "w") as dlog_fp:
|
||||
|
|
@ -725,7 +726,7 @@ class DeviceHandler(Handler):
|
|||
logger.warning("Flash operation timed out.")
|
||||
self.terminate(proc)
|
||||
(stdout, stderr) = proc.communicate()
|
||||
self.instance.status = TestInstanceStatus.ERROR
|
||||
self.instance.status = TwisterStatus.ERROR
|
||||
self.instance.reason = "Device issue (Timeout)"
|
||||
flash_error = True
|
||||
|
||||
|
|
@ -734,7 +735,7 @@ class DeviceHandler(Handler):
|
|||
|
||||
except subprocess.CalledProcessError:
|
||||
halt_monitor_evt.set()
|
||||
self.instance.status = TestInstanceStatus.ERROR
|
||||
self.instance.status = TwisterStatus.ERROR
|
||||
self.instance.reason = "Device issue (Flash error)"
|
||||
flash_error = True
|
||||
|
||||
|
|
@ -772,7 +773,7 @@ class DeviceHandler(Handler):
|
|||
|
||||
handler_time = time.time() - start_time
|
||||
|
||||
self._update_instance_info(harness.state, handler_time, flash_error)
|
||||
self._update_instance_info(harness.status, handler_time, flash_error)
|
||||
|
||||
self._final_handle_actions(harness, handler_time)
|
||||
|
||||
|
|
@ -895,7 +896,7 @@ class QEMUHandler(Handler):
|
|||
timeout_time = start_time + timeout
|
||||
p = select.poll()
|
||||
p.register(in_fp, select.POLLIN)
|
||||
_status = OutputStatus.NONE
|
||||
_status = TwisterStatus.NONE
|
||||
_reason = None
|
||||
|
||||
line = ""
|
||||
|
|
@ -917,18 +918,18 @@ class QEMUHandler(Handler):
|
|||
# of not enough CPU time scheduled by host for
|
||||
# QEMU process during p.poll(this_timeout)
|
||||
cpu_time = QEMUHandler._get_cpu_time(pid)
|
||||
if cpu_time < timeout and _status == OutputStatus.NONE:
|
||||
if cpu_time < timeout and _status == TwisterStatus.NONE:
|
||||
timeout_time = time.time() + (timeout - cpu_time)
|
||||
continue
|
||||
except psutil.NoSuchProcess:
|
||||
pass
|
||||
except ProcessLookupError:
|
||||
_status = OutputStatus.FAIL
|
||||
_status = TwisterStatus.FAIL
|
||||
_reason = "Execution error"
|
||||
break
|
||||
|
||||
if _status == OutputStatus.NONE:
|
||||
_status = OutputStatus.FAIL
|
||||
if _status == TwisterStatus.NONE:
|
||||
_status = TwisterStatus.FAIL
|
||||
_reason = "timeout"
|
||||
break
|
||||
|
||||
|
|
@ -939,14 +940,14 @@ class QEMUHandler(Handler):
|
|||
c = in_fp.read(1).decode("utf-8")
|
||||
except UnicodeDecodeError:
|
||||
# Test is writing something weird, fail
|
||||
_status = OutputStatus.FAIL
|
||||
_status = TwisterStatus.FAIL
|
||||
_reason = "unexpected byte"
|
||||
break
|
||||
|
||||
if c == "":
|
||||
# EOF, this shouldn't happen unless QEMU crashes
|
||||
if not ignore_unexpected_eof:
|
||||
_status = OutputStatus.FAIL
|
||||
_status = TwisterStatus.FAIL
|
||||
_reason = "unexpected eof"
|
||||
break
|
||||
line = line + c
|
||||
|
|
@ -960,12 +961,12 @@ class QEMUHandler(Handler):
|
|||
logger.debug(f"QEMU ({pid}): {line}")
|
||||
|
||||
harness.handle(line)
|
||||
if harness.state != HarnessStatus.NONE:
|
||||
if harness.status != TwisterStatus.NONE:
|
||||
# if we have registered a fail make sure the status is not
|
||||
# overridden by a false success message coming from the
|
||||
# testsuite
|
||||
if _status != OutputStatus.FAIL:
|
||||
_status = harness.state
|
||||
if _status != TwisterStatus.FAIL:
|
||||
_status = harness.status
|
||||
_reason = harness.reason
|
||||
|
||||
# if we get some status, that means test is doing well, we reset
|
||||
|
|
@ -1006,16 +1007,16 @@ class QEMUHandler(Handler):
|
|||
|
||||
return command
|
||||
|
||||
def _update_instance_info(self, harness_state, is_timeout):
|
||||
def _update_instance_info(self, harness_status, is_timeout):
|
||||
if (self.returncode != 0 and not self.ignore_qemu_crash) or \
|
||||
harness_state == HarnessStatus.NONE:
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
harness_status == TwisterStatus.NONE:
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
if is_timeout:
|
||||
self.instance.reason = "Timeout"
|
||||
else:
|
||||
if not self.instance.reason:
|
||||
self.instance.reason = "Exited with {}".format(self.returncode)
|
||||
self.instance.add_missing_case_status(TestCaseStatus.BLOCK)
|
||||
self.instance.add_missing_case_status(TwisterStatus.BLOCK)
|
||||
|
||||
def handle(self, harness):
|
||||
self.run = True
|
||||
|
|
@ -1056,7 +1057,7 @@ class QEMUHandler(Handler):
|
|||
|
||||
is_timeout = True
|
||||
self.terminate(proc)
|
||||
if harness.state == HarnessStatus.PASS:
|
||||
if harness.status == TwisterStatus.PASS:
|
||||
self.returncode = 0
|
||||
else:
|
||||
self.returncode = proc.returncode
|
||||
|
|
@ -1078,7 +1079,7 @@ class QEMUHandler(Handler):
|
|||
|
||||
logger.debug(f"return code from QEMU ({qemu_pid}): {self.returncode}")
|
||||
|
||||
self._update_instance_info(harness.state, is_timeout)
|
||||
self._update_instance_info(harness.status, is_timeout)
|
||||
|
||||
self._final_handle_actions(harness, 0)
|
||||
|
||||
|
|
@ -1172,16 +1173,16 @@ class QEMUWinHandler(Handler):
|
|||
|
||||
return command
|
||||
|
||||
def _update_instance_info(self, harness_state, is_timeout):
|
||||
def _update_instance_info(self, harness_status, is_timeout):
|
||||
if (self.returncode != 0 and not self.ignore_qemu_crash) or \
|
||||
harness_state == HarnessStatus.NONE:
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
harness_status == TwisterStatus.NONE:
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
if is_timeout:
|
||||
self.instance.reason = "Timeout"
|
||||
else:
|
||||
if not self.instance.reason:
|
||||
self.instance.reason = "Exited with {}".format(self.returncode)
|
||||
self.instance.add_missing_case_status(TestInstanceStatus.BLOCK)
|
||||
self.instance.add_missing_case_status(TwisterStatus.BLOCK)
|
||||
|
||||
def _enqueue_char(self, queue):
|
||||
while not self.stop_thread:
|
||||
|
|
@ -1203,7 +1204,7 @@ class QEMUWinHandler(Handler):
|
|||
def _monitor_output(self, queue, timeout, logfile, pid_fn, harness, ignore_unexpected_eof=False):
|
||||
start_time = time.time()
|
||||
timeout_time = start_time + timeout
|
||||
_status = OutputStatus.NONE
|
||||
_status = TwisterStatus.NONE
|
||||
_reason = None
|
||||
line = ""
|
||||
timeout_extended = False
|
||||
|
|
@ -1220,18 +1221,18 @@ class QEMUWinHandler(Handler):
|
|||
# of not enough CPU time scheduled by host for
|
||||
# QEMU process during p.poll(this_timeout)
|
||||
cpu_time = self._get_cpu_time(self.pid)
|
||||
if cpu_time < timeout and _status == OutputStatus.NONE:
|
||||
if cpu_time < timeout and _status == TwisterStatus.NONE:
|
||||
timeout_time = time.time() + (timeout - cpu_time)
|
||||
continue
|
||||
except psutil.NoSuchProcess:
|
||||
pass
|
||||
except ProcessLookupError:
|
||||
_status = OutputStatus.FAIL
|
||||
_status = TwisterStatus.FAIL
|
||||
_reason = "Execution error"
|
||||
break
|
||||
|
||||
if _status == OutputStatus.NONE:
|
||||
_status = OutputStatus.FAIL
|
||||
if _status == TwisterStatus.NONE:
|
||||
_status = TwisterStatus.FAIL
|
||||
_reason = "timeout"
|
||||
break
|
||||
|
||||
|
|
@ -1250,14 +1251,14 @@ class QEMUWinHandler(Handler):
|
|||
c = c.decode("utf-8")
|
||||
except UnicodeDecodeError:
|
||||
# Test is writing something weird, fail
|
||||
_status = OutputStatus.FAIL
|
||||
_status = TwisterStatus.FAIL
|
||||
_reason = "unexpected byte"
|
||||
break
|
||||
|
||||
if c == "":
|
||||
# EOF, this shouldn't happen unless QEMU crashes
|
||||
if not ignore_unexpected_eof:
|
||||
_status = OutputStatus.FAIL
|
||||
_status = TwisterStatus.FAIL
|
||||
_reason = "unexpected eof"
|
||||
break
|
||||
line = line + c
|
||||
|
|
@ -1271,15 +1272,15 @@ class QEMUWinHandler(Handler):
|
|||
logger.debug(f"QEMU ({self.pid}): {line}")
|
||||
|
||||
harness.handle(line)
|
||||
if harness.state != HarnessStatus.NONE:
|
||||
# if we have registered a fail make sure the state is not
|
||||
if harness.status != TwisterStatus.NONE:
|
||||
# if we have registered a fail make sure the status is not
|
||||
# overridden by a false success message coming from the
|
||||
# testsuite
|
||||
if _status != OutputStatus.FAIL:
|
||||
_status = harness.state
|
||||
if _status != TwisterStatus.FAIL:
|
||||
_status = harness.status
|
||||
_reason = harness.reason
|
||||
|
||||
# if we get some state, that means test is doing well, we reset
|
||||
# if we get some status, that means test is doing well, we reset
|
||||
# the timeout and wait for 2 more seconds to catch anything
|
||||
# printed late. We wait much longer if code
|
||||
# coverage is enabled since dumping this information can
|
||||
|
|
@ -1332,7 +1333,7 @@ class QEMUWinHandler(Handler):
|
|||
time.sleep(0.5)
|
||||
proc.kill()
|
||||
|
||||
if harness.state == HarnessStatus.PASS:
|
||||
if harness.status == TwisterStatus.PASS:
|
||||
self.returncode = 0
|
||||
else:
|
||||
self.returncode = proc.returncode
|
||||
|
|
@ -1345,7 +1346,7 @@ class QEMUWinHandler(Handler):
|
|||
os.close(self.pipe_handle)
|
||||
self.pipe_handle = None
|
||||
|
||||
self._update_instance_info(harness.state, is_timeout)
|
||||
self._update_instance_info(harness.status, is_timeout)
|
||||
|
||||
self._final_handle_actions(harness, 0)
|
||||
|
||||
|
|
@@ -1,6 +1,7 @@
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
from asyncio.log import logger
from enum import Enum
import platform
import re
import os

@@ -15,10 +16,11 @@ import time
import shutil
import json

from twisterlib.reports import ReportStatus
from twisterlib.error import ConfigurationError
from twisterlib.environment import ZEPHYR_BASE, PYTEST_PLUGIN_INSTALLED
from twisterlib.handlers import Handler, terminate_process, SUPPORTED_SIMS_IN_PYTEST
from twisterlib.statuses import HarnessStatus, ReportStatus, TestCaseStatus, TestInstanceStatus
from twisterlib.statuses import TwisterStatus
from twisterlib.testinstance import TestInstance

@@ -37,16 +39,8 @@ class Harness:
RUN_FAILED = "PROJECT EXECUTION FAILED"
run_id_pattern = r"RunID: (?P<run_id>.*)"

ztest_to_status = {
'PASS': TestCaseStatus.PASS,
'SKIP': TestCaseStatus.SKIP,
'BLOCK': TestCaseStatus.BLOCK,
'FAIL': TestCaseStatus.FAIL
}

def __init__(self):
self.state = HarnessStatus.NONE
self._status = TwisterStatus.NONE
self.reason = None
self.type = None
self.regex = []

@@ -70,6 +64,21 @@ class Harness:
self.testcase_output = ""
self._match = False

@property
def status(self) -> TwisterStatus:
return self._status

@status.setter
def status(self, value : TwisterStatus) -> None:
# Check for illegal assignments by value
try:
key = value.name if isinstance(value, Enum) else value
self._status = TwisterStatus[key]
except KeyError:
logger.warning(f'Harness assigned status "{value}"'
f' without an equivalent in TwisterStatus.'
f' Assignment was ignored.')

def configure(self, instance):
self.instance = instance
config = instance.testsuite.harness_config
@ -133,13 +142,13 @@ class Harness:
|
|||
|
||||
if self.RUN_PASSED in line:
|
||||
if self.fault:
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
self.reason = "Fault detected while running test"
|
||||
else:
|
||||
self.state = HarnessStatus.PASS
|
||||
self.status = TwisterStatus.PASS
|
||||
|
||||
if self.RUN_FAILED in line:
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
self.reason = "Testsuite failed"
|
||||
|
||||
if self.fail_on_fault:
|
||||
|
|
@ -170,9 +179,9 @@ class Robot(Harness):
|
|||
handle is trying to give a PASS or FAIL to avoid timeout, nothing
|
||||
is writen into handler.log
|
||||
'''
|
||||
self.instance.state = TestInstanceStatus.PASS
|
||||
self.instance.status = TwisterStatus.PASS
|
||||
tc = self.instance.get_case_or_create(self.id)
|
||||
tc.status = TestCaseStatus.PASS
|
||||
tc.status = TwisterStatus.PASS
|
||||
|
||||
def run_robot_test(self, command, handler):
|
||||
start_time = time.time()
|
||||
|
|
@ -203,16 +212,16 @@ class Robot(Harness):
|
|||
self.instance.execution_time = time.time() - start_time
|
||||
|
||||
if renode_test_proc.returncode == 0:
|
||||
self.instance.status = TestInstanceStatus.PASS
|
||||
self.instance.status = TwisterStatus.PASS
|
||||
# all tests in one Robot file are treated as a single test case,
|
||||
# so its status should be set accordingly to the instance status
|
||||
# please note that there should be only one testcase in testcases list
|
||||
self.instance.testcases[0].status = TestCaseStatus.PASS
|
||||
self.instance.testcases[0].status = TwisterStatus.PASS
|
||||
else:
|
||||
logger.error("Robot test failure: %s for %s" %
|
||||
(handler.sourcedir, self.instance.platform.name))
|
||||
self.instance.status = TestInstanceStatus.FAIL
|
||||
self.instance.testcases[0].status = TestCaseStatus.FAIL
|
||||
self.instance.status = TwisterStatus.FAIL
|
||||
self.instance.testcases[0].status = TwisterStatus.FAIL
|
||||
|
||||
if out:
|
||||
with open(os.path.join(self.instance.build_dir, handler.log), "wt") as log:
|
||||
|
|
@ -237,10 +246,10 @@ class Console(Harness):
|
|||
def configure(self, instance):
|
||||
super(Console, self).configure(instance)
|
||||
if self.regex is None or len(self.regex) == 0:
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
tc = self.instance.set_case_status_by_name(
|
||||
self.get_testcase_name(),
|
||||
TestCaseStatus.FAIL,
|
||||
TwisterStatus.FAIL,
|
||||
f"HARNESS:{self.__class__.__name__}:no regex patterns configured."
|
||||
)
|
||||
raise ConfigurationError(self.instance.name, tc.reason)
|
||||
|
|
@ -253,10 +262,10 @@ class Console(Harness):
|
|||
self.patterns.append(re.compile(r))
|
||||
self.patterns_expected = len(self.patterns)
|
||||
else:
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
tc = self.instance.set_case_status_by_name(
|
||||
self.get_testcase_name(),
|
||||
TestCaseStatus.FAIL,
|
||||
TwisterStatus.FAIL,
|
||||
f"HARNESS:{self.__class__.__name__}:incorrect type={self.type}"
|
||||
)
|
||||
raise ConfigurationError(self.instance.name, tc.reason)
|
||||
|
|
@ -268,7 +277,7 @@ class Console(Harness):
|
|||
logger.debug(f"HARNESS:{self.__class__.__name__}:EXPECTED:"
|
||||
f"'{self.pattern.pattern}'")
|
||||
self.next_pattern += 1
|
||||
self.state = HarnessStatus.PASS
|
||||
self.status = TwisterStatus.PASS
|
||||
elif self.type == "multi_line" and self.ordered:
|
||||
if (self.next_pattern < len(self.patterns) and
|
||||
self.patterns[self.next_pattern].search(line)):
|
||||
|
|
@ -277,7 +286,7 @@ class Console(Harness):
|
|||
f"'{self.patterns[self.next_pattern].pattern}'")
|
||||
self.next_pattern += 1
|
||||
if self.next_pattern >= len(self.patterns):
|
||||
self.state = HarnessStatus.PASS
|
||||
self.status = TwisterStatus.PASS
|
||||
elif self.type == "multi_line" and not self.ordered:
|
||||
for i, pattern in enumerate(self.patterns):
|
||||
r = self.regex[i]
|
||||
|
|
@ -287,7 +296,7 @@ class Console(Harness):
|
|||
f"{len(self.matches)}/{self.patterns_expected}):"
|
||||
f"'{pattern.pattern}'")
|
||||
if len(self.matches) == len(self.regex):
|
||||
self.state = HarnessStatus.PASS
|
||||
self.status = TwisterStatus.PASS
|
||||
else:
|
||||
logger.error("Unknown harness_config type")
|
||||
|
||||
|
|
@ -308,28 +317,28 @@ class Console(Harness):
|
|||
# test image was executed.
|
||||
# TODO: Introduce explicit match policy type to reject
|
||||
# unexpected console output, allow missing patterns, deny duplicates.
|
||||
if self.state == HarnessStatus.PASS and \
|
||||
if self.status == TwisterStatus.PASS and \
|
||||
self.ordered and \
|
||||
self.next_pattern < self.patterns_expected:
|
||||
logger.error(f"HARNESS:{self.__class__.__name__}: failed with"
|
||||
f" {self.next_pattern} of {self.patterns_expected}"
|
||||
f" expected ordered patterns.")
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
self.reason = "patterns did not match (ordered)"
|
||||
if self.state == HarnessStatus.PASS and \
|
||||
if self.status == TwisterStatus.PASS and \
|
||||
not self.ordered and \
|
||||
len(self.matches) < self.patterns_expected:
|
||||
logger.error(f"HARNESS:{self.__class__.__name__}: failed with"
|
||||
f" {len(self.matches)} of {self.patterns_expected}"
|
||||
f" expected unordered patterns.")
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
self.reason = "patterns did not match (unordered)"
|
||||
|
||||
tc = self.instance.get_case_or_create(self.get_testcase_name())
|
||||
if self.state == HarnessStatus.PASS:
|
||||
tc.status = TestCaseStatus.PASS
|
||||
if self.status == TwisterStatus.PASS:
|
||||
tc.status = TwisterStatus.PASS
|
||||
else:
|
||||
tc.status = TestCaseStatus.FAIL
|
||||
tc.status = TwisterStatus.FAIL
|
||||
|
||||
|
||||
class PytestHarnessException(Exception):
|
||||
|
|
@ -352,7 +361,7 @@ class Pytest(Harness):
|
|||
self.run_command(cmd, timeout)
|
||||
except PytestHarnessException as pytest_exception:
|
||||
logger.error(str(pytest_exception))
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
self.instance.reason = str(pytest_exception)
|
||||
finally:
|
||||
if self.reserved_serial:
|
||||
|
|
@ -486,10 +495,10 @@ class Pytest(Harness):
|
|||
logger.warning('Timeout has occurred. Can be extended in testspec file. '
|
||||
f'Currently set to {timeout} seconds.')
|
||||
self.instance.reason = 'Pytest timeout'
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
proc.wait(timeout)
|
||||
except subprocess.TimeoutExpired:
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
proc.kill()
|
||||
|
||||
@staticmethod
|
||||
|
|
@ -525,37 +534,37 @@ class Pytest(Harness):
|
|||
proc.communicate()
|
||||
|
||||
def _update_test_status(self):
|
||||
if self.state == HarnessStatus.NONE:
|
||||
if self.status == TwisterStatus.NONE:
|
||||
self.instance.testcases = []
|
||||
try:
|
||||
self._parse_report_file(self.report_file)
|
||||
except Exception as e:
|
||||
logger.error(f'Error when parsing file {self.report_file}: {e}')
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
finally:
|
||||
if not self.instance.testcases:
|
||||
self.instance.init_cases()
|
||||
|
||||
self.instance.status = self.state if self.state != HarnessStatus.NONE else \
|
||||
TestInstanceStatus.FAIL
|
||||
if self.instance.status in [TestInstanceStatus.ERROR, TestInstanceStatus.FAIL]:
|
||||
self.instance.status = self.status if self.status != TwisterStatus.NONE else \
|
||||
TwisterStatus.FAIL
|
||||
if self.instance.status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
|
||||
self.instance.reason = self.instance.reason or 'Pytest failed'
|
||||
self.instance.add_missing_case_status(TestCaseStatus.BLOCK, self.instance.reason)
|
||||
self.instance.add_missing_case_status(TwisterStatus.BLOCK, self.instance.reason)
|
||||
|
||||
def _parse_report_file(self, report):
|
||||
tree = ET.parse(report)
|
||||
root = tree.getroot()
|
||||
if elem_ts := root.find('testsuite'):
|
||||
if elem_ts.get('failures') != '0':
|
||||
self.state = HarnessStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
self.instance.reason = f"{elem_ts.get('failures')}/{elem_ts.get('tests')} pytest scenario(s) failed"
|
||||
elif elem_ts.get('errors') != '0':
|
||||
self.state = HarnessStatus.ERROR
|
||||
self.status = TwisterStatus.ERROR
|
||||
self.instance.reason = 'Error during pytest execution'
|
||||
elif elem_ts.get('skipped') == elem_ts.get('tests'):
|
||||
self.state = HarnessStatus.SKIP
|
||||
self.status = TwisterStatus.SKIP
|
||||
else:
|
||||
self.state = HarnessStatus.PASS
|
||||
self.status = TwisterStatus.PASS
|
||||
self.instance.execution_time = float(elem_ts.get('time'))
|
||||
|
||||
for elem_tc in elem_ts.findall('testcase'):
|
||||
|
|
@ -563,18 +572,18 @@ class Pytest(Harness):
|
|||
tc.duration = float(elem_tc.get('time'))
|
||||
elem = elem_tc.find('*')
|
||||
if elem is None:
|
||||
tc.status = TestCaseStatus.PASS
|
||||
tc.status = TwisterStatus.PASS
|
||||
else:
|
||||
if elem.tag == ReportStatus.SKIP:
|
||||
tc.status = TestCaseStatus.SKIP
|
||||
tc.status = TwisterStatus.SKIP
|
||||
elif elem.tag == ReportStatus.FAIL:
|
||||
tc.status = TestCaseStatus.FAIL
|
||||
tc.status = TwisterStatus.FAIL
|
||||
else:
|
||||
tc.status = TestCaseStatus.ERROR
|
||||
tc.status = TwisterStatus.ERROR
|
||||
tc.reason = elem.get('message')
|
||||
tc.output = elem.text
|
||||
else:
|
||||
self.state = HarnessStatus.SKIP
|
||||
self.status = TwisterStatus.SKIP
|
||||
self.instance.reason = 'No tests collected'
|
||||
|
||||
|
||||
|
|
@ -595,7 +604,7 @@ class Gtest(Harness):
|
|||
# Strip the ANSI characters, they mess up the patterns
|
||||
non_ansi_line = self.ANSI_ESCAPE.sub('', line)
|
||||
|
||||
if self.state != HarnessStatus.NONE:
|
||||
if self.status != TwisterStatus.NONE:
|
||||
return
|
||||
|
||||
# Check if we started running a new test
|
||||
|
|
@ -621,7 +630,7 @@ class Gtest(Harness):
|
|||
# Create the test instance and set the context
|
||||
tc = self.instance.get_case_or_create(name)
|
||||
self.tc = tc
|
||||
self.tc.status = TestCaseStatus.STARTED
|
||||
self.tc.status = TwisterStatus.STARTED
|
||||
self.testcase_output += line + "\n"
|
||||
self._match = True
|
||||
|
||||
|
|
@ -630,16 +639,16 @@ class Gtest(Harness):
|
|||
if finished_match:
|
||||
tc = self.instance.get_case_or_create(self.id)
|
||||
if self.has_failures or self.tc is not None:
|
||||
self.state = HarnessStatus.FAIL
|
||||
tc.status = TestCaseStatus.FAIL
|
||||
self.status = TwisterStatus.FAIL
|
||||
tc.status = TwisterStatus.FAIL
|
||||
else:
|
||||
self.state = HarnessStatus.PASS
|
||||
tc.status = TestCaseStatus.PASS
|
||||
self.status = TwisterStatus.PASS
|
||||
tc.status = TwisterStatus.PASS
|
||||
return
|
||||
|
||||
# Check if the individual test finished
|
||||
state, name = self._check_result(non_ansi_line)
|
||||
if state == TestCaseStatus.NONE or name is None:
|
||||
if state == TwisterStatus.NONE or name is None:
|
||||
# Nothing finished, keep processing lines
|
||||
return
|
||||
|
||||
|
|
@ -654,7 +663,7 @@ class Gtest(Harness):
|
|||
|
||||
# Update the status of the test
|
||||
tc.status = state
|
||||
if tc.status == TestCaseStatus.FAIL:
|
||||
if tc.status == TwisterStatus.FAIL:
|
||||
self.has_failures = True
|
||||
tc.output = self.testcase_output
|
||||
self.testcase_output = ""
|
||||
|
|
@ -663,21 +672,21 @@ class Gtest(Harness):
|
|||
def _check_result(self, line):
|
||||
test_pass_match = re.search(self.TEST_PASS_PATTERN, line)
|
||||
if test_pass_match:
|
||||
return TestCaseStatus.PASS, \
|
||||
return TwisterStatus.PASS, \
|
||||
"{}.{}.{}".format(
|
||||
self.id, test_pass_match.group("suite_name"),
|
||||
test_pass_match.group("test_name")
|
||||
)
|
||||
test_skip_match = re.search(self.TEST_SKIP_PATTERN, line)
|
||||
if test_skip_match:
|
||||
return TestCaseStatus.SKIP, \
|
||||
return TwisterStatus.SKIP, \
|
||||
"{}.{}.{}".format(
|
||||
self.id, test_skip_match.group("suite_name"),
|
||||
test_skip_match.group("test_name")
|
||||
)
|
||||
test_fail_match = re.search(self.TEST_FAIL_PATTERN, line)
|
||||
if test_fail_match:
|
||||
return TestCaseStatus.FAIL, \
|
||||
return TwisterStatus.FAIL, \
|
||||
"{}.{}.{}".format(
|
||||
self.id, test_fail_match.group("suite_name"),
|
||||
test_fail_match.group("test_name")
|
||||
|
|
@ -705,7 +714,7 @@ class Test(Harness):
|
|||
# Mark the test as started, if something happens here, it is mostly
|
||||
# due to this tests, for example timeout. This should in this case
|
||||
# be marked as failed and not blocked (not run).
|
||||
tc.status = TestCaseStatus.STARTED
|
||||
tc.status = TwisterStatus.STARTED
|
||||
|
||||
if testcase_match or self._match:
|
||||
self.testcase_output += line + "\n"
|
||||
|
|
@ -723,11 +732,11 @@ class Test(Harness):
|
|||
matched_status = result_match.group(1)
|
||||
name = "{}.{}".format(self.id, result_match.group(3))
|
||||
tc = self.instance.get_case_or_create(name)
|
||||
tc.status = self.ztest_to_status[matched_status]
|
||||
if tc.status == TestCaseStatus.SKIP:
|
||||
tc.status = TwisterStatus[matched_status]
|
||||
if tc.status == TwisterStatus.SKIP:
|
||||
tc.reason = "ztest skip"
|
||||
tc.duration = float(result_match.group(4))
|
||||
if tc.status == TestCaseStatus.FAIL:
|
||||
if tc.status == TwisterStatus.FAIL:
|
||||
tc.output = self.testcase_output
|
||||
self.testcase_output = ""
|
||||
self._match = False
|
||||
|
|
@ -737,11 +746,11 @@ class Test(Harness):
|
|||
self.detected_suite_names.append(summary_match.group(2))
|
||||
name = "{}.{}".format(self.id, summary_match.group(4))
|
||||
tc = self.instance.get_case_or_create(name)
|
||||
tc.status = self.ztest_to_status[matched_status]
|
||||
if tc.status == TestCaseStatus.SKIP:
|
||||
tc.status = TwisterStatus[matched_status]
|
||||
if tc.status == TwisterStatus.SKIP:
|
||||
tc.reason = "ztest skip"
|
||||
tc.duration = float(summary_match.group(5))
|
||||
if tc.status == TestCaseStatus.FAIL:
|
||||
if tc.status == TwisterStatus.FAIL:
|
||||
tc.output = self.testcase_output
|
||||
self.testcase_output = ""
|
||||
self._match = False
|
||||
|
|
@ -749,13 +758,13 @@ class Test(Harness):
|
|||
|
||||
self.process_test(line)
|
||||
|
||||
if not self.ztest and self.state != HarnessStatus.NONE:
|
||||
if not self.ztest and self.status != TwisterStatus.NONE:
|
||||
logger.debug(f"not a ztest and no state for {self.id}")
|
||||
tc = self.instance.get_case_or_create(self.id)
|
||||
if self.state == HarnessStatus.PASS:
|
||||
tc.status = TestCaseStatus.PASS
|
||||
if self.status == TwisterStatus.PASS:
|
||||
tc.status = TwisterStatus.PASS
|
||||
else:
|
||||
tc.status = TestCaseStatus.FAIL
|
||||
tc.status = TwisterStatus.FAIL
|
||||
tc.reason = "Test failure"
|
||||
|
||||
|
||||
|
|
@@ -7,7 +7,7 @@ import tarfile
import json
import os

from twisterlib.statuses import TestSuiteStatus
from twisterlib.statuses import TwisterStatus

class Artifacts:

@@ -27,7 +27,7 @@ class Artifacts:
with open(os.path.join(self.options.outdir, "twister.json"), "r") as json_test_plan:
jtp = json.load(json_test_plan)
for t in jtp['testsuites']:
if t['status'] != TestSuiteStatus.FILTER:
if t['status'] != TwisterStatus.FILTER:
p = t['platform']
normalized = p.replace("/", "_")
dirs.append(os.path.join(self.options.outdir, normalized, t['name']))

@@ -4,6 +4,7 @@
# Copyright (c) 2018 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

from enum import Enum
import os
import json
import logging

@@ -13,11 +14,21 @@ import string
from datetime import datetime
from pathlib import PosixPath

from twisterlib.statuses import ReportStatus, TestCaseStatus, TestInstanceStatus, TestSuiteStatus
from twisterlib.statuses import TwisterStatus

logger = logging.getLogger('twister')
logger.setLevel(logging.DEBUG)

class ReportStatus(str, Enum):
def __str__(self):
return str(self.value)

ERROR = 'error'
FAIL = 'failure'
SKIP = 'skipped'

class Reporting:

json_filters = {
@ -57,7 +68,7 @@ class Reporting:
|
|||
def xunit_testcase(eleTestsuite, name, classname, status, ts_status, reason, duration, runnable, stats, log, build_only_as_skip):
|
||||
fails, passes, errors, skips = stats
|
||||
|
||||
if status in [TestCaseStatus.SKIP, TestCaseStatus.FILTER]:
|
||||
if status in [TwisterStatus.SKIP, TwisterStatus.FILTER]:
|
||||
duration = 0
|
||||
|
||||
eleTestcase = ET.SubElement(
|
||||
|
|
@ -66,32 +77,32 @@ class Reporting:
|
|||
name=f"{name}",
|
||||
time=f"{duration}")
|
||||
|
||||
if status in [TestCaseStatus.SKIP, TestCaseStatus.FILTER]:
|
||||
if status in [TwisterStatus.SKIP, TwisterStatus.FILTER]:
|
||||
skips += 1
|
||||
# temporarily add build_only_as_skip to restore existing CI report behaviour
|
||||
if ts_status == TestSuiteStatus.PASS and not runnable:
|
||||
if ts_status == TwisterStatus.PASS and not runnable:
|
||||
tc_type = "build"
|
||||
else:
|
||||
tc_type = status
|
||||
ET.SubElement(eleTestcase, ReportStatus.SKIP, type=f"{tc_type}", message=f"{reason}")
|
||||
elif status in [TestCaseStatus.FAIL, TestCaseStatus.BLOCK]:
|
||||
elif status in [TwisterStatus.FAIL, TwisterStatus.BLOCK]:
|
||||
fails += 1
|
||||
el = ET.SubElement(eleTestcase, ReportStatus.FAIL, type="failure", message=f"{reason}")
|
||||
if log:
|
||||
el.text = log
|
||||
elif status == TestCaseStatus.ERROR:
|
||||
elif status == TwisterStatus.ERROR:
|
||||
errors += 1
|
||||
el = ET.SubElement(eleTestcase, ReportStatus.ERROR, type="failure", message=f"{reason}")
|
||||
if log:
|
||||
el.text = log
|
||||
elif status == TestCaseStatus.PASS:
|
||||
elif status == TwisterStatus.PASS:
|
||||
if not runnable and build_only_as_skip:
|
||||
ET.SubElement(eleTestcase, ReportStatus.SKIP, type="build", message="built only")
|
||||
skips += 1
|
||||
else:
|
||||
passes += 1
|
||||
else:
|
||||
if status == TestCaseStatus.NONE:
|
||||
if status == TwisterStatus.NONE:
|
||||
logger.debug(f"{name}: No status")
|
||||
ET.SubElement(eleTestcase, ReportStatus.SKIP, type=f"untested", message="No results captured, testsuite misconfiguration?")
|
||||
else:
|
||||
|
|
@ -116,7 +127,7 @@ class Reporting:
|
|||
suites_to_report = all_suites
|
||||
# do not create entry if everything is filtered out
|
||||
if not self.env.options.detailed_skipped_report:
|
||||
suites_to_report = list(filter(lambda d: d.get('status') != TestSuiteStatus.FILTER, all_suites))
|
||||
suites_to_report = list(filter(lambda d: d.get('status') != TwisterStatus.FILTER, all_suites))
|
||||
|
||||
for suite in suites_to_report:
|
||||
duration = 0
|
||||
|
|
@ -186,7 +197,7 @@ class Reporting:
|
|||
suites = list(filter(lambda d: d['platform'] == platform, all_suites))
|
||||
# do not create entry if everything is filtered out
|
||||
if not self.env.options.detailed_skipped_report:
|
||||
non_filtered = list(filter(lambda d: d.get('status') != TestSuiteStatus.FILTER, suites))
|
||||
non_filtered = list(filter(lambda d: d.get('status') != TwisterStatus.FILTER, suites))
|
||||
if not non_filtered:
|
||||
continue
|
||||
|
||||
|
|
@ -212,7 +223,7 @@ class Reporting:
|
|||
|
||||
ts_status = ts.get('status')
|
||||
# Do not report filtered testcases
|
||||
if ts_status == TestSuiteStatus.FILTER and not self.env.options.detailed_skipped_report:
|
||||
if ts_status == TwisterStatus.FILTER and not self.env.options.detailed_skipped_report:
|
||||
continue
|
||||
if full_report:
|
||||
for tc in ts.get("testcases", []):
|
||||
|
|
@ -303,7 +314,7 @@ class Reporting:
|
|||
suite['run_id'] = instance.run_id
|
||||
|
||||
suite["runnable"] = False
|
||||
if instance.status != TestInstanceStatus.FILTER:
|
||||
if instance.status != TwisterStatus.FILTER:
|
||||
suite["runnable"] = instance.run
|
||||
|
||||
if used_ram:
|
||||
|
|
@ -319,7 +330,7 @@ class Reporting:
|
|||
suite["available_ram"] = available_ram
|
||||
if available_rom:
|
||||
suite["available_rom"] = available_rom
|
||||
if instance.status in [TestInstanceStatus.ERROR, TestInstanceStatus.FAIL]:
|
||||
if instance.status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
|
||||
suite['status'] = instance.status
|
||||
suite["reason"] = instance.reason
|
||||
# FIXME
|
||||
|
|
@ -331,16 +342,16 @@ class Reporting:
|
|||
suite["log"] = self.process_log(device_log)
|
||||
else:
|
||||
suite["log"] = self.process_log(build_log)
|
||||
elif instance.status == TestInstanceStatus.FILTER:
|
||||
suite["status"] = TestSuiteStatus.FILTER
|
||||
elif instance.status == TwisterStatus.FILTER:
|
||||
suite["status"] = TwisterStatus.FILTER
|
||||
suite["reason"] = instance.reason
|
||||
elif instance.status == TestInstanceStatus.PASS:
|
||||
suite["status"] = TestSuiteStatus.PASS
|
||||
elif instance.status == TestInstanceStatus.SKIP:
|
||||
suite["status"] = TestSuiteStatus.SKIP
|
||||
elif instance.status == TwisterStatus.PASS:
|
||||
suite["status"] = TwisterStatus.PASS
|
||||
elif instance.status == TwisterStatus.SKIP:
|
||||
suite["status"] = TwisterStatus.SKIP
|
||||
suite["reason"] = instance.reason
|
||||
|
||||
if instance.status != TestInstanceStatus.NONE:
|
||||
if instance.status != TwisterStatus.NONE:
|
||||
suite["execution_time"] = f"{float(handler_time):.2f}"
|
||||
suite["build_time"] = f"{float(instance.build_time):.2f}"
|
||||
|
||||
|
|
@ -356,11 +367,11 @@ class Reporting:
|
|||
# if we discover those at runtime, the fallback testcase wont be
|
||||
# needed anymore and can be removed from the output, it does
|
||||
# not have a status and would otherwise be reported as skipped.
|
||||
if case.freeform and case.status == TestCaseStatus.NONE and len(instance.testcases) > 1:
|
||||
if case.freeform and case.status == TwisterStatus.NONE and len(instance.testcases) > 1:
|
||||
continue
|
||||
testcase = {}
|
||||
testcase['identifier'] = case.name
|
||||
if instance.status != TestInstanceStatus.NONE:
|
||||
if instance.status != TwisterStatus.NONE:
|
||||
if single_case_duration:
|
||||
testcase['execution_time'] = single_case_duration
|
||||
else:
|
||||
|
|
@ -369,11 +380,11 @@ class Reporting:
|
|||
if case.output != "":
|
||||
testcase['log'] = case.output
|
||||
|
||||
if case.status == TestCaseStatus.SKIP:
|
||||
if instance.status == TestInstanceStatus.FILTER:
|
||||
testcase["status"] = TestCaseStatus.FILTER
|
||||
if case.status == TwisterStatus.SKIP:
|
||||
if instance.status == TwisterStatus.FILTER:
|
||||
testcase["status"] = TwisterStatus.FILTER
|
||||
else:
|
||||
testcase["status"] = TestCaseStatus.SKIP
|
||||
testcase["status"] = TwisterStatus.SKIP
|
||||
testcase["reason"] = case.reason or instance.reason
|
||||
else:
|
||||
testcase["status"] = case.status
|
||||
|
|
@ -515,7 +526,7 @@ class Reporting:
|
|||
example_instance = None
|
||||
detailed_test_id = self.env.options.detailed_test_id
|
||||
for instance in self.instances.values():
|
||||
if instance.status not in [TestInstanceStatus.PASS, TestInstanceStatus.FILTER, TestInstanceStatus.SKIP]:
|
||||
if instance.status not in [TwisterStatus.PASS, TwisterStatus.FILTER, TwisterStatus.SKIP]:
|
||||
cnt += 1
|
||||
if cnt == 1:
|
||||
logger.info("-+" * 40)
|
||||
|
|
@ -523,7 +534,7 @@ class Reporting:
|
|||
|
||||
status = instance.status
|
||||
if self.env.options.report_summary is not None and \
|
||||
status in [TestInstanceStatus.ERROR, TestInstanceStatus.FAIL]:
|
||||
status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
|
||||
status = Fore.RED + status.upper() + Fore.RESET
|
||||
logger.info(f"{cnt}) {instance.testsuite.name} on {instance.platform.name} {status} ({instance.reason})")
|
||||
example_instance = instance
|
||||
|
|
@ -551,7 +562,7 @@ class Reporting:
|
|||
failed = 0
|
||||
run = 0
|
||||
for instance in self.instances.values():
|
||||
if instance.status == TestInstanceStatus.FAIL:
|
||||
if instance.status == TwisterStatus.FAIL:
|
||||
failed += 1
|
||||
elif not ignore_unrecognized_sections and instance.metrics.get("unrecognized"):
|
||||
logger.error("%sFAILED%s: %s has unrecognized binary sections: %s" %
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ from domains import Domains
|
|||
from twisterlib.cmakecache import CMakeCache
|
||||
from twisterlib.environment import canonical_zephyr_base
|
||||
from twisterlib.error import BuildError, ConfigurationError
|
||||
from twisterlib.statuses import TestCaseStatus, TestInstanceStatus
|
||||
from twisterlib.statuses import TwisterStatus
|
||||
|
||||
import elftools
|
||||
from elftools.elf.elffile import ELFFile
|
||||
|
|
@ -285,9 +285,9 @@ class CMake:
|
|||
msg = f"Finished building {self.source_dir} for {self.platform.name} in {duration:.2f} seconds"
|
||||
logger.debug(msg)
|
||||
|
||||
self.instance.status = TestInstanceStatus.PASS
|
||||
self.instance.status = TwisterStatus.PASS
|
||||
if not self.instance.run:
|
||||
self.instance.add_missing_case_status(TestCaseStatus.SKIP, "Test was built only")
|
||||
self.instance.add_missing_case_status(TwisterStatus.SKIP, "Test was built only")
|
||||
ret = {"returncode": p.returncode}
|
||||
|
||||
if out:
|
||||
|
|
@ -309,15 +309,15 @@ class CMake:
|
|||
imgtool_overflow_found = re.findall(r"Error: Image size \(.*\) \+ trailer \(.*\) exceeds requested size", log_msg)
|
||||
if overflow_found and not self.options.overflow_as_errors:
|
||||
logger.debug("Test skipped due to {} Overflow".format(overflow_found[0]))
|
||||
self.instance.status = TestInstanceStatus.SKIP
|
||||
self.instance.status = TwisterStatus.SKIP
|
||||
self.instance.reason = "{} overflow".format(overflow_found[0])
|
||||
change_skip_to_error_if_integration(self.options, self.instance)
|
||||
elif imgtool_overflow_found and not self.options.overflow_as_errors:
|
||||
self.instance.status = TestInstanceStatus.SKIP
|
||||
self.instance.status = TwisterStatus.SKIP
|
||||
self.instance.reason = "imgtool overflow"
|
||||
change_skip_to_error_if_integration(self.options, self.instance)
|
||||
else:
|
||||
self.instance.status = TestInstanceStatus.ERROR
|
||||
self.instance.status = TwisterStatus.ERROR
|
||||
self.instance.reason = "Build failure"
|
||||
|
||||
ret = {
|
||||
|
|
@ -416,7 +416,7 @@ class CMake:
|
|||
'filter': filter_results
|
||||
}
|
||||
else:
|
||||
self.instance.status = TestInstanceStatus.ERROR
|
||||
self.instance.status = TwisterStatus.ERROR
|
||||
self.instance.reason = "Cmake build failure"
|
||||
|
||||
for tc in self.instance.testcases:
|
||||
|
|
@ -608,16 +608,16 @@ class ProjectBuilder(FilterBuilder):
|
|||
|
||||
if op == "filter":
|
||||
ret = self.cmake(filter_stages=self.instance.filter_stages)
|
||||
if self.instance.status in [TestInstanceStatus.FAIL, TestInstanceStatus.ERROR]:
|
||||
if self.instance.status in [TwisterStatus.FAIL, TwisterStatus.ERROR]:
|
||||
pipeline.put({"op": "report", "test": self.instance})
|
||||
else:
|
||||
# Here we check the dt/kconfig filter results coming from running cmake
|
||||
if self.instance.name in ret['filter'] and ret['filter'][self.instance.name]:
|
||||
logger.debug("filtering %s" % self.instance.name)
|
||||
self.instance.status = TestInstanceStatus.FILTER
|
||||
self.instance.status = TwisterStatus.FILTER
|
||||
self.instance.reason = "runtime filter"
|
||||
results.skipped_runtime += 1
|
||||
self.instance.add_missing_case_status(TestCaseStatus.SKIP)
|
||||
self.instance.add_missing_case_status(TwisterStatus.SKIP)
|
||||
pipeline.put({"op": "report", "test": self.instance})
|
||||
else:
|
||||
pipeline.put({"op": "cmake", "test": self.instance})
|
||||
|
|
@ -625,20 +625,20 @@ class ProjectBuilder(FilterBuilder):
|
|||
# The build process, call cmake and build with configured generator
|
||||
elif op == "cmake":
|
||||
ret = self.cmake()
|
||||
if self.instance.status in [TestInstanceStatus.FAIL, TestInstanceStatus.ERROR]:
|
||||
if self.instance.status in [TwisterStatus.FAIL, TwisterStatus.ERROR]:
|
||||
pipeline.put({"op": "report", "test": self.instance})
|
||||
elif self.options.cmake_only:
|
||||
if self.instance.status == TestInstanceStatus.NONE:
|
||||
self.instance.status = TestInstanceStatus.PASS
|
||||
if self.instance.status == TwisterStatus.NONE:
|
||||
self.instance.status = TwisterStatus.PASS
|
||||
pipeline.put({"op": "report", "test": self.instance})
|
||||
else:
|
||||
# Here we check the runtime filter results coming from running cmake
|
||||
if self.instance.name in ret['filter'] and ret['filter'][self.instance.name]:
|
||||
logger.debug("filtering %s" % self.instance.name)
|
||||
self.instance.status = TestInstanceStatus.FILTER
|
||||
self.instance.status = TwisterStatus.FILTER
|
||||
self.instance.reason = "runtime filter"
|
||||
results.skipped_runtime += 1
|
||||
self.instance.add_missing_case_status(TestCaseStatus.SKIP)
|
||||
self.instance.add_missing_case_status(TwisterStatus.SKIP)
|
||||
pipeline.put({"op": "report", "test": self.instance})
|
||||
else:
|
||||
pipeline.put({"op": "build", "test": self.instance})
|
||||
|
|
@ -647,18 +647,18 @@ class ProjectBuilder(FilterBuilder):
|
|||
logger.debug("build test: %s" % self.instance.name)
|
||||
ret = self.build()
|
||||
if not ret:
|
||||
self.instance.status = TestInstanceStatus.ERROR
|
||||
self.instance.status = TwisterStatus.ERROR
|
||||
self.instance.reason = "Build Failure"
|
||||
pipeline.put({"op": "report", "test": self.instance})
|
||||
else:
|
||||
# Count skipped cases during build, for example
|
||||
# due to ram/rom overflow.
|
||||
if self.instance.status == TestInstanceStatus.SKIP:
|
||||
if self.instance.status == TwisterStatus.SKIP:
|
||||
results.skipped_runtime += 1
|
||||
self.instance.add_missing_case_status(TestCaseStatus.SKIP, self.instance.reason)
|
||||
self.instance.add_missing_case_status(TwisterStatus.SKIP, self.instance.reason)
|
||||
|
||||
if ret.get('returncode', 1) > 0:
|
||||
self.instance.add_missing_case_status(TestCaseStatus.BLOCK, self.instance.reason)
|
||||
self.instance.add_missing_case_status(TwisterStatus.BLOCK, self.instance.reason)
|
||||
pipeline.put({"op": "report", "test": self.instance})
|
||||
else:
|
||||
if self.instance.testsuite.harness in ['ztest', 'test']:
|
||||
|
|
@ -668,7 +668,7 @@ class ProjectBuilder(FilterBuilder):
|
|||
pipeline.put({"op": "gather_metrics", "test": self.instance})
|
||||
except BuildError as e:
|
||||
logger.error(str(e))
|
||||
self.instance.status = TestInstanceStatus.ERROR
|
||||
self.instance.status = TwisterStatus.ERROR
|
||||
self.instance.reason = str(e)
|
||||
pipeline.put({"op": "report", "test": self.instance})
|
||||
else:
|
||||
|
|
@ -714,7 +714,7 @@ class ProjectBuilder(FilterBuilder):
|
|||
if not self.options.coverage:
|
||||
if self.options.prep_artifacts_for_testing:
|
||||
pipeline.put({"op": "cleanup", "mode": "device", "test": self.instance})
|
||||
elif self.options.runtime_artifact_cleanup == "pass" and self.instance.status == TestInstanceStatus.PASS:
|
||||
elif self.options.runtime_artifact_cleanup == "pass" and self.instance.status == TwisterStatus.PASS:
|
||||
pipeline.put({"op": "cleanup", "mode": "passed", "test": self.instance})
|
||||
elif self.options.runtime_artifact_cleanup == "all":
|
||||
pipeline.put({"op": "cleanup", "mode": "all", "test": self.instance})
|
||||
|
|
@ -967,8 +967,8 @@ class ProjectBuilder(FilterBuilder):
|
|||
if results.iteration == 1:
|
||||
results.cases += len(instance.testcases)
|
||||
|
||||
if instance.status in [TestInstanceStatus.ERROR, TestInstanceStatus.FAIL]:
|
||||
if instance.status == TestInstanceStatus.ERROR:
|
||||
if instance.status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
|
||||
if instance.status == TwisterStatus.ERROR:
|
||||
results.error += 1
|
||||
txt = " ERROR "
|
||||
else:
|
||||
|
|
@ -987,17 +987,17 @@ class ProjectBuilder(FilterBuilder):
|
|||
instance.reason))
|
||||
if not self.options.verbose:
|
||||
self.log_info_file(self.options.inline_logs)
|
||||
elif instance.status in [TestInstanceStatus.SKIP, TestInstanceStatus.FILTER]:
|
||||
elif instance.status in [TwisterStatus.SKIP, TwisterStatus.FILTER]:
|
||||
status = Fore.YELLOW + "SKIPPED" + Fore.RESET
|
||||
results.skipped_configs += 1
|
||||
# test cases skipped at the test instance level
|
||||
results.skipped_cases += len(instance.testsuite.testcases)
|
||||
elif instance.status == TestInstanceStatus.PASS:
|
||||
elif instance.status == TwisterStatus.PASS:
|
||||
status = Fore.GREEN + "PASSED" + Fore.RESET
|
||||
results.passed += 1
|
||||
for case in instance.testcases:
|
||||
# test cases skipped at the test case level
|
||||
if case.status == TestCaseStatus.SKIP:
|
||||
if case.status == TwisterStatus.SKIP:
|
||||
results.skipped_cases += 1
|
||||
else:
|
||||
logger.debug(f"Unknown status = {instance.status}")
|
||||
|
|
@@ -1006,7 +1006,7 @@ class ProjectBuilder(FilterBuilder):
if self.options.verbose:
if self.options.cmake_only:
more_info = "cmake"
elif instance.status in [TestInstanceStatus.SKIP, TestInstanceStatus.FILTER]:
elif instance.status in [TwisterStatus.SKIP, TwisterStatus.FILTER]:
more_info = instance.reason
else:
if instance.handler.ready and instance.run:

@@ -1019,7 +1019,7 @@ class ProjectBuilder(FilterBuilder):
else:
more_info = "build"
if ( instance.status in [TestInstanceStatus.ERROR, TestInstanceStatus.FAIL]
if ( instance.status in [TwisterStatus.ERROR, TwisterStatus.FAIL]
and hasattr(self.instance.handler, 'seed')
and self.instance.handler.seed is not None ):
more_info += "/seed: " + str(self.options.seed)

@@ -1027,7 +1027,7 @@ class ProjectBuilder(FilterBuilder):
results.done, total_tests_width, total_to_do , instance.platform.name,
instance.testsuite.name, status, more_info))
if instance.status in [TestInstanceStatus.ERROR, TestInstanceStatus.FAIL]:
if instance.status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
self.log_info_file(self.options.inline_logs)
else:
completed_perc = 0

@@ -1110,7 +1110,7 @@ class ProjectBuilder(FilterBuilder):
harness.instance = self.instance
harness.build()
except ConfigurationError as error:
self.instance.status = TestInstanceStatus.ERROR
self.instance.status = TwisterStatus.ERROR
self.instance.reason = str(error)
logger.error(self.instance.reason)
return

@@ -1122,7 +1122,7 @@ class ProjectBuilder(FilterBuilder):
if instance.handler.ready:
logger.debug(f"Reset instance status from '{instance.status}' to None before run.")
instance.status = TestInstanceStatus.NONE
instance.status = TwisterStatus.NONE
if instance.handler.type_str == "device":
instance.handler.duts = self.duts

@@ -1140,7 +1140,7 @@ class ProjectBuilder(FilterBuilder):
try:
harness.configure(instance)
except ConfigurationError as error:
instance.status = TestInstanceStatus.ERROR
instance.status = TwisterStatus.ERROR
instance.reason = str(error)
logger.error(instance.reason)
return

@@ -1168,7 +1168,7 @@ class ProjectBuilder(FilterBuilder):
@staticmethod
def calc_size(instance: TestInstance, from_buildlog: bool):
if instance.status not in [TestInstanceStatus.ERROR, TestInstanceStatus.FAIL, TestInstanceStatus.SKIP]:
if instance.status not in [TwisterStatus.ERROR, TwisterStatus.FAIL, TwisterStatus.SKIP]:
if not instance.platform.type in ["native", "qemu", "unit"]:
generate_warning = bool(instance.platform.type == "mcu")
size_calc = instance.calculate_sizes(from_buildlog=from_buildlog, generate_warning=generate_warning)

@@ -1279,12 +1279,12 @@ class TwisterRunner:
the static filter stats. So need to prepare them before pipline starts.
'''
for instance in self.instances.values():
if instance.status == TestInstanceStatus.FILTER and not instance.reason == 'runtime filter':
if instance.status == TwisterStatus.FILTER and not instance.reason == 'runtime filter':
self.results.skipped_filter += 1
self.results.skipped_configs += 1
self.results.skipped_cases += len(instance.testsuite.testcases)
self.results.cases += len(instance.testsuite.testcases)
elif instance.status == TestInstanceStatus.ERROR:
elif instance.status == TwisterStatus.ERROR:
self.results.error += 1

def show_brief(self):

@@ -1300,15 +1300,15 @@ class TwisterRunner:
if build_only:
instance.run = False
no_retry_statuses = [TestInstanceStatus.PASS, TestInstanceStatus.SKIP, TestInstanceStatus.FILTER]
no_retry_statuses = [TwisterStatus.PASS, TwisterStatus.SKIP, TwisterStatus.FILTER]
if not retry_build_errors:
no_retry_statuses.append("error")
if instance.status not in no_retry_statuses:
logger.debug(f"adding {instance.name}")
if instance.status != TestInstanceStatus.NONE:
if instance.status != TwisterStatus.NONE:
instance.retries += 1
instance.status = TestInstanceStatus.NONE
instance.status = TwisterStatus.NONE
# Check if cmake package_helper script can be run in advance.
instance.filter_stages = []
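Note: the retry hunk above only re-queues instances whose status falls outside a no-retry list. A minimal sketch of that decision, assuming an illustrative instance object with a status attribute and a retry_build_errors flag (neither taken from this diff); appending the plain string "error" still matches because TwisterStatus keeps the str mixin, so TwisterStatus.ERROR compares equal to 'error':

    from twisterlib.statuses import TwisterStatus

    def needs_retry(instance, retry_build_errors: bool) -> bool:
        # Terminal statuses that never re-enter the pipeline.
        no_retry_statuses = [TwisterStatus.PASS, TwisterStatus.SKIP, TwisterStatus.FILTER]
        if not retry_build_errors:
            # The str mixin makes the raw string 'error' compare equal to TwisterStatus.ERROR.
            no_retry_statuses.append("error")
        return instance.status not in no_retry_statuses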
@@ -9,76 +9,21 @@ Status classes to be used instead of str statuses.
from enum import Enum

class TestInstanceStatus(str, Enum):
class TwisterStatus(str, Enum):
def __str__(self):
return str(self.value)

NONE = None # to preserve old functionality
ERROR = 'error'
FAIL = 'failed'
FILTER = 'filtered'
PASS = 'passed'
SKIP = 'skipped'

# Possible direct assignments:
# * TestSuiteStatus <- TestInstanceStatus
class TestSuiteStatus(str, Enum):
def __str__(self):
return str(self.value)

NONE = None # to preserve old functionality
FILTER = 'filtered'
PASS = 'passed'
SKIP = 'skipped'

# Possible direct assignments:
# * TestCaseStatus <- TestInstanceStatus
class TestCaseStatus(str, Enum):
def __str__(self):
return str(self.value)

NONE = None # to preserve old functionality
# All statuses below this comment can be used for TestCase
BLOCK = 'blocked'
ERROR = 'error'
FAIL = 'failed'
FILTER = 'filtered'
PASS = 'passed'
SKIP = 'skipped'
STARTED = 'started'

# All statuses below this comment can be used for TestSuite
# All statuses below this comment can be used for TestInstance
FILTER = 'filtered'

# Possible direct assignments:
# * OutputStatus <- HarnessStatus
class OutputStatus(str, Enum):
def __str__(self):
return str(self.value)

NONE = None # to preserve old functionality
BYTE = 'unexpected byte'
EOF = 'unexpected eof'
FAIL = 'failed'
TIMEOUT = 'timeout'

# Possible direct assignments:
# * TestInstanceStatus <- HarnessStatus
class HarnessStatus(str, Enum):
def __str__(self):
return str(self.value)

NONE = None # to preserve old functionality
# All statuses below this comment can be used for Harness
NONE = None
ERROR = 'error'
FAIL = 'failed'
PASS = 'passed'
SKIP = 'skipped'

class ReportStatus(str, Enum):
def __str__(self):
return str(self.value)

ERROR = 'error'
FAIL = 'failure' # Note the difference!
SKIP = 'skipped'
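The hunk above merges the separate per-object status enums into the single TwisterStatus while keeping the str mixin and the __str__ override, so existing string comparisons and report serialization keep working. A small illustrative sketch, not part of the commit:

    from twisterlib.statuses import TwisterStatus

    status = TwisterStatus.FAIL
    print(str(status))              # "failed" - __str__ returns the underlying value
    print(status == 'failed')       # True - the str mixin keeps old string comparisons working
    print(TwisterStatus('skipped')) # lookup by value, e.g. for a status read from a report
    print(TwisterStatus['ERROR'])   # lookup by name, as the new property setters do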
@@ -4,6 +4,7 @@
# Copyright 2022 NXP
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
from enum import Enum
import os
import hashlib
import random

@@ -16,7 +17,7 @@ from twisterlib.testsuite import TestCase, TestSuite
from twisterlib.platform import Platform
from twisterlib.error import BuildError
from twisterlib.size_calc import SizeCalculator
from twisterlib.statuses import TestCaseStatus, TestInstanceStatus
from twisterlib.statuses import TwisterStatus
from twisterlib.handlers import (
Handler,
SimulationHandler,

@@ -47,7 +48,7 @@ class TestInstance:
self.testsuite: TestSuite = testsuite
self.platform: Platform = platform
self.status = TestInstanceStatus.NONE
self._status = TwisterStatus.NONE
self.reason = "Unknown"
self.metrics = dict()
self.handler = None

@@ -93,9 +94,24 @@ class TestInstance:
cw.writeheader()
cw.writerows(self.recording)

@property
def status(self) -> TwisterStatus:
return self._status

@status.setter
def status(self, value : TwisterStatus) -> None:
# Check for illegal assignments by value
try:
key = value.name if isinstance(value, Enum) else value
self._status = TwisterStatus[key]
except KeyError:
logger.warning(f'TestInstance assigned status "{value}"'
f' without an equivalent in TwisterStatus.'
f' Assignment was ignored.')

def add_filter(self, reason, filter_type):
self.filters.append({'type': filter_type, 'reason': reason })
self.status = TestInstanceStatus.FILTER
self.status = TwisterStatus.FILTER
self.reason = reason
self.filter_type = filter_type

@@ -125,9 +141,9 @@ class TestInstance:
def add_missing_case_status(self, status, reason=None):
for case in self.testcases:
if case.status == TestCaseStatus.STARTED:
case.status = TestCaseStatus.FAIL
elif case.status == TestCaseStatus.NONE:
if case.status == TwisterStatus.STARTED:
case.status = TwisterStatus.FAIL
elif case.status == TwisterStatus.NONE:
case.status = status
if reason:
case.reason = reason
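The new property/setter pair above routes every status assignment through TwisterStatus[...] and turns an unknown value into a logged warning instead of an exception. A self-contained sketch of the same guard, with a toy enum standing in for the real TwisterStatus (all names here are illustrative only):

    import logging
    from enum import Enum

    logger = logging.getLogger('twister')

    class Status(str, Enum):      # stand-in for twisterlib.statuses.TwisterStatus
        NONE = 'none'
        FAIL = 'failed'

    class Holder:                 # stand-in for TestInstance/TestCase/TestSuite
        def __init__(self):
            self._status = Status.NONE

        @property
        def status(self) -> Status:
            return self._status

        @status.setter
        def status(self, value) -> None:
            try:
                # Enum members map via their name; plain strings must match a member name.
                key = value.name if isinstance(value, Enum) else value
                self._status = Status[key]
            except KeyError:
                logger.warning('status "%s" has no equivalent member; assignment ignored', value)

    h = Holder()
    h.status = Status.FAIL        # accepted
    h.status = 'FAIL'             # accepted - looked up by member name
    h.status = 'failed'           # ignored with a warning: lookup is by name, not value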
@@ -17,7 +17,6 @@ import copy
import shutil
import random
import snippets
from colorama import Fore
from pathlib import Path
from argparse import Namespace

@@ -33,7 +32,7 @@ from twisterlib.testsuite import TestSuite, scan_testsuite_path
from twisterlib.error import TwisterRuntimeError
from twisterlib.platform import Platform
from twisterlib.config_parser import TwisterConfigParser
from twisterlib.statuses import TestCaseStatus, TestInstanceStatus, TestSuiteStatus
from twisterlib.statuses import TwisterStatus
from twisterlib.testinstance import TestInstance
from twisterlib.quarantine import Quarantine

@@ -287,7 +286,7 @@ class TestPlan:
# at runtime, ignore the cases we already know going to be skipped.
# This fixes an issue where some sets would get majority of skips and
# basically run nothing beside filtering.
to_run = {k : v for k,v in self.instances.items() if v.status == TestInstanceStatus.NONE}
to_run = {k : v for k,v in self.instances.items() if v.status == TwisterStatus.NONE}
total = len(to_run)
per_set = int(total / sets)
num_extra_sets = total - (per_set * sets)

@@ -304,8 +303,8 @@ class TestPlan:
end = start + per_set
sliced_instances = islice(to_run.items(), start, end)
skipped = {k : v for k,v in self.instances.items() if v.status == TestInstanceStatus.SKIP}
errors = {k : v for k,v in self.instances.items() if v.status == TestInstanceStatus.ERROR}
skipped = {k : v for k,v in self.instances.items() if v.status == TwisterStatus.SKIP}
errors = {k : v for k,v in self.instances.items() if v.status == TwisterStatus.ERROR}
self.instances = OrderedDict(sliced_instances)
if subset == 1:
# add all pre-filtered tests that are skipped or got error status

@@ -625,21 +624,22 @@ class TestPlan:
instance.metrics['available_ram'] = ts.get('available_ram', 0)
instance.metrics['available_rom'] = ts.get('available_rom', 0)
status = ts.get('status', TestSuiteStatus.NONE)
status = ts.get('status')
status = TwisterStatus(status) if status else TwisterStatus.NONE
reason = ts.get("reason", "Unknown")
if status in [TestInstanceStatus.ERROR, TestInstanceStatus.FAIL]:
if status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
if self.options.report_summary is not None:
instance.status = status
instance.reason = reason
self.instance_fail_count += 1
else:
instance.status = TestInstanceStatus.NONE
instance.status = TwisterStatus.NONE
instance.reason = None
instance.retries += 1
# test marked as passed (built only) but can run when
# --test-only is used. Reset status to capture new results.
elif status == TestInstanceStatus.PASS and instance.run and self.options.test_only:
instance.status = TestInstanceStatus.NONE
elif status == TwisterStatus.PASS and instance.run and self.options.test_only:
instance.status = TwisterStatus.NONE
instance.reason = None
else:
instance.status = status

@@ -649,13 +649,14 @@ class TestPlan:
for tc in ts.get('testcases', []):
identifier = tc['identifier']
tc_status = tc.get('status', TestCaseStatus.NONE)
tc_status = tc.get('status')
tc_status = TwisterStatus(tc_status) if tc_status else TwisterStatus.NONE
tc_reason = None
# we set reason only if status is valid, it might have been
# reset above...
if instance.status != TestInstanceStatus.NONE:
if instance.status != TwisterStatus.NONE:
tc_reason = tc.get('reason')
if tc_status != TestCaseStatus.NONE:
if tc_status != TwisterStatus.NONE:
case = instance.set_case_status_by_name(identifier, tc_status, tc_reason)
case.duration = tc.get('execution_time', 0)
if tc.get('log'):

@@ -903,7 +904,7 @@ class TestPlan:
for this_snippet in snippet_args['snippets']:
if this_snippet not in found_snippets:
logger.error(f"Can't find snippet '%s' for test '%s'", this_snippet, ts.name)
instance.status = TestInstanceStatus.ERROR
instance.status = TwisterStatus.ERROR
instance.reason = f"Snippet {this_snippet} not found"
missing_snippet = True
break

@@ -1014,14 +1015,14 @@ class TestPlan:
self.selected_platforms = set(p.platform.name for p in self.instances.values())
filtered_instances = list(filter(lambda item: item.status == TestInstanceStatus.FILTER, self.instances.values()))
filtered_instances = list(filter(lambda item: item.status == TwisterStatus.FILTER, self.instances.values()))
for filtered_instance in filtered_instances:
change_skip_to_error_if_integration(self.options, filtered_instance)
filtered_instance.add_missing_case_status(filtered_instance.status)
self.filtered_platforms = set(p.platform.name for p in self.instances.values()
if p.status != TestInstanceStatus.SKIP )
if p.status != TwisterStatus.SKIP )

def add_instances(self, instance_list):
for instance in instance_list:

@@ -1062,7 +1063,7 @@ class TestPlan:
os.mkdir(links_dir_path)
for instance in self.instances.values():
if instance.status != TestInstanceStatus.SKIP:
if instance.status != TwisterStatus.SKIP:
self._create_build_dir_link(links_dir_path, instance)

def _create_build_dir_link(self, links_dir_path, instance):

@@ -1102,5 +1103,5 @@ def change_skip_to_error_if_integration(options, instance):
Filters.QUARANTINE}
if filters.intersection(ignore_filters):
return
instance.status = TestInstanceStatus.ERROR
instance.status = TwisterStatus.ERROR
instance.reason += " but is one of the integration platforms"
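When a previous run is loaded back from the JSON report, the raw status string is converted into the enum with TwisterStatus(status) if status else TwisterStatus.NONE, as in the hunk at line 624 above. A short sketch of that round-trip, using an illustrative report entry rather than a real twister.json:

    from twisterlib.statuses import TwisterStatus

    ts = {'status': 'failed', 'reason': 'Timeout'}   # illustrative testsuite entry

    raw = ts.get('status')
    status = TwisterStatus(raw) if raw else TwisterStatus.NONE
    reason = ts.get('reason', 'Unknown')

    if status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
        print(f'previously {status}: {reason} - candidate for retry or summary report')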
@@ -3,6 +3,7 @@
# Copyright (c) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from enum import Enum
import os
from pathlib import Path
import re

@@ -15,7 +16,7 @@ from typing import List
from twisterlib.mixins import DisablePyTestCollectionMixin
from twisterlib.environment import canonical_zephyr_base
from twisterlib.error import TwisterException, TwisterRuntimeError
from twisterlib.statuses import TestCaseStatus
from twisterlib.statuses import TwisterStatus

logger = logging.getLogger('twister')
logger.setLevel(logging.DEBUG)

@@ -359,12 +360,27 @@ class TestCase(DisablePyTestCollectionMixin):
def __init__(self, name=None, testsuite=None):
self.duration = 0
self.name = name
self.status = TestCaseStatus.NONE
self._status = TwisterStatus.NONE
self.reason = None
self.testsuite = testsuite
self.output = ""
self.freeform = False

@property
def status(self) -> TwisterStatus:
return self._status

@status.setter
def status(self, value : TwisterStatus) -> None:
# Check for illegal assignments by value
try:
key = value.name if isinstance(value, Enum) else value
self._status = TwisterStatus[key]
except KeyError:
logger.warning(f'TestCase assigned status "{value}"'
f' without an equivalent in TwisterStatus.'
f' Assignment was ignored.')

def __lt__(self, other):
return self.name < other.name

@@ -413,9 +429,25 @@ class TestSuite(DisablePyTestCollectionMixin):
self.ztest_suite_names = []

self._status = TwisterStatus.NONE

if data:
self.load(data)

@property
def status(self) -> TwisterStatus:
return self._status

@status.setter
def status(self, value : TwisterStatus) -> None:
# Check for illegal assignments by value
try:
key = value.name if isinstance(value, Enum) else value
self._status = TwisterStatus[key]
except KeyError:
logger.warning(f'TestSuite assigned status "{value}"'
f' without an equivalent in TwisterStatus.'
f' Assignment was ignored.')

def load(self, data):
for k, v in data.items():
@@ -12,7 +12,7 @@ import time
from colorama import Fore

from twisterlib.statuses import TestInstanceStatus
from twisterlib.statuses import TwisterStatus
from twisterlib.testplan import TestPlan
from twisterlib.reports import Reporting
from twisterlib.hardwaremap import HardwareMap

@@ -142,7 +142,7 @@ def main(options, default_options):
# command line
for i in tplan.instances.values():
if i.status == TestInstanceStatus.FILTER:
if i.status == TwisterStatus.FILTER:
if options.platform and i.platform.name not in options.platform:
continue
logger.debug(
@@ -169,7 +169,7 @@ def test_if_report_is_parsed(pytester, testinstance: TestInstance):
pytest_harness._update_test_status()
assert pytest_harness.state == "passed"
assert pytest_harness.status == "passed"
assert testinstance.status == "passed"
assert len(testinstance.testcases) == 2
for tc in testinstance.testcases:

@@ -199,7 +199,7 @@ def test_if_report_with_error(pytester, testinstance: TestInstance):
pytest_harness._update_test_status()
assert pytest_harness.state == "failed"
assert pytest_harness.status == "failed"
assert testinstance.status == "failed"
assert len(testinstance.testcases) == 2
for tc in testinstance.testcases:

@@ -235,7 +235,7 @@ def test_if_report_with_skip(pytester, testinstance: TestInstance):
pytest_harness._update_test_status()
assert pytest_harness.state == "skipped"
assert pytest_harness.status == "skipped"
assert testinstance.status == "skipped"
assert len(testinstance.testcases) == 2
for tc in testinstance.testcases:

@@ -265,7 +265,7 @@ def test_if_report_with_filter(pytester, testinstance: TestInstance):
pytest_harness.configure(testinstance)
pytest_harness.report_file = report_file
pytest_harness._update_test_status()
assert pytest_harness.state == "passed"
assert pytest_harness.status == "passed"
assert testinstance.status == "passed"
assert len(testinstance.testcases) == 1

@@ -291,5 +291,5 @@ def test_if_report_with_no_collected(pytester, testinstance: TestInstance):
pytest_harness.configure(testinstance)
pytest_harness.report_file = report_file
pytest_harness._update_test_status()
assert pytest_harness.state == "skipped"
assert pytest_harness.status == "skipped"
assert testinstance.status == "skipped"
@@ -25,7 +25,7 @@ import twisterlib.harness
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")

from twisterlib.error import TwisterException
from twisterlib.statuses import HarnessStatus, OutputStatus, TestCaseStatus, TestInstanceStatus
from twisterlib.statuses import TwisterStatus
from twisterlib.handlers import (
Handler,
BinaryHandler,

@@ -56,7 +56,7 @@ def mocked_instance(tmp_path):
return_value=2
)
instance.status = TestInstanceStatus.NONE
instance.status = TwisterStatus.NONE
instance.reason = 'Unknown'
return instance

@@ -128,7 +128,7 @@ def test_handler_final_handle_actions(mocked_instance):
handler.suite_name_check = True
harness = twisterlib.harness.Test()
harness.state = mock.Mock()
harness.status = mock.Mock()
harness.detected_suite_names = mock.Mock()
harness.matched_run_id = False
harness.run_id_exists = True

@@ -138,10 +138,10 @@ def test_handler_final_handle_actions(mocked_instance):
handler._final_handle_actions(harness, handler_time)
assert handler.instance.status == TestInstanceStatus.FAIL
assert handler.instance.status == TwisterStatus.FAIL
assert handler.instance.execution_time == handler_time
assert handler.instance.reason == 'RunID mismatch'
assert all(testcase.status == TestCaseStatus.FAIL for \
assert all(testcase.status == TwisterStatus.FAIL for \
testcase in handler.instance.testcases)
handler.instance.reason = 'This reason shan\'t be changed.'

@@ -171,14 +171,14 @@ def test_handler_verify_ztest_suite_name(
instance = mocked_instance
type(instance.testsuite).ztest_suite_names = ['dummy_testsuite_name']
harness_state = HarnessStatus.PASS
harness_status = TwisterStatus.PASS
handler_time = mock.Mock()
with mock.patch.object(Handler, '_missing_suite_name') as _missing_mocked:
handler = Handler(instance)
handler._verify_ztest_suite_name(
harness_state,
harness_status,
detected_suite_names,
handler_time
)

@@ -202,11 +202,11 @@ def test_handler_missing_suite_name(mocked_instance):
handler._missing_suite_name(expected_suite_names, handler_time)
assert handler.instance.status == TestInstanceStatus.FAIL
assert handler.instance.status == TwisterStatus.FAIL
assert handler.instance.execution_time == handler_time
assert handler.instance.reason == 'Testsuite mismatch'
assert all(
testcase.status == TestCaseStatus.FAIL for testcase in handler.instance.testcases
testcase.status == TwisterStatus.FAIL for testcase in handler.instance.testcases
)

@@ -295,7 +295,7 @@ def test_binaryhandler_try_kill_process_by_pid(mocked_instance):
TESTDATA_3 = [
(
[b'This\\r\\n', b'is\r', b'a short', b'file.'],
mock.Mock(state=HarnessStatus.NONE, capture_coverage=False),
mock.Mock(status=TwisterStatus.NONE, capture_coverage=False),
[
mock.call('This\\r\\n'),
mock.call('is\r'),

@@ -313,7 +313,7 @@ TESTDATA_3 = [
),
(
[b'Too much.'] * 120, # Should be more than the timeout
mock.Mock(state=HarnessStatus.PASS, capture_coverage=False),
mock.Mock(status=TwisterStatus.PASS, capture_coverage=False),
None,
None,
True,

@@ -321,7 +321,7 @@ TESTDATA_3 = [
),
(
[b'Too much.'] * 120, # Should be more than the timeout
mock.Mock(state=HarnessStatus.PASS, capture_coverage=False),
mock.Mock(status=TwisterStatus.PASS, capture_coverage=False),
None,
None,
True,

@@ -329,7 +329,7 @@ TESTDATA_3 = [
),
(
[b'Too much.'] * 120, # Should be more than the timeout
mock.Mock(state=HarnessStatus.PASS, capture_coverage=True),
mock.Mock(status=TwisterStatus.PASS, capture_coverage=True),
None,
None,
False,

@@ -344,7 +344,7 @@ TESTDATA_3 = [
ids=[
'no timeout',
'timeout',
'timeout with harness state',
'timeout with harness status',
'timeout with capture_coverage, wait timeout'
]
)

@@ -506,22 +506,22 @@ def test_binaryhandler_create_env(
TESTDATA_6 = [
(HarnessStatus.NONE, False, 2, True, TestInstanceStatus.FAIL, 'Valgrind error', False),
(HarnessStatus.NONE, False, 1, False, TestInstanceStatus.FAIL, 'Failed', False),
(HarnessStatus.FAIL, False, 0, False, TestInstanceStatus.FAIL, 'Failed', False),
(TwisterStatus.NONE, False, 2, True, TwisterStatus.FAIL, 'Valgrind error', False),
(TwisterStatus.NONE, False, 1, False, TwisterStatus.FAIL, 'Failed', False),
(TwisterStatus.FAIL, False, 0, False, TwisterStatus.FAIL, 'Failed', False),
('success', False, 0, False, 'success', 'Unknown', False),
(HarnessStatus.NONE, True, 1, True, TestInstanceStatus.FAIL, 'Timeout', True),
(TwisterStatus.NONE, True, 1, True, TwisterStatus.FAIL, 'Timeout', True),
]

@pytest.mark.parametrize(
'harness_state, terminated, returncode, enable_valgrind,' \
'harness_status, terminated, returncode, enable_valgrind,' \
' expected_status, expected_reason, do_add_missing',
TESTDATA_6,
ids=['valgrind error', 'failed', 'harness failed', 'custom success', 'no state']
ids=['valgrind error', 'failed', 'harness failed', 'custom success', 'no status']
)
def test_binaryhandler_update_instance_info(
mocked_instance,
harness_state,
harness_status,
terminated,
returncode,
enable_valgrind,

@@ -537,7 +537,7 @@ def test_binaryhandler_update_instance_info(
missing_mock = mock.Mock()
handler.instance.add_missing_case_status = missing_mock
handler._update_instance_info(harness_state, handler_time)
handler._update_instance_info(harness_status, handler_time)
assert handler.instance.execution_time == handler_time

@@ -545,7 +545,7 @@ def test_binaryhandler_update_instance_info(
assert handler.instance.reason == expected_reason
if do_add_missing:
missing_mock.assert_called_once_with(TestCaseStatus.BLOCK, expected_reason)
missing_mock.assert_called_once_with(TwisterStatus.BLOCK, expected_reason)

TESTDATA_7 = [

@@ -656,13 +656,13 @@ TESTDATA_9 = [
@pytest.mark.parametrize(
'success_count, in_waiting_count, oserror_count, readline_error_count,'
' haltless_count, stateless_count, end_by_halt, end_by_close,'
' end_by_state, expected_line_count',
' haltless_count, statusless_count, end_by_halt, end_by_close,'
' end_by_status, expected_line_count',
TESTDATA_9,
ids=[
'halt event',
'serial closes',
'harness state with errors'
'harness status with errors'
]
)
def test_devicehandler_monitor_serial(

@@ -672,10 +672,10 @@ def test_devicehandler_monitor_serial(
oserror_count,
readline_error_count,
haltless_count,
stateless_count,
statusless_count,
end_by_halt,
end_by_close,
end_by_state,
end_by_status,
expected_line_count
):
is_open_iter = iter(lambda: True, False)

@@ -696,8 +696,8 @@ def test_devicehandler_monitor_serial(
is_set_iter = [False] * haltless_count + [True] \
if end_by_halt else iter(lambda: False, True)
state_iter = [HarnessStatus.NONE] * stateless_count + [HarnessStatus.PASS] \
if end_by_state else iter(lambda: HarnessStatus.NONE, HarnessStatus.PASS)
status_iter = [TwisterStatus.NONE] * statusless_count + [TwisterStatus.PASS] \
if end_by_status else iter(lambda: TwisterStatus.NONE, TwisterStatus.PASS)
halt_event = mock.Mock(is_set=mock.Mock(side_effect=is_set_iter))
ser = mock.Mock(

@@ -709,10 +709,10 @@ def test_devicehandler_monitor_serial(
return_value=False
)
harness = mock.Mock(capture_coverage=False)
type(harness).state=mock.PropertyMock(side_effect=state_iter)
type(harness).status=mock.PropertyMock(side_effect=status_iter)
handler = DeviceHandler(mocked_instance, 'build')
handler.options = mock.Mock(enable_coverage=not end_by_state)
handler.options = mock.Mock(enable_coverage=not end_by_status)
with mock.patch('builtins.open', mock.mock_open(read_data='')):
handler.monitor_serial(ser, halt_event, harness)

@@ -720,6 +720,8 @@ def test_devicehandler_monitor_serial(
if not end_by_close:
ser.close.assert_called_once()

print(harness.call_args_list)

harness.handle.assert_has_calls(
[mock.call(f'line no {idx}') for idx in range(expected_line_count)]
)

@@ -944,7 +946,7 @@ def test_devicehandler_get_hardware(
if raise_exception:
assert 'dummy message' in caplog.text.lower()
assert mocked_instance.status == TestInstanceStatus.FAIL
assert mocked_instance.status == TwisterStatus.FAIL
assert mocked_instance.reason == 'dummy message'
else:
assert hardware == expected_hardware

@@ -1084,21 +1086,21 @@ def test_devicehandler_create_command(
TESTDATA_14 = [
('success', False, 'success', 'Unknown', False),
(HarnessStatus.FAIL, False, TestInstanceStatus.FAIL, 'Failed', True),
(HarnessStatus.ERROR, False, TestInstanceStatus.ERROR, 'Unknown', True),
(HarnessStatus.NONE, True, TestInstanceStatus.NONE, 'Unknown', False),
(HarnessStatus.NONE, False, TestInstanceStatus.FAIL, 'Timeout', True),
(TwisterStatus.FAIL, False, TwisterStatus.FAIL, 'Failed', True),
(TwisterStatus.ERROR, False, TwisterStatus.ERROR, 'Unknown', True),
(TwisterStatus.NONE, True, TwisterStatus.NONE, 'Unknown', False),
(TwisterStatus.NONE, False, TwisterStatus.FAIL, 'Timeout', True),
]

@pytest.mark.parametrize(
'harness_state, flash_error,' \
'harness_status, flash_error,' \
' expected_status, expected_reason, do_add_missing',
TESTDATA_14,
ids=['custom success', 'failed', 'error', 'flash error', 'no status']
)
def test_devicehandler_update_instance_info(
mocked_instance,
harness_state,
harness_status,
flash_error,
expected_status,
expected_reason,

@@ -1109,7 +1111,7 @@ def test_devicehandler_update_instance_info(
missing_mock = mock.Mock()
handler.instance.add_missing_case_status = missing_mock
handler._update_instance_info(harness_state, handler_time, flash_error)
handler._update_instance_info(harness_status, handler_time, flash_error)
assert handler.instance.execution_time == handler_time

@@ -1187,7 +1189,7 @@ def test_devicehandler_create_serial_connection(
assert result is not None
if expected_exception:
assert handler.instance.status == TestInstanceStatus.FAIL
assert handler.instance.status == TwisterStatus.FAIL
assert handler.instance.reason == 'Serial Device Error'
missing_mock.assert_called_once_with('blocked', 'Serial Device Error')

@@ -1242,20 +1244,20 @@ def test_devicehandler_get_serial_device(
TESTDATA_17 = [
(False, False, False, False, None, False, False,
TestInstanceStatus.NONE, None, []),
TwisterStatus.NONE, None, []),
(True, True, False, False, None, False, False,
TestInstanceStatus.NONE, None, []),
TwisterStatus.NONE, None, []),
(True, False, True, False, None, False, False,
TestInstanceStatus.ERROR, 'Device issue (Flash error)', []),
TwisterStatus.ERROR, 'Device issue (Flash error)', []),
(True, False, False, True, None, False, False,
TestInstanceStatus.ERROR, 'Device issue (Timeout)', ['Flash operation timed out.']),
TwisterStatus.ERROR, 'Device issue (Timeout)', ['Flash operation timed out.']),
(True, False, False, False, 1, False, False,
TestInstanceStatus.ERROR, 'Device issue (Flash error?)', []),
TwisterStatus.ERROR, 'Device issue (Flash error?)', []),
(True, False, False, False, 0, True, False,
TestInstanceStatus.NONE, None, ['Timed out while monitoring serial output on IPName']),
TwisterStatus.NONE, None, ['Timed out while monitoring serial output on IPName']),
(True, False, False, False, 0, False, True,
TestInstanceStatus.NONE, None, ["Terminating serial-pty:'Serial PTY'",
"Terminated serial-pty:'Serial PTY', stdout:'', stderr:''"]),
TwisterStatus.NONE, None, ["Terminating serial-pty:'Serial PTY'",
"Terminated serial-pty:'Serial PTY', stdout:'', stderr:''"]),
]

@pytest.mark.parametrize(

@@ -1546,9 +1548,9 @@ TESTDATA_21 = [
0,
False,
None,
'good dummy state',
'good dummy status',
False,
TestInstanceStatus.NONE,
TwisterStatus.NONE,
None,
False
),

@@ -1556,9 +1558,9 @@ TESTDATA_21 = [
1,
True,
None,
'good dummy state',
'good dummy status',
False,
TestInstanceStatus.NONE,
TwisterStatus.NONE,
None,
False
),

@@ -1566,9 +1568,9 @@ TESTDATA_21 = [
0,
False,
None,
HarnessStatus.NONE,
TwisterStatus.NONE,
True,
TestInstanceStatus.FAIL,
TwisterStatus.FAIL,
'Timeout',
True
),

@@ -1576,9 +1578,9 @@ TESTDATA_21 = [
1,
False,
None,
HarnessStatus.NONE,
TwisterStatus.NONE,
False,
TestInstanceStatus.FAIL,
TwisterStatus.FAIL,
'Exited with 1',
True
),

@@ -1586,9 +1588,9 @@ TESTDATA_21 = [
1,
False,
'preexisting reason',
'good dummy state',
'good dummy status',
False,
TestInstanceStatus.FAIL,
TwisterStatus.FAIL,
'preexisting reason',
True
),

@@ -1596,7 +1598,7 @@ TESTDATA_21 = [
@pytest.mark.parametrize(
'self_returncode, self_ignore_qemu_crash,' \
' self_instance_reason, harness_state, is_timeout,' \
' self_instance_reason, harness_status, is_timeout,' \
' expected_status, expected_reason, expected_called_missing_case',
TESTDATA_21,
ids=['not failed', 'qemu ignore', 'timeout', 'bad returncode', 'other fail']

@@ -1606,7 +1608,7 @@ def test_qemuhandler_update_instance_info(
self_returncode,
self_ignore_qemu_crash,
self_instance_reason,
harness_state,
harness_status,
is_timeout,
expected_status,
expected_reason,

@@ -1619,14 +1621,14 @@ def test_qemuhandler_update_instance_info(
handler.returncode = self_returncode
handler.ignore_qemu_crash = self_ignore_qemu_crash
handler._update_instance_info(harness_state, is_timeout)
handler._update_instance_info(harness_status, is_timeout)
assert handler.instance.status == expected_status
assert handler.instance.reason == expected_reason
if expected_called_missing_case:
mocked_instance.add_missing_case_status.assert_called_once_with(
TestCaseStatus.BLOCK
TwisterStatus.BLOCK
)

@@ -1732,11 +1734,11 @@ def test_qemuhandler_thread_close_files(is_pid, is_lookup_error):
TESTDATA_24 = [
(OutputStatus.FAIL, 'timeout', TestInstanceStatus.FAIL, 'timeout'),
(OutputStatus.FAIL, 'Execution error', TestInstanceStatus.FAIL, 'Execution error'),
(OutputStatus.FAIL, 'unexpected eof', TestInstanceStatus.FAIL, 'unexpected eof'),
(OutputStatus.FAIL, 'unexpected byte', TestInstanceStatus.FAIL, 'unexpected byte'),
(OutputStatus.NONE, None, TestInstanceStatus.NONE, 'Unknown'),
(TwisterStatus.FAIL, 'timeout', TwisterStatus.FAIL, 'timeout'),
(TwisterStatus.FAIL, 'Execution error', TwisterStatus.FAIL, 'Execution error'),
(TwisterStatus.FAIL, 'unexpected eof', TwisterStatus.FAIL, 'unexpected eof'),
(TwisterStatus.FAIL, 'unexpected byte', TwisterStatus.FAIL, 'unexpected byte'),
(TwisterStatus.NONE, None, TwisterStatus.NONE, 'Unknown'),
]

@pytest.mark.parametrize(

@@ -1767,10 +1769,10 @@ TESTDATA_25 = [
('1\n' * 60).encode('utf-8'),
60,
1,
[HarnessStatus.NONE] * 60 + ['success'] * 6,
[TwisterStatus.NONE] * 60 + [TwisterStatus.PASS] * 6,
1000,
False,
OutputStatus.FAIL,
TwisterStatus.FAIL,
'timeout',
[mock.call('1\n'), mock.call('1\n')]
),

@@ -1778,10 +1780,10 @@ TESTDATA_25 = [
('1\n' * 60).encode('utf-8'),
60,
-1,
[HarnessStatus.NONE] * 60 + ['success'] * 30,
[TwisterStatus.NONE] * 60 + [TwisterStatus.PASS] * 30,
100,
False,
OutputStatus.FAIL,
TwisterStatus.FAIL,
None,
[mock.call('1\n'), mock.call('1\n')]
),

@@ -1789,10 +1791,10 @@ TESTDATA_25 = [
b'',
60,
1,
['success'] * 3,
[TwisterStatus.PASS] * 3,
100,
False,
OutputStatus.FAIL,
TwisterStatus.FAIL,
'unexpected eof',
[]
),

@@ -1800,10 +1802,10 @@ TESTDATA_25 = [
b'\x81',
60,
1,
['success'] * 3,
[TwisterStatus.PASS] * 3,
100,
False,
OutputStatus.FAIL,
TwisterStatus.FAIL,
'unexpected byte',
[]
),

@@ -1811,10 +1813,10 @@ TESTDATA_25 = [
'1\n2\n3\n4\n5\n'.encode('utf-8'),
600,
1,
[HarnessStatus.NONE] * 3 + ['success'] * 7,
[TwisterStatus.NONE] * 3 + [TwisterStatus.PASS] * 7,
100,
False,
'success',
TwisterStatus.PASS,
None,
[mock.call('1\n'), mock.call('2\n'), mock.call('3\n'), mock.call('4\n')]
),

@@ -1822,10 +1824,10 @@ TESTDATA_25 = [
'1\n2\n3\n4\n5\n'.encode('utf-8'),
600,
0,
[HarnessStatus.NONE] * 3 + ['success'] * 7,
[TwisterStatus.NONE] * 3 + [TwisterStatus.PASS] * 7,
100,
False,
OutputStatus.FAIL,
TwisterStatus.FAIL,
'timeout',
[mock.call('1\n'), mock.call('2\n')]
),

@@ -1833,17 +1835,17 @@ TESTDATA_25 = [
'1\n2\n3\n4\n5\n'.encode('utf-8'),
60,
1,
[HarnessStatus.NONE] * 3 + ['success'] * 7,
[TwisterStatus.NONE] * 3 + [TwisterStatus.PASS] * 7,
(n for n in [100, 100, 10000]),
True,
'success',
TwisterStatus.PASS,
None,
[mock.call('1\n'), mock.call('2\n'), mock.call('3\n'), mock.call('4\n')]
),
]

@pytest.mark.parametrize(
'content, timeout, pid, harness_states, cputime, capture_coverage,' \
'content, timeout, pid, harness_statuses, cputime, capture_coverage,' \
' expected_status, expected_reason, expected_log_calls',
TESTDATA_25,
ids=[

@@ -1862,7 +1864,7 @@ def test_qemuhandler_thread(
content,
timeout,
pid,
harness_states,
harness_statuses,
cputime,
capture_coverage,
expected_status,

@@ -1895,7 +1897,7 @@ def test_qemuhandler_thread(
return file_object
harness = mock.Mock(capture_coverage=capture_coverage, handle=print)
type(harness).state = mock.PropertyMock(side_effect=harness_states)
type(harness).status = mock.PropertyMock(side_effect=harness_statuses)
p = mock.Mock()
p.poll = mock.Mock(

@@ -1953,15 +1955,15 @@ def test_qemuhandler_thread(
TESTDATA_26 = [
(True, False, HarnessStatus.NONE, True,
(True, False, TwisterStatus.NONE, True,
['No timeout, return code from QEMU (1): 1',
'return code from QEMU (1): 1']),
(False, True, HarnessStatus.PASS, True, ['return code from QEMU (1): 0']),
(False, True, HarnessStatus.FAIL, False, ['return code from QEMU (None): 1']),
(False, True, TwisterStatus.PASS, True, ['return code from QEMU (1): 0']),
(False, True, TwisterStatus.FAIL, False, ['return code from QEMU (None): 1']),
]

@pytest.mark.parametrize(
'isatty, do_timeout, harness_state, exists_pid_fn, expected_logs',
'isatty, do_timeout, harness_status, exists_pid_fn, expected_logs',
TESTDATA_26,
ids=['no timeout, isatty', 'timeout passed', 'timeout, no pid_fn']
)

@@ -1971,7 +1973,7 @@ def test_qemuhandler_handle(
tmp_path,
isatty,
do_timeout,
harness_state,
harness_status,
exists_pid_fn,
expected_logs
):

@@ -2005,7 +2007,7 @@ def test_qemuhandler_handle(
handler.pid_fn = os.path.join(sysbuild_build_dir, 'qemu.pid')
handler.log_fn = os.path.join('dummy', 'log')
harness = mock.Mock(state=harness_state)
harness = mock.Mock(status=harness_status)
handler_options_west_flash = []
domain_build_dir = os.path.join('sysbuild', 'dummydir')
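Because the unified status is exposed as a property on harness and handler objects, these tests cannot simply set an attribute on a Mock; they attach a mock.PropertyMock to the mock's type, as in the monitor_serial and qemuhandler thread tests above. A minimal, self-contained sketch of that pattern (string statuses used here for brevity; the real tests use TwisterStatus members):

    from unittest import mock

    harness = mock.Mock(capture_coverage=False)
    # Each read of .status returns the next value, so a polling loop sees the harness "finish".
    type(harness).status = mock.PropertyMock(side_effect=['none', 'none', 'passed'])

    assert harness.status == 'none'
    assert harness.status == 'none'
    assert harness.status == 'passed'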
@@ -17,7 +17,6 @@ import logging as logger
from conftest import ZEPHYR_BASE
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))

#from scripts.pylib.twister.twisterlib.statuses import HarnessStatus, TestCaseStatus, TestInstanceStatus
from twisterlib.harness import (
Bsim,
Console,

@@ -29,7 +28,7 @@ from twisterlib.harness import (
Robot,
Test
)
from twisterlib.statuses import HarnessStatus, TestCaseStatus, TestInstanceStatus
from twisterlib.statuses import TwisterStatus
from twisterlib.testinstance import TestInstance

GTEST_START_STATE = " RUN "

@@ -93,13 +92,13 @@ def test_harness_parse_record(lines, pattern, expected_records, as_json):
assert harness.recording == expected_records

TEST_DATA_1 = [('RunID: 12345', False, False, False, HarnessStatus.NONE, True),
('PROJECT EXECUTION SUCCESSFUL', False, False, False, HarnessStatus.PASS, False),
('PROJECT EXECUTION SUCCESSFUL', True, False, False, HarnessStatus.FAIL, False),
('PROJECT EXECUTION FAILED', False, False, False, HarnessStatus.FAIL, False),
('ZEPHYR FATAL ERROR', False, True, False, HarnessStatus.NONE, False),
('GCOV_COVERAGE_DUMP_START', None, None, True, HarnessStatus.NONE, False),
('GCOV_COVERAGE_DUMP_END', None, None, False, HarnessStatus.NONE, False),]
TEST_DATA_1 = [('RunID: 12345', False, False, False, TwisterStatus.NONE, True),
('PROJECT EXECUTION SUCCESSFUL', False, False, False, TwisterStatus.PASS, False),
('PROJECT EXECUTION SUCCESSFUL', True, False, False, TwisterStatus.FAIL, False),
('PROJECT EXECUTION FAILED', False, False, False, TwisterStatus.FAIL, False),
('ZEPHYR FATAL ERROR', False, True, False, TwisterStatus.NONE, False),
('GCOV_COVERAGE_DUMP_START', None, None, True, TwisterStatus.NONE, False),
('GCOV_COVERAGE_DUMP_END', None, None, False, TwisterStatus.NONE, False),]

@pytest.mark.parametrize(
"line, fault, fail_on_fault, cap_cov, exp_stat, exp_id",

@@ -110,7 +109,7 @@ def test_harness_process_test(line, fault, fail_on_fault, cap_cov, exp_stat, exp
#Arrange
harness = Harness()
harness.run_id = 12345
harness.state = HarnessStatus.NONE
harness.status = TwisterStatus.NONE
harness.fault = fault
harness.fail_on_fault = fail_on_fault
mock.patch.object(Harness, 'parse_record', return_value=None)

@@ -120,7 +119,7 @@ def test_harness_process_test(line, fault, fail_on_fault, cap_cov, exp_stat, exp
#Assert
assert harness.matched_run_id == exp_id
assert harness.state == exp_stat
assert harness.status == exp_stat
assert harness.capture_coverage == cap_cov
assert harness.recording == []

@@ -180,13 +179,13 @@ def test_robot_handle(tmp_path):
tc = instance.get_case_or_create('test_case_1')
#Assert
assert instance.state == TestInstanceStatus.PASS
assert tc.status == TestCaseStatus.PASS
assert instance.status == TwisterStatus.PASS
assert tc.status == TwisterStatus.PASS

TEST_DATA_2 = [
("", 0, TestInstanceStatus.PASS),
("Robot test failure: sourcedir for mock_platform", 1, TestInstanceStatus.FAIL),
("", 0, TwisterStatus.PASS),
("Robot test failure: sourcedir for mock_platform", 1, TwisterStatus.FAIL),
]
@pytest.mark.parametrize(
"exp_out, returncode, expected_status",

@@ -282,13 +281,13 @@ def test_console_configure(tmp_path, type, num_patterns):
assert console.pattern.pattern == 'pattern1'

TEST_DATA_4 = [("one_line", True, HarnessStatus.PASS, "line", False, False),
("multi_line", True, HarnessStatus.PASS, "line", False, False),
("multi_line", False, HarnessStatus.PASS, "line", False, False),
("invalid_type", False, HarnessStatus.NONE, "line", False, False),
("invalid_type", False, HarnessStatus.NONE, "ERROR", True, False),
("invalid_type", False, HarnessStatus.NONE, "COVERAGE_START", False, True),
("invalid_type", False, HarnessStatus.NONE, "COVERAGE_END", False, False)]
TEST_DATA_4 = [("one_line", True, TwisterStatus.PASS, "line", False, False),
("multi_line", True, TwisterStatus.PASS, "line", False, False),
("multi_line", False, TwisterStatus.PASS, "line", False, False),
("invalid_type", False, TwisterStatus.NONE, "line", False, False),
("invalid_type", False, TwisterStatus.NONE, "ERROR", True, False),
("invalid_type", False, TwisterStatus.NONE, "COVERAGE_START", False, True),
("invalid_type", False, TwisterStatus.NONE, "COVERAGE_END", False, False)]
@pytest.mark.parametrize(
"line_type, ordered_val, exp_state, line, exp_fault, exp_capture",
TEST_DATA_4,

@@ -314,7 +313,7 @@ def test_console_handle(tmp_path, line_type, ordered_val, exp_state, line, exp_f
console.patterns = [re.compile("pattern1"), re.compile("pattern2")]
console.pattern = re.compile("pattern")
console.patterns_expected = 0
console.state = HarnessStatus.NONE
console.status = TwisterStatus.NONE
console.fail_on_fault = True
console.FAULT = "ERROR"
console.GCOV_START = "COVERAGE_START"

@@ -337,7 +336,7 @@ def test_console_handle(tmp_path, line_type, ordered_val, exp_state, line, exp_f
line2 = "pattern2"
console.handle(line1)
console.handle(line2)
assert console.state == exp_state
assert console.status == exp_state
with pytest.raises(Exception):
console.handle(line)
assert logger.error.called

@@ -471,7 +470,7 @@ def test_pytest_run(tmp_path, caplog):
# Act
test_obj.pytest_run(timeout)
# Assert
assert test_obj.state == HarnessStatus.FAIL
assert test_obj.status == TwisterStatus.FAIL
assert exp_out in caplog.text

@@ -493,13 +492,13 @@ def test_get_harness(name):
assert isinstance(harness_class, Test)

TEST_DATA_7 = [("", "Running TESTSUITE suite_name", ['suite_name'], TestCaseStatus.NONE, True, HarnessStatus.NONE),
("", "START - test_testcase", [], TestCaseStatus.STARTED, True, HarnessStatus.NONE),
("", "PASS - test_example in 0 seconds", [], TestCaseStatus.PASS, True, HarnessStatus.NONE),
("", "SKIP - test_example in 0 seconds", [], TestCaseStatus.SKIP, True, HarnessStatus.NONE),
("", "FAIL - test_example in 0 seconds", [], TestCaseStatus.FAIL, True, HarnessStatus.NONE),
("not a ztest and no state for test_id", "START - test_testcase", [], TestCaseStatus.PASS, False, HarnessStatus.PASS),
("not a ztest and no state for test_id", "START - test_testcase", [], TestCaseStatus.FAIL, False, HarnessStatus.FAIL)]
TEST_DATA_7 = [("", "Running TESTSUITE suite_name", ['suite_name'], TwisterStatus.NONE, True, TwisterStatus.NONE),
("", "START - test_testcase", [], TwisterStatus.STARTED, True, TwisterStatus.NONE),
("", "PASS - test_example in 0 seconds", [], TwisterStatus.PASS, True, TwisterStatus.NONE),
("", "SKIP - test_example in 0 seconds", [], TwisterStatus.SKIP, True, TwisterStatus.NONE),
("", "FAIL - test_example in 0 seconds", [], TwisterStatus.FAIL, True, TwisterStatus.NONE),
("not a ztest and no state for test_id", "START - test_testcase", [], TwisterStatus.PASS, False, TwisterStatus.PASS),
("not a ztest and no state for test_id", "START - test_testcase", [], TwisterStatus.FAIL, False, TwisterStatus.FAIL)]
@pytest.mark.parametrize(
"exp_out, line, exp_suite_name, exp_status, ztest, state",
TEST_DATA_7,

@@ -525,7 +524,7 @@ def test_test_handle(tmp_path, caplog, exp_out, line, exp_suite_name, exp_status
test_obj.configure(instance)
test_obj.id = "test_id"
test_obj.ztest = ztest
test_obj.state = state
test_obj.status = state
test_obj.id = 'test_id'
#Act
test_obj.handle(line)

@@ -563,7 +562,7 @@ def gtest(tmp_path):
def test_gtest_start_test_no_suites_detected(gtest):
process_logs(gtest, [SAMPLE_GTEST_START])
assert len(gtest.detected_suite_names) == 0
assert gtest.state == HarnessStatus.NONE
assert gtest.status == TwisterStatus.NONE

def test_gtest_start_test(gtest):

@@ -576,12 +575,12 @@ def test_gtest_start_test(gtest):
),
],
)
assert gtest.state == HarnessStatus.NONE
assert gtest.status == TwisterStatus.NONE
assert len(gtest.detected_suite_names) == 1
assert gtest.detected_suite_names[0] == "suite_name"
assert gtest.instance.get_case_by_name("id.suite_name.test_name") is not None
assert (
gtest.instance.get_case_by_name("id.suite_name.test_name").status == TestCaseStatus.STARTED
gtest.instance.get_case_by_name("id.suite_name.test_name").status == TwisterStatus.STARTED
)

@@ -598,11 +597,11 @@ def test_gtest_pass(gtest):
),
],
)
assert gtest.state == HarnessStatus.NONE
assert gtest.status == TwisterStatus.NONE
assert len(gtest.detected_suite_names) == 1
assert gtest.detected_suite_names[0] == "suite_name"
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TestCaseStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TestCaseStatus.PASS
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TwisterStatus.PASS

def test_gtest_failed(gtest):

@@ -618,11 +617,11 @@ def test_gtest_failed(gtest):
),
],
)
assert gtest.state == HarnessStatus.NONE
assert gtest.status == TwisterStatus.NONE
assert len(gtest.detected_suite_names) == 1
assert gtest.detected_suite_names[0] == "suite_name"
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TestCaseStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TestCaseStatus.FAIL
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TwisterStatus.FAIL

def test_gtest_skipped(gtest):

@@ -638,11 +637,11 @@ def test_gtest_skipped(gtest):
),
],
)
assert gtest.state == HarnessStatus.NONE
assert gtest.status == TwisterStatus.NONE
assert len(gtest.detected_suite_names) == 1
assert gtest.detected_suite_names[0] == "suite_name"
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TestCaseStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TestCaseStatus.SKIP
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TwisterStatus.SKIP

def test_gtest_all_pass(gtest):

@@ -659,11 +658,11 @@ def test_gtest_all_pass(gtest):
SAMPLE_GTEST_END,
],
)
assert gtest.state == HarnessStatus.PASS
assert gtest.status == TwisterStatus.PASS
assert len(gtest.detected_suite_names) == 1
assert gtest.detected_suite_names[0] == "suite_name"
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TestCaseStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TestCaseStatus.PASS
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TwisterStatus.PASS

def test_gtest_one_skipped(gtest):

@@ -686,13 +685,13 @@ def test_gtest_one_skipped(gtest):
SAMPLE_GTEST_END,
],
)
assert gtest.state == HarnessStatus.PASS
assert gtest.status == TwisterStatus.PASS
assert len(gtest.detected_suite_names) == 1
assert gtest.detected_suite_names[0] == "suite_name"
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TestCaseStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TestCaseStatus.PASS
assert gtest.instance.get_case_by_name("id.suite_name.test_name1") != TestCaseStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name1").status == TestCaseStatus.SKIP
assert gtest.instance.get_case_by_name("id.suite_name.test_name") != TwisterStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name").status == TwisterStatus.PASS
assert gtest.instance.get_case_by_name("id.suite_name.test_name1") != TwisterStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test_name1").status == TwisterStatus.SKIP

def test_gtest_one_fail(gtest):

@@ -715,13 +714,13 @@ def test_gtest_one_fail(gtest):
SAMPLE_GTEST_END,
],
)
assert gtest.state == HarnessStatus.FAIL
assert gtest.status == TwisterStatus.FAIL
assert len(gtest.detected_suite_names) == 1
assert gtest.detected_suite_names[0] == "suite_name"
assert gtest.instance.get_case_by_name("id.suite_name.test0") != TestCaseStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test0").status == TestCaseStatus.PASS
assert gtest.instance.get_case_by_name("id.suite_name.test1") != TestCaseStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test1").status == TestCaseStatus.FAIL
assert gtest.instance.get_case_by_name("id.suite_name.test0") != TwisterStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test0").status == TwisterStatus.PASS
assert gtest.instance.get_case_by_name("id.suite_name.test1") != TwisterStatus.NONE
assert gtest.instance.get_case_by_name("id.suite_name.test1").status == TwisterStatus.FAIL

def test_gtest_missing_result(gtest):
@ -24,7 +24,7 @@ from typing import List
|
|||
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
|
||||
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
|
||||
|
||||
from twisterlib.statuses import TestCaseStatus, TestInstanceStatus
|
||||
from twisterlib.statuses import TwisterStatus
|
||||
from twisterlib.error import BuildError
|
||||
from twisterlib.harness import Pytest
|
||||
|
||||
|
|
@ -254,15 +254,15 @@ TESTDATA_1_1 = [
|
|||
]
|
||||
TESTDATA_1_2 = [
|
||||
(0, False, 'dummy out',
|
||||
True, True, TestInstanceStatus.PASS, None, False, True),
|
||||
True, True, TwisterStatus.PASS, None, False, True),
|
||||
(0, True, '',
|
||||
False, False, TestInstanceStatus.PASS, None, False, False),
|
||||
False, False, TwisterStatus.PASS, None, False, False),
|
||||
(1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
|
||||
True, True, TestInstanceStatus.SKIP, 'FLASH overflow', True, False),
|
||||
True, True, TwisterStatus.SKIP, 'FLASH overflow', True, False),
|
||||
(1, True, 'Error: Image size (99 B) + trailer (1 B) exceeds requested size',
|
||||
True, True, TestInstanceStatus.SKIP, 'imgtool overflow', True, False),
|
||||
True, True, TwisterStatus.SKIP, 'imgtool overflow', True, False),
|
||||
(1, True, 'mock.ANY',
|
||||
True, True, TestInstanceStatus.ERROR, 'Build failure', False, False)
|
||||
True, True, TwisterStatus.ERROR, 'Build failure', False, False)
|
||||
]
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
|
@ -307,7 +307,7 @@ def test_cmake_run_build(
|
|||
instance_mock = mock.Mock(add_missing_case_status=mock.Mock())
|
||||
instance_mock.build_time = 0
|
||||
instance_mock.run = is_instance_run
|
||||
instance_mock.status = TestInstanceStatus.NONE
|
||||
instance_mock.status = TwisterStatus.NONE
|
||||
instance_mock.reason = None
|
||||
|
||||
cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
|
||||
|
|
@ -355,7 +355,7 @@ def test_cmake_run_build(
|
|||
|
||||
if expected_add_missing:
|
||||
cmake.instance.add_missing_case_status.assert_called_once_with(
|
||||
TestInstanceStatus.SKIP, 'Test was built only'
|
||||
TwisterStatus.SKIP, 'Test was built only'
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -367,7 +367,7 @@ TESTDATA_2_2 = [
|
|||
(True, ['dummy_stage_1', 'ds2'],
|
||||
0, False, '',
|
||||
True, True, False,
|
||||
TestInstanceStatus.NONE, None,
|
||||
TwisterStatus.NONE, None,
|
||||
[os.path.join('dummy', 'cmake'),
|
||||
'-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1',
|
||||
'-DSB_CONFIG_COMPILER_WARNINGS_AS_ERRORS=y',
|
||||
|
|
@@ -381,7 +381,7 @@ TESTDATA_2_2 = [
(False, [],
1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
True, False, True,
-TestInstanceStatus.ERROR, 'Cmake build failure',
+TwisterStatus.ERROR, 'Cmake build failure',
[os.path.join('dummy', 'cmake'),
'-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1',
'-DSB_CONFIG_COMPILER_WARNINGS_AS_ERRORS=n',
@@ -439,13 +439,13 @@ def test_cmake_run_cmake(
instance_mock.run = is_instance_run
instance_mock.run_id = 1
instance_mock.build_time = 0
-instance_mock.status = TestInstanceStatus.NONE
+instance_mock.status = TwisterStatus.NONE
instance_mock.reason = None
instance_mock.testsuite = mock.Mock()
instance_mock.testsuite.required_snippets = ['dummy snippet 1', 'ds2']
instance_mock.testcases = [mock.Mock(), mock.Mock()]
-instance_mock.testcases[0].status = TestCaseStatus.NONE
-instance_mock.testcases[1].status = TestCaseStatus.NONE
+instance_mock.testcases[0].status = TwisterStatus.NONE
+instance_mock.testcases[1].status = TwisterStatus.NONE

cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
jobserver_mock)
@@ -860,7 +860,7 @@ def test_projectbuilder_log_info_file(
TESTDATA_6 = [
(
{'op': 'filter'},
-TestInstanceStatus.FAIL,
+TwisterStatus.FAIL,
'Failed',
mock.ANY,
mock.ANY,
@@ -875,14 +875,14 @@ TESTDATA_6 = [
mock.ANY,
[],
{'op': 'report', 'test': mock.ANY},
-TestInstanceStatus.FAIL,
+TwisterStatus.FAIL,
'Failed',
0,
None
),
(
{'op': 'filter'},
-TestInstanceStatus.PASS,
+TwisterStatus.PASS,
mock.ANY,
mock.ANY,
mock.ANY,
@@ -897,14 +897,14 @@ TESTDATA_6 = [
mock.ANY,
['filtering dummy instance name'],
{'op': 'report', 'test': mock.ANY},
-TestInstanceStatus.FILTER,
+TwisterStatus.FILTER,
'runtime filter',
1,
-(TestCaseStatus.SKIP,)
+(TwisterStatus.SKIP,)
),
(
{'op': 'filter'},
-TestInstanceStatus.PASS,
+TwisterStatus.PASS,
mock.ANY,
mock.ANY,
mock.ANY,
@@ -919,14 +919,14 @@ TESTDATA_6 = [
mock.ANY,
[],
{'op': 'cmake', 'test': mock.ANY},
-TestInstanceStatus.PASS,
+TwisterStatus.PASS,
mock.ANY,
0,
None
),
(
{'op': 'cmake'},
-TestInstanceStatus.ERROR,
+TwisterStatus.ERROR,
'dummy error',
mock.ANY,
mock.ANY,
@@ -941,14 +941,14 @@ TESTDATA_6 = [
mock.ANY,
[],
{'op': 'report', 'test': mock.ANY},
-TestInstanceStatus.ERROR,
+TwisterStatus.ERROR,
'dummy error',
0,
None
),
(
{'op': 'cmake'},
-TestInstanceStatus.NONE,
+TwisterStatus.NONE,
mock.ANY,
mock.ANY,
mock.ANY,
@@ -963,7 +963,7 @@ TESTDATA_6 = [
mock.ANY,
[],
{'op': 'report', 'test': mock.ANY},
-TestInstanceStatus.PASS,
+TwisterStatus.PASS,
mock.ANY,
0,
None
@@ -1007,10 +1007,10 @@ TESTDATA_6 = [
mock.ANY,
['filtering dummy instance name'],
{'op': 'report', 'test': mock.ANY},
-TestInstanceStatus.FILTER,
+TwisterStatus.FILTER,
'runtime filter',
1,
-(TestCaseStatus.SKIP,)
+(TwisterStatus.SKIP,)
),
(
{'op': 'cmake'},
@@ -1051,14 +1051,14 @@ TESTDATA_6 = [
mock.ANY,
['build test: dummy instance name'],
{'op': 'report', 'test': mock.ANY},
-TestInstanceStatus.ERROR,
+TwisterStatus.ERROR,
'Build Failure',
0,
None
),
(
{'op': 'build'},
-TestInstanceStatus.SKIP,
+TwisterStatus.SKIP,
mock.ANY,
mock.ANY,
mock.ANY,
@@ -1077,11 +1077,11 @@ TESTDATA_6 = [
mock.ANY,
mock.ANY,
1,
-(TestCaseStatus.SKIP, mock.ANY)
+(TwisterStatus.SKIP, mock.ANY)
),
(
{'op': 'build'},
-TestInstanceStatus.PASS,
+TwisterStatus.PASS,
mock.ANY,
mock.ANY,
mock.ANY,
@@ -1096,10 +1096,10 @@ TESTDATA_6 = [
mock.ANY,
['build test: dummy instance name'],
{'op': 'report', 'test': mock.ANY},
-TestInstanceStatus.PASS,
+TwisterStatus.PASS,
mock.ANY,
0,
-(TestCaseStatus.BLOCK, mock.ANY)
+(TwisterStatus.BLOCK, mock.ANY)
),
(
{'op': 'build'},
@@ -1142,7 +1142,7 @@ TESTDATA_6 = [
['build test: dummy instance name',
'Determine test cases for test instance: dummy instance name'],
{'op': 'report', 'test': mock.ANY},
-TestInstanceStatus.ERROR,
+TwisterStatus.ERROR,
'Determine Testcases Error!',
0,
None
@@ -1238,7 +1238,7 @@ TESTDATA_6 = [
),
(
{'op': 'run'},
-TestInstanceStatus.FAIL,
+TwisterStatus.FAIL,
mock.ANY,
mock.ANY,
mock.ANY,
@@ -1255,7 +1255,7 @@ TESTDATA_6 = [
'run status: dummy instance name failed',
'RuntimeError: Pipeline Error!'],
None,
-TestInstanceStatus.FAIL,
+TwisterStatus.FAIL,
mock.ANY,
0,
None
@@ -1284,7 +1284,7 @@ TESTDATA_6 = [
),
(
{'op': 'report'},
-TestInstanceStatus.PASS,
+TwisterStatus.PASS,
mock.ANY,
mock.ANY,
mock.ANY,
@@ -1929,14 +1929,14 @@ def test_projectbuilder_sanitize_zephyr_base_from_files(

TESTDATA_13 = [
(
-TestInstanceStatus.ERROR, True, True, False,
+TwisterStatus.ERROR, True, True, False,
['INFO 20/25 dummy platform' \
' dummy.testsuite.name' \
' ERROR dummy reason (cmake)'],
None
),
(
-TestInstanceStatus.FAIL, False, False, False,
+TwisterStatus.FAIL, False, False, False,
['ERROR dummy platform' \
' dummy.testsuite.name' \
' FAILED : dummy reason'],
@@ -1944,20 +1944,20 @@ TESTDATA_13 = [
' failed: 3, error: 1'
),
(
-TestInstanceStatus.SKIP, True, False, False,
+TwisterStatus.SKIP, True, False, False,
['INFO 20/25 dummy platform' \
' dummy.testsuite.name' \
' SKIPPED (dummy reason)'],
None
),
(
-TestInstanceStatus.FILTER, False, False, False,
+TwisterStatus.FILTER, False, False, False,
[],
'INFO - Total complete: 20/ 25 80% skipped: 4,' \
' failed: 2, error: 1'
),
(
-TestInstanceStatus.PASS, True, False, True,
+TwisterStatus.PASS, True, False, True,
['INFO 20/25 dummy platform' \
' dummy.testsuite.name' \
' PASSED' \
@@ -1965,7 +1965,7 @@ TESTDATA_13 = [
None
),
(
-TestInstanceStatus.PASS, True, False, False,
+TwisterStatus.PASS, True, False, False,
['INFO 20/25 dummy platform' \
' dummy.testsuite.name' \
' PASSED (build)'],
@@ -2009,7 +2009,7 @@ def test_projectbuilder_report_out(
instance_mock.testsuite.name = 'dummy.testsuite.name'
instance_mock.testsuite.testcases = [mock.Mock() for _ in range(25)]
instance_mock.testcases = [mock.Mock() for _ in range(24)] + \
-[mock.Mock(status=TestCaseStatus.SKIP)]
+[mock.Mock(status=TwisterStatus.SKIP)]
env_mock = mock.Mock()

pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
@@ -2301,14 +2301,14 @@ def test_projectbuilder_gather_metrics(

TESTDATA_16 = [
-(TestInstanceStatus.ERROR, mock.ANY, False, False, False),
-(TestInstanceStatus.FAIL, mock.ANY, False, False, False),
-(TestInstanceStatus.SKIP, mock.ANY, False, False, False),
-(TestInstanceStatus.FILTER, 'native', False, False, True),
-(TestInstanceStatus.PASS, 'qemu', False, False, True),
-(TestInstanceStatus.FILTER, 'unit', False, False, True),
-(TestInstanceStatus.FILTER, 'mcu', True, True, False),
-(TestInstanceStatus.PASS, 'frdm_k64f', False, True, False),
+(TwisterStatus.ERROR, mock.ANY, False, False, False),
+(TwisterStatus.FAIL, mock.ANY, False, False, False),
+(TwisterStatus.SKIP, mock.ANY, False, False, False),
+(TwisterStatus.FILTER, 'native', False, False, True),
+(TwisterStatus.PASS, 'qemu', False, False, True),
+(TwisterStatus.FILTER, 'unit', False, False, True),
+(TwisterStatus.FILTER, 'mcu', True, True, False),
+(TwisterStatus.PASS, 'frdm_k64f', False, True, False),
]

@pytest.mark.parametrize(
@@ -2466,35 +2466,35 @@ def test_twisterrunner_run(
def test_twisterrunner_update_counting_before_pipeline():
instances = {
'dummy1': mock.Mock(
-status=TestInstanceStatus.FILTER,
+status=TwisterStatus.FILTER,
reason='runtime filter',
testsuite=mock.Mock(
testcases=[mock.Mock()]
)
),
'dummy2': mock.Mock(
-status=TestInstanceStatus.FILTER,
+status=TwisterStatus.FILTER,
reason='static filter',
testsuite=mock.Mock(
testcases=[mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock()]
)
),
'dummy3': mock.Mock(
-status=TestInstanceStatus.ERROR,
+status=TwisterStatus.ERROR,
reason='error',
testsuite=mock.Mock(
testcases=[mock.Mock()]
)
),
'dummy4': mock.Mock(
-status=TestInstanceStatus.PASS,
+status=TwisterStatus.PASS,
reason='OK',
testsuite=mock.Mock(
testcases=[mock.Mock()]
)
),
'dummy5': mock.Mock(
-status=TestInstanceStatus.SKIP,
+status=TwisterStatus.SKIP,
reason=None,
testsuite=mock.Mock(
testcases=[mock.Mock()]
@@ -2581,11 +2581,11 @@ def test_twisterrunner_add_tasks_to_queue(
return [filter]

instances = {
-'dummy1': mock.Mock(run=True, retries=0, status=TestInstanceStatus.PASS, build_dir="/tmp"),
-'dummy2': mock.Mock(run=True, retries=0, status=TestInstanceStatus.SKIP, build_dir="/tmp"),
-'dummy3': mock.Mock(run=True, retries=0, status=TestInstanceStatus.FILTER, build_dir="/tmp"),
-'dummy4': mock.Mock(run=True, retries=0, status=TestInstanceStatus.ERROR, build_dir="/tmp"),
-'dummy5': mock.Mock(run=True, retries=0, status=TestInstanceStatus.FAIL, build_dir="/tmp")
+'dummy1': mock.Mock(run=True, retries=0, status=TwisterStatus.PASS, build_dir="/tmp"),
+'dummy2': mock.Mock(run=True, retries=0, status=TwisterStatus.SKIP, build_dir="/tmp"),
+'dummy3': mock.Mock(run=True, retries=0, status=TwisterStatus.FILTER, build_dir="/tmp"),
+'dummy4': mock.Mock(run=True, retries=0, status=TwisterStatus.ERROR, build_dir="/tmp"),
+'dummy5': mock.Mock(run=True, retries=0, status=TwisterStatus.FAIL, build_dir="/tmp")
}
instances['dummy4'].testsuite.filter = 'some'
instances['dummy5'].testsuite.filter = 'full'
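
The hunks above replace members of TestInstanceStatus and TestCaseStatus with identically named members of a single TwisterStatus class, so the test modules need only one status import. A minimal sketch of what such a unified enum could look like follows; the member names are taken from the tests in this diff, while the string values and base classes are assumptions, not the actual contents of twisterlib/statuses.py:

# Hypothetical sketch of a unified status enum. Member names come from the
# tests above; the string values here are assumptions.
from enum import Enum

class TwisterStatus(str, Enum):
    NONE = "none"
    PASS = "passed"
    FAIL = "failed"
    ERROR = "error"
    SKIP = "skipped"
    FILTER = "filtered"
    BLOCK = "blocked"
    STARTED = "started"

    def __str__(self) -> str:
        return self.value

Because instance and test-case statuses now come from one type, a value such as TwisterStatus.SKIP can be assigned to either kind of object and compared directly, which is what allows the single import seen above.
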
@@ -16,7 +16,7 @@ import mock
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))

-from twisterlib.statuses import TestCaseStatus, TestInstanceStatus
+from twisterlib.statuses import TwisterStatus
from twisterlib.testinstance import TestInstance
from twisterlib.error import BuildError
from twisterlib.runner import TwisterRunner
@@ -266,7 +266,7 @@ def test_testinstance_add_filter(testinstance):
testinstance.add_filter(reason, filter_type)

assert {'type': filter_type, 'reason': reason} in testinstance.filters
-assert testinstance.status == TestInstanceStatus.FILTER
+assert testinstance.status == TwisterStatus.FILTER
assert testinstance.reason == reason
assert testinstance.filter_type == filter_type
@@ -311,17 +311,17 @@ TESTDATA_2 = [
def test_testinstance_add_missing_case_status(testinstance, reason, expected_reason):
testinstance.reason = 'dummy reason'

-status = TestCaseStatus.PASS
+status = TwisterStatus.PASS

assert len(testinstance.testcases) > 1, 'Selected testsuite does not have enough testcases.'

-testinstance.testcases[0].status = TestCaseStatus.STARTED
-testinstance.testcases[-1].status = TestCaseStatus.NONE
+testinstance.testcases[0].status = TwisterStatus.STARTED
+testinstance.testcases[-1].status = TwisterStatus.NONE

testinstance.add_missing_case_status(status, reason)

-assert testinstance.testcases[0].status == TestCaseStatus.FAIL
-assert testinstance.testcases[-1].status == TestCaseStatus.PASS
+assert testinstance.testcases[0].status == TwisterStatus.FAIL
+assert testinstance.testcases[-1].status == TwisterStatus.PASS
assert testinstance.testcases[-1].reason == expected_reason
@@ -356,7 +356,7 @@ def test_testinstance_dunders(all_testsuites_dict, class_testplan, platforms_lis
@pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
def test_testinstance_set_case_status_by_name(testinstance):
name = 'test_a.check_1.2a'
-status = 'dummy status'
+status = TwisterStatus.PASS
reason = 'dummy reason'

tc = testinstance.set_case_status_by_name(name, status, reason)
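
In the testinstance hunks above, ad-hoc strings such as 'dummy status' give way to real TwisterStatus members. One consequence of funnelling everything through a single enum is that a status attribute can be guarded by a bare value check; the property below is only an illustrative sketch built on the hypothetical enum from the earlier note, not the actual TestInstance or TestCase implementation:

# Illustrative sketch: a status property whose guard is a plain value check.
# Assumes the hypothetical TwisterStatus enum sketched earlier on this page.
class StatusHolderSketch:
    def __init__(self):
        self._status = TwisterStatus.NONE

    @property
    def status(self):
        return self._status

    @status.setter
    def status(self, value):
        # TwisterStatus(value) returns the member for a member or a member's
        # value and raises ValueError otherwise, so strings like
        # 'dummy status' are rejected at assignment time.
        self._status = TwisterStatus(value)

holder = StatusHolderSketch()
holder.status = TwisterStatus.PASS   # accepted
holder.status = "failed"             # accepted, normalized to TwisterStatus.FAIL
# holder.status = "dummy status"     # would raise ValueError
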
@@ -16,7 +16,7 @@ from contextlib import nullcontext
ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))

-from twisterlib.statuses import TestInstanceStatus
+from twisterlib.statuses import TwisterStatus
from twisterlib.testplan import TestPlan, change_skip_to_error_if_integration
from twisterlib.testinstance import TestInstance
from twisterlib.testsuite import TestSuite
@@ -181,7 +181,7 @@ def test_apply_filters_part1(class_testplan, all_testsuites_dict, platforms_list
plan.apply_filters(exclude_platform=['demo_board_1'],
platform=['demo_board_2'])

-filtered_instances = list(filter(lambda item: item.status == TestInstanceStatus.FILTER, plan.instances.values()))
+filtered_instances = list(filter(lambda item: item.status == TwisterStatus.FILTER, plan.instances.values()))
for d in filtered_instances:
assert d.reason == expected_discards
@@ -215,7 +215,7 @@ def test_apply_filters_part2(class_testplan, all_testsuites_dict,
]
}
class_testplan.apply_filters(**kwargs)
-filtered_instances = list(filter(lambda item: item.status == TestInstanceStatus.FILTER, class_testplan.instances.values()))
+filtered_instances = list(filter(lambda item: item.status == TwisterStatus.FILTER, class_testplan.instances.values()))
for d in filtered_instances:
assert d.reason == expected_discards
@@ -246,7 +246,7 @@ def test_apply_filters_part3(class_testplan, all_testsuites_dict, platforms_list
class_testplan.apply_filters(exclude_platform=['demo_board_1'],
platform=['demo_board_2'])

-filtered_instances = list(filter(lambda item: item.status == TestInstanceStatus.FILTER, class_testplan.instances.values()))
+filtered_instances = list(filter(lambda item: item.status == TwisterStatus.FILTER, class_testplan.instances.values()))
assert not filtered_instances

def test_add_instances_short(tmp_path, class_env, all_testsuites_dict, platforms_list):
@@ -339,16 +339,16 @@ def test_quarantine_short(class_testplan, platforms_list, test_data,
for testname, instance in class_testplan.instances.items():
if quarantine_verify:
if testname in expected_val:
-assert instance.status == TestInstanceStatus.NONE
+assert instance.status == TwisterStatus.NONE
else:
-assert instance.status == TestInstanceStatus.FILTER
+assert instance.status == TwisterStatus.FILTER
assert instance.reason == "Not under quarantine"
else:
if testname in expected_val:
-assert instance.status == TestInstanceStatus.FILTER
+assert instance.status == TwisterStatus.FILTER
assert instance.reason == "Quarantine: " + expected_val[testname]
else:
-assert instance.status == TestInstanceStatus.NONE
+assert instance.status == TwisterStatus.NONE

TESTDATA_PART4 = [
@@ -393,7 +393,7 @@ def test_required_snippets_short(
plan.apply_filters()

filtered_instances = list(
-filter(lambda item: item.status == TestInstanceStatus.FILTER, plan.instances.values())
+filter(lambda item: item.status == TwisterStatus.FILTER, plan.instances.values())
)
if expected_filtered_len is not None:
assert len(filtered_instances) == expected_filtered_len
@@ -809,14 +809,14 @@ def test_testplan_generate_subset(
shuffle_tests_seed=seed
)
testplan.instances = {
-'plat1/testA': mock.Mock(status=TestInstanceStatus.NONE),
-'plat1/testB': mock.Mock(status=TestInstanceStatus.NONE),
-'plat1/testC': mock.Mock(status=TestInstanceStatus.NONE),
-'plat2/testA': mock.Mock(status=TestInstanceStatus.NONE),
-'plat2/testB': mock.Mock(status=TestInstanceStatus.NONE),
-'plat3/testA': mock.Mock(status=TestInstanceStatus.SKIP),
-'plat3/testB': mock.Mock(status=TestInstanceStatus.SKIP),
-'plat3/testC': mock.Mock(status=TestInstanceStatus.ERROR),
+'plat1/testA': mock.Mock(status=TwisterStatus.NONE),
+'plat1/testB': mock.Mock(status=TwisterStatus.NONE),
+'plat1/testC': mock.Mock(status=TwisterStatus.NONE),
+'plat2/testA': mock.Mock(status=TwisterStatus.NONE),
+'plat2/testB': mock.Mock(status=TwisterStatus.NONE),
+'plat3/testA': mock.Mock(status=TwisterStatus.SKIP),
+'plat3/testB': mock.Mock(status=TwisterStatus.SKIP),
+'plat3/testC': mock.Mock(status=TwisterStatus.ERROR),
}

testplan.generate_subset(subset, sets)
@@ -1568,7 +1568,7 @@ def test_testplan_load_from_file(caplog, device_testing, expected_tfilter):
'retries': 0,
'testcases': {
'TS1.tc1': {
-'status': TestInstanceStatus.PASS,
+'status': TwisterStatus.PASS,
'reason': None,
'duration': 60.0,
'output': ''
@@ -1597,13 +1597,13 @@ def test_testplan_load_from_file(caplog, device_testing, expected_tfilter):
'retries': 1,
'testcases': {
'TS3.tc1': {
-'status': TestInstanceStatus.ERROR,
+'status': TwisterStatus.ERROR,
'reason': None,
'duration': 360.0,
'output': '[ERROR]: File \'dummy.yaml\' not found!\nClosing...'
},
'TS3.tc2': {
-'status': TestInstanceStatus.NONE,
+'status': TwisterStatus.NONE,
'reason': None,
'duration': 0,
'output': ''
@@ -1621,7 +1621,7 @@ def test_testplan_load_from_file(caplog, device_testing, expected_tfilter):
'retries': 0,
'testcases': {
'TS4.tc1': {
-'status': TestInstanceStatus.SKIP,
+'status': TwisterStatus.SKIP,
'reason': 'Not in requested test list.',
'duration': 360.0,
'output': '[INFO] Parsing...'
@@ -1722,9 +1722,9 @@ def test_testplan_create_build_dir_links(exists):
instances_linked.append(instance)

instances = {
-'inst0': mock.Mock(status=TestInstanceStatus.PASS),
-'inst1': mock.Mock(status=TestInstanceStatus.SKIP),
-'inst2': mock.Mock(status=TestInstanceStatus.ERROR),
+'inst0': mock.Mock(status=TwisterStatus.PASS),
+'inst1': mock.Mock(status=TwisterStatus.SKIP),
+'inst2': mock.Mock(status=TwisterStatus.ERROR),
}
expected_instances = [instances['inst0'], instances['inst2']]
@@ -1789,7 +1789,7 @@ TESTDATA_14 = [
('bad platform', 'dummy reason', [],
'dummy status', 'dummy reason'),
('good platform', 'quarantined', [],
-TestInstanceStatus.ERROR, 'quarantined but is one of the integration platforms'),
+TwisterStatus.ERROR, 'quarantined but is one of the integration platforms'),
('good platform', 'dummy reason', [{'type': 'command line filter'}],
'dummy status', 'dummy reason'),
('good platform', 'dummy reason', [{'type': 'Skip filter'}],
@@ -1801,7 +1801,7 @@ TESTDATA_14 = [
('good platform', 'dummy reason', [{'type': 'Module filter'}],
'dummy status', 'dummy reason'),
('good platform', 'dummy reason', [{'type': 'testsuite filter'}],
-TestInstanceStatus.ERROR, 'dummy reason but is one of the integration platforms'),
+TwisterStatus.ERROR, 'dummy reason but is one of the integration platforms'),
]

@pytest.mark.parametrize(

@@ -17,6 +17,7 @@ from contextlib import nullcontext
ZEPHYR_BASE = os.getenv('ZEPHYR_BASE')
sys.path.insert(0, os.path.join(ZEPHYR_BASE, 'scripts', 'pylib', 'twister'))

+from twisterlib.statuses import TwisterStatus
from twisterlib.testsuite import (
_find_src_dir_path,
_get_search_area_boundary,
@@ -856,11 +857,11 @@ def test_testsuite_load(
def test_testcase_dunders():
case_lesser = TestCase(name='A lesser name')
case_greater = TestCase(name='a greater name')
-case_greater.status = 'success'
+case_greater.status = TwisterStatus.FAIL

assert case_lesser < case_greater
assert str(case_greater) == 'a greater name'
-assert repr(case_greater) == '<TestCase a greater name with success>'
+assert repr(case_greater) == f'<TestCase a greater name with {str(TwisterStatus.FAIL)}>'

TESTDATA_11 = [
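
The dunder test above now builds the expected repr from str(TwisterStatus.FAIL) instead of a hard-coded word such as 'success'. Under the hypothetical enum sketched earlier (whose string values are assumptions), that interpolation would render the member's printable value:

# Assumes the sketch TwisterStatus from the earlier note.
expected = f'<TestCase a greater name with {str(TwisterStatus.FAIL)}>'
print(expected)  # <TestCase a greater name with failed>
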
@@ -14,9 +14,8 @@ import pytest
import sys
import json

# pylint: disable=no-name-in-module
from conftest import ZEPHYR_BASE, TEST_DATA, sample_filename_mock, testsuite_filename_mock
-from twisterlib.statuses import TestCaseStatus
+from twisterlib.statuses import TwisterStatus
from twisterlib.testplan import TestPlan
@@ -85,7 +84,7 @@ class TestTooling:

# Normally, board not supporting our toolchain would be filtered, so we check against that
assert len(filtered_j) == 1
-assert filtered_j[0][3] != TestCaseStatus.FILTER
+assert filtered_j[0][3] != TwisterStatus.FILTER

@pytest.mark.parametrize(
'test_path, test_platforms',