diff --git a/scripts/logging/dictionary/dictionary_parser/log_database.py b/scripts/logging/dictionary/dictionary_parser/log_database.py index 15ae3feff6b..69070a07990 100644 --- a/scripts/logging/dictionary/dictionary_parser/log_database.py +++ b/scripts/logging/dictionary/dictionary_parser/log_database.py @@ -63,16 +63,16 @@ class LogDatabase(): BIG_ENDIAN = False def __init__(self): - new_db = dict() + new_db = {} new_db['version'] = self.ZEPHYR_DICT_LOG_VER - new_db['target'] = dict() - new_db['sections'] = dict() - new_db['log_subsys'] = dict() - new_db['log_subsys']['log_instances'] = dict() + new_db['target'] = {} + new_db['sections'] = {} + new_db['log_subsys'] = {} + new_db['log_subsys']['log_instances'] = {} new_db['build_id'] = None new_db['arch'] = None - new_db['kconfigs'] = dict() + new_db['kconfigs'] = {} self.database = new_db @@ -216,7 +216,7 @@ class LogDatabase(): def read_json_database(db_file_name): """Read database from file and return a LogDatabase object""" try: - with open(db_file_name, "r") as db_fd: + with open(db_file_name, "r", encoding="iso-8859-1") as db_fd: json_db = json.load(db_fd) except (OSError, json.JSONDecodeError): return None @@ -243,7 +243,7 @@ class LogDatabase(): del sect['data'] try: - with open(db_file_name, "w") as db_fd: + with open(db_file_name, "w", encoding="iso-8859-1") as db_fd: db_fd.write(json.dumps(json_db)) except OSError: return False diff --git a/scripts/logging/dictionary/dictionary_parser/log_parser_v1.py b/scripts/logging/dictionary/dictionary_parser/log_parser_v1.py index 3009ef5c61c..b29d4e8bb19 100644 --- a/scripts/logging/dictionary/dictionary_parser/log_parser_v1.py +++ b/scripts/logging/dictionary/dictionary_parser/log_parser_v1.py @@ -21,7 +21,13 @@ from .log_parser import LogParser HEX_BYTES_IN_LINE = 16 -LOG_LEVELS = [('none', Fore.WHITE), ('err', Fore.RED), ('wrn', Fore.YELLOW), ('inf', Fore.GREEN), ('dbg', Fore.BLUE)] +LOG_LEVELS = [ + ('none', Fore.WHITE), + ('err', Fore.RED), + ('wrn', 
Fore.YELLOW), + ('inf', Fore.GREEN), + ('dbg', Fore.BLUE) +] # Need to keep sync with struct log_dict_output_msg_hdr in # include/logging/log_output_dict.h. @@ -97,7 +103,7 @@ class DataTypes(): def __init__(self, database): self.database = database - self.data_types = dict() + self.data_types = {} if database.is_tgt_64bit(): self.add_data_type(self.LONG, "q") @@ -122,7 +128,7 @@ class DataTypes(): formatter = endianness + fmt - self.data_types[data_type] = dict() + self.data_types[data_type] = {} self.data_types[data_type]['fmt'] = formatter size = struct.calcsize(formatter) @@ -212,7 +218,7 @@ class LogParserV1(LogParser): is_parsing = False do_extract = False - args = list() + args = [] # Translated from cbvprintf_package() for idx, fmt in enumerate(fmt_str): @@ -298,7 +304,7 @@ class LogParserV1(LogParser): @staticmethod def extract_string_table(str_tbl): """Extract string table in a packaged log message""" - tbl = dict() + tbl = {} one_str = "" next_new_string = True @@ -337,14 +343,16 @@ class LogParserV1(LogParser): chr_vals += " " elif chr_done == HEX_BYTES_IN_LINE: - print(f"{color}%s%s|%s{Fore.RESET}" % ((" " * prefix_len), hex_vals, chr_vals)) + print(f"{color}%s%s|%s{Fore.RESET}" % ((" " * prefix_len), + hex_vals, chr_vals)) hex_vals = "" chr_vals = "" chr_done = 0 if len(chr_vals) > 0: hex_padding = " " * (HEX_BYTES_IN_LINE - chr_done) - print(f"{color}%s%s%s|%s{Fore.RESET}" % ((" " * prefix_len), hex_vals, hex_padding, chr_vals)) + print(f"{color}%s%s%s|%s{Fore.RESET}" % ((" " * prefix_len), + hex_vals, hex_padding, chr_vals)) def parse_one_normal_msg(self, logdata, offset): @@ -420,7 +428,7 @@ class LogParserV1(LogParser): log_msg = fmt_str % args if level == 0: - print("%s" % log_msg, end='') + print(f"{log_msg}", end='') else: log_prefix = f"[{timestamp:>10}] <{level_str}> {source_id_str}: " print(f"{color}%s%s{Fore.RESET}" % (log_prefix, log_msg)) @@ -446,7 +454,7 @@ class LogParserV1(LogParser): num_dropped = 
struct.unpack_from(self.fmt_dropped_cnt, logdata, offset) offset += struct.calcsize(self.fmt_dropped_cnt) - print("--- %d messages dropped ---" % num_dropped) + print(f"--- {num_dropped[0]} messages dropped ---") elif msg_type == MSG_TYPE_NORMAL: ret = self.parse_one_normal_msg(logdata, offset) diff --git a/scripts/logging/dictionary/dictionary_parser/utils.py b/scripts/logging/dictionary/dictionary_parser/utils.py index f94a8090f0d..b614bdf1b1c 100644 --- a/scripts/logging/dictionary/dictionary_parser/utils.py +++ b/scripts/logging/dictionary/dictionary_parser/utils.py @@ -15,7 +15,7 @@ def convert_hex_file_to_bin(hexfile): """This converts a file in hexadecimal to binary""" bin_data = b'' - with open(hexfile, "r") as hfile: + with open(hexfile, "r", encoding="iso-8859-1") as hfile: for line in hfile.readlines(): hex_str = line.strip() diff --git a/scripts/logging/dictionary/log_parser.py b/scripts/logging/dictionary/log_parser.py index 273b0cc239f..a77e8fef1ab 100755 --- a/scripts/logging/dictionary/log_parser.py +++ b/scripts/logging/dictionary/log_parser.py @@ -42,22 +42,11 @@ def parse_args(): return argparser.parse_args() -def main(): - """Main function of log parser""" - args = parse_args() - - # Setup logging for parser - logging.basicConfig(format=LOGGER_FORMAT) - if args.debug: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - - # Read from database file - database = LogDatabase.read_json_database(args.dbfile) - if database is None: - logger.error("ERROR: Cannot open database file: %s, exiting...", args.dbfile) - sys.exit(1) +def read_log_file(args): + """ + Read the log from file + """ + logdata = None # Open log data file for reading if args.hex: @@ -67,7 +56,7 @@ def main(): else: hexdata = '' - with open(args.logfile, "r") as hexfile: + with open(args.logfile, "r", encoding="iso-8859-1") as hexfile: for line in hexfile.readlines(): hexdata += line.strip() @@ -109,6 +98,31 @@ def main(): logfile.close() + return logdata + + +def 
main(): + """Main function of log parser""" + args = parse_args() + + # Setup logging for parser + logging.basicConfig(format=LOGGER_FORMAT) + if args.debug: + logger.setLevel(logging.DEBUG) + else: + logger.setLevel(logging.INFO) + + # Read from database file + database = LogDatabase.read_json_database(args.dbfile) + if database is None: + logger.error("ERROR: Cannot open database file: %s, exiting...", args.dbfile) + sys.exit(1) + + logdata = read_log_file(args) + if logdata is None: + logger.error("ERROR: cannot read log from file: %s, exiting...", args.logfile) + sys.exit(1) + log_parser = dictionary_parser.get_parser(database) if log_parser is not None: logger.debug("# Build ID: %s", database.get_build_id())