Skip to content

Commit fb055ea

Browse files
dcpleung
authored and kartben committed
logging: dictionary: format scripts with ruff
Simple formatting via ruff on dictionary logging scripts. No manual editing was done on the scripts. Signed-off-by: Daniel Leung <daniel.leung@intel.com>
1 parent e37b3ca commit fb055ea

File tree

11 files changed

+199
-238
lines changed

11 files changed

+199
-238
lines changed

.ruff-excludes.toml

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1250,15 +1250,6 @@ exclude = [
12501250
"./scripts/list_boards.py",
12511251
"./scripts/list_hardware.py",
12521252
"./scripts/list_shields.py",
1253-
"./scripts/logging/dictionary/database_gen.py",
1254-
"./scripts/logging/dictionary/dictionary_parser/data_types.py",
1255-
"./scripts/logging/dictionary/dictionary_parser/log_database.py",
1256-
"./scripts/logging/dictionary/dictionary_parser/log_parser.py",
1257-
"./scripts/logging/dictionary/dictionary_parser/log_parser_v1.py",
1258-
"./scripts/logging/dictionary/dictionary_parser/log_parser_v3.py",
1259-
"./scripts/logging/dictionary/dictionary_parser/utils.py",
1260-
"./scripts/logging/dictionary/log_parser.py",
1261-
"./scripts/logging/dictionary/log_parser_uart.py",
12621253
"./scripts/make_bugs_pickle.py",
12631254
"./scripts/net/enumerate_http_status.py",
12641255
"./scripts/profiling/stackcollapse.py",
@@ -1474,6 +1465,5 @@ exclude = [
14741465
"./tests/net/socket/tls_configurations/pytest/test_app_vs_openssl.py",
14751466
"./tests/net/socket/udp/generate-c-string.py",
14761467
"./tests/subsys/debug/gdbstub/pytest/test_gdbstub.py",
1477-
"./tests/subsys/logging/dictionary/pytest/test_logging_dictionary.py",
14781468
"./tests/ztest/ztest_param/pytest/test_parameters.py",
14791469
]

scripts/logging/dictionary/database_gen.py

Lines changed: 57 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -43,9 +43,7 @@
4343
]
4444

4545
# Sections that contains static strings but are not part of the binary (allocable).
46-
REMOVED_STRING_SECTIONS = [
47-
'log_strings'
48-
]
46+
REMOVED_STRING_SECTIONS = ['log_strings']
4947

5048

5149
# Regulation expression to match DWARF location
@@ -76,23 +74,18 @@ def parse_args():
7674

7775
argparser.add_argument("elffile", help="Zephyr ELF binary")
7876
argparser.add_argument("--build", help="Build ID")
79-
argparser.add_argument("--build-header",
80-
help="Header file containing BUILD_VERSION define")
81-
argparser.add_argument("--debug", action="store_true",
82-
help="Print extra debugging information")
83-
argparser.add_argument("-v", "--verbose", action="store_true",
84-
help="Print more information")
77+
argparser.add_argument("--build-header", help="Header file containing BUILD_VERSION define")
78+
argparser.add_argument("--debug", action="store_true", help="Print extra debugging information")
79+
argparser.add_argument("-v", "--verbose", action="store_true", help="Print more information")
8580

8681
outfile_grp = argparser.add_mutually_exclusive_group(required=True)
87-
outfile_grp.add_argument("--json",
88-
help="Output Dictionary Logging Database file in JSON")
89-
outfile_grp.add_argument("--syst",
90-
help="Output MIPI Sys-T Collateral XML file")
82+
outfile_grp.add_argument("--json", help="Output Dictionary Logging Database file in JSON")
83+
outfile_grp.add_argument("--syst", help="Output MIPI Sys-T Collateral XML file")
9184

9285
return argparser.parse_args()
9386

9487

95-
def extract_elf_code_data_sections(elf, wildcards = None):
88+
def extract_elf_code_data_sections(elf, wildcards=None):
9689
"""Find all sections in ELF file"""
9790
sections = {}
9891

@@ -101,17 +94,17 @@ def extract_elf_code_data_sections(elf, wildcards = None):
10194
# since they actually have code/data.
10295
#
10396
# On contrary, BSS is allocated but NOBITS.
104-
if (((wildcards is not None) and (sect.name in wildcards)) or
105-
((sect['sh_flags'] & SH_FLAGS.SHF_ALLOC) == SH_FLAGS.SHF_ALLOC
106-
and sect['sh_type'] == 'SHT_PROGBITS')
97+
if ((wildcards is not None) and (sect.name in wildcards)) or (
98+
(sect['sh_flags'] & SH_FLAGS.SHF_ALLOC) == SH_FLAGS.SHF_ALLOC
99+
and sect['sh_type'] == 'SHT_PROGBITS'
107100
):
108101
sections[sect.name] = {
109-
'name' : sect.name,
110-
'size' : sect['sh_size'],
111-
'start' : sect['sh_addr'],
112-
'end' : sect['sh_addr'] + sect['sh_size'] - 1,
113-
'data' : sect.data(),
114-
}
102+
'name': sect.name,
103+
'size': sect['sh_size'],
104+
'start': sect['sh_addr'],
105+
'end': sect['sh_addr'] + sect['sh_size'] - 1,
106+
'data': sect.data(),
107+
}
115108

116109
return sections
117110

@@ -121,11 +114,11 @@ def find_elf_sections(elf, sh_name):
121114
for section in elf.iter_sections():
122115
if section.name == sh_name:
123116
ret = {
124-
'name' : section.name,
125-
'size' : section['sh_size'],
126-
'start' : section['sh_addr'],
127-
'end' : section['sh_addr'] + section['sh_size'] - 1,
128-
'data' : section.data(),
117+
'name': section.name,
118+
'size': section['sh_size'],
119+
'start': section['sh_addr'],
120+
'end': section['sh_addr'] + section['sh_size'] - 1,
121+
'data': section.data(),
129122
}
130123

131124
return ret
@@ -137,17 +130,20 @@ def get_kconfig_symbols(elf):
137130
"""Get kconfig symbols from the ELF file"""
138131
for section in elf.iter_sections():
139132
if isinstance(section, SymbolTableSection) and section['sh_type'] != 'SHT_DYNSYM':
140-
return {sym.name: sym.entry.st_value
141-
for sym in section.iter_symbols()
142-
if sym.name.startswith("CONFIG_")}
133+
return {
134+
sym.name: sym.entry.st_value
135+
for sym in section.iter_symbols()
136+
if sym.name.startswith("CONFIG_")
137+
}
143138

144139
raise LookupError("Could not find symbol table")
145140

146141

147142
def find_log_const_symbols(elf):
148143
"""Extract all "log_const_*" symbols from ELF file"""
149-
symbol_tables = [s for s in elf.iter_sections()
150-
if isinstance(s, elftools.elf.sections.SymbolTableSection)]
144+
symbol_tables = [
145+
s for s in elf.iter_sections() if isinstance(s, elftools.elf.sections.SymbolTableSection)
146+
]
151147

152148
ret_list = []
153149

@@ -259,8 +255,7 @@ def process_kconfigs(elf, database):
259255
#
260256
# Use 32-bit timestamp? or 64-bit?
261257
if "CONFIG_LOG_TIMESTAMP_64BIT" in kconfigs:
262-
database.add_kconfig("CONFIG_LOG_TIMESTAMP_64BIT",
263-
kconfigs['CONFIG_LOG_TIMESTAMP_64BIT'])
258+
database.add_kconfig("CONFIG_LOG_TIMESTAMP_64BIT", kconfigs['CONFIG_LOG_TIMESTAMP_64BIT'])
264259

265260

266261
def extract_logging_subsys_information(elf, database, string_mappings):
@@ -289,9 +284,9 @@ def is_die_attr_ref(attr):
289284
"""
290285
Returns True if the DIE attribute is a reference.
291286
"""
292-
return bool(attr.form in ('DW_FORM_ref1', 'DW_FORM_ref2',
293-
'DW_FORM_ref4', 'DW_FORM_ref8',
294-
'DW_FORM_ref'))
287+
return bool(
288+
attr.form in ('DW_FORM_ref1', 'DW_FORM_ref2', 'DW_FORM_ref4', 'DW_FORM_ref8', 'DW_FORM_ref')
289+
)
295290

296291

297292
def find_die_var_base_type(compile_unit, die, is_const):
@@ -351,7 +346,8 @@ def extract_string_variables(elf):
351346
for die in compile_unit.iter_DIEs():
352347
# Only care about variables with location information
353348
# and of type "char"
354-
if die.tag == 'DW_TAG_variable' and ('DW_AT_type' in die.attributes
349+
if die.tag == 'DW_TAG_variable' and (
350+
'DW_AT_type' in die.attributes
355351
and 'DW_AT_location' in die.attributes
356352
and is_die_var_const_char(compile_unit, die)
357353
):
@@ -362,23 +358,25 @@ def extract_string_variables(elf):
362358
loc = loc_parser.parse_from_attribute(loc_attr, die.cu['version'], die)
363359
if isinstance(loc, LocationExpr):
364360
try:
365-
addr = describe_DWARF_expr(loc.loc_expr,
366-
dwarf_info.structs)
361+
addr = describe_DWARF_expr(loc.loc_expr, dwarf_info.structs)
367362

368363
matcher = DT_LOCATION_REGEX.match(addr)
369364
if matcher:
370365
addr = int(matcher.group(1), 16)
371366
if addr > 0:
372-
strings.append({
373-
'name': die.attributes['DW_AT_name'].value,
374-
'addr': addr,
375-
'die': die
376-
})
367+
strings.append(
368+
{
369+
'name': die.attributes['DW_AT_name'].value,
370+
'addr': addr,
371+
'die': die,
372+
}
373+
)
377374
except KeyError:
378375
pass
379376

380377
return strings
381378

379+
382380
def try_decode_string(str_maybe):
383381
"""Check if it is a printable string"""
384382
for encoding in STR_ENCODINGS:
@@ -389,6 +387,7 @@ def try_decode_string(str_maybe):
389387

390388
return None
391389

390+
392391
def is_printable(b):
393392
# Check if string is printable according to Python
394393
# since the parser (written in Python) will need to
@@ -398,6 +397,7 @@ def is_printable(b):
398397
# string.printable so they need to be checked separately.
399398
return (b in string.printable) or (b in ACCEPTABLE_ESCAPE_CHARS)
400399

400+
401401
def extract_strings_in_one_section(section, str_mappings):
402402
"""Extract NULL-terminated strings in one ELF section"""
403403
data = section['data']
@@ -412,7 +412,7 @@ def extract_strings_in_one_section(section, str_mappings):
412412
# End of possible string
413413
if start is not None:
414414
# Found potential string
415-
str_maybe = data[start : idx]
415+
str_maybe = data[start:idx]
416416
decoded_str = try_decode_string(str_maybe)
417417

418418
if decoded_str is not None:
@@ -425,8 +425,9 @@ def extract_strings_in_one_section(section, str_mappings):
425425
# (e.g. extended ASC-II characters) or control
426426
# characters (e.g. '\r' or '\n'), so simply print
427427
# the byte string instead.
428-
logger.debug('Found string via extraction at ' + PTR_FMT + ': %s',
429-
addr, str_maybe)
428+
logger.debug(
429+
'Found string via extraction at ' + PTR_FMT + ': %s', addr, str_maybe
430+
)
430431

431432
# GCC-based toolchain will reuse the NULL character
432433
# for empty strings. There is no way to know which
@@ -435,8 +436,7 @@ def extract_strings_in_one_section(section, str_mappings):
435436
null_addr = section['start'] + idx
436437
str_mappings[null_addr] = ''
437438

438-
logger.debug('Found null string via extraction at ' + PTR_FMT,
439-
null_addr)
439+
logger.debug('Found null string via extraction at ' + PTR_FMT, null_addr)
440440
start = None
441441
else:
442442
# Non-printable byte, remove start location
@@ -461,8 +461,9 @@ def extract_static_strings(elf, database, section_extraction=False):
461461
one_str = extract_one_string_in_section(sect, str_var['addr'])
462462
if one_str is not None:
463463
string_mappings[str_var['addr']] = one_str
464-
logger.debug('Found string variable at ' + PTR_FMT + ': %s',
465-
str_var['addr'], one_str)
464+
logger.debug(
465+
'Found string variable at ' + PTR_FMT + ': %s', str_var['addr'], one_str
466+
)
466467
break
467468

468469
if section_extraction:
@@ -478,8 +479,7 @@ def extract_static_strings(elf, database, section_extraction=False):
478479

479480
for sect_name in string_sections:
480481
if sect_name in elf_sections:
481-
rawstr_map = extract_strings_in_one_section(elf_sections[sect_name],
482-
rawstr_map)
482+
rawstr_map = extract_strings_in_one_section(elf_sections[sect_name], rawstr_map)
483483

484484
for one_str in rawstr_map:
485485
if one_str not in string_mappings:
@@ -562,13 +562,11 @@ def main():
562562

563563
# Write database file
564564
if args.json and not LogDatabase.write_json_database(args.json, database):
565-
logger.error("ERROR: Cannot open database file for write: %s, exiting...",
566-
args.json)
565+
logger.error("ERROR: Cannot open database file for write: %s, exiting...", args.json)
567566
sys.exit(1)
568567

569568
if args.syst and not LogDatabase.write_syst_database(args.syst, database):
570-
logger.error("ERROR: Cannot open database file for write: %s, exiting...",
571-
args.syst)
569+
logger.error("ERROR: Cannot open database file for write: %s, exiting...", args.syst)
572570
sys.exit(1)
573571

574572
elffile.close()

scripts/logging/dictionary/dictionary_parser/data_types.py

Lines changed: 6 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414

1515
class DataTypes:
1616
"""Class regarding data types, their alignments and sizes"""
17+
1718
INT = 0
1819
UINT = 1
1920
LONG = 2
@@ -47,14 +48,13 @@ def __init__(self, database):
4748
self.add_data_type(self.DOUBLE, "d")
4849
self.add_data_type(self.LONG_DOUBLE, "d")
4950

50-
5151
@staticmethod
5252
def get_stack_min_align(arch, is_tgt_64bit):
5353
'''
5454
Correspond to the VA_STACK_ALIGN and VA_STACK_MIN_ALIGN
5555
in cbprintf_internal.h. Note that there might be some
56-
variations that is obtained via actually running through
57-
the log parser.
56+
variations that is obtained via actually running through
57+
the log parser.
5858
5959
Return a tuple where the first element is stack alignment
6060
value. The second element is true if alignment needs to
@@ -102,7 +102,6 @@ def get_stack_min_align(arch, is_tgt_64bit):
102102

103103
return (stack_min_align, need_further_align)
104104

105-
106105
@staticmethod
107106
def get_data_type_align(data_type, is_tgt_64bit):
108107
'''
@@ -121,7 +120,6 @@ def get_data_type_align(data_type, is_tgt_64bit):
121120

122121
return align
123122

124-
125123
def add_data_type(self, data_type, fmt):
126124
"""Add one data type"""
127125
if self.database.is_tgt_little_endian():
@@ -158,31 +156,26 @@ def add_data_type(self, data_type, fmt):
158156
# 'stack_align' should correspond to VA_STACK_ALIGN
159157
# in cbprintf_internal.h
160158
stack_align, need_more_align = DataTypes.get_stack_min_align(
161-
self.database.get_arch(),
162-
self.database.is_tgt_64bit())
159+
self.database.get_arch(), self.database.is_tgt_64bit()
160+
)
163161

164162
if need_more_align:
165-
stack_align = DataTypes.get_data_type_align(data_type,
166-
self.database.is_tgt_64bit())
163+
stack_align = DataTypes.get_data_type_align(data_type, self.database.is_tgt_64bit())
167164

168165
self.data_types[data_type]['stack_align'] = stack_align
169166

170-
171167
def get_sizeof(self, data_type):
172168
"""Get sizeof() of a data type"""
173169
return self.data_types[data_type]['sizeof']
174170

175-
176171
def get_alignment(self, data_type):
177172
"""Get the alignment of a data type"""
178173
return self.data_types[data_type]['align']
179174

180-
181175
def get_stack_alignment(self, data_type):
182176
"""Get the stack alignment of a data type"""
183177
return self.data_types[data_type]['stack_align']
184178

185-
186179
def get_formatter(self, data_type):
187180
"""Get the formatter for a data type"""
188181
return self.data_types[data_type]['fmt']

0 commit comments

Comments
 (0)