text stringlengths 9 39.2M | dir stringlengths 25 226 | lang stringclasses 163 values | created_date timestamp[s] | updated_date timestamp[s] | repo_name stringclasses 751 values | repo_full_name stringclasses 752 values | star int64 1.01k 183k | len_tokens int64 1 18.5M |
|---|---|---|---|---|---|---|---|---|
```python
#!/usr/bin/env python3
#
#
"""
Dictionary-based Logging Database Generator
This takes the built Zephyr ELF binary and produces a JSON database
file for dictionary-based logging. This database is used together
with the parser to decode binary log messages.
"""
import argparse
import logging
import os
import re
import string
import struct
import sys
import dictionary_parser.log_database
from dictionary_parser.log_database import LogDatabase
from dictionary_parser.utils import extract_one_string_in_section
from dictionary_parser.utils import find_string_in_mappings
import elftools
from elftools.elf.constants import SH_FLAGS
from elftools.elf.elffile import ELFFile
from elftools.elf.descriptions import describe_ei_data
from elftools.elf.sections import SymbolTableSection
from elftools.dwarf.descriptions import (
describe_DWARF_expr
)
from elftools.dwarf.locationlists import (
LocationExpr, LocationParser
)
LOGGER_FORMAT = "%(name)s: %(levelname)s: %(message)s"
logger = logging.getLogger(os.path.basename(sys.argv[0]))
# Sections that contain static strings
STATIC_STRING_SECTIONS = [
'rodata',
'.rodata',
'pinned.rodata',
]
# Sections that contain static strings but are not part of the binary (allocable).
REMOVED_STRING_SECTIONS = [
'log_strings'
]
# Regular expression to match DWARF location
DT_LOCATION_REGEX = re.compile(r"\(DW_OP_addr: ([0-9a-f]+)")
# Format string for pointers (default for 32-bit pointers)
PTR_FMT = '0x%08x'
# Potential string encodings. Add as needed.
STR_ENCODINGS = [
'ascii',
'iso-8859-1',
]
# List of acceptable escape character
ACCEPTABLE_ESCAPE_CHARS = [
b'\r',
b'\n',
]
def parse_args():
    """Parse and return the command line arguments."""
    parser = argparse.ArgumentParser(allow_abbrev=False)

    parser.add_argument("elffile", help="Zephyr ELF binary")
    parser.add_argument("--build", help="Build ID")
    parser.add_argument("--build-header",
                        help="Header file containing BUILD_VERSION define")
    parser.add_argument("--debug", action="store_true",
                        help="Print extra debugging information")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print more information")

    # Exactly one output format must be chosen.
    output_group = parser.add_mutually_exclusive_group(required=True)
    output_group.add_argument("--json",
                              help="Output Dictionary Logging Database file in JSON")
    output_group.add_argument("--syst",
                              help="Output MIPI Sys-T Collateral XML file")

    return parser.parse_args()
def extract_elf_code_data_sections(elf, wildcards=None):
    """Collect sections from the ELF file that carry code or data.

    A section is kept when its name appears in ``wildcards``, or when
    it is allocated (SHF_ALLOC) *and* of type PROGBITS, since only
    those actually contain code/data bytes. BSS, by contrast, is
    allocated but NOBITS, so it is skipped.

    Returns a dict keyed by section name with name/size/start/end/data.
    """
    sections = {}

    for sect in elf.iter_sections():
        # Explicitly requested sections bypass the flags/type filter.
        wanted = wildcards is not None and sect.name in wildcards
        if not wanted:
            wanted = ((sect['sh_flags'] & SH_FLAGS.SHF_ALLOC) == SH_FLAGS.SHF_ALLOC
                      and sect['sh_type'] == 'SHT_PROGBITS')

        if wanted:
            start = sect['sh_addr']
            size = sect['sh_size']
            sections[sect.name] = {
                'name': sect.name,
                'size': size,
                'start': start,
                'end': start + size - 1,
                'data': sect.data(),
            }

    return sections
def find_elf_sections(elf, sh_name):
    """Return metadata for the section named ``sh_name``.

    The returned dict contains name/size/start/end/data, or None when
    no section with that name exists.
    """
    for sect in elf.iter_sections():
        if sect.name != sh_name:
            continue
        start = sect['sh_addr']
        size = sect['sh_size']
        return {
            'name': sect.name,
            'size': size,
            'start': start,
            'end': start + size - 1,
            'data': sect.data(),
        }
    return None
def get_kconfig_symbols(elf):
    """Return a {name: value} mapping of all CONFIG_* symbols.

    Only the static symbol table is considered (SHT_DYNSYM is skipped).
    Raises LookupError when the ELF has no usable symbol table.
    """
    for sect in elf.iter_sections():
        if not isinstance(sect, SymbolTableSection) or sect['sh_type'] == 'SHT_DYNSYM':
            continue
        return {
            sym.name: sym.entry.st_value
            for sym in sect.iter_symbols()
            if sym.name.startswith("CONFIG_")
        }

    raise LookupError("Could not find symbol table")
def find_log_const_symbols(elf):
    """Extract all "log_const_*" symbols from ELF file.

    Returns a list of symbol objects. The original filtered symbol
    tables twice (a list comprehension followed by an identical
    isinstance() check inside the loop); the redundant pass is removed.
    """
    ret_list = []

    for section in elf.iter_sections():
        if not isinstance(section, elftools.elf.sections.SymbolTableSection):
            continue

        # A zero entry size means the table holds no symbols.
        if section['sh_entsize'] == 0:
            continue

        ret_list.extend(sym for sym in section.iter_symbols()
                        if sym.name.startswith("log_const_"))

    return ret_list
def parse_log_const_symbols(database, log_const_area, log_const_symbols, string_mappings):
    """Find the log instances and map source IDs to names.

    Each "log_const_*" symbol points into ``log_const_area`` at a
    struct of (pointer-to-name-string, level). The source ID is the
    element index within that array. Results are stored in
    ``database`` via add_log_instance().
    """
    if not log_const_symbols:
        # Nothing to parse; the original would raise IndexError here.
        return

    formatter = "<" if database.is_tgt_little_endian() else ">"

    # Pointer to the instance's name string: 64- or 32-bit.
    formatter += "Q" if database.is_tgt_64bit() else "L"

    # log instance level
    formatter += "B"

    datum_size = struct.calcsize(formatter)

    # Address of the first log instance in the array, as an offset
    # into the section data.
    first_offset = min(sym.entry['st_value'] for sym in log_const_symbols)
    first_offset -= log_const_area['start']

    # find all log_const_*
    for sym in log_const_symbols:
        # Find data offset in log_const_area for this symbol
        offset = sym.entry['st_value'] - log_const_area['start']

        datum = log_const_area['data'][offset:offset + datum_size]
        if len(datum) != datum_size:
            # Not enough data to unpack
            continue

        str_ptr, level = struct.unpack(formatter, datum)

        # Offset to rodata section for string
        instance_name = find_string_in_mappings(string_mappings, str_ptr)
        if instance_name is None:
            instance_name = "unknown"

        logger.info("Found Log Instance: %s, level: %d", instance_name, level)

        # Source ID is simply the element index in the log instance
        # array. Offsets are non-negative so floor division matches
        # the original int(a / b) truncation.
        source_id = (offset - first_offset) // sym.entry['st_size']

        database.add_log_instance(source_id, instance_name, level, sym.entry['st_value'])
def extract_elf_information(elf, database):
    """Record the target endianness from the ELF header in the database.

    Exits the program when the EI_DATA field is neither LSB nor MSB.
    """
    elf_data = describe_ei_data(elf.header['e_ident']['EI_DATA'])

    descriptions = elftools.elf.descriptions._DESCR_EI_DATA
    if elf_data == descriptions['ELFDATA2LSB']:
        database.set_tgt_endianness(LogDatabase.LITTLE_ENDIAN)
    elif elf_data == descriptions['ELFDATA2MSB']:
        database.set_tgt_endianness(LogDatabase.BIG_ENDIAN)
    else:
        logger.error("Cannot determine endianness from ELF file, exiting...")
        sys.exit(1)
def process_kconfigs(elf, database):
    """Process kconfig symbols from the ELF and store relevant ones.

    Determines target bitness and architecture, and records kconfigs
    the parser needs (currently the timestamp width). Exits when no
    known architecture kconfig is present.
    """
    kconfigs = get_kconfig_symbols(elf)

    # 32 or 64-bit target
    database.set_tgt_bits(64 if "CONFIG_64BIT" in kconfigs else 32)

    # Architecture: first ARCHS entry whose identifying kconfig is set.
    for name, arch in dictionary_parser.log_database.ARCHS.items():
        if arch['kconfig'] in kconfigs:
            database.set_arch(name)
            break
    else:
        # Fixed grammar of the original message ("Did not found").
        logger.error("Did not find architecture")
        sys.exit(1)

    # Put some kconfigs into the database
    #
    # Use 32-bit timestamp? or 64-bit?
    if "CONFIG_LOG_TIMESTAMP_64BIT" in kconfigs:
        database.add_kconfig("CONFIG_LOG_TIMESTAMP_64BIT",
                             kconfigs['CONFIG_LOG_TIMESTAMP_64BIT'])
def extract_logging_subsys_information(elf, database, string_mappings):
    """
    Extract logging subsys related information and store in database.

    For example, this extracts the list of log instances to establish
    mapping from source ID to name.
    """
    # Extract log constant section for module names
    section_log_const = find_elf_sections(elf, "log_const_area")
    if section_log_const is None:
        # ESP32 puts "log_const_*" into log_static_section instead of
        # log_const_area.
        section_log_const = find_elf_sections(elf, "log_static_section")
    if section_log_const is None:
        # The original message named 'log_const_areas', a section that
        # is never actually searched; report the real candidates.
        logger.error("Cannot find section 'log_const_area' or "
                     "'log_static_section' in ELF file, exiting...")
        sys.exit(1)

    # Find all "log_const_*" symbols and parse them
    log_const_symbols = find_log_const_symbols(elf)
    parse_log_const_symbols(database, section_log_const, log_const_symbols, string_mappings)
def is_die_attr_ref(attr):
    """Return True if the DIE attribute's form is a DIE reference."""
    ref_forms = ('DW_FORM_ref1', 'DW_FORM_ref2',
                 'DW_FORM_ref4', 'DW_FORM_ref8',
                 'DW_FORM_ref')
    # 'in' already yields a bool; the original's bool() wrapper was
    # redundant.
    return attr.form in ref_forms
def find_die_var_base_type(compile_unit, die, is_const):
    """Resolve a DIE's type chain down to its base type name.

    Follows DW_AT_type references recursively, remembering whether a
    const qualifier was seen anywhere along the chain. Returns a tuple
    (type_name, is_const), or (None, None) when no base type can be
    reached.
    """
    # Base type reached: report its name.
    if die.tag == 'DW_TAG_base_type':
        return die.attributes['DW_AT_name'].value.decode('ascii'), is_const

    # Without a DW_AT_type attribute there is nothing left to follow.
    if 'DW_AT_type' not in die.attributes:
        return None, None

    # Record const-ness encountered on the way down.
    if die.tag == 'DW_TAG_const_type':
        is_const = True

    # Follow the type attribute when it references another DIE.
    type_attr = die.attributes['DW_AT_type']
    if is_die_attr_ref(type_attr):
        ref_addr = compile_unit.cu_offset + type_attr.raw_value
        next_die = compile_unit.get_DIE_from_refaddr(ref_addr)
        return find_die_var_base_type(compile_unit, next_die, is_const)

    # Neither a base type nor a reference.
    return None, None
def is_die_var_const_char(compile_unit, die):
    """Return True if the variable DIE resolves to a const char type."""
    var_type, is_const = find_die_var_base_type(compile_unit, die, False)
    # A matching variable must have a resolvable base type that ends
    # with 'char' (covers signed/unsigned char) and be const-qualified.
    return var_type is not None and var_type.endswith('char') and is_const
def extract_string_variables(elf):
    """
    Find all string variables (char) in all Compilation Units and
    Debug information Entry (DIE) in ELF file.

    Returns a list of dicts with keys 'name' (raw bytes of the
    variable name), 'addr' (its memory address) and 'die'.
    """
    dwarf_info = elf.get_dwarf_info()
    loc_lists = dwarf_info.location_lists()
    loc_parser = LocationParser(loc_lists)

    strings = []

    # Loop through all Compilation Units and
    # Debug information Entry (DIE) to extract all string variables
    for compile_unit in dwarf_info.iter_CUs():
        for die in compile_unit.iter_DIEs():
            # Only care about variables with location information
            # and of type "char"
            if die.tag == 'DW_TAG_variable':
                if ('DW_AT_type' in die.attributes
                    and 'DW_AT_location' in die.attributes
                    and is_die_var_const_char(compile_unit, die)
                ):
                    # Extract location information, which is
                    # its address in memory.
                    loc_attr = die.attributes['DW_AT_location']
                    if loc_parser.attribute_has_location(loc_attr, die.cu['version']):
                        loc = loc_parser.parse_from_attribute(loc_attr, die.cu['version'])
                        if isinstance(loc, LocationExpr):
                            try:
                                # Render the location expression to text and
                                # pull the address out of the
                                # "(DW_OP_addr: <hex>" form via regex.
                                addr = describe_DWARF_expr(loc.loc_expr,
                                                           dwarf_info.structs)

                                matcher = DT_LOCATION_REGEX.match(addr)
                                if matcher:
                                    addr = int(matcher.group(1), 16)
                                    # Address 0 is not a real variable location.
                                    if addr > 0:
                                        strings.append({
                                            'name': die.attributes['DW_AT_name'].value,
                                            'addr': addr,
                                            'die': die
                                        })
                            except KeyError:
                                # Malformed/unsupported DWARF expression;
                                # skip this variable.
                                pass

    return strings
def try_decode_string(str_maybe):
    """Attempt to decode bytes with each known string encoding.

    Returns the decoded string from the first encoding that succeeds,
    or None when every encoding fails.
    """
    decoded = None
    for encoding in STR_ENCODINGS:
        try:
            decoded = str_maybe.decode(encoding)
            break
        except UnicodeDecodeError:
            continue
    return decoded
def is_printable(b):
    """Return True if the character can be printed by the parser.

    Check if string is printable according to Python since the parser
    (written in Python) will need to print the string.

    ``b`` is a one-character str (callers pass chr(x)). Note that
    string.printable already contains '\\r' and '\\n' (via its
    whitespace component), so the escape-char check is a safety net.
    The original compared the str ``b`` against the *bytes* entries of
    ACCEPTABLE_ESCAPE_CHARS, which could never match; encode first so
    the comparison is meaningful.
    """
    return (b in string.printable) or (b.encode() in ACCEPTABLE_ESCAPE_CHARS)
def extract_strings_in_one_section(section, str_mappings):
    """Extract NULL-terminated strings in one ELF section.

    Scans the raw section bytes for runs of printable characters that
    end in a NUL byte, decodes them and adds them to ``str_mappings``
    keyed by their address. The (possibly updated) mapping is returned.
    """
    data = section['data']
    idx = 0
    start = None
    for x in data:
        if is_printable(chr(x)):
            # Printable character, potential part of string
            if start is None:
                # Beginning of potential string
                start = idx
        elif x == 0:
            # End of possible string
            if start is not None:
                # Found potential string
                str_maybe = data[start : idx]
                decoded_str = try_decode_string(str_maybe)

                if decoded_str is not None:
                    addr = section['start'] + start

                    # First mapping for this address wins; do not
                    # overwrite strings found earlier (e.g. via DWARF).
                    if addr not in str_mappings:
                        str_mappings[addr] = decoded_str

                        # Decoded string may contain un-printable characters
                        # (e.g. extended ASC-II characters) or control
                        # characters (e.g. '\r' or '\n'), so simply print
                        # the byte string instead.
                        logger.debug('Found string via extraction at ' + PTR_FMT + ': %s',
                                     addr, str_maybe)

                        # GCC-based toolchain will reuse the NULL character
                        # for empty strings. There is no way to know which
                        # one is being reused, so just treat all NULL character
                        # at the end of legitimate strings as empty strings.
                        null_addr = section['start'] + idx
                        str_mappings[null_addr] = ''

                        logger.debug('Found null string via extraction at ' + PTR_FMT,
                                     null_addr)
                # Reset scanner state after a terminator, decoded or not.
                start = None
        else:
            # Non-printable byte, remove start location
            start = None

        idx += 1

    return str_mappings
def extract_static_strings(elf, database, section_extraction=False):
    """
    Extract static strings from ELF file using DWARF,
    and also extraction from binary data.

    Returns a dict mapping address -> string. DWARF-derived strings
    take precedence over strings found by raw section scanning.
    """
    string_mappings = {}

    elf_sections = extract_elf_code_data_sections(elf, REMOVED_STRING_SECTIONS)

    # Extract strings using ELF DWARF information
    str_vars = extract_string_variables(elf)
    for str_var in str_vars:
        for sect in elf_sections.values():
            one_str = extract_one_string_in_section(sect, str_var['addr'])
            if one_str is not None:
                string_mappings[str_var['addr']] = one_str
                logger.debug('Found string variable at ' + PTR_FMT + ': %s',
                             str_var['addr'], one_str)
                break

    if section_extraction:
        # Extract strings from ELF sections.
        #
        # Copy the list: the original aliased the module-level
        # STATIC_STRING_SECTIONS constant and then extend()ed it,
        # permanently growing the shared list on every call.
        string_sections = list(STATIC_STRING_SECTIONS)
        rawstr_map = {}

        # Some architectures may put static strings into additional sections.
        # So need to extract them too.
        arch_data = dictionary_parser.log_database.ARCHS[database.get_arch()]
        if "extra_string_section" in arch_data:
            string_sections.extend(arch_data['extra_string_section'])

        for sect_name in string_sections:
            if sect_name in elf_sections:
                rawstr_map = extract_strings_in_one_section(elf_sections[sect_name],
                                                            rawstr_map)

        # Merge raw-extracted strings without overriding DWARF results.
        for addr, one_str in rawstr_map.items():
            string_mappings.setdefault(addr, one_str)

    return string_mappings
def main():
    """Main function of database generator.

    Parses arguments, opens the ELF, extracts target info and static
    strings, then writes the database as JSON or MIPI Sys-T XML.
    """
    args = parse_args()

    # Setup logging
    logging.basicConfig(format=LOGGER_FORMAT, level=logging.WARNING)
    if args.debug:
        logger.setLevel(logging.DEBUG)
    elif args.verbose:
        logger.setLevel(logging.INFO)

    # open() raises OSError on failure rather than returning a falsy
    # object (the original's "if not elffile" check was dead code);
    # catch the error and report it. The with-block below guarantees
    # the file is closed on every exit path.
    try:
        elffile = open(args.elffile, "rb")
    except OSError:
        logger.error("ERROR: Cannot open ELF file: %s, exiting...", args.elffile)
        sys.exit(1)

    logger.info("ELF file %s", args.elffile)

    # The two output options are a required mutually-exclusive group,
    # so exactly one branch runs and section_extraction is always set.
    if args.json:
        logger.info("JSON Database file %s", args.json)
        section_extraction = True

    if args.syst:
        logger.info("MIPI Sys-T Collateral file %s", args.syst)
        section_extraction = False

    with elffile:
        elf = ELFFile(elffile)

        database = LogDatabase()

        if args.build_header:
            with open(args.build_header) as header_file:
                for line in header_file:
                    match = re.match(r'\s*#define\s+BUILD_VERSION\s+(.*)', line)
                    if match:
                        database.set_build_id(match.group(1))
                        break

        if args.build:
            database.set_build_id(args.build)
            logger.info("Build ID: %s", args.build)

        extract_elf_information(elf, database)

        process_kconfigs(elf, database)

        logger.info("Target: %s, %d-bit", database.get_arch(), database.get_tgt_bits())
        if database.is_tgt_little_endian():
            logger.info("Endianness: Little")
        else:
            logger.info("Endianness: Big")

        if database.is_tgt_64bit():
            # Widen the pointer format used in debug messages.
            global PTR_FMT
            PTR_FMT = '0x%016x'

        # Extract strings from ELF files
        string_mappings = extract_static_strings(elf, database, section_extraction)
        if len(string_mappings) > 0:
            database.set_string_mappings(string_mappings)
            logger.info("Found %d strings", len(string_mappings))

        # Extract information related to logging subsystem
        if not section_extraction:
            # The logging subsys information (e.g. log module names)
            # may require additional strings outside of those extracted
            # via ELF DWARF variables. So generate a new string mappings
            # with strings in various ELF sections.
            string_mappings = extract_static_strings(elf, database, section_extraction=True)
        extract_logging_subsys_information(elf, database, string_mappings)

        # Write database file
        if args.json:
            if not LogDatabase.write_json_database(args.json, database):
                logger.error("ERROR: Cannot open database file for write: %s, exiting...",
                             args.json)
                sys.exit(1)

        if args.syst:
            if not LogDatabase.write_syst_database(args.syst, database):
                logger.error("ERROR: Cannot open database file for write: %s, exiting...",
                             args.syst)
                sys.exit(1)
# Script entry point.
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/logging/dictionary/database_gen.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 4,239 |
```python
#!/usr/bin/env python3
#
#
"""
Class for Dictionary-based Logging Database
"""
import base64
import copy
import json
from .mipi_syst import gen_syst_xml_file
from .utils import extract_one_string_in_section
from .utils import find_string_in_mappings
# Supported target architectures, keyed by the name stored in the
# database. Each entry names the kconfig symbol that identifies the
# architecture in the ELF, plus optional arch-specific extraction
# hints. NOTE(review): "riscv32e" appears before "riscv" — presumably
# because RV32E builds also define CONFIG_RISCV and matching order
# matters to the caller; confirm before reordering.
ARCHS = {
    "arc" : {
        "kconfig": "CONFIG_ARC",
    },
    "arm" : {
        "kconfig": "CONFIG_ARM",
    },
    "arm64" : {
        "kconfig": "CONFIG_ARM64",
    },
    "mips" : {
        "kconfig": "CONFIG_MIPS",
    },
    "sparc" : {
        "kconfig": "CONFIG_SPARC",
    },
    "x86" : {
        "kconfig": "CONFIG_X86",
    },
    "nios2" : {
        "kconfig": "CONFIG_NIOS2",

        # Small static strings are put into section "datas"
        # so we need to include them also.
        #
        # See include/arch/nios2/linker.ld on .sdata.*
        # for explanation.
        "extra_string_section": ['datas'],
    },
    "posix" : {
        "kconfig": "CONFIG_ARCH_POSIX",
    },
    "riscv32e" : {
        "kconfig": "CONFIG_RISCV_ISA_RV32E",
    },
    "riscv" : {
        "kconfig": "CONFIG_RISCV",
    },
    "xtensa" : {
        "kconfig": "CONFIG_XTENSA",
    },
}
class LogDatabase():
    """In-memory representation of a dictionary logging database.

    Holds target information (architecture, bitness, endianness),
    extracted string mappings, log instance metadata and selected
    kconfig values, and (de)serializes the whole thing to a JSON file
    or a MIPI Sys-T collateral XML file.
    """
    # Update this if database format of dictionary based logging
    # has changed
    ZEPHYR_DICT_LOG_VER = 3

    LITTLE_ENDIAN = True
    BIG_ENDIAN = False

    def __init__(self):
        # Skeleton structure; the setters below fill in the fields.
        self.database = {
            'version': self.ZEPHYR_DICT_LOG_VER,
            'target': {},
            'log_subsys': {
                'log_instances': {},
            },
            'build_id': None,
            'arch': None,
            'kconfigs': {},
        }

    def get_version(self):
        """Get Database Version"""
        return self.database['version']

    def get_build_id(self):
        """Get Build ID (None if not set)"""
        return self.database['build_id']

    def set_build_id(self, build_id):
        """Set Build ID in Database"""
        self.database['build_id'] = build_id

    def get_arch(self):
        """Get the Target Architecture (None if not set)"""
        return self.database['arch']

    def set_arch(self, arch):
        """Set the Target Architecture"""
        self.database['arch'] = arch

    def get_tgt_bits(self):
        """Get Target Bitness: 32 or 64 (None if not set)"""
        return self.database['target'].get('bits')

    def set_tgt_bits(self, bits):
        """Set Target Bitness: 32 or 64"""
        self.database['target']['bits'] = bits

    def is_tgt_64bit(self):
        """Return True if target is 64-bit, False if 32-bit.
        None if bitness is unset or unrecognized."""
        bits = self.database['target'].get('bits')
        if bits == 32:
            return False
        if bits == 64:
            return True
        return None

    def get_tgt_endianness(self):
        """
        Get Target Endianness.
        Return True if little endian, False if big, None if not set.
        """
        return self.database['target'].get('little_endianness')

    def set_tgt_endianness(self, endianness):
        """
        Set Target Endianness
        True if little endian, False if big.
        """
        self.database['target']['little_endianness'] = endianness

    def is_tgt_little_endian(self):
        """Return True if target is little endian, None if not set"""
        endianness = self.database['target'].get('little_endianness')
        if endianness is None:
            return None
        return endianness == self.LITTLE_ENDIAN

    def get_string_mappings(self):
        """Get string mappings (address -> string) from database"""
        return self.database['string_mappings']

    def set_string_mappings(self, database):
        """Add string mappings (address -> string) to database"""
        self.database['string_mappings'] = database

    def has_string_mappings(self):
        """Return True if there are string mappings in database"""
        return 'string_mappings' in self.database

    def has_string_sections(self):
        """Return True if there are any static string sections"""
        if 'sections' not in self.database:
            return False
        return len(self.database['sections']) != 0

    def __find_string_in_mappings(self, string_ptr):
        """
        Find string pointed by string_ptr in the string mapping
        list. Return None if not found.
        """
        return find_string_in_mappings(self.database['string_mappings'], string_ptr)

    def __find_string_in_sections(self, string_ptr):
        """
        Find string pointed by string_ptr in the binary data
        sections. Return None if not found.
        """
        for sect in self.database['sections'].values():
            one_str = extract_one_string_in_section(sect, string_ptr)
            if one_str is not None:
                return one_str
        return None

    def find_string(self, string_ptr):
        """Find string pointed by string_ptr in the database.
        Return None if not found."""
        # Prefer the explicit mappings; fall back to scanning the raw
        # section data.
        one_str = None
        if self.has_string_mappings():
            one_str = self.__find_string_in_mappings(string_ptr)
        if one_str is None and self.has_string_sections():
            one_str = self.__find_string_in_sections(string_ptr)
        return one_str

    def add_log_instance(self, source_id, name, level, address):
        """Add one log instance into database"""
        self.database['log_subsys']['log_instances'][source_id] = {
            'source_id': source_id,
            'name': name,
            'level': level,
            'addr': address,
        }

    def get_log_source_string(self, domain_id, source_id):
        """Get the source string based on source ID"""
        # JSON stores keys as strings, so look up by the string form.
        # NOTE(review): instances added in-memory use integer keys and
        # are therefore only found after a JSON round-trip — this
        # matches the parser's usage; confirm before changing.
        src_id = str(source_id)
        log_instances = self.database['log_subsys']['log_instances']

        if src_id in log_instances:
            return log_instances[src_id]['name']

        return f"unknown<{domain_id}:{source_id}>"

    def add_kconfig(self, name, val):
        """Add a kconfig name-value pair into database"""
        self.database['kconfigs'][name] = val

    def get_kconfigs(self):
        """Return kconfig name-value pairs"""
        return self.database['kconfigs']

    @staticmethod
    def read_json_database(db_file_name):
        """Read database from file and return a LogDatabase object,
        or None when the file cannot be read or parsed."""
        try:
            with open(db_file_name, "r", encoding="iso-8859-1") as db_fd:
                json_db = json.load(db_fd)
        except (OSError, json.JSONDecodeError):
            return None

        # Decode data in JSON back into binary data
        if 'sections' in json_db:
            for sect in json_db['sections'].values():
                sect['data'] = base64.b64decode(sect['data_b64'])

        database = LogDatabase()
        database.database = json_db

        # JSON encodes the addresses in string mappings as literal
        # strings. So convert them back to integers, as this is
        # needed for partial matchings.
        if database.has_string_mappings():
            database.set_string_mappings({
                int(addr): one_str
                for addr, one_str in database.get_string_mappings().items()
            })

        return database

    @staticmethod
    def write_json_database(db_file_name, database):
        """Write the database into file. Return True on success."""
        json_db = copy.deepcopy(database.database)

        # Make database object into something JSON can dump
        if 'sections' in json_db:
            for sect in json_db['sections'].values():
                encoded = base64.b64encode(sect['data'])
                sect['data_b64'] = encoded.decode('ascii')
                del sect['data']

        try:
            with open(db_file_name, "w", encoding="iso-8859-1") as db_fd:
                db_fd.write(json.dumps(json_db))
        except OSError:
            return False

        return True

    @staticmethod
    def write_syst_database(db_file_name, database):
        """
        Write the database into MIPI Sys-T Collateral XML file.
        Return True on success.
        """
        try:
            with open(db_file_name, "w", encoding="iso-8859-1") as db_fd:
                db_fd.write(gen_syst_xml_file(database))
        except OSError:
            return False

        return True
``` | /content/code_sandbox/scripts/logging/dictionary/dictionary_parser/log_database.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,965 |
```python
#!/usr/bin/env python3
#
#
"""
Contains a class to describe data types used for
dictionary logging.
"""
import struct
class DataTypes():
    """Class regarding data types, their alignments and sizes.

    Pre-computes, for each supported argument data type, the struct
    format string, its size, and the alignments used when walking a
    packaged va_list. The values must stay in sync with the target's
    cbprintf_internal.h.
    """
    # Enumeration of supported argument data types.
    INT = 0
    UINT = 1
    LONG = 2
    ULONG = 3
    LONG_LONG = 4
    ULONG_LONG = 5
    PTR = 6
    DOUBLE = 7
    LONG_DOUBLE = 8

    NUM_TYPES = 9

    def __init__(self, database):
        # database provides target bitness, endianness and architecture.
        self.database = database
        self.data_types = {}

        if database.is_tgt_64bit():
            # 64-bit target: long, long long and pointers are 8 bytes.
            self.add_data_type(self.LONG, "q")
            self.add_data_type(self.LONG_LONG, "q")
            self.add_data_type(self.PTR, "Q")
        else:
            # 32-bit target: long and pointers are 4 bytes; long long
            # stays 8 bytes.
            self.add_data_type(self.LONG, "i")
            self.add_data_type(self.LONG_LONG, "q")
            self.add_data_type(self.PTR, "I")

        self.add_data_type(self.INT, "i")
        self.add_data_type(self.DOUBLE, "d")
        self.add_data_type(self.LONG_DOUBLE, "d")

    @staticmethod
    def get_stack_min_align(arch, is_tgt_64bit):
        '''
        Correspond to the VA_STACK_ALIGN and VA_STACK_MIN_ALIGN
        in cbprintf_internal.h. Note that there might be some
        variations that is obtained via actually running through
        the log parser.

        Return a tuple where the first element is stack alignment
        value. The second element is true if alignment needs to
        be further refined according to data type, false if not.
        '''
        if arch == "arc":
            if is_tgt_64bit:
                need_further_align = True
                stack_min_align = 8
            else:
                need_further_align = False
                stack_min_align = 1

        elif arch == "arm64":
            need_further_align = True
            stack_min_align = 8

        elif arch == "sparc":
            need_further_align = False
            stack_min_align = 1

        elif arch == "x86":
            if is_tgt_64bit:
                need_further_align = True
                stack_min_align = 8
            else:
                need_further_align = False
                stack_min_align = 1

        elif arch == "riscv32e":
            need_further_align = False
            stack_min_align = 1

        elif arch == "riscv":
            need_further_align = True
            if is_tgt_64bit:
                stack_min_align = 8
            else:
                stack_min_align = 1

        elif arch == "nios2":
            need_further_align = False
            stack_min_align = 1

        else:
            # Unknown architectures: refine per data type, assuming
            # no extra minimum stack alignment.
            need_further_align = True
            stack_min_align = 1

        return (stack_min_align, need_further_align)

    @staticmethod
    def get_data_type_align(data_type, is_tgt_64bit):
        '''
        Get the alignment for a particular data type.
        '''
        if data_type == DataTypes.LONG_LONG:
            align = 8
        elif data_type == DataTypes.LONG:
            if is_tgt_64bit:
                align = 8
            else:
                align = 4
        else:
            # va_list alignment is at least a integer
            align = 4

        return align

    def add_data_type(self, data_type, fmt):
        """Add one data type.

        Records its struct formatter (with endianness prefix), size,
        and both the argument alignment and the stack alignment used
        when stepping through packaged arguments.
        """
        if self.database.is_tgt_little_endian():
            endianness = "<"
        else:
            endianness = ">"

        formatter = endianness + fmt

        self.data_types[data_type] = {}
        self.data_types[data_type]['fmt'] = formatter

        size = struct.calcsize(formatter)

        if data_type == self.LONG_DOUBLE:
            # Python doesn't have long double but we still
            # need to skip correct number of bytes
            size = 16

        self.data_types[data_type]['sizeof'] = size

        # Might need actual number for different architectures
        # but these seem to work fine for now.
        if self.database.is_tgt_64bit():
            align = 8
        else:
            align = 4

        # 'align' is used to "jump" over an argument so it has
        # to be at least size of the data type.
        align = max(align, size)
        self.data_types[data_type]['align'] = align

        # 'stack_align' should correspond to VA_STACK_ALIGN
        # in cbprintf_internal.h
        stack_align, need_more_align = DataTypes.get_stack_min_align(
            self.database.get_arch(),
            self.database.is_tgt_64bit())

        if need_more_align:
            stack_align = DataTypes.get_data_type_align(data_type,
                                                        self.database.is_tgt_64bit())

        self.data_types[data_type]['stack_align'] = stack_align

    def get_sizeof(self, data_type):
        """Get sizeof() of a data type"""
        return self.data_types[data_type]['sizeof']

    def get_alignment(self, data_type):
        """Get the alignment of a data type"""
        return self.data_types[data_type]['align']

    def get_stack_alignment(self, data_type):
        """Get the stack alignment of a data type"""
        return self.data_types[data_type]['stack_align']

    def get_formatter(self, data_type):
        """Get the struct formatter for a data type"""
        return self.data_types[data_type]['fmt']
``` | /content/code_sandbox/scripts/logging/dictionary/dictionary_parser/data_types.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,175 |
```python
#!/usr/bin/env python3
#
#
# Parsers are gonig to have very similar code.
# So tell pylint not to care.
# pylint: disable=duplicate-code
"""
Dictionary-based Logging Parser Version 3
This contains the implementation of the parser for
version 3 databases.
"""
import logging
import struct
import colorama
from colorama import Fore
from .log_parser import (LogParser, get_log_level_str_color, formalize_fmt_string)
from .data_types import DataTypes
HEX_BYTES_IN_LINE = 16
# Need to keep sync with struct log_dict_output_msg_hdr in
# include/logging/log_output_dict.h.
#
# struct log_dict_output_normal_msg_hdr_t {
# uint8_t type;
# uint32_t domain:4;
# uint32_t level:4;
# uint32_t package_len:16;
# uint32_t data_len:16;
# uintptr_t source;
# log_timestamp_t timestamp;
# } __packed;
#
# Note "type" and "timestamp" are encoded separately below.
FMT_MSG_HDR_32 = "BHHI"
FMT_MSG_HDR_64 = "BHHQ"
# Message type
# 0: normal message
# 1: number of dropped messages
FMT_MSG_TYPE = "B"
# Depends on CONFIG_LOG_TIMESTAMP_64BIT
FMT_MSG_TIMESTAMP_32 = "I"
FMT_MSG_TIMESTAMP_64 = "Q"
# Keep message types in sync with include/logging/log_output_dict.h
MSG_TYPE_NORMAL = 0
MSG_TYPE_DROPPED = 1
# Number of dropped messages
FMT_DROPPED_CNT = "H"
logger = logging.getLogger("parser")
class LogParserV3(LogParser):
"""Log Parser V3"""
def __init__(self, database):
super().__init__(database=database)
if self.database.is_tgt_little_endian():
endian = "<"
self.is_big_endian = False
else:
endian = ">"
self.is_big_endian = True
self.fmt_msg_type = endian + FMT_MSG_TYPE
self.fmt_dropped_cnt = endian + FMT_DROPPED_CNT
if self.database.is_tgt_64bit():
self.fmt_msg_hdr = endian + FMT_MSG_HDR_64
else:
self.fmt_msg_hdr = endian + FMT_MSG_HDR_32
if "CONFIG_LOG_TIMESTAMP_64BIT" in self.database.get_kconfigs():
self.fmt_msg_timestamp = endian + FMT_MSG_TIMESTAMP_64
else:
self.fmt_msg_timestamp = endian + FMT_MSG_TIMESTAMP_32
def __get_string(self, arg, arg_offset, string_tbl):
one_str = self.database.find_string(arg)
if one_str is not None:
ret = one_str
else:
# The index from the string table is basically
# the order in va_list. Need to add to the index
# to skip the packaged string header and
# the format string.
str_idx = arg_offset + self.data_types.get_sizeof(DataTypes.PTR) * 2
str_idx /= self.data_types.get_sizeof(DataTypes.INT)
if int(str_idx) not in string_tbl:
ret = f'<string@0x{arg:x}>'
else:
ret = string_tbl[int(str_idx)]
return ret
    def process_one_fmt_str(self, fmt_str, arg_list, string_tbl):
        """Parse the format string to extract arguments from
        the binary arglist and return a tuple usable with
        Python's string formatting.

        This is a small state machine over the format string: after a
        '%' it consumes modifiers until a conversion character fixes
        the argument's data type, then unpacks that argument from
        ``arg_list`` honoring the target's va_list alignment rules.
        """
        idx = 0
        arg_offset = 0
        arg_data_type = None
        is_parsing = False
        do_extract = False

        args = []

        # Translated from cbvprintf_package()
        for idx, fmt in enumerate(fmt_str):
            if not is_parsing:
                if fmt == '%':
                    is_parsing = True
                    arg_data_type = DataTypes.INT
                continue

            elif fmt == '%':
                # '%%' -> literal percentage sign
                is_parsing = False
                continue

            elif fmt == '*':
                # Field width/precision supplied as an argument;
                # falls through to extract an int below.
                pass

            elif fmt.isdecimal() or str.lower(fmt) == 'l' \
                or fmt in (' ', '#', '-', '+', '.', 'h'):
                # formatting modifiers, just ignore
                continue

            elif fmt in ('j', 'z', 't'):
                # intmax_t, size_t or ptrdiff_t
                arg_data_type = DataTypes.LONG

            elif fmt in ('c', 'd', 'i', 'o', 'u') or str.lower(fmt) == 'x':
                # Width determined by preceding 'l'/'ll' modifiers.
                if fmt_str[idx - 1] == 'l':
                    if fmt_str[idx - 2] == 'l':
                        arg_data_type = DataTypes.LONG_LONG
                    else:
                        arg_data_type = DataTypes.LONG
                else:
                    arg_data_type = DataTypes.INT

                is_parsing = False
                do_extract = True

            elif fmt in ('s', 'p', 'n'):
                arg_data_type = DataTypes.PTR

                is_parsing = False
                do_extract = True

            elif str.lower(fmt) in ('a', 'e', 'f', 'g'):
                # Python doesn't do "long double".
                #
                # Parse it as double (probably incorrect), but
                # still have to skip enough bytes.
                if fmt_str[idx - 1] == 'L':
                    arg_data_type = DataTypes.LONG_DOUBLE
                else:
                    arg_data_type = DataTypes.DOUBLE

                is_parsing = False
                do_extract = True

            else:
                # Unrecognized conversion character: abandon this
                # conversion without extracting an argument.
                is_parsing = False
                continue

            if do_extract:
                do_extract = False

                align = self.data_types.get_alignment(arg_data_type)
                size = self.data_types.get_sizeof(arg_data_type)
                unpack_fmt = self.data_types.get_formatter(arg_data_type)

                # Align the argument list by rounding up
                stack_align = self.data_types.get_stack_alignment(arg_data_type)
                if stack_align > 1:
                    arg_offset = int((arg_offset + (align - 1)) / align) * align

                one_arg = struct.unpack_from(unpack_fmt, arg_list, arg_offset)[0]

                if fmt == 's':
                    one_arg = self.__get_string(one_arg, arg_offset, string_tbl)

                args.append(one_arg)
                arg_offset += size

                # Align the offset
                if stack_align > 1:
                    arg_offset = int((arg_offset + align - 1) / align) * align

        return tuple(args)
@staticmethod
def extract_string_table(str_tbl):
"""Extract string table in a packaged log message"""
tbl = {}
one_str = ""
next_new_string = True
# Translated from cbvprintf_package()
for one_ch in str_tbl:
if next_new_string:
str_idx = one_ch
next_new_string = False
continue
if one_ch == 0:
tbl[str_idx] = one_str
one_str = ""
next_new_string = True
continue
one_str += chr(one_ch)
return tbl
@staticmethod
def print_hexdump(hex_data, prefix_len, color):
    """Print a hex dump of hex_data.

    Each line shows up to HEX_BYTES_IN_LINE bytes as two-digit hex
    values, then '|' and the character rendering, indented by
    prefix_len spaces and wrapped in the given color escape codes.
    """
    hex_vals = ""
    chr_vals = ""
    chr_done = 0
    for one_hex in hex_data:
        hex_vals += f'{one_hex:02x} '
        chr_vals += chr(one_hex)
        chr_done += 1
        if chr_done == HEX_BYTES_IN_LINE / 2:
            # Extra gap halfway through the line for readability.
            hex_vals += " "
            chr_vals += " "
        elif chr_done == HEX_BYTES_IN_LINE:
            # Full line: flush it and start accumulating the next one.
            print(f"{color}%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
                                                   hex_vals, chr_vals))
            hex_vals = ""
            chr_vals = ""
            chr_done = 0
    if len(chr_vals) > 0:
        # Partial last line: pad the hex column before the char column.
        # NOTE(review): padding appears to be one space per missing byte,
        # but each byte occupies three characters ("xx ") in the hex
        # column, so the '|' of a short line may not align with full
        # lines -- confirm intended output width.
        hex_padding = " " * (HEX_BYTES_IN_LINE - chr_done)
        print(f"{color}%s%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
                                                 hex_vals, hex_padding, chr_vals))
def parse_one_normal_msg(self, logdata, offset):
    """Parse one normal log message and print the encoded message.

    Returns the offset of the next message inside logdata, or None on
    a decode error.
    """
    # Parse log message header: combined domain/level byte, package
    # length, hexdump data length and source ID.
    domain_lvl, pkg_len, data_len, source_id = struct.unpack_from(self.fmt_msg_hdr,
                                                                  logdata, offset)
    offset += struct.calcsize(self.fmt_msg_hdr)
    timestamp = struct.unpack_from(self.fmt_msg_timestamp, logdata, offset)[0]
    offset += struct.calcsize(self.fmt_msg_timestamp)
    # domain_id, level: nibble order within the combined byte depends
    # on target endianness.
    if self.is_big_endian:
        level = domain_lvl & 0x0F
        domain_id = (domain_lvl >> 4) & 0x0F
    else:
        domain_id = domain_lvl & 0x0F
        level = (domain_lvl >> 4) & 0x0F
    level_str, color = get_log_level_str_color(level)
    source_id_str = self.database.get_log_source_string(domain_id, source_id)
    # Skip over data to point to next message (save as return value)
    next_msg_offset = offset + pkg_len + data_len
    # Offset from beginning of cbprintf_packaged data to end of va_list arguments
    offset_end_of_args = struct.unpack_from("B", logdata, offset)[0]
    offset_end_of_args *= self.data_types.get_sizeof(DataTypes.INT)
    offset_end_of_args += offset
    # Extra data after packaged log (hexdump payload)
    extra_data = logdata[(offset + pkg_len):next_msg_offset]
    # Number of appended strings in package
    num_packed_strings = struct.unpack_from("B", logdata, offset+1)[0]
    # Number of read-only string indexes
    num_ro_str_indexes = struct.unpack_from("B", logdata, offset+2)[0]
    offset_end_of_args += num_ro_str_indexes
    # Number of read-write string indexes
    num_rw_str_indexes = struct.unpack_from("B", logdata, offset+3)[0]
    offset_end_of_args += num_rw_str_indexes
    # Extract the string table in the packaged log message
    string_tbl = self.extract_string_table(logdata[offset_end_of_args:(offset + pkg_len)])
    if len(string_tbl) != num_packed_strings:
        logger.error("------ Error extracting string table")
        return None
    # Skip packaged string header
    offset += self.data_types.get_sizeof(DataTypes.PTR)
    # Grab the format string
    #
    # Note the negative offset to __get_string(). It is because
    # the offset begins at 0 for va_list. However, the format string
    # itself is before the va_list, so need to go back the width of
    # a pointer.
    fmt_str_ptr = struct.unpack_from(self.data_types.get_formatter(DataTypes.PTR),
                                     logdata, offset)[0]
    fmt_str = self.__get_string(fmt_str_ptr,
                                -self.data_types.get_sizeof(DataTypes.PTR),
                                string_tbl)
    offset += self.data_types.get_sizeof(DataTypes.PTR)
    if not fmt_str:
        logger.error("------ Error getting format string at 0x%x", fmt_str_ptr)
        return None
    args = self.process_one_fmt_str(fmt_str, logdata[offset:offset_end_of_args], string_tbl)
    fmt_str = formalize_fmt_string(fmt_str)
    log_msg = fmt_str % args
    if level == 0:
        # Level-0 messages are printed as-is, without prefix or color.
        print(f"{log_msg}", end='')
        # Still define log_prefix: it is used below for hexdump indent.
        log_prefix = ""
    else:
        log_prefix = f"[{timestamp:>10}] <{level_str}> {source_id_str}: "
        print(f"{color}%s%s{Fore.RESET}" % (log_prefix, log_msg))
    if data_len > 0:
        # Has hexdump data
        self.print_hexdump(extra_data, len(log_prefix), color)
    # Point to next message
    return next_msg_offset
def parse_log_data(self, logdata, debug=False):
    """Parse binary log data and print the encoded log messages.

    Returns True when the whole buffer was consumed successfully,
    False on any decode error.
    """
    offset = 0
    while offset < len(logdata):
        # Get message type
        msg_type = struct.unpack_from(self.fmt_msg_type, logdata, offset)[0]
        offset += struct.calcsize(self.fmt_msg_type)
        if msg_type == MSG_TYPE_DROPPED:
            # Fix: struct.unpack_from() returns a tuple; take the
            # scalar count out of it (previously the tuple itself was
            # printed, e.g. "--- (5,) messages dropped ---").
            num_dropped = struct.unpack_from(self.fmt_dropped_cnt, logdata, offset)[0]
            offset += struct.calcsize(self.fmt_dropped_cnt)
            print(f"--- {num_dropped} messages dropped ---")
        elif msg_type == MSG_TYPE_NORMAL:
            ret = self.parse_one_normal_msg(logdata, offset)
            if ret is None:
                return False
            offset = ret
        else:
            logger.error("------ Unknown message type: %s", msg_type)
            return False
    return True
colorama.init()
``` | /content/code_sandbox/scripts/logging/dictionary/dictionary_parser/log_parser_v3.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,788 |
```python
#!/usr/bin/env python3
#
#
"""
Utilities for MIPI Sys-T Collateral XML data
"""
from xml.sax.saxutils import escape
# Fixed XML fragments stitched together by gen_syst_xml_file().
# NOTE(review): the leading newline in XML_HEADER places a blank line
# before the XML declaration in the output -- confirm downstream
# consumers tolerate this.
XML_HEADER = """
<?xml version="1.0" encoding="utf-8"?>
<syst:Collateral xmlns:syst="path_to_url"
xmlns:xsi="path_to_url"
xsi:schemaLocation="path_to_url
path_to_url">
<syst:Client Name="Zephyr">
"""
XML_FOOTER = """
</syst:Client>
</syst:Collateral>
"""
# syst:Modules: one Module element per log source (ID and name).
XML_MODULE_HEADER = """
<syst:Modules>"""
XML_MODULE_EACH = """
<syst:Module ID="{0}"><![CDATA[{1}]]></syst:Module>"""
XML_MODULE_FOOTER = """
</syst:Modules>
"""
# syst:Catalog32: format strings keyed by 32-bit address.
XML_CATALOG32_HEADER = """
<syst:Catalog32>"""
XML_CATALOG32_EACH = """
<syst:Format ID="0x{0:08x}"><![CDATA[{1}]]></syst:Format>"""
XML_CATALOG32_FOOTER = """
</syst:Catalog32>
"""
# syst:Catalog64: same as above, for 64-bit targets.
XML_CATALOG64_HEADER = """
<syst:Catalog64>"""
XML_CATALOG64_EACH = """
<syst:Format ID="0x{0:016x}"><![CDATA[{1}]]></syst:Format>"""
XML_CATALOG64_FOOTER = """
</syst:Catalog64>
"""
# GUID block identifying the Zephyr client in the collateral.
XML_GUIDS = """
<syst:Guids>
<syst:Guid ID="{00000000-0000-0000-0000-000000000000}"
Mask="{00000000-0000-0000-FF00-000000000000}"><![CDATA[Zephyr]]></syst:Guid>
</syst:Guids>
"""
def __gen_syst_modules(database):
    """
    Generate the syst:Modules element mapping log source IDs to names.

    Returns an empty string when the database carries no log instances.
    """
    subsys = database.database.get('log_subsys', {})
    instances = subsys.get('log_instances')
    if not instances:
        # Missing key or empty dictionary: nothing to emit.
        return ""

    entries = [
        XML_MODULE_EACH.format(inst['source_id'], escape(inst['name']))
        for inst in instances.values()
    ]
    return XML_MODULE_HEADER + "".join(entries) + XML_MODULE_FOOTER
def __gen_syst_catalog(database):
    """
    Generate the syst:Catalog element listing log strings by address.

    Picks the 32-bit or 64-bit catalog flavor to match the target.
    """
    if not database.has_string_mappings():
        return ""

    if database.is_tgt_64bit():
        header, each, footer = (XML_CATALOG64_HEADER, XML_CATALOG64_EACH,
                                XML_CATALOG64_FOOTER)
    else:
        header, each, footer = (XML_CATALOG32_HEADER, XML_CATALOG32_EACH,
                                XML_CATALOG32_FOOTER)

    body = "".join(each.format(addr, text)
                   for addr, text in database.get_string_mappings().items())
    return header + body + footer
def gen_syst_xml_file(database):
    """
    Generate MIPI Sys-T Collateral XML data
    """
    parts = [
        XML_HEADER,
        XML_GUIDS,
        __gen_syst_modules(database),
        __gen_syst_catalog(database),
        XML_FOOTER,
    ]
    return "".join(parts)
``` | /content/code_sandbox/scripts/logging/dictionary/dictionary_parser/mipi_syst.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 754 |
```yaml
#
# A pykwalify schema for basic validation of the snippet.yml format.
schema;append-schema:
# Sub-schema for appending onto CMake list variables.
# See uses under 'append:' keys below.
type: map
mapping:
EXTRA_DTC_OVERLAY_FILE:
type: str
EXTRA_CONF_FILE:
type: str
DTS_EXTRA_CPPFLAGS:
type: str
type: map
mapping:
name:
required: true
type: str
append:
example: |
Snippet-wide appending can be done here:
name: foo
append:
EXTRA_DTC_OVERLAY_FILE: m3.overlay
include: append-schema
boards:
example: |
Board-specific appending can be done here:
name: foo
boards:
qemu_cortex_m3:
append:
EXTRA_DTC_OVERLAY_FILE: m3.overlay
type: map
mapping:
regex;(.*):
type: map
mapping:
append:
include: append-schema
``` | /content/code_sandbox/scripts/schemas/snippet-schema.yml | yaml | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 227 |
```yaml
#
## A pykwalify schema for basic validation of the structure of a
## arch metadata YAML file.
##
# The archs.yml file is a simple list of key value pairs containing architectures
# and their location which is used by the build system.
type: map
mapping:
archs:
required: true
type: seq
sequence:
- type: map
mapping:
name:
required: true
type: str
desc: Name of the arch
path:
required: true
type: str
desc: Location of the arch implementation relative to the archs.yml file.
comment:
required: false
type: str
desc: Free form comment with extra information regarding the arch.
``` | /content/code_sandbox/scripts/schemas/arch-schema.yml | yaml | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 162 |
```yaml
#
## A pykwalify schema for basic validation of the structure of a SoC
## metadata YAML file.
##
# The soc.yml file is a simple list of key value pairs containing SoCs
# located and the current structure level.
schema;cpucluster-schema:
required: false
type: seq
sequence:
- type: map
mapping:
name:
required: true
type: str
schema;soc-schema:
required: false
type: seq
sequence:
- type: map
mapping:
name:
required: true
type: str
cpuclusters:
include: cpucluster-schema
schema;series-schema:
required: false
type: seq
sequence:
- type: map
mapping:
name:
required: true
type: str
socs:
required: false
include: soc-schema
type: map
mapping:
family:
required: false
type: seq
sequence:
- type: map
mapping:
name:
required: true
type: str
series:
include: series-schema
socs:
include: soc-schema
series:
include: series-schema
socs:
include: soc-schema
vendor:
required: false
type: str
desc: Vendor of the SoC.
This field is of informational use and can be used for filtering of SoCs.
comment:
required: false
type: str
desc: Free form comment with extra information regarding the SoC.
runners:
type: map
mapping:
run_once:
type: map
desc: |
Allows for restricting west flash commands when using sysbuild to run once per given
grouping of board targets. This is to allow for future image program cycles to not
erase the flash of a device which has just been programmed by another image.
mapping:
regex;(.*):
type: seq
desc: |
A dictionary of commands which should be limited to running once per invocation
of west flash for a given set of flash runners and board targets.
sequence:
- type: map
mapping:
run:
required: true
type: str
enum: ['first', 'last']
desc: |
If first, will run this command once when the first image is flashed, if
last, will run this command once when the final image is flashed.
runners:
required: true
type: seq
sequence:
- type: str
desc: |
A list of flash runners that this applies to, can use `all` to apply
to all runners.
groups:
required: true
type: seq
sequence:
- type: map
desc: |
A grouping of board targets which the command should apply to. Can
be used multiple times to have multiple groups.
mapping:
qualifiers:
required: true
type: seq
sequence:
- type: str
desc: |
A board qualifier to match against in regex form. Must be one
entry per board target, a single regex entry will not match
two board targets even if they both match.
``` | /content/code_sandbox/scripts/schemas/soc-schema.yml | yaml | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 710 |
```yaml
#
## A pykwalify schema for basic validation of the structure of a
## board metadata YAML file.
##
# The board.yml file is a simple list of key value pairs containing board
# information like: name, vendor, socs, variants.
schema;variant-schema:
required: false
type: seq
sequence:
- type: map
mapping:
name:
required: true
type: str
cpucluster:
required: false
type: str
variants:
required: false
include: variant-schema
schema;board-schema:
type: map
mapping:
name:
required: true
type: str
desc: Name of the board
vendor:
required: false
type: str
desc: Vendor of the board.
revision:
required: false
type: map
mapping:
format:
required: true
type: str
enum:
["major.minor.patch", "letter", "number", "custom"]
default:
required: false # This field is required when 'format' != 'custom'
type: str
exact:
required: false
type: bool
revisions:
required: false # This field is required when 'format' != 'custom'
type: seq
sequence:
- type: map
mapping:
name:
required: true
type: str
socs:
required: true
type: seq
sequence:
- type: map
mapping:
name:
required: true
type: str
variants:
include: variant-schema
type: map
mapping:
board:
include: board-schema
boards:
type: seq
sequence:
- include: board-schema
runners:
type: map
mapping:
run_once:
type: map
desc: |
Allows for restricting west flash commands when using sysbuild to run once per given
grouping of board targets. This is to allow for future image program cycles to not
erase the flash of a device which has just been programmed by another image.
mapping:
regex;(.*):
type: seq
desc: |
A dictionary of commands which should be limited to running once per invocation
of west flash for a given set of flash runners and board targets.
sequence:
- type: map
mapping:
run:
required: true
type: str
enum: ['first', 'last']
desc: |
If first, will run this command once when the first image is flashed, if
last, will run this command once when the final image is flashed.
runners:
required: true
type: seq
sequence:
- type: str
desc: |
A list of flash runners that this applies to, can use `all` to apply
to all runners.
groups:
required: true
type: seq
sequence:
- type: map
desc: |
A grouping of board targets which the command should apply to. Can
be used multiple times to have multiple groups.
mapping:
boards:
required: true
type: seq
sequence:
- type: str
desc: |
A board target to match against in regex. Must be one entry
per board target, a single regex entry will not match two
board targets even if they both match.
``` | /content/code_sandbox/scripts/schemas/board-schema.yml | yaml | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 761 |
```python
#!/usr/bin/env python3
#
#
"""
Dictionary-based Logging Parser Version 1
This contains the implementation of the parser for
version 1 databases.
"""
import logging
import math
import struct
import colorama
from colorama import Fore
from .log_parser import (LogParser, get_log_level_str_color, formalize_fmt_string)
from .data_types import DataTypes
# Number of payload bytes rendered per hexdump output line.
HEX_BYTES_IN_LINE = 16
# Need to keep sync with struct log_dict_output_msg_hdr in
# include/logging/log_output_dict.h.
#
# struct log_dict_output_normal_msg_hdr_t {
#     uint8_t type;
#     uint32_t domain:3;
#     uint32_t level:3;
#     uint32_t package_len:10;
#     uint32_t data_len:12;
#     uintptr_t source;
#     log_timestamp_t timestamp;
# } __packed;
#
# Note "type" and "timestamp" are encoded separately below.
# 32-bit target: bitfield word + 32-bit source pointer;
# 64-bit target: bitfield word + 64-bit source pointer.
FMT_MSG_HDR_32 = "II"
FMT_MSG_HDR_64 = "IQ"
# Message type
# 0: normal message
# 1: number of dropped messages
FMT_MSG_TYPE = "B"
# Depends on CONFIG_LOG_TIMESTAMP_64BIT
FMT_MSG_TIMESTAMP_32 = "I"
FMT_MSG_TIMESTAMP_64 = "Q"
# Keep message types in sync with include/logging/log_output_dict.h
MSG_TYPE_NORMAL = 0
MSG_TYPE_DROPPED = 1
# Number of dropped messages (16-bit counter)
FMT_DROPPED_CNT = "H"
logger = logging.getLogger("parser")
class LogParserV1(LogParser):
    """Log Parser V1.

    Decodes binary log data produced against a version 1 dictionary
    logging database and prints the reconstructed log messages.
    """

    def __init__(self, database):
        super().__init__(database=database)

        # struct format prefix selecting target byte order
        if self.database.is_tgt_little_endian():
            endian = "<"
        else:
            endian = ">"

        self.fmt_msg_type = endian + FMT_MSG_TYPE
        self.fmt_dropped_cnt = endian + FMT_DROPPED_CNT

        # Header layout depends on target pointer width
        if self.database.is_tgt_64bit():
            self.fmt_msg_hdr = endian + FMT_MSG_HDR_64
        else:
            self.fmt_msg_hdr = endian + FMT_MSG_HDR_32

        # Timestamp width follows CONFIG_LOG_TIMESTAMP_64BIT on target
        if "CONFIG_LOG_TIMESTAMP_64BIT" in self.database.get_kconfigs():
            self.fmt_msg_timestamp = endian + FMT_MSG_TIMESTAMP_64
        else:
            self.fmt_msg_timestamp = endian + FMT_MSG_TIMESTAMP_32

    def __get_string(self, arg, arg_offset, string_tbl):
        """Resolve one string argument.

        First look the address up in the database (read-only strings
        in the ELF), then fall back to the string table packaged with
        the log message. Returns a placeholder when neither matches.
        """
        one_str = self.database.find_string(arg)
        if one_str is not None:
            return one_str

        # The index from the string table is basically
        # the order in va_list. Need to add to the index
        # to skip the packaged string header and
        # the format string. Integer division avoids going
        # through floats for the index math.
        str_idx = arg_offset + self.data_types.get_sizeof(DataTypes.PTR) * 2
        str_idx //= self.data_types.get_sizeof(DataTypes.INT)

        if str_idx not in string_tbl:
            return "<string@0x{0:x}>".format(arg)

        return string_tbl[str_idx]

    def process_one_fmt_str(self, fmt_str, arg_list, string_tbl):
        """Parse the format string to extract arguments from
        the binary arglist and return a tuple usable with
        Python's string formatting"""
        idx = 0
        arg_offset = 0
        arg_data_type = None
        is_parsing = False
        do_extract = False

        args = []

        # Translated from cbvprintf_package()
        for idx, fmt in enumerate(fmt_str):
            if not is_parsing:
                if fmt == '%':
                    is_parsing = True
                    arg_data_type = DataTypes.INT
                continue

            elif fmt == '%':
                # '%%' -> literal percentage sign
                is_parsing = False
                continue

            elif fmt == '*':
                pass

            elif fmt.isdecimal() or fmt.lower() == 'l' \
                or fmt in (' ', '#', '-', '+', '.', 'h'):
                # formatting modifiers, just ignore
                continue

            elif fmt in ('j', 'z', 't'):
                # intmax_t, size_t or ptrdiff_t
                arg_data_type = DataTypes.LONG

            elif fmt in ('c', 'd', 'i', 'o', 'u') or fmt.lower() == 'x':
                # Look back at the length modifiers to size the integer
                if fmt_str[idx - 1] == 'l':
                    if fmt_str[idx - 2] == 'l':
                        arg_data_type = DataTypes.LONG_LONG
                    else:
                        arg_data_type = DataTypes.LONG
                else:
                    arg_data_type = DataTypes.INT

                is_parsing = False
                do_extract = True

            elif fmt in ('s', 'p', 'n'):
                arg_data_type = DataTypes.PTR

                is_parsing = False
                do_extract = True

            elif fmt.lower() in ('a', 'e', 'f', 'g'):
                # Python doesn't do "long double".
                #
                # Parse it as double (probably incorrect), but
                # still have to skip enough bytes.
                if fmt_str[idx - 1] == 'L':
                    arg_data_type = DataTypes.LONG_DOUBLE
                else:
                    arg_data_type = DataTypes.DOUBLE

                is_parsing = False
                do_extract = True

            else:
                is_parsing = False
                continue

            if do_extract:
                do_extract = False

                align = self.data_types.get_alignment(arg_data_type)
                size = self.data_types.get_sizeof(arg_data_type)
                unpack_fmt = self.data_types.get_formatter(arg_data_type)

                # Align the argument offset by rounding up, using
                # integer arithmetic (the original float division could
                # lose precision for very large offsets).
                stack_align = self.data_types.get_stack_alignment(arg_data_type)
                if stack_align > 1:
                    arg_offset = ((arg_offset + align - 1) // align) * align

                one_arg = struct.unpack_from(unpack_fmt, arg_list, arg_offset)[0]

                if fmt == 's':
                    one_arg = self.__get_string(one_arg, arg_offset, string_tbl)

                args.append(one_arg)
                arg_offset += size

                # Align the offset for the next argument
                if stack_align > 1:
                    arg_offset = ((arg_offset + align - 1) // align) * align

        return tuple(args)

    @staticmethod
    def extract_string_table(str_tbl):
        """Extract string table in a packaged log message.

        Each entry is one index byte, the string's characters and a
        NUL terminator; returns a dict mapping index -> string.
        """
        tbl = {}

        one_str = ""
        next_new_string = True

        # Translated from cbvprintf_package()
        for one_ch in str_tbl:
            if next_new_string:
                str_idx = one_ch
                next_new_string = False
                continue

            if one_ch == 0:
                tbl[str_idx] = one_str
                one_str = ""
                next_new_string = True
                continue

            one_str += chr(one_ch)

        return tbl

    @staticmethod
    def print_hexdump(hex_data, prefix_len, color):
        """Print a hex dump of hex_data, HEX_BYTES_IN_LINE bytes per
        line, with the character rendering after a '|' separator."""
        hex_vals = ""
        chr_vals = ""
        chr_done = 0

        for one_hex in hex_data:
            # Fix: zero-pad each byte to two hex digits so the columns
            # line up (was "%x ", which mis-aligned bytes < 0x10).
            hex_vals += f'{one_hex:02x} '
            chr_vals += chr(one_hex)
            chr_done += 1

            if chr_done == HEX_BYTES_IN_LINE / 2:
                # Extra gap halfway through the line for readability
                hex_vals += " "
                chr_vals += " "

            elif chr_done == HEX_BYTES_IN_LINE:
                print(f"{color}%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
                                                       hex_vals, chr_vals))
                hex_vals = ""
                chr_vals = ""
                chr_done = 0

        # Flush the remaining partial line, padding the hex column
        # (three characters per missing byte: "xx ") so the '|' lines
        # up with full lines.
        if len(chr_vals) > 0:
            hex_padding = "   " * (HEX_BYTES_IN_LINE - chr_done)
            print(f"{color}%s%s%s|%s{Fore.RESET}" % ((" " * prefix_len),
                                                     hex_vals, hex_padding, chr_vals))

    def parse_one_normal_msg(self, logdata, offset):
        """Parse one normal log message and print the encoded message.

        Returns the offset of the next message inside logdata, or None
        on a decode error.
        """
        # Parse log message header
        log_desc, source_id = struct.unpack_from(self.fmt_msg_hdr, logdata, offset)
        offset += struct.calcsize(self.fmt_msg_hdr)

        timestamp = struct.unpack_from(self.fmt_msg_timestamp, logdata, offset)[0]
        offset += struct.calcsize(self.fmt_msg_timestamp)

        # Unpack the log_desc bitfields per
        # log_dict_output_normal_msg_hdr_t:
        # domain:3, level:3, package_len:10, data_len:12
        domain_id = log_desc & 0x07
        level = (log_desc >> 3) & 0x07
        pkg_len = (log_desc >> 6) & 0x3FF
        data_len = (log_desc >> 16) & 0xFFF

        level_str, color = get_log_level_str_color(level)
        source_id_str = self.database.get_log_source_string(domain_id, source_id)

        # Skip over data to point to next message (save as return value)
        next_msg_offset = offset + pkg_len + data_len

        # Offset from beginning of cbprintf_packaged data to end of va_list arguments
        offset_end_of_args = struct.unpack_from("B", logdata, offset)[0]
        offset_end_of_args *= self.data_types.get_sizeof(DataTypes.INT)
        offset_end_of_args += offset

        # Extra data after packaged log (hexdump payload)
        extra_data = logdata[(offset + pkg_len):next_msg_offset]

        # Number of appended strings in package
        num_packed_strings = struct.unpack_from("B", logdata, offset+1)[0]

        # Number of read-only string indexes
        num_ro_str_indexes = struct.unpack_from("B", logdata, offset+2)[0]
        offset_end_of_args += num_ro_str_indexes

        # Number of read-write string indexes
        num_rw_str_indexes = struct.unpack_from("B", logdata, offset+3)[0]
        offset_end_of_args += num_rw_str_indexes

        # Extract the string table in the packaged log message
        string_tbl = self.extract_string_table(logdata[offset_end_of_args:(offset + pkg_len)])

        if len(string_tbl) != num_packed_strings:
            logger.error("------ Error extracting string table")
            return None

        # Skip packaged string header
        offset += self.data_types.get_sizeof(DataTypes.PTR)

        # Grab the format string
        #
        # Note the negative offset to __get_string(). It is because
        # the offset begins at 0 for va_list. However, the format string
        # itself is before the va_list, so need to go back the width of
        # a pointer.
        fmt_str_ptr = struct.unpack_from(self.data_types.get_formatter(DataTypes.PTR),
                                         logdata, offset)[0]
        fmt_str = self.__get_string(fmt_str_ptr,
                                    -self.data_types.get_sizeof(DataTypes.PTR),
                                    string_tbl)
        offset += self.data_types.get_sizeof(DataTypes.PTR)

        if not fmt_str:
            logger.error("------ Error getting format string at 0x%x", fmt_str_ptr)
            return None

        args = self.process_one_fmt_str(fmt_str, logdata[offset:offset_end_of_args], string_tbl)

        fmt_str = formalize_fmt_string(fmt_str)
        log_msg = fmt_str % args

        if level == 0:
            # Level-0 messages are printed as-is, without prefix/color.
            print(f"{log_msg}", end='')
            # Bug fix: log_prefix must still be defined here -- it is
            # used below for hexdump indentation (previously raised
            # UnboundLocalError when a level-0 message carried hexdump
            # data). Matches the V3 parser's behavior.
            log_prefix = ""
        else:
            log_prefix = f"[{timestamp:>10}] <{level_str}> {source_id_str}: "
            print(f"{color}%s%s{Fore.RESET}" % (log_prefix, log_msg))

        if data_len > 0:
            # Has hexdump data
            self.print_hexdump(extra_data, len(log_prefix), color)

        # Point to next message
        return next_msg_offset

    def parse_log_data(self, logdata, debug=False):
        """Parse binary log data and print the encoded log messages.

        Returns True when the whole buffer was consumed successfully,
        False on any decode error.
        """
        offset = 0

        while offset < len(logdata):
            # Get message type
            msg_type = struct.unpack_from(self.fmt_msg_type, logdata, offset)[0]
            offset += struct.calcsize(self.fmt_msg_type)

            if msg_type == MSG_TYPE_DROPPED:
                # Fix: struct.unpack_from() returns a tuple; take the
                # scalar count out of it (previously the tuple itself
                # was printed, e.g. "--- (5,) messages dropped ---").
                num_dropped = struct.unpack_from(self.fmt_dropped_cnt, logdata, offset)[0]
                offset += struct.calcsize(self.fmt_dropped_cnt)

                print(f"--- {num_dropped} messages dropped ---")

            elif msg_type == MSG_TYPE_NORMAL:
                ret = self.parse_one_normal_msg(logdata, offset)
                if ret is None:
                    return False

                offset = ret

            else:
                logger.error("------ Unknown message type: %s", msg_type)
                return False

        return True
colorama.init()
``` | /content/code_sandbox/scripts/logging/dictionary/dictionary_parser/log_parser_v1.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,745 |
```yaml
type: seq
sequence:
- type: map
required: false
mapping:
"available":
type: bool
required: false
"connected":
type: bool
required: true
"id":
type: str
required: true
"notes":
type: str
required: false
"platform":
type: str
required: true
"probe_id":
type: str
required: false
"product":
type: str
required: true
"runner":
type: str
required: true
"runner_params":
type: seq
required: false
sequence:
- type: str
"serial_pty":
type: str
required: false
"serial":
type: str
required: false
"baud":
type: int
required: false
"post_script":
type: str
required: false
"post_flash_script":
type: str
required: false
"pre_script":
type: str
required: false
"fixtures":
type: seq
required: false
sequence:
- type: str
"flash_timeout":
type: int
required: false
"flash_with_test":
type: bool
required: false
"flash_before":
type: bool
required: false
``` | /content/code_sandbox/scripts/schemas/twister/hwmap-schema.yaml | yaml | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 318 |
```yaml
#
# Schema to validate a YAML file describing a Zephyr test configuration.
#
type: map
mapping:
"platforms":
type: map
required: false
mapping:
"override_default_platforms":
type: bool
required: false
"increased_platform_scope":
type: bool
required: false
"default_platforms":
type: seq
required: false
sequence:
- type: str
"levels":
type: seq
required: false
sequence:
- type: map
required: false
mapping:
"name":
type: str
required: true
"description":
type: str
required: false
"adds":
type: seq
required: false
sequence:
- type: str
"inherits":
type: seq
required: false
sequence:
- type: str
``` | /content/code_sandbox/scripts/schemas/twister/test-config-schema.yaml | yaml | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 206 |
```yaml
#
# Schema to validate a YAML file describing a Zephyr test platform
#
# We load this with pykwalify
# (path_to_url
# a YAML structure validator, to validate the YAML files that describe
# Zephyr test platforms
#
# The original spec comes from Zephyr's twister script
#
type: map
mapping:
"identifier":
type: str
"maintainers":
type: seq
seq:
- type: str
"name":
type: str
"type":
type: str
enum: ["mcu", "qemu", "sim", "unit", "native"]
"simulation":
type: str
enum:
[
"qemu",
"simics",
"xt-sim",
"renode",
"nsim",
"mdb-nsim",
"tsim",
"armfvp",
"native",
"custom",
]
"simulation_exec":
type: str
"arch":
type: str
enum:
[
# architectures
"arc",
"arm",
"arm64",
"mips",
"nios2",
"posix",
"riscv",
"sparc",
"x86",
"xtensa",
# unit testing
"unit",
]
"vendor":
type: str
"tier":
type: int
"toolchain":
type: seq
seq:
- type: str
"sysbuild":
type: bool
"env":
type: seq
seq:
- type: str
"ram":
type: int
"flash":
type: int
"twister":
type: bool
"supported":
type: seq
seq:
- type: str
"testing":
type: map
mapping:
"timeout_multiplier":
type: number
required: false
"default":
type: bool
"binaries":
type: seq
seq:
- type: str
"only_tags":
type: seq
seq:
- type: str
"ignore_tags":
type: seq
seq:
- type: str
"renode":
type: map
mapping:
"uart":
type: str
"resc":
type: str
``` | /content/code_sandbox/scripts/schemas/twister/platform-schema.yaml | yaml | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 535 |
```yaml
#
# Schema to validate a YAML file providing the list of configurations
# under quarantine
#
# We load this with pykwalify
# (path_to_url
# a YAML structure validator, to validate the YAML files that provide
# a list of configurations (scenarios + platforms) under quarantine
#
type: seq
matching: all
sequence:
- type: map
required: true
matching: all
mapping:
"scenarios":
type: seq
required: false
sequence:
- type: str
- unique: true
"platforms":
required: false
type: seq
sequence:
- type: str
- unique: true
"architectures":
required: false
type: seq
sequence:
- type: str
- unique: true
"simulations":
required: false
type: seq
sequence:
- type: str
- unique: true
"comment":
type: str
required: false
``` | /content/code_sandbox/scripts/schemas/twister/quarantine-schema.yaml | yaml | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 229 |
```yaml
#
# Schema to validate a YAML file describing a Zephyr test platform
#
# We load this with pykwalify
# (path_to_url
# a YAML structure validator, to validate the YAML files that describe
# Zephyr test platforms
#
# The original spec comes from Zephyr's twister script
#
schema;scenario-schema:
type: map
# has to be not-required, otherwise the parser gets
# confused and thinks it never found it
required: false
mapping:
"arch_exclude":
type: any
required: false
"arch_allow":
type: any
required: false
"testcases":
type: seq
required: false
sequence:
- type: str
"build_only":
type: bool
required: false
"build_on_all":
type: bool
required: false
"depends_on":
type: any
required: false
"extra_args":
type: any
required: false
"extra_configs":
type: seq
required: false
sequence:
- type: str
"extra_conf_files":
type: seq
required: false
sequence:
- type: str
"extra_overlay_confs":
type: seq
required: false
sequence:
- type: str
"extra_dtc_overlay_files":
type: seq
required: false
sequence:
- type: str
"extra_sections":
type: any
required: false
"required_snippets":
type: seq
required: false
sequence:
- type: str
"filter":
type: str
required: false
"levels":
type: seq
required: false
sequence:
- type: str
enum: ["smoke", "unit", "integration", "acceptance", "system", "regression"]
"integration_platforms":
type: seq
required: false
sequence:
- type: str
"ignore_faults":
type: bool
required: false
"ignore_qemu_crash":
type: bool
required: false
"harness":
type: str
required: false
"harness_config":
type: map
required: false
mapping:
"type":
type: str
required: false
"fixture":
type: str
required: false
"ordered":
type: bool
required: false
"pytest_root":
type: seq
required: false
sequence:
- type: str
"pytest_args":
type: seq
required: false
sequence:
- type: str
"pytest_dut_scope":
type: str
enum: ["function", "class", "module", "package", "session"]
required: false
"regex":
type: seq
required: false
sequence:
- type: str
"robot_testsuite":
type: any
required: false
"robot_option":
type: any
required: false
"record":
type: map
required: false
mapping:
"regex":
type: str
required: true
"as_json":
type: seq
required: false
sequence:
- type: str
"bsim_exe_name":
type: str
required: false
"min_ram":
type: int
required: false
"min_flash":
type: int
required: false
"modules":
type: seq
required: false
sequence:
- type: str
"platform_exclude":
type: any
required: false
"platform_allow":
type: any
required: false
"platform_type":
type: seq
required: false
sequence:
- type: str
enum: ["mcu", "qemu", "sim", "unit", "native"]
"platform_key":
required: false
type: seq
matching: "all"
sequence:
- type: str
"simulation_exclude":
type: seq
required: false
sequence:
- type: str
enum:
[
"qemu",
"simics",
"xt-sim",
"renode",
"nsim",
"mdb-nsim",
"tsim",
"armfvp",
"native",
"custom",
]
"tags":
type: any
required: false
"timeout":
type: int
required: false
"toolchain_exclude":
type: any
required: false
"toolchain_allow":
type: any
required: false
"type":
type: str
enum: ["unit"]
"skip":
type: bool
required: false
"slow":
type: bool
required: false
"sysbuild":
type: bool
required: false
type: map
mapping:
"common":
include: scenario-schema
# The sample descriptor, if present
"sample":
type: map
required: false
mapping:
"name":
type: str
required: true
"description":
type: str
required: false
# The list of testcases -- it is unclear why this is a sequence of
# maps of maps; it should just be a sequence of maps.
# Maybe it is just an artifact?
"tests":
type: map
required: true
matching-rule: "any"
mapping:
# The key for the testname is any, so
# regex;(([a-zA-Z0-9_]+)) for this to work, note below we
# make it required: false
regex;(([a-zA-Z0-9_]+)):
include: scenario-schema
``` | /content/code_sandbox/scripts/schemas/twister/testsuite-schema.yaml | yaml | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,328 |
```python
"""
Utility script to migrate Zephyr-based projects to the new <zephyr/...> include
prefix.
.. note::
The script will also migrate <zephyr.h> or <zephyr/zephyr.h> to
<zephyr/kernel.h>.
Usage::
python $ZEPHYR_BASE/scripts/utils/migrate_includes.py \
-p path/to/zephyr-based-project
"""
import argparse
from pathlib import Path
import re
import sys
ZEPHYR_BASE = Path(__file__).parents[2]
EXTENSIONS = ("c", "cpp", "h", "hpp", "dts", "dtsi", "rst", "S", "overlay", "ld")


def update_includes(project, dry_run):
    """Rewrite #include directives in Zephyr-source files under *project*.

    Legacy <zephyr.h> / <zephyr/zephyr.h> becomes <zephyr/kernel.h>;
    headers that live under include/zephyr in the Zephyr tree gain the
    <zephyr/...> prefix. With dry_run, files are only reported, not
    rewritten.
    """
    # Captures: text before the directive, the header path, trailing text.
    include_re = re.compile(r"^(.*)#include <(.*\.h)>(.*)$")

    for path in project.glob("**/*"):
        if not path.is_file() or not path.suffix or path.suffix[1:] not in EXTENSIONS:
            continue

        try:
            with open(path) as src:
                original_lines = src.readlines()
        except UnicodeDecodeError:
            print(f"File with invalid encoding: {path}, skipping", file=sys.stderr)
            continue

        new_lines = []
        migrate = False
        for line in original_lines:
            m = include_re.match(line)
            header = m.group(2) if m else None
            if header in ("zephyr.h", "zephyr/zephyr.h"):
                # Legacy umbrella header -> kernel.h
                new_lines.append(f"{m.group(1)}#include <zephyr/kernel.h>{m.group(3)}\n")
                migrate = True
            elif (
                header is not None
                and not header.startswith("zephyr/")
                and (ZEPHYR_BASE / "include" / "zephyr" / header).exists()
            ):
                # Header known to live under include/zephyr -> add prefix
                new_lines.append(f"{m.group(1)}#include <zephyr/{header}>{m.group(3)}\n")
                migrate = True
            else:
                new_lines.append(line)

        if migrate:
            print(f"Updating {path}{' (dry run)' if dry_run else ''}")
            if not dry_run:
                with open(path, "w") as dst:
                    dst.write("".join(new_lines))
if __name__ == "__main__":
parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument(
"-p", "--project", type=Path, required=True, help="Zephyr-based project path"
)
parser.add_argument("--dry-run", action="store_true", help="Dry run")
args = parser.parse_args()
update_includes(args.project, args.dry_run)
``` | /content/code_sandbox/scripts/utils/migrate_includes.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 581 |
```python
"""
Utility script to migrate SYS_INIT() calls from the old signature
``int (*init_fn)(const struct device *dev)`` to ``int (*init_fn)(void)``.
.. warning::
Review the output carefully! This script may not cover all cases!
Usage::
python $ZEPHYR_BASE/scripts/utils/migrate_sys_init.py \
-p path/to/zephyr-based-project
"""
import argparse
from pathlib import Path
import re
def update_sys_init(project, dry_run):
    """Migrate SYS_INIT functions under *project* to the void signature.

    Pass 1 collects function names registered via SYS_INIT()/SYS_INIT_NAMED();
    pass 2 rewrites their `const struct device *` parameter to `void`, drops
    the matching ARG_UNUSED() line and one blank line that followed it.
    """
    for path in project.glob("**/*"):
        if not path.is_file() or not path.suffix or path.suffix[1:] not in ("c", "cpp"):
            continue

        with open(path) as f:
            lines = f.readlines()

        # Pass 1: gather registered init function names.
        init_fns = []
        for line in lines:
            m = re.match(r"^SYS_INIT\(([A-Za-z0-9_]+),.*", line)
            if not m:
                m = re.match(
                    r"^SYS_INIT_NAMED\([A-Za-z0-9_]+,\s?([A-Za-z0-9_]+).*", line
                )
            if m:
                init_fns.append(m.group(1))
        if not init_fns:
            continue

        # Pass 2: rewrite signatures and clean up ARG_UNUSED leftovers.
        sig_re = re.compile(
            r"(.*)int ("
            + "|".join(init_fns)
            + r")\(const\s+struct\s+device\s+\*\s?(.*)\)(.*)"
        )
        param = None          # parameter name still awaiting its ARG_UNUSED()
        drop_blank = False    # drop at most one blank line after ARG_UNUSED()
        changed = False
        new_text = ""
        for line in lines:
            m = sig_re.match(line)
            if m:
                before, fn_name, param, after = m.groups()
                new_text += f"{before}int {fn_name}(void){after}\n"
                changed = True
            elif param:
                if re.match(r"^\s?ARG_UNUSED\(" + param + r"\);.*$", line):
                    param = None
                    drop_blank = True
                else:
                    new_text += line
            elif drop_blank:
                if not re.match(r"^\s?\n$", line):
                    new_text += line
                drop_blank = False
            else:
                new_text += line

        if changed:
            print(f"Updating {path}{' (dry run)' if dry_run else ''}")
            if not dry_run:
                with open(path, "w") as f:
                    f.write(new_text)
if __name__ == "__main__":
    # CLI entry point: parse the project path and run the migration.
    cli = argparse.ArgumentParser(allow_abbrev=False)
    cli.add_argument(
        "-p", "--project", type=Path, required=True, help="Zephyr-based project path"
    )
    cli.add_argument("--dry-run", action="store_true", help="Dry run")
    options = cli.parse_args()
    update_sys_init(options.project, options.dry_run)
``` | /content/code_sandbox/scripts/utils/migrate_sys_init.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 613 |
```python
"""
Utility script to migrate Zephyr-based projects to normative POSIX Kconfig options.
This script should be used for migrating from versions of Zephyr older than v3.7.0 to Zephyr
version v3.7.0 or later.
Usage::
python $ZEPHYR_BASE/scripts/utils/migrate_posix_kconfigs.py -r root_path
The utility will process c, cpp, h, hpp, rst, conf, CMakeLists.txt,
yml, yaml and Kconfig files.
"""
import argparse
from pathlib import Path
import re
import sys
# Zephyr tree root (scripts/utils/ is two levels below it).
# NOTE(review): appears unused in this script — confirm before relying on it.
ZEPHYR_BASE = Path(__file__).parents[2]
# Regexes for the file names that will be processed.
FILE_PATTERNS = (
    r".+\.c", r".+\.cpp", r".+\.hpp", r".+\.h", r".+\.rst", r".+\.conf",
    r".+\.yml", r".+\.yaml", r"CMakeLists.txt", r"Kconfig(\..+)?"
)
# Legacy POSIX Kconfig symbol -> normative replacement. Keys are matched as
# substrings of a line; the longest matching key wins (see process_file()).
REPLACEMENTS = {
    "EVENTFD_MAX": "ZVFS_EVENTFD_MAX",
    "FNMATCH": "POSIX_C_LIB_EXT",
    "GETENTROPY": "POSIX_C_LIB_EXT",
    "GETOPT": "POSIX_C_LIB_EXT",
    "MAX_PTHREAD_COUNT": "POSIX_THREAD_THREADS_MAX",
    "MAX_PTHREAD_KEY_COUNT": "POSIX_THREAD_KEYS_MAX",
    "MAX_TIMER_COUNT": "POSIX_TIMER_MAX",
    "MSG_COUNT_MAX": "POSIX_MQ_OPEN_MAX",
    "POSIX_CLOCK": "POSIX_TIMERS",
    "POSIX_CONFSTR": "POSIX_SINGLE_PROCESS",
    "POSIX_ENV": "POSIX_SINGLE_PROCESS",
    "POSIX_FS": "POSIX_FILE_SYSTEM",
    "POSIX_LIMITS_RTSIG_MAX": "POSIX_RTSIG_MAX",
    "POSIX_MAX_FDS": "ZVFS_OPEN_MAX",
    "POSIX_MAX_OPEN_FILES": "ZVFS_OPEN_MAX",
    "POSIX_MQUEUE": "POSIX_MESSAGE_PASSING",
    "POSIX_PUTMSG": "XOPEN_STREAMS",
    "POSIX_SIGNAL": "POSIX_SIGNALS",
    "POSIX_SYSCONF": "POSIX_SINGLE_PROCESS",
    "POSIX_SYSLOG": "XSI_SYSTEM_LOGGING",
    "POSIX_UNAME": "POSIX_SINGLE_PROCESS",
    "PTHREAD": "POSIX_THREADS",
    "PTHREAD_BARRIER": "POSIX_BARRIERS",
    "PTHREAD_COND": "POSIX_THREADS",
    "PTHREAD_IPC": "POSIX_THREADS",
    "PTHREAD_KEY": "POSIX_THREADS",
    "PTHREAD_MUTEX": "POSIX_THREADS",
    "PTHREAD_RWLOCK": "POSIX_READER_WRITER_LOCKS",
    "PTHREAD_SPINLOCK": "POSIX_SPIN_LOCKS",
    "TIMER": "POSIX_TIMERS",
    "TIMER_DELAYTIMER_MAX": "POSIX_DELAYTIMER_MAX",
    "SEM_NAMELEN_MAX": "POSIX_SEM_NAME_MAX",
    "SEM_VALUE_MAX": "POSIX_SEM_VALUE_MAX",
}
# Extra guidance printed when the corresponding symbol is replaced.
MESSAGES = {
    "POSIX_CLOCK":
        "POSIX_CLOCK is a one-to-many replacement. If this simple substitution is not "
        "sufficient, it's best to try a combination of POSIX_CLOCK_SELECTION, POSIX_CPUTIME, "
        "POSIX_MONOTONIC_CLOCK, POSIX_TIMERS, and POSIX_TIMEOUTS.",
    "POSIX_MAX_FDS":
        "A read-only version of this symbol is POSIX_OPEN_MAX, which is of course, the standard "
        "symbol. ZVFS_OPEN_MAX may be set by the user. Consider using POSIX_MAX_FDS if the "
        "use-case is read-only.",
}
def process_file(path):
    """Rewrite deprecated POSIX Kconfig symbols in *path* in place.

    Each line is scanned for the longest matching legacy symbol from
    REPLACEMENTS, which is substituted with its modern equivalent; a notice
    from MESSAGES is printed for replacements needing manual review. The
    file is rewritten only when at least one line changed.
    """
    modified = False
    output = []
    try:
        with open(path) as f:
            lines = f.readlines()
        for line in lines:
            longest = ""
            for m in REPLACEMENTS:
                # A plain substring test is equivalent to the previous
                # re.match(".*" + m + ".*") check (keys are plain
                # identifiers) and far cheaper. Keep the longest hit so
                # that e.g. TIMER_DELAYTIMER_MAX wins over TIMER.
                if m in line and len(m) > len(longest):
                    longest = m
            if longest:
                modified = True
                line = line.replace(longest, REPLACEMENTS[longest])
                msg = MESSAGES.get(longest)
                if msg:
                    print(
                        f"Notice: {longest} -> {REPLACEMENTS[longest]}: {msg}")
            output.append(line)
        if modified is False:
            return
        with open(path, "w") as f:
            f.writelines(output)
    except UnicodeDecodeError:
        print(f"Unable to read lines from {path}", file=sys.stderr)
    except Exception as e:
        # The old code printed the exception twice and to stdout; report it
        # once on stderr instead.
        print(f"Failed with exception {e}", file=sys.stderr)
def process_tree(project):
    """Apply process_file() to every file under *project* matching FILE_PATTERNS."""
    # Compile the path patterns once instead of once per visited file.
    patterns = [re.compile(".+/" + fp + "$") for fp in FILE_PATTERNS]
    for p in project.glob("**/*"):
        for pattern in patterns:
            if pattern.match(str(p)) is not None:
                process_file(p)
                # Stop after the first hit: a path can match several
                # patterns, and without this break the file was processed
                # once per match — a second pass can corrupt symbols whose
                # replacement contains another key (e.g. TIMER inside
                # POSIX_TIMERS).
                break
if __name__ == "__main__":
    # CLI entry point: migrate the tree rooted at --root.
    cli = argparse.ArgumentParser(allow_abbrev=False)
    cli.add_argument(
        "-r", "--root", type=Path, required=True, help="Zephyr-based project path"
    )
    options = cli.parse_args()
    process_tree(options.root)
``` | /content/code_sandbox/scripts/utils/migrate_posix_kconfigs.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,142 |
```python
#!/usr/bin/env python3
"""This script converting the Zephyr coding guideline rst file to a output file,
or print the output to the console. Which than can be used by a tool which
needs to have that information in a specific format (e.g. for cppcheck).
Or simply use the rule list to generate a filter to suppress all other rules
used by default from such a tool.
"""
import sys
import re
import argparse
from pathlib import Path
class RuleFormatter:
    """
    Base class for the different output formats
    """
    def table_start_print(self, outputfile):
        # Hook: called once when the 'Main rules' table is found. No-op here.
        pass
    def severity_print(self, outputfile, guideline_number, severity):
        # Hook: called with each rule's severity column. No-op here.
        pass
    def description_print(self, outputfile, guideline_number, description):
        # Hook: called with each rule's description column. No-op here.
        pass
    def closing_print(self, outputfile):
        # Hook: called once after the table has been fully processed. No-op.
        pass
class CppCheckFormatter(RuleFormatter):
    """
    Formatter class to print the rules in a format which can be used by cppcheck
    """
    def table_start_print(self, outputfile):
        # Start search by cppcheck misra addon
        print('Appendix A Summary of guidelines', file=outputfile)
    def severity_print(self, outputfile, guideline_number, severity):
        # One "Rule <number> <severity>" line per guideline.
        print('Rule ' + guideline_number + ' ' + severity, file=outputfile)
    def description_print(self, outputfile, guideline_number, description):
        # Description followed by the rule number cppcheck expects.
        print(description + '(Misra rule ' + guideline_number + ')', file=outputfile)
    def closing_print(self, outputfile):
        # Make cppcheck happy by starting the appendix
        print('Appendix B', file=outputfile)
        print('', file=outputfile)
def convert_guidelines(args):
    """Convert the guideline rst 'Main rules' table to the selected format.

    Reads the rst file given by args.input, walks the ``list-table:: Main
    rules`` table and emits each rule's severity and description through the
    formatter selected by args.format. Output goes to args.output when set,
    otherwise stdout. Exits the process if the input cannot be opened.
    """
    inputfile = args.input
    outputfile = sys.stdout
    formatter = None
    # If the output is not empty, open the given file for writing
    if args.output is not None:
        outputfile = open(args.output, "w")
    try:
        file_stream = open(inputfile, 'rt', errors='ignore')
    except OSError:
        # args.input is a pathlib.Path: the old "'Error opening ' + inputfile"
        # concatenation raised TypeError instead of printing this message.
        print(f'Error opening {inputfile}.')
        sys.exit()
    # Set formatter according to the used format
    if args.format == 'cppcheck':
        formatter = CppCheckFormatter()
    # Search for table named Main rules
    pattern_table_start = re.compile(r'.*list-table:: Main rules')
    # Each Rule is a new table column so start with '[tab]* - Rule'
    # Ignore directives here
    pattern_new_line = re.compile(r'^ \* - Rule ([0-9]+.[0-9]+).*$')
    # Each table column start with '[tab]- '
    pattern_new_col = re.compile(r'^ - (.*)$')
    table_start = False
    guideline_number = ''
    guideline_state = 0
    guideline_list = []  # NOTE(review): collected but currently unused
    # Close the input file when done (it previously leaked).
    with file_stream:
        for line in file_stream:
            line = line.replace('\r', '').replace('\n', '')
            # Done if we find the Additional rules table start
            if line.find('Additional rules') >= 0:
                break
            if len(line) == 0:
                continue
            if not table_start:
                res = pattern_table_start.match(line)
                if res:
                    table_start = True
                    formatter.table_start_print(outputfile)
                continue
            res = pattern_new_line.match(line)
            if res:
                guideline_state = "severity"
                guideline_number = res.group(1)
                guideline_list.append(guideline_number)
                continue
            elif guideline_number == '':
                continue
            res = pattern_new_col.match(line)
            if res:
                if guideline_state == "severity":
                    # Severity
                    formatter.severity_print(outputfile, guideline_number, res.group(1))
                    guideline_state = "description"
                    continue
                if guideline_state == "description":
                    # Description
                    formatter.description_print(outputfile, guideline_number, res.group(1))
                    guideline_state = "None"
                    # We stop here for now, we do not handle the CERT C col
                    guideline_number = ''
                    continue
    formatter.closing_print(outputfile)
    # Close the output file if we opened one (leave stdout alone).
    if outputfile is not sys.stdout:
        outputfile.close()
if __name__ == "__main__":
    # CLI entry point: parse the input/format/output options and convert.
    supported_formats = ['cppcheck']
    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument(
        "-i", "--input", metavar="RST_FILE", type=Path, required=True,
        help="Path to rst input source file, where the guidelines are written down."
    )
    parser.add_argument(
        "-f", "--format", metavar="FORMAT", choices=supported_formats, required=True,
        # Fixed user-facing typo: "guidlines" -> "guidelines".
        help="Format to convert guidelines to. Supported formats are: " + str(supported_formats)
    )
    parser.add_argument(
        "-o", "--output", metavar="OUTPUT_FILE", type=Path, required=False,
        help="Path to output file, where the converted guidelines are written to. If outputfile is not specified, print to stdout."
    )
    args = parser.parse_args()
    convert_guidelines(args)
``` | /content/code_sandbox/scripts/utils/convert_guidelines.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,046 |
```python
"""
Utility script to migrate Zephyr-based projects to new MCUmgr Kconfig options
Usage::
python $ZEPHYR_BASE/scripts/utils/migrate_mcumgr_kconfigs.py -r root_path
The utility will process c, cpp, h, hpp, rst, conf, CMakeLists.txt,
yml, yaml and Kconfig files.
"""
import argparse
from pathlib import Path
import re
import sys
# Zephyr tree root. NOTE(review): appears unused in this script — confirm.
ZEPHYR_BASE = Path(__file__).parents[2]
# Regexes for the file names that will be processed.
FILE_PATTERNS = (
    r".+\.c", r".+\.cpp", r".+\.hpp", r".+\.h", r".+\.rst", r".+\.conf",
    r".+\.yml", r".+\.yaml", r"CMakeLists.txt", r"Kconfig(\..+)?"
)
# Old MCUmgr Kconfig symbol -> new symbol. Keys are matched as substrings of
# a line; the longest matching key wins (see process_file()).
REPLACEMENTS = {
    "MCUMGR_SMP_WORKQUEUE_STACK_SIZE" : "MCUMGR_TRANSPORT_WORKQUEUE_STACK_SIZE",
    "MCUMGR_SMP_WORKQUEUE_THREAD_PRIO" : "MCUMGR_TRANSPORT_WORKQUEUE_THREAD_PRIO",
    "MGMT_MAX_MAIN_MAP_ENTRIES" : "MCUMGR_SMP_CBOR_MAX_MAIN_MAP_ENTRIES",
    "MGMT_MIN_DECODING_LEVELS" : "MCUMGR_SMP_CBOR_MIN_DECODING_LEVELS",
    "MGMT_MIN_DECODING_LEVEL_1" : "MCUMGR_SMP_CBOR_MIN_DECODING_LEVEL_1",
    "MGMT_MIN_DECODING_LEVEL_2" : "MCUMGR_SMP_CBOR_MIN_DECODING_LEVEL_2",
    "MGMT_MIN_DECODING_LEVEL_3" : "MCUMGR_SMP_CBOR_MIN_DECODING_LEVEL_3",
    "MGMT_MIN_DECODING_LEVEL_4" : "MCUMGR_SMP_CBOR_MIN_DECODING_LEVEL_4",
    "MGMT_MIN_DECODING_LEVEL_5" : "MCUMGR_SMP_CBOR_MIN_DECODING_LEVEL_5",
    "MGMT_MAX_DECODING_LEVELS" : "MCUMGR_SMP_CBOR_MAX_DECODING_LEVELS",
    "MCUMGR_CMD_FS_MGMT" : "MCUMGR_GRP_FS",
    "FS_MGMT_MAX_FILE_SIZE_64KB" : "MCUMGR_GRP_FS_MAX_FILE_SIZE_64KB",
    "FS_MGMT_MAX_FILE_SIZE_4GB" : "MCUMGR_GRP_FS_MAX_FILE_SIZE_4GB",
    "FS_MGMT_MAX_OFFSET_LEN" : "MCUMGR_GRP_FS_MAX_OFFSET_LEN",
    "FS_MGMT_DL_CHUNK_SIZE_LIMIT" : "MCUMGR_GRP_FS_DL_CHUNK_SIZE_LIMIT",
    "FS_MGMT_DL_CHUNK_SIZE" : "MCUMGR_GRP_FS_DL_CHUNK_SIZE",
    "FS_MGMT_FILE_STATUS" : "MCUMGR_GRP_FS_FILE_STATUS",
    "FS_MGMT_CHECKSUM_HASH" : "MCUMGR_GRP_FS_CHECKSUM_HASH",
    "FS_MGMT_CHECKSUM_HASH_CHUNK_SIZE" : "MCUMGR_GRP_FS_CHECKSUM_HASH_CHUNK_SIZE",
    "FS_MGMT_CHECKSUM_IEEE_CRC32" : "MCUMGR_GRP_FS_CHECKSUM_IEEE_CRC32",
    "FS_MGMT_HASH_SHA256" : "MCUMGR_GRP_FS_HASH_SHA256",
    "FS_MGMT_FILE_ACCESS_HOOK" : "MCUMGR_GRP_FS_FILE_ACCESS_HOOK",
    "FS_MGMT_PATH_SIZE" : "MCUMGR_GRP_FS_PATH_LEN",
    "MCUMGR_CMD_IMG_MGMT" : "MCUMGR_GRP_IMG",
    "IMG_MGMT_USE_HEAP_FOR_FLASH_IMG_CONTEXT" : "MCUMGR_GRP_IMG_USE_HEAP_FOR_FLASH_IMG_CONTEXT",
    "IMG_MGMT_UPDATABLE_IMAGE_NUMBER" : "MCUMGR_GRP_IMG_UPDATABLE_IMAGE_NUMBER",
    "IMG_MGMT_VERBOSE_ERR" : "MCUMGR_GRP_IMG_VERBOSE_ERR",
    "IMG_MGMT_DIRECT_IMAGE_UPLOAD" : "MCUMGR_GRP_IMG_DIRECT_UPLOAD",
    "IMG_MGMT_REJECT_DIRECT_XIP_MISMATCHED_SLOT" : "MCUMGR_GRP_IMG_REJECT_DIRECT_XIP_MISMATCHED_SLOT",
    "IMG_MGMT_FRUGAL_LIST" : "MCUMGR_GRP_IMG_FRUGAL_LIST",
    "MCUMGR_CMD_OS_MGMT" : "MCUMGR_GRP_OS",
    "MCUMGR_GRP_OS_OS_RESET_HOOK" : "MCUMGR_GRP_OS_RESET_HOOK",
    "OS_MGMT_RESET_MS" : "MCUMGR_GRP_OS_RESET_MS",
    "OS_MGMT_TASKSTAT" : "MCUMGR_GRP_OS_TASKSTAT",
    "OS_MGMT_TASKSTAT_ONLY_SUPPORTED_STATS" : "MCUMGR_GRP_OS_TASKSTAT_ONLY_SUPPORTED_STATS",
    "OS_MGMT_TASKSTAT_MAX_NUM_THREADS" : "MCUMGR_GRP_OS_TASKSTAT_MAX_NUM_THREADS",
    "OS_MGMT_TASKSTAT_THREAD_NAME_LEN" : "MCUMGR_GRP_OS_TASKSTAT_THREAD_NAME_LEN",
    "OS_MGMT_TASKSTAT_SIGNED_PRIORITY" : "MCUMGR_GRP_OS_TASKSTAT_SIGNED_PRIORITY",
    "OS_MGMT_TASKSTAT_STACK_INFO" : "MCUMGR_GRP_OS_TASKSTAT_STACK_INFO",
    "OS_MGMT_ECHO" : "MCUMGR_GRP_OS_ECHO",
    "OS_MGMT_MCUMGR_PARAMS" : "MCUMGR_GRP_OS_MCUMGR_PARAMS",
    "MCUMGR_CMD_SHELL_MGMT" : "MCUMGR_GRP_SHELL",
    "MCUMGR_CMD_SHELL_MGMT_LEGACY_RC_RETURN_CODE" : "MCUMGR_GRP_SHELL_LEGACY_RC_RETURN_CODE",
    "MCUMGR_CMD_STAT_MGMT" : "MCUMGR_GRP_STAT",
    "STAT_MGMT_MAX_NAME_LEN" : "MCUMGR_GRP_STAT_MAX_NAME_LEN",
    "MCUMGR_GRP_ZEPHYR_BASIC" : "MCUMGR_GRP_ZBASIC",
    "MCUMGR_GRP_BASIC_CMD_STORAGE_ERASE" : "MCUMGR_GRP_ZBASIC_STORAGE_ERASE",
    "MGMT_VERBOSE_ERR_RESPONSE" : "MCUMGR_SMP_VERBOSE_ERR_RESPONSE",
    "MCUMGR_SMP_REASSEMBLY_BT" : "MCUMGR_TRANSPORT_BT_REASSEMBLY",
    "MCUMGR_SMP_REASSEMBLY" : "MCUMGR_TRANSPORT_REASSEMBLY",
    "MCUMGR_SMP_REASSEMBLY_UNIT_TESTS" : "MCUMGR_TRANSPORT_REASSEMBLY_UNIT_TESTS",
    "MCUMGR_BUF_COUNT" : "MCUMGR_TRANSPORT_NETBUF_COUNT",
    "MCUMGR_BUF_SIZE" : "MCUMGR_TRANSPORT_NETBUF_SIZE",
    "MCUMGR_BUF_USER_DATA_SIZE" : "MCUMGR_TRANSPORT_NETBUF_USER_DATA_SIZE",
    "MCUMGR_SMP_BT" : "MCUMGR_TRANSPORT_BT",
    "MCUMGR_SMP_BT_AUTHEN" : "MCUMGR_TRANSPORT_BT_AUTHEN",
    "MCUMGR_SMP_BT_CONN_PARAM_CONTROL" : "MCUMGR_TRANSPORT_BT_CONN_PARAM_CONTROL",
    "MCUMGR_SMP_BT_CONN_PARAM_CONTROL_MIN_INT" : "MCUMGR_TRANSPORT_BT_CONN_PARAM_CONTROL_MIN_INT",
    "MCUMGR_SMP_BT_CONN_PARAM_CONTROL_MAX_INT" : "MCUMGR_TRANSPORT_BT_CONN_PARAM_CONTROL_MAX_INT",
    "MCUMGR_SMP_BT_CONN_PARAM_CONTROL_LATENCY" : "MCUMGR_TRANSPORT_BT_CONN_PARAM_CONTROL_LATENCY",
    "MCUMGR_SMP_BT_CONN_PARAM_CONTROL_TIMEOUT" : "MCUMGR_TRANSPORT_BT_CONN_PARAM_CONTROL_TIMEOUT",
    "MCUMGR_SMP_BT_CONN_PARAM_CONTROL_RESTORE_TIME" : "MCUMGR_TRANSPORT_BT_CONN_PARAM_CONTROL_RESTORE_TIME",
    "MCUMGR_SMP_BT_CONN_PARAM_CONTROL_RETRY_TIME" : "MCUMGR_TRANSPORT_BT_CONN_PARAM_CONTROL_RETRY_TIME",
    "MCUMGR_SMP_DUMMY" : "MCUMGR_TRANSPORT_DUMMY",
    "MCUMGR_SMP_DUMMY_RX_BUF_SIZE" : "MCUMGR_TRANSPORT_DUMMY_RX_BUF_SIZE",
    "MCUMGR_SMP_SHELL" : "MCUMGR_TRANSPORT_SHELL",
    "MCUMGR_SMP_SHELL_MTU" : "MCUMGR_TRANSPORT_SHELL_MTU",
    "MCUMGR_SMP_SHELL_RX_BUF_COUNT" : "MCUMGR_TRANSPORT_SHELL_RX_BUF_COUNT",
    "MCUMGR_SMP_UART" : "MCUMGR_TRANSPORT_UART",
    "MCUMGR_SMP_UART_ASYNC" : "MCUMGR_TRANSPORT_UART_ASYNC",
    "MCUMGR_SMP_UART_ASYNC_BUFS" : "MCUMGR_TRANSPORT_UART_ASYNC_BUFS",
    "MCUMGR_SMP_UART_ASYNC_BUF_SIZE" : "MCUMGR_TRANSPORT_UART_ASYNC_BUF_SIZE",
    "MCUMGR_SMP_UART_MTU" : "MCUMGR_TRANSPORT_UART_MTU",
    "MCUMGR_SMP_UDP" : "MCUMGR_TRANSPORT_UDP",
    "MCUMGR_SMP_UDP_IPV4" : "MCUMGR_TRANSPORT_UDP_IPV4",
    "MCUMGR_SMP_UDP_IPV6" : "MCUMGR_TRANSPORT_UDP_IPV6",
    "MCUMGR_SMP_UDP_PORT" : "MCUMGR_TRANSPORT_UDP_PORT",
    "MCUMGR_SMP_UDP_STACK_SIZE" : "MCUMGR_TRANSPORT_UDP_STACK_SIZE",
    "MCUMGR_SMP_UDP_THREAD_PRIO" : "MCUMGR_TRANSPORT_UDP_THREAD_PRIO",
    "MCUMGR_SMP_UDP_MTU" : "MCUMGR_TRANSPORT_UDP_MTU",
}
def process_file(path):
    """Rewrite deprecated MCUmgr Kconfig symbols in *path* in place.

    Each line is scanned for the longest matching legacy symbol from
    REPLACEMENTS, which is substituted with its new name. The file is
    rewritten only when at least one line changed.
    """
    modified = False
    output = []
    try:
        with open(path) as f:
            lines = f.readlines()
        for line in lines:
            longest = ""
            for m in REPLACEMENTS:
                # A plain substring test is equivalent to the previous
                # re.match(".*" + m + ".*") check (keys are plain
                # identifiers) and far cheaper. Keep the longest hit so
                # that e.g. MCUMGR_SMP_REASSEMBLY_BT wins over
                # MCUMGR_SMP_REASSEMBLY.
                if m in line and len(m) > len(longest):
                    longest = m
            if longest:
                modified = True
                line = line.replace(longest, REPLACEMENTS[longest])
            output.append(line)
        if modified is False:
            return
        with open(path, "w") as f:
            f.writelines(output)
    except UnicodeDecodeError:
        print(f"Unable to read lines from {path}", file=sys.stderr)
    except Exception as e:
        # The old code printed the exception twice and to stdout; report it
        # once on stderr instead.
        print(f"Failed with exception {e}", file=sys.stderr)
def process_tree(project):
    """Apply process_file() to every file under *project* matching FILE_PATTERNS."""
    # Compile the path patterns once instead of once per visited file.
    patterns = [re.compile(".+/" + fp + "$") for fp in FILE_PATTERNS]
    for p in project.glob("**/*"):
        for pattern in patterns:
            if pattern.match(str(p)) is not None:
                process_file(p)
                # Stop after the first hit: a path can match several
                # patterns, and without this break the same file was
                # rewritten once per matching pattern.
                break
if __name__ == "__main__":
    # CLI entry point: migrate the tree rooted at --root.
    cli = argparse.ArgumentParser(allow_abbrev=False)
    cli.add_argument(
        "-r", "--root", type=Path, required=True, help="Zephyr-based project path"
    )
    options = cli.parse_args()
    process_tree(options.root)
``` | /content/code_sandbox/scripts/utils/migrate_mcumgr_kconfigs.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,329 |
```python
#!/usr/bin/env python3
"""
Zephyr's NTC Thermistor DTS Table generator
###########################################
This script can be used to generate an NTC thermistor DTS node with a
"zephyr,compensation-table" property. This uses the Beta Parameter Equation
(see https://en.wikipedia.org/wiki/Thermistor#B_or_%CE%B2_parameter_equation)
Look-up the following electrical characteristics in the thermistor's data sheet:
Nominal resistance (R25) - The thermistor's resistance measured at 25C
Beta value (25/85) - This is the resistance value at 25C and at 85C
Usage::
python3 ntc_thermistor_table.py \
-r25 10000 \
-b 3974 > thermistor.dtsi
"""
import argparse
import os
import math
def c_to_k(celsius: float):
    """Convert a temperature from degrees Celsius to Kelvin."""
    return celsius + 273.15


def beta_equation_calc_resistance(r25, beta, temp_c):
    """Resistance at temp_c per the Beta parameter equation.

    R(T) = R25 * exp(B * (1/T - 1/T0)), with temperatures in Kelvin
    and T0 = 25 degrees Celsius.
    """
    reference_kelvin = c_to_k(25)
    exponent = beta * ((1 / c_to_k(temp_c)) - (1 / reference_kelvin))
    return r25 * math.exp(exponent)


def main(
    r25: float, beta: float, interval: int, temp_init: int, temp_final: int
) -> None:
    """Print a DTS thermistor node with its zephyr,compensation-table."""
    print(f"/* NTC Thermistor Table Generated with {os.path.basename(__file__)} */\n")
    print(
        f"thermistor_R25_{int(r25)}_B_{int(beta)}: "
        f"thermistor-R25-{int(r25)}-B-{int(beta)} {{"
    )
    # One <(temperature) resistance> cell per step of `interval` degrees,
    # covering [temp_init, temp_final].
    rows = [
        f"<({int(t)}) {int(beta_equation_calc_resistance(r25, beta, t))}>"
        for t in range(temp_init, temp_final + interval - 1, interval)
    ]
    print("\tzephyr,compensation-table = " + ",\n\t\t".join(rows) + ";")
    print("};")
if __name__ == "__main__":
    # CLI entry point: parse electrical characteristics and table bounds.
    cli = argparse.ArgumentParser("NTC Thermistor generator", allow_abbrev=False)
    cli.add_argument(
        "-r25", type=float, required=True, help="Nominal resistance of thermistor"
    )
    cli.add_argument(
        "-b", "--beta", type=float, required=True, help="Beta(25/85) value"
    )
    cli.add_argument(
        "-i", "--interval", type=int, required=False, default=10,
        help="Generated table interval",
    )
    cli.add_argument(
        "-ti", type=int, required=False, default=-25, help="First temperature"
    )
    cli.add_argument(
        "-tf", type=int, required=False, default=125, help="Final temperature"
    )
    options = cli.parse_args()
    main(options.r25, options.beta, options.interval, options.ti, options.tf)
``` | /content/code_sandbox/scripts/utils/ntc_thermistor_table.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 659 |
```python
"""
Utility script to generate headers for the following macros
- Z_LISTIFY
- Z_UTIL_INC
- Z_UTIL_DEC
- Z_UTIL_X2
- Z_IS_EQ
.. note::
The script will simply create the header files in the current working directory,
they should be copied manually to $ZEPHYR_BASE/include/zephyr/sys/ accordingly.
Usage::
python $ZEPHYR_BASE/scripts/utils/gen_util_macros.py -l 4095
"""
import argparse
def write_hidden_start(file):
    """Write the doxygen comment that opens an INTERNAL_HIDDEN section."""
    file.write("/**\n * @cond INTERNAL_HIDDEN\n */\n")
def write_hidden_stop(file):
    """Write the doxygen comment that closes an INTERNAL_HIDDEN section."""
    file.write("/**\n * INTERNAL_HIDDEN @endcond\n */\n")
def gen_util_listify(limit:int):
    """Write util_listify.h to the CWD with Z_UTIL_LISTIFY_0..Z_UTIL_LISTIFY_<limit+2>."""
    with open("util_listify.h", "w") as file:
        write_hidden_start(file)
        file.write("\n")
        # Guard against direct inclusion: only util_loops.h may include this.
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_LOOPS_H_\n")
        file.write("#error \"This header should not be used directly, please include util_loops.h instead\"\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_LOOPS_H_ */\n")
        file.write("\n")
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_LISTIFY_H_\n")
        file.write("#define ZEPHYR_INCLUDE_SYS_UTIL_LISTIFY_H_\n")
        file.write("\n")
        file.write("/* Set of UTIL_LISTIFY particles */\n")
        file.write("#define Z_UTIL_LISTIFY_0(F, sep, ...)\n\n")
        file.write("#define Z_UTIL_LISTIFY_1(F, sep, ...) \\\n")
        file.write(" F(0, __VA_ARGS__)\n\n")
        # Each particle N expands particle N-1 and appends F(N-1, ...).
        for i in range(2, limit + 3):
            file.write(f"#define Z_UTIL_LISTIFY_{i}(F, sep, ...) \\\n")
            file.write(f" Z_UTIL_LISTIFY_{i - 1}(F, sep, __VA_ARGS__) __DEBRACKET sep \\\n")
            file.write(f" F({i - 1}, __VA_ARGS__)\n")
        file.write("\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_LISTIFY_H_ */\n")
        file.write("\n")
        write_hidden_stop(file)
def gen_util_internal_is_eq(limit):
    """Write util_internal_is_eq.h defining Z_IS_<i>_EQ_<i> helpers for 0..limit."""
    with open("util_internal_is_eq.h", "w") as file:
        write_hidden_start(file)
        file.write("\n")
        # Guard against direct inclusion: only util_internal.h may include this.
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_H_\n")
        file.write("#error \"This header should not be used directly, \
please include util_internal.h instead\"\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_H_ */\n")
        file.write("\n")
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_IS_EQ_H_\n")
        file.write("#define ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_IS_EQ_H_\n")
        file.write("\n")
        for i in range(0, limit + 1):
            file.write(f"#define Z_IS_{i}_EQ_{i}(...) \\,\n")
        file.write("\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_IS_EQ_H_ */\n")
        file.write("\n")
        write_hidden_stop(file)
def gen_util_internal_util_inc(limit):
    """Write util_internal_util_inc.h defining Z_UTIL_INC_<i> for 0..limit+1."""
    with open("util_internal_util_inc.h", "w") as file:
        write_hidden_start(file)
        file.write("\n")
        # Guard against direct inclusion: only util_internal.h may include this.
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_H_\n")
        file.write("#error \"This header should not be used directly, \
please include util_internal.h instead\"\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_H_ */\n")
        file.write("\n")
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_UTIL_INC_H_\n")
        file.write("#define ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_UTIL_INC_H_\n")
        file.write("\n")
        for i in range(0, limit + 2):
            file.write(f"#define Z_UTIL_INC_{i} {i + 1}\n")
        file.write("\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_UTIL_INC_H_ */\n")
        file.write("\n")
        write_hidden_stop(file)
def gen_util_internal_util_dec(limit):
    """Write util_internal_util_dec.h defining Z_UTIL_DEC_<i> for 0..limit+1."""
    with open("util_internal_util_dec.h", "w") as file:
        write_hidden_start(file)
        file.write("\n")
        # Guard against direct inclusion: only util_internal.h may include this.
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_H_\n")
        file.write("#error \"This header should not be used directly, \
please include util_internal.h instead\"\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_H_ */\n")
        file.write("\n")
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_UTIL_DEC_H_\n")
        file.write("#define ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_UTIL_DEC_H_\n")
        file.write("\n")
        # Decrement saturates at zero.
        file.write(f"#define Z_UTIL_DEC_0 0\n")
        for i in range(1, limit + 2):
            file.write(f"#define Z_UTIL_DEC_{i} {i - 1}\n")
        file.write("\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_UTIL_DEC_H_ */\n")
        file.write("\n")
        write_hidden_stop(file)
def gen_util_internal_util_x2(limit):
    """Write util_internal_util_x2.h defining Z_UTIL_X2_<i> (doubling) for 0..limit."""
    with open("util_internal_util_x2.h", "w") as file:
        write_hidden_start(file)
        file.write("\n")
        # Guard against direct inclusion: only util_internal.h may include this.
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_H_\n")
        file.write("#error \"This header should not be used directly, \
please include util_internal.h instead\"\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_H_ */\n")
        file.write("\n")
        file.write("#ifndef ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_UTIL_X2_H_\n")
        file.write("#define ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_UTIL_X2_H_\n")
        file.write("\n")
        for i in range(0, limit + 1):
            file.write(f"#define Z_UTIL_X2_{i} {i *2}\n")
        file.write("\n")
        file.write("#endif /* ZEPHYR_INCLUDE_SYS_UTIL_INTERNAL_UTIL_X2_H_ */\n")
        file.write("\n")
        write_hidden_stop(file)
if __name__ == "__main__":
    cli = argparse.ArgumentParser(allow_abbrev=False)
    cli.add_argument(
        "-l", "--limit", type=int, required=True, help="Limit of macros"
    )
    options = cli.parse_args()
    # Emit every generated header with the same limit.
    for generator in (
        gen_util_listify,
        gen_util_internal_is_eq,
        gen_util_internal_util_inc,
        gen_util_internal_util_dec,
        gen_util_internal_util_x2,
    ):
        generator(options.limit)
``` | /content/code_sandbox/scripts/utils/gen_util_macros.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,509 |
```python
#!/usr/bin/env python
"""
Utility script to assist in the migration of a board from hardware model v1
(HWMv1) to hardware model v2 (HWMv2).
.. warning::
This script is not a complete migration tool. It is meant to assist in the
migration process, but it does not handle all cases.
This script requires the following arguments:
- ``-b|--board``: The name of the board to migrate.
- ``-g|--group``: The group the board belongs to. This is used to group a set of
boards in the same folder. In HWMv2, the boards are no longer organized by
architecture.
- ``-v|--vendor``: The vendor name.
- ``-s|--soc``: The SoC name.
In some cases, the new board name will differ from the old board name. For
example, the old board name may have the SoC name as a suffix, while in HWMv2,
this is no longer needed. In such cases, ``-n|--new-board`` needs to be
provided.
For boards with variants, ``--variants`` needs to be provided.
For out-of-tree boards, provide ``--board-root`` pointing to the custom board
root.
"""
import argparse
from pathlib import Path
import re
import sys
import ruamel.yaml
ZEPHYR_BASE = Path(__file__).parents[2]
def board_v1_to_v2(board_root, board, new_board, group, vendor, soc, variants):
    """Migrate a HWMv1 board to the HWMv2 folder layout and metadata.

    Moves boards/<arch>/<board> to boards/<group>/<new_board>, creates or
    extends board.yml, strips obsolete CONFIG_BOARD_*/CONFIG_SOC_* entries
    from the defconfig, renames BOARD_<OLD> Kconfig symbols and replaces
    Kconfig.board with Kconfig.<new_board>.

    board_root: directory containing the "boards" tree.
    board: old board name. new_board: new name (may equal board).
    group/vendor/soc: HWMv2 grouping folder, vendor and SoC names.
    variants: list of variant names (may be empty).
    """
    try:
        board_path = next(board_root.glob(f"boards/*/{board}"))
    except StopIteration:
        sys.exit(f"Board not found: {board}")

    new_board_path = board_root / "boards" / group / new_board
    if new_board_path.exists():
        print("New board already exists, updating board with additional SoC")
        if not soc:
            sys.exit("No SoC provided")
    new_board_path.mkdir(parents=True, exist_ok=True)

    print("Moving files to the new board folder...")
    for f in board_path.iterdir():
        f_new = new_board_path / f.name
        if f_new.exists():
            print(f"Skipping existing file: {f_new}")
            continue
        f.rename(f_new)

    print("Creating or updating board.yaml...")
    board_settings_file = new_board_path / "board.yml"
    if not board_settings_file.exists():
        board_settings = {
            "board": {
                "name": new_board,
                "vendor": vendor,
                "socs": []
            }
        }
    else:
        with open(board_settings_file) as f:
            yaml = ruamel.yaml.YAML(typ='safe', pure=True)
            board_settings = yaml.load(f)  # pylint: disable=assignment-from-no-return

    # Use a distinct name for the SoC entry so the `soc` argument is not
    # shadowed (the old code rebound the parameter).
    soc_entry = {"name": soc}
    if variants:
        soc_entry["variants"] = [{"name": variant} for variant in variants]
    board_settings["board"]["socs"].append(soc_entry)

    yaml = ruamel.yaml.YAML()
    yaml.indent(sequence=4, offset=2)
    with open(board_settings_file, "w") as f:
        yaml.dump(board_settings, f)

    print(f"Updating {board}_defconfig...")
    board_defconfig_file = new_board_path / f"{board}_defconfig"
    with open(board_defconfig_file) as f:
        board_soc_settings = []
        board_defconfig = ""
        dropped_line = False
        for line in f.readlines():
            # CONFIG_BOARD_* and CONFIG_SOC_* entries are obsolete in HWMv2;
            # keep the (non-series) SoC symbols for the new Kconfig file.
            m = re.match(r"^CONFIG_BOARD_.*$", line)
            if m:
                dropped_line = True
                continue
            m = re.match(r"^CONFIG_(SOC_[A-Z0-9_]+).*$", line)
            if m:
                dropped_line = True
                if not re.match(r"^CONFIG_SOC_SERIES_.*$", line):
                    board_soc_settings.append(m.group(1))
                continue
            # Also swallow a blank line left behind by a dropped entry.
            if dropped_line and re.match(r"^$", line):
                continue
            dropped_line = False
            board_defconfig += line
    with open(board_defconfig_file, "w") as f:
        f.write(board_defconfig)

    print("Updating Kconfig.defconfig...")
    board_kconfig_defconfig_file = new_board_path / "Kconfig.defconfig"
    with open(board_kconfig_defconfig_file) as f:
        board_kconfig_defconfig = ""
        has_kconfig_defconfig_entries = False
        in_board = False
        for line in f.readlines():
            # drop "config BOARD" entry from Kconfig.defconfig
            m = re.match(r"^config BOARD$", line)
            if m:
                in_board = True
                continue
            if in_board and re.match(r"^\s+.*$", line):
                continue
            in_board = False
            m = re.match(r"^config .*$", line)
            if m:
                has_kconfig_defconfig_entries = True
            # Rename BOARD_<OLD> symbols to BOARD_<NEW>.
            m = re.match(rf"^(.*)BOARD_{board.upper()}(.*)$", line)
            if m:
                board_kconfig_defconfig += (
                    m.group(1) + "BOARD_" + new_board.upper() + m.group(2) + "\n"
                )
                continue
            board_kconfig_defconfig += line
    if has_kconfig_defconfig_entries:
        with open(board_kconfig_defconfig_file, "w") as f:
            f.write(board_kconfig_defconfig)
    else:
        print("Removing empty Kconfig.defconfig after update...")
        board_kconfig_defconfig_file.unlink()

    print(f"Creating or updating Kconfig.{new_board}...")
    board_kconfig_file = new_board_path / "Kconfig.board"
    copyright = None
    with open(board_kconfig_file) as f:
        for line in f.readlines():
            # NOTE(review): this keeps only the LAST line of Kconfig.board —
            # presumably meant to capture a copyright header; confirm intent.
            copyright = line

    new_board_kconfig_file = new_board_path / f"Kconfig.{new_board}"
    # BUGFIX: "header" was previously used below without ever being defined,
    # raising NameError for any non-empty Kconfig.board.
    header = ""
    if copyright is not None:
        header = copyright + header
    selects = "\n\t" + "\n\t".join(["select " + setting for setting in board_soc_settings]) + "\n"
    if not new_board_kconfig_file.exists():
        with open(new_board_kconfig_file, "w") as f:
            f.write(
                header +
                f"\nconfig BOARD_{new_board.upper()}{selects}"
            )
    else:
        with open(new_board_kconfig_file, "a") as f:
            f.write(selects)

    print("Removing old Kconfig.board...")
    board_kconfig_file.unlink()

    print("Conversion done!")
if __name__ == "__main__":
    # CLI entry point: collect board identity and destination metadata.
    cli = argparse.ArgumentParser(allow_abbrev=False)
    cli.add_argument(
        "--board-root",
        type=Path,
        default=ZEPHYR_BASE,
        help="Board root",
    )
    cli.add_argument("-b", "--board", type=str, required=True, help="Board name")
    cli.add_argument("-n", "--new-board", type=str, help="New board name")
    cli.add_argument("-g", "--group", type=str, required=True, help="Board group")
    cli.add_argument("-v", "--vendor", type=str, required=True, help="Vendor name")
    cli.add_argument("-s", "--soc", type=str, required=True, help="Board SoC")
    cli.add_argument("--variants", nargs="+", default=[], help="Board variants")
    options = cli.parse_args()
    # Fall back to the old board name when no new name was requested.
    board_v1_to_v2(
        options.board_root,
        options.board,
        options.new_board or options.board,
        options.group,
        options.vendor,
        options.soc,
        options.variants,
    )
``` | /content/code_sandbox/scripts/utils/board_v1_to_v2.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,608 |
```python
#!/usr/bin/env python3
"""
Pinctrl Migration Utility Script for nRF Boards
###############################################
This script can be used to automatically migrate the Devicetree files of
nRF-based boards using the old <signal>-pin properties to select peripheral
pins. The script will parse a board Devicetree file and will first adjust that
file by removing old pin-related properties replacing them with pinctrl states.
A board-pinctrl.dtsi file will be generated containing the configuration for
all pinctrl states. Note that script will also work on files that have been
partially ported.
.. warning::
This script uses a basic line based parser, therefore not all valid
Devicetree files will be converted correctly. **ADJUSTED/GENERATED FILES
MUST BE MANUALLY REVIEWED**.
Known limitations: All SPI nodes will be assumed to be a master device.
Usage::
python3 pinctrl_nrf_migrate.py
-i path/to/board.dts
[--no-backup]
[--skip-nrf-check]
[--header ""]
Example:
.. code-block:: devicetree
/* Old board.dts */
...
&uart0 {
...
tx-pin = <5>;
rx-pin = <33>;
rx-pull-up;
...
};
/* Adjusted board.dts */
...
#include "board-pinctrl.dtsi"
...
&uart0 {
...
pinctrl-0 = <&uart0_default>;
pinctrl-1 = <&uart0_sleep>;
pinctrl-names = "default", "sleep";
...
};
/* Generated board-pinctrl.dtsi */
&pinctrl {
uart0_default: uart0_default {
group1 {
psels = <NRF_PSEL(UART_TX, 0, 5)>;
};
group2 {
psels = <NRF_PSEL(UART_RX, 1, 1)>;
bias-pull-up;
};
};
uart0_sleep: uart0_sleep {
group1 {
psels = <NRF_PSEL(UART_TX, 0, 5)>,
<NRF_PSEL(UART_RX, 1, 1)>;
low-power-enable;
};
};
};
"""
import argparse
import enum
from pathlib import Path
import re
import shutil
from typing import Callable, Optional, Dict, List
#
# Data types and containers
#
class PIN_CONFIG(enum.Enum):
    """Pin configuration attributes.

    Member values are the devicetree property names emitted verbatim into
    the generated pinctrl group nodes (see fmt_pinctrl_groups()).
    """
    PULL_UP = "bias-pull-up"
    PULL_DOWN = "bias-pull-down"
    LOW_POWER = "low-power-enable"
    NORDIC_INVERT = "nordic,invert"
class Device(object):
    """Describes how one peripheral type is migrated.

    Bundles the node-label regex used to recognise the peripheral, the
    line-processing callback, the mapping from old property prefixes to
    NRF_PSEL signal names, and whether a sleep state reference is needed.
    """

    def __init__(
        self,
        pattern: str,
        callback: Callable,
        signals: Dict[str, str],
        needs_sleep: bool,
    ) -> None:
        self.needs_sleep = needs_sleep
        self.signals = signals
        self.callback = callback
        self.pattern = pattern
        # scratch storage for per-device attributes, starts empty
        self.attrs = {}
class SignalMapping(object):
    """Pairing of a peripheral signal name with a pin number."""

    def __init__(self, signal: str, pin: int) -> None:
        self.pin = pin
        self.signal = signal
class PinGroup(object):
    """Set of pin mappings sharing the same configuration flags."""

    def __init__(self, pins: List[SignalMapping], config: List[PIN_CONFIG]) -> None:
        self.config = config
        self.pins = pins
class PinConfiguration(object):
    """One signal/pin mapping together with its configuration flags."""

    def __init__(self, mapping: SignalMapping, config: List[PIN_CONFIG]) -> None:
        self.config = config
        self.mapping = mapping
class DeviceConfiguration(object):
    """Pin mappings and configuration flags collected for one device node."""

    def __init__(self, name: str, pins: List[PinConfiguration]) -> None:
        self.name = name
        self.pins = pins

    def add_signal_config(self, signal: str, config: PIN_CONFIG) -> None:
        """Attach a configuration flag to *signal*.

        When the signal is not known yet, a new entry with a placeholder
        pin number of -1 is created.
        """
        for entry in self.pins:
            if entry.mapping.signal == signal:
                entry.config.append(config)
                return
        self.pins.append(PinConfiguration(SignalMapping(signal, -1), [config]))

    def set_signal_pin(self, signal: str, pin: int) -> None:
        """Record the pin number for *signal*, creating the entry if new."""
        for entry in self.pins:
            if entry.mapping.signal == signal:
                entry.mapping.pin = pin
                return
        self.pins.append(PinConfiguration(SignalMapping(signal, pin), []))
#
# Content formatters and writers
#
def gen_pinctrl(
    configs: List[DeviceConfiguration], input_file: Path, header: str
) -> None:
    """Generate or extend the board-pinctrl.dtsi file.

    A default and a sleep state node are written for every device in
    ``configs``.  When a pinctrl file already exists next to
    ``input_file``, the new states are inserted before its final closing
    brace so previously generated content is preserved.

    Args:
        configs: Board configs.
        input_file: Board DTS file.
        header: Text prepended when a new pinctrl file is created.
    """
    last_line = 0
    content: List[str] = []
    pinctrl_file = input_file.parent / (input_file.stem + "-pinctrl.dtsi")
    # append content before last node closing
    if pinctrl_file.exists():
        # fix: read via a context manager so the handle is closed
        with open(pinctrl_file) as existing:
            content = existing.readlines()
        for i, line in enumerate(content[::-1]):
            if re.match(r"\s*};.*", line):
                last_line = len(content) - (i + 1)
                break
    with open(pinctrl_file, "w") as out:
        if not last_line:
            out.write(header)
            out.write("&pinctrl {\n")
        else:
            for line in content[:last_line]:
                out.write(line)
        for config in configs:
            # create pin groups with common configuration (default state)
            default_groups: List[PinGroup] = []
            for pin in config.pins:
                merged = False
                for group in default_groups:
                    if group.config == pin.config:
                        group.pins.append(pin.mapping)
                        merged = True
                        break
                if not merged:
                    default_groups.append(PinGroup([pin.mapping], pin.config))
            # create pin group for low power state (all pins, low-power-enable)
            group = PinGroup([], [PIN_CONFIG.LOW_POWER])
            for pin in config.pins:
                group.pins.append(pin.mapping)
            sleep_groups = [group]
            # generate default and sleep state entries
            out.write(f"\t{config.name}_default: {config.name}_default {{\n")
            out.write(fmt_pinctrl_groups(default_groups))
            out.write("\t};\n\n")
            out.write(f"\t{config.name}_sleep: {config.name}_sleep {{\n")
            out.write(fmt_pinctrl_groups(sleep_groups))
            out.write("\t};\n\n")
        if not last_line:
            out.write("};\n")
        else:
            for line in content[last_line:]:
                out.write(line)
def board_is_nrf(content: List[str]) -> bool:
    """Check if board is nRF based.

    Args:
        content: DT file content as list of lines.

    Returns:
        True if board is nRF based, False otherwise.
    """
    # a board is considered nRF based when it includes any nrf* DT file
    nrf_include = re.compile(r'^#include\s+(?:"|<).*nrf.*(?:>|").*')
    return any(nrf_include.match(line) for line in content)
def fmt_pinctrl_groups(groups: List[PinGroup]) -> str:
    """Format pinctrl groups.

    Example generated content::

        group1 {
            psels = <NRF_PSEL(UART_TX, 0, 5)>;
        };
        group2 {
            psels = <NRF_PSEL(UART_RX, 1, 1)>;
            bias-pull-up;
        };

    Args:
        groups: Pin groups to format.

    Returns:
        Generated groups.
    """
    content = ""
    # fix: the inner loop previously reused the outer index name ``i``,
    # shadowing it; distinct names keep the numbering intent obvious
    for group_idx, group in enumerate(groups):
        content += f"\t\tgroup{group_idx + 1} {{\n"
        # write psels entries
        for pin_idx, mapping in enumerate(group.pins):
            prefix = "psels = " if pin_idx == 0 else "\t"
            suffix = ";" if pin_idx == len(group.pins) - 1 else ","
            pin = mapping.pin
            # pins >= 32 belong to GPIO port 1; NRF_PSEL takes (port, pin)
            port = 0 if pin < 32 else 1
            if port == 1:
                pin -= 32
            content += (
                f"\t\t\t{prefix}<NRF_PSEL({mapping.signal}, {port}, {pin})>{suffix}\n"
            )
        # write all pin configuration (bias, low-power, etc.)
        for entry in group.config:
            content += f"\t\t\t{entry.value};\n"
        content += "\t\t};\n"
    return content
def fmt_states(device: str, indent: str, needs_sleep: bool) -> str:
    """Format state entries for the given device.

    Args:
        device: Device name.
        indent: Indentation.
        needs_sleep: If sleep entry is needed.

    Returns:
        State entries to be appended to the device.
    """
    entries = [f"{indent}pinctrl-0 = <&{device}_default>;"]
    if needs_sleep:
        entries.append(f"{indent}pinctrl-1 = <&{device}_sleep>;")
        entries.append(f'{indent}pinctrl-names = "default", "sleep";\n')
    else:
        entries.append(f'{indent}pinctrl-names = "default";\n')
    return "\n".join(entries)
def insert_pinctrl_include(content: List[str], board: str) -> None:
    """Insert board pinctrl include if not present.

    Args:
        content: DT file content as list of lines (modified in place).
        board: Board name.

    Raises:
        ValueError: if neither an include line nor a root node is found.
    """
    has_pinctrl_include = False
    last_include_idx = -1
    root_node_idx = -1
    for idx, line in enumerate(content):
        # a board pinctrl include that is already there wins
        if re.match(r'^#include\s+".*-pinctrl\.dtsi".*', line):
            has_pinctrl_include = True
        # remember the last generic include line
        elif re.match(r'^#include\s+(?:"|<)(.*)(?:>|").*', line):
            last_include_idx = idx
        # stop scanning at the root node
        elif re.match(r"^\s*/\s*{.*", line):
            root_node_idx = idx
            break
    if last_include_idx < 0 and root_node_idx < 0:
        raise ValueError("Unexpected DT file content")
    if has_pinctrl_include:
        return
    if last_include_idx >= 0:
        insert_at = last_include_idx + 1
    else:
        insert_at = max(0, root_node_idx - 1)
    content.insert(insert_at, f'#include "{board}-pinctrl.dtsi"\n')
def adjust_content(content: List[str], board: str) -> List[DeviceConfiguration]:
    """Adjust content, replacing old pin properties with pinctrl states.

    Args:
        content: File content to be adjusted (modified in place).
        board: Board name.

    Returns:
        Configurations collected for every supported device found.
    """
    configs: List[DeviceConfiguration] = []
    level = 0
    in_device = False
    states_written = False
    # fix: previously unbound until the first device node was matched,
    # raising UnboundLocalError on files whose first line is not a
    # device reference
    current_device = None
    new_content = []
    for line in content:
        # look for a device reference node (e.g. &uart0)
        if not in_device:
            m = re.match(r"^[^&]*&([a-z0-9]+)\s*{[^}]*$", line)
            if m:
                # check if device requires processing
                current_device = None
                for device in DEVICES:
                    if re.match(device.pattern, m.group(1)):
                        current_device = device
                        indent = ""
                        config = DeviceConfiguration(m.group(1), [])
                        configs.append(config)
                        break
                # we are now inside a device node
                level = 1
                in_device = True
                states_written = False
        else:
            # entering subnode (must come after all properties)
            if re.match(r"[^\/\*]*{.*", line):
                level += 1
            # exiting subnode (or device node)
            elif re.match(r"[^\/\*]*}.*", line):
                level -= 1
                in_device = level > 0
            elif current_device:
                # device already ported, drop
                if re.match(r"[^\/\*]*pinctrl-\d+.*", line):
                    current_device = None
                    configs.pop()
                # determine indentation
                elif not indent:
                    m = re.match(r"(\s+).*", line)
                    if m:
                        indent = m.group(1)
        # process each device line, append states at the end
        if current_device:
            if level == 1:
                line = current_device.callback(config, current_device.signals, line)
            if (level == 2 or not in_device) and not states_written:
                line = (
                    fmt_states(config.name, indent, current_device.needs_sleep)
                    + line
                )
                states_written = True
                current_device = None
        if line:
            new_content.append(line)
    if configs:
        insert_pinctrl_include(new_content, board)
    content[:] = new_content
    return configs
#
# Processing utilities
#
def match_and_store_pin(
    config: DeviceConfiguration, signals: Dict[str, str], line: str
) -> Optional[str]:
    """Match and store a pin mapping.

    Args:
        config: Device configuration.
        signals: Signals name mapping.
        line: Line containing potential pin mapping.

    Returns:
        The unchanged line when no pin mapping was found, None otherwise.
    """
    # handle qspi special case for io-pins (array case)
    io_match = re.match(r"\s*io-pins\s*=\s*([\s<>,0-9]+).*", line)
    if io_match:
        raw_pins = re.sub(r"[<>,]", "", io_match.group(1)).split()
        for idx, pin in enumerate(raw_pins):
            config.set_signal_pin(signals[f"io{idx}"], int(pin))
        return None
    # generic "<signal>-pin(s) = <N>" case
    pin_match = re.match(r"\s*([a-z]+\d?)-pins?\s*=\s*<(\d+)>.*", line)
    if pin_match:
        config.set_signal_pin(signals[pin_match.group(1)], int(pin_match.group(2)))
        return None
    return line
#
# Device processing callbacks
#
def process_uart(config: DeviceConfiguration, signals, line: str) -> Optional[str]:
    """Process UART/UARTE devices."""
    # pin assignment lines are consumed by the shared helper
    if not match_and_store_pin(config, signals, line):
        return None
    # "<signal>-pull-up" lines become a bias flag on that signal
    pull_up = re.match(r"\s*([a-z]+)-pull-up.*", line)
    if pull_up:
        config.add_signal_config(signals[pull_up.group(1)], PIN_CONFIG.PULL_UP)
        return None
    return line
def process_spi(config: DeviceConfiguration, signals, line: str) -> Optional[str]:
    """Process SPI devices."""
    # pin assignment lines are consumed by the shared helper
    if not match_and_store_pin(config, signals, line):
        return None
    # miso bias properties map onto pinctrl bias flags
    for pattern, bias in (
        (r"\s*miso-pull-up.*", PIN_CONFIG.PULL_UP),
        (r"\s*miso-pull-down.*", PIN_CONFIG.PULL_DOWN),
    ):
        if re.match(pattern, line):
            config.add_signal_config(signals["miso"], bias)
            return None
    return line
def process_pwm(config: DeviceConfiguration, signals, line: str) -> Optional[str]:
    """Process PWM devices."""
    # pin assignment lines are consumed by the shared helper
    if not match_and_store_pin(config, signals, line):
        return None
    # "<channel>-inverted" maps onto the nordic,invert flag
    inverted = re.match(r"\s*([a-z0-9]+)-inverted.*", line)
    if inverted:
        config.add_signal_config(signals[inverted.group(1)], PIN_CONFIG.NORDIC_INVERT)
        return None
    return line
# Migration table: each entry recognises device node labels by regex and
# maps the old "<prefix>-pin" property prefixes to NRF_PSEL signal names.
# needs_sleep controls whether fmt_states() emits sleep state references
# for the device (gen_pinctrl() always generates the sleep node itself).
DEVICES = [
    Device(
        r"uart\d",
        process_uart,
        {
            "tx": "UART_TX",
            "rx": "UART_RX",
            "rts": "UART_RTS",
            "cts": "UART_CTS",
        },
        needs_sleep=True,
    ),
    Device(
        r"i2c\d",
        match_and_store_pin,
        {
            # I2C nodes use the TWIM (master) signal names
            "sda": "TWIM_SDA",
            "scl": "TWIM_SCL",
        },
        needs_sleep=True,
    ),
    Device(
        r"spi\d",
        process_spi,
        {
            # SPI nodes are assumed masters (SPIM) — see module docstring
            "sck": "SPIM_SCK",
            "miso": "SPIM_MISO",
            "mosi": "SPIM_MOSI",
        },
        needs_sleep=True,
    ),
    Device(
        r"pdm\d",
        match_and_store_pin,
        {
            "clk": "PDM_CLK",
            "din": "PDM_DIN",
        },
        needs_sleep=False,
    ),
    Device(
        r"qdec",
        match_and_store_pin,
        {
            "a": "QDEC_A",
            "b": "QDEC_B",
            "led": "QDEC_LED",
        },
        needs_sleep=True,
    ),
    Device(
        r"qspi",
        match_and_store_pin,
        {
            "sck": "QSPI_SCK",
            "io0": "QSPI_IO0",
            "io1": "QSPI_IO1",
            "io2": "QSPI_IO2",
            "io3": "QSPI_IO3",
            "csn": "QSPI_CSN",
        },
        needs_sleep=True,
    ),
    Device(
        r"pwm\d",
        process_pwm,
        {
            "ch0": "PWM_OUT0",
            "ch1": "PWM_OUT1",
            "ch2": "PWM_OUT2",
            "ch3": "PWM_OUT3",
        },
        needs_sleep=True,
    ),
    Device(
        r"i2s\d",
        match_and_store_pin,
        {
            "sck": "I2S_SCK_M",
            "lrck": "I2S_LRCK_M",
            "sdout": "I2S_SDOUT",
            "sdin": "I2S_SDIN",
            "mck": "I2S_MCK",
        },
        needs_sleep=False,
    ),
]
"""Supported devices and associated configuration"""
def main(input_file: Path, no_backup: bool, skip_nrf_check: bool, header: str) -> None:
    """Entry point

    Args:
        input_file: Input DTS file.
        no_backup: Do not create backup files.
        skip_nrf_check: Convert even when the file does not look nRF based.
        header: Header prepended when a new pinctrl file is created.
    """
    board_name = input_file.stem
    # fix: read via a context manager so the handle is closed
    with open(input_file) as f:
        content = f.readlines()
    if not skip_nrf_check and not board_is_nrf(content):
        print(f"Board {board_name} is not nRF based, terminating")
        return
    if not no_backup:
        backup_file = input_file.parent / (board_name + ".bck" + input_file.suffix)
        shutil.copy(input_file, backup_file)
    configs = adjust_content(content, board_name)
    if configs:
        with open(input_file, "w") as f:
            f.writelines(content)
        gen_pinctrl(configs, input_file, header)
        print(f"Board {board_name} Devicetree file has been converted")
    else:
        print(f"Nothing to be converted for {board_name}")
if __name__ == "__main__":
    # Command line interface; see the module docstring for usage examples.
    parser = argparse.ArgumentParser("pinctrl migration utility for nRF", allow_abbrev=False)
    parser.add_argument(
        "-i", "--input", type=Path, required=True, help="Board DTS file"
    )
    parser.add_argument(
        "--no-backup", action="store_true", help="Do not create backup files"
    )
    parser.add_argument(
        "--skip-nrf-check",
        action="store_true",
        help="Skip checking if board is nRF-based",
    )
    parser.add_argument(
        "--header", default="", type=str, help="Header to be prepended to pinctrl files"
    )
    args = parser.parse_args()
    main(args.input, args.no_backup, args.skip_nrf_check, args.header)
``` | /content/code_sandbox/scripts/utils/pinctrl_nrf_migrate.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 4,424 |
```python
"""
Utility script to migrate Twister configuration files from using string-based
lists to native YAML lists.
Usage::
python $ZEPHYR_BASE/scripts/utils/twister_to_list.py \
-p path/to/zephyr-based-project
"""
import argparse
from pathlib import Path
from ruamel.yaml import YAML
# Twister configuration keys whose values may be whitespace-separated
# string lists in the legacy format.
FIELDS = (
    "arch_exclude",
    "arch_allow",
    "depends_on",
    "extra_args",
    "extra_sections",
    "platform_exclude",
    "platform_allow",
    "tags",
    "toolchain_exclude",
    "toolchain_allow",
)


def process(conf):
    """Convert string-valued list fields of *conf* to native lists.

    A field is rewritten only when it holds a string with more than one
    whitespace-separated token; single-word strings and values that are
    already lists are left untouched.

    Returns:
        True when at least one field was rewritten, False otherwise.
    """
    update = False
    for field in FIELDS:
        value = conf.get(field)
        if isinstance(value, str) and value:
            tokens = value.split()
            if len(tokens) > 1:
                conf[field] = tokens
                update = True
    return update
def twister_to_list(project, dry_run):
    """Rewrite testcase.yaml/sample.yaml files under *project* in place.

    Args:
        project: Zephyr-based project root (a Path).
        dry_run: When True, only report which files would change.
    """
    # round-trip loader so comments/quoting in the YAML are preserved
    yaml = YAML()
    yaml.indent(offset=2)
    yaml.preserve_quotes = True
    for p in project.glob("**/*"):
        if p.name not in ("testcase.yaml", "sample.yaml"):
            continue
        conf = yaml.load(p)
        update = False
        # the optional "common" section is processed like a test spec
        common = conf.get("common")
        if common:
            update |= process(common)
        # assumes every file has a "tests" mapping — a file without one
        # would raise KeyError (TODO confirm intended)
        for _, spec in conf["tests"].items():
            update |= process(spec)
        if update:
            print(f"Updating {p}{' (dry run)' if dry_run else ''}")
            if not dry_run:
                with open(p, "w") as f:
                    yaml.dump(conf, f)
if __name__ == "__main__":
    # CLI entry point: -p/--project selects the tree to rewrite.
    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument(
        "-p", "--project", type=Path, required=True, help="Zephyr-based project path"
    )
    parser.add_argument("--dry-run", action="store_true", help="Dry run")
    args = parser.parse_args()
    twister_to_list(args.project, args.dry_run)
``` | /content/code_sandbox/scripts/utils/twister_to_list.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 428 |
```python
#!/usr/bin/env python3
#
"""Query the Top-Ten Bug Bashers
This script will query the top-ten Bug Bashers in a specified date window.
Usage:
./scripts/bug-bash.py -t ~/.ghtoken -b 2021-07-26 -e 2021-08-07
GITHUB_TOKEN="..." ./scripts/bug-bash.py -b 2021-07-26 -e 2021-08-07
"""
import argparse
from datetime import datetime, timedelta
import operator
import os
# Requires PyGithub
from github import Github
def parse_args():
    """Parse command line arguments and resolve the GitHub token."""
    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('-a', '--all', dest='all',
                        help='Show all bugs squashed', action='store_true')
    parser.add_argument('-t', '--token', dest='tokenfile',
                        help='File containing GitHub token (alternatively, use GITHUB_TOKEN env variable)', metavar='FILE')
    parser.add_argument('-s', '--start', dest='start', help='start date (YYYY-mm-dd)',
                        metavar='START_DATE', type=valid_date_type, required=True)
    parser.add_argument('-e', '--end', dest='end', help='end date (YYYY-mm-dd)',
                        metavar='END_DATE', type=valid_date_type, required=True)
    args = parser.parse_args()

    # a reversed window is a usage error
    if args.end < args.start:
        raise ValueError(
            f'end date {args.end} is before start date {args.start}')

    # token file takes precedence over the environment variable
    if args.tokenfile:
        with open(args.tokenfile, 'r') as file:
            token = file.read().strip()
    else:
        token = os.environ.get('GITHUB_TOKEN')
        if token is None:
            raise ValueError('No credentials specified')

    args.token = token
    return args
class BugBashTally(object):
    # Tallies merged bug-fix pull requests per GitHub user in a date window.

    def __init__(self, gh, start_date, end_date):
        """Create a BugBashTally object with the provided Github object,
        start datetime object, and end datetime object"""
        self._gh = gh
        self._repo = gh.get_repo('zephyrproject-rtos/zephyr')
        self._start_date = start_date
        self._end_date = end_date
        # lazily-populated caches, kept in lockstep by get_issues()
        self._issues = []
        self._pulls = []

    def get_tally(self):
        """Return a dict with (key = user, value = score)"""
        tally = dict()
        for p in self.get_pulls():
            user = p.user.login
            # one point per merged bug-fixing PR
            tally[user] = tally.get(user, 0) + 1
        return tally

    def get_rev_tally(self):
        """Return a dict with (key = score, value = list<user>) sorted in
        descending order"""
        # there may be ties!
        rev_tally = dict()
        for user, score in self.get_tally().items():
            if score not in rev_tally:
                rev_tally[score] = [user]
            else:
                rev_tally[score].append(user)
        # sort in descending order by score
        rev_tally = dict(
            sorted(rev_tally.items(), key=operator.itemgetter(0), reverse=True))
        return rev_tally

    def get_top_ten(self):
        """Return a dict with (key = score, value = user) sorted in
        descending order"""
        top_ten = []
        for score, users in self.get_rev_tally().items():
            # do not sort users by login - hopefully fair-ish
            for user in users:
                if len(top_ten) == 10:
                    return top_ten
                top_ten.append(tuple([score, user]))
        return top_ten

    def get_pulls(self):
        """Return GitHub pull requests that squash bugs in the provided
        date window"""
        # NOTE(review): an empty result is re-queried on every call — the
        # cache cannot distinguish "not fetched yet" from "no results"
        if self._pulls:
            return self._pulls
        self.get_issues()
        return self._pulls

    def get_issues(self):
        """Return GitHub issues representing bugs in the provided date
        window"""
        if self._issues:
            return self._issues
        # include the whole final day in the window
        cutoff = self._end_date + timedelta(1)
        issues = self._repo.get_issues(state='closed', labels=[
            'bug'], since=self._start_date)
        for i in issues:
            # the PyGithub API and v3 REST API do not facilitate 'until'
            # or 'end date' :-/
            # assumes closed_at and start/end are comparable naive
            # datetimes in the same timezone — TODO confirm
            if i.closed_at < self._start_date or i.closed_at > cutoff:
                continue
            ipr = i.pull_request
            if ipr is None:
                # ignore issues without a linked pull request
                continue
            # the linked PR number is the last path component of its URL
            prid = int(ipr.html_url.split('/')[-1])
            pr = self._repo.get_pull(prid)
            if not pr.merged:
                # pull requests that were not merged do not count
                continue
            self._pulls.append(pr)
            self._issues.append(i)
        return self._issues
# path_to_url
def valid_date_type(arg_date_str):
    """custom argparse *date* type for user dates values given from the
    command line"""
    try:
        return datetime.strptime(arg_date_str, "%Y-%m-%d")
    except ValueError:
        # re-raise as the argparse-specific error type so the parser
        # reports it as a usage problem
        raise argparse.ArgumentTypeError(
            f"Given Date ({arg_date_str}) not valid! Expected format, YYYY-MM-DD!")
def print_top_ten(top_ten):
    """Print the top-ten bug bashers, one per line."""
    for score, user in top_ten:
        # tab-separated so the output can be redirected into a .csv file
        print(f"{score}\t{user}")
def main():
    """Query and print bug-bash results for the requested date window."""
    args = parse_args()
    bbt = BugBashTally(Github(args.token), args.start, args.end)
    if args.all:
        # print one issue per line
        issues = bbt.get_issues()
        pulls = bbt.get_pulls()
        n = len(issues)
        m = len(pulls)
        # get_issues() appends to both caches in lockstep, so the
        # counts must match
        assert n == m
        for i in range(0, n):
            print('{}\t{}\t{}'.format(
                issues[i].number, pulls[i].user.login, pulls[i].title))
    else:
        # print the top ten
        print_top_ten(bbt.get_top_ten())
if __name__ == '__main__':
    main()
``` | /content/code_sandbox/scripts/release/bug_bash.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,353 |
```python
#!/usr/bin/env python3
#
"""Query issues in a release branch
This script searches for issues referenced via pull-requests in a release
branch in order to simplify tracking changes such as automated backports,
manual backports, security fixes, and stability fixes.
A formatted report is printed to standard output either in JSON or
reStructuredText.
Since an issue is required for all changes to release branches, merged PRs
must have at least one instance of the phrase "Fixes #1234" in the body. This
script will throw an error if a PR has been made without an associated issue.
Usage:
./scripts/release/list_backports.py \
-t ~/.ghtoken \
-b v2.7-branch \
-s 2021-12-15 -e 2022-04-22 \
-P 45074 -P 45868 -P 44918 -P 41234 -P 41174 \
-j | jq . | tee /tmp/backports.json
GITHUB_TOKEN="<secret>" \
./scripts/release/list_backports.py \
-b v3.0-branch \
-p 43381 \
-j | jq . | tee /tmp/backports.json
"""
import argparse
from datetime import datetime, timedelta
import io
import json
import logging
import os
import re
import sys
# Requires PyGithub
from github import Github
# path_to_url
def valid_date_type(arg_date_str):
    """custom argparse *date* type for user dates values given from the
    command line"""
    try:
        return datetime.strptime(arg_date_str, "%Y-%m-%d")
    except ValueError:
        # surface bad input as an argparse usage error
        raise argparse.ArgumentTypeError(
            f"Given Date ({arg_date_str}) not valid! Expected format, YYYY-MM-DD!")
def parse_args():
    """Parse command line arguments.

    Returns:
        The parsed namespace with an extra ``token`` attribute, or None
        when the argument combination is invalid (errors are logged).
    """
    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('-t', '--token', dest='tokenfile',
                        help='File containing GitHub token (alternatively, use GITHUB_TOKEN env variable)', metavar='FILE')
    parser.add_argument('-b', '--base', dest='base',
                        help='branch (base) for PRs (e.g. v2.7-branch)', metavar='BRANCH', required=True)
    parser.add_argument('-j', '--json', dest='json', action='store_true',
                        help='print output in JSON rather than RST')
    parser.add_argument('-s', '--start', dest='start', help='start date (YYYY-mm-dd)',
                        metavar='START_DATE', type=valid_date_type)
    parser.add_argument('-e', '--end', dest='end', help='end date (YYYY-mm-dd)',
                        metavar='END_DATE', type=valid_date_type)
    parser.add_argument("-o", "--org", default="zephyrproject-rtos",
                        help="Github organisation")
    parser.add_argument('-p', '--include-pull', dest='includes',
                        help='include pull request (can be specified multiple times)',
                        metavar='PR', type=int, action='append', default=[])
    # fix: user-visible help text had a typo ("exlude")
    parser.add_argument('-P', '--exclude-pull', dest='excludes',
                        help='exclude pull request (can be specified multiple times, helpful for version bumps and release notes)',
                        metavar='PR', type=int, action='append', default=[])
    parser.add_argument("-r", "--repo", default="zephyr",
                        help="Github repository")
    args = parser.parse_args()
    # an explicit PR list and a date window are mutually exclusive
    if args.includes:
        if args.start:
            logging.error(
                'the --start argument should not be used with --include-pull')
            return None
        if args.end:
            logging.error(
                'the --end argument should not be used with --include-pull')
            return None
    else:
        if not args.start:
            logging.error(
                'if --include-pr PR is not used, --start START_DATE is required')
            return None
        if not args.end:
            # default the window end to "now"
            args.end = datetime.now()
        if args.end < args.start:
            logging.error(
                f'end date {args.end} is before start date {args.start}')
            return None
    # token file takes precedence over the environment variable
    if args.tokenfile:
        with open(args.tokenfile, 'r') as file:
            token = file.read().strip()
    else:
        if 'GITHUB_TOKEN' not in os.environ:
            raise ValueError('No credentials specified')
        token = os.environ['GITHUB_TOKEN']
    args.token = token
    return args
class Backport(object):
    """Collects merged release-branch PRs and the issues they fix."""

    def __init__(self, repo, base, pulls):
        self._base = base
        self._repo = repo
        self._issues = []
        self._pulls = pulls
        # diagnostics populated by get_issues()
        self._pulls_without_an_issue = []
        self._pulls_with_invalid_issues = {}

    @staticmethod
    def by_date_range(repo, base, start_date, end_date, excludes):
        """Create a Backport object with the provided repo,
        base, start datetime object, and end datetime objects, and
        list of excluded PRs"""
        pulls = []
        unfiltered_pulls = repo.get_pulls(
            base=base, state='closed')
        for p in unfiltered_pulls:
            if not p.merged:
                # only consider merged backports
                continue
            # the whole final day is included in the window
            if p.closed_at < start_date or p.closed_at >= end_date + timedelta(1):
                # only concerned with PRs within time window
                continue
            if p.number in excludes:
                # skip PRs that have been explicitly excluded
                continue
            pulls.append(p)
        # paginated_list.sort() does not exist
        pulls = sorted(pulls, key=lambda x: x.number)
        return Backport(repo, base, pulls)

    @staticmethod
    def by_included_prs(repo, base, includes):
        """Create a Backport object with the provided repo,
        base, and list of included PRs"""
        pulls = []
        for i in includes:
            try:
                p = repo.get_pull(i)
            except Exception:
                p = None
            if not p:
                logging.error(f'{i} is not a valid pull request')
                return None
            if p.base.ref != base:
                logging.error(
                    f'{i} is not a valid pull request for base {base} ({p.base.label})')
                return None
            pulls.append(p)
        # paginated_list.sort() does not exist
        pulls = sorted(pulls, key=lambda x: x.number)
        return Backport(repo, base, pulls)

    @staticmethod
    def sanitize_title(title):
        # TODO: sanitize titles such that they are suitable for both JSON and ReStructured Text
        # could also automatically fix titles like "Automated backport of PR #1234"
        return title

    def print(self):
        """Print fixed issues as reStructuredText list items."""
        for i in self.get_issues():
            title = Backport.sanitize_title(i.title)
            # * :github:`38972` - logging: Cleaning references to tracing in logging
            print(f'* :github:`{i.number}` - {title}')

    def print_json(self):
        """Print fixed issues as a JSON array of {id, title, url} objects."""
        issue_objects = []
        for i in self.get_issues():
            obj = {}
            obj['id'] = i.number
            obj['title'] = Backport.sanitize_title(i.title)
            obj['url'] = f'path_to_url{self._repo.organization.login}/{self._repo.name}/pull/{i.number}'
            issue_objects.append(obj)
        print(json.dumps(issue_objects))

    def get_pulls(self):
        return self._pulls

    def get_issues(self):
        """Return GitHub issues fixed in the provided date window"""
        if self._issues:
            return self._issues
        issue_map = {}
        self._pulls_without_an_issue = []
        self._pulls_with_invalid_issues = {}
        for p in self._pulls:
            # check for issues in this pr
            issues_for_this_pr = {}
            with io.StringIO(p.body) as buf:
                for line in buf.readlines():
                    line = line.strip()
                    match = re.search(r"^Fixes[:]?\s*#([1-9][0-9]*).*", line)
                    if not match:
                        match = re.search(
                            rf"^Fixes[:]?\s*path_to_url{self._repo.organization.login}/{self._repo.name}/issues/([1-9][0-9]*).*", line)
                    if not match:
                        continue
                    issue_number = int(match[1])
                    # fix: get_issue() raises for unknown numbers rather
                    # than returning None; treat any failure as an invalid
                    # reference (mirrors the pattern in by_included_prs)
                    try:
                        issue = self._repo.get_issue(issue_number)
                    except Exception:
                        issue = None
                    if not issue:
                        # fix: the previous first-access of the dict entry
                        # raised KeyError; setdefault creates it on demand
                        self._pulls_with_invalid_issues.setdefault(
                            p.number, []).append(issue_number)
                        logging.error(
                            f'path_to_url{self._repo.organization.login}/{self._repo.name}/pull/{p.number} references invalid issue number {issue_number}')
                        continue
                    issues_for_this_pr[issue_number] = issue
            # report prs missing issues later
            if len(issues_for_this_pr) == 0:
                logging.error(
                    f'path_to_url{self._repo.organization.login}/{self._repo.name}/pull/{p.number} does not have an associated issue')
                self._pulls_without_an_issue.append(p)
                continue
            # FIXME: when we have upgrade to python3.9+, use "issue_map | issues_for_this_pr"
            issue_map = {**issue_map, **issues_for_this_pr}
        issues = list(issue_map.values())
        # paginated_list.sort() does not exist
        issues = sorted(issues, key=lambda x: x.number)
        self._issues = issues
        return self._issues

    def get_pulls_without_issues(self):
        if self._pulls_without_an_issue:
            return self._pulls_without_an_issue
        self.get_issues()
        return self._pulls_without_an_issue

    def get_pulls_with_invalid_issues(self):
        if self._pulls_with_invalid_issues:
            return self._pulls_with_invalid_issues
        self.get_issues()
        return self._pulls_with_invalid_issues
def main():
    """Entry point.

    Returns:
        An ``os.EX_*`` exit code suitable for sys.exit().
    """
    args = parse_args()
    if not args:
        return os.EX_DATAERR
    try:
        gh = Github(args.token)
    except Exception:
        logging.error('failed to authenticate with GitHub')
        return os.EX_DATAERR
    try:
        repo = gh.get_repo(args.org + '/' + args.repo)
    except Exception:
        logging.error('failed to obtain Github repository')
        return os.EX_DATAERR
    bp = None
    if args.includes:
        bp = Backport.by_included_prs(repo, args.base, set(args.includes))
    else:
        bp = Backport.by_date_range(repo, args.base,
                                    args.start, args.end, set(args.excludes))
    if not bp:
        return os.EX_DATAERR
    pulls_with_invalid_issues = bp.get_pulls_with_invalid_issues()
    if pulls_with_invalid_issues:
        logging.error('The following PRs link to invalid issues:')
        # fix: the map is keyed by PR number; the previous code iterated
        # the dict directly (bare ints), crashed on tuple unpacking, and
        # used ``p.number`` on what is already a number
        for pr_number, issue_numbers in pulls_with_invalid_issues.items():
            logging.error(
                f'\npath_to_url{repo.organization.login}/{repo.name}/pull/{pr_number}: {issue_numbers}')
        return os.EX_DATAERR
    pulls_without_issues = bp.get_pulls_without_issues()
    if pulls_without_issues:
        logging.error(
            'Please ensure the body of each PR to a release branch contains "Fixes #1234"')
        logging.error('The following PRs are lacking associated issues:')
        for p in pulls_without_issues:
            logging.error(
                f'path_to_url{repo.organization.login}/{repo.name}/pull/{p.number}')
        return os.EX_DATAERR
    if args.json:
        bp.print_json()
    else:
        bp.print()
    return os.EX_OK
if __name__ == '__main__':
    sys.exit(main())
``` | /content/code_sandbox/scripts/release/list_backports.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,549 |
```python
#
import os
import uuid
from west.commands import WestCommand
from west import log
from zspdx.sbom import SBOMConfig, makeSPDX, setupCmakeQuery
# Long help text shown for the `spdx` west command; explains the CMake
# file-based API query file that must exist before the build.
SPDX_DESCRIPTION = """\
This command creates an SPDX 2.2 tag-value bill of materials
following the completion of a Zephyr build.
Prior to the build, an empty file must be created at
BUILDDIR/.cmake/api/v1/query/codemodel-v2 in order to enable
the CMake file-based API, which the SPDX command relies upon.
This can be done by calling `west spdx --init` prior to
calling `west build`."""
class ZephyrSpdx(WestCommand):
    """West command creating an SPDX bill of materials after a build."""

    def __init__(self):
        super().__init__(
            'spdx',
            'create SPDX bill of materials',
            SPDX_DESCRIPTION)

    def do_add_parser(self, parser_adder):
        """Register the ``spdx`` sub-command and its options."""
        parser = parser_adder.add_parser(self.name,
                                         help=self.help,
                                         description=self.description)
        # If you update these options, make sure to keep the docs in
        # doc/guides/west/zephyr-cmds.rst up to date.
        parser.add_argument('-i', '--init', action="store_true",
                            help="initialize CMake file-based API")
        parser.add_argument('-d', '--build-dir',
                            help="build directory")
        parser.add_argument('-n', '--namespace-prefix',
                            help="namespace prefix")
        parser.add_argument('-s', '--spdx-dir',
                            help="SPDX output directory")
        parser.add_argument('--analyze-includes', action="store_true",
                            help="also analyze included header files")
        parser.add_argument('--include-sdk', action="store_true",
                            help="also generate SPDX document for SDK")
        return parser

    def do_run(self, args, unknown_args):
        """Dispatch to CMake-query initialization or SPDX generation."""
        # fix: these literals carried a pointless f-string prefix (no
        # placeholders); log.dbg() receives the label and value as
        # separate arguments
        log.dbg("running zephyr SPDX generator")
        log.dbg(" --init is", args.init)
        log.dbg(" --build-dir is", args.build_dir)
        log.dbg(" --namespace-prefix is", args.namespace_prefix)
        log.dbg(" --spdx-dir is", args.spdx_dir)
        log.dbg(" --analyze-includes is", args.analyze_includes)
        log.dbg(" --include-sdk is", args.include_sdk)
        if args.init:
            do_run_init(args)
        else:
            do_run_spdx(args)
def do_run_init(args):
    """Create the empty CMake file-based API query file before a build."""
    log.inf("initializing Cmake file-based API prior to build")
    if not args.build_dir:
        log.die("Build directory not specified; call `west spdx --init --build-dir=BUILD_DIR`")
    # initialize CMake file-based API - empty query file
    if setupCmakeQuery(args.build_dir):
        log.inf("initialized; run `west build` then run `west spdx`")
    else:
        log.err("Couldn't create Cmake file-based API query directory")
        log.err("You can manually create an empty file at $BUILDDIR/.cmake/api/v1/query/codemodel-v2")
def do_run_spdx(args):
    """Generate the SPDX documents for a completed build."""
    if not args.build_dir:
        log.die("Build directory not specified; call `west spdx --build-dir=BUILD_DIR`")
    # create the SPDX files
    cfg = SBOMConfig()
    cfg.buildDir = args.build_dir
    if args.namespace_prefix:
        cfg.namespacePrefix = args.namespace_prefix
    else:
        # create default namespace according to SPDX spec
        # note that this is intentionally _not_ an actual URL where
        # this document will be stored
        cfg.namespacePrefix = f"path_to_url{str(uuid.uuid4())}"
    cfg.spdxDir = args.spdx_dir or os.path.join(args.build_dir, "spdx")
    if args.analyze_includes:
        cfg.analyzeIncludes = True
    if args.include_sdk:
        cfg.includeSDK = True
    # make sure SPDX directory exists, or create it if it doesn't
    if not os.path.exists(cfg.spdxDir):
        os.makedirs(cfg.spdxDir, exist_ok=False)
    elif not os.path.isdir(cfg.spdxDir):
        log.err(f'SPDX output directory {cfg.spdxDir} exists but is not a directory')
        return
    makeSPDX(cfg)
``` | /content/code_sandbox/scripts/west_commands/spdx.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 964 |
```python
#
from west.commands import WestCommand
from run_common import add_parser_common, do_run_common
EXPORT_DESCRIPTION = '''\
Run RobotFramework test suites with a runner of choice.
'''
class Robot(WestCommand):
    '''West command that runs RobotFramework test suites with the
    runner selected by the 'robot-runner' key in runners.yaml.'''

    def __init__(self):
        # Zero-argument super() for consistency with the other west
        # command classes in this directory (the Python-2-style
        # super(Robot, self) form is equivalent but outdated).
        super().__init__(
            'robot',
            # Keep this in sync with the string in west-commands.yml.
            'run RobotFramework test suites',
            EXPORT_DESCRIPTION,
            accepts_unknown_args=True)

        # runners.yaml key that selects which runner executes the suites.
        self.runner_key = 'robot-runner'  # in runners.yaml

    def do_add_parser(self, parser_adder):
        '''Hook: build this command's argument parser.'''
        return add_parser_common(self, parser_adder)

    def do_run(self, my_args, runner_args):
        '''Hook: execute the command via the shared runner machinery.'''
        do_run_common(self, my_args, runner_args)
``` | /content/code_sandbox/scripts/west_commands/robot.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 159 |
```python
#!/usr/bin/env python3
from collections import defaultdict
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Set, Union
import argparse
import contextlib
import glob
import os
import subprocess
import sys
import tempfile
# TODO: include changes to child bindings
HERE = Path(__file__).parent.resolve()
ZEPHYR_BASE = HERE.parent.parent
SCRIPTS = ZEPHYR_BASE / 'scripts'
sys.path.insert(0, str(SCRIPTS / 'dts' / 'python-devicetree' / 'src'))
from devicetree.edtlib import Binding, bindings_from_paths, load_vendor_prefixes_txt
# The Compat type is a (compatible, on_bus) pair, which is used as a
# lookup key for bindings. The name "compat" matches edtlib's internal
# variable for this; it's a bit of a misnomer, but let's be
# consistent.
@dataclass
class Compat:
    '''Binding lookup key: a (compatible, on_bus) pair.'''
    # The compatible string, e.g. 'vnd,device'.
    compatible: str
    # Bus this binding applies to, or None if bus-independent.
    on_bus: Optional[str]

    def __hash__(self):
        # The dataclass-generated __eq__ compares these same two
        # fields, so hashing on them keeps hash/eq consistent and
        # makes Compat usable as a dict key.
        return hash((self.compatible, self.on_bus))
class BindingChange:
    '''Marker type for an individual change that happened to a
    binding between the start and end commits. See subclasses
    below for concrete changes.
    '''

# Type aliases used throughout this script:
# (compatible, on_bus) pair -> the binding for that pair.
Compat2Binding = Dict[Compat, Binding]
# binding -> list of changes detected for it between the two commits.
Binding2Changes = Dict[Binding, List[BindingChange]]
@dataclass
class Changes:
    '''Container for all the changes that happened between the
    start and end commits.'''
    # Sorted list of every vendor prefix that has at least one change.
    vnds: List[str]
    # vendor prefix -> bindings added between start and end.
    vnd2added: Dict[str, Compat2Binding]
    # vendor prefix -> bindings removed between start and end.
    vnd2removed: Dict[str, Compat2Binding]
    # vendor prefix -> modified bindings and their individual changes.
    vnd2changes: Dict[str, Binding2Changes]
# One concrete BindingChange subclass per kind of change we can detect
# between the start and end versions of a binding.  'start'/'end' hold
# the value at the start and end commits respectively.

@dataclass
class ModifiedSpecifier2Cells(BindingChange):
    '''Cell names for a specifier space changed (or the space itself
    was added/removed; see get_modified_specifier2cells()).'''
    space: str
    start: List[str]
    end: List[str]

@dataclass
class ModifiedBuses(BindingChange):
    '''The binding's bus list changed.'''
    start: List[str]
    end: List[str]

@dataclass
class AddedProperty(BindingChange):
    '''A property was added to the binding.'''
    property: str

@dataclass
class RemovedProperty(BindingChange):
    '''A property was removed from the binding.'''
    property: str

@dataclass
class ModifiedPropertyType(BindingChange):
    '''A property's 'type:' changed.'''
    property: str
    start: str
    end: str

@dataclass
class ModifiedPropertyEnum(BindingChange):
    '''A property's 'enum:' values changed.'''
    property: str
    start: Any
    end: Any

@dataclass
class ModifiedPropertyConst(BindingChange):
    '''A property's 'const:' value changed.'''
    property: str
    start: Any
    end: Any

@dataclass
class ModifiedPropertyDefault(BindingChange):
    '''A property's 'default:' value changed.'''
    property: str
    start: Any
    end: Any

@dataclass
class ModifiedPropertyDeprecated(BindingChange):
    '''A property's deprecation status changed.'''
    property: str
    start: bool
    end: bool

@dataclass
class ModifiedPropertyRequired(BindingChange):
    '''A property's 'required:' status changed.'''
    property: str
    start: bool
    end: bool
def get_changes_between(
    compat2binding_start: Compat2Binding,
    compat2binding_end: Compat2Binding
) -> Changes:
    '''Compute everything that changed between the two binding maps:
    added bindings, removed bindings, and modifications, all grouped
    by vendor prefix.'''
    added = {
        compat: binding
        for compat, binding in compat2binding_end.items()
        if compat not in compat2binding_start
    }
    removed = {
        compat: binding
        for compat, binding in compat2binding_start.items()
        if compat not in compat2binding_end
    }

    vnd2added: Dict[str, Compat2Binding] = group_compat2binding_by_vnd(added)
    vnd2removed: Dict[str, Compat2Binding] = \
        group_compat2binding_by_vnd(removed)
    vnd2changes = group_binding2changes_by_vnd(
        get_binding2changes(compat2binding_start, compat2binding_end))

    # Every vendor that shows up in any of the three groupings.
    all_vnds: Set[str] = set(vnd2added) | set(vnd2removed) | set(vnd2changes)

    return Changes(vnds=sorted(all_vnds),
                   vnd2added=vnd2added,
                   vnd2removed=vnd2removed,
                   vnd2changes=vnd2changes)
def group_compat2binding_by_vnd(
    compat2binding: Compat2Binding
) -> Dict[str, Compat2Binding]:
    '''Split *compat2binding* into per-vendor sub-dicts keyed by
    vendor prefix (empty string for vendor-less compatibles).'''
    grouped: Dict[str, Compat2Binding] = defaultdict(dict)
    for compat, binding in compat2binding.items():
        vnd = get_vnd(binding.compatible)
        grouped[vnd][compat] = binding
    return grouped
def group_binding2changes_by_vnd(
    binding2changes: Binding2Changes
) -> Dict[str, Binding2Changes]:
    '''Split *binding2changes* into per-vendor sub-dicts keyed by
    vendor prefix (empty string for vendor-less compatibles).'''
    grouped: Dict[str, Binding2Changes] = defaultdict(dict)
    for binding, change_list in binding2changes.items():
        vnd = get_vnd(binding.compatible)
        grouped[vnd][binding] = change_list
    return grouped
def get_vnd(compatible: str) -> str:
    '''Return the vendor prefix of *compatible* (the part before the
    first comma), or the empty string if there is no comma.'''
    vnd, comma, _ = compatible.partition(',')
    return vnd if comma else ''
def get_binding2changes(
    compat2binding_start: Compat2Binding,
    compat2binding_end: Compat2Binding
) -> Binding2Changes:
    '''Map each binding present in both maps to its list of changes,
    omitting bindings that did not change.'''
    ret: Binding2Changes = {}
    for compat, binding_end in compat2binding_end.items():
        binding_start = compat2binding_start.get(compat)
        if binding_start is None:
            # Newly added binding; reported elsewhere.
            continue
        changes = get_binding_changes(binding_start, binding_end)
        if changes:
            ret[binding_end] = changes
    return ret
def get_binding_changes(
    binding_start: Binding,
    binding_end: Binding
) -> List[BindingChange]:
    '''Enumerate the changes to a binding given its start and end values.'''
    assert binding_start.compatible == binding_end.compatible
    assert binding_start.on_bus == binding_end.on_bus

    # Properties defined in both versions; only these can be "modified".
    common_props: Set[str] = set(binding_start.prop2specs) & \
        set(binding_end.prop2specs)

    # Run every detector and flatten the per-detector result lists.
    detected = [
        get_modified_specifier2cells(binding_start, binding_end),
        get_modified_buses(binding_start, binding_end),
        get_added_properties(binding_start, binding_end),
        get_removed_properties(binding_start, binding_end),
        get_modified_property_type(binding_start, binding_end, common_props),
        get_modified_property_enum(binding_start, binding_end, common_props),
        get_modified_property_const(binding_start, binding_end, common_props),
        get_modified_property_default(binding_start, binding_end,
                                      common_props),
        get_modified_property_deprecated(binding_start, binding_end,
                                         common_props),
        get_modified_property_required(binding_start, binding_end,
                                       common_props),
    ]
    return [change for group in detected for change in group]
def get_modified_specifier2cells(
    binding_start: Binding,
    binding_end: Binding
) -> List[BindingChange]:
    '''Report changes to the specifier-to-cells maps.

    A space present in both versions with different cell names is
    reported with both values.  A space only present at the end is
    reported with start=None; a space that disappeared is reported
    with end=None.
    '''
    ret: List[BindingChange] = []
    start = binding_start.specifier2cells
    end = binding_end.specifier2cells
    if start == end:
        return []
    # Spaces present at the end: modified, or newly added
    # (start.get() yields None for an added space).
    for space, cells_end in end.items():
        cells_start = start.get(space)
        if cells_start != cells_end:
            ret.append(ModifiedSpecifier2Cells(space,
                                               start=cells_start,
                                               end=cells_end))
    # Spaces that vanished between start and end.
    for space, cells_start in start.items():
        if space not in end:
            ret.append(ModifiedSpecifier2Cells(space,
                                               start=cells_start,
                                               end=None))
    return ret
def get_modified_buses(
    binding_start: Binding,
    binding_end: Binding
) -> List[BindingChange]:
    '''Report a change to the binding's bus list, if any.'''
    before = binding_start.buses
    after = binding_end.buses
    return [] if before == after else [ModifiedBuses(start=before, end=after)]
def get_added_properties(
    binding_start: Binding,
    binding_end: Binding
) -> List[BindingChange]:
    '''Properties present at the end commit but not at the start.'''
    old_props = binding_start.prop2specs
    return [AddedProperty(prop)
            for prop in binding_end.prop2specs
            if prop not in old_props]
def get_removed_properties(
    binding_start: Binding,
    binding_end: Binding
) -> List[BindingChange]:
    '''Properties present at the start commit but not at the end.'''
    new_props = binding_end.prop2specs
    return [RemovedProperty(prop)
            for prop in binding_start.prop2specs
            if prop not in new_props]
# The following six functions are thin wrappers around
# get_modified_property_helper(): each one compares a single PropertySpec
# attribute between the start and end bindings and wraps differences in
# the matching BindingChange subclass.

def get_modified_property_type(
    binding_start: Binding,
    binding_end: Binding,
    common_props: Set[str]
) -> List[BindingChange]:
    '''Report properties whose 'type:' changed.'''
    return get_modified_property_helper(
        common_props,
        lambda prop: binding_start.prop2specs[prop].type,
        lambda prop: binding_end.prop2specs[prop].type,
        ModifiedPropertyType)

def get_modified_property_enum(
    binding_start: Binding,
    binding_end: Binding,
    common_props: Set[str]
) -> List[BindingChange]:
    '''Report properties whose 'enum:' changed.'''
    return get_modified_property_helper(
        common_props,
        lambda prop: binding_start.prop2specs[prop].enum,
        lambda prop: binding_end.prop2specs[prop].enum,
        ModifiedPropertyEnum)

def get_modified_property_const(
    binding_start: Binding,
    binding_end: Binding,
    common_props: Set[str]
) -> List[BindingChange]:
    '''Report properties whose 'const:' changed.'''
    return get_modified_property_helper(
        common_props,
        lambda prop: binding_start.prop2specs[prop].const,
        lambda prop: binding_end.prop2specs[prop].const,
        ModifiedPropertyConst)

def get_modified_property_default(
    binding_start: Binding,
    binding_end: Binding,
    common_props: Set[str]
) -> List[BindingChange]:
    '''Report properties whose 'default:' changed.'''
    return get_modified_property_helper(
        common_props,
        lambda prop: binding_start.prop2specs[prop].default,
        lambda prop: binding_end.prop2specs[prop].default,
        ModifiedPropertyDefault)

def get_modified_property_deprecated(
    binding_start: Binding,
    binding_end: Binding,
    common_props: Set[str]
) -> List[BindingChange]:
    '''Report properties whose deprecation status changed.'''
    return get_modified_property_helper(
        common_props,
        lambda prop: binding_start.prop2specs[prop].deprecated,
        lambda prop: binding_end.prop2specs[prop].deprecated,
        ModifiedPropertyDeprecated)

def get_modified_property_required(
    binding_start: Binding,
    binding_end: Binding,
    common_props: Set[str]
) -> List[BindingChange]:
    '''Report properties whose 'required:' status changed.'''
    return get_modified_property_helper(
        common_props,
        lambda prop: binding_start.prop2specs[prop].required,
        lambda prop: binding_end.prop2specs[prop].required,
        ModifiedPropertyRequired)
def get_modified_property_helper(
    common_props: Set[str],
    start_fn: Callable[[str], Any],
    end_fn: Callable[[str], Any],
    change_constructor: Callable[[str, Any, Any], 'BindingChange']
) -> List['BindingChange']:
    '''For each property in *common_props*, compare start_fn(prop)
    against end_fn(prop); wrap each difference via
    *change_constructor*(prop, start_value, end_value).'''
    return [
        change_constructor(prop, before, after)
        for prop in common_props
        # Walrus captures are visible in the output expression above.
        if (before := start_fn(prop)) != (after := end_fn(prop))
    ]
def load_compat2binding(commit: str) -> Compat2Binding:
    '''Load a map from compatible to binding with that compatible,
    based on the bindings in zephyr at the given commit.'''

    @contextlib.contextmanager
    def git_worktree(directory: os.PathLike, commit: str):
        '''Check out *commit* into *directory* as a detached git
        worktree for the duration of the with-block.'''
        fspath = os.fspath(directory)
        subprocess.run(['git', 'worktree', 'add', '--detach', fspath, commit],
                       check=True)
        try:
            yield
        finally:
            # Remove the worktree even if the with-body raised;
            # otherwise a failed run leaves stale worktree metadata
            # behind, making later 'git worktree add' calls fail.
            print('removing worktree...')
            subprocess.run(['git', 'worktree', 'remove', fspath], check=True)

    ret: Compat2Binding = {}
    with tempfile.TemporaryDirectory(prefix='dt_bindings_worktree') as tmpdir:
        with git_worktree(tmpdir, commit):
            tmpdir_bindings = Path(tmpdir) / 'dts' / 'bindings'
            # Bindings may use either .yml or .yaml.
            binding_files = []
            binding_files.extend(glob.glob(f'{tmpdir_bindings}/**/*.yml',
                                           recursive=True))
            binding_files.extend(glob.glob(f'{tmpdir_bindings}/**/*.yaml',
                                           recursive=True))
            bindings: List[Binding] = bindings_from_paths(
                binding_files, ignore_errors=True)
            for binding in bindings:
                compat = Compat(binding.compatible, binding.on_bus)
                # Each (compatible, on_bus) pair must be unique.
                assert compat not in ret
                ret[compat] = binding
    return ret
def compatible_sort_key(data: "Union[Compat, Binding]") -> "tuple[str, str]":
    '''Sort key used by Printer: sort by compatible, then by bus.

    Returns a (compatible, on_bus) tuple, with a None bus normalized
    to '' so bus-less entries sort first.  (The previous -> str
    annotation was wrong: the key has always been a tuple.)
    '''
    return (data.compatible, data.on_bus or '')
class Printer:
    '''Helper class for formatting output.'''

    def __init__(self, outfile):
        # Writable text file object all output goes to.
        self.outfile = outfile
        # vendor prefix -> vendor display name, from vendor-prefixes.txt.
        self.vnd2vendor_name = load_vendor_prefixes_txt(
            ZEPHYR_BASE / 'dts' / 'bindings' / 'vendor-prefixes.txt')

    def print(self, *args, **kwargs):
        '''Like builtin print(), but always directed at self.outfile.'''
        kwargs['file'] = self.outfile
        print(*args, **kwargs)

    def print_changes(self, changes: Changes):
        '''Write the full .rst report: for each vendor, the new,
        removed, and modified bindings.'''
        for vnd in changes.vnds:
            if vnd:
                vnd_fmt = f' ({vnd})'
            else:
                vnd_fmt = ''
            self.print(f'* {self.vendor_name(vnd)}{vnd_fmt}:\n')
            added = changes.vnd2added[vnd]
            if added:
                self.print(' * New bindings:\n')
                self.print_compat2binding(
                    added,
                    lambda binding: f':dtcompatible:`{binding.compatible}`'
                )
            removed = changes.vnd2removed[vnd]
            if removed:
                self.print(' * Removed bindings:\n')
                self.print_compat2binding(
                    removed,
                    # Removed compatibles no longer exist in the docs,
                    # so use plain literal markup, not a :dtcompatible: ref.
                    lambda binding: f'``{binding.compatible}``'
                )
            modified = changes.vnd2changes[vnd]
            if modified:
                self.print(' * Modified bindings:\n')
                self.print_binding2changes(modified)

    def print_compat2binding(
            self,
            compat2binding: Compat2Binding,
            formatter: Callable[[Binding], str]
    ) -> None:
        '''Print one formatter()-rendered line per binding, sorted by
        (compatible, bus), followed by a blank line.'''
        for compat in sorted(compat2binding, key=compatible_sort_key):
            self.print(f' * {formatter(compat2binding[compat])}')
        self.print()

    def print_binding2changes(self, binding2changes: Binding2Changes) -> None:
        '''Print each modified binding followed by its change list.'''
        for binding, changes in binding2changes.items():
            on_bus = f' (on {binding.on_bus} bus)' if binding.on_bus else ''
            self.print(f' * :dtcompatible:`{binding.compatible}`{on_bus}:\n')
            for change in changes:
                self.print_change(change)
            self.print()

    def print_change(self, change: BindingChange) -> None:
        '''Print a human-readable line for one BindingChange subclass.'''
        def print(msg):
            # Local helper: intentionally shadows the builtin to add
            # the nested list-item prefix.
            self.print(f' * {msg}')

        def print_prop_change(details):
            print(f'property ``{change.property}`` {details} changed from '
                  f'{change.start} to {change.end}')

        if isinstance(change, ModifiedSpecifier2Cells):
            print(f'specifier cells for space "{change.space}" '
                  f'are now named: {change.end} (old value: {change.start})')
        elif isinstance(change, ModifiedBuses):
            print(f'bus list changed from {change.start} to {change.end}')
        elif isinstance(change, AddedProperty):
            print(f'new property: ``{change.property}``')
        elif isinstance(change, RemovedProperty):
            print(f'removed property: ``{change.property}``')
        elif isinstance(change, ModifiedPropertyType):
            print_prop_change('type')
        elif isinstance(change, ModifiedPropertyEnum):
            print_prop_change('enum value')
        elif isinstance(change, ModifiedPropertyConst):
            print_prop_change('const value')
        elif isinstance(change, ModifiedPropertyDefault):
            print_prop_change('default value')
        elif isinstance(change, ModifiedPropertyDeprecated):
            print_prop_change('deprecation status')
        elif isinstance(change, ModifiedPropertyRequired):
            # required is boolean, so only two transitions exist.
            if not change.start and change.end:
                print(f'property ``{change.property}`` is now required')
            else:
                print(f'property ``{change.property}`` is no longer required')
        else:
            raise ValueError(f'unknown type for {change}: {type(change)}')

    def vendor_name(self, vnd: str) -> str:
        '''Map a vendor prefix to its display name.'''
        # Necessary due to the patch for openthread.
        if vnd == 'openthread':
            # FIXME: we have to go beyond the dict since this
            # compatible isn't in vendor-prefixes.txt, but we have
            # binding(s) for it. We need to fix this in CI by
            # rejecting unknown vendors in a bindings check.
            return 'OpenThread'
        if vnd == '':
            return 'Generic or vendor-independent'
        # NOTE(review): raises KeyError for prefixes missing from
        # vendor-prefixes.txt — presumably intended to fail loudly.
        return self.vnd2vendor_name[vnd]
def parse_args() -> argparse.Namespace:
    '''Parse command-line arguments: a start commit, an end commit,
    and the .rst output file path.'''
    parser = argparse.ArgumentParser(
        allow_abbrev=False,
        description='''
Print human-readable descriptions of changes to devicetree
bindings between two commits, in .rst format suitable for copy/pasting
into the release notes.
''',
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument('start', metavar='START-COMMIT',
                        help='''what you want to compare bindings against
                        (typically the previous release's tag)''')
    parser.add_argument('end', metavar='END-COMMIT',
                        help='''what you want to know bindings changes for
                        (typically 'main')''')
    parser.add_argument('file', help='where to write the .rst output to')
    return parser.parse_args()
def main():
    '''Entry point: diff bindings between the two commits and write a
    release-notes .rst fragment to the requested file.'''
    args = parse_args()
    # Load the start snapshot first, then the end snapshot, then diff.
    changes = get_changes_between(load_compat2binding(args.start),
                                  load_compat2binding(args.end))
    with open(args.file, 'w') as outfile:
        Printer(outfile).print_changes(changes)

if __name__ == '__main__':
    main()
``` | /content/code_sandbox/scripts/release/list_devicetree_bindings_changes.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,874 |
```python
#
import argparse
import os
from west import log
from west.commands import WestCommand
# Relative to the folder where this script lives
COMPLETION_REL_PATH = 'completion/west-completion'
# Help text for 'west completion'.  Note: this command takes no
# positional arguments besides the shell name; the old trailing
# "positional arguments" section describing source_dir/cmake_opt was
# copied from 'west build' help and did not apply here.
COMP_DESCRIPTION = '''\
Output shell completion scripts for west.
This command outputs completion scripts for different shells by printing them
to stdout. Using the completion scripts:
bash:
# one-time
source <(west completion bash)
# permanent
west completion bash > ~/west-completion.bash
# edit your .bashrc or .bash_profile and add:
source $HOME/west-completion.bash
zsh:
# one-time
source <(west completion zsh)
# permanent (might require sudo)
west completion zsh > "${fpath[1]}/_west"
fish:
# one-time
west completion fish | source
# permanent
west completion fish > $HOME/.config/fish/completions/west.fish
'''
class Completion(WestCommand):
    '''West command that prints a shell completion script to stdout.'''

    def __init__(self):
        super().__init__(
            'completion',
            # Keep this in sync with the string in west-commands.yml.
            'output shell completion scripts',
            COMP_DESCRIPTION,
            accepts_unknown_args=False)

    def do_add_parser(self, parser_adder):
        '''Build the parser: a single positional 'shell' argument.'''
        parser = parser_adder.add_parser(
            self.name,
            help=self.help,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description)

        # Remember to update west-completion.bash if you add or remove
        # flags
        parser.add_argument('shell', nargs=1, choices=['bash', 'zsh', 'fish'],
                            help='''Shell for which the completion
                            script is intended.''')
        return parser

    def do_run(self, args, unknown_args):
        '''Print the completion file for the chosen shell.'''
        # Completion scripts live next to this file, one per shell,
        # named west-completion.<shell>.
        cf = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                          *COMPLETION_REL_PATH.split('/'))
        cf += '.' + args.shell[0]
        try:
            # Read explicitly as UTF-8 so output doesn't depend on the
            # platform default encoding.
            with open(cf, 'r', encoding='utf-8') as f:
                print(f.read())
        except FileNotFoundError as e:
            log.die(f'Unable to find completion file: {e}')
``` | /content/code_sandbox/scripts/west_commands/completion.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 502 |
```python
#
'''Common definitions for building Zephyr applications.
This provides some default settings and convenience wrappers for
building Zephyr applications needed by multiple commands.
See build.py for the build command itself.
'''
import zcmake
import os
import sys
from pathlib import Path
from west import log
from west.configuration import config
from west.util import escapes_directory
# Domains.py must be imported from the pylib directory, since
# twister also uses the implementation
script_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, os.path.join(script_dir, "pylib/build_helpers/"))
from domains import Domains
DEFAULT_BUILD_DIR = 'build'
'''Name of the default Zephyr build directory.'''
DEFAULT_CMAKE_GENERATOR = 'Ninja'
'''Name of the default CMake generator.'''
FIND_BUILD_DIR_DESCRIPTION = '''\
If the build directory is not given, the default is {}/ unless the
build.dir-fmt configuration variable is set. The current directory is
checked after that. If either is a Zephyr build directory, it is used.
'''.format(DEFAULT_BUILD_DIR)
def _resolve_build_dir(fmt, guess, cwd, **kwargs):
    '''Resolve the build.dir-fmt format string *fmt* to a concrete path.

    kwargs supplies values for the {placeholders} that may appear in
    *fmt* (e.g. source_dir, board).  If a placeholder has no value and
    *guess* is falsy, returns None.  With *guess* set, walk *fmt*
    component by component and, on a missing key, try to match a
    unique existing sub-directory on disk instead.  Returns a path
    string, or None if resolution fails.
    '''
    # Remove any None values, we do not want 'None' as a string
    kwargs = {k: v for k, v in kwargs.items() if v is not None}
    # Check if source_dir is below cwd first
    source_dir = kwargs.get('source_dir')
    if source_dir:
        if escapes_directory(cwd, source_dir):
            kwargs['source_dir'] = os.path.relpath(source_dir, cwd)
        else:
            # no meaningful relative path possible
            kwargs['source_dir'] = ''
    try:
        return fmt.format(**kwargs)
    except KeyError:
        if not guess:
            return None

    # Guess the build folder by iterating through all sub-folders from the
    # root of the format string and trying to resolve. If resolving fails,
    # proceed to iterate over subfolders only if there is a single folder
    # present on each iteration.
    parts = Path(fmt).parts
    b = Path('.')
    for p in parts:
        # default to cwd in the first iteration
        curr = b
        b = b.joinpath(p)
        try:
            # if fmt is an absolute path, the first iteration will always
            # resolve '/'
            b = Path(str(b).format(**kwargs))
        except KeyError:
            # Missing key, check sub-folders and match if a single one exists
            while True:
                if not curr.exists():
                    return None
                dirs = [f for f in curr.iterdir() if f.is_dir()]
                if len(dirs) != 1:
                    return None
                curr = dirs[0]
                if is_zephyr_build(str(curr)):
                    return str(curr)
    return str(b)
def find_build_dir(dir, guess=False, **kwargs):
    '''Heuristic for finding a build directory.

    If *dir* is truthy it wins.  Otherwise the default is computed
    from the build.dir-fmt configuration option (falling back to
    DEFAULT_BUILD_DIR); if that resolves to a Zephyr build directory
    it is used, then the current working directory is tried, and
    finally the (possibly None) default is returned.  The result is
    absolutized, or None if nothing could be resolved.
    '''
    if dir:
        found = dir
    else:
        cwd = os.getcwd()
        fmt = config.get('build', 'dir-fmt', fallback=DEFAULT_BUILD_DIR)
        default = _resolve_build_dir(fmt, guess, cwd, **kwargs)
        log.dbg('config dir-fmt: {}'.format(default), level=log.VERBOSE_EXTREME)
        if default and is_zephyr_build(default):
            found = default
        elif is_zephyr_build(cwd):
            found = cwd
        else:
            found = default
    log.dbg('build dir: {}'.format(found), level=log.VERBOSE_EXTREME)
    return os.path.abspath(found) if found else None
def is_zephyr_build(path):
    '''Return true if and only if `path` appears to be a valid Zephyr
    build directory.

    "Valid" means the given path is a directory which contains a CMake
    cache with a 'ZEPHYR_BASE' or 'ZEPHYR_TOOLCHAIN_VARIANT' variable.

    (ZEPHYR_BASE was added to the cache sometime after Zephyr 2.4; the
    ZEPHYR_TOOLCHAIN_VARIANT check is kept for compatibility with
    versions 2.2 and earlier, which didn't have ZEPHYR_BASE in cache.)
    '''
    try:
        cache = zcmake.CMakeCache.from_build_dir(path)
    except FileNotFoundError:
        # No CMake cache at all: treat as "not a build directory".
        cache = {}

    if 'ZEPHYR_BASE' in cache or 'ZEPHYR_TOOLCHAIN_VARIANT' in cache:
        log.dbg(f'{path} is a zephyr build directory',
                level=log.VERBOSE_EXTREME)
        return True

    log.dbg(f'{path} is NOT a valid zephyr build directory',
            level=log.VERBOSE_EXTREME)
    return False
def load_domains(path):
    '''Load a Domains object for the build directory *path*.

    Reads domains.yaml when present; otherwise synthesizes a single
    'app' domain referring to the top-level build folder.
    '''
    domains_file = Path(path) / 'domains.yaml'
    if domains_file.is_file():
        return Domains.from_file(domains_file)
    # No domains.yaml: fall back to a one-domain description.
    return Domains.from_yaml(f'''\
default: app
build_dir: {path}
domains:
- name: app
  build_dir: {path}
flash_order:
- app
''')
``` | /content/code_sandbox/scripts/west_commands/build_helpers.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,292 |
```python
#
import argparse
import os
from pathlib import Path
import re
import sys
import textwrap
from west import log
from west.commands import WestCommand
from zephyr_ext_common import ZEPHYR_BASE
sys.path.append(os.fspath(Path(__file__).parent.parent))
import list_boards
import zephyr_module
class Boards(WestCommand):
    '''west boards: list the boards known to this zephyr workspace.'''

    def __init__(self):
        super().__init__(
            'boards',
            # Keep this in sync with the string in west-commands.yml.
            'display information about supported boards',
            'Display information about boards',
            accepts_unknown_args=False)

    def do_add_parser(self, parser_adder):
        '''Build the parser; the epilog documents the --format keys.'''
        default_fmt = '{name}'
        parser = parser_adder.add_parser(
            self.name,
            help=self.help,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description,
            epilog=textwrap.dedent(f'''\
            FORMAT STRINGS
            --------------
            Boards are listed using a Python 3 format string. Arguments
            to the format string are accessed by name.
            The default format string is:
            "{default_fmt}"
            The following arguments are available:
            - name: board name
            - qualifiers: board qualifiers (will be empty for legacy boards)
            - arch: board architecture (deprecated)
            (arch is ambiguous for boards described in new hw model)
            - dir: directory that contains the board definition
            - vendor: board vendor
            '''))

        # Remember to update west-completion.bash if you add or remove
        # flags
        parser.add_argument('-f', '--format', default=default_fmt,
                            help='''Format string to use to list each board;
                            see FORMAT STRINGS below.''')
        parser.add_argument('-n', '--name', dest='name_re',
                            help='''a regular expression; only boards whose
                            names match NAME_RE will be listed''')
        list_boards.add_args(parser)

        return parser

    def do_run(self, args, _):
        '''List matching boards: legacy (hw model v1) first, then v2.'''
        if args.name_re is not None:
            name_re = re.compile(args.name_re)
        else:
            name_re = None

        # Search roots: zephyr itself plus any roots contributed by
        # zephyr modules via their build settings.
        module_settings = {
            'arch_root': [ZEPHYR_BASE],
            'board_root': [ZEPHYR_BASE],
            'soc_root': [ZEPHYR_BASE],
        }

        for module in zephyr_module.parse_modules(ZEPHYR_BASE, self.manifest):
            for key in module_settings:
                root = module.meta.get('build', {}).get('settings', {}).get(key)
                if root is not None:
                    module_settings[key].append(Path(module.project) / root)

        args.arch_roots += module_settings['arch_root']
        args.board_roots += module_settings['board_root']
        args.soc_roots += module_settings['soc_root']

        # Legacy boards: qualifiers is always empty here.
        # NOTE(review): this call does not supply a 'vendor' argument,
        # so a --format using {vendor} would raise for legacy boards —
        # confirm whether that is intended.
        for board in list_boards.find_boards(args):
            if name_re is not None and not name_re.search(board.name):
                continue
            log.inf(args.format.format(name=board.name, arch=board.arch,
                                       dir=board.dir, hwm=board.hwm, qualifiers=''))

        # Hardware model v2 boards: vendor and qualifiers available.
        for board in list_boards.find_v2_boards(args):
            if name_re is not None and not name_re.search(board.name):
                continue
            log.inf(
                args.format.format(
                    name=board.name,
                    dir=board.dir,
                    hwm=board.hwm,
                    vendor=board.vendor,
                    qualifiers=list_boards.board_v2_qualifiers_csv(board),
                )
            )
``` | /content/code_sandbox/scripts/west_commands/boards.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 741 |
```python
#
import argparse
from pathlib import Path
from shutil import rmtree
from west.commands import WestCommand
from west import log
from zcmake import run_cmake
EXPORT_DESCRIPTION = '''\
This command registers the current Zephyr installation as a CMake
config package in the CMake user package registry.
In Windows, the CMake user package registry is found in:
HKEY_CURRENT_USER\\Software\\Kitware\\CMake\\Packages\\
In Linux and MacOS, the CMake user package registry is found in:
~/.cmake/packages/'''
class ZephyrExport(WestCommand):
    '''West command that registers this Zephyr tree as a CMake config
    package in the CMake user package registry.'''

    def __init__(self):
        super().__init__(
            'zephyr-export',
            # Keep this in sync with the string in west-commands.yml.
            'export Zephyr installation as a CMake config package',
            EXPORT_DESCRIPTION,
            accepts_unknown_args=False)

    def do_add_parser(self, parser_adder):
        '''Create the (argument-less) parser for this command.'''
        return parser_adder.add_parser(
            self.name,
            help=self.help,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description)

    def do_run(self, args, unknown_args):
        '''Register both the zephyr and zephyrunittest packages.'''
        # The 'share' subdirectory of the top level zephyr repository.
        share = Path(__file__).parents[2] / 'share'

        for package in ('zephyr-package', 'zephyrunittest-package'):
            run_cmake_export(share / package / 'cmake')
run_cmake_export(share / 'zephyrunittest-package' / 'cmake')
def run_cmake_export(path):
    '''Run the package installation script under *path* and print only
    its important output.

    Lines starting with '-- ' are normal CMake status chatter and are
    filtered out.
    '''
    lines = run_cmake(['-P', str(path / 'zephyr_export.cmake')],
                      capture_output=True)
    important = (line for line in lines if not line.startswith('-- '))
    log.inf('\n'.join(important))
def remove_if_exists(pathobj):
    '''Delete *pathobj* — a file or a whole directory tree — if it
    exists; silently do nothing otherwise.'''
    if pathobj.is_file():
        log.inf(f'- removing: {pathobj}')
        pathobj.unlink()
        return
    if pathobj.is_dir():
        log.inf(f'- removing: {pathobj}')
        rmtree(pathobj)
``` | /content/code_sandbox/scripts/west_commands/export.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 468 |
```python
#
import abc
import argparse
import os
import pathlib
import pickle
import platform
import shutil
import shlex
import subprocess
import sys
from west import log
from west import manifest
from west.util import quote_sh_list
from build_helpers import find_build_dir, is_zephyr_build, \
FIND_BUILD_DIR_DESCRIPTION
from runners.core import BuildConfiguration
from zcmake import CMakeCache
from zephyr_ext_common import Forceable, ZEPHYR_SCRIPTS
# This is needed to load edt.pickle files.
sys.path.insert(0, str(ZEPHYR_SCRIPTS / 'dts' / 'python-devicetree' / 'src'))
SIGN_DESCRIPTION = '''\
This command automates some of the drudgery of creating signed Zephyr
binaries for chain-loading by a bootloader.
In the simplest usage, run this from your build directory:
west sign -t your_tool -- ARGS_FOR_YOUR_TOOL
The "ARGS_FOR_YOUR_TOOL" value can be any additional arguments you want to
pass to the tool, such as the location of a signing key etc. Depending on
which sort of ARGS_FOR_YOUR_TOOLS you use, the `--` separator/sentinel may
not always be required. To avoid ambiguity and having to find and
understand POSIX 12.2 Guideline 10, always use `--`.
See tool-specific help below for details.'''
SIGN_EPILOG = '''\
imgtool
-------
To build a signed binary you can load with MCUboot using imgtool,
run this from your build directory:
west sign -t imgtool -- --key YOUR_SIGNING_KEY.pem
For this to work, either imgtool must be installed (e.g. using pip3),
or you must pass the path to imgtool.py using the -p option.
Assuming your binary was properly built for processing and handling by
imgtool, this creates zephyr.signed.bin and zephyr.signed.hex
files which are ready for use by your bootloader.
The version number, image header size, alignment, and slot sizes are
determined from the build directory using .config and the device tree.
As shown above, extra arguments after a '--' are passed to imgtool
directly.
rimage
------
To create a signed binary with the rimage tool, run this from your build
directory:
west sign -t rimage -- -k YOUR_SIGNING_KEY.pem
For this to work, either rimage must be installed or you must pass
the path to rimage using the -p option.
You can also pass additional arguments to rimage thanks to [sign] and
[rimage] sections in your west config file(s); this is especially useful
when invoking west sign _indirectly_ through CMake/ninja. See how at
path_to_url
'''
def config_get_words(west_config, section_key, fallback=None):
    '''Look up *section_key* in the west config and split its value
    into words with shlex; return *fallback* when the key is unset.'''
    raw = west_config.get(section_key)
    log.dbg(f'west config {section_key}={raw}')
    if raw is None:
        return fallback
    return shlex.split(raw)
def config_get(west_config, section_key, fallback=None):
    '''Like config_get_words(), but require exactly one word; dies on
    a multi-word value.'''
    words = config_get_words(west_config, section_key)
    if words is None:
        return fallback
    if len(words) != 1:
        log.die(f'Single word expected for: {section_key}={words}. Use quotes?')
    return words[0]
class ToggleAction(argparse.Action):
    '''Map paired --foo / --no-foo flags onto one boolean dest.

    The option spelling decides the stored value: a spelling that
    starts with "--no-" stores False, anything else stores True.
    '''
    def __call__(self, parser, args, ignored, option):
        enabled = not option.startswith('--no-')
        setattr(args, self.dest, enabled)
class Sign(Forceable):
def __init__(self):
super(Sign, self).__init__(
'sign',
# Keep this in sync with the string in west-commands.yml.
'sign a Zephyr binary for bootloader chain-loading',
SIGN_DESCRIPTION,
accepts_unknown_args=False)
def do_add_parser(self, parser_adder):
parser = parser_adder.add_parser(
self.name,
epilog=SIGN_EPILOG,
help=self.help,
formatter_class=argparse.RawDescriptionHelpFormatter,
description=self.description)
parser.add_argument('-d', '--build-dir',
help=FIND_BUILD_DIR_DESCRIPTION)
parser.add_argument('-q', '--quiet', action='store_true',
help='suppress non-error output')
self.add_force_arg(parser)
# general options
group = parser.add_argument_group('tool control options')
group.add_argument('-t', '--tool', choices=['imgtool', 'rimage'],
help='''image signing tool name; imgtool and rimage
are currently supported''')
group.add_argument('-p', '--tool-path', default=None,
help='''path to the tool itself, if needed''')
group.add_argument('-D', '--tool-data', default=None,
help='''path to a tool-specific data/configuration directory, if needed''')
group.add_argument('--if-tool-available', action='store_true',
help='''Do not fail if the rimage tool is not found or the rimage signing
schema (rimage "target") is not defined in board.cmake.''')
group.add_argument('tool_args', nargs='*', metavar='tool_opt',
help='extra option(s) to pass to the signing tool')
# bin file options
group = parser.add_argument_group('binary (.bin) file options')
group.add_argument('--bin', '--no-bin', dest='gen_bin', nargs=0,
action=ToggleAction,
help='''produce a signed .bin file?
(default: yes, if supported and unsigned bin
exists)''')
group.add_argument('-B', '--sbin', metavar='BIN',
help='''signed .bin file name
(default: zephyr.signed.bin in the build
directory, next to zephyr.bin)''')
# hex file options
group = parser.add_argument_group('Intel HEX (.hex) file options')
group.add_argument('--hex', '--no-hex', dest='gen_hex', nargs=0,
action=ToggleAction,
help='''produce a signed .hex file?
(default: yes, if supported and unsigned hex
exists)''')
group.add_argument('-H', '--shex', metavar='HEX',
help='''signed .hex file name
(default: zephyr.signed.hex in the build
directory, next to zephyr.hex)''')
return parser
    def do_run(self, args, ignored):
        """Run 'west sign': locate the build, choose formats, delegate to a signer.

        :param args: parsed arguments from do_add_parser()
        :param ignored: unknown trailing arguments (unused here)
        """
        self.args = args        # for check_force
        # Find the build directory and parse .config and DT.
        build_dir = find_build_dir(args.build_dir)
        self.check_force(os.path.isdir(build_dir),
                         'no such build directory {}'.format(build_dir))
        self.check_force(is_zephyr_build(build_dir),
                         "build directory {} doesn't look like a Zephyr build "
                         'directory'.format(build_dir))
        build_conf = BuildConfiguration(build_dir)
        # Fall back on the west configuration when --tool was not given.
        if not args.tool:
            args.tool = config_get(self.config, 'sign.tool')
        # Decide on output formats. gen_bin/gen_hex are tri-state
        # (True/False/None); None means "generate it if the unsigned
        # artifact is configured to be built".
        formats = []
        bin_exists = build_conf.getboolean('CONFIG_BUILD_OUTPUT_BIN')
        if args.gen_bin:
            self.check_force(bin_exists,
                             '--bin given but CONFIG_BUILD_OUTPUT_BIN not set '
                             "in build directory's ({}) .config".
                             format(build_dir))
            formats.append('bin')
        elif args.gen_bin is None and bin_exists:
            formats.append('bin')
        hex_exists = build_conf.getboolean('CONFIG_BUILD_OUTPUT_HEX')
        if args.gen_hex:
            self.check_force(hex_exists,
                             '--hex given but CONFIG_BUILD_OUTPUT_HEX not set '
                             "in build directory's ({}) .config".
                             format(build_dir))
            formats.append('hex')
        elif args.gen_hex is None and hex_exists:
            formats.append('hex')
        # Delegate to the signer.
        if args.tool == 'imgtool':
            if args.if_tool_available:
                log.die('imgtool does not support --if-tool-available')
            signer = ImgtoolSigner()
        elif args.tool == 'rimage':
            signer = RimageSigner()
        # (Add support for other signers here in elif blocks)
        else:
            if args.tool is None:
                log.die('one --tool is required')
            else:
                log.die(f'invalid tool: {args.tool}')
        signer.sign(self, build_dir, build_conf, formats)
class Signer(abc.ABC):
    '''Abstract base class shared by all signing back ends.

    Supporting a new signing tool means subclassing Signer and wiring the
    new subclass into the Sign.do_run() dispatch.'''

    @abc.abstractmethod
    def sign(self, command, build_dir, build_conf, formats):
        '''Produce the signed artifacts; every concrete signer overrides this.

        :param command: the Sign instance
        :param build_dir: the build directory
        :param build_conf: BuildConfiguration for build directory
        :param formats: list of formats to generate ('bin', 'hex')
        '''
class ImgtoolSigner(Signer):
    '''Signer that wraps MCUboot's imgtool to produce signed .bin/.hex files.'''

    def sign(self, command, build_dir, build_conf, formats):
        '''Sign the unsigned kernel .bin/.hex with imgtool.

        Pulls the image version and header size from Kconfig, the flash
        write alignment and slot geometry from the build's devicetree,
        then invokes imgtool once per requested format.
        '''
        if not formats:
            return
        args = command.args
        b = pathlib.Path(build_dir)
        imgtool = self.find_imgtool(command, args)
        # The vector table offset and application version are set in Kconfig:
        appver = self.get_cfg(command, build_conf, 'CONFIG_MCUBOOT_IMGTOOL_SIGN_VERSION')
        vtoff = self.get_cfg(command, build_conf, 'CONFIG_ROM_START_OFFSET')
        # Flash device write alignment and the partition's slot size
        # come from devicetree:
        flash = self.edt_flash_node(b, args.quiet)
        align, addr, size = self.edt_flash_params(flash)
        if not build_conf.getboolean('CONFIG_BOOTLOADER_MCUBOOT'):
            log.wrn("CONFIG_BOOTLOADER_MCUBOOT is not set to y in "
                    f"{build_conf.path}; this probably won't work")
        kernel = build_conf.get('CONFIG_KERNEL_BIN_NAME', 'zephyr')
        if 'bin' in formats:
            in_bin = b / 'zephyr' / f'{kernel}.bin'
            if not in_bin.is_file():
                log.die(f"no unsigned .bin found at {in_bin}")
            in_bin = os.fspath(in_bin)
        else:
            in_bin = None
        if 'hex' in formats:
            in_hex = b / 'zephyr' / f'{kernel}.hex'
            if not in_hex.is_file():
                log.die(f"no unsigned .hex found at {in_hex}")
            in_hex = os.fspath(in_hex)
        else:
            in_hex = None
        if not args.quiet:
            log.banner('image configuration:')
            log.inf('partition offset: {0} (0x{0:x})'.format(addr))
            log.inf('partition size: {0} (0x{0:x})'.format(size))
            log.inf('rom start offset: {0} (0x{0:x})'.format(vtoff))
        # Base sign command.
        sign_base = imgtool + ['sign',
                               '--version', str(appver),
                               '--align', str(align),
                               '--header-size', str(vtoff),
                               '--slot-size', str(size)]
        # User-provided tool options come last so they can override ours.
        sign_base.extend(args.tool_args)
        if not args.quiet:
            log.banner('signing binaries')
        if in_bin:
            out_bin = args.sbin or str(b / 'zephyr' / 'zephyr.signed.bin')
            sign_bin = sign_base + [in_bin, out_bin]
            if not args.quiet:
                log.inf(f'unsigned bin: {in_bin}')
                log.inf(f'signed bin: {out_bin}')
                log.dbg(quote_sh_list(sign_bin))
            subprocess.check_call(sign_bin, stdout=subprocess.PIPE if args.quiet else None)
        if in_hex:
            out_hex = args.shex or str(b / 'zephyr' / 'zephyr.signed.hex')
            sign_hex = sign_base + [in_hex, out_hex]
            if not args.quiet:
                log.inf(f'unsigned hex: {in_hex}')
                log.inf(f'signed hex: {out_hex}')
                log.dbg(quote_sh_list(sign_hex))
            subprocess.check_call(sign_hex, stdout=subprocess.PIPE if args.quiet else None)

    @staticmethod
    def find_imgtool(command, args):
        '''Locate imgtool (honoring --tool-path) and return it as an argv prefix.'''
        if args.tool_path:
            imgtool = args.tool_path
            if not os.path.isfile(imgtool):
                log.die(f'--tool-path {imgtool}: no such file')
        else:
            imgtool = shutil.which('imgtool') or shutil.which('imgtool.py')
        if not imgtool:
            log.die('imgtool not found; either install it',
                    '(e.g. "pip3 install imgtool") or provide --tool-path')
        if platform.system() == 'Windows' and imgtool.endswith('.py'):
            # Windows users may not be able to run .py files
            # as executables in subprocesses, regardless of
            # what the mode says. Always run imgtool as
            # 'python path/to/imgtool.py' instead of
            # 'path/to/imgtool.py' in these cases.
            # NOTE(review): upstream linked a reference URL here
            # (redacted as "path_to_url").
            return [sys.executable, imgtool]
        return [imgtool]

    @staticmethod
    def get_cfg(command, build_conf, item):
        '''Return build_conf[item]; on a missing key, check_force then None.'''
        try:
            return build_conf[item]
        except KeyError:
            command.check_force(
                False, "build .config is missing a {} value".format(item))
            return None

    @staticmethod
    def edt_flash_node(b, quiet=False):
        '''Return the EDT node chosen as zephyr,flash for build directory b.'''
        # Get the EDT Node corresponding to the zephyr,flash chosen DT
        # node; 'b' is the build directory as a pathlib object.
        # Ensure the build directory has a compiled DTS file
        # where we expect it to be.
        dts = b / 'zephyr' / 'zephyr.dts'
        if not quiet:
            log.dbg('DTS file:', dts, level=log.VERBOSE_VERY)
        edt_pickle = b / 'zephyr' / 'edt.pickle'
        if not edt_pickle.is_file():
            log.die("can't load devicetree; expected to find:", edt_pickle)
        # Load the devicetree.
        with open(edt_pickle, 'rb') as f:
            edt = pickle.load(f)
        # By convention, the zephyr,flash chosen node contains the
        # partition information about the zephyr image to sign.
        flash = edt.chosen_node('zephyr,flash')
        if not flash:
            log.die('devicetree has no chosen zephyr,flash node;',
                    "can't infer flash write block or slot0_partition slot sizes")
        return flash

    @staticmethod
    def edt_flash_params(flash):
        '''Return (align, addr, size) for imgtool from the flash DT node.'''
        # Get the flash device's write alignment and offset from the
        # slot0_partition and the size from slot1_partition , out of the
        # build directory's devicetree. slot1_partition size is used,
        # when available, because in swap-move mode it can be one sector
        # smaller. When not available, fallback to slot0_partition (single slot dfu).
        # The node must have a "partitions" child node, which in turn
        # must have child nodes with label slot0_partition and may have a child node
        # with label slot1_partition. By convention, the slots for consumption by
        # imgtool are linked into these partitions.
        if 'partitions' not in flash.children:
            log.die("DT zephyr,flash chosen node has no partitions,",
                    "can't find partitions for MCUboot slots")
        partitions = flash.children['partitions']
        # Map slot labels to their partition nodes (a node may carry
        # several labels, hence the nested comprehension).
        slots = {
            label: node for node in partitions.children.values()
            for label in node.labels
            if label in set(['slot0_partition', 'slot1_partition'])
        }
        if 'slot0_partition' not in slots:
            log.die("DT zephyr,flash chosen node has no slot0_partition partition,",
                    "can't determine its address")
        # Die on missing or zero alignment or slot_size.
        if "write-block-size" not in flash.props:
            log.die('DT zephyr,flash node has no write-block-size;',
                    "can't determine imgtool write alignment")
        align = flash.props['write-block-size'].val
        if align == 0:
            log.die('expected nonzero flash alignment, but got '
                    'DT flash device write-block-size {}'.format(align))
        # The partitions node, and its subnode, must provide
        # the size of slot1_partition or slot0_partition partition via the regs property.
        slot_key = 'slot1_partition' if 'slot1_partition' in slots else 'slot0_partition'
        if not slots[slot_key].regs:
            log.die(f'{slot_key} flash partition has no regs property;',
                    "can't determine size of slot")
        # always use addr of slot0_partition, which is where slots are run
        addr = slots['slot0_partition'].regs[0].addr
        size = slots[slot_key].regs[0].size
        if size == 0:
            log.die('expected nonzero slot size for {}'.format(slot_key))
        return (align, addr, size)
class RimageSigner(Signer):
    '''Signer that wraps SOF's rimage tool (Intel ADSP / i.MX8 audio DSPs).'''

    def rimage_config_dir(self):
        'Returns the rimage/config/ directory with the highest precedence'
        # Precedence: --tool-data, then the RIMAGE_CONFIG_PATH CMake cache
        # entry, then the default location inside the sof source tree.
        args = self.command.args
        if args.tool_data:
            conf_dir = pathlib.Path(args.tool_data)
        elif self.cmake_cache.get('RIMAGE_CONFIG_PATH'):
            conf_dir = pathlib.Path(self.cmake_cache['RIMAGE_CONFIG_PATH'])
        else:
            conf_dir = self.sof_src_dir / 'tools' / 'rimage' / 'config'
        self.command.dbg(f'rimage config directory={conf_dir}')
        return conf_dir

    def preprocess_toml(self, config_dir, toml_basename, subdir):
        'Runs the C pre-processor on config_dir/toml_basename.h'
        compiler_path = self.cmake_cache.get("CMAKE_C_COMPILER")
        preproc_cmd = [compiler_path, '-E', str(config_dir / (toml_basename + '.h'))]
        # -P removes line markers to keep the .toml output reproducible. To
        # trace #includes, temporarily comment out '-P' (-f*-prefix-map
        # unfortunately don't seem to make any difference here and they're
        # gcc-specific)
        preproc_cmd += ['-P']
        # "REM" escapes _leading_ '#' characters from cpp and allows
        # such comments to be preserved in generated/*.toml files:
        #
        #    REM # my comment...
        #
        # Note _trailing_ '#' characters and comments are ignored by cpp
        # and don't need any REM trick.
        preproc_cmd += ['-DREM=']
        preproc_cmd += ['-I', str(self.sof_src_dir / 'src')]
        preproc_cmd += ['-imacros',
                        str(pathlib.Path('zephyr') / 'include' / 'generated' / 'zephyr' / 'autoconf.h')]
        preproc_cmd += ['-o', str(subdir / 'rimage_config.toml')]
        self.command.inf(quote_sh_list(preproc_cmd))
        subprocess.run(preproc_cmd, check=True, cwd=self.build_dir)

    def sign(self, command, build_dir, build_conf, formats):
        '''Sign the built image with rimage.

        Resolves the rimage target and tool, assembles the rimage command
        line (key, toml config, user args), runs it, and finally prefixes
        the signed binary with the extended manifest when applicable.
        '''
        self.command = command
        args = command.args
        b = pathlib.Path(build_dir)
        self.build_dir = b
        cache = CMakeCache.from_build_dir(build_dir)
        self.cmake_cache = cache
        # Warning: RIMAGE_TARGET in Zephyr is a duplicate of
        # CONFIG_RIMAGE_SIGNING_SCHEMA in SOF.
        target = cache.get('RIMAGE_TARGET')
        if not target:
            msg = 'rimage target not defined in board.cmake'
            if args.if_tool_available:
                log.inf(msg)
                sys.exit(0)
            else:
                log.die(msg)
        kernel_name = build_conf.get('CONFIG_KERNEL_BIN_NAME', 'zephyr')
        # TODO: make this a new sign.py --bootloader option.
        if target in ('imx8', 'imx8m', 'imx8ulp'):
            bootloader = None
            kernel = str(b / 'zephyr' / f'{kernel_name}.elf')
            out_bin = str(b / 'zephyr' / f'{kernel_name}.ri')
            out_xman = str(b / 'zephyr' / f'{kernel_name}.ri.xman')
            out_tmp = str(b / 'zephyr' / f'{kernel_name}.rix')
        else:
            bootloader = str(b / 'zephyr' / 'boot.mod')
            kernel = str(b / 'zephyr' / 'main.mod')
            out_bin = str(b / 'zephyr' / f'{kernel_name}.ri')
            out_xman = str(b / 'zephyr' / f'{kernel_name}.ri.xman')
            out_tmp = str(b / 'zephyr' / f'{kernel_name}.rix')
        # Clean any stale output. This is especially important when using --if-tool-available
        # (but not just)
        for o in [ out_bin, out_xman, out_tmp ]:
            pathlib.Path(o).unlink(missing_ok=True)
        tool_path = (
            args.tool_path if args.tool_path else
            config_get(command.config, 'rimage.path', None)
        )
        err_prefix = '--tool-path' if args.tool_path else 'west config'
        if tool_path:
            command.check_force(shutil.which(tool_path),
                                f'{err_prefix} {tool_path}: not an executable')
        else:
            tool_path = shutil.which('rimage')
            if not tool_path:
                err_msg = 'rimage not found; either install it or provide --tool-path'
                if args.if_tool_available:
                    log.wrn(err_msg)
                    log.wrn('zephyr binary _not_ signed!')
                    return
                else:
                    log.die(err_msg)
        #### -c sof/rimage/config/signing_schema.toml ####
        if not args.quiet:
            log.inf('Signing with tool {}'.format(tool_path))
        try:
            sof_proj = command.manifest.get_projects(['sof'], allow_paths=False)
            sof_src_dir = pathlib.Path(sof_proj[0].abspath)
        except ValueError: # sof is the manifest
            # NOTE(review): relies on a module-level 'manifest' name
            # (presumably 'from west import manifest') -- verify the import
            # exists at the top of this file.
            sof_src_dir = pathlib.Path(manifest.manifest_path()).parent
        self.sof_src_dir = sof_src_dir
        log.inf('Signing for SOC target ' + target)
        # FIXME: deprecate --no-manifest and replace it with a much
        # simpler and more direct `-- -e` which the user can _already_
        # pass today! With unclear consequences right now...
        if '--no-manifest' in args.tool_args:
            no_manifest = True
            args.tool_args.remove('--no-manifest')
        else:
            no_manifest = False
        # Non-SOF build does not have extended manifest data for
        # rimage to process, which might result in rimage error.
        # So skip it when not doing SOF builds.
        is_sof_build = build_conf.getboolean('CONFIG_SOF')
        if not is_sof_build:
            no_manifest = True
        if no_manifest:
            extra_ri_args = [ ]
        else:
            extra_ri_args = ['-e']
        sign_base = [tool_path]
        # Align rimage verbosity.
        # Sub-command arg 'west sign -q' takes precedence over west '-v'
        if not args.quiet and args.verbose:
            sign_base += ['-v'] * args.verbose
        components = [ ] if bootloader is None else [ bootloader ]
        components += [ kernel ]
        sign_config_extra_args = config_get_words(command.config, 'rimage.extra-args', [])
        if '-k' not in sign_config_extra_args + args.tool_args:
            # rimage requires a key argument even when it does not sign
            cmake_default_key = cache.get('RIMAGE_SIGN_KEY', 'key placeholder from sign.py')
            extra_ri_args += [ '-k', str(sof_src_dir / 'keys' / cmake_default_key) ]
        if args.tool_data and '-c' in args.tool_args:
            log.wrn('--tool-data ' + args.tool_data + ' ignored! Overridden by: -- -c ... ')
        if '-c' not in sign_config_extra_args + args.tool_args:
            conf_dir = self.rimage_config_dir()
            toml_basename = target + '.toml'
            if ((conf_dir / toml_basename).exists() and
               (conf_dir / (toml_basename + '.h')).exists()):
                command.die(f"Cannot have both {toml_basename + '.h'} and {toml_basename} in {conf_dir}")
            if (conf_dir / (toml_basename + '.h')).exists():
                generated_subdir = pathlib.Path('zephyr') / 'misc' / 'generated'
                self.preprocess_toml(conf_dir, toml_basename, generated_subdir)
                extra_ri_args += ['-c', str(b / generated_subdir / 'rimage_config.toml')]
            else:
                toml_dir = conf_dir
                extra_ri_args += ['-c', str(toml_dir / toml_basename)]
        # Warning: while not officially supported (yet?), the rimage --option that is last
        # on the command line currently wins in case of duplicate options. So pay
        # attention to the _args order below.
        sign_base += (['-o', out_bin] + sign_config_extra_args +
                      extra_ri_args + args.tool_args + components)
        command.inf(quote_sh_list(sign_base))
        subprocess.check_call(sign_base)
        # Unless disabled, prepend the extended manifest to the signed
        # binary by concatenating the .xman and .ri outputs.
        if no_manifest:
            filenames = [out_bin]
        else:
            filenames = [out_xman, out_bin]
            if not args.quiet:
                log.inf('Prefixing ' + out_bin + ' with manifest ' + out_xman)
        with open(out_tmp, 'wb') as outfile:
            for fname in filenames:
                with open(fname, 'rb') as infile:
                    outfile.write(infile.read())
        os.remove(out_bin)
        os.rename(out_tmp, out_bin)
``` | /content/code_sandbox/scripts/west_commands/sign.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 5,696 |
```python
#
'''Common definitions for building Zephyr applications with CMake.
This provides some default settings and convenience wrappers for
building Zephyr applications needed by multiple commands.
See build.py for the build command itself.
'''
from collections import OrderedDict
import os.path
import re
import subprocess
import shutil
import sys
import packaging.version
from west import log
from west.util import quote_sh_list
DEFAULT_CACHE = 'CMakeCache.txt'
DEFAULT_CMAKE_GENERATOR = 'Ninja'
'''Name of the default CMake generator.'''
def run_cmake(args, cwd=None, capture_output=False, dry_run=False):
    '''Run cmake to (re)generate a build system, a script, etc.

    :param args: arguments to pass to CMake
    :param cwd: directory to run CMake in, cwd is default
    :param capture_output: if True, the output is returned instead of being
                           displayed (None is returned by default, or if
                           dry_run is also True)
    :param dry_run: don't actually execute the command, just print what
                    would have been run

    If capture_output is set to True, returns the output of the command instead
    of displaying it on stdout/stderr..'''
    cmake = shutil.which('cmake')
    if cmake is None and not dry_run:
        log.die('CMake is not installed or cannot be found; cannot build.')
    _ensure_min_version(cmake, dry_run)

    cmd = [cmake] + args
    kwargs = {}
    if capture_output:
        # CMake sends message() output to stderr unless it's STATUS;
        # fold stderr into the captured stdout stream.
        kwargs['stdout'] = subprocess.PIPE
        kwargs['stderr'] = subprocess.STDOUT
    if cwd:
        kwargs['cwd'] = cwd

    if dry_run:
        in_cwd = ' (in {})'.format(cwd) if cwd else ''
        log.inf('Dry run{}:'.format(in_cwd), quote_sh_list(cmd))
        return None

    log.dbg('Running CMake:', quote_sh_list(cmd), level=log.VERBOSE_NORMAL)
    proc = subprocess.Popen(cmd, **kwargs)
    out, _ = proc.communicate()
    if proc.returncode != 0:
        # A real error occurred, raise an exception
        raise subprocess.CalledProcessError(proc.returncode, proc.args)
    return out.decode(sys.getdefaultencoding()).splitlines() if out else None
def run_build(build_directory, **kwargs):
    '''Run cmake in build tool mode.

    :param build_directory: runs "cmake --build build_directory"
    :param extra_args: optional kwarg. List of additional CMake arguments;
                       these come after "--build <build_directory>"
                       on the command line.

    Any additional keyword arguments are passed as-is to run_cmake().
    '''
    extra = kwargs.pop('extra_args', [])
    return run_cmake(['--build', build_directory] + extra, **kwargs)
def make_c_identifier(string):
    '''Make a C identifier from a string in the same way CMake does.
    '''
    # string(MAKE_C_IDENTIFIER ...) is not precisely documented; the
    # observed behavior is: every character outside [A-Za-z0-9_] becomes
    # '_', and a leading '_' is prepended when the result would not start
    # with a letter or underscore (including the empty string).
    prefix = '' if re.match(r'[A-Za-z_]', string) else '_'
    return prefix + re.sub(r'[^A-Za-z0-9_]', '_', string)
class CMakeCacheEntry:
    '''Represents a CMake cache entry.

    This class understands the type system in a CMakeCache.txt, and
    converts the following cache types to Python types:

        Cache Type    Python type
        ----------    -------------------------------------------
        FILEPATH      str
        PATH          str
        STRING        str OR list of str (if ';' is in the value)
        BOOL          bool
        INTERNAL      str OR list of str (if ';' is in the value)
        STATIC        str OR list of str (if ';' is in the value)
        UNINITIALIZED str OR list of str (if ';' is in the value)
        ----------    -------------------------------------------
    '''

    # Regular expression for a cache entry.
    #
    # CMake variable names can include escape characters, allowing a
    # wider set of names than is easy to match with a regular
    # expression. To be permissive here, use a non-greedy match up to
    # the first colon (':'). This breaks if the variable name has a
    # colon inside, but it's good enough.
    CACHE_ENTRY = re.compile(
        r'''(?P<name>.*?)                                   # name
         :(?P<type>FILEPATH|PATH|STRING|BOOL|INTERNAL|STATIC|UNINITIALIZED)  # type
         =(?P<value>.*)                                     # value
        ''', re.X)

    @classmethod
    def _to_bool(cls, val):
        # CMake's truth rules: 1, ON, YES, TRUE, Y and non-zero numbers
        # are true; 0, OFF, NO, FALSE, N, IGNORE, NOTFOUND, the empty
        # string, and anything ending in -NOTFOUND are false. Named
        # constants are case-insensitive; anything else is an error here
        # (CMake would treat it as a variable reference instead).
        val = val.upper()
        if val in ('ON', 'YES', 'TRUE', 'Y'):
            return True
        if val in ('OFF', 'NO', 'FALSE', 'N', 'IGNORE', 'NOTFOUND', ''):
            return False
        if val.endswith('-NOTFOUND'):
            return False
        try:
            return int(val) != 0
        except ValueError as exc:
            raise ValueError('invalid bool {}'.format(val)) from exc

    @classmethod
    def from_line(cls, line, line_no):
        '''Parse one CMakeCache.txt line; return an entry or None.'''
        # Comments can only occur at the beginning of a line.
        # (The value of an entry could contain a comment character).
        if line.startswith(('//', '#')):
            return None
        # Whitespace-only lines do not contain cache entries.
        if not line.strip():
            return None

        m = cls.CACHE_ENTRY.match(line)
        if not m:
            return None

        name = m.group('name')
        type_ = m.group('type')
        value = m.group('value')
        if type_ == 'BOOL':
            try:
                value = cls._to_bool(value)
            except ValueError as exc:
                args = exc.args + ('on line {}: {}'.format(line_no, line),)
                raise ValueError(args) from exc
        elif type_ in {'STRING', 'INTERNAL', 'STATIC', 'UNINITIALIZED'}:
            # If the value is a CMake list (i.e. is a string which
            # contains a ';'), convert to a Python list.
            if ';' in value:
                value = value.split(';')
        return CMakeCacheEntry(name, value)

    def __init__(self, name, value):
        self.name = name
        self.value = value

    def __str__(self):
        return 'CMakeCacheEntry(name={}, value={})'.format(self.name, self.value)
class CMakeCache:
    '''Parses and represents a CMake cache file.'''

    @staticmethod
    def from_build_dir(build_dir):
        '''Create a cache from the default cache file in build_dir.'''
        return CMakeCache(os.path.join(build_dir, DEFAULT_CACHE))

    def __init__(self, cache_file):
        self.cache_file = cache_file
        self.load(cache_file)

    def load(self, cache_file):
        '''(Re)read cache_file, replacing any previously loaded entries.'''
        parsed = []
        with open(cache_file, 'r', encoding="utf-8") as cache:
            for line_no, line in enumerate(cache):
                entry = CMakeCacheEntry.from_line(line, line_no)
                if entry is not None:
                    parsed.append(entry)
        self._entries = OrderedDict((e.name, e) for e in parsed)

    def get(self, name, default=None):
        '''Return the value for name, or default when absent.'''
        entry = self._entries.get(name)
        return default if entry is None else entry.value

    def get_list(self, name, default=None):
        '''Return the value for name coerced to a list of strings.'''
        entry = self._entries.get(name)
        if entry is None:
            return [] if default is None else default
        value = entry.value
        if isinstance(value, list):
            return value
        if isinstance(value, str):
            # A non-empty scalar becomes a one-element list.
            return [value] if value else []
        msg = 'invalid value {} type {}'
        raise RuntimeError(msg.format(value, type(value)))

    def __contains__(self, name):
        return name in self._entries

    def __getitem__(self, name):
        return self._entries[name].value

    def __setitem__(self, name, entry):
        if not isinstance(entry, CMakeCacheEntry):
            msg = 'improper type {} for value {}, expecting CMakeCacheEntry'
            raise TypeError(msg.format(type(entry), entry))
        self._entries[name] = entry

    def __delitem__(self, name):
        del self._entries[name]

    def __iter__(self):
        return iter(self._entries.values())
def _ensure_min_version(cmake, dry_run):
    # Verify that the cmake executable meets _MIN_CMAKE_VERSION; die with a
    # helpful message otherwise. (The _MIN_CMAKE_VERSION* constants are
    # defined at the bottom of this module; they exist by call time.)
    cmd = [cmake, '--version']
    if dry_run:
        log.inf('Dry run:', quote_sh_list(cmd))
        return
    try:
        version_out = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError as cpe:
        log.die('cannot get cmake version:', str(cpe))
    # log.die() exits the process, so version_out is always bound here.
    decoded = version_out.decode('utf-8')
    lines = decoded.splitlines()
    if not lines:
        # NOTE(review): the string literal below appears truncated by a URL
        # redaction ("path_to_url"); upstream closes it with the CMake
        # download URL, a parenthesis, a period and a quote.
        log.die('can\'t get cmake version: ' +
                'unexpected "cmake --version" output:\n{}\n'.
                format(decoded) +
                'Please install CMake ' + _MIN_CMAKE_VERSION_STR +
                ' or higher (path_to_url
    version = lines[0].split()[2]
    if '-' in version:
        # Handle semver cases like "3.19.20210206-g1e50ab6"
        # which Kitware uses for prerelease versions.
        version = version.split('-', 1)[0]
    if packaging.version.parse(version) < _MIN_CMAKE_VERSION:
        # NOTE(review): same URL redaction/truncation as above.
        log.die('cmake version', version,
                'is less than minimum version {};'.
                format(_MIN_CMAKE_VERSION_STR),
                'please update your CMake (path_to_url
    else:
        log.dbg('cmake version', version, 'is OK; minimum version is',
                _MIN_CMAKE_VERSION_STR)
# Minimum CMake version accepted by _ensure_min_version(), kept both as the
# display string and pre-parsed for comparisons.
_MIN_CMAKE_VERSION_STR = '3.13.1'
_MIN_CMAKE_VERSION = packaging.version.parse(_MIN_CMAKE_VERSION_STR)
``` | /content/code_sandbox/scripts/west_commands/zcmake.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,378 |
```python
#
from west.commands import WestCommand
from run_common import add_parser_common, do_run_common
EXPORT_DESCRIPTION = '''\
Simulate the board on a runner of choice using generated artifacts.
'''
class Simulate(WestCommand):
    '''west "simulate" command: run the built image on a simulator runner.'''

    def __init__(self):
        # Keep the help string in sync with the string in west-commands.yml.
        super().__init__(
            'simulate',
            'simulate board',
            EXPORT_DESCRIPTION,
            accepts_unknown_args=True)
        # Key used to look up the default runner in runners.yaml.
        self.runner_key = 'sim-runner'

    def do_add_parser(self, parser_adder):
        return add_parser_common(self, parser_adder)

    def do_run(self, my_args, runner_args):
        do_run_common(self, my_args, runner_args)
``` | /content/code_sandbox/scripts/west_commands/simulate.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 160 |
```python
#
from textwrap import dedent
import struct
from west.commands import WestCommand
from west import log
try:
from elftools.elf.elffile import ELFFile
from intelhex import IntelHex
MISSING_REQUIREMENTS = False
except ImportError:
MISSING_REQUIREMENTS = True
# Based on scripts/build/uf2conv.py
def convert_from_uf2(buf):
    '''Convert a UF2 image to a flat binary.

    Walks the 512-byte UF2 blocks in buf, skipping blocks with bad magic
    numbers or the NO-flash flag, inserting zero padding for address gaps
    between consecutive blocks, and concatenating the payloads.

    :param buf: raw UF2 file contents as bytes
    :return: the flattened binary image as bytes
    '''
    UF2_MAGIC_START0 = 0x0A324655 # First magic number ('UF2\n')
    UF2_MAGIC_START1 = 0x9E5D5157 # Second magic number
    numblocks = len(buf) // 512
    curraddr = None
    outp = []
    for blockno in range(numblocks):
        ptr = blockno * 512
        block = buf[ptr:ptr + 512]
        hd = struct.unpack(b'<IIIIIIII', block[0:32])
        if hd[0] != UF2_MAGIC_START0 or hd[1] != UF2_MAGIC_START1:
            # BUGFIX: ptr is an int; concatenating it to a str raised
            # TypeError instead of logging the message.
            log.inf('Skipping block at ' + str(ptr) + '; bad magic')
            continue
        if hd[2] & 1:
            # NO-flash flag set; skip block
            continue
        datalen = hd[4]
        if datalen > 476:
            log.die(f'Invalid UF2 data size at {ptr}')
        newaddr = hd[3]
        if curraddr is None:
            curraddr = newaddr
        padding = newaddr - curraddr
        if padding < 0:
            log.die(f'Block out of order at {ptr}')
        if padding > 10*1024*1024:
            log.die(f'More than 10M of padding needed at {ptr}')
        if padding % 4 != 0:
            log.die(f'Non-word padding size at {ptr}')
        # BUGFIX: the old `outp += b'\x00\x00\x00\x00'` loop extended the
        # list with int elements (iterating bytes yields ints), which made
        # the final b''.join() raise TypeError. Append one bytes object.
        if padding:
            outp.append(b'\x00' * padding)
        outp.append(block[32 : 32 + datalen])
        curraddr = newaddr + datalen
    return b''.join(outp)
class Bindesc(WestCommand):
EXTENSIONS = ['bin', 'hex', 'elf', 'uf2']
# Corresponds to the definitions in include/zephyr/bindesc.h.
# Do not change without syncing the definitions in both files!
TYPE_UINT = 0
TYPE_STR = 1
TYPE_BYTES = 2
MAGIC = 0xb9863e5a7ea46046
DESCRIPTORS_END = 0xffff
    def __init__(self):
        '''Build the tag<->name lookup tables and register the command.'''
        self.TAG_TO_NAME = {
            # Corresponds to the definitions in include/zephyr/bindesc.h.
            # Do not change without syncing the definitions in both files!
            self.bindesc_gen_tag(self.TYPE_STR, 0x800): 'APP_VERSION_STRING',
            self.bindesc_gen_tag(self.TYPE_UINT, 0x801): 'APP_VERSION_MAJOR',
            self.bindesc_gen_tag(self.TYPE_UINT, 0x802): 'APP_VERSION_MINOR',
            self.bindesc_gen_tag(self.TYPE_UINT, 0x803): 'APP_VERSION_PATCHLEVEL',
            self.bindesc_gen_tag(self.TYPE_UINT, 0x804): 'APP_VERSION_NUMBER',
            self.bindesc_gen_tag(self.TYPE_STR, 0x900): 'KERNEL_VERSION_STRING',
            self.bindesc_gen_tag(self.TYPE_UINT, 0x901): 'KERNEL_VERSION_MAJOR',
            self.bindesc_gen_tag(self.TYPE_UINT, 0x902): 'KERNEL_VERSION_MINOR',
            self.bindesc_gen_tag(self.TYPE_UINT, 0x903): 'KERNEL_VERSION_PATCHLEVEL',
            self.bindesc_gen_tag(self.TYPE_UINT, 0x904): 'KERNEL_VERSION_NUMBER',
            self.bindesc_gen_tag(self.TYPE_UINT, 0xa00): 'BUILD_TIME_YEAR',
            self.bindesc_gen_tag(self.TYPE_UINT, 0xa01): 'BUILD_TIME_MONTH',
            self.bindesc_gen_tag(self.TYPE_UINT, 0xa02): 'BUILD_TIME_DAY',
            self.bindesc_gen_tag(self.TYPE_UINT, 0xa03): 'BUILD_TIME_HOUR',
            self.bindesc_gen_tag(self.TYPE_UINT, 0xa04): 'BUILD_TIME_MINUTE',
            self.bindesc_gen_tag(self.TYPE_UINT, 0xa05): 'BUILD_TIME_SECOND',
            self.bindesc_gen_tag(self.TYPE_UINT, 0xa06): 'BUILD_TIME_UNIX',
            self.bindesc_gen_tag(self.TYPE_STR, 0xa07): 'BUILD_DATE_TIME_STRING',
            self.bindesc_gen_tag(self.TYPE_STR, 0xa08): 'BUILD_DATE_STRING',
            self.bindesc_gen_tag(self.TYPE_STR, 0xa09): 'BUILD_TIME_STRING',
            self.bindesc_gen_tag(self.TYPE_STR, 0xb00): 'HOST_NAME',
            self.bindesc_gen_tag(self.TYPE_STR, 0xb01): 'C_COMPILER_NAME',
            self.bindesc_gen_tag(self.TYPE_STR, 0xb02): 'C_COMPILER_VERSION',
            self.bindesc_gen_tag(self.TYPE_STR, 0xb03): 'CXX_COMPILER_NAME',
            self.bindesc_gen_tag(self.TYPE_STR, 0xb04): 'CXX_COMPILER_VERSION',
        }
        # Reverse mapping: symbolic descriptor name -> tag string.
        self.NAME_TO_TAG = {v: k for k, v in self.TAG_TO_NAME.items()}
        super().__init__(
            'bindesc',
            'work with Binary Descriptors',
            dedent('''
            Work with Binary Descriptors - constant data objects
            describing a binary image
            '''))
def do_add_parser(self, parser_adder):
parser = parser_adder.add_parser(self.name,
help=self.help,
description=self.description)
subparsers = parser.add_subparsers(help='sub-command to run', required=True)
dump_parser = subparsers.add_parser('dump', help='Dump all binary descriptors in the image')
dump_parser.add_argument('file', type=str, help='Executable file')
dump_parser.add_argument('--file-type', type=str, choices=self.EXTENSIONS, help='File type')
dump_parser.add_argument('-b', '--big-endian', action='store_true',
help='Target CPU is big endian')
dump_parser.set_defaults(subcmd='dump', big_endian=False)
search_parser = subparsers.add_parser('search', help='Search for a specific descriptor')
search_parser.add_argument('descriptor', type=str, help='Descriptor name')
search_parser.add_argument('file', type=str, help='Executable file')
search_parser.add_argument('--file-type', type=str, choices=self.EXTENSIONS, help='File type')
search_parser.add_argument('-b', '--big-endian', action='store_true',
help='Target CPU is big endian')
search_parser.set_defaults(subcmd='search', big_endian=False)
custom_search_parser = subparsers.add_parser('custom_search',
help='Search for a custom descriptor')
custom_search_parser.add_argument('type', type=str, choices=['UINT', 'STR', 'BYTES'],
help='Descriptor type')
custom_search_parser.add_argument('id', type=str, help='Descriptor ID in hex')
custom_search_parser.add_argument('file', type=str, help='Executable file')
custom_search_parser.add_argument('--file-type', type=str, choices=self.EXTENSIONS,
help='File type')
custom_search_parser.add_argument('-b', '--big-endian', action='store_true',
help='Target CPU is big endian')
custom_search_parser.set_defaults(subcmd='custom_search', big_endian=False)
list_parser = subparsers.add_parser('list', help='List all known descriptors')
list_parser.set_defaults(subcmd='list', big_endian=False)
return parser
def dump(self, args):
image = self.get_image_data(args.file)
descriptors = self.parse_descriptors(image)
for tag, value in descriptors.items():
if tag in self.TAG_TO_NAME:
tag = self.TAG_TO_NAME[tag]
log.inf(f'{tag}', self.bindesc_repr(value))
def list(self, args):
for tag in self.TAG_TO_NAME.values():
log.inf(f'{tag}')
def common_search(self, args, search_term):
image = self.get_image_data(args.file)
descriptors = self.parse_descriptors(image)
if search_term in descriptors:
value = descriptors[search_term]
log.inf(self.bindesc_repr(value))
else:
log.die('Descriptor not found')
def search(self, args):
try:
search_term = self.NAME_TO_TAG[args.descriptor]
except KeyError:
log.die(f'Descriptor {args.descriptor} is invalid')
self.common_search(args, search_term)
def custom_search(self, args):
custom_type = {
'STR': self.TYPE_STR,
'UINT': self.TYPE_UINT,
'BYTES': self.TYPE_BYTES
}[args.type]
custom_tag = self.bindesc_gen_tag(custom_type, int(args.id, 16))
self.common_search(args, custom_tag)
def do_run(self, args, _):
if MISSING_REQUIREMENTS:
raise RuntimeError('one or more Python dependencies were missing; '
'see the getting started guide for details on '
'how to fix')
self.is_big_endian = args.big_endian
self.file_type = self.guess_file_type(args)
subcmd = getattr(self, args.subcmd)
subcmd(args)
def get_image_data(self, file_name):
if self.file_type == 'bin':
with open(file_name, 'rb') as bin_file:
return bin_file.read()
if self.file_type == 'hex':
return IntelHex(file_name).tobinstr()
if self.file_type == 'uf2':
with open(file_name, 'rb') as uf2_file:
return convert_from_uf2(uf2_file.read())
if self.file_type == 'elf':
with open(file_name, 'rb') as f:
elffile = ELFFile(f)
section = elffile.get_section_by_name('rom_start')
if section:
return section.data()
section = elffile.get_section_by_name('text')
if section:
return section.data()
log.die('No "rom_start" or "text" section found')
log.die('Unknown file type')
    def parse_descriptors(self, image):
        """Locate the descriptor area in 'image' and decode every descriptor.

        Returns a dict mapping '0xNNNN' tag strings to decoded values
        (str, int, or bytes, depending on the tag's type field).
        Dies if the magic marker is missing or a tag has an unknown type.
        """
        # The descriptor area starts right after a 64-bit magic value.
        magic = struct.pack('>Q' if self.is_big_endian else 'Q', self.MAGIC)
        index = image.find(magic)
        if index == -1:
            log.die('Could not find binary descriptor magic')
        descriptors = {}
        index += len(magic) # index points to first descriptor
        # Each descriptor is: 16-bit tag, 16-bit length, then 'length' data
        # bytes; the list is terminated by a DESCRIPTORS_END tag.
        current_tag = self.bytes_to_short(image[index:index+2])
        while current_tag != self.DESCRIPTORS_END:
            index += 2 # index points to length
            length = self.bytes_to_short(image[index:index+2])
            index += 2 # index points to data
            data = image[index:index+length]
            tag_type = self.bindesc_get_type(current_tag)
            if tag_type == self.TYPE_STR:
                # String data includes a terminator byte; drop it.
                decoded_data = data[:-1].decode('ascii')
            elif tag_type == self.TYPE_UINT:
                decoded_data = self.bytes_to_uint(data)
            elif tag_type == self.TYPE_BYTES:
                decoded_data = data
            else:
                log.die(f'Unknown type for tag 0x{current_tag:04x}')
            key = f'0x{current_tag:04x}'
            descriptors[key] = decoded_data
            index += length
            # Descriptors are 4-byte aligned; skip any padding.
            index = self.align(index, 4)
            current_tag = self.bytes_to_short(image[index:index+2])
        return descriptors
def guess_file_type(self, args):
if "file" not in args:
return None
# If file type is explicitly given, use it
if args.file_type is not None:
return args.file_type
# If the file has a known extension, use it
for extension in self.EXTENSIONS:
if args.file.endswith(f'.{extension}'):
return extension
with open(args.file, 'rb') as f:
header = f.read(1024)
# Try the elf magic
if header.startswith(b'\x7fELF'):
return 'elf'
# Try the uf2 magic
if header.startswith(b'UF2\n'):
return 'uf2'
try:
# if the file is textual it's probably hex
header.decode('ascii')
return 'hex'
except UnicodeDecodeError:
# Default to bin
return 'bin'
def bytes_to_uint(self, b):
return struct.unpack('>I' if self.is_big_endian else 'I', b)[0]
def bytes_to_short(self, b):
return struct.unpack('>H' if self.is_big_endian else 'H', b)[0]
@staticmethod
def bindesc_gen_tag(_type, _id):
return f'0x{(_type << 12 | _id):04x}'
@staticmethod
def bindesc_get_type(tag):
return tag >> 12
@staticmethod
def align(x, alignment):
return (x + alignment - 1) & (~(alignment - 1))
@staticmethod
def bindesc_repr(value):
if isinstance(value, str):
return f'"{value}"'
if isinstance(value, (int, bytes)):
return f'{value}'
``` | /content/code_sandbox/scripts/west_commands/bindesc.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,856 |
```python
#
import argparse
import os
from pathlib import Path
import re
import sys
import textwrap
from west import log
from west.commands import WestCommand
from zephyr_ext_common import ZEPHYR_BASE
sys.path.append(os.fspath(Path(__file__).parent.parent))
import list_shields
import zephyr_module
class Shields(WestCommand):
    """West command that lists the shields available in the workspace."""

    def __init__(self):
        super().__init__(
            'shields',
            # Keep this in sync with the string in west-commands.yml.
            'display list of supported shield',
            'Display supported shields',
            accepts_unknown_args=False)

    def do_add_parser(self, parser_adder):
        """Create and return the argument parser for 'west shields'."""
        default_fmt = '{name}'
        parser = parser_adder.add_parser(
            self.name,
            help=self.help,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description,
            epilog=textwrap.dedent(f'''\
            FORMAT STRINGS
            --------------
            Shields are listed using a Python 3 format string. Arguments
            to the format string are accessed by name.
            The default format string is:
            "{default_fmt}"
            The following arguments are available:
            - name: shield name
            - dir: directory that contains the shield definition
            '''))
        # Remember to update west-completion.bash if you add or remove
        # flags
        parser.add_argument('-f', '--format', default=default_fmt,
                            help='''Format string to use to list each shield;
                            see FORMAT STRINGS below.''')
        parser.add_argument('-n', '--name', dest='name_re',
                            help='''a regular expression; only shields whose
                            names match NAME_RE will be listed''')
        list_shields.add_args(parser)
        return parser

    def do_run(self, args, _):
        """Find all shields in all board roots, filter by --name, print them."""
        if args.name_re is not None:
            name_re = re.compile(args.name_re)
        else:
            name_re = None
        modules_board_roots = [ZEPHYR_BASE]
        # Modules can contribute additional board roots via their
        # build.settings.board_root metadata.
        for module in zephyr_module.parse_modules(ZEPHYR_BASE, self.manifest):
            board_root = module.meta.get('build', {}).get('settings', {}).get('board_root')
            if board_root is not None:
                modules_board_roots.append(Path(module.project) / board_root)
        args.board_roots += modules_board_roots
        for shield in list_shields.find_shields(args):
            if name_re is not None and not name_re.search(shield.name):
                continue
            log.inf(args.format.format(name=shield.name, dir=shield.dir))
``` | /content/code_sandbox/scripts/west_commands/shields.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 541 |
```python
#
'''Common code used by commands which execute runners.
'''
import re
import argparse
import logging
from collections import defaultdict
from os import close, getcwd, path, fspath
from pathlib import Path
from subprocess import CalledProcessError
import sys
import tempfile
import textwrap
import traceback
from dataclasses import dataclass
from west import log
from build_helpers import find_build_dir, is_zephyr_build, load_domains, \
FIND_BUILD_DIR_DESCRIPTION
from west.commands import CommandError
from west.configuration import config
from runners.core import FileType
from runners.core import BuildConfiguration
import yaml
from zephyr_ext_common import ZEPHYR_SCRIPTS
# Runners depend on edtlib. Make sure the copy in the tree is
# available to them before trying to import any.
sys.path.insert(0, str(ZEPHYR_SCRIPTS / 'dts' / 'python-devicetree' / 'src'))
from runners import get_runner_cls, ZephyrBinaryRunner, MissingProgram
from runners.core import RunnerConfig
import zcmake
# Context-sensitive help indentation.
# Don't change this, or output from argparse won't match up.
INDENT = ' ' * 2
# Pick the stdlib logging level used for runner loggers, based on how
# verbose west itself was asked to be.
if log.VERBOSE >= log.VERBOSE_NORMAL:
    # Using level 1 allows sub-DEBUG levels of verbosity. The
    # west.log module decides whether or not to actually print the
    # message.
    #
    # See the Python logging documentation on custom logging levels.
    LOG_LEVEL = 1
else:
    LOG_LEVEL = logging.INFO
def _banner(msg):
    """Print a colorized '-- ' banner line through the west logger."""
    log.inf('-- ' + msg, colorize=True)
class WestLogFormatter(logging.Formatter):
    """Formatter that prefixes each message with its logger's name."""

    def __init__(self):
        fmt = '%(name)s: %(message)s'
        super().__init__(fmt=fmt)
class WestLogHandler(logging.Handler):
    """logging.Handler that forwards records to west's log functions."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setFormatter(WestLogFormatter())
        self.setLevel(LOG_LEVEL)

    def emit(self, record):
        message = self.format(record)
        level = record.levelno
        # Map stdlib logging levels onto west.log calls, most severe first.
        if level > logging.CRITICAL:
            log.die(message)
        elif level >= logging.ERROR:
            log.err(message)
        elif level >= logging.WARNING:
            log.wrn(message)
        elif level >= logging.INFO:
            _banner(message)
        elif level >= logging.DEBUG:
            log.dbg(message)
        else:
            log.dbg(message, level=log.VERBOSE_EXTREME)
@dataclass
class UsedFlashCommand:
    # A run-once runner option (e.g. '--reset') from board/SoC metadata.
    command: str
    # Board name patterns (used as regexes) this command applies to.
    boards: list
    # Runner names this command applies to ('all' matches any runner).
    runners: list
    # True to run with the first image of a board, False with the last.
    first: bool
    # Set once the command has actually been kept for a run.
    ran: bool = False
@dataclass
class ImagesFlashed:
    # Number of images flashed so far for a given board.
    flashed: int = 0
    # Total number of images to flash for that board.
    total: int = 0
def command_verb(command):
    """Return the verb for the command: 'flash' for flash, 'debug' otherwise."""
    if command.name == "flash":
        return "flash"
    return "debug"
def add_parser_common(command, parser_adder=None, parser=None):
    """Add the options shared by all runner-based commands.

    Either pass 'parser_adder' (a subparser adder; a new parser is
    created for 'command') or an existing 'parser' to extend.
    Returns the populated parser.
    """
    if parser_adder is not None:
        parser = parser_adder.add_parser(
            command.name,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            help=command.help,
            description=command.description)
    # Remember to update west-completion.bash if you add or remove
    # flags
    group = parser.add_argument_group('general options',
                                      FIND_BUILD_DIR_DESCRIPTION)
    group.add_argument('-d', '--build-dir', metavar='DIR',
                       help='application build directory')
    # still supported for backwards compatibility, but questionably
    # useful now that we do everything with runners.yaml
    group.add_argument('-c', '--cmake-cache', metavar='FILE',
                       help=argparse.SUPPRESS)
    group.add_argument('-r', '--runner',
                       help='override default runner from --build-dir')
    group.add_argument('--skip-rebuild', action='store_true',
                       help='do not refresh cmake dependencies first')
    group.add_argument('--domain', action='append',
                       help='execute runner only for given domain')
    group = parser.add_argument_group(
        'runner configuration',
        textwrap.dedent(f'''\
        ===================================================================
        IMPORTANT:
        Individual runners support additional options not printed here.
        ===================================================================
        Run "west {command.name} --context" for runner-specific options.
        If a build directory is found, --context also prints per-runner
        settings found in that build directory's runners.yaml file.
        Use "west {command.name} --context -r RUNNER" to limit output to a
        specific RUNNER.
        Some runner settings also can be overridden with options like
        --hex-file. However, this depends on the runner: not all runners
        respect --elf-file / --hex-file / --bin-file, nor use gdb or openocd,
        etc.'''))
    group.add_argument('-H', '--context', action='store_true',
                       help='print runner- and build-specific help')
    # Options used to override RunnerConfig values in runners.yaml.
    # TODO: is this actually useful?
    group.add_argument('--board-dir', metavar='DIR', help='board directory')
    # FIXME: these are runner-specific and should be moved to where --context
    # can find them instead.
    group.add_argument('--gdb', help='path to GDB')
    group.add_argument('--openocd', help='path to openocd')
    group.add_argument(
        '--openocd-search', metavar='DIR', action='append',
        help='path to add to openocd search path, if applicable')
    return parser
def do_run_common(command, user_args, user_runner_args, domain_file=None):
    """Main routine for all the "west flash", "west debug", etc. commands.

    Resolves the domains to operate on, gathers per-board 'run once'
    flash commands from board/SoC metadata when flashing multiple
    domains, and then invokes the runner once per domain.
    """
    # Holds a list of run once commands, this is useful for sysbuild images
    # whereby there are multiple images per board with flash commands that can
    # interfere with other images if they run one per time an image is flashed.
    used_cmds = []

    # Holds a set of processed board names for flash running information.
    processed_boards = set()

    # Holds a dictionary of board image flash counts, the first element is
    # number of images flashed so far and second element is total number of
    # images for a given board.
    board_image_count = defaultdict(ImagesFlashed)

    if user_args.context:
        dump_context(command, user_args, user_runner_args)
        return

    build_dir = get_build_dir(user_args)
    if not user_args.skip_rebuild:
        rebuild(command, build_dir, user_args)

    if domain_file is None:
        if user_args.domain is None:
            # No domains are passed down and no domains specified by the user.
            # So default domain will be used.
            domains = [load_domains(build_dir).get_default_domain()]
        else:
            # No domains are passed down, but user has specified domains to use.
            # Get the user specified domains.
            domains = load_domains(build_dir).get_domains(user_args.domain)
    else:
        domains = load_domains(build_dir).get_domains(user_args.domain,
                                                     default_flash_order=True)

    if len(domains) > 1:
        if len(user_runner_args) > 0:
            log.wrn("Specifying runner options for multiple domains is experimental.\n"
                    "If problems are experienced, please specify a single domain "
                    "using '--domain <domain>'")

        # Process all domains to load board names and populate flash runner
        # parameters.
        board_names = set()
        for d in domains:
            if d.build_dir is None:
                build_dir = get_build_dir(user_args)
            else:
                build_dir = d.build_dir
            cache = load_cmake_cache(build_dir, user_args)
            build_conf = BuildConfiguration(build_dir)
            board = build_conf.get('CONFIG_BOARD_TARGET')
            board_names.add(board)
            board_image_count[board].total += 1

            # Load board flash runner configuration (if it exists) and store
            # single-use commands in a dictionary so that they get executed
            # once per unique board name.
            if cache['BOARD_DIR'] not in processed_boards and 'SOC_FULL_DIR' in cache:
                soc_yaml_file = Path(cache['SOC_FULL_DIR']) / 'soc.yml'
                board_yaml_file = Path(cache['BOARD_DIR']) / 'board.yml'
                group_type = 'boards'

                # Search for flash runner configuration, board takes priority over SoC
                try:
                    with open(board_yaml_file, 'r') as f:
                        data_yaml = yaml.safe_load(f.read())
                except FileNotFoundError:
                    continue

                if 'runners' not in data_yaml:
                    # Check SoC file
                    group_type = 'qualifiers'
                    try:
                        with open(soc_yaml_file, 'r') as f:
                            data_yaml = yaml.safe_load(f.read())
                    except FileNotFoundError:
                        continue

                processed_boards.add(cache['BOARD_DIR'])

                if 'runners' not in data_yaml or 'run_once' not in data_yaml['runners']:
                    continue

                for cmd in data_yaml['runners']['run_once']:
                    for data in data_yaml['runners']['run_once'][cmd]:
                        for group in data['groups']:
                            run_first = bool(data['run'] == 'first')
                            if group_type == 'qualifiers':
                                targets = []
                                for target in group[group_type]:
                                    # For SoC-based qualifiers, prepend to the beginning of the
                                    # match to allow for matching any board name
                                    targets.append('([^/]+)/' + target)
                            else:
                                targets = group[group_type]
                            used_cmds.append(UsedFlashCommand(cmd, targets, data['runners'], run_first))

        # Reduce entries to only those having matching board names (either
        # exact or with regex) and remove any entries with empty board lists.
        # Build new lists rather than deleting during iteration with
        # enumerate(), which would skip the element after each deletion.
        for entry in used_cmds:
            entry.boards = [
                pattern for pattern in entry.boards
                if any(re.match(fr'^{pattern}$', check) is not None
                       for check in board_names)
            ]
        used_cmds = [entry for entry in used_cmds if entry.boards]

    for d in domains:
        do_run_common_image(command, user_args, user_runner_args,
                            used_cmds, board_image_count, d.build_dir)
def do_run_common_image(command, user_args, user_runner_args, used_cmds,
                        board_image_count, build_dir=None):
    """Run the flash/debug runner for a single image/domain.

    'used_cmds' holds per-board run-once runner options collected by
    do_run_common(); those options are stripped from the runner
    arguments for all but one image per board.
    """
    command_name = command.name
    if build_dir is None:
        build_dir = get_build_dir(user_args)
    cache = load_cmake_cache(build_dir, user_args)
    build_conf = BuildConfiguration(build_dir)
    board = build_conf.get('CONFIG_BOARD_TARGET')

    if board_image_count is not None and board in board_image_count:
        board_image_count[board].flashed += 1

    # Load runners.yaml.
    yaml_path = runners_yaml_path(build_dir, board)
    runners_yaml = load_runners_yaml(yaml_path)

    # Get a concrete ZephyrBinaryRunner subclass to use based on
    # runners.yaml and command line arguments.
    runner_cls = use_runner_cls(command, board, user_args, runners_yaml,
                                cache)
    runner_name = runner_cls.name()

    # Set up runner logging to delegate to west.log commands.
    logger = logging.getLogger('runners')
    logger.setLevel(LOG_LEVEL)
    if not logger.hasHandlers():
        # Only add a runners log handler if none has been added already.
        logger.addHandler(WestLogHandler())

    # If the user passed -- to force the parent argument parser to stop
    # parsing, it will show up here, and needs to be filtered out.
    runner_args = [arg for arg in user_runner_args if arg != '--']

    # Check if there are any commands that should only be ran once per board
    # and if so, remove them for all but the first iteration of the flash
    # runner per unique board name.
    if len(used_cmds) > 0 and len(runner_args) > 0:
        i = len(runner_args) - 1
        while i >= 0:
            for cmd in used_cmds:
                if cmd.command == runner_args[i] and (runner_name in cmd.runners or 'all' in cmd.runners):
                    # Check if board is here
                    match_found = False
                    for match in cmd.boards:
                        # Check if there is a matching board for this regex
                        if re.match(fr'^{match}$', board) is not None:
                            match_found = True
                            break
                    if not match_found:
                        continue
                    # Check if this is a first or last run
                    if not cmd.first:
                        # For last run instances, we need to check that this really is the last
                        # image of all boards being flashed
                        for check in cmd.boards:
                            can_continue = False
                            for match in board_image_count:
                                if re.match(fr'^{check}$', match) is not None:
                                    if board_image_count[match].flashed == board_image_count[match].total:
                                        can_continue = True
                                        break
                            # NOTE(review): this 'continue' only advances the
                            # inner 'for check' loop; it looks like it was
                            # meant to skip this command entirely -- confirm
                            # intent before changing.
                            if not can_continue:
                                continue
                    if not cmd.ran:
                        cmd.ran = True
                    else:
                        runner_args.pop(i)
                    break
            i = i - 1

    # If flashing multiple images, the runner supports reset after flashing and
    # the board has enabled this functionality, check if the board should be
    # reset or not. If this is not specified in the board/soc file, leave it up to
    # the runner's default configuration to decide if a reset should occur.
    if runner_cls.capabilities().reset:
        if board_image_count is not None:
            reset = True
            for cmd in used_cmds:
                if cmd.command == '--reset' and (runner_name in cmd.runners or 'all' in cmd.runners):
                    # Check if board is here
                    match_found = False
                    for match in cmd.boards:
                        # Check if there is a matching board for this regex
                        if re.match(fr'^{match}$', board) is not None:
                            match_found = True
                            break
                    if not match_found:
                        continue
                    # Check if this is a first or last run
                    if cmd.first and cmd.ran:
                        reset = False
                        break
                    elif not cmd.first and not cmd.ran:
                        # For last run instances, we need to check that this really is the last
                        # image of all boards being flashed
                        for check in cmd.boards:
                            for match in board_image_count:
                                if re.match(fr'^{check}$', match) is not None:
                                    if board_image_count[match].flashed != board_image_count[match].total:
                                        reset = False
                                        break
            if reset:
                runner_args.append('--reset')
            else:
                runner_args.append('--no-reset')

    # Arguments in this order to allow specific to override general:
    #
    # - runner-specific runners.yaml arguments
    # - user-provided command line arguments
    final_argv = runners_yaml['args'][runner_name] + runner_args

    # 'user_args' contains parsed arguments which are:
    #
    # 1. provided on the command line, and
    # 2. handled by add_parser_common(), and
    # 3. *not* runner-specific
    #
    # 'final_argv' contains unparsed arguments from either:
    #
    # 1. runners.yaml, or
    # 2. the command line
    #
    # We next have to:
    #
    # - parse 'final_argv' now that we have all the command line
    #   arguments
    # - create a RunnerConfig using 'user_args' and the result
    #   of parsing 'final_argv'
    parser = argparse.ArgumentParser(prog=runner_name, allow_abbrev=False)
    add_parser_common(command, parser=parser)
    runner_cls.add_parser(parser)
    args, unknown = parser.parse_known_args(args=final_argv)
    if unknown:
        log.die(f'runner {runner_name} received unknown arguments: {unknown}')

    # Override args with any user_args. The latter must take
    # precedence, or e.g. --hex-file on the command line would be
    # ignored in favor of a board.cmake setting.
    for a, v in vars(user_args).items():
        if v is not None:
            setattr(args, a, v)

    # Create the RunnerConfig from runners.yaml and any command line
    # overrides.
    runner_config = get_runner_config(build_dir, yaml_path, runners_yaml, args)
    log.dbg(f'runner_config: {runner_config}', level=log.VERBOSE_VERY)

    # Use that RunnerConfig to create the ZephyrBinaryRunner instance
    # and call its run().
    try:
        runner = runner_cls.create(runner_config, args)
        runner.run(command_name)
    except ValueError as ve:
        log.err(str(ve), fatal=True)
        dump_traceback()
        raise CommandError(1)
    except MissingProgram as e:
        log.die('required program', e.filename,
                'not found; install it or add its location to PATH')
    except RuntimeError as err:
        # Bind the exception to 'err', not 're': binding it to 're'
        # (as the old code did, propped up by a 'global re' hack)
        # shadowed the regex module and deleted the global binding when
        # the except block exited.
        if not user_args.verbose:
            log.die(err)
        else:
            log.err('verbose mode enabled, dumping stack:', fatal=True)
            raise
def get_build_dir(args, die_if_none=True):
    """Resolve the build directory from arguments, config, and heuristics.

    Returns the directory, or None (die_if_none=False) / dies
    (die_if_none=True) when no zephyr build directory can be found.
    """
    if args.build_dir:
        return args.build_dir

    # The 'guess-dir' config option enables heuristic detection.
    guess = config.get('build', 'guess-dir', fallback='never') == 'runners'
    found = find_build_dir(None, guess)
    if found and is_zephyr_build(found):
        return found
    if not die_if_none:
        return None

    msg = '--build-dir was not given, '
    if found:
        msg += 'and neither {} nor {} are zephyr build directories.'
    else:
        msg += ('{} is not a build directory and the default build '
                'directory cannot be determined. Check your '
                'build.dir-fmt configuration option')
    log.die(msg.format(getcwd(), found))
def load_cmake_cache(build_dir, args):
    """Return the parsed CMake cache for build_dir, dying if it is missing."""
    cache_name = args.cmake_cache or zcmake.DEFAULT_CACHE
    cache_path = path.join(build_dir, cache_name)
    try:
        return zcmake.CMakeCache(cache_path)
    except FileNotFoundError:
        log.die(f'no CMake cache found (expected one at {cache_path})')
def rebuild(command, build_dir, args):
    """Re-run the build system in build_dir before flashing/debugging."""
    _banner(f'west {command.name}: rebuilding')
    try:
        zcmake.run_build(build_dir)
    except CalledProcessError:
        if args.build_dir:
            log.die(f're-build in {args.build_dir} failed')
        log.die(f're-build in {build_dir} failed (no --build-dir given)')
def runners_yaml_path(build_dir, board):
    """Return the Path of runners.yaml inside build_dir, dying if absent."""
    yaml_file = Path(build_dir) / 'zephyr' / 'runners.yaml'
    if not yaml_file.is_file():
        log.die(f'no runners.yaml found in {build_dir}/zephyr. '
                f"Either board {board} doesn't support west flash/debug/simulate,"
                ' or a pristine build is needed.')
    return yaml_file
def load_runners_yaml(path):
    """Load runners.yaml from 'path' and return it as a Python object.

    Warns if no runners are configured; dies if the file is missing.
    """
    try:
        with open(path, 'r') as f:
            content = yaml.safe_load(f.read())
    except FileNotFoundError:
        log.die(f'runners.yaml file not found: {path}')
    if not content.get('runners'):
        log.wrn(f'no pre-configured runners in {path}; '
                "this probably won't work")
    return content
def use_runner_cls(command, board, args, runners_yaml, cache):
    """Resolve and validate the ZephyrBinaryRunner class to use.

    The runner comes from --runner if given, otherwise from the
    command's default key in runners.yaml. Dies unless the runner is
    configured for this board and supports the requested command.
    """
    runner = args.runner or runners_yaml.get(command.runner_key)
    if runner is None:
        log.die(f'no {command.name} runner available for board {board}. '
                "Check the board's documentation for instructions.")

    _banner(f'west {command.name}: using runner {runner}')

    if runner not in runners_yaml.get('runners', []):
        if 'BOARD_DIR' in cache:
            board_cmake = Path(cache['BOARD_DIR']) / 'board.cmake'
        else:
            board_cmake = 'board.cmake'
        log.err(f'board {board} does not support runner {runner}',
                fatal=True)
        log.inf(f'To fix, configure this runner in {board_cmake} and rebuild.')
        sys.exit(1)

    try:
        runner_cls = get_runner_cls(runner)
    except ValueError as e:
        log.die(e)

    if command.name not in runner_cls.capabilities().commands:
        log.die(f'runner {runner} does not support command {command.name}')
    return runner_cls
def get_runner_config(build_dir, yaml_path, runners_yaml, args=None):
    """Build a RunnerConfig for the current run.

    'runners_yaml' is the parsed runners.yaml contents; 'args' holds
    parsed command line arguments (None means yaml values only).
    Command line values take precedence over yaml 'config' values.
    """
    yaml_config = runners_yaml['config']
    yaml_dir = yaml_path.parent
    if args is None:
        args = argparse.Namespace()

    def output_file(filetype):
        # Resolve an output file path: the command line wins; yaml
        # paths are relative to the runners.yaml directory.
        from_args = getattr(args, f'{filetype}_file', None)
        if from_args is not None:
            return from_args
        from_yaml = yaml_config.get(f'{filetype}_file')
        if from_yaml is not None:
            # Output paths in runners.yaml are relative to the
            # directory containing the runners.yaml file.
            return fspath(yaml_dir / from_yaml)
        return None

    def config(attr, default=None):
        # NOTE: this local helper shadows the module-level west
        # 'config' import within this function.
        return getattr(args, attr, None) or yaml_config.get(attr, default)

    def filetype(attr):
        # Map a --file-type argument (or the --file extension) to a
        # FileType enum value; unknown extensions become OTHER.
        ftype = str(getattr(args, attr, None)).lower()
        if ftype == "hex":
            return FileType.HEX
        elif ftype == "bin":
            return FileType.BIN
        elif ftype == "elf":
            return FileType.ELF
        elif getattr(args, attr, None) is not None:
            err = 'unknown --file-type ({}). Please use hex, bin or elf'
            raise ValueError(err.format(ftype))
        # file-type not provided, try to get from filename
        file = getattr(args, "file", None)
        if file is not None:
            ext = Path(file).suffix
            if ext == ".hex":
                return FileType.HEX
            if ext == ".bin":
                return FileType.BIN
            if ext == ".elf":
                return FileType.ELF
        # we couldn't get the file-type, set to
        # OTHER and let the runner deal with it
        return FileType.OTHER

    return RunnerConfig(build_dir,
                        yaml_config['board_dir'],
                        output_file('elf'),
                        output_file('exe'),
                        output_file('hex'),
                        output_file('bin'),
                        output_file('uf2'),
                        config('file'),
                        filetype('file_type'),
                        config('gdb'),
                        config('openocd'),
                        config('openocd_search', []))
def dump_traceback():
    # Save the current exception's traceback to a temporary file and
    # print that file's location. (Nothing is returned.)
    fd, name = tempfile.mkstemp(prefix='west-exc-', suffix='.txt')
    close(fd) # traceback has no use for the fd
    with open(name, 'w') as f:
        traceback.print_exc(file=f)
    log.inf("An exception trace has been saved in", name)
#
# west {command} --context
#
def dump_context(command, args, unknown_args):
    """Implement 'west <command> --context': print runner and build info."""
    build_dir = get_build_dir(args, die_if_none=False)
    if build_dir is None:
        log.wrn('no --build-dir given or found; output will be limited')
        runners_yaml = None
    else:
        build_conf = BuildConfiguration(build_dir)
        board = build_conf.get('CONFIG_BOARD_TARGET')
        yaml_path = runners_yaml_path(build_dir, board)
        runners_yaml = load_runners_yaml(yaml_path)

    # Re-build unless asked not to, to make sure the output is up to date.
    if build_dir and not args.skip_rebuild:
        rebuild(command, build_dir, args)

    if args.runner:
        try:
            cls = get_runner_cls(args.runner)
        except ValueError:
            log.die(f'invalid runner name {args.runner}; choices: ' +
                    ', '.join(cls.name() for cls in
                              ZephyrBinaryRunner.get_runners()))
    else:
        cls = None

    if runners_yaml is None:
        dump_context_no_config(command, cls)
        return

    log.inf('build configuration:', colorize=True)
    log.inf(f'{INDENT}build directory: {build_dir}')
    log.inf(f'{INDENT}board: {board}')
    log.inf(f'{INDENT}runners.yaml: {yaml_path}')
    if cls:
        dump_runner_context(command, cls, runners_yaml)
    else:
        dump_all_runner_context(command, runners_yaml, board, build_dir)
def dump_context_no_config(command, cls):
    """Print --context output when no build directory is available."""
    if cls:
        # This does the right thing with a None runners_yaml argument.
        dump_runner_context(command, cls, None)
        return
    supported = {c.name(): c for c in ZephyrBinaryRunner.get_runners()
                 if command.name in c.capabilities().commands}
    log.inf(f'all Zephyr runners which support {command.name}:',
            colorize=True)
    dump_wrapped_lines(', '.join(supported.keys()), INDENT)
    log.inf()
    log.inf('Note: use -r RUNNER to limit information to one runner.')
def dump_runner_context(command, cls, runners_yaml, indent=''):
    """Print capabilities, options, and configured args for one runner."""
    dump_runner_caps(cls, indent)
    dump_runner_option_help(cls, indent)
    if runners_yaml is None:
        return
    if cls.name() in runners_yaml['runners']:
        dump_runner_args(cls.name(), runners_yaml, indent)
    else:
        log.wrn(f'support for runner {cls.name()} is not configured '
                f'in this build directory')
def dump_runner_caps(cls, indent=''):
    """Print the RunnerCaps reported by the given runner class."""
    log.inf(f'{indent}{cls.name()} capabilities:', colorize=True)
    log.inf(f'{indent}{INDENT}{cls.capabilities()}')
def dump_runner_option_help(cls, indent=''):
    """Print help text for the runner class's own command line options.

    Builds a throwaway parser, lets the runner class populate it, then
    formats the help text through argparse's (private) formatter API.
    """
    dummy_parser = argparse.ArgumentParser(prog='', add_help=False, allow_abbrev=False)
    cls.add_parser(dummy_parser)
    formatter = dummy_parser._get_formatter()
    for group in dummy_parser._action_groups:
        # Break the abstraction to filter out the 'flash', 'debug', etc.
        # TODO: come up with something cleaner (may require changes
        # in the runner core).
        actions = group._group_actions
        if len(actions) == 1 and actions[0].dest == 'command':
            # This is the lone positional argument. Skip it.
            continue
        formatter.start_section('REMOVE ME')
        formatter.add_text(group.description)
        formatter.add_arguments(actions)
        formatter.end_section()
    # Get the runner help, with the "REMOVE ME" string gone
    # (dropping the first line removes the section title).
    runner_help = f'\n{indent}'.join(formatter.format_help().splitlines()[1:])
    log.inf(f'{indent}{cls.name()} options:', colorize=True)
    log.inf(indent + runner_help)
def dump_runner_args(group, runners_yaml, indent=''):
    """Print the runners.yaml 'args' entries for the given runner group."""
    header = f'{indent}{group} arguments from runners.yaml:'
    configured_args = runners_yaml['args'][group]
    if not configured_args:
        log.inf(f'{header} (none)', colorize=True)
        return
    log.inf(header, colorize=True)
    for arg in configured_args:
        log.inf(f'{indent}{INDENT}{arg}')
def dump_all_runner_context(command, runners_yaml, board, build_dir):
    """Print --context information for every runner this build supports."""
    all_cls = {cls.name(): cls for cls in ZephyrBinaryRunner.get_runners() if
               command.name in cls.capabilities().commands}
    available = runners_yaml['runners']
    available_cls = {r: all_cls[r] for r in available if r in all_cls}
    default_runner = runners_yaml[command.runner_key]
    # NOTE: runners.yaml is re-read from disk here, shadowing the
    # 'runners_yaml' parameter, before building the runner config.
    yaml_path = runners_yaml_path(build_dir, board)
    runners_yaml = load_runners_yaml(yaml_path)
    log.inf(f'zephyr runners which support "west {command.name}":',
            colorize=True)
    dump_wrapped_lines(', '.join(all_cls.keys()), INDENT)
    log.inf()
    dump_wrapped_lines('Note: not all may work with this board and build '
                       'directory. Available runners are listed below.',
                       INDENT)
    log.inf(f'available runners in runners.yaml:',
            colorize=True)
    dump_wrapped_lines(', '.join(available), INDENT)
    log.inf(f'default runner in runners.yaml:', colorize=True)
    log.inf(INDENT + default_runner)
    log.inf('common runner configuration:', colorize=True)
    runner_config = get_runner_config(build_dir, yaml_path, runners_yaml)
    for field, value in zip(runner_config._fields, runner_config):
        log.inf(f'{INDENT}- {field}: {value}')
    log.inf('runner-specific context:', colorize=True)
    for cls in available_cls.values():
        dump_runner_context(command, cls, runners_yaml, INDENT)
    if len(available) > 1:
        log.inf()
        log.inf('Note: use -r RUNNER to limit information to one runner.')
def dump_wrapped_lines(text, indent):
    """Print 'text' word-wrapped, with every line prefixed by 'indent'."""
    wrapped = textwrap.wrap(text, initial_indent=indent,
                            subsequent_indent=indent,
                            break_on_hyphens=False,
                            break_long_words=False)
    for line in wrapped:
        log.inf(line)
``` | /content/code_sandbox/scripts/west_commands/run_common.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 6,429 |
```python
#!/usr/bin/env python3
#
# A convenience script provided for running tests on the runners
# package. Runs mypy and pytest. Any extra arguments in sys.argv are
# passed along to pytest.
#
# Using tox was considered, but rejected as overkill for now.
#
# We would have to configure tox to create the test virtualenv with
# all of zephyr's scripts/requirements.txt, which seems like too much
# effort for now. We just run in the same Python environment as the
# user for developer testing and trust CI to set that environment up
# properly for integration testing.
#
# If this file starts to reimplement too many features that are
# already available in tox, we can revisit this decision.
import os
import shlex
import subprocess
import sys
here = os.path.abspath(os.path.dirname(__file__))

mypy = [sys.executable, '-m', 'mypy', f'--config-file={here}/mypy.ini',
        '--package', 'runners']
pytest = [sys.executable, '-m', 'pytest'] + sys.argv[1:]

# Announce and run each tool from this directory; check=True makes the
# script fail fast with a CalledProcessError if either tool fails.
for tool, argv in (('mypy', mypy), ('pytest', pytest)):
    print(f'Running {tool} from {here}:\n\t' +
          ' '.join(shlex.quote(s) for s in argv),
          flush=True)
    subprocess.run(argv, check=True, cwd=here)
``` | /content/code_sandbox/scripts/west_commands/run_tests.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 327 |
```ini
[mypy]
ignore_missing_imports=True
``` | /content/code_sandbox/scripts/west_commands/mypy.ini | ini | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 10 |
```python
#
import argparse
import os
import pathlib
import shlex
import sys
import yaml
from west import log
from west.configuration import config
from zcmake import DEFAULT_CMAKE_GENERATOR, run_cmake, run_build, CMakeCache
from build_helpers import is_zephyr_build, find_build_dir, load_domains, \
FIND_BUILD_DIR_DESCRIPTION
from zephyr_ext_common import Forceable
_ARG_SEPARATOR = '--'
SYSBUILD_PROJ_DIR = pathlib.Path(__file__).resolve().parent.parent.parent \
/ pathlib.Path('share/sysbuild')
BUILD_USAGE = '''\
west build [-h] [-b BOARD[@REV]]] [-d BUILD_DIR]
[-S SNIPPET] [--shield SHIELD]
[-t TARGET] [-p {auto, always, never}] [-c] [--cmake-only]
[-n] [-o BUILD_OPT] [-f]
[--sysbuild | --no-sysbuild] [--domain DOMAIN]
[source_dir] -- [cmake_opt [cmake_opt ...]]
'''
BUILD_DESCRIPTION = f'''\
Convenience wrapper for building Zephyr applications.
{FIND_BUILD_DIR_DESCRIPTION}
positional arguments:
source_dir application source directory
cmake_opt extra options to pass to cmake; implies -c
(these must come after "--" as shown above)
'''
PRISTINE_DESCRIPTION = """\
A "pristine" build directory is empty. The -p option controls
whether the build directory is made pristine before the build
is done. A bare '--pristine' with no value is the same as
--pristine=always. Setting --pristine=auto uses heuristics to
guess if a pristine build may be necessary."""
def _banner(msg):
    """Print a colorized 'west build' banner line."""
    log.inf('-- west build: ' + msg, colorize=True)
def config_get(option, fallback):
    """Read a string option from the [build] section of the west config."""
    return config.get('build', option, fallback=fallback)
def config_getboolean(option, fallback):
    """Read a boolean option from the [build] section of the west config."""
    return config.getboolean('build', option, fallback=fallback)
class AlwaysIfMissing(argparse.Action):
    """argparse action that stores 'always' when the option is given bare.

    A falsy value (e.g. '-p' with no argument) is replaced by 'always';
    any explicit value is stored as-is.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values if values else 'always')
class Build(Forceable):
    '''west "build" command: run CMake as needed, then the build tool.'''

    def __init__(self):
        super(Build, self).__init__(
            'build',
            # Keep this in sync with the string in west-commands.yml.
            'compile a Zephyr application',
            BUILD_DESCRIPTION,
            accepts_unknown_args=True)
        self.source_dir = None
        '''Source directory for the build, or None on error.'''
        self.build_dir = None
        '''Final build directory used to run the build, or None on error.'''
        self.created_build_dir = False
        '''True if the build directory was created; False otherwise.'''
        self.run_cmake = False
        '''True if CMake was run; False otherwise.
        Note: this only describes CMake runs done by this command. The
        build system generated by CMake may also update itself due to
        internal logic.'''
        self.cmake_cache = None
        '''Final parsed CMake cache for the build, or None on error.'''

    def do_add_parser(self, parser_adder):
        """Create and return the 'west build' argument parser."""
        parser = parser_adder.add_parser(
            self.name,
            help=self.help,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description,
            usage=BUILD_USAGE)
        # Remember to update west-completion.bash if you add or remove
        # flags
        parser.add_argument('-b', '--board',
                            help='board to build for with optional board revision')
        # Hidden option for backwards compatibility
        parser.add_argument('-s', '--source-dir', help=argparse.SUPPRESS)
        parser.add_argument('-d', '--build-dir',
                            help='build directory to create or use')
        self.add_force_arg(parser)
        group = parser.add_argument_group('cmake and build tool')
        group.add_argument('-c', '--cmake', action='store_true',
                           help='force a cmake run')
        group.add_argument('--cmake-only', action='store_true',
                           help="just run cmake; don't build (implies -c)")
        group.add_argument('--domain', action='append',
                           help='''execute build tool (make or ninja) only for
                           given domain''')
        group.add_argument('-t', '--target',
                           help='''run build system target TARGET
                           (try "-t usage")''')
        group.add_argument('-T', '--test-item',
                           help='''Build based on test data in testcase.yaml
                           or sample.yaml. If source directory is not used
                           an argument has to be defined as
                           SOURCE_PATH/TEST_NAME.
                           E.g. samples/hello_world/sample.basic.helloworld.
                           If source directory is passed
                           then "TEST_NAME" is enough.''')
        group.add_argument('-o', '--build-opt', default=[], action='append',
                           help='''options to pass to the build tool
                           (make or ninja); may be given more than once''')
        group.add_argument('-n', '--just-print', '--dry-run', '--recon',
                           dest='dry_run', action='store_true',
                           help="just print build commands; don't run them")
        group.add_argument('-S', '--snippet', dest='snippets', metavar='SNIPPET',
                           action='append', default=[],
                           help='''add the argument to SNIPPET; may be given
                           multiple times. Forces CMake to run again if given.
                           Do not use this option with manually specified
                           -DSNIPPET... cmake arguments: the results are
                           undefined''')
        group.add_argument('--shield', dest='shields', metavar='SHIELD',
                           action='append', default=[],
                           help='''add the argument to SHIELD; may be given
                           multiple times. Forces CMake to run again if given.
                           Do not use this option with manually specified
                           -DSHIELD... cmake arguments: the results are
                           undefined''')
        group = parser.add_mutually_exclusive_group()
        group.add_argument('--sysbuild', action='store_true',
                           help='''create multi domain build system''')
        group.add_argument('--no-sysbuild', action='store_true',
                           help='''do not create multi domain build system
                           (default)''')
        group = parser.add_argument_group('pristine builds',
                                          PRISTINE_DESCRIPTION)
        group.add_argument('-p', '--pristine', choices=['auto', 'always',
                           'never'], action=AlwaysIfMissing, nargs='?',
                           help='pristine build folder setting')
        return parser

    def do_run(self, args, remainder):
        """Entry point: resolve source/build dirs, run CMake if needed,
        then run the build tool for the requested domain(s)."""
        self.args = args        # Avoid having to pass them around
        self.config_board = config_get('board', None)
        log.dbg('args: {} remainder: {}'.format(args, remainder),
                level=log.VERBOSE_EXTREME)
        # Store legacy -s option locally
        source_dir = self.args.source_dir
        self._parse_remainder(remainder)
        # Parse testcase.yaml or sample.yaml files for additional options.
        if self.args.test_item:
            # we get path + testitem
            item = os.path.basename(self.args.test_item)
            if self.args.source_dir:
                test_path = self.args.source_dir
            else:
                test_path = os.path.dirname(self.args.test_item)
            if test_path and os.path.exists(test_path):
                self.args.source_dir = test_path
                if not self._parse_test_item(item):
                    log.die("No test metadata found")
            else:
                log.die("test item path does not exist")
        if source_dir:
            if self.args.source_dir:
                log.die("source directory specified twice:({} and {})".format(
                    source_dir, self.args.source_dir))
            self.args.source_dir = source_dir
        log.dbg('source_dir: {} cmake_opts: {}'.format(self.args.source_dir,
                                                       self.args.cmake_opts),
                level=log.VERBOSE_EXTREME)
        self._sanity_precheck()
        self._setup_build_dir()
        if args.pristine is not None:
            pristine = args.pristine
        else:
            # Load the pristine={auto, always, never} configuration value
            pristine = config_get('pristine', 'never')
            if pristine not in ['auto', 'always', 'never']:
                log.wrn(
                    'treating unknown build.pristine value "{}" as "never"'.
                    format(pristine))
                pristine = 'never'
        self.auto_pristine = pristine == 'auto'
        log.dbg('pristine: {} auto_pristine: {}'.format(pristine,
                                                        self.auto_pristine),
                level=log.VERBOSE_VERY)
        if is_zephyr_build(self.build_dir):
            if pristine == 'always':
                self._run_pristine()
                self.run_cmake = True
            else:
                self._update_cache()
                # Any of these options implies re-running CMake.
                if (self.args.cmake or self.args.cmake_opts or
                        self.args.cmake_only or self.args.snippets or
                        self.args.shields):
                    self.run_cmake = True
        else:
            self.run_cmake = True
        self.source_dir = self._find_source_dir()
        self._sanity_check()
        board, origin = self._find_board()
        self._run_cmake(board, origin, self.args.cmake_opts)
        if args.cmake_only:
            return
        # Re-check and re-read the cache: CMake may have just created it.
        self._sanity_check()
        self._update_cache()
        self.domains = load_domains(self.build_dir)
        self._run_build(args.target, args.domain)

    def _find_board(self):
        """Return (board, origin) where origin names where the board came
        from: CMakeCache.txt, command line, env, or configfile."""
        board, origin = None, None
        if self.cmake_cache:
            board, origin = (self.cmake_cache.get('CACHED_BOARD'),
                             'CMakeCache.txt')
            # A malformed CMake cache may exist, but not have a board.
            # This happens if there's a build error from a previous run.
            if board is not None:
                return (board, origin)
        if self.args.board:
            board, origin = self.args.board, 'command line'
        elif 'BOARD' in os.environ:
            board, origin = os.environ['BOARD'], 'env'
        elif self.config_board is not None:
            board, origin = self.config_board, 'configfile'
        return board, origin

    def _parse_remainder(self, remainder):
        """Split unknown args into an optional source_dir and cmake_opts."""
        self.args.source_dir = None
        self.args.cmake_opts = None
        try:
            # Only one source_dir is allowed, as the first positional arg
            if remainder[0] != _ARG_SEPARATOR:
                self.args.source_dir = remainder[0]
                remainder = remainder[1:]
            # Only the first argument separator is consumed, the rest are
            # passed on to CMake
            if remainder[0] == _ARG_SEPARATOR:
                remainder = remainder[1:]
                if remainder:
                    self.args.cmake_opts = remainder
        except IndexError:
            # Running out of arguments at any point above is fine.
            pass

    def _parse_test_item(self, test_item):
        """Read sample.yaml/testcase.yaml in source_dir and translate the
        named test item's metadata into extra CMake options.

        Returns True if any test metadata file was found."""
        found_test_metadata = False
        for yp in ['sample.yaml', 'testcase.yaml']:
            yf = os.path.join(self.args.source_dir, yp)
            if not os.path.exists(yf):
                continue
            found_test_metadata = True
            with open(yf, 'r') as stream:
                try:
                    y = yaml.safe_load(stream)
                except yaml.YAMLError as exc:
                    log.die(exc)
            common = y.get('common')
            tests = y.get('tests')
            if not tests:
                log.die(f"No tests found in {yf}")
            if test_item not in tests:
                log.die(f"Test item {test_item} not found in {yf}")
            item = tests.get(test_item)
            sysbuild = False
            extra_dtc_overlay_files = []
            extra_overlay_confs = []
            extra_conf_files = []
            required_snippets = []
            # 'common' applies to all tests; the item's own section wins
            # for scalar values like 'sysbuild'.
            for section in [common, item]:
                if not section:
                    continue
                sysbuild = section.get('sysbuild', sysbuild)
                for data in [
                    'extra_args',
                    'extra_configs',
                    'extra_conf_files',
                    'extra_overlay_confs',
                    'extra_dtc_overlay_files',
                    'required_snippets'
                ]:
                    extra = section.get(data)
                    if not extra:
                        continue
                    if isinstance(extra, str):
                        arg_list = extra.split(" ")
                    else:
                        arg_list = extra
                    if data == 'extra_configs':
                        # NOTE(review): '\"' is just '"' in Python, so this
                        # replace() is a no-op — confirm intended escaping.
                        args = ["-D{}".format(arg.replace('"', '\"')) for arg in arg_list]
                    elif data == 'extra_args':
                        # Retain quotes around config options
                        config_options = [arg for arg in arg_list if arg.startswith("CONFIG_")]
                        non_config_options = [arg for arg in arg_list if not arg.startswith("CONFIG_")]
                        args = ["-D{}".format(a.replace('"', '\"')) for a in config_options]
                        args.extend(["-D{}".format(arg.replace('"', '')) for arg in non_config_options])
                    elif data == 'extra_conf_files':
                        extra_conf_files.extend(arg_list)
                        continue
                    elif data == 'extra_overlay_confs':
                        extra_overlay_confs.extend(arg_list)
                        continue
                    elif data == 'extra_dtc_overlay_files':
                        extra_dtc_overlay_files.extend(arg_list)
                        continue
                    elif data == 'required_snippets':
                        required_snippets.extend(arg_list)
                        continue
                    if self.args.cmake_opts:
                        self.args.cmake_opts.extend(args)
                    else:
                        self.args.cmake_opts = args
            self.args.sysbuild = sysbuild
        if found_test_metadata:
            # Collect the list-valued settings into -D options.
            args = []
            if extra_conf_files:
                args.append(f"CONF_FILE=\"{';'.join(extra_conf_files)}\"")
            if extra_dtc_overlay_files:
                args.append(f"DTC_OVERLAY_FILE=\"{';'.join(extra_dtc_overlay_files)}\"")
            if extra_overlay_confs:
                args.append(f"OVERLAY_CONFIG=\"{';'.join(extra_overlay_confs)}\"")
            if required_snippets:
                args.append(f"SNIPPET=\"{';'.join(required_snippets)}\"")
            # Build the final argument list
            args_expanded = ["-D{}".format(a.replace('"', '')) for a in args]
            if self.args.cmake_opts:
                self.args.cmake_opts.extend(args_expanded)
            else:
                self.args.cmake_opts = args_expanded
        return found_test_metadata

    def _sanity_precheck(self):
        """Early checks on the user-supplied source directory (forceable)."""
        app = self.args.source_dir
        if app:
            self.check_force(
                os.path.isdir(app),
                'source directory {} does not exist'.format(app))
            self.check_force(
                'CMakeLists.txt' in os.listdir(app),
                "{} doesn't contain a CMakeLists.txt".format(app))

    def _update_cache(self):
        """Re-read CMakeCache.txt if it exists; otherwise leave cache as-is."""
        try:
            self.cmake_cache = CMakeCache.from_build_dir(self.build_dir)
        except FileNotFoundError:
            pass

    def _setup_build_dir(self):
        # Initialize build_dir and created_build_dir attributes.
        # If we created the build directory, we must run CMake.
        log.dbg('setting up build directory', level=log.VERBOSE_EXTREME)
        # The CMake Cache has not been loaded yet, so this is safe
        board, _ = self._find_board()
        source_dir = self._find_source_dir()
        app = os.path.split(source_dir)[1]
        build_dir = find_build_dir(self.args.build_dir, board=board,
                                   source_dir=source_dir, app=app)
        if not build_dir:
            log.die('Unable to determine a default build folder. Check '
                    'your build.dir-fmt configuration option')
        if os.path.exists(build_dir):
            if not os.path.isdir(build_dir):
                log.die('build directory {} exists and is not a directory'.
                        format(build_dir))
        else:
            os.makedirs(build_dir, exist_ok=False)
            self.created_build_dir = True
            self.run_cmake = True
        self.build_dir = build_dir

    def _find_source_dir(self):
        # Initialize source_dir attribute, either from command line argument,
        # implicitly from the build directory's CMake cache, or using the
        # default (current working directory).
        log.dbg('setting up source directory', level=log.VERBOSE_EXTREME)
        if self.args.source_dir:
            source_dir = self.args.source_dir
        elif self.cmake_cache:
            # Try successively older cache variable names.
            source_dir = self.cmake_cache.get('APP_DIR')
            if not source_dir:
                source_dir = self.cmake_cache.get('APPLICATION_SOURCE_DIR')
            if not source_dir:
                source_dir = self.cmake_cache.get('CMAKE_HOME_DIRECTORY')
            if not source_dir:
                # This really ought to be there. The build directory
                # must be corrupted somehow. Let's see what we can do.
                log.die('build directory', self.build_dir,
                        'CMake cache has no CMAKE_HOME_DIRECTORY;',
                        'please give a source_dir')
        else:
            source_dir = os.getcwd()
        return os.path.abspath(source_dir)

    def _sanity_check_source_dir(self):
        """Abort (or warn under --force) on suspicious source directories."""
        if self.source_dir == self.build_dir:
            # There's no forcing this.
            log.die('source and build directory {} cannot be the same; '
                    'use --build-dir {} to specify a build directory'.
                    format(self.source_dir, self.build_dir))
        srcrel = os.path.relpath(self.source_dir)
        self.check_force(
            not is_zephyr_build(self.source_dir),
            'it looks like {srcrel} is a build directory: '
            'did you mean --build-dir {srcrel} instead?'.
            format(srcrel=srcrel))
        self.check_force(
            'CMakeLists.txt' in os.listdir(self.source_dir),
            'source directory "{srcrel}" does not contain '
            'a CMakeLists.txt; is this really what you '
            'want to build? (Use -s SOURCE_DIR to specify '
            'the application source directory)'.
            format(srcrel=srcrel))

    def _sanity_check(self):
        # Sanity check the build configuration.
        # Side effect: may update cmake_cache attribute.
        log.dbg('sanity checking the build', level=log.VERBOSE_EXTREME)
        self._sanity_check_source_dir()
        if not self.cmake_cache:
            return          # That's all we can check without a cache.
        if "CMAKE_PROJECT_NAME" not in self.cmake_cache:
            # This happens sometimes when a build system is not
            # completely generated due to an error during the
            # CMake configuration phase.
            self.run_cmake = True
        cached_proj = self.cmake_cache.get('APPLICATION_SOURCE_DIR')
        cached_app = self.cmake_cache.get('APP_DIR')
        # if APP_DIR is None but APPLICATION_SOURCE_DIR is set, that indicates
        # an older build folder, this still requires pristine.
        if cached_app is None and cached_proj:
            cached_app = cached_proj
        log.dbg('APP_DIR:', cached_app, level=log.VERBOSE_EXTREME)
        source_abs = (os.path.abspath(self.args.source_dir)
                      if self.args.source_dir else None)
        cached_abs = os.path.abspath(cached_app) if cached_app else None
        log.dbg('pristine:', self.auto_pristine, level=log.VERBOSE_EXTREME)
        # If the build directory specifies a source app, make sure it's
        # consistent with --source-dir.
        apps_mismatched = (source_abs and cached_abs and
                           pathlib.Path(source_abs).resolve() != pathlib.Path(cached_abs).resolve())
        self.check_force(
            not apps_mismatched or self.auto_pristine,
            'Build directory "{}" is for application "{}", but source '
            'directory "{}" was specified; please clean it, use --pristine, '
            'or use --build-dir to set another build directory'.
            format(self.build_dir, cached_abs, source_abs))
        if apps_mismatched:
            self.run_cmake = True  # If they insist, we need to re-run cmake.
        # If CACHED_BOARD is not defined, we need some other way to
        # find the board.
        cached_board = self.cmake_cache.get('CACHED_BOARD')
        log.dbg('CACHED_BOARD:', cached_board, level=log.VERBOSE_EXTREME)
        # If apps_mismatched and self.auto_pristine are true, we will
        # run pristine on the build, invalidating the cached
        # board. In that case, we need some way of getting the board.
        self.check_force((cached_board and
                          not (apps_mismatched and self.auto_pristine))
                         or self.args.board or self.config_board or
                         os.environ.get('BOARD'),
                         'Cached board not defined, please provide it '
                         '(provide --board, set default with '
                         '"west config build.board <BOARD>", or set '
                         'BOARD in the environment)')
        # Check consistency between cached board and --board.
        boards_mismatched = (self.args.board and cached_board and
                             self.args.board != cached_board)
        self.check_force(
            not boards_mismatched or self.auto_pristine,
            'Build directory {} targets board {}, but board {} was specified. '
            '(Clean the directory, use --pristine, or use --build-dir to '
            'specify a different one.)'.
            format(self.build_dir, cached_board, self.args.board))
        if self.auto_pristine and (apps_mismatched or boards_mismatched):
            self._run_pristine()
            self.cmake_cache = None
            log.dbg('run_cmake:', True, level=log.VERBOSE_EXTREME)
            self.run_cmake = True
            # Tricky corner-case: The user has not specified a build folder but
            # there was one in the CMake cache. Since this is going to be
            # invalidated, reset to CWD and re-run the basic tests.
            if ((boards_mismatched and not apps_mismatched) and
                    (not source_abs and cached_abs)):
                self.source_dir = self._find_source_dir()
                self._sanity_check_source_dir()

    def _run_cmake(self, board, origin, cmake_opts):
        """Run the CMake configuration step if self.run_cmake is set.

        Note: the cmake_opts parameter is rebuilt below; user options come
        from self.args.cmake_opts."""
        if board is None and config_getboolean('board_warn', True):
            log.wrn('This looks like a fresh build and BOARD is unknown;',
                    "so it probably won't work. To fix, use",
                    '--board=<your-board>.')
            log.inf('Note: to silence the above message, run',
                    "'west config build.board_warn false'")
        if not self.run_cmake:
            return
        _banner('generating a build system')
        if board is not None and origin != 'CMakeCache.txt':
            cmake_opts = ['-DBOARD={}'.format(board)]
        else:
            cmake_opts = []
        if self.args.cmake_opts:
            cmake_opts.extend(self.args.cmake_opts)
        if self.args.snippets:
            cmake_opts.append(f'-DSNIPPET={";".join(self.args.snippets)}')
        if self.args.shields:
            cmake_opts.append(f'-DSHIELD={";".join(self.args.shields)}')
        user_args = config_get('cmake-args', None)
        if user_args:
            cmake_opts.extend(shlex.split(user_args))
        config_sysbuild = config_getboolean('sysbuild', False)
        if self.args.sysbuild or (config_sysbuild and not self.args.no_sysbuild):
            cmake_opts.extend(['-S{}'.format(SYSBUILD_PROJ_DIR),
                               '-DAPP_DIR:PATH={}'.format(self.source_dir)])
        else:
            # self.args.no_sysbuild == True or config sysbuild False
            cmake_opts.extend(['-S{}'.format(self.source_dir)])
        # Invoke CMake from the current working directory using the
        # -S and -B options (officially introduced in CMake 3.13.0).
        # This is important because users expect invocations like this
        # to Just Work:
        #
        # west build -- -DOVERLAY_CONFIG=relative-path.conf
        final_cmake_args = ['-DWEST_PYTHON={}'.format(pathlib.Path(sys.executable).as_posix()),
                            '-B{}'.format(self.build_dir),
                            '-G{}'.format(config_get('generator',
                                                     DEFAULT_CMAKE_GENERATOR))]
        if cmake_opts:
            final_cmake_args.extend(cmake_opts)
        run_cmake(final_cmake_args, dry_run=self.args.dry_run)

    def _run_pristine(self):
        """Empty the build directory via cmake/pristine.cmake."""
        _banner('making build dir {} pristine'.format(self.build_dir))
        if not is_zephyr_build(self.build_dir):
            log.die('Refusing to run pristine on a folder that is not a '
                    'Zephyr build system')
        cache = CMakeCache.from_build_dir(self.build_dir)
        app_src_dir = cache.get('APPLICATION_SOURCE_DIR')
        app_bin_dir = cache.get('APPLICATION_BINARY_DIR')
        cmake_args = [f'-DBINARY_DIR={app_bin_dir}',
                      f'-DSOURCE_DIR={app_src_dir}',
                      '-P', cache['ZEPHYR_BASE'] + '/cmake/pristine.cmake']
        run_cmake(cmake_args, cwd=self.build_dir, dry_run=self.args.dry_run)

    def _run_build(self, target, domain):
        """Run the build tool, optionally for a specific target and/or
        a restricted list of domains."""
        if target:
            _banner('running target {}'.format(target))
        elif self.run_cmake:
            _banner('building application')
        extra_args = ['--target', target] if target else []
        if self.args.build_opt:
            extra_args.append('--')
            extra_args.extend(self.args.build_opt)
        if self.args.verbose:
            self._append_verbose_args(extra_args,
                                      not bool(self.args.build_opt))
        domains = load_domains(self.build_dir)
        build_dir_list = []
        if domain is None:
            # If no domain is specified, we just build top build dir as that
            # will build all domains.
            build_dir_list = [domains.get_top_build_dir()]
        else:
            _banner('building domain(s): {}'.format(' '.join(domain)))
            domain_list = domains.get_domains(domain)
            for d in domain_list:
                build_dir_list.append(d.build_dir)
        for b in build_dir_list:
            run_build(b, extra_args=extra_args,
                      dry_run=self.args.dry_run)

    def _append_verbose_args(self, extra_args, add_dashes):
        # These hacks are only needed for CMake versions earlier than
        # 3.14. When Zephyr's minimum version is at least that, we can
        # drop this nonsense and just run "cmake --build BUILD -v".
        self._update_cache()
        if not self.cmake_cache:
            return
        generator = self.cmake_cache.get('CMAKE_GENERATOR')
        if not generator:
            return
        # Substring matching is for things like "Eclipse CDT4 - Ninja".
        if 'Ninja' in generator:
            if add_dashes:
                extra_args.append('--')
            extra_args.append('-v')
        elif generator == 'Unix Makefiles':
            if add_dashes:
                extra_args.append('--')
            extra_args.append('VERBOSE=1')
``` | /content/code_sandbox/scripts/west_commands/build.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 5,830 |
```python
#
'''west "debug", "debugserver", and "attach" commands.'''
from textwrap import dedent
from west.commands import WestCommand
from run_common import add_parser_common, do_run_common
class Debug(WestCommand):
    '''west "debug" command: flash, then start an interactive debug session.'''

    def __init__(self):
        # Use the modern zero-argument super() form, consistent with the
        # other west command modules (e.g. blobs.py).
        super().__init__(
            'debug',
            # Keep this in sync with the string in west-commands.yml.
            'flash and interactively debug a Zephyr application',
            dedent('''
            Connect to the board, flash the program, and start a
            debugging session. Use "west attach" instead to attach
            a debugger without reflashing.'''),
            accepts_unknown_args=True)
        self.runner_key = 'debug-runner'  # in runners.yaml

    def do_add_parser(self, parser_adder):
        """Add the shared runner argument parser for this command."""
        return add_parser_common(self, parser_adder)

    def do_run(self, my_args, runner_args):
        """Delegate to the shared runner execution logic."""
        do_run_common(self, my_args, runner_args)
class DebugServer(WestCommand):
    '''west "debugserver" command: launch a debug server for the board.'''

    def __init__(self):
        # Use the modern zero-argument super() form, consistent with the
        # other west command modules (e.g. blobs.py).
        super().__init__(
            'debugserver',
            # Keep this in sync with the string in west-commands.yml.
            'connect to board and launch a debug server',
            dedent('''
            Connect to the board and launch a debug server which accepts
            incoming connections for debugging the connected board.
            The debug server binds to a known port, and allows client software
            started elsewhere to connect to it and debug the running
            Zephyr image.'''),
            accepts_unknown_args=True)
        self.runner_key = 'debug-runner'  # in runners.yaml

    def do_add_parser(self, parser_adder):
        """Add the shared runner argument parser for this command."""
        return add_parser_common(self, parser_adder)

    def do_run(self, my_args, runner_args):
        """Delegate to the shared runner execution logic."""
        do_run_common(self, my_args, runner_args)
class Attach(WestCommand):
    '''west "attach" command: debug without reflashing.'''

    def __init__(self):
        # Use the modern zero-argument super() form, consistent with the
        # other west command modules (e.g. blobs.py).
        super().__init__(
            'attach',
            # Keep this in sync with the string in west-commands.yml.
            'interactively debug a board',
            "Like \"west debug\", but doesn't reflash the program.",
            accepts_unknown_args=True)
        self.runner_key = 'debug-runner'  # in runners.yaml

    def do_add_parser(self, parser_adder):
        """Add the shared runner argument parser for this command."""
        return add_parser_common(self, parser_adder)

    def do_run(self, my_args, runner_args):
        """Delegate to the shared runner execution logic."""
        do_run_common(self, my_args, runner_args)
``` | /content/code_sandbox/scripts/west_commands/debug.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 517 |
```python
#
'''west "flash" command'''
from west.commands import WestCommand
from run_common import add_parser_common, do_run_common, get_build_dir
from pathlib import Path
class Flash(WestCommand):
    '''west "flash" command: program a board's flash with a new binary.'''

    def __init__(self):
        # Use the modern zero-argument super() form, consistent with the
        # other west command modules (e.g. blobs.py).
        super().__init__(
            'flash',
            # Keep this in sync with the string in west-commands.yml.
            'flash and run a binary on a board',
            "Permanently reprogram a board's flash with a new binary.",
            accepts_unknown_args=True)
        self.runner_key = 'flash-runner'  # in runners.yaml

    def do_add_parser(self, parser_adder):
        """Add the shared runner argument parser for this command."""
        return add_parser_common(self, parser_adder)

    def do_run(self, my_args, runner_args):
        """Run the flash runner; pass the domains file so multi-domain
        (sysbuild) builds flash every domain."""
        build_dir = get_build_dir(my_args)
        domains_file = Path(build_dir) / 'domains.yaml'
        do_run_common(self, my_args, runner_args, domain_file=domains_file)
``` | /content/code_sandbox/scripts/west_commands/flash.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 201 |
```python
#
import argparse
import os
import sys
from west import log
from west.commands import WestCommand
from zephyr_ext_common import ZEPHYR_SCRIPTS
# Resolve path to twister libs and add imports
twister_path = ZEPHYR_SCRIPTS
# twisterlib reads ZEPHYR_BASE from the environment; scripts/ lives
# directly under the zephyr base directory.
os.environ["ZEPHYR_BASE"] = str(twister_path.parent)
sys.path.insert(0, str(twister_path))
sys.path.insert(0, str(twister_path / "pylib" / "twister"))

# NOTE: these imports must stay below the sys.path manipulation above,
# since twisterlib is not installed as a package.
from twisterlib.environment import add_parse_arguments, parse_arguments
from twisterlib.twister_main import main

# Description shown by "west twister -h".
TWISTER_DESCRIPTION = """\
Convenience wrapper for twister. The below options are shared with the twister
script and have the same effects as if you ran twister directly. Refer to the
twister documentation for more information.
"""
class Twister(WestCommand):
    '''west "twister" command: thin wrapper around the twister test runner.'''

    def __init__(self):
        # Use the modern zero-argument super() form, consistent with the
        # other west command modules (e.g. blobs.py).
        super().__init__(
            "twister",
            # Keep this in sync with the string in west-commands.yml.
            "west twister wrapper",
            TWISTER_DESCRIPTION,
            accepts_unknown_args=True,
        )

    def do_add_parser(self, parser_adder):
        """Create the parser and extend it with twister's own arguments."""
        parser = parser_adder.add_parser(
            self.name,
            help=self.help,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description,
            allow_abbrev=False
        )
        parser = add_parse_arguments(parser)
        return parser

    def do_run(self, args, remainder):
        """Parse twister options and run twister, exiting with its status."""
        log.dbg(
            "args: {} remainder: {}".format(args, remainder), level=log.VERBOSE_EXTREME
        )
        options = parse_arguments(self.parser, args=remainder, options=args)
        # Default options are derived from an empty command line so twister
        # can tell which options the user changed.
        default_options = parse_arguments(self.parser, args=[], on_init=False)
        ret = main(options, default_options)
        sys.exit(ret)

    def _parse_arguments(self, args, options):
        """Helper function for testing purposes"""
        return parse_arguments(self.parser, args, options)
``` | /content/code_sandbox/scripts/west_commands/twister_cmd.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 422 |
```python
#
'''Helpers shared by multiple west extension command modules.
Note that common helpers used by the flash and debug extension
commands are in run_common -- that's for common code used by
commands which specifically execute runners.'''
import os
from pathlib import Path
from west import log
from west.commands import WestCommand
# This relies on this file being zephyr/scripts/foo/bar.py.
# If you move this file, you'll break it, so be careful.
THIS_ZEPHYR = Path(__file__).parent.parent.parent
# Allow an explicit ZEPHYR_BASE environment override; fall back to the
# location derived from this file's path.
ZEPHYR_BASE = Path(os.environ.get('ZEPHYR_BASE', THIS_ZEPHYR))

# FIXME we need a nicer way to handle imports from scripts and cmake than this.
# Well-known directories under the zephyr base.
ZEPHYR_SCRIPTS = ZEPHYR_BASE / 'scripts'
ZEPHYR_CMAKE = ZEPHYR_BASE / 'cmake'
class Forceable(WestCommand):
    '''WestCommand subclass for commands with a --force option.'''

    @staticmethod
    def add_force_arg(parser):
        '''Add a -f / --force option to the parser.'''
        parser.add_argument('-f', '--force', action='store_true',
                            help='ignore any errors and try to proceed')

    def check_force(self, cond, msg):
        '''Abort unless *cond* holds or the user passed --force.

        The "forced" predicate must be in self.args.forced.

        *cond* means everything is OK. When it is False and
        self.args.force is also False, log *msg* as an error and die;
        otherwise execution proceeds normally.
        '''
        if cond or self.args.force:
            return
        log.err(msg)
        log.die('refusing to proceed without --force due to above error')
``` | /content/code_sandbox/scripts/west_commands/zephyr_ext_common.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 388 |
```python
#
import argparse
import os
from pathlib import Path
import sys
import textwrap
from urllib.parse import urlparse
from west import log
from west.commands import WestCommand
from zephyr_ext_common import ZEPHYR_BASE
sys.path.append(os.fspath(Path(__file__).parent.parent))
import zephyr_module
class Blobs(WestCommand):
    '''west "blobs" command: list, fetch, and clean module binary blobs.'''

    # Default format string for "west blobs list".
    DEFAULT_LIST_FMT = '{module} {status} {path} {type} {abspath}'

    def __init__(self):
        super().__init__(
            'blobs',
            # Keep this in sync with the string in west-commands.yml.
            'work with binary blobs',
            'Work with binary blobs',
            accepts_unknown_args=False)

    def do_add_parser(self, parser_adder):
        """Create the 'west blobs' argument parser."""
        parser = parser_adder.add_parser(
            self.name,
            help=self.help,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description,
            epilog=textwrap.dedent(f'''\
            FORMAT STRINGS
            --------------
            Blobs are listed using a Python 3 format string. Arguments
            to the format string are accessed by name.
            The default format string is:
            "{self.DEFAULT_LIST_FMT}"
            The following arguments are available:
            - module: name of the module that contains this blob
            - abspath: blob absolute path
            - status: short status (A: present, M: hash failure, D: not present)
            - path: blob local path from <module>/zephyr/blobs/
            - sha256: blob SHA256 hash in hex
            - type: type of blob
            - version: version string
            - license_path: path to the license file for the blob
            - uri: URI to the remote location of the blob
            - description: blob text description
            - doc-url: URL to the documentation for this blob
            '''))
        # Remember to update west-completion.bash if you add or remove
        # flags
        parser.add_argument('subcmd', nargs=1,
                            choices=['list', 'fetch', 'clean'],
                            help='sub-command to execute')
        parser.add_argument('modules', metavar='MODULE', nargs='*',
                            help='''zephyr modules to operate on;
                            all modules will be used if not given''')
        group = parser.add_argument_group('west blob list options')
        group.add_argument('-f', '--format',
                           help='''format string to use to list each blob;
                           see FORMAT STRINGS below''')
        return parser

    def get_blobs(self, args):
        """Return blob metadata dicts for the requested (or all) modules.

        Dies if any requested module name is unknown."""
        blobs = []
        modules = args.modules
        all_modules = zephyr_module.parse_modules(ZEPHYR_BASE, self.manifest)
        all_names = [m.meta.get('name', None) for m in all_modules]
        unknown = set(modules) - set(all_names)
        # Truthiness check instead of len(): empty set means no unknowns.
        if unknown:
            log.die(f'Unknown module(s): {unknown}')
        for module in all_modules:
            # Filter by module
            module_name = module.meta.get('name', None)
            if modules and module_name not in modules:
                continue
            blobs += zephyr_module.process_blobs(module.project, module.meta)
        return blobs

    def list(self, args):
        """Print one line per blob using the configured format string."""
        blobs = self.get_blobs(args)
        fmt = args.format or self.DEFAULT_LIST_FMT
        for blob in blobs:
            log.inf(fmt.format(**blob))

    def ensure_folder(self, path):
        """Create the parent directory of *path* if it does not exist."""
        path.parent.mkdir(parents=True, exist_ok=True)

    def fetch_blob(self, url, path):
        """Download *url* to *path* using the fetcher for its URL scheme."""
        scheme = urlparse(url).scheme
        log.dbg(f'Fetching {path} with {scheme}')
        import fetchers
        fetcher = fetchers.get_fetcher_cls(scheme)
        log.dbg(f'Found fetcher: {fetcher}')
        inst = fetcher()
        self.ensure_folder(path)
        inst.fetch(url, path)

    # Compare the checksum of a file we've just downloaded
    # to the digest in blob metadata, warn user if they differ.
    def verify_blob(self, blob):
        log.dbg('Verifying blob {module}: {abspath}'.format(**blob))
        status = zephyr_module.get_blob_status(blob['abspath'], blob['sha256'])
        if status == zephyr_module.BLOB_OUTDATED:
            log.err(textwrap.dedent(
                f'''\
                The checksum of the downloaded file does not match that
                in the blob metadata:
                - if it is not certain that the download was successful,
                  try running 'west blobs fetch {blob['module']}'
                  to re-download the file
                - if the error persists, please consider contacting
                  the maintainers of the module so that they can check
                  the corresponding blob metadata
                Module: {blob['module']}
                Blob: {blob['path']}
                URL: {blob['url']}
                Info: {blob['description']}'''))

    def fetch(self, args):
        """Download every missing or outdated blob, then verify it."""
        blobs = self.get_blobs(args)
        for blob in blobs:
            if blob['status'] == zephyr_module.BLOB_PRESENT:
                log.dbg('Blob {module}: {abspath} is up to date'.format(**blob))
                continue
            log.inf('Fetching blob {module}: {abspath}'.format(**blob))
            self.fetch_blob(blob['url'], blob['abspath'])
            self.verify_blob(blob)

    def clean(self, args):
        """Delete every locally present blob file."""
        blobs = self.get_blobs(args)
        for blob in blobs:
            if blob['status'] == zephyr_module.BLOB_NOT_PRESENT:
                log.dbg('Blob {module}: {abspath} not in filesystem'.format(**blob))
                continue
            log.inf('Deleting blob {module}: {status} {abspath}'.format(**blob))
            blob['abspath'].unlink()

    def do_run(self, args, _):
        """Dispatch to the list/fetch/clean method named by the sub-command."""
        log.dbg(f'subcmd: \'{args.subcmd[0]}\' modules: {args.modules}')
        subcmd = getattr(self, args.subcmd[0])
        if args.subcmd[0] != 'list' and args.format is not None:
            # Plain string: there are no placeholders to interpolate here.
            log.die('unexpected --format argument; this is a "west blobs list" option')
        subcmd(args)
``` | /content/code_sandbox/scripts/west_commands/blobs.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,329 |
```python
#
import argparse
import os
from unittest.mock import patch, call
import pytest
from runners.canopen_program import CANopenBinaryRunner
from conftest import RC_KERNEL_BIN
#
# Test values
#
# CAN context names used to exercise the --can-context option.
TEST_DEF_CONTEXT = 'default'
TEST_ALT_CONTEXT = 'alternate'
#
# Test cases
#
# Cartesian product over: node id, CAN context, program number, confirm,
# confirm-only, timeout, SDO retries, SDO timeout, block transfer.
TEST_CASES = [(n, x, p, c, o, t, r, s, b)
              for n in range(1, 3)
              for x in (None, TEST_ALT_CONTEXT)
              for p in range(1, 3)
              for c in (False, True)
              for o in (False, True)
              for t in range(1, 3)
              for r in range(1, 3)
              for s in range(1, 3)
              # Bug fix: range(False, True) is range(0, 1), which yields
              # only 0 (False), so block transfer was never tested. Use an
              # explicit tuple like the other boolean axes.
              for b in (False, True)]
# Keep a reference to the real os.path.isfile so the patch below can
# delegate to it for everything except the fake kernel binary.
os_path_isfile = os.path.isfile


def os_path_isfile_patch(filename):
    """Pretend RC_KERNEL_BIN exists; defer to the real check otherwise."""
    return filename == RC_KERNEL_BIN or os_path_isfile(filename)
@pytest.mark.parametrize('test_case', TEST_CASES)
@patch('runners.canopen_program.CANopenProgramDownloader')
def test_canopen_program_create(cpd, test_case, runner_config):
'''Test CANopen runner created from command line parameters.'''
node_id, context, program_number, confirm, confirm_only, timeout, sdo_retries, sdo_timeout, block_transfer = test_case
args = ['--node-id', str(node_id)]
if context is not None:
args.extend(['--can-context', context])
if program_number:
args.extend(['--program-number', str(program_number)])
if not confirm:
args.append('--no-confirm')
if confirm_only:
args.append('--confirm-only')
if timeout:
args.extend(['--timeout', str(timeout)])
if sdo_retries:
args.extend(['--sdo-retries', str(sdo_retries)])
if sdo_timeout:
args.extend(['--sdo-timeout', str(sdo_timeout)])
if block_transfer:
args.append('--block_transfer')
mock = cpd.return_value
mock.flash_status.return_value = 0
mock.wait_for_flash_status_ok.return_value = 0
mock.swid.return_value = 0
parser = argparse.ArgumentParser(allow_abbrev=False)
CANopenBinaryRunner.add_parser(parser)
arg_namespace = parser.parse_args(args)
runner = CANopenBinaryRunner.create(runner_config, arg_namespace)
with patch('os.path.isfile', side_effect=os_path_isfile_patch):
runner.run('flash')
cpd.assert_called_once()
if context:
assert cpd.call_args == call(node_id=node_id,
can_context=context,
logger=runner.logger,
program_number=program_number,
sdo_retries=sdo_retries,
sdo_timeout=sdo_timeout,
block_transfer=block_transfer)
else:
assert cpd.call_args == call(node_id=node_id,
can_context=TEST_DEF_CONTEXT,
logger=runner.logger,
program_number=program_number,
sdo_retries=sdo_retries,
sdo_timeout=sdo_timeout,
block_transfer=block_transfer)
mock.connect.assert_called_once()
if confirm_only:
mock.wait_for_flash_status_ok.assert_called_with(timeout)
mock.swid.assert_called_once()
mock.enter_pre_operational.assert_called_once()
mock.zephyr_confirm_program.assert_called_once()
mock.clear_program.assert_not_called()
mock.stop_program.assert_not_called()
mock.download.assert_not_called()
mock.start_program.assert_not_called()
mock.wait_for_bootup.assert_not_called()
else:
mock.enter_pre_operational.assert_called()
mock.wait_for_flash_status_ok.assert_called_with(timeout)
mock.swid.assert_called()
mock.stop_program.assert_called_once()
mock.clear_program.assert_called_once()
mock.download.assert_called_once_with(RC_KERNEL_BIN)
mock.start_program.assert_called_once()
mock.wait_for_bootup.assert_called_once_with(timeout)
if confirm:
mock.zephyr_confirm_program.assert_called_once()
else:
mock.zephyr_confirm_program.assert_not_called()
mock.disconnect.assert_called_once()
``` | /content/code_sandbox/scripts/west_commands/tests/test_canopen_program.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 886 |
```python
#
import argparse
import os
from unittest.mock import patch, call
import pytest
from runners.dfu import DfuUtilBinaryRunner, DfuSeConfig
from conftest import RC_KERNEL_BIN
# Executables and option values used by the dfu-util test cases below.
DFU_UTIL = 'dfu-util'        # default executable name
TEST_EXE = 'test-dfu-util'   # override executable
TEST_PID = '0000:9999'
TEST_PID_RES = '-d,{}'.format(TEST_PID)
TEST_ALT_INT = '1'           # altsetting given as an integer string
TEST_ALT_STR = 'alt-name'    # altsetting given as a name
TEST_BIN_NAME = 'test-img.bin'
TEST_DFUSE_ADDR = 2
TEST_DFUSE_OPTS = 'test-dfuse-opt'
# DfuSe configs with and without extra options; *_RES are the resulting
# '-s' argument strings ('<hex addr>:<options>').
TEST_DCFG_OPT = DfuSeConfig(address=TEST_DFUSE_ADDR, options='test-dfuse-opt')
TEST_DCFG_OPT_RES = '{}:{}'.format(hex(TEST_DFUSE_ADDR), TEST_DFUSE_OPTS)
TEST_DCFG_NOPT = DfuSeConfig(address=TEST_DFUSE_ADDR, options='')
TEST_DCFG_NOPT_RES = '{}:'.format(hex(TEST_DFUSE_ADDR))
# A map from a test case to the expected dfu-util call.
# Test cases are (exe, alt, dfuse_config, img) tuples.
EXPECTED_COMMAND = {
    (DFU_UTIL, TEST_ALT_INT, None, RC_KERNEL_BIN):
    [DFU_UTIL, TEST_PID_RES, '-a', TEST_ALT_INT, '-D', RC_KERNEL_BIN],
    (DFU_UTIL, TEST_ALT_STR, None, RC_KERNEL_BIN):
    [DFU_UTIL, TEST_PID_RES, '-a', TEST_ALT_STR, '-D', RC_KERNEL_BIN],
    (TEST_EXE, TEST_ALT_INT, None, RC_KERNEL_BIN):
    [TEST_EXE, TEST_PID_RES, '-a', TEST_ALT_INT, '-D', RC_KERNEL_BIN],
    (DFU_UTIL, TEST_ALT_INT, None, TEST_BIN_NAME):
    [DFU_UTIL, TEST_PID_RES, '-a', TEST_ALT_INT, '-D', TEST_BIN_NAME],
    (DFU_UTIL, TEST_ALT_INT, TEST_DCFG_OPT, RC_KERNEL_BIN):
    [DFU_UTIL, TEST_PID_RES, '-s', TEST_DCFG_OPT_RES, '-a', TEST_ALT_INT,
     '-D', RC_KERNEL_BIN],
    (DFU_UTIL, TEST_ALT_INT, TEST_DCFG_NOPT, RC_KERNEL_BIN):
    [DFU_UTIL, TEST_PID_RES, '-s', TEST_DCFG_NOPT_RES, '-a', TEST_ALT_INT,
     '-D', RC_KERNEL_BIN],
}
def find_device_patch():
    '''Replacement for DfuUtilBinaryRunner.find_device(): always claim a
    DFU device is attached.'''
    return True
def require_patch(program):
    '''Replacement for ZephyrBinaryRunner.require(): only the two known
    dfu-util executables may be requested.'''
    expected = [DFU_UTIL, TEST_EXE]
    assert program in expected
# Real os.path.isfile, saved so the patch can fall through to it.
os_path_isfile = os.path.isfile

def os_path_isfile_patch(filename):
    '''os.path.isfile() stand-in: the fake kernel binary always exists;
    everything else is checked for real.'''
    return filename == RC_KERNEL_BIN or os_path_isfile(filename)
def id_fn(tc):
    '''Build a readable pytest ID from an (exe, alt, dfuse_config, img) tuple.'''
    exe, alt, dfuse_config, img = tc
    return f'exe={exe},alt={alt},dfuse_config={dfuse_config},img={img}'
@pytest.mark.parametrize('tc', [
    # (exe, alt, dfuse_config, img)
    (DFU_UTIL, TEST_ALT_INT, None, RC_KERNEL_BIN),
    (DFU_UTIL, TEST_ALT_STR, None, RC_KERNEL_BIN),
    (TEST_EXE, TEST_ALT_INT, None, RC_KERNEL_BIN),
    (DFU_UTIL, TEST_ALT_INT, None, TEST_BIN_NAME),
    (DFU_UTIL, TEST_ALT_INT, TEST_DCFG_OPT, RC_KERNEL_BIN),
    (DFU_UTIL, TEST_ALT_INT, TEST_DCFG_NOPT, RC_KERNEL_BIN),
], ids=id_fn)
@patch('runners.dfu.DfuUtilBinaryRunner.find_device',
       side_effect=find_device_patch)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_dfu_util_init(cc, req, find_device, tc, runner_config):
    '''Test commands using a runner created by constructor.

    Verifies that the constructed runner requires the right executable,
    probes for a device, and issues exactly the dfu-util command listed
    in EXPECTED_COMMAND for this test case.
    '''
    exe, alt, dfuse_config, img = tc
    runner = DfuUtilBinaryRunner(runner_config, TEST_PID, alt, img, exe=exe,
                                 dfuse_config=dfuse_config)
    # isfile is patched so the fake kernel binary appears to exist.
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    assert find_device.called
    assert req.call_args_list == [call(exe)]
    assert cc.call_args_list == [call(EXPECTED_COMMAND[tc])]
def get_flash_address_patch(args, bcfg):
    '''Replacement for ZephyrBinaryRunner.get_flash_address(): always
    report the fixed DfuSe test address.'''
    del args, bcfg  # unused; signature must match the patched method
    return TEST_DFUSE_ADDR
@pytest.mark.parametrize('tc', [
    # arg spec: (exe, alt, dfuse, modifiers, img)
    (None, TEST_ALT_INT, False, None, None),
    (None, TEST_ALT_STR, False, None, None),
    (TEST_EXE, TEST_ALT_INT, False, None, None),
    (None, TEST_ALT_INT, False, None, TEST_BIN_NAME),
    (None, TEST_ALT_INT, True, TEST_DFUSE_OPTS, None),
    (None, TEST_ALT_INT, True, None, None),
], ids=id_fn)
@patch('runners.dfu.DfuUtilBinaryRunner.find_device',
       side_effect=find_device_patch)
@patch('runners.core.ZephyrBinaryRunner.get_flash_address',
       side_effect=get_flash_address_patch)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_dfu_util_create(cc, req, gfa, find_device, tc, runner_config, tmpdir):
    '''Test commands using a runner created from command line parameters.

    Builds the equivalent west command line for each test case (None
    fields fall back to runner defaults), then checks the resulting
    dfu-util invocation against EXPECTED_COMMAND.
    '''
    exe, alt, dfuse, modifiers, img = tc
    args = ['--pid', TEST_PID, '--alt', alt]
    if img:
        args.extend(['--img', img])
    if dfuse:
        args.append('--dfuse')
        if modifiers:
            args.extend(['--dfuse-modifiers', modifiers])
        else:
            args.extend(['--dfuse-modifiers', ''])
    if exe:
        args.extend(['--dfu-util', exe])
    # The runner reads the build's .config; provide an empty one.
    (tmpdir / 'zephyr').mkdir()
    with open(os.fspath(tmpdir / 'zephyr' / '.config'), 'w') as f:
        f.write('\n')
    runner_config = runner_config._replace(build_dir=os.fspath(tmpdir))
    parser = argparse.ArgumentParser(allow_abbrev=False)
    DfuUtilBinaryRunner.add_parser(parser)
    arg_namespace = parser.parse_args(args)
    runner = DfuUtilBinaryRunner.create(runner_config, arg_namespace)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    # Map the command-line case onto an EXPECTED_COMMAND key, filling in
    # the defaults the runner would have applied.
    if dfuse:
        cfg = DfuSeConfig(address=TEST_DFUSE_ADDR, options=modifiers or '')
    else:
        cfg = None
    map_tc = (exe or DFU_UTIL, alt, cfg, img or RC_KERNEL_BIN)
    assert find_device.called
    assert req.call_args_list == [call(exe or DFU_UTIL)]
    assert cc.call_args_list == [call(EXPECTED_COMMAND[map_tc])]
``` | /content/code_sandbox/scripts/west_commands/tests/test_dfu_util.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,480 |
```python
#
import argparse
import platform
from unittest.mock import patch, call
import pytest
from runners.dediprog import DediProgBinaryRunner
from conftest import RC_KERNEL_BIN
# dpcmd executable name differs on Windows.
DPCMD_EXE = 'dpcmd.exe' if platform.system() == 'Windows' else 'dpcmd'
# Map from (spi_image, vcc) test cases to the expected dpcmd invocation;
# vcc=None means the '--vcc' option is omitted entirely.
EXPECTED_COMMAND = {
    (RC_KERNEL_BIN, None):
    [DPCMD_EXE,
     '--auto', RC_KERNEL_BIN,
     '-x', 'ff',
     '--silent', '--verify'],
    (RC_KERNEL_BIN, '0'):
    [DPCMD_EXE,
     '--auto', RC_KERNEL_BIN, '--vcc', '0',
     '-x', 'ff',
     '--silent', '--verify'],
    (RC_KERNEL_BIN, '1'):
    [DPCMD_EXE,
     '--auto', RC_KERNEL_BIN, '--vcc', '1',
     '-x', 'ff',
     '--silent', '--verify'],
}
def require_patch(program):
    '''Replacement for ZephyrBinaryRunner.require(): only dpcmd may be
    requested.'''
    assert program == DPCMD_EXE
def id_fn(tc):
    '''Build a readable pytest ID from a (spi_image, vcc) tuple.'''
    spi_image, vcc = tc
    return f'spi_image={spi_image},vcc={vcc}'
@pytest.mark.parametrize('tc', [
    (RC_KERNEL_BIN, None),
    (RC_KERNEL_BIN, '0'),
    (RC_KERNEL_BIN, '1'),
], ids=id_fn)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_dediprog_init(cc, req, tc, runner_config):
    '''Test commands using a runner created by constructor.

    retries=0 keeps the runner from re-invoking dpcmd, so exactly one
    call is expected.
    '''
    spi_image, vcc = tc
    runner = DediProgBinaryRunner(runner_config, spi_image=spi_image,
                                  vcc=vcc, retries=0)
    runner.run('flash')
    assert cc.call_args_list == [call(EXPECTED_COMMAND[tc])]
@pytest.mark.parametrize('tc', [
    (RC_KERNEL_BIN, None),
    (RC_KERNEL_BIN, '0'),
    (RC_KERNEL_BIN, '1'),
], ids=id_fn)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_dediprog_create(cc, req, tc, runner_config):
    '''Test commands using a runner created from command line parameters.

    Same expectations as test_dediprog_init, but the runner is built via
    add_parser()/parse_args()/create() instead of the constructor.
    '''
    spi_image, vcc = tc
    args = ['--spi-image', spi_image, '--retries', '0']
    if vcc:
        args.extend(['--vcc', vcc])
    parser = argparse.ArgumentParser(allow_abbrev=False)
    DediProgBinaryRunner.add_parser(parser)
    arg_namespace = parser.parse_args(args)
    runner = DediProgBinaryRunner.create(runner_config, arg_namespace)
    runner.run('flash')
    assert cc.call_args_list == [call(EXPECTED_COMMAND[tc])]
``` | /content/code_sandbox/scripts/west_commands/tests/test_dediprog.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 605 |
```python
#
import argparse
import os
import platform
from unittest.mock import patch, call
import pytest
from runners.stm32flash import Stm32flashBinaryRunner
from conftest import RC_KERNEL_BIN
# Fixed parameter values fed to the stm32flash runner in every test.
TEST_CMD = 'stm32flash'
TEST_DEVICE = '/dev/ttyUSB0'
if platform.system() == 'Darwin':
    # macOS names the USB-serial bridge differently.
    TEST_DEVICE = '/dev/tty.SLAB_USBtoUART'
TEST_BAUD = '115200'
TEST_FORCE_BINARY = False
TEST_ADDR = '0x08000000'
TEST_BIN_SIZE = '4095'
TEST_EXEC_ADDR = '0'
TEST_SERIAL_MODE = '8e1'
TEST_RESET = False
TEST_VERIFY = False
# Expected subprocesses to be run for each action. Using the
# runner_config fixture (and always specifying all necessary
# parameters) means we don't get 100% coverage, but it's a
# starting out point.
EXPECTED_COMMANDS = {
    'info':
    ([TEST_CMD,
      '-b', TEST_BAUD,
      '-m', TEST_SERIAL_MODE,
      TEST_DEVICE],),
    'erase':
    ([TEST_CMD,
      '-b', TEST_BAUD,
      '-m', TEST_SERIAL_MODE,
      # Erase size is the binary size rounded up to the next 4 KiB page:
      # ((size >> 12) + 1) << 12.
      '-S', TEST_ADDR + ":" + str((int(TEST_BIN_SIZE) >> 12) + 1 << 12),
      '-o', TEST_DEVICE],),
    'start':
    ([TEST_CMD,
      '-b', TEST_BAUD,
      '-m', TEST_SERIAL_MODE,
      '-g', TEST_EXEC_ADDR, TEST_DEVICE],),
    'write':
    ([TEST_CMD,
      '-b', TEST_BAUD,
      '-m', TEST_SERIAL_MODE,
      '-S', TEST_ADDR + ":" + TEST_BIN_SIZE,
      '-w', RC_KERNEL_BIN,
      TEST_DEVICE],),
}
def require_patch(program):
    '''Replacement for ZephyrBinaryRunner.require(): only stm32flash may
    be requested.'''
    assert program == TEST_CMD
def os_path_getsize_patch(filename):
    '''os.path.getsize() stand-in: report a fixed size for the fake
    kernel binary, and delegate to the real getsize otherwise.

    Fix: the fallback previously called os.path.isfile(), which returns
    a bool rather than a size — a copy/paste from the isfile patch below.
    '''
    if filename == RC_KERNEL_BIN:
        return TEST_BIN_SIZE
    return os.path.getsize(filename)
# Real os.path.isfile, saved so the patch can fall through to it.
os_path_isfile = os.path.isfile

def os_path_isfile_patch(filename):
    '''os.path.isfile() stand-in: the fake kernel binary always exists;
    everything else is checked for real.'''
    return filename == RC_KERNEL_BIN or os_path_isfile(filename)
@pytest.mark.parametrize('action', EXPECTED_COMMANDS)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_stm32flash_init(cc, req, action, runner_config):
    '''Test actions using a runner created by constructor.'''
    # For 'write' the exec address is passed as None; presumably the
    # runner derives it itself in that mode — TODO confirm against
    # runners/stm32flash.py.
    test_exec_addr = TEST_EXEC_ADDR
    if action == 'write':
        test_exec_addr = None
    runner = Stm32flashBinaryRunner(runner_config, device=TEST_DEVICE,
                                    action=action, baud=TEST_BAUD, force_binary=TEST_FORCE_BINARY,
                                    start_addr=TEST_ADDR, exec_addr=test_exec_addr,
                                    serial_mode=TEST_SERIAL_MODE, reset=TEST_RESET, verify=TEST_VERIFY)
    # Patch filesystem probes so the fake kernel binary appears to exist
    # and has the fixed test size.
    with patch('os.path.getsize', side_effect=os_path_getsize_patch):
        with patch('os.path.isfile', side_effect=os_path_isfile_patch):
            runner.run('flash')
    assert cc.call_args_list == [call(x) for x in EXPECTED_COMMANDS[action]]
@pytest.mark.parametrize('action', EXPECTED_COMMANDS)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_stm32flash_create(cc, req, action, runner_config):
    '''Test actions using a runner created from action line parameters.'''
    # Only 'start' needs an execution address on the command line.
    if action == 'start':
        args = ['--action', action, '--baud-rate', TEST_BAUD, '--start-addr', TEST_ADDR,
                '--execution-addr', TEST_EXEC_ADDR]
    else:
        args = ['--action', action, '--baud-rate', TEST_BAUD, '--start-addr', TEST_ADDR]
    parser = argparse.ArgumentParser(allow_abbrev=False)
    Stm32flashBinaryRunner.add_parser(parser)
    arg_namespace = parser.parse_args(args)
    runner = Stm32flashBinaryRunner.create(runner_config, arg_namespace)
    # Same filesystem patching as test_stm32flash_init.
    with patch('os.path.getsize', side_effect=os_path_getsize_patch):
        with patch('os.path.isfile', side_effect=os_path_isfile_patch):
            runner.run('flash')
    assert cc.call_args_list == [call(x) for x in EXPECTED_COMMANDS[action]]
``` | /content/code_sandbox/scripts/west_commands/tests/test_stm32flash.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 923 |
```python
#
'''Common fixtures for use testing the runner package.'''
import pytest
from runners.core import RunnerConfig, FileType
# Shared RunnerConfig field values used by every runner test module.
RC_BUILD_DIR = '/test/build-dir'
RC_BOARD_DIR = '/test/zephyr/boards/test-arch/test-board'
RC_KERNEL_ELF = 'test-zephyr.elf'
RC_KERNEL_EXE = 'test-zephyr.exe'
RC_KERNEL_HEX = 'test-zephyr.hex'
RC_KERNEL_BIN = 'test-zephyr.bin'
RC_GDB = 'test-none-gdb'
RC_OPENOCD = 'test-openocd'
RC_OPENOCD_SEARCH = ['/test/openocd/search']
@pytest.fixture
def runner_config():
    '''Fixture which provides a runners.core.RunnerConfig.

    Built entirely from the RC_* constants above so tests can compare
    against known values.
    '''
    return RunnerConfig(RC_BUILD_DIR, RC_BOARD_DIR, RC_KERNEL_ELF, RC_KERNEL_EXE,
                        RC_KERNEL_HEX, RC_KERNEL_BIN, None, FileType.OTHER,
                        gdb=RC_GDB, openocd=RC_OPENOCD,
                        openocd_search=RC_OPENOCD_SEARCH)
``` | /content/code_sandbox/scripts/west_commands/tests/conftest.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 217 |
```python
#
from argparse import Namespace
from twister_cmd import Twister
import pytest
import argparse
# Each case: 'r' is the raw remainder passed to twister, 'c' the
# expected clobber_output flag, 'test_only' the expected flag, and 'T'
# (when present) the expected testsuite_root list.
TEST_CASES = [
    {
        "r": [],
        "c": False,
        "test_only": False,
    },
    {
        "r": ["-c", "-T tests/ztest/base"],
        "c": True,
        "T": [" tests/ztest/base"],
        "test_only": False,
    },
    {
        "r": ["--test-only"],
        "c": False,
        "test_only": True,
    },
]
# Minimal west-style namespace; only the fields Twister reads.
ARGS = Namespace(
    help=None,
    zephyr_base=None,
    verbose=0,
    command="twister",
)
@pytest.mark.parametrize("test_case", TEST_CASES)
def test_parse_remainder(test_case):
    '''Check that the 'west twister' wrapper forwards remainder args to
    twister's own option parser correctly.'''
    twister = Twister()
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter,
        allow_abbrev=False
    )
    sub_p = parser.add_subparsers()
    twister.parser = twister.do_add_parser(sub_p)
    options = twister._parse_arguments(args=test_case["r"], options=None)
    assert options.clobber_output == test_case["c"]
    assert options.test_only == test_case["test_only"]
    if "T" in test_case:
        assert options.testsuite_root == test_case["T"]
``` | /content/code_sandbox/scripts/west_commands/tests/test_twister.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 283 |
```python
import argparse
import os
from pathlib import Path
from unittest.mock import patch
import pytest
from conftest import RC_KERNEL_ELF
from runners.nxp_s32dbg import NXPS32DebugProbeConfig, NXPS32DebugProbeRunner
# Probe/runner parameter values shared by all nxp_s32dbg test cases.
TEST_DEVICE = 's32dbg'
TEST_SPEED = 16000
TEST_SERVER_PORT = 45000
TEST_REMOTE_TIMEOUT = 30
TEST_CORE_NAME = 'R52_0_0'
TEST_SOC_NAME = 'S32Z270'
TEST_SOC_FAMILY_NAME = 's32e2z2'
TEST_START_ALL_CORES = True
TEST_S32DS_PATH_OVERRIDE = None
TEST_TOOL_OPT = ['--test-opt-1', '--test-opt-2']
TEST_RESET_TYPE = 'default'
TEST_RESET_DELAY = 0
# Paths within an S32 Design Studio installation.
TEST_S32DS_CMD = 's32ds'
TEST_SERVER_CMD = Path('S32DS') / 'tools' / 'S32Debugger' / 'Debugger' / 'Server' / 'gta' / 'gta'
TEST_ARM_GDB_CMD = Path('S32DS') / 'tools' / 'gdb-arm' / 'arm32-eabi' / 'bin' / 'arm-none-eabi-gdb-py'
TEST_S32DS_PYTHON_LIB = Path('S32DS') / 'build_tools' / 'msys32' / 'mingw32' / 'lib' / 'python2.7'
# Environment expected when the runner launches S32DS tooling (Windows).
TEST_S32DS_RUNTIME_ENV = {
    'PYTHONPATH': f'{TEST_S32DS_PYTHON_LIB}{os.pathsep}{TEST_S32DS_PYTHON_LIB / "site-packages"}'
}
# Constructor keyword arguments covering every option.
TEST_ALL_KWARGS = {
    'NXPS32DebugProbeConfig': {
        'conn_str': TEST_DEVICE,
        'server_port': TEST_SERVER_PORT,
        'speed': TEST_SPEED,
        'remote_timeout': TEST_REMOTE_TIMEOUT,
    },
    'NXPS32DebugProbeRunner': {
        'core_name': TEST_CORE_NAME,
        'soc_name': TEST_SOC_NAME,
        'soc_family_name': TEST_SOC_FAMILY_NAME,
        'start_all_cores': TEST_START_ALL_CORES,
        's32ds_path': TEST_S32DS_PATH_OVERRIDE,
        'tool_opt': TEST_TOOL_OPT
    }
}
# Equivalent command-line parameters; non-string values are str()-ed by
# the fixture before parsing.
# NOTE(review): TEST_S32DS_PATH_OVERRIDE is None, so '--s32ds-path' is
# passed the literal string 'None' here — verify this is intentional.
TEST_ALL_PARAMS = [
    # generic
    '--dev-id', TEST_DEVICE,
    *[f'--tool-opt={o}' for o in TEST_TOOL_OPT],
    # from runner
    '--s32ds-path', TEST_S32DS_PATH_OVERRIDE,
    '--core-name', TEST_CORE_NAME,
    '--soc-name', TEST_SOC_NAME,
    '--soc-family-name', TEST_SOC_FAMILY_NAME,
    '--server-port', TEST_SERVER_PORT,
    '--speed', TEST_SPEED,
    '--remote-timeout', TEST_REMOTE_TIMEOUT,
    '--start-all-cores',
]
# The 'py ...' variable assignments the runner is expected to write into
# its generated GDB script.
TEST_ALL_S32DBG_PY_VARS = [
    f'py _PROBE_IP = {repr(TEST_DEVICE)}',
    f'py _JTAG_SPEED = {repr(TEST_SPEED)}',
    f'py _GDB_SERVER_PORT = {repr(TEST_SERVER_PORT)}',
    f"py _RESET_TYPE = {repr(TEST_RESET_TYPE)}",
    f'py _RESET_DELAY = {repr(TEST_RESET_DELAY)}',
    f'py _REMOTE_TIMEOUT = {repr(TEST_REMOTE_TIMEOUT)}',
    f'py _CORE_NAME = {repr(f"{TEST_SOC_NAME}_{TEST_CORE_NAME}")}',
    f'py _SOC_NAME = {repr(TEST_SOC_NAME)}',
    'py _IS_LOGGING_ENABLED = False',
    'py _FLASH_NAME = None',
    'py _SECURE_TYPE = None',
    'py _SECURE_KEY = None',
]
# Expected gta server invocation for 'debugserver'.
DEBUGSERVER_ALL_EXPECTED_CALL = [
    str(TEST_SERVER_CMD),
    '-p', str(TEST_SERVER_PORT),
    *TEST_TOOL_OPT,
]
# Expected client/server commands and GDB script body for 'debug';
# 'TEST_GDB_SCRIPT' is substituted with the real path by the tests.
DEBUG_ALL_EXPECTED_CALL = {
    'client': [
        str(TEST_ARM_GDB_CMD),
        '-x', 'TEST_GDB_SCRIPT',
        *TEST_TOOL_OPT,
    ],
    'server': [
        str(TEST_SERVER_CMD),
        '-p', str(TEST_SERVER_PORT),
    ],
    'gdb_script': [
        *TEST_ALL_S32DBG_PY_VARS,
        f'source generic_bareboard{"_all_cores" if TEST_START_ALL_CORES else ""}.py',
        'py board_init()',
        'py core_init()',
        f'file {RC_KERNEL_ELF}',
        'load',
    ]
}
# 'attach' reuses the debug client/server but a different GDB script
# (no board_init, no load).
ATTACH_ALL_EXPECTED_CALL = {
    **DEBUG_ALL_EXPECTED_CALL,
    'gdb_script': [
        *TEST_ALL_S32DBG_PY_VARS,
        f'source attach.py',
        'py core_init()',
        f'file {RC_KERNEL_ELF}',
    ]
}
@pytest.fixture
def s32dbg(runner_config, tmp_path):
    '''NXPS32DebugProbeRunner from constructor kwargs or command line parameters'''
    def _factory(args):
        # create empty files to ensure kernel binaries exist
        (tmp_path / RC_KERNEL_ELF).touch()
        os.chdir(tmp_path)
        runner_config_patched = fix_up_runner_config(runner_config, tmp_path)
        # dict -> build via constructors; list -> parse as command line.
        if isinstance(args, dict):
            probe_cfg = NXPS32DebugProbeConfig(**args['NXPS32DebugProbeConfig'])
            return NXPS32DebugProbeRunner(runner_config_patched, probe_cfg,
                                          **args['NXPS32DebugProbeRunner'])
        elif isinstance(args, list):
            parser = argparse.ArgumentParser(allow_abbrev=False)
            NXPS32DebugProbeRunner.add_parser(parser)
            # Non-string values (ints, None) are stringified for argparse.
            arg_namespace = parser.parse_args(str(x) for x in args)
            return NXPS32DebugProbeRunner.create(runner_config_patched, arg_namespace)
    return _factory
def fix_up_runner_config(runner_config, tmp_path):
    '''Return a copy of runner_config whose build_dir is tmp_path, after
    seeding tmp_path/zephyr/.config with CONFIG_ARCH="arm".'''
    zephyr_dir = tmp_path / 'zephyr'
    zephyr_dir.mkdir()
    (zephyr_dir / '.config').write_text('CONFIG_ARCH="arm"')
    return runner_config._replace(build_dir=tmp_path)
def require_patch(program, path=None):
    '''Replacement for ZephyrBinaryRunner.require(): accept only the
    s32ds launcher (any extension) and echo the program back.'''
    del path  # unused; signature must match the patched method
    assert Path(program).stem == TEST_S32DS_CMD
    return program
def s32dbg_get_script(name):
    '''Replacement for NXPS32DebugProbeRunner.get_script(): map a script
    name to a relative '<name>.py' path.'''
    return Path(name + '.py')
@pytest.mark.parametrize('s32dbg_args,expected,osname', [
    (TEST_ALL_KWARGS, DEBUGSERVER_ALL_EXPECTED_CALL, 'Windows'),
    (TEST_ALL_PARAMS, DEBUGSERVER_ALL_EXPECTED_CALL, 'Windows'),
    (TEST_ALL_KWARGS, DEBUGSERVER_ALL_EXPECTED_CALL, 'Linux'),
    (TEST_ALL_PARAMS, DEBUGSERVER_ALL_EXPECTED_CALL, 'Linux'),
])
@patch('platform.system')
@patch('runners.core.ZephyrBinaryRunner.check_call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debugserver(require, check_call, system,
                     s32dbg_args, expected, osname, s32dbg):
    '''debugserver must launch exactly the expected gta server command,
    regardless of host OS or how the runner was constructed.'''
    system.return_value = osname
    runner = s32dbg(s32dbg_args)
    runner.run('debugserver')
    assert require.called
    check_call.assert_called_once_with(expected)
@pytest.mark.parametrize('s32dbg_args,expected,osname', [
    (TEST_ALL_KWARGS, DEBUG_ALL_EXPECTED_CALL, 'Windows'),
    (TEST_ALL_PARAMS, DEBUG_ALL_EXPECTED_CALL, 'Windows'),
    (TEST_ALL_KWARGS, DEBUG_ALL_EXPECTED_CALL, 'Linux'),
    (TEST_ALL_PARAMS, DEBUG_ALL_EXPECTED_CALL, 'Linux'),
])
@patch.dict(os.environ, TEST_S32DS_RUNTIME_ENV, clear=True)
@patch('platform.system')
@patch('tempfile.TemporaryDirectory')
@patch('runners.nxp_s32dbg.NXPS32DebugProbeRunner.get_script', side_effect=s32dbg_get_script)
@patch('runners.core.ZephyrBinaryRunner.popen_ignore_int')
@patch('runners.core.ZephyrBinaryRunner.check_call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debug(require, check_call, popen_ignore_int, get_script, temporary_dir, system,
               s32dbg_args, expected, osname, s32dbg, tmp_path):
    '''debug must spawn the gta server, write the expected GDB init
    script, and run the GDB client against it.'''
    # mock tempfile.TemporaryDirectory to return `tmp_path` and create gdb init script there
    temporary_dir.return_value.__enter__.return_value = tmp_path
    gdb_script = tmp_path / 'runner.nxp_s32dbg'
    expected_client = [e.replace('TEST_GDB_SCRIPT', gdb_script.as_posix())
                       for e in expected['client']]
    system.return_value = osname
    # Only Windows needs the S32DS PYTHONPATH environment.
    expected_env = TEST_S32DS_RUNTIME_ENV if osname == 'Windows' else None
    runner = s32dbg(s32dbg_args)
    runner.run('debug')
    assert require.called
    assert gdb_script.read_text().splitlines() == expected['gdb_script']
    popen_ignore_int.assert_called_once_with(expected['server'], env=expected_env)
    check_call.assert_called_once_with(expected_client, env=expected_env)
@pytest.mark.parametrize('s32dbg_args,expected,osname', [
    (TEST_ALL_KWARGS, ATTACH_ALL_EXPECTED_CALL, 'Windows'),
    (TEST_ALL_PARAMS, ATTACH_ALL_EXPECTED_CALL, 'Windows'),
    (TEST_ALL_KWARGS, ATTACH_ALL_EXPECTED_CALL, 'Linux'),
    (TEST_ALL_PARAMS, ATTACH_ALL_EXPECTED_CALL, 'Linux'),
])
@patch.dict(os.environ, TEST_S32DS_RUNTIME_ENV, clear=True)
@patch('platform.system')
@patch('tempfile.TemporaryDirectory')
@patch('runners.nxp_s32dbg.NXPS32DebugProbeRunner.get_script', side_effect=s32dbg_get_script)
@patch('runners.core.ZephyrBinaryRunner.popen_ignore_int')
@patch('runners.core.ZephyrBinaryRunner.check_call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_attach(require, check_call, popen_ignore_int, get_script, temporary_dir, system,
                s32dbg_args, expected, osname, s32dbg, tmp_path):
    '''attach mirrors test_debug but expects the attach-flavored GDB
    script (no board_init, no load).'''
    # mock tempfile.TemporaryDirectory to return `tmp_path` and create gdb init script there
    temporary_dir.return_value.__enter__.return_value = tmp_path
    gdb_script = tmp_path / 'runner.nxp_s32dbg'
    expected_client = [e.replace('TEST_GDB_SCRIPT', gdb_script.as_posix())
                       for e in expected['client']]
    system.return_value = osname
    # Only Windows needs the S32DS PYTHONPATH environment.
    expected_env = TEST_S32DS_RUNTIME_ENV if osname == 'Windows' else None
    runner = s32dbg(s32dbg_args)
    runner.run('attach')
    assert require.called
    assert gdb_script.read_text().splitlines() == expected['gdb_script']
    popen_ignore_int.assert_called_once_with(expected['server'], env=expected_env)
    check_call.assert_called_once_with(expected_client, env=expected_env)
``` | /content/code_sandbox/scripts/west_commands/tests/test_nxp_s32dbg.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,312 |
```python
#
import argparse
import itertools
from unittest.mock import patch
import pytest
from runners.pyocd import PyOcdBinaryRunner
from conftest import RC_BUILD_DIR, RC_GDB, RC_KERNEL_HEX, RC_KERNEL_ELF
#
# Test values to provide as constructor arguments and command line
# parameters, to verify they're respected.
#
# Override values for every pyocd option the runner accepts.
TEST_PYOCD = 'test-pyocd'
TEST_ADDR = 0xadd
TEST_DEV_ID = 'test-dev-id'
TEST_FREQUENCY = 'test-frequency'
TEST_DAPARG = 'test-daparg'
TEST_TARGET = 'test-target'
TEST_FLASH_OPTS = ['--test-flash', 'args']
TEST_GDB_PORT = 1
TEST_TELNET_PORT = 2
TEST_TOOL_OPTS = ['test-opt-1', 'test-opt-2']
# 'ALL' variants override everything; 'DEF' variants rely on defaults.
TEST_ALL_KWARGS = {
    'pyocd': TEST_PYOCD,
    'flash_addr': TEST_ADDR,
    'flash_opts': TEST_FLASH_OPTS,
    'gdb_port': TEST_GDB_PORT,
    'telnet_port': TEST_TELNET_PORT,
    'tui': False,
    'dev_id': TEST_DEV_ID,
    'frequency': TEST_FREQUENCY,
    'daparg': TEST_DAPARG,
    'tool_opt': TEST_TOOL_OPTS
}
TEST_DEF_KWARGS = {}
TEST_ALL_PARAMS = list(itertools.chain(
    ['--target', TEST_TARGET,
     '--daparg', TEST_DAPARG,
     '--pyocd', TEST_PYOCD],
    [f'--flash-opt={o}' for o in TEST_FLASH_OPTS],
    ['--gdb-port', str(TEST_GDB_PORT),
     '--telnet-port', str(TEST_TELNET_PORT),
     '--dev-id', TEST_DEV_ID,
     '--frequency', str(TEST_FREQUENCY)],
    [f'--tool-opt={o}' for o in TEST_TOOL_OPTS]))
TEST_DEF_PARAMS = ['--target', TEST_TARGET]
#
# Expected results.
#
# These record expected argument lists for system calls made by the
# pyocd runner using its check_call() and run_server_and_client()
# methods.
#
# They are shared between tests that create runners directly and
# tests that construct runners from parsed command-line arguments, to
# ensure that results are consistent.
#
FLASH_ALL_EXPECTED_CALL = ([TEST_PYOCD,
                            'flash',
                            '-e', 'sector',
                            '-a', hex(TEST_ADDR), '-da', TEST_DAPARG,
                            '-t', TEST_TARGET, '-u', TEST_DEV_ID,
                            '-f', TEST_FREQUENCY] +
                           TEST_TOOL_OPTS +
                           TEST_FLASH_OPTS +
                           [RC_KERNEL_HEX])
FLASH_DEF_EXPECTED_CALL = ['pyocd', 'flash', '-e', 'sector',
                           '-t', TEST_TARGET, RC_KERNEL_HEX]
DEBUG_ALL_EXPECTED_SERVER = [TEST_PYOCD,
                             'gdbserver',
                             '-da', TEST_DAPARG,
                             '-p', str(TEST_GDB_PORT),
                             '-T', str(TEST_TELNET_PORT),
                             '-t', TEST_TARGET,
                             '-u', TEST_DEV_ID,
                             '-f', TEST_FREQUENCY] + TEST_TOOL_OPTS
DEBUG_ALL_EXPECTED_CLIENT = [RC_GDB, RC_KERNEL_ELF,
                             '-ex', 'target remote :{}'.format(TEST_GDB_PORT),
                             '-ex', 'monitor halt',
                             '-ex', 'monitor reset',
                             '-ex', 'load']
# Defaults: gdb port 3333, telnet port 4444, plain 'pyocd' executable.
DEBUG_DEF_EXPECTED_SERVER = ['pyocd',
                             'gdbserver',
                             '-p', '3333',
                             '-T', '4444',
                             '-t', TEST_TARGET]
DEBUG_DEF_EXPECTED_CLIENT = [RC_GDB, RC_KERNEL_ELF,
                             '-ex', 'target remote :3333',
                             '-ex', 'monitor halt',
                             '-ex', 'monitor reset',
                             '-ex', 'load']
DEBUGSERVER_ALL_EXPECTED_CALL = [TEST_PYOCD,
                                 'gdbserver',
                                 '-da', TEST_DAPARG,
                                 '-p', str(TEST_GDB_PORT),
                                 '-T', str(TEST_TELNET_PORT),
                                 '-t', TEST_TARGET,
                                 '-u', TEST_DEV_ID,
                                 '-f', TEST_FREQUENCY] + TEST_TOOL_OPTS
DEBUGSERVER_DEF_EXPECTED_CALL = ['pyocd',
                                 'gdbserver',
                                 '-p', '3333',
                                 '-T', '4444',
                                 '-t', TEST_TARGET]
#
# Fixtures
#
@pytest.fixture
def pyocd(runner_config, tmpdir):
    '''PyOcdBinaryRunner from constructor kwargs or command line parameters'''
    # This factory takes either a dict of kwargs to pass to the
    # constructor, or a list of command-line arguments to parse and
    # use with the create() method.
    def _factory(args):
        # Ensure kernel binaries exist (as empty files, so commands
        # which use them must be patched out).
        tmpdir.ensure(RC_KERNEL_HEX)
        tmpdir.ensure(RC_KERNEL_ELF)
        tmpdir.chdir()
        if isinstance(args, dict):
            return PyOcdBinaryRunner(runner_config, TEST_TARGET, **args)
        elif isinstance(args, list):
            parser = argparse.ArgumentParser(allow_abbrev=False)
            PyOcdBinaryRunner.add_parser(parser)
            arg_namespace = parser.parse_args(args)
            return PyOcdBinaryRunner.create(runner_config, arg_namespace)
    return _factory
#
# Helpers
#
def require_patch(program):
    '''Replacement for ZephyrBinaryRunner.require(): only pyocd (default
    or override) and the test GDB may be requested.'''
    allowed = ['pyocd', TEST_PYOCD, RC_GDB]
    assert program in allowed
#
# Test cases for runners created by constructor.
#
@pytest.mark.parametrize('pyocd_args,expected', [
    (TEST_ALL_KWARGS, FLASH_ALL_EXPECTED_CALL),
    (TEST_DEF_KWARGS, FLASH_DEF_EXPECTED_CALL)
])
@patch('runners.pyocd.PyOcdBinaryRunner.check_call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_flash(require, cc, pyocd_args, expected, pyocd):
    '''Constructor-built runner: 'flash' issues the expected pyocd call.'''
    pyocd(pyocd_args).run('flash')
    assert require.called
    cc.assert_called_once_with(expected)
@pytest.mark.parametrize('pyocd_args,expectedv', [
    (TEST_ALL_KWARGS, (DEBUG_ALL_EXPECTED_SERVER, DEBUG_ALL_EXPECTED_CLIENT)),
    (TEST_DEF_KWARGS, (DEBUG_DEF_EXPECTED_SERVER, DEBUG_DEF_EXPECTED_CLIENT))
])
@patch('runners.pyocd.PyOcdBinaryRunner.run_server_and_client')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debug(require, rsc, pyocd_args, expectedv, pyocd):
    '''Constructor-built runner: 'debug' runs the expected gdbserver and
    GDB client command pair.'''
    pyocd(pyocd_args).run('debug')
    assert require.called
    rsc.assert_called_once_with(*expectedv)
@pytest.mark.parametrize('pyocd_args,expected', [
    (TEST_ALL_KWARGS, DEBUGSERVER_ALL_EXPECTED_CALL),
    (TEST_DEF_KWARGS, DEBUGSERVER_DEF_EXPECTED_CALL)
])
@patch('runners.pyocd.PyOcdBinaryRunner.check_call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debugserver(require, cc, pyocd_args, expected, pyocd):
    '''Constructor-built runner: 'debugserver' issues the expected
    gdbserver call.'''
    pyocd(pyocd_args).run('debugserver')
    assert require.called
    cc.assert_called_once_with(expected)
#
# Test cases for runners created via command line arguments.
#
# (Unlike the constructor tests, these require additional patching to mock and
# verify runners.core.BuildConfiguration usage.)
#
@pytest.mark.parametrize('pyocd_args,flash_addr,expected', [
    (TEST_ALL_PARAMS, TEST_ADDR, FLASH_ALL_EXPECTED_CALL),
    (TEST_DEF_PARAMS, 0x0, FLASH_DEF_EXPECTED_CALL)
])
@patch('runners.pyocd.BuildConfiguration')
@patch('runners.pyocd.PyOcdBinaryRunner.check_call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_flash_args(require, cc, bc, pyocd_args, flash_addr, expected, pyocd):
    '''Command-line-built runner: 'flash' issues the expected pyocd call,
    with the flash address supplied via a patched get_flash_address.'''
    with patch.object(PyOcdBinaryRunner, 'get_flash_address',
                      return_value=flash_addr):
        pyocd(pyocd_args).run('flash')
        assert require.called
        bc.assert_called_once_with(RC_BUILD_DIR)
        cc.assert_called_once_with(expected)
@pytest.mark.parametrize('pyocd_args, expectedv', [
    (TEST_ALL_PARAMS, (DEBUG_ALL_EXPECTED_SERVER, DEBUG_ALL_EXPECTED_CLIENT)),
    (TEST_DEF_PARAMS, (DEBUG_DEF_EXPECTED_SERVER, DEBUG_DEF_EXPECTED_CLIENT)),
])
@patch('runners.pyocd.BuildConfiguration')
@patch('runners.pyocd.PyOcdBinaryRunner.run_server_and_client')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debug_args(require, rsc, bc, pyocd_args, expectedv, pyocd):
    '''Command-line-built runner: 'debug' runs the expected gdbserver and
    GDB client command pair.'''
    pyocd(pyocd_args).run('debug')
    assert require.called
    bc.assert_called_once_with(RC_BUILD_DIR)
    rsc.assert_called_once_with(*expectedv)
@pytest.mark.parametrize('pyocd_args, expected', [
    (TEST_ALL_PARAMS, DEBUGSERVER_ALL_EXPECTED_CALL),
    (TEST_DEF_PARAMS, DEBUGSERVER_DEF_EXPECTED_CALL),
])
@patch('runners.pyocd.BuildConfiguration')
@patch('runners.pyocd.PyOcdBinaryRunner.check_call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debugserver_args(require, cc, bc, pyocd_args, expected, pyocd):
    '''Command-line-built runner: 'debugserver' issues the expected
    gdbserver call.'''
    pyocd(pyocd_args).run('debugserver')
    assert require.called
    bc.assert_called_once_with(RC_BUILD_DIR)
    cc.assert_called_once_with(expected)
``` | /content/code_sandbox/scripts/west_commands/tests/test_pyocd.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,080 |
```python
#
from runners.core import ZephyrBinaryRunner
def test_runner_imports():
    '''Sanity-check that every known runner is registered.

    Guards tree-wide refactorings for runners that don't have their own
    test suites: each runner module must be imported and reported by
    ZephyrBinaryRunner.get_runners().
    '''
    actual = {runner.name() for runner in ZephyrBinaryRunner.get_runners()}
    # Please keep this sorted alphabetically.
    expected = {
        'arc-nsim',
        'blackmagicprobe',
        'bossac',
        'canopen',
        'dediprog',
        'dfu-util',
        'esp32',
        'ezflashcli',
        'gd32isp',
        'hifive1',
        'intel_adsp',
        'intel_cyclonev',
        'jlink',
        'linkserver',
        'mdb-nsim',
        'mdb-hw',
        'misc-flasher',
        'native',
        'nios2',
        'nrfjprog',
        'nrfutil',
        'nxp_s32dbg',
        'openocd',
        'probe-rs',
        'pyocd',
        'qemu',
        'renode',
        'renode-robot',
        'silabs_commander',
        'spi_burn',
        'stm32cubeprogrammer',
        'stm32flash',
        'trace32',
        'teensy',
        'uf2',
        'xtensa',
    }
    assert actual == expected
``` | /content/code_sandbox/scripts/west_commands/tests/test_imports.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 330 |
```python
#
import argparse
import functools
import io
import os
from pathlib import Path
import shlex
import shutil
import typing
from unittest.mock import patch, call
import pytest
from runners.nrfjprog import NrfJprogBinaryRunner
from runners.nrfutil import NrfUtilBinaryRunner
from conftest import RC_KERNEL_HEX
#
# Test values
#
TEST_DEF_SNR = 'test-default-serial-number' # for mocking user input
TEST_OVR_SNR = 'test-override-serial-number'
TEST_TOOL_OPT = '--ip 192.168.1.10'
TEST_TOOL_OPT_L = shlex.split(TEST_TOOL_OPT)
# nRF53 flashing is special in that we have different results
# depending on the input hex file. For that reason, we test it with
# real hex files.
TEST_DIR = Path(__file__).parent / 'nrf'
NRF5340_APP_ONLY_HEX = os.fspath(TEST_DIR / 'nrf5340_app_only.hex')
NRF5340_NET_ONLY_HEX = os.fspath(TEST_DIR / 'nrf5340_net_only.hex')
NRF5340_APP_AND_NET_HEX = os.fspath(TEST_DIR / 'nrf5340_app_and_net.hex')
CLASS_MAP = {'nrfjprog': NrfJprogBinaryRunner, 'nrfutil': NrfUtilBinaryRunner}
#
# A dictionary mapping test cases to expected results.
#
# The keys are TC objects.
#
# The values are usually tool commands we expect to be executed for
# each test case. Verification is done by mocking the check_call()
# ZephyrBinaryRunner method which is used to run the commands.
#
# Values can also be callables which take a tmpdir and return the
# expected commands. This is needed for nRF53 testing.
#
class TC(typing.NamedTuple): # 'TestCase'
    """One nrfjprog/nrfutil flashing scenario.

    Each field selects one axis of runner configuration; instances are
    used as keys into the table of expected tool invocations.
    """
    # NRF51, NRF52, etc.
    family: str
    # 'APP', 'NET', 'APP+NET', or None.
    coprocessor: typing.Optional[str]
    # Run a recover command first if True
    recover: bool
    # Use --reset instead of --pinreset if True
    softreset: bool
    # --snr TEST_OVR_SNR if True, --snr TEST_DEF_SNR if False
    snr: bool
    # --chiperase if True,
    # --sectorerase if False (or --sectoranduicrerase on nRF52)
    erase: bool
    # --tool-opt TEST_TOOL_OPT if True
    tool_opt: bool
EXPECTED_MAP = {'nrfjprog': 0, 'nrfutil': 1}
EXPECTED_RESULTS = {
# your_sha256_hash---------
# NRF51
#
# family CP recov soft snr erase tool_opt
TC('NRF51_FAMILY', None, False, False, False, False, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--sectorerase', '--verify', '-f', 'NRF51',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF51', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF51_FAMILY', None, False, False, False, True, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--chiperase', '--verify', '-f', 'NRF51',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF51', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF51_FAMILY', None, False, False, True, False, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--sectorerase', '--verify', '-f', 'NRF51',
'--snr', TEST_OVR_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF51', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
TC('NRF51_FAMILY', None, False, True, False, False, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--sectorerase', '--verify', '-f', 'NRF51',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--reset', '-f', 'NRF51', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF51_FAMILY', None, True, False, False, False, False):
((['nrfjprog', '--recover', '-f', 'NRF51', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '--sectorerase', '--verify', '-f', 'NRF51',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF51', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF51_FAMILY', None, True, True, True, True, False):
((['nrfjprog', '--recover', '-f', 'NRF51', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '--chiperase', '--verify', '-f', 'NRF51',
'--snr', TEST_OVR_SNR],
['nrfjprog', '--reset', '-f', 'NRF51', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
TC('NRF51_FAMILY', None, True, True, True, True, True):
((['nrfjprog', '--recover', '-f', 'NRF51', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog', '--program', RC_KERNEL_HEX, '--chiperase', '--verify', '-f', 'NRF51',
'--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog', '--reset', '-f', 'NRF51', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L),
(TEST_OVR_SNR, None)),
# your_sha256_hash---------
# NRF52
#
# family CP recov soft snr erase tool_opt
TC('NRF52_FAMILY', None, False, False, False, False, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--sectoranduicrerase',
'--verify', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinresetenable', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF52', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF52_FAMILY', None, False, False, False, True, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--chiperase', '--verify', '-f', 'NRF52',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--pinresetenable', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF52', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF52_FAMILY', None, False, False, True, False, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--sectoranduicrerase',
'--verify', '-f', 'NRF52', '--snr', TEST_OVR_SNR],
['nrfjprog', '--pinresetenable', '-f', 'NRF52', '--snr', TEST_OVR_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF52', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
TC('NRF52_FAMILY', None, False, True, False, False, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--sectoranduicrerase',
'--verify', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--reset', '-f', 'NRF52', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF52_FAMILY', None, True, False, False, False, False):
((['nrfjprog', '--recover', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '--sectoranduicrerase',
'--verify', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinresetenable', '-f', 'NRF52', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF52', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF52_FAMILY', None, True, True, True, True, False):
((['nrfjprog', '--recover', '-f', 'NRF52', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '--chiperase', '--verify', '-f', 'NRF52',
'--snr', TEST_OVR_SNR],
['nrfjprog', '--reset', '-f', 'NRF52', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
TC('NRF52_FAMILY', None, True, True, True, True, True):
((['nrfjprog', '--recover', '-f', 'NRF52', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog', '--program', RC_KERNEL_HEX, '--chiperase', '--verify', '-f', 'NRF52',
'--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog', '--reset', '-f', 'NRF52', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L),
(TEST_OVR_SNR, None)),
# your_sha256_hash---------
# NRF53 APP only
#
# family CP recov soft snr erase tool_opt
TC('NRF53_FAMILY', 'APP', False, False, False, False, False):
((['nrfjprog', '--program', NRF5340_APP_ONLY_HEX, '--sectorerase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_APPLICATION', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'APP', False, False, False, True, False):
((['nrfjprog', '--program', NRF5340_APP_ONLY_HEX, '--chiperase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_APPLICATION', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'APP', False, False, True, False, False):
((['nrfjprog', '--program', NRF5340_APP_ONLY_HEX, '--sectorerase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_APPLICATION', '--snr', TEST_OVR_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
TC('NRF53_FAMILY', 'APP', False, True, False, False, False):
((['nrfjprog', '--program', NRF5340_APP_ONLY_HEX, '--sectorerase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_APPLICATION', '--snr', TEST_DEF_SNR],
['nrfjprog', '--reset', '-f', 'NRF53', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'APP', True, False, False, False, False):
((['nrfjprog', '--recover', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--recover', '-f', 'NRF53', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', NRF5340_APP_ONLY_HEX, '--sectorerase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_APPLICATION', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'APP', True, True, True, True, False):
((['nrfjprog', '--recover', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK',
'--snr', TEST_OVR_SNR],
['nrfjprog', '--recover', '-f', 'NRF53', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', NRF5340_APP_ONLY_HEX, '--chiperase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_APPLICATION', '--snr', TEST_OVR_SNR],
['nrfjprog', '--reset', '-f', 'NRF53', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
# your_sha256_hash---------
# NRF53 NET only
#
# family CP recov soft snr erase tool_opt
TC('NRF53_FAMILY', 'NET', False, False, False, False, False):
((['nrfjprog', '--program', NRF5340_NET_ONLY_HEX, '--sectorerase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'NET', False, False, False, True, False):
((['nrfjprog', '--program', NRF5340_NET_ONLY_HEX, '--chiperase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'NET', False, False, True, False, False):
((['nrfjprog', '--program', NRF5340_NET_ONLY_HEX, '--sectorerase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK', '--snr', TEST_OVR_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
TC('NRF53_FAMILY', 'NET', False, True, False, False, False):
((['nrfjprog', '--program', NRF5340_NET_ONLY_HEX, '--sectorerase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK', '--snr', TEST_DEF_SNR],
['nrfjprog', '--reset', '-f', 'NRF53', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'NET', True, False, False, False, False):
((['nrfjprog', '--recover', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--recover', '-f', 'NRF53', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', NRF5340_NET_ONLY_HEX, '--sectorerase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'NET', True, True, True, True, False):
((['nrfjprog', '--recover', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK',
'--snr', TEST_OVR_SNR],
['nrfjprog', '--recover', '-f', 'NRF53', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', NRF5340_NET_ONLY_HEX, '--chiperase',
'--verify', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK', '--snr', TEST_OVR_SNR],
['nrfjprog', '--reset', '-f', 'NRF53', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
# your_sha256_hash---------
# NRF53 APP+NET
#
# family CP recov soft snr erase tool_opt
TC('NRF53_FAMILY', 'APP+NET', False, False, False, False, False):
((lambda tmpdir, infile: \
(['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_NETWORK_' + Path(infile).name),
'--sectorerase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_NETWORK', '--snr', TEST_DEF_SNR],
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_APPLICATION_' + Path(infile).name),
'--sectorerase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_APPLICATION', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_DEF_SNR])),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'APP+NET', False, False, False, True, False):
((lambda tmpdir, infile: \
(['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_NETWORK_' + Path(infile).name),
'--chiperase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_NETWORK', '--snr', TEST_DEF_SNR],
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_APPLICATION_' + Path(infile).name),
'--chiperase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_APPLICATION', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_DEF_SNR])),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'APP+NET', False, False, True, False, False):
((lambda tmpdir, infile: \
(['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_NETWORK_' + Path(infile).name),
'--sectorerase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_NETWORK', '--snr', TEST_OVR_SNR],
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_APPLICATION_' + Path(infile).name),
'--sectorerase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_APPLICATION', '--snr', TEST_OVR_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_OVR_SNR])),
(TEST_OVR_SNR, None)),
TC('NRF53_FAMILY', 'APP+NET', False, True, False, False, False):
((lambda tmpdir, infile: \
(['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_NETWORK_' + Path(infile).name),
'--sectorerase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_NETWORK', '--snr', TEST_DEF_SNR],
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_APPLICATION_' + Path(infile).name),
'--sectorerase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_APPLICATION', '--snr', TEST_DEF_SNR],
['nrfjprog', '--reset', '-f', 'NRF53', '--snr', TEST_DEF_SNR])),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'APP+NET', True, False, False, False, False):
((lambda tmpdir, infile: \
(['nrfjprog', '--recover', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--recover', '-f', 'NRF53', '--snr', TEST_DEF_SNR],
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_NETWORK_' + Path(infile).name),
'--sectorerase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_NETWORK', '--snr', TEST_DEF_SNR],
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_APPLICATION_' + Path(infile).name),
'--sectorerase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_APPLICATION', '--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF53', '--snr', TEST_DEF_SNR])),
(TEST_DEF_SNR, None)),
TC('NRF53_FAMILY', 'APP+NET', True, True, True, True, False):
((lambda tmpdir, infile: \
(['nrfjprog', '--recover', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK',
'--snr', TEST_OVR_SNR],
['nrfjprog', '--recover', '-f', 'NRF53', '--snr', TEST_OVR_SNR],
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_NETWORK_' + Path(infile).name),
'--chiperase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_NETWORK', '--snr', TEST_OVR_SNR],
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_APPLICATION_' + Path(infile).name),
'--chiperase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_APPLICATION', '--snr', TEST_OVR_SNR],
['nrfjprog', '--reset', '-f', 'NRF53', '--snr', TEST_OVR_SNR])),
(TEST_OVR_SNR, None)),
TC('NRF53_FAMILY', 'APP+NET', True, True, True, True, True):
((lambda tmpdir, infile: \
(['nrfjprog', '--recover', '-f', 'NRF53', '--coprocessor', 'CP_NETWORK',
'--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog', '--recover', '-f', 'NRF53', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_NETWORK_' + Path(infile).name),
'--chiperase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_NETWORK', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog',
'--program',
os.fspath(tmpdir / 'GENERATED_CP_APPLICATION_' + Path(infile).name),
'--chiperase', '--verify', '-f', 'NRF53',
'--coprocessor', 'CP_APPLICATION', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog', '--reset', '-f', 'NRF53', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L)),
(TEST_OVR_SNR, None)),
# your_sha256_hash---------
# NRF91
#
# family CP recov soft snr erase tool_opt
TC('NRF91_FAMILY', None, False, False, False, False, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--sectorerase', '--verify', '-f', 'NRF91',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF91', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF91_FAMILY', None, False, False, False, True, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--chiperase', '--verify', '-f', 'NRF91',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF91', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF91_FAMILY', None, False, False, True, False, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--sectorerase', '--verify', '-f', 'NRF91',
'--snr', TEST_OVR_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF91', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
TC('NRF91_FAMILY', None, False, True, False, False, False):
((['nrfjprog', '--program', RC_KERNEL_HEX, '--sectorerase', '--verify', '-f', 'NRF91',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--reset', '-f', 'NRF91', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF91_FAMILY', None, True, False, False, False, False):
((['nrfjprog', '--recover', '-f', 'NRF91', '--snr', TEST_DEF_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '--sectorerase', '--verify', '-f', 'NRF91',
'--snr', TEST_DEF_SNR],
['nrfjprog', '--pinreset', '-f', 'NRF91', '--snr', TEST_DEF_SNR]),
(TEST_DEF_SNR, None)),
TC('NRF91_FAMILY', None, True, True, True, True, False):
((['nrfjprog', '--recover', '-f', 'NRF91', '--snr', TEST_OVR_SNR],
['nrfjprog', '--program', RC_KERNEL_HEX, '--chiperase', '--verify', '-f', 'NRF91',
'--snr', TEST_OVR_SNR],
['nrfjprog', '--reset', '-f', 'NRF91', '--snr', TEST_OVR_SNR]),
(TEST_OVR_SNR, None)),
TC('NRF91_FAMILY', None, True, True, True, True, True):
((['nrfjprog', '--recover', '-f', 'NRF91', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog', '--program', RC_KERNEL_HEX, '--chiperase', '--verify', '-f', 'NRF91',
'--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L,
['nrfjprog', '--reset', '-f', 'NRF91', '--snr', TEST_OVR_SNR] + TEST_TOOL_OPT_L),
(TEST_OVR_SNR, None)),
}
#
# Monkey-patches
#
def get_board_snr_patch(glob):
    """Mock for get_board_snr: always report the default test serial
    number instead of probing for attached hardware."""
    return TEST_DEF_SNR
def require_patch(cur_tool, program):
    """Mock for ZephyrBinaryRunner.require: the runner under test must
    request exactly the tool being exercised."""
    assert program == cur_tool
# Keep a reference to the real implementation so the patch can fall
# back to it for every path other than the fake kernel hex.
os_path_isfile = os.path.isfile
def os_path_isfile_patch(filename):
    """os.path.isfile stand-in that pretends RC_KERNEL_HEX exists."""
    return filename == RC_KERNEL_HEX or os_path_isfile(filename)
#
# Test functions.
#
# These are white box tests that rely on the above monkey-patches.
#
def id_fn(test_case):
    """Build a human-readable pytest ID for *test_case*."""
    if test_case.coprocessor is None:
        cp = ''
    else:
        cp = f'-{test_case.coprocessor}'
    s = 'soft_reset' if test_case.softreset else 'pin_reset'
    # TC.snr is documented as "--snr TEST_OVR_SNR if True, --snr
    # TEST_DEF_SNR if False", so True is the *override* case.  The
    # labels here were previously swapped.
    sn = 'override_snr' if test_case.snr else 'default_snr'
    e = 'chip_erase' if test_case.erase else 'sector[anduicr]_erase'
    r = 'recover' if test_case.recover else 'no_recover'
    t = 'tool_opt' if test_case.tool_opt else 'no_tool_opt'
    return f'{test_case.family[:5]}{cp}-{s}-{sn}-{e}-{r}-{t}'
def fix_up_runner_config(test_case, runner_config, tmpdir):
    """Adapt the shared runner_config fixture to one nRF test case."""
    # Provide a skeletal zephyr/.config so the runner's
    # BuildConfiguration can discover the SoC series.
    build_subdir = tmpdir / 'zephyr'
    build_subdir.mkdir()
    with open(os.fspath(build_subdir / '.config'), 'w') as cfg:
        cfg.write(f'''
CONFIG_SOC_SERIES_{test_case.family[:5]}X=y
''')

    overrides = {'build_dir': tmpdir}
    if test_case.family == 'NRF53_FAMILY':
        # nRF53 flashing depends on the hex file contents, so point
        # the runner at a real input matching the coprocessor setup.
        if test_case.coprocessor == 'APP':
            overrides['hex_file'] = NRF5340_APP_ONLY_HEX
        elif test_case.coprocessor == 'NET':
            overrides['hex_file'] = NRF5340_NET_ONLY_HEX
        elif test_case.coprocessor == 'APP+NET':
            # The runner generates files next to its input file, so
            # hand it a private copy inside tmpdir.
            stashed = tmpdir / Path(NRF5340_APP_AND_NET_HEX).name
            shutil.copyfile(NRF5340_APP_AND_NET_HEX, stashed)
            overrides['hex_file'] = os.fspath(stashed)
        else:
            assert False, f'bad test case {test_case}'
    return runner_config._replace(**overrides)
def check_expected(tool, test_case, check_fn, get_snr, tmpdir, runner_config):
    """Verify the mocked tool invocations against EXPECTED_RESULTS."""
    expected = EXPECTED_RESULTS[test_case][EXPECTED_MAP[tool]]

    if tool == 'nrfutil':
        # nrfutil issues exactly one subprocess call; pull out its
        # argument list and rebuild the command we expect around the
        # generated batch file path.
        calls = check_fn.call_args_list
        assert len(calls) == 1
        assert len(calls[0].args) == 1
        nrfutil_args = calls[0].args[0]
        batch = nrfutil_args[nrfutil_args.index('--batch-path') + 1]
        cmds = (['nrfutil', '--json', 'device', 'x-execute-batch', '--batch-path',
                 batch, '--serial-number', expected[0]],)
        call_args = [call(nrfutil_args)]
    else:
        cmds = expected
        call_args = check_fn.call_args_list

    # nRF53 APP+NET cases store a callable that needs the tmpdir and
    # input hex to produce the expected command lists.
    expected_cmds = (cmds(tmpdir, runner_config.hex_file)
                     if callable(cmds) else cmds)
    assert call_args == [call(x) for x in expected_cmds]

    # The board serial number is only queried when not given explicitly.
    if test_case.snr:
        get_snr.assert_not_called()
    else:
        get_snr.assert_called_once_with('*')
@pytest.mark.parametrize('tool', ["nrfjprog","nrfutil"])
@pytest.mark.parametrize('test_case', EXPECTED_RESULTS.keys(), ids=id_fn)
@patch('runners.core.ZephyrBinaryRunner.require')
@patch('runners.nrfjprog.NrfBinaryRunner.get_board_snr',
       side_effect=get_board_snr_patch)
@patch('runners.nrfutil.subprocess.Popen')
@patch('runners.nrfjprog.NrfBinaryRunner.check_call')
def test_init(check_call, popen, get_snr, require, tool, test_case,
              runner_config, tmpdir):
    '''Flash using a runner built directly via its constructor.'''
    popen.return_value.__enter__.return_value.stdout = io.BytesIO(b'')
    require.side_effect = functools.partial(require_patch, tool)
    runner_config = fix_up_runner_config(test_case, runner_config, tmpdir)

    runner = CLASS_MAP[tool](
        runner_config,
        test_case.family,
        test_case.softreset,
        TEST_OVR_SNR if test_case.snr else None,
        erase=test_case.erase,
        tool_opt=TEST_TOOL_OPT_L if test_case.tool_opt else [],
        recover=test_case.recover)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')

    assert require.called
    check_fn = {'nrfjprog': check_call, 'nrfutil': popen}[tool]
    check_expected(tool, test_case, check_fn, get_snr, tmpdir, runner_config)
@pytest.mark.parametrize('tool', ["nrfjprog","nrfutil"])
@pytest.mark.parametrize('test_case', EXPECTED_RESULTS.keys(), ids=id_fn)
@patch('runners.core.ZephyrBinaryRunner.require')
@patch('runners.nrfjprog.NrfBinaryRunner.get_board_snr',
       side_effect=get_board_snr_patch)
@patch('runners.nrfutil.subprocess.Popen')
@patch('runners.nrfjprog.NrfBinaryRunner.check_call')
def test_create(check_call, popen, get_snr, require, tool, test_case,
                runner_config, tmpdir):
    '''Flash using a runner created from command line parameters.'''
    popen.return_value.__enter__.return_value.stdout = io.BytesIO(b'')
    require.side_effect = functools.partial(require_patch, tool)
    runner_config = fix_up_runner_config(test_case, runner_config, tmpdir)

    # Translate the test case into the equivalent command line.
    args = []
    if test_case.softreset:
        args.append('--softreset')
    if test_case.snr:
        args.extend(['--dev-id', TEST_OVR_SNR])
    if test_case.erase:
        args.append('--erase')
    if test_case.recover:
        args.append('--recover')
    if test_case.tool_opt:
        args.extend(['--tool-opt', TEST_TOOL_OPT])

    cls = CLASS_MAP[tool]
    parser = argparse.ArgumentParser(allow_abbrev=False)
    cls.add_parser(parser)
    runner = cls.create(runner_config, parser.parse_args(args))
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')

    assert require.called
    check_fn = {'nrfjprog': check_call, 'nrfutil': popen}[tool]
    check_expected(tool, test_case, check_fn, get_snr, tmpdir, runner_config)
``` | /content/code_sandbox/scripts/west_commands/tests/test_nrf.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 8,113 |
```python
#
import argparse
from pathlib import Path
from unittest.mock import patch, call
import pytest
from runners.stm32cubeprogrammer import STM32CubeProgrammerBinaryRunner
from conftest import RC_KERNEL_HEX, RC_KERNEL_ELF
CLI_PATH = Path("STM32_Programmer_CLI")
"""Default CLI path used in tests."""
HOME_PATH = Path("/home", "test")
"""Home path (used for Linux system CLI path)."""
PROGRAMFILESX86_PATH = Path("C:", "Program Files (x86)")
"""Program files x86 path (used for Windows system CLI path)."""
ENVIRON = {
"PROGRAMFILES(X86)": str(PROGRAMFILESX86_PATH),
}
"""Environment (used for Windows system CLI path)."""
LINUX_CLI_PATH = (
HOME_PATH
/ "STMicroelectronics"
/ "STM32Cube"
/ "STM32CubeProgrammer"
/ "bin"
/ "STM32_Programmer_CLI"
)
"""Linux CLI path."""
WINDOWS_CLI_PATH = (
PROGRAMFILESX86_PATH
/ "STMicroelectronics"
/ "STM32Cube"
/ "STM32CubeProgrammer"
/ "bin"
/ "STM32_Programmer_CLI.exe"
)
"""Windows CLI path."""
MACOS_CLI_PATH = (
Path("/Applications")
/ "STMicroelectronics"
/ "STM32Cube"
/ "STM32CubeProgrammer"
/ "STM32CubeProgrammer.app"
/ "Contents"
/ "MacOs"
/ "bin"
/ "STM32_Programmer_CLI"
)
"""macOS CLI path."""
TEST_CASES = (
{
"port": "swd",
"frequency": None,
"reset_mode": None,
"conn_modifiers": None,
"cli": CLI_PATH,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "",
"cli_path": str(CLI_PATH),
"calls": [
[
str(CLI_PATH),
"--connect",
"port=swd",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": "4000",
"reset_mode": None,
"conn_modifiers": None,
"cli": CLI_PATH,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "",
"cli_path": str(CLI_PATH),
"calls": [
[
str(CLI_PATH),
"--connect",
"port=swd freq=4000",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": "hw",
"conn_modifiers": None,
"cli": CLI_PATH,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "",
"cli_path": str(CLI_PATH),
"calls": [
[
str(CLI_PATH),
"--connect",
"port=swd reset=HWrst",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": "sw",
"conn_modifiers": None,
"cli": CLI_PATH,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "",
"cli_path": str(CLI_PATH),
"calls": [
[
str(CLI_PATH),
"--connect",
"port=swd reset=SWrst",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": "core",
"conn_modifiers": None,
"cli": CLI_PATH,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "",
"cli_path": str(CLI_PATH),
"calls": [
[
str(CLI_PATH),
"--connect",
"port=swd reset=Crst",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": None,
"conn_modifiers": "br=115200 sn=TEST",
"cli": CLI_PATH,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "",
"cli_path": str(CLI_PATH),
"calls": [
[
str(CLI_PATH),
"--connect",
"port=swd br=115200 sn=TEST",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": None,
"conn_modifiers": None,
"cli": CLI_PATH,
"use_elf": True,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "",
"cli_path": str(CLI_PATH),
"calls": [
[
str(CLI_PATH),
"--connect",
"port=swd",
"--download",
RC_KERNEL_ELF,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": None,
"conn_modifiers": None,
"cli": CLI_PATH,
"use_elf": False,
"erase": True,
"extload": None,
"tool_opt": [],
"system": "",
"cli_path": str(CLI_PATH),
"calls": [
[str(CLI_PATH), "--connect", "port=swd", "--erase", "all",],
[
str(CLI_PATH),
"--connect",
"port=swd",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": None,
"conn_modifiers": None,
"cli": CLI_PATH,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": ["--skipErase"],
"system": "",
"cli_path": str(CLI_PATH),
"calls": [
[
str(CLI_PATH),
"--connect",
"port=swd",
"--skipErase",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": None,
"conn_modifiers": None,
"cli": None,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "Linux",
"cli_path": str(LINUX_CLI_PATH),
"calls": [
[
str(LINUX_CLI_PATH),
"--connect",
"port=swd",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": None,
"conn_modifiers": None,
"cli": None,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "Darwin",
"cli_path": str(MACOS_CLI_PATH),
"calls": [
[
str(MACOS_CLI_PATH),
"--connect",
"port=swd",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
{
"port": "swd",
"frequency": None,
"reset_mode": None,
"conn_modifiers": None,
"cli": None,
"use_elf": False,
"erase": False,
"extload": None,
"tool_opt": [],
"system": "Windows",
"cli_path": str(WINDOWS_CLI_PATH),
"calls": [
[
str(WINDOWS_CLI_PATH),
"--connect",
"port=swd",
"--download",
RC_KERNEL_HEX,
"--start",
],
],
},
)
"""Test cases."""
@pytest.mark.parametrize("tc", TEST_CASES)
@patch("runners.stm32cubeprogrammer.platform.system")
@patch("runners.stm32cubeprogrammer.Path.home", return_value=HOME_PATH)
@patch("runners.stm32cubeprogrammer.Path.exists", return_value=True)
@patch.dict("runners.stm32cubeprogrammer.os.environ", ENVIRON)
@patch("runners.core.ZephyrBinaryRunner.require")
@patch("runners.stm32cubeprogrammer.STM32CubeProgrammerBinaryRunner.check_call")
def test_stm32cubeprogrammer_init(
check_call, require, path_exists, path_home, system, tc, runner_config
):
"""Tests that ``STM32CubeProgrammerBinaryRunner`` class can be initialized
and that ``flash`` command works as expected.
"""
system.return_value = tc["system"]
runner = STM32CubeProgrammerBinaryRunner(
cfg=runner_config,
port=tc["port"],
frequency=tc["frequency"],
reset_mode=tc["reset_mode"],
conn_modifiers=tc["conn_modifiers"],
cli=tc["cli"],
use_elf=tc["use_elf"],
erase=tc["erase"],
extload=tc["extload"],
tool_opt=tc["tool_opt"],
)
runner.run("flash")
require.assert_called_with(tc["cli_path"])
assert check_call.call_args_list == [call(x) for x in tc["calls"]]
@pytest.mark.parametrize("tc", TEST_CASES)
@patch("runners.stm32cubeprogrammer.platform.system")
@patch("runners.stm32cubeprogrammer.Path.home", return_value=HOME_PATH)
@patch("runners.stm32cubeprogrammer.Path.exists", return_value=True)
@patch.dict("runners.stm32cubeprogrammer.os.environ", ENVIRON)
@patch("runners.core.ZephyrBinaryRunner.require")
@patch("runners.stm32cubeprogrammer.STM32CubeProgrammerBinaryRunner.check_call")
def test_stm32cubeprogrammer_create(
check_call, require, path_exists, path_home, system, tc, runner_config
):
"""Tests that ``STM32CubeProgrammerBinaryRunner`` class can be created using
the ``create`` factory method and that ``flash`` command works as expected.
"""
system.return_value = tc["system"]
args = ["--port", tc["port"]]
if tc["frequency"]:
args.extend(["--frequency", tc["frequency"]])
if tc["reset_mode"]:
args.extend(["--reset-mode", tc["reset_mode"]])
if tc["conn_modifiers"]:
args.extend(["--conn-modifiers", tc["conn_modifiers"]])
if tc["cli"]:
args.extend(["--cli", str(tc["cli"])])
if tc["use_elf"]:
args.extend(["--use-elf"])
if tc["erase"]:
args.append("--erase")
if tc["extload"]:
args.extend(["--extload", tc["extload"]])
if tc["tool_opt"]:
args.extend(["--tool-opt", " " + tc["tool_opt"][0]])
parser = argparse.ArgumentParser(allow_abbrev=False)
STM32CubeProgrammerBinaryRunner.add_parser(parser)
arg_namespace = parser.parse_args(args)
runner = STM32CubeProgrammerBinaryRunner.create(runner_config, arg_namespace)
runner.run("flash")
require.assert_called_with(tc["cli_path"])
assert check_call.call_args_list == [call(x) for x in tc["calls"]]
``` | /content/code_sandbox/scripts/west_commands/tests/test_stm32cubeprogrammer.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,705 |
```python
#
import argparse
from unittest.mock import patch, call
import os
import pytest
from runners import blackmagicprobe
from runners.blackmagicprobe import BlackMagicProbeRunner
from conftest import RC_KERNEL_ELF, RC_GDB
import serial.tools.list_ports
# Serial device handed to every runner in this file, either via the
# constructor or via --gdb-serial.
TEST_GDB_SERIAL = 'test-gdb-serial'
# Expected subprocesses to be run for each command. Using the
# runner_config fixture (and always specifying gdb-serial) means we
# don't get 100% coverage, but it's a starting out point.
# NOTE: each value is a 1-tuple containing a single gdb argument vector.
EXPECTED_COMMANDS = {
    'attach':
    ([RC_GDB,
      '-ex', "set confirm off",
      '-ex', "target extended-remote {}".format(TEST_GDB_SERIAL),
      '-ex', "monitor swdp_scan",
      '-ex', "attach 1",
      '-ex', "file {}".format(RC_KERNEL_ELF)],),
    'debug':
    ([RC_GDB,
      '-ex', "set confirm off",
      '-ex', "target extended-remote {}".format(TEST_GDB_SERIAL),
      '-ex', "monitor swdp_scan",
      '-ex', "attach 1",
      '-ex', "file {}".format(RC_KERNEL_ELF),
      '-ex', "load {}".format(RC_KERNEL_ELF)],),
    'flash':
    ([RC_GDB,
      '-ex', "set confirm off",
      '-ex', "target extended-remote {}".format(TEST_GDB_SERIAL),
      '-ex', "monitor swdp_scan",
      '-ex', "attach 1",
      '-ex', "load {}".format(RC_KERNEL_ELF),
      '-ex', "kill",
      '-ex', "quit",
      '-silent'],),
}
# The monitor command each runner command must emit when --connect-rst
# is given (exercised by test_blackmagicprobe_connect_rst below).
EXPECTED_CONNECT_SRST_COMMAND = {
    'attach': 'monitor connect_rst disable',
    'debug': 'monitor connect_rst enable',
    'flash': 'monitor connect_rst enable',
}
def require_patch(program):
    # Stand-in for ZephyrBinaryRunner.require(): the only tool these
    # tests may ever look up is the GDB from the runner config.
    assert program == RC_GDB
@pytest.mark.parametrize('command', EXPECTED_COMMANDS)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_blackmagicprobe_init(cc, req, command, runner_config):
    '''Runner built via constructor must issue the expected gdb calls.'''
    bmp = BlackMagicProbeRunner(runner_config, TEST_GDB_SERIAL)
    bmp.run(command)
    expected = [call(args) for args in EXPECTED_COMMANDS[command]]
    assert cc.call_args_list == expected
@pytest.mark.parametrize('command', EXPECTED_COMMANDS)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_blackmagicprobe_create(cc, req, command, runner_config):
    '''Runner built from parsed CLI args must issue the expected gdb calls.'''
    parser = argparse.ArgumentParser(allow_abbrev=False)
    BlackMagicProbeRunner.add_parser(parser)
    namespace = parser.parse_args(['--gdb-serial', TEST_GDB_SERIAL])
    BlackMagicProbeRunner.create(runner_config, namespace).run(command)
    expected = [call(args) for args in EXPECTED_COMMANDS[command]]
    assert cc.call_args_list == expected
@pytest.mark.parametrize('command', EXPECTED_CONNECT_SRST_COMMAND)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_blackmagicprobe_connect_rst(cc, req, command, runner_config):
    '''Test that commands list the correct connect_rst value when enabled.'''
    parser = argparse.ArgumentParser(allow_abbrev=False)
    BlackMagicProbeRunner.add_parser(parser)
    namespace = parser.parse_args(
        ['--gdb-serial', TEST_GDB_SERIAL, '--connect-rst'])
    BlackMagicProbeRunner.create(runner_config, namespace).run(command)
    # The monitor command must appear in the first recorded gdb invocation.
    gdb_args = cc.call_args_list[0][0][0]
    assert EXPECTED_CONNECT_SRST_COMMAND[command] in gdb_args
@pytest.mark.parametrize('arg, env, expected', [
    # Argument has priority
    ('/dev/XXX', None, '/dev/XXX'),
    ('/dev/XXX', '/dev/YYYY', '/dev/XXX'),
    # Then BMP_GDB_SERIAL env variable
    (None, '/dev/XXX', '/dev/XXX'),
])
def test_blackmagicprobe_gdb_serial_generic(arg, env, expected):
    """blackmagicprobe_gdb_serial(): an explicit argument wins over the
    BMP_GDB_SERIAL environment variable.

    NOTE(review): mutates os.environ without restoring it afterwards;
    later tests must not assume a pristine environment.
    """
    if env:
        os.environ['BMP_GDB_SERIAL'] = env
    else:
        # pop() with a default replaces the membership-test-then-pop
        # two-step and never raises.
        os.environ.pop('BMP_GDB_SERIAL', None)
    ret = blackmagicprobe.blackmagicprobe_gdb_serial(arg)
    assert expected == ret
@pytest.mark.parametrize('known_path, comports, globs, expected', [
    (True, False, ['/dev/ttyACM0', '/dev/ttyACM1'],
     blackmagicprobe.DEFAULT_LINUX_BMP_PATH),
    (False, True, [], '/dev/ttyACM3'),
    (False, False, ['/dev/ttyACM0', '/dev/ttyACM1'], '/dev/ttyACM0'),
    (False, False, ['/dev/ttyACM1', '/dev/ttyACM0'], '/dev/ttyACM0'),
])
@patch('serial.tools.list_ports.comports')
@patch('os.path.exists')
@patch('glob.glob')
def test_blackmagicprobe_gdb_serial_linux(gg, ope, stlpc, known_path, comports,
                                          globs, expected):
    '''Linux port discovery: the well-known path wins when it exists, then
    the comport advertising BMP_GDB_INTERFACE, then the lowest glob match.'''
    gg.return_value = globs
    ope.return_value = known_path
    ports = []
    if comports:
        # Only /dev/ttyACM3 advertises the BMP GDB interface.
        for device, interface in (('/dev/ttyACM1', 'something'),
                                  ('/dev/ttyACM2', None),
                                  ('/dev/ttyACM3',
                                   blackmagicprobe.BMP_GDB_INTERFACE)):
            port = serial.tools.list_ports_common.ListPortInfo(device)
            port.interface = interface
            ports.append(port)
    stlpc.return_value = ports
    assert expected == blackmagicprobe.blackmagicprobe_gdb_serial_linux()
@pytest.mark.parametrize('comports, globs, expected', [
    (True, [], '/dev/cu.usbmodem3'),
    (False, ['/dev/cu.usbmodemAABBCC0', '/dev/cu.usbmodemAABBCC1'],
     '/dev/cu.usbmodemAABBCC0'),
    (False, ['/dev/cu.usbmodemAABBCC1', '/dev/cu.usbmodemAABBCC0'],
     '/dev/cu.usbmodemAABBCC0'),
])
@patch('serial.tools.list_ports.comports')
@patch('glob.glob')
def test_blackmagicprobe_gdb_serial_darwin(gg, stlpc, comports, globs, expected):
    '''macOS port discovery: a comport whose description mentions the BMP
    GDB product wins (lowest device — the fakes are deliberately listed out
    of order), else the lowest glob match.'''
    gg.return_value = globs
    ports = []
    if comports:
        bmp_desc = f'{blackmagicprobe.BMP_GDB_PRODUCT} v1234'
        for device, description in (('/dev/cu.usbmodem1', 'unrelated'),
                                    ('/dev/cu.usbmodem2', None),
                                    ('/dev/cu.usbmodem3', bmp_desc),
                                    ('/dev/cu.usbmodem4', bmp_desc)):
            port = serial.tools.list_ports_common.ListPortInfo(device)
            port.description = description
            ports.append(port)
        # usbmodem4 intentionally precedes usbmodem3 in the fake listing.
        ports = [ports[0], ports[1], ports[3], ports[2]]
    stlpc.return_value = ports
    assert expected == blackmagicprobe.blackmagicprobe_gdb_serial_darwin()
@pytest.mark.parametrize('comports, expected', [
    (True, 'COM4'),
    (False, 'COM1'),
])
@patch('serial.tools.list_ports.comports')
def test_blackmagicprobe_gdb_serial_win32(stlpc, comports, expected):
    '''Windows port discovery: a comport matching the BMP VID/PID wins
    (lowest device — COM5 is listed before COM4 on purpose), else COM1.'''
    ports = []
    if comports:
        for device, vid, pid in (('COM2', 123, 456),
                                 ('COM3', None, None),
                                 ('COM4', blackmagicprobe.BMP_GDB_VID,
                                  blackmagicprobe.BMP_GDB_PID),
                                 ('COM5', blackmagicprobe.BMP_GDB_VID,
                                  blackmagicprobe.BMP_GDB_PID)):
            port = serial.tools.list_ports_common.ListPortInfo(device)
            port.vid = vid
            port.pid = pid
            ports.append(port)
        # Put COM5 ahead of COM4 to prove ordering does not matter.
        ports = [ports[0], ports[1], ports[3], ports[2]]
    stlpc.return_value = ports
    assert expected == blackmagicprobe.blackmagicprobe_gdb_serial_win32()
``` | /content/code_sandbox/scripts/west_commands/tests/test_blackmagicprobe.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,063 |
```python
#
from argparse import Namespace
from build import Build
import pytest
# Each case: 'r' is the raw positional remainder passed to
# Build._parse_remainder(); 's' is the expected source_dir and 'c' the
# expected cmake_opts it should extract.
TEST_CASES = [
    {'r': [],
     's': None, 'c': None},
    {'r': ['source_dir'],
     's': 'source_dir', 'c': None},
    {'r': ['source_dir', '--'],
     's': 'source_dir', 'c': None},
    {'r': ['source_dir', '--', 'cmake_opt'],
     's': 'source_dir', 'c': ['cmake_opt']},
    {'r': ['source_dir', '--', 'cmake_opt', 'cmake_opt2'],
     's': 'source_dir', 'c': ['cmake_opt', 'cmake_opt2']},
    {'r': ['thing_one', 'thing_two'],
     's': 'thing_one', 'c': ['thing_two']},
    {'r': ['thing_one', 'thing_two', 'thing_three'],
     's': 'thing_one', 'c': ['thing_two', 'thing_three']},
    {'r': ['--'],
     's': None, 'c': None},
    {'r': ['--', '--'],
     's': None, 'c': ['--']},
    {'r': ['--', 'cmake_opt'],
     's': None, 'c': ['cmake_opt']},
    {'r': ['--', 'cmake_opt', 'cmake_opt2'],
     's': None, 'c': ['cmake_opt', 'cmake_opt2']},
    {'r': ['--', 'cmake_opt', 'cmake_opt2', '--'],
     's': None, 'c': ['cmake_opt', 'cmake_opt2', '--']},
    {'r': ['--', 'cmake_opt', 'cmake_opt2', '--', 'tool_opt'],
     's': None, 'c': ['cmake_opt', 'cmake_opt2', '--', 'tool_opt']},
    {'r': ['--', 'cmake_opt', 'cmake_opt2', '--', 'tool_opt', 'tool_opt2'],
     's': None, 'c': ['cmake_opt', 'cmake_opt2', '--', 'tool_opt',
                      'tool_opt2']},
    {'r': ['--', 'cmake_opt', 'cmake_opt2', '--', 'tool_opt', 'tool_opt2',
           '--'],
     's': None, 'c': ['cmake_opt', 'cmake_opt2', '--', 'tool_opt', 'tool_opt2',
                      '--']},
]
# NOTE(review): ARGS is not referenced by the test below — possibly kept
# for future cases or used elsewhere; confirm before removing.
ARGS = Namespace(board=None, build_dir=None, cmake=False, command='build',
                 force=False, help=None, target=None, verbose=3, version=False,
                 zephyr_base=None)
@pytest.mark.parametrize('test_case', TEST_CASES)
def test_parse_remainder(test_case):
    '''_parse_remainder() must split the raw remainder 'r' into
    source_dir ('s') and cmake_opts ('c').'''
    build = Build()
    build.args = Namespace()
    build._parse_remainder(test_case['r'])
    assert (build.args.source_dir, build.args.cmake_opts) == \
        (test_case['s'], test_case['c'])
``` | /content/code_sandbox/scripts/west_commands/tests/test_build.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 647 |
```python
#
import argparse
from os import path, fspath
from unittest.mock import patch
from unittest.mock import call
import pytest
from runners.mdb import MdbNsimBinaryRunner, MdbHwBinaryRunner
from conftest import RC_KERNEL_ELF, RC_BOARD_DIR, RC_BUILD_DIR
# Name of the MDB driver executable the runner is expected to invoke.
TEST_DRIVER_CMD = 'mdb64'
TEST_NSIM_ARGS='test_nsim.args'
TEST_TARGET = 'test-target'
TEST_BOARD_NSIM_ARGS = '@' + path.join(RC_BOARD_DIR, 'support', TEST_NSIM_ARGS)
# Minimal .config contents driving the runner's hostlink detection.
# NOTE(review): the f-string prefixes are unnecessary (no placeholders)
# but harmless.
DOTCONFIG_HOSTLINK = f'''
CONFIG_ARC=y
CONFIG_UART_HOSTLINK=y
'''
DOTCONFIG_NO_HOSTLINK = f'''
CONFIG_ARC=y
'''
# mdb-nsim
# 'i': command-line arguments handed to the runner; 'o': the exact mdb
# invocation expected in response.
TEST_NSIM_FLASH_CASES = [
    {
        'i': ['--cores=1', '--nsim_args=' + TEST_NSIM_ARGS],
        'o': [TEST_DRIVER_CMD, '-nooptions', '-nogoifmain',
              '-toggle=include_local_symbols=1',
              '-nsim', TEST_BOARD_NSIM_ARGS,
              '-run', '-cl', RC_KERNEL_ELF]
    }]
TEST_NSIM_DEBUG_CASES = [
    {
        'i': ['--cores=1', '--nsim_args=' + TEST_NSIM_ARGS],
        'o': [TEST_DRIVER_CMD, '-nooptions', '-nogoifmain',
              '-toggle=include_local_symbols=1',
              '-nsim', TEST_BOARD_NSIM_ARGS,
              '-OKN', RC_KERNEL_ELF
              ]
    }]
# Two-core nsim: one per-core setup invocation each, then the combined
# launch invocation.
TEST_NSIM_MULTICORE_CASES = [['--cores=2', '--nsim_args=' + TEST_NSIM_ARGS]]
TEST_NSIM_CORE1 = [TEST_DRIVER_CMD, '-pset=1', '-psetname=core0',
                   '-nooptions', '-nogoifmain', '-toggle=include_local_symbols=1',
                   '-nsim', TEST_BOARD_NSIM_ARGS, RC_KERNEL_ELF]
TEST_NSIM_CORE2 = [TEST_DRIVER_CMD, '-pset=2', '-psetname=core1',
                   '-prop=download=2', '-nooptions', '-nogoifmain',
                   '-toggle=include_local_symbols=1',
                   '-nsim', TEST_BOARD_NSIM_ARGS, RC_KERNEL_ELF]
TEST_NSIM_CORES_LAUNCH = [TEST_DRIVER_CMD, '-multifiles=core0,core1',
                          '-run', '-cl']
# mdb-hw
TEST_HW_FLASH_CASES_NO_HOSTLINK = [
    {
        'i': ['--jtag=digilent', '--cores=1'],
        'o': [TEST_DRIVER_CMD, '-nooptions', '-nogoifmain',
              '-toggle=include_local_symbols=1',
              '-digilent',
              '-run', '-cmd=-nowaitq run', '-cmd=quit', '-cl', RC_KERNEL_ELF]
    }, {
        'i': ['--jtag=digilent', '--cores=1', '--dig-device=test'],
        'o': [TEST_DRIVER_CMD, '-nooptions', '-nogoifmain',
              '-toggle=include_local_symbols=1',
              '-digilent', '-prop=dig_device=test',
              '-run', '-cmd=-nowaitq run', '-cmd=quit', '-cl', RC_KERNEL_ELF]
    }]
# With hostlink enabled, the -cmd=... quit sequence is dropped.
TEST_HW_FLASH_CASES_HOSTLINK = [
    {
        'i': ['--jtag=digilent', '--cores=1'],
        'o': [TEST_DRIVER_CMD, '-nooptions', '-nogoifmain',
              '-toggle=include_local_symbols=1',
              '-digilent', '-run', '-cl', RC_KERNEL_ELF]
    }, {
        'i': ['--jtag=digilent', '--cores=1', '--dig-device=test'],
        'o': [TEST_DRIVER_CMD, '-nooptions', '-nogoifmain',
              '-toggle=include_local_symbols=1',
              '-digilent', '-prop=dig_device=test', '-run', '-cl', RC_KERNEL_ELF]
    }]
# Invalid inputs: 'e' is the exact ValueError message expected.
TEST_HW_FLASH_CASES_ERR = [
    {
        'i': ['--jtag=test_debug', '--cores=1'],
        'e': "unsupported jtag adapter test_debug"
    },{
        'i': ['--jtag=digilent', '--cores=13'],
        'e': "unsupported cores 13"
    }]
TEST_HW_DEBUG_CASES = [
    {
        'i': ['--jtag=digilent', '--cores=1'],
        'o': [TEST_DRIVER_CMD, '-nooptions', '-nogoifmain',
              '-toggle=include_local_symbols=1',
              '-digilent',
              '-OKN', RC_KERNEL_ELF]
    }, {
        'i': ['--jtag=digilent', '--cores=1', '--dig-device=test'],
        'o': [TEST_DRIVER_CMD, '-nooptions', '-nogoifmain',
              '-toggle=include_local_symbols=1',
              '-digilent', '-prop=dig_device=test',
              '-OKN', RC_KERNEL_ELF]
    }]
TEST_HW_DEBUG_CASES_ERR = [
    {
        'i': ['--jtag=test_debug', '--cores=1'],
        'e': "unsupported jtag adapter test_debug"
    }, {
        'i': ['--jtag=digilent', '--cores=13'],
        'e': "unsupported cores 13"
    }]
TEST_HW_MULTICORE_CASES = [['--jtag=digilent', '--cores=2']]
TEST_HW_CORE1 = [TEST_DRIVER_CMD, '-pset=1', '-psetname=core0',
                 '-nooptions', '-nogoifmain', '-toggle=include_local_symbols=1',
                 '-digilent', RC_KERNEL_ELF]
TEST_HW_CORE2 = [TEST_DRIVER_CMD, '-pset=2', '-psetname=core1',
                 '-prop=download=2', '-nooptions', '-nogoifmain',
                 '-toggle=include_local_symbols=1',
                 '-digilent', RC_KERNEL_ELF]
# NOTE(review): the hw launch lists cores as core1,core0 — the reverse of
# the nsim launch above; presumably intentional, verify against runners.mdb.
TEST_HW_CORES_LAUNCH_NO_HOSTLINK = [TEST_DRIVER_CMD, '-multifiles=core1,core0', '-run',
                                    '-cmd=-nowaitq run', '-cmd=quit', '-cl']
TEST_HW_CORES_LAUNCH_HOSTLINK = [TEST_DRIVER_CMD, '-multifiles=core1,core0', '-run', '-cl']
def adjust_runner_config(runner_config, tmpdir, dotconfig):
    '''Return a copy of 'runner_config' whose build directory is 'tmpdir',
    after writing the contents of 'dotconfig' to tmpdir/zephyr/.config.'''
    zephyr = tmpdir / 'zephyr'
    zephyr.mkdir()
    with open(zephyr / '.config', 'w') as f:
        f.write(dotconfig)
    # Removed a leftover debug print of the tmpdir path; it only polluted
    # captured test output (the bossac copy of this helper has none).
    return runner_config._replace(build_dir=fspath(tmpdir))
#
# Fixtures
#
def mdb(runner_config, tmpdir, mdb_runner):
    '''MdbBinaryRunner from constructor kwargs or command line parameters'''
    # The returned factory accepts either a dict (forwarded to the runner
    # constructor) or a list of CLI arguments (parsed and handed to the
    # runner class's create() method).
    def _factory(args):
        # Make the kernel ELF exist as an empty file, so commands that
        # actually read it must be patched out.
        tmpdir.ensure(RC_KERNEL_ELF)
        tmpdir.chdir()
        if isinstance(args, dict):
            return mdb_runner(runner_config, TEST_TARGET, **args)
        if isinstance(args, list):
            parser = argparse.ArgumentParser(allow_abbrev=False)
            mdb_runner.add_parser(parser)
            return mdb_runner.create(runner_config, parser.parse_args(args))
    return _factory
@pytest.fixture
def mdb_nsim(runner_config, tmpdir):
    # Factory fixture producing nSIM-backed MDB runners.
    return mdb(runner_config, tmpdir, MdbNsimBinaryRunner)
@pytest.fixture
def mdb_hw_no_hl(runner_config, tmpdir):
    # Factory fixture producing hardware MDB runners with hostlink disabled.
    runner_config = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_NO_HOSTLINK)
    return mdb(runner_config, tmpdir, MdbHwBinaryRunner)
@pytest.fixture
def mdb_hw_hl(runner_config, tmpdir):
    # Factory fixture producing hardware MDB runners with hostlink enabled.
    runner_config = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_HOSTLINK)
    return mdb(runner_config, tmpdir, MdbHwBinaryRunner)
#
# Helpers
#
def require_patch(program):
    # Stand-in for ZephyrBinaryRunner.require(): the only tool these
    # tests may ever look up is the mdb64 driver.
    assert program == TEST_DRIVER_CMD
#
# Test cases for runners created by constructor.
#
# mdb-nsim test cases
@pytest.mark.parametrize('test_case', TEST_NSIM_FLASH_CASES)
@patch('runners.mdb.MdbNsimBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_flash_nsim(require, cc, test_case, mdb_nsim):
    '''Single-core nsim flash must issue exactly the expected mdb call.'''
    runner = mdb_nsim(test_case['i'])
    runner.run('flash')
    assert require.called
    cc.assert_called_once_with(test_case['o'], cwd=RC_BUILD_DIR)
@pytest.mark.parametrize('test_case', TEST_NSIM_DEBUG_CASES)
@patch('runners.mdb.MdbNsimBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debug_nsim(require, pii, test_case, mdb_nsim):
    '''Single-core nsim debug must issue exactly the expected mdb call.'''
    runner = mdb_nsim(test_case['i'])
    runner.run('debug')
    assert require.called
    pii.assert_called_once_with(test_case['o'], cwd=RC_BUILD_DIR)
@pytest.mark.parametrize('test_case', TEST_NSIM_MULTICORE_CASES)
@patch('runners.mdb.MdbNsimBinaryRunner.check_call')
@patch('runners.mdb.MdbNsimBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_multicores_nsim(require, pii, cc, test_case, mdb_nsim):
    '''Two-core nsim flash: one setup call per core, then the launch call.'''
    mdb_nsim(test_case).run('flash')
    assert require.called
    per_core = [call(TEST_NSIM_CORE1, cwd=RC_BUILD_DIR),
                call(TEST_NSIM_CORE2, cwd=RC_BUILD_DIR)]
    cc.assert_has_calls(per_core)
    pii.assert_called_once_with(TEST_NSIM_CORES_LAUNCH, cwd=RC_BUILD_DIR)
# mdb-hw test cases
@pytest.mark.parametrize('test_case', TEST_HW_FLASH_CASES_NO_HOSTLINK)
@patch('runners.mdb.MdbHwBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_flash_hw_no_hl(require, cc, test_case, mdb_hw_no_hl, tmpdir):
    '''Hardware flash without hostlink must issue the expected mdb call.'''
    runner = mdb_hw_no_hl(test_case['i'])
    runner.run('flash')
    assert require.called
    cc.assert_called_once_with(test_case['o'], cwd=tmpdir)
@pytest.mark.parametrize('test_case', TEST_HW_FLASH_CASES_HOSTLINK)
@patch('runners.mdb.MdbHwBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_flash_hw_hl(require, cc, test_case, mdb_hw_hl, tmpdir):
    '''Hardware flash with hostlink must issue the expected mdb call.'''
    runner = mdb_hw_hl(test_case['i'])
    runner.run('flash')
    assert require.called
    cc.assert_called_once_with(test_case['o'], cwd=tmpdir)
@pytest.mark.parametrize('test_case', TEST_HW_FLASH_CASES_ERR)
@patch('runners.mdb.MdbHwBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_flash_hw_err(require, cc, test_case, mdb_hw_no_hl):
    '''Flashing with a bad jtag adapter or core count raises ValueError.'''
    runner = mdb_hw_no_hl(test_case['i'])
    with pytest.raises(ValueError) as rinfo:
        runner.run('flash')
    assert str(rinfo.value) == test_case['e']
@pytest.mark.parametrize('test_case', TEST_HW_DEBUG_CASES)
@patch('runners.mdb.MdbHwBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debug_hw(require, pii, test_case, mdb_hw_no_hl, tmpdir):
    '''Hardware debug without hostlink must issue the expected mdb call.'''
    runner = mdb_hw_no_hl(test_case['i'])
    runner.run('debug')
    assert require.called
    pii.assert_called_once_with(test_case['o'], cwd=tmpdir)
@pytest.mark.parametrize('test_case', TEST_HW_DEBUG_CASES)
@patch('runners.mdb.MdbHwBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debug_hw_hl(require, pii, test_case, mdb_hw_hl, tmpdir):
    '''Hardware debug with hostlink must issue the expected mdb call.'''
    runner = mdb_hw_hl(test_case['i'])
    runner.run('debug')
    assert require.called
    pii.assert_called_once_with(test_case['o'], cwd=tmpdir)
@pytest.mark.parametrize('test_case', TEST_HW_DEBUG_CASES_ERR)
@patch('runners.mdb.MdbHwBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_debug_hw_err(require, pii, test_case, mdb_hw_no_hl):
    '''Debugging with a bad jtag adapter or core count raises ValueError.'''
    runner = mdb_hw_no_hl(test_case['i'])
    with pytest.raises(ValueError) as rinfo:
        runner.run('debug')
    assert str(rinfo.value) == test_case['e']
@pytest.mark.parametrize('test_case', TEST_HW_MULTICORE_CASES)
@patch('runners.mdb.MdbHwBinaryRunner.check_call')
@patch('runners.mdb.MdbHwBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_multicores_hw_no_hl(require, pii, cc, test_case, mdb_hw_no_hl, tmpdir):
    '''Two-core hardware flash, no hostlink: per-core setup then launch.'''
    mdb_hw_no_hl(test_case).run('flash')
    assert require.called
    per_core = [call(TEST_HW_CORE1, cwd=tmpdir), call(TEST_HW_CORE2, cwd=tmpdir)]
    cc.assert_has_calls(per_core)
    pii.assert_called_once_with(TEST_HW_CORES_LAUNCH_NO_HOSTLINK, cwd=tmpdir)
@pytest.mark.parametrize('test_case', TEST_HW_MULTICORE_CASES)
@patch('runners.mdb.MdbHwBinaryRunner.check_call')
@patch('runners.mdb.MdbHwBinaryRunner.call')
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
def test_multicores_hw_hl(require, pii, cc, test_case, mdb_hw_hl, tmpdir):
    '''Two-core hardware flash with hostlink: per-core setup then launch.'''
    mdb_hw_hl(test_case).run('flash')
    assert require.called
    per_core = [call(TEST_HW_CORE1, cwd=tmpdir), call(TEST_HW_CORE2, cwd=tmpdir)]
    cc.assert_has_calls(per_core)
    pii.assert_called_once_with(TEST_HW_CORES_LAUNCH_HOSTLINK, cwd=tmpdir)
``` | /content/code_sandbox/scripts/west_commands/tests/test_mdb.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,140 |
```python
#
import argparse
import os
import platform
from unittest.mock import patch, call
import pytest
from runners.bossac import BossacBinaryRunner
from conftest import RC_KERNEL_BIN
# bossac drives the device through stty, whose -F flag is Linux-only.
if platform.system() != 'Linux':
    pytest.skip("skipping Linux-only bossac tests", allow_module_level=True)
TEST_BOSSAC_PORT = 'test-bossac-serial'
TEST_BOSSAC_SPEED = '1200'
# NOTE(review): TEST_OFFSET / EXPECTED_COMMANDS_WITH_OFFSET are not used
# by the tests visible here; confirm before removing.
TEST_OFFSET = 1234
# 5678 == 0x162e, matching CONFIG_FLASH_LOAD_OFFSET in the dotconfigs below.
TEST_FLASH_ADDRESS = 5678
TEST_BOARD_NAME = "my_board"
# Baseline: stty setup at the default 115200 baud, then a bossac flash
# with no --offset.
EXPECTED_COMMANDS = [
    ['stty', '-F', TEST_BOSSAC_PORT, 'raw', 'ispeed', '115200',
     'ospeed', '115200', 'cs8', '-cstopb', 'ignpar', 'eol', '255',
     'eof', '255'],
    ['bossac', '-p', TEST_BOSSAC_PORT, '-R', '-e', '-w', '-v',
     '-b', RC_KERNEL_BIN],
]
EXPECTED_COMMANDS_WITH_SPEED = [
    ['stty', '-F', TEST_BOSSAC_PORT, 'raw', 'ispeed', TEST_BOSSAC_SPEED,
     'ospeed', TEST_BOSSAC_SPEED, 'cs8', '-cstopb', 'ignpar', 'eol', '255',
     'eof', '255'],
    ['bossac', '-p', TEST_BOSSAC_PORT, '-R', '-e', '-w', '-v',
     '-b', RC_KERNEL_BIN],
]
EXPECTED_COMMANDS_WITH_OFFSET = [
    ['stty', '-F', TEST_BOSSAC_PORT, 'raw', 'ispeed', '115200',
     'ospeed', '115200', 'cs8', '-cstopb', 'ignpar', 'eol', '255',
     'eof', '255'],
    ['bossac', '-p', TEST_BOSSAC_PORT, '-R', '-e', '-w', '-v',
     '-b', RC_KERNEL_BIN, '-o', str(TEST_OFFSET)],
]
EXPECTED_COMMANDS_WITH_FLASH_ADDRESS = [
    [
        'stty', '-F', TEST_BOSSAC_PORT, 'raw', 'ispeed', '115200',
        'ospeed', '115200', 'cs8', '-cstopb', 'ignpar', 'eol', '255',
        'eof', '255'
    ],
    [
        'bossac', '-p', TEST_BOSSAC_PORT, '-R', '-e', '-w', '-v',
        '-b', RC_KERNEL_BIN, '-o', str(TEST_FLASH_ADDRESS),
    ],
]
# Extended (Arduino/Adafruit) variants touch the port at 1200 baud first.
EXPECTED_COMMANDS_WITH_EXTENDED = [
    [
        'stty', '-F', TEST_BOSSAC_PORT, 'raw', 'ispeed', '1200',
        'ospeed', '1200', 'cs8', '-cstopb', 'ignpar', 'eol', '255',
        'eof', '255'
    ],
    [
        'bossac', '-p', TEST_BOSSAC_PORT, '-R', '-e', '-w', '-v',
        '-b', RC_KERNEL_BIN, '-o', str(TEST_FLASH_ADDRESS),
    ],
]
# SAM-BA ROM without offset
# No code partition Kconfig
# No zephyr,code-partition (defined on DT)
DOTCONFIG_STD = f'''
CONFIG_BOARD="{TEST_BOARD_NAME}"
CONFIG_FLASH_LOAD_OFFSET=0x162e
'''
# SAM-BA ROM/FLASH with offset
DOTCONFIG_COND1 = f'''
CONFIG_BOARD="{TEST_BOARD_NAME}"
CONFIG_USE_DT_CODE_PARTITION=y
CONFIG_HAS_FLASH_LOAD_OFFSET=y
CONFIG_FLASH_LOAD_OFFSET=0x162e
'''
# SAM-BA ROM/FLASH without offset
# No code partition Kconfig
DOTCONFIG_COND2 = f'''
CONFIG_BOARD="{TEST_BOARD_NAME}"
CONFIG_HAS_FLASH_LOAD_OFFSET=y
CONFIG_FLASH_LOAD_OFFSET=0x162e
'''
# SAM-BA Extended Arduino with offset
DOTCONFIG_COND3 = f'''
CONFIG_BOARD="{TEST_BOARD_NAME}"
CONFIG_USE_DT_CODE_PARTITION=y
CONFIG_BOOTLOADER_BOSSA_ARDUINO=y
CONFIG_HAS_FLASH_LOAD_OFFSET=y
CONFIG_FLASH_LOAD_OFFSET=0x162e
'''
# SAM-BA Extended Adafruit with offset
DOTCONFIG_COND4 = f'''
CONFIG_BOARD="{TEST_BOARD_NAME}"
CONFIG_USE_DT_CODE_PARTITION=y
CONFIG_BOOTLOADER_BOSSA_ADAFRUIT_UF2=y
CONFIG_HAS_FLASH_LOAD_OFFSET=y
CONFIG_FLASH_LOAD_OFFSET=0x162e
'''
# SAM-BA omit offset
DOTCONFIG_COND5 = f'''
CONFIG_BOARD="{TEST_BOARD_NAME}"
CONFIG_USE_DT_CODE_PARTITION=y
CONFIG_HAS_FLASH_LOAD_OFFSET=y
CONFIG_FLASH_LOAD_OFFSET=0x0
'''
# SAM-BA Legacy Mode
DOTCONFIG_COND6 = f'''
CONFIG_BOARD="{TEST_BOARD_NAME}"
CONFIG_USE_DT_CODE_PARTITION=y
CONFIG_BOOTLOADER_BOSSA_LEGACY=y
CONFIG_HAS_FLASH_LOAD_OFFSET=y
CONFIG_FLASH_LOAD_OFFSET=0x162e
'''
def adjust_runner_config(runner_config, tmpdir, dotconfig):
    '''Clone 'runner_config' with its build directory pointed at 'tmpdir',
    first writing 'dotconfig' out as tmpdir/zephyr/.config.'''
    build_subdir = tmpdir / 'zephyr'
    build_subdir.mkdir()
    with open(build_subdir / '.config', 'w') as dotconfig_file:
        dotconfig_file.write(dotconfig)
    return runner_config._replace(build_dir=os.fspath(tmpdir))
def require_patch(program):
    # Stand-in for ZephyrBinaryRunner.require(): these tests may only
    # look up the bossac and stty tools.
    assert program in ['bossac', 'stty']
# Keep a reference to the real os.path.isfile before tests patch it.
os_path_isfile = os.path.isfile
def os_path_isfile_patch(filename):
    '''Pretend the kernel .bin exists; defer every other path to the
    real os.path.isfile saved above.'''
    return filename == RC_KERNEL_BIN or os_path_isfile(filename)
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=False)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=None)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_init(cc, req, get_cod_par, sup, runner_config, tmpdir):
    """Runner built via constructor; ROM bootloader, no DT code partition
    (CONFIG_USE_DT_CODE_PARTITION=n). Any SDK. Flash must NOT pass --offset.
    """
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_STD)
    runner = BossacBinaryRunner(cfg, port=TEST_BOSSAC_PORT)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS]
    assert cc.call_args_list == expected
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=False)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=None)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create(cc, req, get_cod_par, sup, runner_config, tmpdir):
    """Runner built from CLI parameters (--bossac-port only); ROM
    bootloader, no DT code partition. Any SDK. Flash must NOT pass --offset.
    """
    parser = argparse.ArgumentParser(allow_abbrev=False)
    BossacBinaryRunner.add_parser(parser)
    namespace = parser.parse_args(['--bossac-port', str(TEST_BOSSAC_PORT)])
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_STD)
    runner = BossacBinaryRunner.create(cfg, namespace)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS]
    assert cc.call_args_list == expected
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=False)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=None)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create_with_speed(cc, req, get_cod_par, sup, runner_config, tmpdir):
    """Runner built from CLI parameters (--bossac-port and --speed); ROM
    bootloader, no DT code partition. Any SDK. The stty call must use the
    requested speed and flash must NOT pass --offset.
    """
    parser = argparse.ArgumentParser(allow_abbrev=False)
    BossacBinaryRunner.add_parser(parser)
    namespace = parser.parse_args(['--bossac-port', str(TEST_BOSSAC_PORT),
                                   '--speed', str(TEST_BOSSAC_SPEED)])
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_STD)
    runner = BossacBinaryRunner.create(cfg, namespace)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS_WITH_SPEED]
    assert cc.call_args_list == expected
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=True)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=True)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create_with_flash_address(cc, req, get_cod_par, sup,
                                          runner_config, tmpdir):
    """DT code partition enabled (CONFIG_USE_DT_CODE_PARTITION=y) with
    zephyr,code-partition present; SDK >= 0.12.0. Flash must pass --offset.
    """
    parser = argparse.ArgumentParser(allow_abbrev=False)
    BossacBinaryRunner.add_parser(parser)
    namespace = parser.parse_args(['--bossac-port', str(TEST_BOSSAC_PORT)])
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_COND1)
    runner = BossacBinaryRunner.create(cfg, namespace)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS_WITH_FLASH_ADDRESS]
    assert cc.call_args_list == expected
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=False)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=True)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create_with_omit_address(cc, req, bcfg_ini, sup,
                                         runner_config, tmpdir):
    """CONFIG_FLASH_LOAD_OFFSET=0 must omit --offset entirely. Valid for
    ROM bootloaders that start the image at 0 but still define flash
    partitions (e.g. to use storage). DT code partition enabled; any SDK.
    """
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_COND5)
    runner = BossacBinaryRunner(cfg, port=TEST_BOSSAC_PORT)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS]
    assert cc.call_args_list == expected
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=True)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=True)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create_with_arduino(cc, req, get_cod_par, sup,
                                    runner_config, tmpdir):
    """SAM-BA extended protocol, Arduino variation
    (CONFIG_BOOTLOADER_BOSSA_ARDUINO=y), DT code partition enabled;
    SDK >= 0.12.0. Flash must use the 1200-baud stty setup and --offset.
    """
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_COND3)
    runner = BossacBinaryRunner(cfg, port=TEST_BOSSAC_PORT)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS_WITH_EXTENDED]
    assert cc.call_args_list == expected
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=True)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=True)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create_with_adafruit(cc, req, get_cod_par, sup,
                                     runner_config, tmpdir):
    """SAM-BA extended protocol, Adafruit UF2 variation
    (CONFIG_BOOTLOADER_BOSSA_ADAFRUIT_UF2=y), DT code partition enabled;
    SDK >= 0.12.0. Flash must use the 1200-baud stty setup and --offset.
    """
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_COND4)
    runner = BossacBinaryRunner(cfg, port=TEST_BOSSAC_PORT)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS_WITH_EXTENDED]
    assert cc.call_args_list == expected
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=True)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=True)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create_with_legacy(cc, req, get_cod_par, sup,
                                   runner_config, tmpdir):
    """SAM-BA legacy protocol (CONFIG_BOOTLOADER_BOSSA_LEGACY=y), DT code
    partition enabled; any SDK. Flash must NOT pass --offset.
    """
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_COND6)
    runner = BossacBinaryRunner(cfg, port=TEST_BOSSAC_PORT)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS]
    assert cc.call_args_list == expected
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=False)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=True)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create_with_oldsdk(cc, req, get_cod_par, sup,
                                   runner_config, tmpdir):
    """SDK older than 0.12.0 (supports() is False) with a configuration
    that needs --offset: the runner must abort and ask for an upgrade.
    """
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_COND1)
    runner = BossacBinaryRunner(cfg)
    with pytest.raises(RuntimeError) as rinfo:
        with patch('os.path.isfile', side_effect=os_path_isfile_patch):
            runner.run('flash')
    expected_msg = ("This version of BOSSA does not support the"
                    " --offset flag. Please upgrade to a newer"
                    " Zephyr SDK version >= 0.12.0.")
    assert str(rinfo.value) == expected_msg
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=False)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=None)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create_error_missing_dt_info(cc, req, get_cod_par, sup,
                                             runner_config, tmpdir):
    """
    SAM-BA offset misconfiguration test: no chosen code partition.

    Any SDK, any bootloader, CONFIG_USE_DT_CODE_PARTITION=y but the
    zephyr,code-partition chosen node is missing; the runner must abort.
    """
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_COND1)
    runner = BossacBinaryRunner(cfg)
    with pytest.raises(RuntimeError) as rinfo:
        with patch('os.path.isfile', side_effect=os_path_isfile_patch):
            runner.run('flash')
    expected_msg = ("The device tree zephyr,code-partition"
                    " chosen node must be defined.")
    assert str(rinfo.value) == expected_msg
@patch('runners.bossac.BossacBinaryRunner.supports',
       return_value=False)
@patch('runners.bossac.BossacBinaryRunner.get_chosen_code_partition_node',
       return_value=True)
@patch('runners.core.ZephyrBinaryRunner.require',
       side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_bossac_create_error_missing_kconfig(cc, req, get_cod_par, sup,
                                             runner_config, tmpdir):
    """
    SAM-BA offset misconfiguration test: CONFIG_USE_DT_CODE_PARTITION
    is not defined in Kconfig.

    Any SDK, any bootloader, zephyr,code-partition chosen node present
    but CONFIG_USE_DT_CODE_PARTITION=y missing; the runner must abort.
    """
    cfg = adjust_runner_config(runner_config, tmpdir, DOTCONFIG_COND2)
    runner = BossacBinaryRunner(cfg)
    with pytest.raises(RuntimeError) as rinfo:
        with patch('os.path.isfile', side_effect=os_path_isfile_patch):
            runner.run('flash')
    expected_msg = (
        "There is no CONFIG_USE_DT_CODE_PARTITION Kconfig defined at "
        + TEST_BOARD_NAME + "_defconfig file.\n This means that"
        " zephyr,code-partition device tree node should not be defined."
        " Check Zephyr SAM-BA documentation."
    )
    assert str(rinfo.value) == expected_msg
``` | /content/code_sandbox/scripts/west_commands/tests/test_bossac.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 4,239 |
```unknown
:0100000001FE
:00000001FF
``` | /content/code_sandbox/scripts/west_commands/tests/nrf/nrf5340_app_only.hex | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 14 |
```unknown
:020000040000FA
:0100000001FE
:020000040100F9
:0100000001FE
:00000001FF
``` | /content/code_sandbox/scripts/west_commands/tests/nrf/nrf5340_app_and_net.hex | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 36 |
```restructuredtext
This directory contains test data files for test_nrf.py.
``` | /content/code_sandbox/scripts/west_commands/tests/nrf/README.rst | restructuredtext | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 13 |
```unknown
:020000040100F9
:0100000001FE
:00000001FF
``` | /content/code_sandbox/scripts/west_commands/tests/nrf/nrf5340_net_only.hex | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 22 |
```python
import argparse
import os
import platform
from unittest.mock import patch, call
import pytest
from runners.gd32isp import Gd32ispBinaryRunner
from conftest import RC_KERNEL_BIN
# The gd32isp runner invokes a Linux-only command-line tool, so these
# tests only make sense on Linux hosts.
if platform.system() != 'Linux':
    pytest.skip("skipping Linux-only gd32isp tests", allow_module_level=True)
# Tool names and option values fed to the runner under test.
TEST_GD32ISP_CLI = 'GD32_ISP_Console'
TEST_GD32ISP_CLI_T = 'GD32_ISP_CLI'
TEST_GD32ISP_DEV = 'test-gd32test'
TEST_GD32ISP_PORT = 'test-gd32isp-serial'
TEST_GD32ISP_SPEED = '2000000'
TEST_GD32ISP_ADDR = '0x08765430'
# Command line expected when only the device is given (all other
# options take the runner's defaults).
EXPECTED_COMMANDS_DEFAULT = [
    [TEST_GD32ISP_CLI, '-c', '--pn', '/dev/ttyUSB0', '--br', '57600',
     '--sb', '1', '-i', TEST_GD32ISP_DEV, '-e', '--all', '-d',
     '--a', '0x08000000', '--fn', RC_KERNEL_BIN],
]
# Command line expected when every option is supplied explicitly.
EXPECTED_COMMANDS = [
    [TEST_GD32ISP_CLI_T, '-c', '--pn', TEST_GD32ISP_PORT,
     '--br', TEST_GD32ISP_SPEED,
     '--sb', '1', '-i', TEST_GD32ISP_DEV, '-e', '--all', '-d',
     '--a', TEST_GD32ISP_ADDR, '--fn', RC_KERNEL_BIN],
]
def require_patch(program):
    """Stand-in for ZephyrBinaryRunner.require(): accept only the two
    gd32isp tool names used by these tests."""
    assert program in [TEST_GD32ISP_CLI, TEST_GD32ISP_CLI_T]
# Keep a reference to the real os.path.isfile so the patch below can
# fall through to it for every path except the fake kernel binary.
os_path_isfile = os.path.isfile
def os_path_isfile_patch(filename):
    """Pretend RC_KERNEL_BIN exists on disk; defer to the real
    os.path.isfile for everything else."""
    if filename == RC_KERNEL_BIN:
        return True
    return os_path_isfile(filename)
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_gd32isp_init(cc, req, runner_config):
    """Constructing the runner with only a device must produce the
    default gd32isp command line."""
    runner = Gd32ispBinaryRunner(runner_config, TEST_GD32ISP_DEV)
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS_DEFAULT]
    assert cc.call_args_list == expected
@patch('runners.core.ZephyrBinaryRunner.require', side_effect=require_patch)
@patch('runners.core.ZephyrBinaryRunner.check_call')
def test_gd32isp_create(cc, req, runner_config):
    """Building the runner through create() with every option given on
    the command line must produce the fully-specified command line."""
    cli_args = [
        '--device', TEST_GD32ISP_DEV,
        '--port', TEST_GD32ISP_PORT,
        '--speed', TEST_GD32ISP_SPEED,
        '--addr', TEST_GD32ISP_ADDR,
        '--isp', TEST_GD32ISP_CLI_T,
    ]
    parser = argparse.ArgumentParser(allow_abbrev=False)
    Gd32ispBinaryRunner.add_parser(parser)
    runner = Gd32ispBinaryRunner.create(runner_config,
                                        parser.parse_args(cli_args))
    with patch('os.path.isfile', side_effect=os_path_isfile_patch):
        runner.run('flash')
    expected = [call(cmd) for cmd in EXPECTED_COMMANDS]
    assert cc.call_args_list == expected
``` | /content/code_sandbox/scripts/west_commands/tests/test_gd32isp.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 688 |
```python
#
import re
def getSPDXIDSafeCharacter(c):
    """
    Converts a character to an SPDX-ID-safe character.

    Arguments:
        - c: character to test
    Returns: c if it is SPDX-ID-safe (letter, number, '-' or '.');
             '-' otherwise
    """
    return c if c.isalpha() or c.isdigit() or c in ("-", ".") else "-"
def convertToSPDXIDSafe(s):
    """
    Converts a filename or other string to only SPDX-ID-safe characters.

    Note that a separate check (such as in getUniqueFileID) will need
    to be used to confirm that this is still a unique identifier, after
    conversion.

    Arguments:
        - s: string to be converted.
    Returns: string with all non-safe characters replaced with dashes.
    """
    # letter, digit, '-' and '.' are safe; everything else becomes '-'
    return "".join(
        c if c.isalpha() or c.isdigit() or c in ("-", ".") else "-"
        for c in s
    )
def getUniqueFileID(filenameOnly, timesSeen):
    """
    Find an SPDX ID that is unique among others seen so far.

    Arguments:
        - filenameOnly: filename only (directories omitted) seeking ID.
        - timesSeen: dict of all filename-only to number of times seen.
    Returns: unique SPDX ID; updates timesSeen to include it.
    """
    safeName = convertToSPDXIDSafe(filenameOnly)
    spdxID = f"SPDXRef-File-{safeName}"
    count = timesSeen.get(safeName, 0) + 1
    timesSeen[safeName] = count
    if count > 1:
        # duplicate filename: disambiguate with the occurrence count
        spdxID += f"-{count}"
    elif re.search(r"-\d+$", safeName):
        # edge case: the filename itself ends in "-<number>", so append
        # "-1" now to avoid colliding with a later duplicate's appended
        # occurrence counter
        spdxID += "-1"
    return spdxID
``` | /content/code_sandbox/scripts/west_commands/zspdx/spdxids.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 475 |
```python
#
``` | /content/code_sandbox/scripts/west_commands/zspdx/__init__.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2 |
```python
#
import os
from west import log
from zspdx.walker import WalkerConfig, Walker
from zspdx.scanner import ScannerConfig, scanDocument
from zspdx.writer import writeSPDX
# SBOMConfig contains settings that will be passed along to the various
# SBOM maker subcomponents.
class SBOMConfig:
    """Settings passed along to the various SBOM maker subcomponents."""

    def __init__(self):
        super().__init__()
        # prefix for Document namespaces; should not end with "/"
        self.namespacePrefix = ""
        # location of build directory
        self.buildDir = ""
        # location of SPDX document output directory
        self.spdxDir = ""
        # should also analyze for included header files?
        self.analyzeIncludes = False
        # should also add an SPDX document for the SDK?
        self.includeSDK = False
# create Cmake file-based API directories and query file
# Arguments:
# 1) build_dir: build directory
def setupCmakeQuery(build_dir):
    """Create CMake file-based API query directory and query file.

    Arguments:
        - build_dir: build directory

    Returns True on success; logs an error and returns False if an
    existing path blocks the query directory or file.
    """
    # check that query dir exists as a directory, or else create it
    cmakeApiDirPath = os.path.join(build_dir, ".cmake", "api", "v1", "query")
    if os.path.exists(cmakeApiDirPath):
        if not os.path.isdir(cmakeApiDirPath):
            log.err(f'cmake api query directory {cmakeApiDirPath} exists and is not a directory')
            return False
        # directory exists, we're good
    else:
        # create the directory
        os.makedirs(cmakeApiDirPath, exist_ok=False)

    # check that codemodel-v2 exists as a file, or else create it
    queryFilePath = os.path.join(cmakeApiDirPath, "codemodel-v2")
    if os.path.exists(queryFilePath):
        if not os.path.isfile(queryFilePath):
            # bug fix: this message previously said "is not a directory"
            # even though the failed check is for a regular file
            log.err(f'cmake api query file {queryFilePath} exists and is not a file')
            return False
        # file exists, we're good
        return True
    # file doesn't exist: create it empty (the file-based API only needs
    # the query file to be present); "with" guarantees it is closed
    with open(queryFilePath, "w"):
        pass
    return True
# main entry point for SBOM maker
# Arguments:
# 1) cfg: SBOMConfig
def makeSPDX(cfg):
    """Main entry point for the SBOM maker.

    Arguments:
        - cfg: SBOMConfig

    Returns True on success, False if any walk/scan/write step fails.
    """
    # report any odd configuration settings
    if cfg.analyzeIncludes and not cfg.includeSDK:
        log.wrn("config: requested to analyze includes but not to generate SDK SPDX document;")
        log.wrn("config: will proceed but will discard detected includes for SDK header files")

    # set up walker configuration
    walkerCfg = WalkerConfig()
    walkerCfg.namespacePrefix = cfg.namespacePrefix
    walkerCfg.buildDir = cfg.buildDir
    walkerCfg.analyzeIncludes = cfg.analyzeIncludes
    walkerCfg.includeSDK = cfg.includeSDK

    # make and run the walker
    w = Walker(walkerCfg)
    if not w.makeDocuments():
        log.err("SPDX walker failed; bailing")
        return False

    # set up scanner configuration and scan each document from walker
    scannerCfg = ScannerConfig()
    if cfg.includeSDK:
        scanDocument(scannerCfg, w.docSDK)
    scanDocument(scannerCfg, w.docApp)
    scanDocument(scannerCfg, w.docZephyr)
    scanDocument(scannerCfg, w.docBuild)

    # write each document, in this particular order so that the
    # hashes for external references are calculated

    # write SDK document, if we made one
    if cfg.includeSDK:
        if not writeSPDX(os.path.join(cfg.spdxDir, "sdk.spdx"), w.docSDK):
            log.err("SPDX writer failed for SDK document; bailing")
            return False

    # write the remaining documents in order; bug fix: the original code
    # did not assign writeSPDX()'s return value for the zephyr, build and
    # modules-deps documents, so their failure checks tested a stale value
    remaining = [
        ("app.spdx", w.docApp, "app"),
        ("zephyr.spdx", w.docZephyr, "zephyr"),
        ("build.spdx", w.docBuild, "build"),
        ("modules-deps.spdx", w.docModulesExtRefs, "modules-deps"),
    ]
    for fileName, doc, docName in remaining:
        if not writeSPDX(os.path.join(cfg.spdxDir, fileName), doc):
            log.err(f"SPDX writer failed for {docName} document; bailing")
            return False

    return True
``` | /content/code_sandbox/scripts/west_commands/zspdx/sbom.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,031 |
```python
#
from enum import Enum
class Codemodel:
    """Top-level object of the CMake file-based API codemodel reply."""

    def __init__(self):
        super().__init__()
        # top-level source and build directory paths
        self.paths_source = ""
        self.paths_build = ""
        # list of Config objects, one per build configuration
        self.configurations = []

    def __repr__(self):
        return f"Codemodel: source {self.paths_source}, build {self.paths_build}"
# A member of the codemodel configurations array
class Config:
    """A member of the codemodel configurations array."""

    def __init__(self):
        super().__init__()
        self.name = ""
        self.directories = []
        self.projects = []
        self.configTargets = []

    def __repr__(self):
        if self.name:
            return f"Config: {self.name}"
        return "Config: [no name]"
# A member of the configuration.directories array
class ConfigDir:
    """One source/build directory pair within a codemodel configuration."""
    def __init__(self):
        super(ConfigDir, self).__init__()
        # directory paths as reported in the codemodel JSON
        self.source = ""
        self.build = ""
        # indices into the configuration's arrays, as found in the JSON
        self.parentIndex = -1
        self.childIndexes = []
        self.projectIndex = -1
        self.targetIndexes = []
        self.minimumCMakeVersion = ""
        self.hasInstallRule = False
        # actual items, calculated from indices after loading
        self.parent = None
        self.children = []
        self.project = None
        self.targets = []
    def __repr__(self):
        return f"ConfigDir: source {self.source}, build {self.build}"
# A member of the configuration.projects array
class ConfigProject:
    """A CMake project() entry within a codemodel configuration."""
    def __init__(self):
        super(ConfigProject, self).__init__()
        self.name = ""
        # indices into the configuration's arrays, as found in the JSON
        self.parentIndex = -1
        self.childIndexes = []
        self.directoryIndexes = []
        self.targetIndexes = []
        # actual items, calculated from indices after loading
        self.parent = None
        self.children = []
        self.directories = []
        self.targets = []
    def __repr__(self):
        return f"ConfigProject: {self.name}"
# A member of the configuration.configTargets array
class ConfigTarget:
    """Reference to a build target; full Target data is loaded from jsonFile."""
    def __init__(self):
        super(ConfigTarget, self).__init__()
        self.name = ""
        self.id = ""
        # indices into the configuration's arrays, as found in the JSON
        self.directoryIndex = -1
        self.projectIndex = -1
        # path to the per-target JSON reply file
        self.jsonFile = ""
        # actual target data, loaded from self.jsonFile
        self.target = None
        # actual items, calculated from indices after loading
        self.directory = None
        self.project = None
    def __repr__(self):
        return f"ConfigTarget: {self.name}"
# The available values for Target.type
class TargetType(Enum):
    """Kinds of CMake targets, matching the file-based API "type" field."""
    UNKNOWN = 0
    EXECUTABLE = 1
    STATIC_LIBRARY = 2
    SHARED_LIBRARY = 3
    MODULE_LIBRARY = 4
    OBJECT_LIBRARY = 5
    UTILITY = 6
# A member of the target.install_destinations array
class TargetInstallDestination:
    """An install destination path for a target."""
    def __init__(self):
        super(TargetInstallDestination, self).__init__()
        self.path = ""
        self.backtrace = -1
    def __repr__(self):
        return f"TargetInstallDestination: {self.path}"
# A member of the target.link_commandFragments and
# archive_commandFragments array
class TargetCommandFragment:
    """One fragment of a link or archive command line, with its role."""
    def __init__(self):
        super(TargetCommandFragment, self).__init__()
        self.fragment = ""
        self.role = ""
    def __repr__(self):
        return f"TargetCommandFragment: {self.fragment}"
# A member of the target.dependencies array
class TargetDependency:
    """A dependency of one target on another, referenced by target id."""
    def __init__(self):
        super(TargetDependency, self).__init__()
        self.id = ""
        self.backtrace = -1
    def __repr__(self):
        return f"TargetDependency: {self.id}"
# A member of the target.sources array
class TargetSource:
    """A source file belonging to a target."""
    def __init__(self):
        super(TargetSource, self).__init__()
        self.path = ""
        # indices into the target's group arrays, as found in the JSON
        self.compileGroupIndex = -1
        self.sourceGroupIndex = -1
        self.isGenerated = False
        self.backtrace = -1
        # actual items, calculated from indices after loading
        self.compileGroup = None
        self.sourceGroup = None
    def __repr__(self):
        return f"TargetSource: {self.path}"
# A member of the target.sourceGroups array
class TargetSourceGroup:
    """A named grouping of a target's source files."""
    def __init__(self):
        super(TargetSourceGroup, self).__init__()
        self.name = ""
        self.sourceIndexes = []
        # actual items, calculated from indices after loading
        self.sources = []
    def __repr__(self):
        return f"TargetSourceGroup: {self.name}"
# A member of the target.compileGroups.includes array
class TargetCompileGroupInclude:
    """An include directory used by a compile group."""
    def __init__(self):
        super(TargetCompileGroupInclude, self).__init__()
        self.path = ""
        self.isSystem = False
        self.backtrace = -1
    def __repr__(self):
        return f"TargetCompileGroupInclude: {self.path}"
# A member of the target.compileGroups.precompileHeaders array
class TargetCompileGroupPrecompileHeader:
    """A precompiled header used by a compile group."""
    def __init__(self):
        super(TargetCompileGroupPrecompileHeader, self).__init__()
        self.header = ""
        self.backtrace = -1
    def __repr__(self):
        return f"TargetCompileGroupPrecompileHeader: {self.header}"
# A member of the target.compileGroups.defines array
class TargetCompileGroupDefine:
    """A preprocessor define used by a compile group."""
    def __init__(self):
        super(TargetCompileGroupDefine, self).__init__()
        self.define = ""
        self.backtrace = -1
    def __repr__(self):
        return f"TargetCompileGroupDefine: {self.define}"
# A member of the target.compileGroups array
class TargetCompileGroup:
    """A set of a target's sources compiled with the same settings."""
    def __init__(self):
        super(TargetCompileGroup, self).__init__()
        self.sourceIndexes = []
        self.language = ""
        self.compileCommandFragments = []
        self.includes = []
        self.precompileHeaders = []
        self.defines = []
        self.sysroot = ""
        # actual items, calculated from indices after loading
        self.sources = []
    def __repr__(self):
        return f"TargetCompileGroup: {self.sources}"
# A member of the target.backtraceGraph_nodes array
class TargetBacktraceGraphNode:
    """One node of the target's backtrace graph (CMake call sites)."""
    def __init__(self):
        super(TargetBacktraceGraphNode, self).__init__()
        self.file = -1
        self.line = -1
        self.command = -1
        self.parent = -1
    def __repr__(self):
        return f"TargetBacktraceGraphNode: {self.command}"
# Actual data in config.target.target, loaded from
# config.target.jsonFile
class Target:
    """Full data for one CMake build target, parsed from its reply file."""
    def __init__(self):
        super(Target, self).__init__()
        self.name = ""
        self.id = ""
        self.type = TargetType.UNKNOWN
        self.backtrace = -1
        self.folder = ""
        self.paths_source = ""
        self.paths_build = ""
        self.nameOnDisk = ""
        self.artifacts = []
        self.isGeneratorProvided = False
        # only if install rule is present
        self.install_prefix = ""
        self.install_destinations = []
        # only for executables and shared library targets that link into
        # a runtime binary
        self.link_language = ""
        self.link_commandFragments = []
        self.link_lto = False
        self.link_sysroot = ""
        # only for static library targets
        self.archive_commandFragments = []
        self.archive_lto = False
        # only if the target depends on other targets
        self.dependencies = []
        # corresponds to target's source files
        self.sources = []
        # only if sources are grouped together by source_group() or by default
        self.sourceGroups = []
        # only if target has sources that compile
        self.compileGroups = []
        # graph of backtraces referenced from elsewhere
        self.backtraceGraph_nodes = []
        self.backtraceGraph_commands = []
        self.backtraceGraph_files = []
    def __repr__(self):
        return f"Target: {self.name}"
``` | /content/code_sandbox/scripts/west_commands/zspdx/cmakefileapi.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,706 |
```python
#
# from path_to_url
LICENSE_LIST_VERSION = "3.12"
LICENSES = [
"0BSD",
"389-exception",
"AAL",
"ADSL",
"AFL-1.1",
"AFL-1.2",
"AFL-2.0",
"AFL-2.1",
"AFL-3.0",
"AGPL-1.0",
"AGPL-1.0-only",
"AGPL-1.0-or-later",
"AGPL-3.0",
"AGPL-3.0-only",
"AGPL-3.0-or-later",
"AMDPLPA",
"AML",
"AMPAS",
"ANTLR-PD",
"ANTLR-PD-fallback",
"APAFML",
"APL-1.0",
"APSL-1.0",
"APSL-1.1",
"APSL-1.2",
"APSL-2.0",
"Abstyles",
"Adobe-2006",
"Adobe-Glyph",
"Afmparse",
"Aladdin",
"Apache-1.0",
"Apache-1.1",
"Apache-2.0",
"Artistic-1.0",
"Artistic-1.0-Perl",
"Artistic-1.0-cl8",
"Artistic-2.0",
"Autoconf-exception-2.0",
"Autoconf-exception-3.0",
"BSD-1-Clause",
"BSD-2-Clause",
"BSD-2-Clause-FreeBSD",
"BSD-2-Clause-NetBSD",
"BSD-2-Clause-Patent",
"BSD-2-Clause-Views",
"BSD-3-Clause",
"BSD-3-Clause-Attribution",
"BSD-3-Clause-Clear",
"BSD-3-Clause-LBNL",
"BSD-3-Clause-Modification",
"BSD-3-Clause-No-Nuclear-Warranty",
"BSD-3-Clause-Open-MPI",
"BSD-4-Clause",
"BSD-4-Clause-Shortened",
"BSD-4-Clause-UC",
"BSD-Protection",
"BSD-Source-Code",
"BSL-1.0",
"BUSL-1.1",
"Bahyph",
"Barr",
"Beerware",
"Bison-exception-2.2",
"BitTorrent-1.0",
"BitTorrent-1.1",
"BlueOak-1.0.0",
"Bootloader-exception",
"Borceux",
"C-UDA-1.0",
"CAL-1.0",
"CAL-1.0-Combined-Work-Exception",
"CATOSL-1.1",
"CC-BY-1.0",
"CC-BY-2.0",
"CC-BY-2.5",
"CC-BY-3.0",
"CC-BY-3.0-AT",
"CC-BY-3.0-US",
"CC-BY-4.0",
"CC-BY-NC-1.0",
"CC-BY-NC-2.0",
"CC-BY-NC-2.5",
"CC-BY-NC-3.0",
"CC-BY-NC-4.0",
"CC-BY-NC-ND-1.0",
"CC-BY-NC-ND-2.0",
"CC-BY-NC-ND-2.5",
"CC-BY-NC-ND-3.0",
"CC-BY-NC-ND-3.0-IGO",
"CC-BY-NC-ND-4.0",
"CC-BY-NC-SA-1.0",
"CC-BY-NC-SA-2.0",
"CC-BY-NC-SA-2.5",
"CC-BY-NC-SA-3.0",
"CC-BY-NC-SA-4.0",
"CC-BY-ND-1.0",
"CC-BY-ND-2.0",
"CC-BY-ND-2.5",
"CC-BY-ND-3.0",
"CC-BY-ND-4.0",
"CC-BY-SA-1.0",
"CC-BY-SA-2.0",
"CC-BY-SA-2.0-UK",
"CC-BY-SA-2.1-JP",
"CC-BY-SA-2.5",
"CC-BY-SA-3.0",
"CC-BY-SA-3.0-AT",
"CC-BY-SA-4.0",
"CC-PDDC",
"CC0-1.0",
"CDDL-1.0",
"CDDL-1.1",
"CDLA-Permissive-1.0",
"CDLA-Sharing-1.0",
"CECILL-1.0",
"CECILL-1.1",
"CECILL-2.0",
"CECILL-2.1",
"CECILL-B",
"CECILL-C",
"CERN-OHL-1.1",
"CERN-OHL-1.2",
"CERN-OHL-P-2.0",
"CERN-OHL-S-2.0",
"CERN-OHL-W-2.0",
"CLISP-exception-2.0",
"CNRI-Jython",
"CNRI-Python",
"CNRI-Python-GPL-Compatible",
"CPAL-1.0",
"CPL-1.0",
"CPOL-1.02",
"CUA-OPL-1.0",
"Caldera",
"ClArtistic",
"Classpath-exception-2.0",
"Condor-1.1",
"Crossword",
"CrystalStacker",
"Cube",
"D-FSL-1.0",
"DOC",
"DRL-1.0",
"DSDP",
"DigiRule-FOSS-exception",
"Dotseqn",
"ECL-1.0",
"ECL-2.0",
"EFL-1.0",
"EFL-2.0",
"EPICS",
"EPL-1.0",
"EPL-2.0",
"EUDatagrid",
"EUPL-1.0",
"EUPL-1.1",
"EUPL-1.2",
"Entessa",
"ErlPL-1.1",
"Eurosym",
"FLTK-exception",
"FSFAP",
"FSFUL",
"FSFULLR",
"FTL",
"Fair",
"Fawkes-Runtime-exception",
"Font-exception-2.0",
"Frameworx-1.0",
"FreeBSD-DOC",
"FreeImage",
"GCC-exception-2.0",
"GCC-exception-3.1",
"GD",
"GFDL-1.1",
"GFDL-1.1-invariants-only",
"GFDL-1.1-invariants-or-later",
"GFDL-1.1-no-invariants-only",
"GFDL-1.1-no-invariants-or-later",
"GFDL-1.1-only",
"GFDL-1.1-or-later",
"GFDL-1.2",
"GFDL-1.2-invariants-only",
"GFDL-1.2-invariants-or-later",
"GFDL-1.2-no-invariants-only",
"GFDL-1.2-no-invariants-or-later",
"GFDL-1.2-only",
"GFDL-1.2-or-later",
"GFDL-1.3",
"GFDL-1.3-invariants-only",
"GFDL-1.3-invariants-or-later",
"GFDL-1.3-no-invariants-only",
"GFDL-1.3-no-invariants-or-later",
"GFDL-1.3-only",
"GFDL-1.3-or-later",
"GL2PS",
"GLWTPL",
"GPL-1.0",
"GPL-1.0+",
"GPL-1.0-only",
"GPL-1.0-or-later",
"GPL-2.0",
"GPL-2.0+",
"GPL-2.0-only",
"GPL-2.0-or-later",
"GPL-2.0-with-GCC-exception",
"GPL-2.0-with-autoconf-exception",
"GPL-2.0-with-bison-exception",
"GPL-2.0-with-classpath-exception",
"GPL-2.0-with-font-exception",
"GPL-3.0",
"GPL-3.0+",
"GPL-3.0-linking-exception",
"GPL-3.0-linking-source-exception",
"GPL-3.0-only",
"GPL-3.0-or-later",
"GPL-3.0-with-GCC-exception",
"GPL-3.0-with-autoconf-exception",
"GPL-CC-1.0",
"Giftware",
"Glide",
"Glulxe",
"HPND",
"HPND-sell-variant",
"HTMLTIDY",
"HaskellReport",
"Hippocratic-2.1",
"IBM-pibs",
"ICU",
"IJG",
"IPA",
"IPL-1.0",
"ISC",
"ImageMagick",
"Imlib2",
"Info-ZIP",
"Intel",
"Intel-ACPI",
"Interbase-1.0",
"JPNIC",
"JSON",
"JasPer-2.0",
"LAL-1.2",
"LAL-1.3",
"LGPL-2.0",
"LGPL-2.0+",
"LGPL-2.0-only",
"LGPL-2.0-or-later",
"LGPL-2.1",
"LGPL-2.1+",
"LGPL-2.1-only",
"LGPL-2.1-or-later",
"LGPL-3.0",
"LGPL-3.0+",
"LGPL-3.0-linking-exception",
"LGPL-3.0-only",
"LGPL-3.0-or-later",
"LGPLLR",
"LLVM-exception",
"LPL-1.0",
"LPL-1.02",
"LPPL-1.0",
"LPPL-1.1",
"LPPL-1.2",
"LPPL-1.3a",
"LPPL-1.3c",
"LZMA-exception",
"Latex2e",
"Leptonica",
"LiLiQ-P-1.1",
"LiLiQ-R-1.1",
"LiLiQ-Rplus-1.1",
"Libpng",
"Libtool-exception",
"Linux-OpenIB",
"Linux-syscall-note",
"MIT",
"MIT-0",
"MIT-CMU",
"MIT-Modern-Variant",
"MIT-advertising",
"MIT-enna",
"MIT-feh",
"MIT-open-group",
"MITNFA",
"MPL-1.0",
"MPL-1.1",
"MPL-2.0",
"MPL-2.0-no-copyleft-exception",
"MS-PL",
"MS-RL",
"MTLL",
"MakeIndex",
"MirOS",
"Motosoto",
"MulanPSL-1.0",
"MulanPSL-2.0",
"Multics",
"Mup",
"NAIST-2003",
"NASA-1.3",
"NBPL-1.0",
"NCGL-UK-2.0",
"NCSA",
"NGPL",
"NIST-PD",
"NIST-PD-fallback",
"NLOD-1.0",
"NLPL",
"NOSL",
"NPL-1.0",
"NPL-1.1",
"NPOSL-3.0",
"NRL",
"NTP",
"NTP-0",
"Naumen",
"Net-SNMP",
"NetCDF",
"Newsletr",
"Nokia",
"Nokia-Qt-exception-1.1",
"Noweb",
"Nunit",
"O-UDA-1.0",
"OCCT-PL",
"OCCT-exception-1.0",
"OCLC-2.0",
"OCaml-LGPL-linking-exception",
"ODC-By-1.0",
"ODbL-1.0",
"OFL-1.0",
"OFL-1.0-RFN",
"OFL-1.0-no-RFN",
"OFL-1.1",
"OFL-1.1-RFN",
"OFL-1.1-no-RFN",
"OGC-1.0",
"OGDL-Taiwan-1.0",
"OGL-Canada-2.0",
"OGL-UK-1.0",
"OGL-UK-2.0",
"OGL-UK-3.0",
"OGTSL",
"OLDAP-1.1",
"OLDAP-1.2",
"OLDAP-1.3",
"OLDAP-1.4",
"OLDAP-2.0",
"OLDAP-2.0.1",
"OLDAP-2.1",
"OLDAP-2.2",
"OLDAP-2.2.1",
"OLDAP-2.2.2",
"OLDAP-2.3",
"OLDAP-2.4",
"OLDAP-2.5",
"OLDAP-2.6",
"OLDAP-2.7",
"OLDAP-2.8",
"OML",
"OPL-1.0",
"OSET-PL-2.1",
"OSL-1.0",
"OSL-1.1",
"OSL-2.0",
"OSL-2.1",
"OSL-3.0",
"OpenJDK-assembly-exception-1.0",
"OpenSSL",
"PDDL-1.0",
"PHP-3.0",
"PHP-3.01",
"PS-or-PDF-font-exception-20170817",
"PSF-2.0",
"Parity-6.0.0",
"Parity-7.0.0",
"Plexus",
"PolyForm-Noncommercial-1.0.0",
"PolyForm-Small-Business-1.0.0",
"PostgreSQL",
"Python-2.0",
"QPL-1.0",
"Qhull",
"Qt-GPL-exception-1.0",
"Qt-LGPL-exception-1.1",
"Qwt-exception-1.0",
"RHeCos-1.1",
"RPL-1.1",
"RPL-1.5",
"RPSL-1.0",
"RSA-MD",
"RSCPL",
"Rdisc",
"Ruby",
"SAX-PD",
"SCEA",
"SGI-B-1.0",
"SGI-B-1.1",
"SGI-B-2.0",
"SHL-0.5",
"SHL-0.51",
"SHL-2.0",
"SHL-2.1",
"SISSL",
"SISSL-1.2",
"SMLNJ",
"SMPPL",
"SNIA",
"SPL-1.0",
"SSH-OpenSSH",
"SSH-short",
"SSPL-1.0",
"SWL",
"Saxpath",
"Sendmail",
"Sendmail-8.23",
"SimPL-2.0",
"Sleepycat",
"Spencer-86",
"Spencer-94",
"Spencer-99",
"StandardML-NJ",
"SugarCRM-1.1.3",
"Swift-exception",
"TAPR-OHL-1.0",
"TCL",
"TCP-wrappers",
"TMate",
"TORQUE-1.1",
"TOSL",
"TU-Berlin-1.0",
"TU-Berlin-2.0",
"UCL-1.0",
"UPL-1.0",
"Unicode-DFS-2015",
"Unicode-DFS-2016",
"Unicode-TOU",
"Universal-FOSS-exception-1.0",
"Unlicense",
"VOSTROM",
"VSL-1.0",
"Vim",
"W3C",
"W3C-19980720",
"W3C-20150513",
"WTFPL",
"Watcom-1.0",
"Wsuipa",
"WxWindows-exception-3.1",
"X11",
"XFree86-1.1",
"XSkat",
"Xerox",
"Xnet",
"YPL-1.0",
"YPL-1.1",
"ZPL-1.1",
"ZPL-2.0",
"ZPL-2.1",
"Zed",
"Zend-2.0",
"Zimbra-1.3",
"Zimbra-1.4",
"Zlib",
"blessing",
"bzip2-1.0.5",
"bzip2-1.0.6",
"copyleft-next-0.3.0",
"copyleft-next-0.3.1",
"curl",
"diffmark",
"dvipdfm",
"eCos-2.0",
"eCos-exception-2.0",
"eGenix",
"etalab-2.0",
"freertos-exception-2.0",
"gSOAP-1.3b",
"gnu-javamail-exception",
"gnuplot",
"i2p-gpl-java-exception",
"iMatix",
"libpng-2.0",
"libselinux-1.0",
"libtiff",
"mif-exception",
"mpich2",
"openvpn-openssl-exception",
"psfrag",
"psutils",
"u-boot-exception-2.0",
"wxWindows",
"xinetd",
"xpp",
"zlib-acknowledgement",
]
``` | /content/code_sandbox/scripts/west_commands/zspdx/licenses.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 4,412 |
```python
#
from enum import Enum
# DocumentConfig contains settings used to configure how the SPDX Document
# should be built.
class DocumentConfig:
    """Settings used to configure how an SPDX Document should be built."""

    def __init__(self):
        super().__init__()
        # name of document
        self.name = ""
        # namespace for this document
        self.namespace = ""
        # standardized DocumentRef- (including that prefix) that the
        # other docs will use to refer to this one
        self.docRefID = ""
# Document contains the data assembled by the SBOM builder, to be used to
# create the actual SPDX Document.
class Document:
    """Data assembled by the SBOM builder for one SPDX Document."""

    def __init__(self, cfg):
        """Initialize with a DocumentConfig."""
        super().__init__()
        # configuration - DocumentConfig
        self.cfg = cfg
        # dict of SPDX ID => Package
        self.pkgs = {}
        # relationships "owned" by this Document, _not_ those "owned" by
        # its Packages or Files; will likely be just DESCRIBES
        self.relationships = []
        # dict of bare filename (directory ignored) => number of times it
        # has been seen while adding files; used to build unique SPDX IDs
        self.timesSeen = {}
        # dict of absolute path on disk => File
        self.fileLinks = {}
        # set of other Documents that our elements' Relationships refer to
        self.externalDocuments = set()
        # this Document's SHA1 hash, filled in _after_ the Document has
        # been written to disk, so that others can refer to it
        self.myDocSHA1 = ""
# PackageConfig contains settings used to configure how an SPDX Package should
# be built.
class PackageConfig:
    """Settings used to configure how an SPDX Package should be built."""

    def __init__(self):
        super().__init__()
        # package name
        self.name = ""
        # SPDX ID, including "SPDXRef-"
        self.spdxID = ""
        # primary package purpose (ex. "LIBRARY", "APPLICATION", etc.)
        self.primaryPurpose = ""
        # package URL
        self.url = ""
        # package version
        self.version = ""
        # package revision
        self.revision = ""
        # package external references
        self.externalReferences = []
        # the Package's copyright text
        self.copyrightText = "NOASSERTION"
        # absolute path of the "root" directory on disk, used as the base
        # from which this Package's Files compute their relative paths;
        # may want to note this in a Package comment field
        self.relativeBaseDir = ""
# Package contains the data assembled by the SBOM builder, to be used to
# create the actual SPDX Package.
class Package:
    """Data assembled by the SBOM builder for one SPDX Package."""

    def __init__(self, cfg, doc):
        """Initialize with a PackageConfig and the owning Document."""
        super().__init__()
        # configuration - PackageConfig
        self.cfg = cfg
        # Document that owns this Package
        self.doc = doc
        # verification code, calculated per section 7.9 of SPDX spec v2.3
        self.verificationCode = ""
        # list of licenses found in this Package's Files
        self.licenseInfoFromFiles = []
        # Files in this Package; dict of SPDX ID => File
        self.files = {}
        # Relationships "owned" by this Package (e.g., this Package is
        # the left side)
        self.rlns = []
        # If this Package was a target, which File was its main build
        # product?
        self.targetBuildFile = None
# RelationshipDataElementType defines whether a RelationshipData element
# (e.g., the "owner" or the "other" element) is a File, a target Package,
# a Package's ID (as other only, and only where owner type is DOCUMENT),
# or the SPDX document itself (as owner only).
class RelationshipDataElementType(Enum):
    """Kind of a RelationshipData element (owner or other side)."""
    UNKNOWN = 0
    FILENAME = 1
    TARGETNAME = 2
    PACKAGEID = 3
    DOCUMENT = 4
# RelationshipData contains the pre-analysis data about a relationship between
# Files and/or Packages/targets. It is eventually parsed into a corresponding
# Relationship after we have organized the SPDX Package and File data.
class RelationshipData:
    """Pre-analysis data about a relationship between Files and/or
    Packages/targets; parsed into a Relationship once the SPDX Package
    and File data has been organized."""

    def __init__(self):
        super().__init__()
        # kind of the "owner" element (left side of the Relationship):
        # filename, target name, or the SPDX document itself
        self.ownerType = RelationshipDataElementType.UNKNOWN
        # owner file absolute path (if ownerType is FILENAME)
        self.ownerFileAbspath = ""
        # owner target name (if ownerType is TARGETNAME)
        self.ownerTargetName = ""
        # owner SPDX Document (if ownerType is DOCUMENT)
        self.ownerDocument = None
        # kind of the "other" element (right side of the Relationship)
        self.otherType = RelationshipDataElementType.UNKNOWN
        # other file absolute path (if otherType is FILENAME)
        self.otherFileAbspath = ""
        # other target name (if otherType is TARGETNAME)
        self.otherTargetName = ""
        # other package ID (if ownerType is DOCUMENT and otherType is
        # PACKAGEID)
        self.otherPackageID = ""
        # text string with Relationship type,
        # from table 68 in section 11.1 of SPDX spec v2.3
        self.rlnType = ""
# Relationship contains the post-analysis, processed data about a relationship
# in a form suitable for creating the actual SPDX Relationship in a particular
# Document's context.
class Relationship:
    """Post-analysis, processed relationship data in a form suitable for
    creating the actual SPDX Relationship in a Document's context."""

    def __init__(self):
        super().__init__()
        # SPDX ID for left side of relationship,
        # including "SPDXRef-" as well as "DocumentRef-" if needed
        self.refA = ""
        # SPDX ID for right side of relationship,
        # including "SPDXRef-" as well as "DocumentRef-" if needed
        self.refB = ""
        # text string with Relationship type,
        # from table 68 in section 11.1 of SPDX spec v2.3
        self.rlnType = ""
# File contains the data needed to create a File element in the context of a
# particular SPDX Document and Package.
class File:
    """Data for one File element in the context of a particular SPDX
    Document and Package."""

    def __init__(self, doc, pkg):
        """Initialize with the owning Document and Package."""
        super().__init__()
        # absolute path to this file on disk
        self.abspath = ""
        # relative path for this file, measured from the owning
        # Package's cfg.relativeBaseDir
        self.relpath = ""
        # SPDX ID for this file, including "SPDXRef-"
        self.spdxID = ""
        # SHA1 hash
        self.sha1 = ""
        # SHA256 hash, if pkg.cfg.doSHA256 == True; empty string otherwise
        self.sha256 = ""
        # MD5 hash, if pkg.cfg.doMD5 == True; empty string otherwise
        self.md5 = ""
        # license info in file
        self.licenseInfoInFile = []
        # copyright text
        self.copyrightText = "NOASSERTION"
        # Relationships "owned" by this File (e.g., this File is left side)
        self.rlns = []
        # Package that owns this File
        self.pkg = pkg
        # Document that owns this File
        self.doc = doc
```python
#
import hashlib
from west import log
def getHashes(filePath):
    """
    Scan for and return hashes.

    Arguments:
        - filePath: path to file to scan.

    Returns: tuple of (SHA1, SHA256, MD5) hashes for filePath, or
             None if file is not found.
    """
    hSHA1 = hashlib.sha1()
    hSHA256 = hashlib.sha256()
    hMD5 = hashlib.md5()
    log.dbg(f" - getting hashes for {filePath}")
    try:
        with open(filePath, 'rb') as f:
            # hash in fixed-size chunks so that large build artifacts do
            # not need to be held in memory all at once
            for buf in iter(lambda: f.read(65536), b''):
                hSHA1.update(buf)
                hSHA256.update(buf)
                hMD5.update(buf)
    except OSError:
        # includes FileNotFoundError and permission errors
        return None
    return (hSHA1.hexdigest(), hSHA256.hexdigest(), hMD5.hexdigest())
``` | /content/code_sandbox/scripts/west_commands/zspdx/util.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 180 |
```python
#
import os
import yaml
import re
from west import log
from west.util import west_topdir, WestNotFound
from zspdx.cmakecache import parseCMakeCacheFile
from zspdx.cmakefileapijson import parseReply
from zspdx.datatypes import DocumentConfig, Document, File, PackageConfig, Package, RelationshipDataElementType, RelationshipData, Relationship
from zspdx.getincludes import getCIncludes
import zspdx.spdxids
# WalkerConfig contains configuration data for the Walker.
class WalkerConfig:
    """Configuration data for the Walker."""

    def __init__(self):
        super().__init__()
        # prefix for Document namespaces; should not end with "/"
        self.namespacePrefix = ""
        # location of build directory
        self.buildDir = ""
        # whether to also analyze for included header files
        self.analyzeIncludes = False
        # whether to also add an SPDX document for the SDK
        self.includeSDK = False
# Walker is the main analysis class: it walks through the CMake codemodel,
# build files, and corresponding source and SDK files, and gathers the
# information needed to build the SPDX data classes.
class Walker:
    """Main analysis class: walks through the CMake codemodel, build files,
    and corresponding source and SDK files, and gathers the information
    needed to build the SPDX data classes."""

    # initialize with WalkerConfig
    def __init__(self, cfg):
        super(Walker, self).__init__()
        # configuration - WalkerConfig
        self.cfg = cfg
        # the various Documents that we will be building
        self.docBuild = None
        self.docZephyr = None
        self.docApp = None
        self.docSDK = None
        self.docModulesExtRefs = None
        # dict of absolute file path => the Document that owns that file
        self.allFileLinks = {}
        # queue of pending source Files to create, process and assign
        self.pendingSources = []
        # queue of pending relationships to create, process and assign
        self.pendingRelationships = []
        # parsed CMake codemodel
        self.cm = None
        # parsed CMake cache dict, once we have the build path
        self.cmakeCache = {}
        # C compiler path from parsed CMake cache
        self.compilerPath = ""
        # SDK install path from parsed CMake cache
        self.sdkPath = ""
        # path to the Zephyr meta file from the parsed CMake cache;
        # initialized here so makeDocuments() can safely test it even when
        # getCacheFile() fails to parse a CMakeCache file
        self.metaFile = ""

    # build a package URL (purl) string for the given repository URL and
    # optional version/tag; returns None if url is empty or does not match
    # the expected repository URL shape
    def _build_purl(self, url, version=None):
        if not url:
            return None
        purl = None
        # This is designed to match repository with the following url pattern:
        # '<protocol><base_url>/<namespace>/<package>
        COMMON_GIT_URL_REGEX=r'((git@|http(s)?:\/\/)(?P<base_url>[\w\.@]+)(\/|:))(?P<namespace>[\w,\-,\_]+)\/(?P<package>[\w,\-,\_]+)(.git){0,1}((\/){0,1})$'
        match = re.fullmatch(COMMON_GIT_URL_REGEX, url)
        if match:
            purl = f'pkg:{match.group("base_url")}/{match.group("namespace")}/{match.group("package")}'
        # NOTE: a plain truthiness test also covers the empty string; the
        # previous `version or len(version) > 0` raised TypeError when
        # version was None
        if purl and version:
            purl += f'@{version}'
        return purl

    # create a DESCRIBES relationship from doc to the given package config
    # and add it to the pending relationships queue
    def _add_describe_relationship(self, doc, cfgpackage):
        # create DESCRIBES relationship data
        rd = RelationshipData()
        rd.ownerType = RelationshipDataElementType.DOCUMENT
        rd.ownerDocument = doc
        rd.otherType = RelationshipDataElementType.PACKAGEID
        rd.otherPackageID = cfgpackage.spdxID
        rd.rlnType = "DESCRIBES"
        # add it to pending relationships queue
        self.pendingRelationships.append(rd)

    # primary entry point; returns True on success, False on failure
    def makeDocuments(self):
        # parse CMake cache file and get compiler path
        log.inf("parsing CMake Cache file")
        self.getCacheFile()

        # check if meta file is generated
        if not self.metaFile:
            log.err("CONFIG_BUILD_OUTPUT_META must be enabled to generate spdx files; bailing")
            return False

        # parse codemodel from Walker cfg's build dir
        log.inf("parsing CMake Codemodel files")
        self.cm = self.getCodemodel()
        if not self.cm:
            log.err("could not parse codemodel from CMake API reply; bailing")
            return False

        # set up Documents
        log.inf("setting up SPDX documents")
        retval = self.setupDocuments()
        if not retval:
            return False

        # walk through targets in codemodel to gather information
        log.inf("walking through targets")
        self.walkTargets()

        # walk through pending sources and create corresponding files
        log.inf("walking through pending sources files")
        self.walkPendingSources()

        # walk through pending relationship data and create relationships
        log.inf("walking through pending relationships")
        self.walkRelationships()

        return True

    # parse cache file and pull out relevant data
    def getCacheFile(self):
        cacheFilePath = os.path.join(self.cfg.buildDir, "CMakeCache.txt")
        self.cmakeCache = parseCMakeCacheFile(cacheFilePath)
        if self.cmakeCache:
            self.compilerPath = self.cmakeCache.get("CMAKE_C_COMPILER", "")
            self.sdkPath = self.cmakeCache.get("ZEPHYR_SDK_INSTALL_DIR", "")
            self.metaFile = self.cmakeCache.get("KERNEL_META_PATH", "")

    # determine path from build dir to CMake file-based API index file, then
    # parse it and return the Codemodel
    def getCodemodel(self):
        log.dbg("getting codemodel from CMake API reply files")

        # make sure the reply directory exists
        cmakeReplyDirPath = os.path.join(self.cfg.buildDir, ".cmake", "api", "v1", "reply")
        if not os.path.exists(cmakeReplyDirPath):
            log.err(f'cmake api reply directory {cmakeReplyDirPath} does not exist')
            log.err('was query directory created before cmake build ran?')
            return None
        if not os.path.isdir(cmakeReplyDirPath):
            log.err(f'cmake api reply directory {cmakeReplyDirPath} exists but is not a directory')
            return None

        # find file with "index" prefix; there should only be one
        indexFilePath = ""
        for f in os.listdir(cmakeReplyDirPath):
            if f.startswith("index"):
                indexFilePath = os.path.join(cmakeReplyDirPath, f)
                break
        if indexFilePath == "":
            # didn't find it
            log.err(f'cmake api reply index file not found in {cmakeReplyDirPath}')
            return None

        # parse it
        return parseReply(indexFilePath)

    # set up the app-sources document and its single package
    def setupAppDocument(self):
        # set up app document
        cfgApp = DocumentConfig()
        cfgApp.name = "app-sources"
        cfgApp.namespace = self.cfg.namespacePrefix + "/app"
        cfgApp.docRefID = "DocumentRef-app"
        self.docApp = Document(cfgApp)

        # also set up app sources package
        cfgPackageApp = PackageConfig()
        cfgPackageApp.name = "app-sources"
        cfgPackageApp.spdxID = "SPDXRef-app-sources"
        cfgPackageApp.primaryPurpose = "SOURCE"
        # relativeBaseDir is app sources dir
        cfgPackageApp.relativeBaseDir = self.cm.paths_source
        pkgApp = Package(cfgPackageApp, self.docApp)
        self.docApp.pkgs[pkgApp.cfg.spdxID] = pkgApp

        self._add_describe_relationship(self.docApp, cfgPackageApp)

    # set up the build document; its packages are created later in walkTargets()
    def setupBuildDocument(self):
        # set up build document
        cfgBuild = DocumentConfig()
        cfgBuild.name = "build"
        cfgBuild.namespace = self.cfg.namespacePrefix + "/build"
        cfgBuild.docRefID = "DocumentRef-build"
        self.docBuild = Document(cfgBuild)

        # we'll create the build packages in walkTargets()

        # the DESCRIBES relationship for the build document will be
        # with the zephyr_final package
        rd = RelationshipData()
        rd.ownerType = RelationshipDataElementType.DOCUMENT
        rd.ownerDocument = self.docBuild
        rd.otherType = RelationshipDataElementType.TARGETNAME
        rd.otherTargetName = "zephyr_final"
        rd.rlnType = "DESCRIBES"
        # add it to pending relationships queue
        self.pendingRelationships.append(rd)

    # set up the zephyr-sources document with one package for zephyr itself
    # and one per module; returns True on success, False on failure
    def setupZephyrDocument(self, zephyr, modules):
        # set up zephyr document
        cfgZephyr = DocumentConfig()
        cfgZephyr.name = "zephyr-sources"
        cfgZephyr.namespace = self.cfg.namespacePrefix + "/zephyr"
        cfgZephyr.docRefID = "DocumentRef-zephyr"
        self.docZephyr = Document(cfgZephyr)

        # relativeBaseDir is Zephyr sources topdir
        try:
            relativeBaseDir = west_topdir(self.cm.paths_source)
        except WestNotFound:
            log.err(f"cannot find west_topdir for CMake Codemodel sources path {self.cm.paths_source}; bailing")
            return False

        # set up zephyr sources package
        cfgPackageZephyr = PackageConfig()
        cfgPackageZephyr.name = "zephyr-sources"
        cfgPackageZephyr.spdxID = "SPDXRef-zephyr-sources"
        cfgPackageZephyr.relativeBaseDir = relativeBaseDir

        zephyr_url = zephyr.get("remote", "")
        if zephyr_url:
            cfgPackageZephyr.url = zephyr_url

        if zephyr.get("revision"):
            cfgPackageZephyr.revision = zephyr.get("revision")

        purl = None
        zephyr_tags = zephyr.get("tags", "")
        if zephyr_tags:
            # Find tag vX.Y.Z
            for tag in zephyr_tags:
                version = re.fullmatch(r'^v(?P<version>\d+\.\d+\.\d+)$', tag)
                purl = self._build_purl(zephyr_url, tag)
                if purl:
                    cfgPackageZephyr.externalReferences.append(purl)
                # Extract version from tag once
                if cfgPackageZephyr.version == "" and version:
                    cfgPackageZephyr.version = version.group('version')

            if len(cfgPackageZephyr.version) > 0:
                cpe = f'cpe:2.3:o:zephyrproject:zephyr:{cfgPackageZephyr.version}:-:*:*:*:*:*:*'
                cfgPackageZephyr.externalReferences.append(cpe)

        pkgZephyr = Package(cfgPackageZephyr, self.docZephyr)
        self.docZephyr.pkgs[pkgZephyr.cfg.spdxID] = pkgZephyr

        self._add_describe_relationship(self.docZephyr, cfgPackageZephyr)

        for module in modules:
            module_name = module.get("name", None)
            module_path = module.get("path", None)
            module_url = module.get("remote", None)
            module_revision = module.get("revision", None)

            if not module_name:
                log.err("cannot find module name in meta file; bailing")
                return False

            # set up zephyr sources package
            cfgPackageZephyrModule = PackageConfig()
            cfgPackageZephyrModule.name = module_name + "-sources"
            cfgPackageZephyrModule.spdxID = "SPDXRef-" + module_name + "-sources"
            cfgPackageZephyrModule.relativeBaseDir = module_path
            cfgPackageZephyrModule.primaryPurpose = "SOURCE"

            if module_revision:
                cfgPackageZephyrModule.revision = module_revision

            if module_url:
                cfgPackageZephyrModule.url = module_url

            pkgZephyrModule = Package(cfgPackageZephyrModule, self.docZephyr)
            self.docZephyr.pkgs[pkgZephyrModule.cfg.spdxID] = pkgZephyrModule

            self._add_describe_relationship(self.docZephyr, cfgPackageZephyrModule)

        return True

    # set up the SDK document and its single package
    def setupSDKDocument(self):
        # set up SDK document
        cfgSDK = DocumentConfig()
        cfgSDK.name = "sdk"
        cfgSDK.namespace = self.cfg.namespacePrefix + "/sdk"
        cfgSDK.docRefID = "DocumentRef-sdk"
        self.docSDK = Document(cfgSDK)

        # also set up zephyr sdk package
        cfgPackageSDK = PackageConfig()
        cfgPackageSDK.name = "sdk"
        cfgPackageSDK.spdxID = "SPDXRef-sdk"
        # relativeBaseDir is SDK dir
        cfgPackageSDK.relativeBaseDir = self.sdkPath
        pkgSDK = Package(cfgPackageSDK, self.docSDK)
        self.docSDK.pkgs[pkgSDK.cfg.spdxID] = pkgSDK

        # create DESCRIBES relationship data
        rd = RelationshipData()
        rd.ownerType = RelationshipDataElementType.DOCUMENT
        rd.ownerDocument = self.docSDK
        rd.otherType = RelationshipDataElementType.PACKAGEID
        rd.otherPackageID = cfgPackageSDK.spdxID
        rd.rlnType = "DESCRIBES"
        # add it to pending relationships queue
        self.pendingRelationships.append(rd)

    # set up the modules-deps document with one package per module, holding
    # that module's external security references; returns True on success
    def setupModulesDocument(self, modules):
        # set up zephyr document
        cfgModuleExtRef = DocumentConfig()
        cfgModuleExtRef.name = "modules-deps"
        cfgModuleExtRef.namespace = self.cfg.namespacePrefix + "/modules-deps"
        cfgModuleExtRef.docRefID = "DocumentRef-modules-deps"
        self.docModulesExtRefs = Document(cfgModuleExtRef)

        for module in modules:
            module_name = module.get("name", None)
            module_security = module.get("security", None)

            if not module_name:
                log.err("cannot find module name in meta file; bailing")
                return False

            module_ext_ref = []
            if module_security:
                module_ext_ref = module_security.get("external-references")

            # set up zephyr sources package
            cfgPackageModuleExtRef = PackageConfig()
            cfgPackageModuleExtRef.name = module_name + "-deps"
            cfgPackageModuleExtRef.spdxID = "SPDXRef-" + module_name + "-deps"

            for ref in module_ext_ref:
                cfgPackageModuleExtRef.externalReferences.append(ref)

            pkgModule = Package(cfgPackageModuleExtRef, self.docModulesExtRefs)
            self.docModulesExtRefs.pkgs[pkgModule.cfg.spdxID] = pkgModule

            self._add_describe_relationship(self.docModulesExtRefs, cfgPackageModuleExtRef)

        # return True for consistency with the other setup*Document methods
        return True

    # set up Documents before beginning; returns True on success
    def setupDocuments(self):
        log.dbg("setting up placeholder documents")

        self.setupBuildDocument()
        try:
            with open(self.metaFile) as file:
                content = yaml.load(file.read(), yaml.SafeLoader)
                if not self.setupZephyrDocument(content["zephyr"], content["modules"]):
                    return False
        except (FileNotFoundError, yaml.YAMLError):
            log.err("cannot find a valid zephyr_meta.yml required for SPDX generation; bailing")
            return False

        self.setupAppDocument()

        if self.cfg.includeSDK:
            self.setupSDKDocument()

        self.setupModulesDocument(content["modules"])

        return True

    # walk through targets and gather information
    def walkTargets(self):
        log.dbg("walking targets from codemodel")

        # assuming just one configuration; consider whether this is incorrect
        cfgTargets = self.cm.configurations[0].configTargets
        for cfgTarget in cfgTargets:
            # build the Package for this target
            pkg = self.initConfigTargetPackage(cfgTarget)

            # see whether this target has any build artifacts at all
            if len(cfgTarget.target.artifacts) > 0:
                # add its build file
                bf = self.addBuildFile(cfgTarget, pkg)
                if pkg.cfg.name == "zephyr_final":
                    pkg.cfg.primaryPurpose = "APPLICATION"
                else:
                    pkg.cfg.primaryPurpose = "LIBRARY"

                # get its source files if build file is found
                if bf:
                    self.collectPendingSourceFiles(cfgTarget, pkg, bf)
            else:
                log.dbg(f"  - target {cfgTarget.name} has no build artifacts")

            # get its target dependencies
            self.collectTargetDependencies(cfgTargets, cfgTarget, pkg)

    # build a Package in the Build doc for the given ConfigTarget
    def initConfigTargetPackage(self, cfgTarget):
        log.dbg(f"  - initializing Package for target: {cfgTarget.name}")

        # create target Package's config
        cfg = PackageConfig()
        cfg.name = cfgTarget.name
        cfg.spdxID = "SPDXRef-" + zspdx.spdxids.convertToSPDXIDSafe(cfgTarget.name)
        cfg.relativeBaseDir = self.cm.paths_build

        # build Package
        pkg = Package(cfg, self.docBuild)

        # add Package to build Document
        self.docBuild.pkgs[cfg.spdxID] = pkg
        return pkg

    # create a target's build product File and add it to its Package
    # call with:
    #   1) ConfigTarget
    #   2) Package for that target
    # returns: File
    def addBuildFile(self, cfgTarget, pkg):
        # assumes only one artifact in each target
        artifactPath = os.path.join(pkg.cfg.relativeBaseDir, cfgTarget.target.artifacts[0])
        log.dbg(f"  - adding File {artifactPath}")
        log.dbg(f"    - relativeBaseDir: {pkg.cfg.relativeBaseDir}")
        log.dbg(f"    - artifacts[0]: {cfgTarget.target.artifacts[0]}")

        # don't create build File if artifact path points to nonexistent file
        if not os.path.exists(artifactPath):
            log.dbg(f"  - target {cfgTarget.name} lists build artifact {artifactPath} but file not found after build; skipping")
            return None

        # create build File
        bf = File(self.docBuild, pkg)
        bf.abspath = artifactPath
        bf.relpath = cfgTarget.target.artifacts[0]
        # can use nameOnDisk b/c it is just the filename w/out directory paths
        bf.spdxID = zspdx.spdxids.getUniqueFileID(cfgTarget.target.nameOnDisk, self.docBuild.timesSeen)
        # don't fill hashes / licenses / rlns now, we'll do that after walking

        # add File to Package
        pkg.files[bf.spdxID] = bf

        # add file path link to Document and global links
        self.docBuild.fileLinks[bf.abspath] = bf
        self.allFileLinks[bf.abspath] = self.docBuild

        # also set this file as the target package's build product file
        pkg.targetBuildFile = bf

        return bf

    # collect a target's source files, add to pending sources queue, and
    # create pending relationship data entry
    # call with:
    #   1) ConfigTarget
    #   2) Package for that target
    #   3) build File for that target
    def collectPendingSourceFiles(self, cfgTarget, pkg, bf):
        log.dbg("  - collecting source files and adding to pending queue")

        targetIncludesSet = set()

        # walk through target's sources
        for src in cfgTarget.target.sources:
            log.dbg(f"    - add pending source file and relationship for {src.path}")
            # get absolute path if we don't have it
            srcAbspath = src.path
            if not os.path.isabs(src.path):
                srcAbspath = os.path.join(self.cm.paths_source, src.path)

            # check whether it even exists
            if not (os.path.exists(srcAbspath) and os.path.isfile(srcAbspath)):
                log.dbg(f"    - {srcAbspath} does not exist but is referenced in sources for target {pkg.cfg.name}; skipping")
                continue

            # add it to pending source files queue
            self.pendingSources.append(srcAbspath)

            # create relationship data
            rd = RelationshipData()
            rd.ownerType = RelationshipDataElementType.FILENAME
            rd.ownerFileAbspath = bf.abspath
            rd.otherType = RelationshipDataElementType.FILENAME
            rd.otherFileAbspath = srcAbspath
            rd.rlnType = "GENERATED_FROM"

            # add it to pending relationships queue
            self.pendingRelationships.append(rd)

            # collect this source file's includes
            if self.cfg.analyzeIncludes and self.compilerPath:
                includes = self.collectIncludes(cfgTarget, pkg, bf, src)
                for inc in includes:
                    targetIncludesSet.add(inc)

        # make relationships for the overall included files,
        # avoiding duplicates for multiple source files including
        # the same headers
        targetIncludesList = list(targetIncludesSet)
        targetIncludesList.sort()
        for inc in targetIncludesList:
            # add it to pending source files queue
            self.pendingSources.append(inc)

            # create relationship data
            rd = RelationshipData()
            rd.ownerType = RelationshipDataElementType.FILENAME
            rd.ownerFileAbspath = bf.abspath
            rd.otherType = RelationshipDataElementType.FILENAME
            rd.otherFileAbspath = inc
            rd.rlnType = "GENERATED_FROM"

            # add it to pending relationships queue
            self.pendingRelationships.append(rd)

    # collect the include files corresponding to this source file
    # call with:
    #   1) ConfigTarget
    #   2) Package for this target
    #   3) build File for this target
    #   4) TargetSource entry for this source file
    # returns: sorted list of include files for this source file
    def collectIncludes(self, cfgTarget, pkg, bf, src):
        # get the right compile group for this source file
        if len(cfgTarget.target.compileGroups) < (src.compileGroupIndex + 1):
            log.dbg(f"    - {cfgTarget.target.name} has compileGroupIndex {src.compileGroupIndex} but only {len(cfgTarget.target.compileGroups)} found; skipping included files search")
            return []
        cg = cfgTarget.target.compileGroups[src.compileGroupIndex]

        # currently only doing C includes
        if cg.language != "C":
            log.dbg(f"    - {cfgTarget.target.name} has compile group language {cg.language} but currently only searching includes for C files; skipping included files search")
            return []

        srcAbspath = src.path
        # use os.path.isabs for consistency with collectPendingSourceFiles
        # (a leading-"/" test is not portable)
        if not os.path.isabs(src.path):
            srcAbspath = os.path.join(self.cm.paths_source, src.path)
        return getCIncludes(self.compilerPath, srcAbspath, cg)

    # collect relationships for dependencies of this target Package
    # call with:
    #   1) all ConfigTargets from CodeModel
    #   2) this particular ConfigTarget
    #   3) Package for this Target
    def collectTargetDependencies(self, cfgTargets, cfgTarget, pkg):
        log.dbg(f"  - collecting target dependencies for {pkg.cfg.name}")

        # walk through target's dependencies
        for dep in cfgTarget.target.dependencies:
            # extract dep name from its id
            depFragments = dep.id.split(":")
            depName = depFragments[0]
            log.dbg(f"    - adding pending relationship for {depName}")

            # create relationship data between dependency packages
            rd = RelationshipData()
            rd.ownerType = RelationshipDataElementType.TARGETNAME
            rd.ownerTargetName = pkg.cfg.name
            rd.otherType = RelationshipDataElementType.TARGETNAME
            rd.otherTargetName = depName
            rd.rlnType = "HAS_PREREQUISITE"

            # add it to pending relationships queue
            self.pendingRelationships.append(rd)

            # if this is a target with any build artifacts (e.g. non-UTILITY),
            # also create STATIC_LINK relationship for dependency build files,
            # together with this Package's own target build file
            if len(cfgTarget.target.artifacts) == 0:
                continue

            # find the filename for the dependency's build product, using the
            # codemodel (since we might not have created this dependency's
            # Package or File yet)
            depAbspath = ""
            for ct in cfgTargets:
                if ct.name == depName:
                    # skip utility targets
                    if len(ct.target.artifacts) == 0:
                        continue
                    # all targets use the same relativeBaseDir, so this works
                    # even though pkg is the owner package
                    depAbspath = os.path.join(pkg.cfg.relativeBaseDir, ct.target.artifacts[0])
                    break
            if depAbspath == "":
                continue

            # create relationship data between build files
            rd = RelationshipData()
            rd.ownerType = RelationshipDataElementType.FILENAME
            rd.ownerFileAbspath = pkg.targetBuildFile.abspath
            rd.otherType = RelationshipDataElementType.FILENAME
            rd.otherFileAbspath = depAbspath
            rd.rlnType = "STATIC_LINK"

            # add it to pending relationships queue
            self.pendingRelationships.append(rd)

    # walk through pending sources and create corresponding files,
    # assigning them to the appropriate Document and Package
    def walkPendingSources(self):
        log.dbg("walking pending sources")

        # the app and SDK docs hold a single package each; get them
        # (the zephyr package for each source is looked up per-file below)
        pkgApp = list(self.docApp.pkgs.values())[0]
        if self.cfg.includeSDK:
            pkgSDK = list(self.docSDK.pkgs.values())[0]

        for srcAbspath in self.pendingSources:
            # check whether we've already seen it
            srcDoc = self.allFileLinks.get(srcAbspath, None)
            srcPkg = None
            if srcDoc:
                log.dbg(f"  - {srcAbspath}: already seen, assigned to {srcDoc.cfg.name}")
                continue

            # not yet assigned; figure out where it goes
            pkgBuild = self.findBuildPackage(srcAbspath)
            pkgZephyr = self.findZephyrPackage(srcAbspath)

            if pkgBuild:
                log.dbg(f"  - {srcAbspath}: assigning to build document, package {pkgBuild.cfg.name}")
                srcDoc = self.docBuild
                srcPkg = pkgBuild
            elif self.cfg.includeSDK and os.path.commonpath([srcAbspath, pkgSDK.cfg.relativeBaseDir]) == pkgSDK.cfg.relativeBaseDir:
                log.dbg(f"  - {srcAbspath}: assigning to sdk document")
                srcDoc = self.docSDK
                srcPkg = pkgSDK
            elif os.path.commonpath([srcAbspath, pkgApp.cfg.relativeBaseDir]) == pkgApp.cfg.relativeBaseDir:
                log.dbg(f"  - {srcAbspath}: assigning to app document")
                srcDoc = self.docApp
                srcPkg = pkgApp
            elif pkgZephyr:
                log.dbg(f"  - {srcAbspath}: assigning to zephyr document")
                srcDoc = self.docZephyr
                srcPkg = pkgZephyr
            else:
                log.dbg(f"  - {srcAbspath}: can't determine which document should own; skipping")
                continue

            # create File and assign it to the Package and Document
            sf = File(srcDoc, srcPkg)
            sf.abspath = srcAbspath
            sf.relpath = os.path.relpath(srcAbspath, srcPkg.cfg.relativeBaseDir)
            filenameOnly = os.path.split(srcAbspath)[1]
            sf.spdxID = zspdx.spdxids.getUniqueFileID(filenameOnly, srcDoc.timesSeen)
            # don't fill hashes / licenses / rlns now, we'll do that after walking

            # add File to Package
            srcPkg.files[sf.spdxID] = sf

            # add file path link to Document and global links
            srcDoc.fileLinks[sf.abspath] = sf
            self.allFileLinks[sf.abspath] = srcDoc

    # figure out which Package contains the given file, if any
    # call with:
    #   1) Document to search
    #   2) absolute path for source filename being searched
    def findPackageFromSrcAbsPath(self, document, srcAbspath):
        # Multiple target Packages might "contain" the file path, if they
        # are nested. If so, the one with the longest path would be the
        # most deeply-nested target directory, so that's the one which
        # should get the file path.
        pkgLongestMatch = None
        for pkg in document.pkgs.values():
            if os.path.commonpath([srcAbspath, pkg.cfg.relativeBaseDir]) == pkg.cfg.relativeBaseDir:
                # the package does contain this file; is it the deepest?
                if pkgLongestMatch:
                    if len(pkg.cfg.relativeBaseDir) > len(pkgLongestMatch.cfg.relativeBaseDir):
                        pkgLongestMatch = pkg
                else:
                    # first package containing it, so assign it
                    pkgLongestMatch = pkg

        return pkgLongestMatch

    # find the build-document Package containing srcAbspath, if any
    def findBuildPackage(self, srcAbspath):
        return self.findPackageFromSrcAbsPath(self.docBuild, srcAbspath)

    # find the zephyr-document Package containing srcAbspath, if any
    def findZephyrPackage(self, srcAbspath):
        return self.findPackageFromSrcAbsPath(self.docZephyr, srcAbspath)

    # walk through pending RelationshipData entries, create corresponding
    # Relationships, and assign them to the applicable Files / Packages
    def walkRelationships(self):
        for rlnData in self.pendingRelationships:
            rln = Relationship()
            # get left side of relationship data
            docA, spdxIDA, rlnsA = self.getRelationshipLeft(rlnData)
            if not docA or not spdxIDA:
                continue
            rln.refA = spdxIDA
            # get right side of relationship data
            spdxIDB = self.getRelationshipRight(rlnData, docA)
            if not spdxIDB:
                continue
            rln.refB = spdxIDB
            rln.rlnType = rlnData.rlnType
            rlnsA.append(rln)
            log.dbg(f"  - adding relationship to {docA.cfg.name}: {rln.refA} {rln.rlnType} {rln.refB}")

    # get owner (left side) document and SPDX ID of Relationship for given RelationshipData
    # returns: doc, spdxID, rlnsArray (for either Document, Package, or File, as applicable)
    def getRelationshipLeft(self, rlnData):
        if rlnData.ownerType == RelationshipDataElementType.FILENAME:
            # find the document for this file abspath, and then the specific file's ID
            ownerDoc = self.allFileLinks.get(rlnData.ownerFileAbspath, None)
            if not ownerDoc:
                log.dbg(f"  - searching for relationship, can't find document with file {rlnData.ownerFileAbspath}; skipping")
                return None, None, None
            sf = ownerDoc.fileLinks.get(rlnData.ownerFileAbspath, None)
            if not sf:
                log.dbg(f"  - searching for relationship for file {rlnData.ownerFileAbspath} points to document {ownerDoc.cfg.name} but file not found; skipping")
                return None, None, None
            # found it
            if not sf.spdxID:
                log.dbg(f"  - searching for relationship for file {rlnData.ownerFileAbspath} found file, but empty ID; skipping")
                return None, None, None
            return ownerDoc, sf.spdxID, sf.rlns
        elif rlnData.ownerType == RelationshipDataElementType.TARGETNAME:
            # find the document for this target name, and then the specific package's ID
            # for target names, must be docBuild
            ownerDoc = self.docBuild
            # walk through target Packages and check names
            for pkg in ownerDoc.pkgs.values():
                if pkg.cfg.name == rlnData.ownerTargetName:
                    if not pkg.cfg.spdxID:
                        log.dbg(f"  - searching for relationship for target {rlnData.ownerTargetName} found package, but empty ID; skipping")
                        return None, None, None
                    return ownerDoc, pkg.cfg.spdxID, pkg.rlns
            log.dbg(f"  - searching for relationship for target {rlnData.ownerTargetName}, target not found in build document; skipping")
            return None, None, None
        elif rlnData.ownerType == RelationshipDataElementType.DOCUMENT:
            # will always be SPDXRef-DOCUMENT
            return rlnData.ownerDocument, "SPDXRef-DOCUMENT", rlnData.ownerDocument.relationships
        else:
            log.dbg(f"  - unknown relationship type {rlnData.ownerType}; skipping")
            return None, None, None

    # get other (right side) SPDX ID of Relationship for given RelationshipData
    def getRelationshipRight(self, rlnData, docA):
        if rlnData.otherType == RelationshipDataElementType.FILENAME:
            # find the document for this file abspath, and then the specific file's ID
            otherDoc = self.allFileLinks.get(rlnData.otherFileAbspath, None)
            if not otherDoc:
                log.dbg(f"  - searching for relationship, can't find document with file {rlnData.otherFileAbspath}; skipping")
                return None
            bf = otherDoc.fileLinks.get(rlnData.otherFileAbspath, None)
            if not bf:
                log.dbg(f"  - searching for relationship for file {rlnData.otherFileAbspath} points to document {otherDoc.cfg.name} but file not found; skipping")
                return None
            # found it
            if not bf.spdxID:
                log.dbg(f"  - searching for relationship for file {rlnData.otherFileAbspath} found file, but empty ID; skipping")
                return None
            # figure out whether to append DocumentRef
            spdxIDB = bf.spdxID
            if otherDoc != docA:
                spdxIDB = otherDoc.cfg.docRefID + ":" + spdxIDB
                docA.externalDocuments.add(otherDoc)
            return spdxIDB
        elif rlnData.otherType == RelationshipDataElementType.TARGETNAME:
            # find the document for this target name, and then the specific package's ID
            # for target names, must be docBuild
            otherDoc = self.docBuild
            # walk through target Packages and check names
            for pkg in otherDoc.pkgs.values():
                if pkg.cfg.name == rlnData.otherTargetName:
                    if not pkg.cfg.spdxID:
                        log.dbg(f"  - searching for relationship for target {rlnData.otherTargetName} found package, but empty ID; skipping")
                        return None
                    spdxIDB = pkg.cfg.spdxID
                    if otherDoc != docA:
                        spdxIDB = otherDoc.cfg.docRefID + ":" + spdxIDB
                        docA.externalDocuments.add(otherDoc)
                    return spdxIDB
            log.dbg(f"  - searching for relationship for target {rlnData.otherTargetName}, target not found in build document; skipping")
            return None
        elif rlnData.otherType == RelationshipDataElementType.PACKAGEID:
            # will just be the package ID that was passed in
            return rlnData.otherPackageID
        else:
            log.dbg(f"  - unknown relationship type {rlnData.otherType}; skipping")
            return None
``` | /content/code_sandbox/scripts/west_commands/zspdx/walker.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 7,682 |
```python
#
from subprocess import run, PIPE
from west import log
# Given a path to the applicable C compiler, a C source file, and the
# corresponding TargetCompileGroup, determine which include files would
# be used.
# Arguments:
# 1) path to applicable C compiler
# 2) C source file being analyzed
# 3) TargetCompileGroup for the current target
# Returns: list of paths to include files, or [] on error or empty findings.
def getCIncludes(compilerPath, srcFile, tcg):
    """Determine which include files would be used for a C source file.

    Arguments:
        - compilerPath: path to applicable C compiler
        - srcFile: C source file being analyzed
        - tcg: TargetCompileGroup for the current target

    Returns: list of paths to include files, or [] on error or empty findings.
    """
    log.dbg(f"    - getting includes for {srcFile}")

    # assemble the compiler invocation: preprocess-only with include trace
    cmd = [compilerPath, "-E", "-H"]
    cmd.extend(frag for frag in tcg.compileCommandFragments if frag.strip() != "")
    cmd.extend("-I" + incl.path for incl in tcg.includes)
    cmd.extend("-D" + d.define for d in tcg.defines)
    cmd.append(srcFile)

    proc = run(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True)
    if proc.returncode != 0:
        log.dbg(f"    - calling {compilerPath} failed with error code {proc.returncode}")
        return []

    # response will be in stderr, not stdout
    return extractIncludes(proc.stderr)
# Parse the response from the CC -E -H call, to extract the include file paths
def extractIncludes(resp):
    """
    Parse the response from the CC -E -H call and extract the include
    file paths.

    Lines we want start with one or more periods, followed by a space and
    then the include file path, e.g.:
      .... /home/steve/programming/zephyr/zephyrproject/zephyr/include/kernel.h
    The number of periods indicates the depth of nesting (for transitively-
    included files), but here we aren't going to care about that. We'll
    treat everything as tied to the corresponding source file.
    Once we hit the line "Multiple include guards may be useful for:",
    we're done; ignore everything after that.

    Arguments:
        - resp: stderr text from the compiler invocation

    Returns: sorted list of unique include file paths.
    """
    includes = set()

    for rline in resp.splitlines():
        if rline.startswith("Multiple include guards"):
            break
        # use startswith() rather than rline[0] so that blank lines in the
        # compiler output do not raise IndexError
        if rline.startswith("."):
            sline = rline.split(" ", maxsplit=1)
            if len(sline) != 2:
                continue
            includes.add(sline[1])

    return sorted(includes)
``` | /content/code_sandbox/scripts/west_commands/zspdx/getincludes.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 554 |
```python
#
from west import log
# Parse a CMakeCache file and return a dict of key:value (discarding
# type hints).
def parseCMakeCacheFile(filePath):
    """Parse a CMakeCache file and return a dict of key:value pairs,
    discarding the type hints (entries have the form KEY:TYPE=VALUE)."""
    log.dbg(f"parsing CMake cache file at {filePath}")
    entries = {}
    try:
        with open(filePath, "r") as f:
            # should be a short file, so we'll use readlines
            for rawline in f.readlines():
                line = rawline.strip()
                # skip blank lines and comments
                if not line:
                    continue
                if line.startswith("#") or line.startswith("//"):
                    continue
                # split off the key, then discard the TYPE hint before "="
                key, sep1, rest = line.partition(":")
                if not sep1:
                    continue
                _typeHint, sep2, value = rest.partition("=")
                if not sep2:
                    continue
                entries[key] = value
            return entries
    except OSError as e:
        log.err(f"Error loading {filePath}: {str(e)}")
        return {}
``` | /content/code_sandbox/scripts/west_commands/zspdx/cmakecache.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 258 |
```python
#
import hashlib
import os
import re
from west import log
from zspdx.licenses import LICENSES
from zspdx.util import getHashes
# ScannerConfig contains settings used to configure how the SPDX
# Document scanning should occur.
class ScannerConfig:
    """Settings used to configure how the SPDX Document scanning
    should occur."""

    def __init__(self):
        super().__init__()
        # how many lines to scan in each file when looking for a license
        # expression; defaults to 20
        self.numLinesScanned = 20
        # whether to calculate SHA256 hashes for each Package's Files;
        # note that SHA1 hashes are mandatory, per SPDX 2.3
        self.doSHA256 = True
        # whether to calculate MD5 hashes for each Package's Files
        self.doMD5 = False
def parseLineForExpression(line):
    """Return parsed SPDX expression if tag found in line, or None otherwise."""
    # NOTE(review): `p` is referenced here but never assigned in this
    # view -- the statement that splits `line` on the SPDX tag (e.g.
    # "SPDX-License-Identifier:") appears to be missing; confirm
    # against the original source before relying on this function.
    if p[2] == "":
        return None
    # strip away trailing comment marks and whitespace, if any
    expression = p[2].strip()
    expression = expression.rstrip("/*")
    expression = expression.strip()
    return expression
def getExpressionData(filePath, numLines):
    """
    Scan the beginning of a file for an SPDX license expression
    tag in the file.
    Arguments:
      - filePath: path to file to scan.
      - numLines: number of lines to scan for an expression before
                  giving up. If 0, will scan the entire file.
    Returns: parsed expression if found; None if not found.
    """
    log.dbg(f"  - getting licenses for {filePath}")
    with open(filePath, "r") as srcFile:
        try:
            for lineNum, text in enumerate(srcFile, start=1):
                # a positive numLines bounds how far we scan
                if numLines > 0 and lineNum > numLines:
                    break
                found = parseLineForExpression(text)
                if found is not None:
                    return found
        except UnicodeDecodeError:
            # invalid UTF-8 content; treat as "no license found"
            return None
    # scanned everything we were asked to; no expression present
    return None
def splitExpression(expression):
    """
    Parse a license expression into its constituent identifiers.
    Arguments:
      - expression: SPDX license expression
    Returns: array of split identifiers
    """
    # drop parentheses and "+" operators
    noParens = re.sub(r"\(|\)|\+", "", expression, flags=re.IGNORECASE)
    # blank out the word operators (AND / OR / WITH), ignoring case
    noOps = re.sub(r" AND | OR | WITH ", " ", noParens, flags=re.IGNORECASE)
    # split on spaces and return the identifiers in sorted order
    return sorted(noOps.split(" "))
def calculateVerificationCode(pkg):
    """
    Calculate the SPDX Package Verification Code for all files in the package.
    Arguments:
      - pkg: Package
    Returns: verification code as string
    """
    # per the SPDX spec: sort the files' SHA1 hex digests, concatenate
    # them, and take the SHA1 of the resulting string
    sortedHashes = sorted(f.sha1 for f in pkg.files.values())
    digest = hashlib.sha1()
    digest.update("".join(sortedHashes).encode("utf-8"))
    return digest.hexdigest()
"""
Check whether this license ID is a valid SPDX license ID, and add it
to the custom license IDs set for this Document if it isn't.
Arguments:
- lic: detected license ID
- doc: Document
"""
if lic not in LICENSES:
"""
Extract lists of all concluded and infoInFile licenses seen.
Arguments:
- pkg: Package
Returns: sorted list of concluded license exprs,
sorted list of infoInFile ID's
"""
licsConcluded = set()
licsFromFiles = set()
for f in pkg.files.values():
for licInfo in f.licenseInfoInFile:
licsFromFiles.add(licInfo)
return sorted(list(licsConcluded)), sorted(list(licsFromFiles))
def normalizeExpression(licsConcluded):
    """
    Combine array of license expressions into one AND'd expression,
    adding parens where needed.
    Arguments:
      - licsConcluded: array of license expressions
    Returns: string with single AND'd expression.
    """
    # trivial cases: nothing seen, or a single expression
    if len(licsConcluded) == 0:
        return "NOASSERTION"
    if len(licsConcluded) == 1:
        return licsConcluded[0]
    # multiple expressions: drop NONE / NOASSERTION entries, wrap any
    # expression containing spaces in parens, and AND them together
    pieces = []
    for expr in licsConcluded:
        if expr in ("NONE", "NOASSERTION"):
            continue
        pieces.append(f"({expr})" if " " in expr else expr)
    return " AND ".join(pieces)
def scanDocument(cfg, doc):
    """
    Scan for licenses and calculate hashes for all Files and Packages
    in this Document.
    Arguments:
        - cfg: ScannerConfig
        - doc: Document
    """
    for pkg in doc.pkgs.values():
        log.inf(f"scanning files in package {pkg.cfg.name} in document {doc.cfg.name}")
        # first, gather File data for this package
        for f in pkg.files.values():
            # set relpath based on package's relativeBaseDir
            f.relpath = os.path.relpath(f.abspath, pkg.cfg.relativeBaseDir)
            # get hashes for file
            hashes = getHashes(f.abspath)
            if not hashes:
                log.wrn(f"unable to get hashes for file {f.abspath}; skipping")
                continue
            hSHA1, hSHA256, hMD5 = hashes
            # SHA1 is always stored (mandatory per SPDX); SHA256/MD5
            # only when enabled in the ScannerConfig
            f.sha1 = hSHA1
            if cfg.doSHA256:
                f.sha256 = hSHA256
            if cfg.doMD5:
                f.md5 = hMD5
            # get licenses for file
            expression = getExpressionData(f.abspath, cfg.numLinesScanned)
            if expression:
                f.licenseInfoInFile = splitExpression(expression)
                # check if any custom license IDs should be flagged for document
                # NOTE(review): the body of the loop below is missing in
                # this view (presumably a per-license validity check);
                # likewise `licsFromFiles` below is never assigned here.
                # Confirm against the original source.
                for lic in f.licenseInfoInFile:
        # now, assemble the Package data
        pkg.licenseInfoFromFiles = licsFromFiles
        pkg.verificationCode = calculateVerificationCode(pkg)
``` | /content/code_sandbox/scripts/west_commands/zspdx/scanner.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,400 |
```python
#
import re
from datetime import datetime, timezone

from west import log

from zspdx.util import getHashes
CPE23TYPE_REGEX = (
r'^cpe:2\.3:[aho\*\-](:(((\?*|\*?)([a-zA-Z0-9\-\._]|(\\[\\\*\?!"#$$%&\'\(\)\+,\/:;<=>@\[\]\^'
r"`\{\|}~]))+(\?*|\*?))|[\*\-])){5}(:(([a-zA-Z]{2,3}(-([a-zA-Z]{2}|[0-9]{3}))?)|[\*\-]))(:(((\?*"
r'|\*?)([a-zA-Z0-9\-\._]|(\\[\\\*\?!"#$$%&\'\(\)\+,\/:;<=>@\[\]\^`\{\|}~]))+(\?*|\*?))|[\*\-])){4}$'
)
PURL_REGEX = r"^pkg:.+(\/.+)?\/.+(@.+)?(\?.+)?(#.+)?$"
def _normalize_spdx_name(name):
# Replace "_" by "-" since it's not allowed in spdx ID
return name.replace("_", "-")
# Output tag-value SPDX 2.3 content for the given Relationship object.
# Arguments:
# 1) f: file handle for SPDX document
# 2) rln: Relationship object being described
def writeRelationshipSPDX(f, rln):
    """Write the tag-value line for one SPDX 2.3 Relationship to f."""
    refA = _normalize_spdx_name(rln.refA)
    refB = _normalize_spdx_name(rln.refB)
    f.write(f"Relationship: {refA} {rln.rlnType} {refB}\n")
# Output tag-value SPDX 2.3 content for the given File object.
# Arguments:
# 1) f: file handle for SPDX document
# 2) bf: File object being described
def writeFileSPDX(f, bf):
    """Write tag-value SPDX 2.3 content for one File object to f."""
    spdx_normalize_spdx_id = _normalize_spdx_name(bf.spdxID)
    f.write(f"""FileName: ./{bf.relpath}
SPDXID: {spdx_normalize_spdx_id}
FileChecksum: SHA1: {bf.sha1}
""")
    # SHA256/MD5 are optional; only emitted when calculated
    if bf.sha256 != "":
        f.write(f"FileChecksum: SHA256: {bf.sha256}\n")
    if bf.md5 != "":
        f.write(f"FileChecksum: MD5: {bf.md5}\n")
    # NOTE(review): the bodies of the if/else below (emitting
    # LicenseConcluded / LicenseInfoInFile lines) are missing in this
    # view; confirm against the original source.
    if len(bf.licenseInfoInFile) == 0:
    else:
        for licInfoInFile in bf.licenseInfoInFile:
    # write file relationships
    if len(bf.rlns) > 0:
        for rln in bf.rlns:
            writeRelationshipSPDX(f, rln)
    f.write("\n")
def generateDowloadUrl(url, revision):
    """Build a PackageDownloadLocation value for url/revision.

    Only git is supported: walker.py only parses a revision when the
    package came from a git repository, so a non-empty revision means
    a "git+URL@REV" form; otherwise the bare URL is returned.
    """
    return f'git+{url}@{revision}' if len(revision) else url
# Output tag-value SPDX 2.3 content for the given Package object.
# Arguments:
# 1) f: file handle for SPDX document
# 2) pkg: Package object being described
def writePackageSPDX(f, pkg):
    """Write tag-value SPDX 2.3 content for one Package object to f."""
    spdx_normalized_name = _normalize_spdx_name(pkg.cfg.name)
    spdx_normalize_spdx_id = _normalize_spdx_name(pkg.cfg.spdxID)
    f.write(f"""##### Package: {spdx_normalized_name}
PackageName: {spdx_normalized_name}
SPDXID: {spdx_normalize_spdx_id}
""")
    # NOTE(review): the orphan `""")` below suggests a second f.write
    # block (likely license/copyright fields) lost its opening lines in
    # this view; confirm against the original source.
""")
    if pkg.cfg.primaryPurpose != "":
        f.write(f"PrimaryPackagePurpose: {pkg.cfg.primaryPurpose}\n")
    # download location: git+URL@REV when a URL is known, else NOASSERTION
    if len(pkg.cfg.url) > 0:
        downloadUrl = generateDowloadUrl(pkg.cfg.url, pkg.cfg.revision)
        f.write(f"PackageDownloadLocation: {downloadUrl}\n")
    else:
        f.write("PackageDownloadLocation: NOASSERTION\n")
    # prefer an explicit version; fall back to the VCS revision
    if len(pkg.cfg.version) > 0:
        f.write(f"PackageVersion: {pkg.cfg.version}\n")
    elif len(pkg.cfg.revision) > 0:
        f.write(f"PackageVersion: {pkg.cfg.revision}\n")
    # external references: CPE 2.3 or purl, detected by pattern
    for ref in pkg.cfg.externalReferences:
        if re.fullmatch(CPE23TYPE_REGEX, ref):
            f.write(f"ExternalRef: SECURITY cpe23Type {ref}\n")
        elif re.fullmatch(PURL_REGEX, ref):
            f.write(f"ExternalRef: PACKAGE_MANAGER purl {ref}\n")
        else:
            log.wrn(f"Unknown external reference ({ref})")
    # flag whether files analyzed / any files present
    # NOTE(review): the bodies of the inner if/for below (emitting
    # LicenseInfoFromFiles lines) are missing in this view; confirm
    # against the original source.
    if len(pkg.files) > 0:
        if len(pkg.licenseInfoFromFiles) > 0:
            for licFromFiles in pkg.licenseInfoFromFiles:
        else:
        f.write(f"FilesAnalyzed: true\nPackageVerificationCode: {pkg.verificationCode}\n\n")
    else:
        f.write(f"FilesAnalyzed: false\nPackageComment: Utility target; no files\n\n")
    # write package relationships
    if len(pkg.rlns) > 0:
        for rln in pkg.rlns:
            writeRelationshipSPDX(f, rln)
        f.write("\n")
    # write package files, if any, sorted by relative path
    if len(pkg.files) > 0:
        bfs = list(pkg.files.values())
        bfs.sort(key = lambda x: x.relpath)
        for bf in bfs:
            writeFileSPDX(f, bf)
# Output tag-value SPDX 2.3 content for a custom license.
# Arguments:
# 1) f: file handle for SPDX document
# 2) lic: custom license ID being described
ExtractedText: {lic}
""")
# Output tag-value SPDX 2.3 content for the given Document object.
# Arguments:
# 1) f: file handle for SPDX document
# 2) doc: Document object being described
def writeDocumentSPDX(f, doc):
    """Write tag-value SPDX 2.3 content for the given Document object.

    Emits the document header, any external document references, the
    document-level relationships, and then each Package.
    Arguments:
      1) f: file handle for SPDX document
      2) doc: Document object being described
    """
    spdx_normalized_name = _normalize_spdx_name(doc.cfg.name)
    # datetime.utcnow() is deprecated since Python 3.12; an aware UTC
    # datetime produces the identical "%Y-%m-%dT%H:%M:%SZ" string.
    created = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    f.write(f"""SPDXVersion: SPDX-2.3
SPDXID: SPDXRef-DOCUMENT
DocumentName: {spdx_normalized_name}
DocumentNamespace: {doc.cfg.namespace}
Creator: Tool: Zephyr SPDX builder
Created: {created}
""")
    # write any external document references
    if len(doc.externalDocuments) > 0:
        extDocs = list(doc.externalDocuments)
        extDocs.sort(key = lambda x: x.cfg.docRefID)
        for extDoc in extDocs:
            f.write(f"ExternalDocumentRef: {extDoc.cfg.docRefID} {extDoc.cfg.namespace} SHA1: {extDoc.myDocSHA1}\n")
        f.write("\n")
    # write relationships owned by this Document (not by its Packages, etc.), if any
    if len(doc.relationships) > 0:
        for rln in doc.relationships:
            writeRelationshipSPDX(f, rln)
        f.write("\n")
    # write packages
    for pkg in doc.pkgs.values():
        writePackageSPDX(f, pkg)
    # write other license info, if any
    # NOTE(review): the loop emitting custom-license sections appears to
    # be missing in this view; confirm against the original source.
# Open SPDX document file for writing, write the document, and calculate
# its hash for other referring documents to use.
# Arguments:
# 1) spdxPath: path to write SPDX document
# 2) doc: SPDX Document object to write
def writeSPDX(spdxPath, doc):
    """Write doc to spdxPath and record its SHA1 hash on the Document.

    The hash is stored in doc.myDocSHA1 so other documents can refer to
    this one via ExternalDocumentRef.
    Returns True on success, False if writing or hashing failed.
    """
    # create and write document to disk
    try:
        log.inf(f"Writing SPDX document {doc.cfg.name} to {spdxPath}")
        with open(spdxPath, "w") as f:
            writeDocumentSPDX(f, doc)
    except OSError as e:
        log.err(f"Error: Unable to write to {spdxPath}: {str(e)}")
        return False
    # calculate hash of the document we just wrote
    hashes = getHashes(spdxPath)
    if not hashes:
        log.err("Error: created document but unable to calculate hash values")
        return False
    doc.myDocSHA1 = hashes[0]
    return True
``` | /content/code_sandbox/scripts/west_commands/zspdx/writer.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,795 |
```python
#
import importlib
import logging
import os
from pathlib import Path
from fetchers.core import ZephyrBlobFetcher
_logger = logging.getLogger('fetchers')
def _import_fetcher_module(fetcher_name):
    """Import fetchers.<fetcher_name>, warning (not raising) on failure."""
    module_name = f'fetchers.{fetcher_name}'
    try:
        importlib.import_module(module_name)
    except ImportError as ie:
        # Fetchers are supposed to gracefully handle failures when they
        # import anything outside of stdlib, but they sometimes do
        # not. Catch ImportError to handle this.
        _logger.warning(f'The module for fetcher "{fetcher_name}" '
                        f'could not be imported ({ie}). This most likely '
                        'means it is not handling its dependencies properly. '
                        'Please report this to the zephyr developers.')
# We import these here to ensure the BlobFetcher subclasses are
# defined; otherwise, BlobFetcher.get_fetchers() won't work.
# Those do not contain subclasses of ZephyrBlobFetcher
name_blocklist = ['__init__', 'core']

fetchers_dir = Path(__file__).parent.resolve()
# Import every sibling .py module (except blocklisted ones) so each
# fetcher class registers itself as a ZephyrBlobFetcher subclass.
# (The original wrapped os.listdir() in a pointless identity list
# comprehension; iterate it directly.)
for f in os.listdir(fetchers_dir):
    file = fetchers_dir / Path(f)
    if file.suffix == '.py' and file.stem not in name_blocklist:
        _import_fetcher_module(file.stem)
def get_fetcher_cls(scheme):
    '''Get a fetcher's class object, given a scheme.'''
    for fetcher_cls in ZephyrBlobFetcher.get_fetchers():
        if scheme in fetcher_cls.schemes():
            return fetcher_cls
    raise ValueError(f'unknown fetcher for scheme "{scheme}"')
__all__ = ['ZephyrBlobFetcher', 'get_fetcher_cls']
``` | /content/code_sandbox/scripts/west_commands/fetchers/__init__.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 356 |
```python
#
from abc import ABC, abstractmethod
from pathlib import Path
from typing import List, Type
class ZephyrBlobFetcher(ABC):
    '''Abstract base class for Zephyr blob fetchers.

    Concrete fetchers subclass this; registration is implicit via
    __subclasses__(), so a subclass merely needs to be imported to be
    discoverable through get_fetchers().
    '''
    @staticmethod
    def get_fetchers() -> List[Type['ZephyrBlobFetcher']]:
        '''Get a list of all currently defined fetcher classes.'''
        return ZephyrBlobFetcher.__subclasses__()
    @classmethod
    @abstractmethod
    def schemes(cls) -> List[str]:
        '''Return this fetcher's schemes.'''
    @abstractmethod
    def fetch(self, url: str, path: Path):
        ''' Fetch a blob and store it '''
``` | /content/code_sandbox/scripts/west_commands/fetchers/core.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 130 |
```python
#
import requests
from west import log
from fetchers.core import ZephyrBlobFetcher
class HTTPFetcher(ZephyrBlobFetcher):
    '''Fetcher for blobs retrieved over plain HTTP or HTTPS.'''

    @classmethod
    def schemes(cls):
        '''URL schemes handled by this fetcher.'''
        return ['http', 'https']

    def fetch(self, url, path):
        '''Download url and write the response body to path.

        NOTE(review): the HTTP status is not checked, so an error page
        would be saved as the blob; consider resp.raise_for_status().
        '''
        log.dbg(f'HTTPFetcher fetching {url} to {path}')
        resp = requests.get(url)
        # use a context manager so the output file handle is always
        # closed (the original left closing to the garbage collector)
        with open(path, "wb") as out:
            out.write(resp.content)
``` | /content/code_sandbox/scripts/west_commands/fetchers/http.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 90 |
```python
#
'''Runner stub for QEMU.'''
from runners.core import ZephyrBinaryRunner, RunnerCaps
class QemuBinaryRunner(ZephyrBinaryRunner):
    '''Place-holder for QEMU runner customizations.'''

    @classmethod
    def name(cls):
        '''Runner name as used on the west command line.'''
        return 'qemu'

    @classmethod
    def capabilities(cls):
        # This is a stub: no commands are supported yet.
        return RunnerCaps(commands=set())

    @classmethod
    def do_add_parser(cls, parser):
        # Nothing to do.
        pass

    @classmethod
    def do_create(cls, cfg, args):
        return QemuBinaryRunner(cfg)

    def do_run(self, command, **kwargs):
        # Stub: intentionally a no-op.
        pass
``` | /content/code_sandbox/scripts/west_commands/runners/qemu.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 141 |
```python
#
import json
import os
from west import log
import zspdx.cmakefileapi
def parseReply(replyIndexPath):
    """Parse a CMake File API reply index file.

    Follows index -> "reply" -> "codemodel-v2" -> "jsonFile" to locate
    the codemodel reply, then parses it with parseCodemodel().
    Returns the parsed Codemodel, or None on any error.
    """
    replyDir, _ = os.path.split(replyIndexPath)
    # first we need to find the codemodel reply file
    try:
        with open(replyIndexPath, 'r') as indexFile:
            index_js = json.load(indexFile)
            reply = index_js.get("reply", {})
            if reply == {}:
                log.err("no \"reply\" field found in index file")
                return None
            codemodel_ref = reply.get("codemodel-v2", {})
            if codemodel_ref == {}:
                log.err("no \"codemodel-v2\" field found in \"reply\" object in index file")
                return None
            cm_filename = codemodel_ref.get("jsonFile", "")
            if cm_filename == "":
                log.err("no \"jsonFile\" field found in \"codemodel-v2\" object in index file")
                return None
            return parseCodemodel(replyDir, cm_filename)
    except OSError as e:
        log.err(f"Error loading {replyIndexPath}: {str(e)}")
        return None
    except json.decoder.JSONDecodeError as e:
        log.err(f"Error parsing JSON in {replyIndexPath}: {str(e)}")
        return None
def parseCodemodel(replyDir, codemodelFile):
    """Parse a codemodel-v2 reply file into a Codemodel object.

    Validates the "kind" and major version, records source/build
    paths, parses each configuration, and resolves index-based
    cross-references.  Returns the Codemodel, or None on any error.
    """
    codemodelPath = os.path.join(replyDir, codemodelFile)
    try:
        with open(codemodelPath, 'r') as cmFile:
            js = json.load(cmFile)
            cm = zspdx.cmakefileapi.Codemodel()

            # for correctness, check kind and version
            kind = js.get("kind", "")
            if kind != "codemodel":
                log.err(f"Error loading CMake API reply: expected \"kind\":\"codemodel\" in {codemodelPath}, got {kind}")
                return None
            versionMajor = js.get("version", {}).get("major", -1)
            if versionMajor != 2:
                if versionMajor == -1:
                    log.err(f"Error loading CMake API reply: expected major version 2 in {codemodelPath}, no version found")
                    return None
                log.err(f"Error loading CMake API reply: expected major version 2 in {codemodelPath}, got {versionMajor}")
                return None

            # record source/build paths
            paths_dict = js.get("paths", {})
            cm.paths_source = paths_dict.get("source", "")
            cm.paths_build = paths_dict.get("build", "")

            # parse each configuration
            for cfg_dict in js.get("configurations", []):
                parsed = parseConfig(cfg_dict, replyDir)
                if parsed:
                    cm.configurations.append(parsed)

            # and after parsing is done, link all the indices
            linkCodemodel(cm)
            return cm
    except OSError as e:
        log.err(f"Error loading {codemodelPath}: {str(e)}")
        return None
    except json.decoder.JSONDecodeError as e:
        log.err(f"Error parsing JSON in {codemodelPath}: {str(e)}")
        return None
def parseConfig(cfg_dict, replyDir):
    """Parse one entry of the codemodel "configurations" array.

    Builds a Config with its ConfigDir, ConfigProject and ConfigTarget
    lists.  Index-valued fields (-1 meaning "none") are resolved into
    direct object pointers later by linkConfig() and friends.
    Returns the populated Config.
    """
    cfg = zspdx.cmakefileapi.Config()
    cfg.name = cfg_dict.get("name", "")
    # parse and add each directory
    dirs_arr = cfg_dict.get("directories", [])
    for dir_dict in dirs_arr:
        if dir_dict != {}:
            cfgdir = zspdx.cmakefileapi.ConfigDir()
            cfgdir.source = dir_dict.get("source", "")
            cfgdir.build = dir_dict.get("build", "")
            cfgdir.parentIndex = dir_dict.get("parentIndex", -1)
            cfgdir.childIndexes = dir_dict.get("childIndexes", [])
            cfgdir.projectIndex = dir_dict.get("projectIndex", -1)
            cfgdir.targetIndexes = dir_dict.get("targetIndexes", [])
            minCMakeVer_dict = dir_dict.get("minimumCMakeVersion", {})
            cfgdir.minimumCMakeVersion = minCMakeVer_dict.get("string", "")
            cfgdir.hasInstallRule = dir_dict.get("hasInstallRule", False)
            cfg.directories.append(cfgdir)
    # parse and add each project
    projects_arr = cfg_dict.get("projects", [])
    for prj_dict in projects_arr:
        if prj_dict != {}:
            prj = zspdx.cmakefileapi.ConfigProject()
            prj.name = prj_dict.get("name", "")
            prj.parentIndex = prj_dict.get("parentIndex", -1)
            prj.childIndexes = prj_dict.get("childIndexes", [])
            prj.directoryIndexes = prj_dict.get("directoryIndexes", [])
            prj.targetIndexes = prj_dict.get("targetIndexes", [])
            cfg.projects.append(prj)
    # parse and add each target
    cfgTargets_arr = cfg_dict.get("targets", [])
    for cfgTarget_dict in cfgTargets_arr:
        if cfgTarget_dict != {}:
            cfgTarget = zspdx.cmakefileapi.ConfigTarget()
            cfgTarget.name = cfgTarget_dict.get("name", "")
            cfgTarget.id = cfgTarget_dict.get("id", "")
            cfgTarget.directoryIndex = cfgTarget_dict.get("directoryIndex", -1)
            cfgTarget.projectIndex = cfgTarget_dict.get("projectIndex", -1)
            # each target's details live in their own reply file
            cfgTarget.jsonFile = cfgTarget_dict.get("jsonFile", "")
            if cfgTarget.jsonFile != "":
                cfgTarget.target = parseTarget(os.path.join(replyDir, cfgTarget.jsonFile))
            else:
                cfgTarget.target = None
            cfg.configTargets.append(cfgTarget)
    return cfg
def parseTarget(targetPath):
    """Parse one target reply file into a Target object.

    Reads the top-level scalar fields and artifact paths, then
    delegates each subsection ("install", "link", "archive", ...) to a
    dedicated parseTarget*() helper.
    Returns the populated Target, or None on any error.
    """
    try:
        with open(targetPath, 'r') as targetFile:
            js = json.load(targetFile)
            target = zspdx.cmakefileapi.Target()
            target.name = js.get("name", "")
            target.id = js.get("id", "")
            target.type = parseTargetType(js.get("type", "UNKNOWN"))
            target.backtrace = js.get("backtrace", -1)
            target.folder = js.get("folder", "")
            # get paths
            paths_dict = js.get("paths", {})
            target.paths_source = paths_dict.get("source", "")
            target.paths_build = paths_dict.get("build", "")
            target.nameOnDisk = js.get("nameOnDisk", "")
            # parse artifacts if present; only non-empty paths are kept
            artifacts_arr = js.get("artifacts", [])
            target.artifacts = []
            for artifact_dict in artifacts_arr:
                artifact_path = artifact_dict.get("path", "")
                if artifact_path != "":
                    target.artifacts.append(artifact_path)
            target.isGeneratorProvided = js.get("isGeneratorProvided", False)
            # call separate functions to parse subsections
            parseTargetInstall(target, js)
            parseTargetLink(target, js)
            parseTargetArchive(target, js)
            parseTargetDependencies(target, js)
            parseTargetSources(target, js)
            parseTargetSourceGroups(target, js)
            parseTargetCompileGroups(target, js)
            parseTargetBacktraceGraph(target, js)
            return target
    except OSError as e:
        log.err(f"Error loading {targetPath}: {str(e)}")
        return None
    except json.decoder.JSONDecodeError as e:
        log.err(f"Error parsing JSON in {targetPath}: {str(e)}")
        return None
def parseTargetType(targetType):
    """Map a CMake File API target-type string to a TargetType value.

    Unrecognized strings map to TargetType.UNKNOWN.
    """
    type_map = {
        "EXECUTABLE": zspdx.cmakefileapi.TargetType.EXECUTABLE,
        "STATIC_LIBRARY": zspdx.cmakefileapi.TargetType.STATIC_LIBRARY,
        "SHARED_LIBRARY": zspdx.cmakefileapi.TargetType.SHARED_LIBRARY,
        "MODULE_LIBRARY": zspdx.cmakefileapi.TargetType.MODULE_LIBRARY,
        "OBJECT_LIBRARY": zspdx.cmakefileapi.TargetType.OBJECT_LIBRARY,
        "UTILITY": zspdx.cmakefileapi.TargetType.UTILITY,
    }
    return type_map.get(targetType, zspdx.cmakefileapi.TargetType.UNKNOWN)
def parseTargetInstall(target, js):
    """Populate target.install_* fields from the "install" object, if any."""
    install_dict = js.get("install", {})
    if install_dict == {}:
        return
    target.install_prefix = install_dict.get("prefix", {}).get("path", "")
    for destination_dict in install_dict.get("destinations", []):
        dest = zspdx.cmakefileapi.TargetInstallDestination()
        dest.path = destination_dict.get("path", "")
        dest.backtrace = destination_dict.get("backtrace", -1)
        target.install_destinations.append(dest)
def parseTargetLink(target, js):
    """Populate target.link_* fields from the "link" object, if any."""
    link_dict = js.get("link", {})
    if link_dict == {}:
        return
    # NOTE(review): the default here is {} even though "language" is a
    # string field; preserved as-is -- confirm downstream expectations.
    target.link_language = link_dict.get("language", {})
    target.link_lto = link_dict.get("lto", False)
    target.link_sysroot = link_dict.get("sysroot", {}).get("path", "")
    for fragment_dict in link_dict.get("commandFragments", []):
        fragment = zspdx.cmakefileapi.TargetCommandFragment()
        fragment.fragment = fragment_dict.get("fragment", "")
        fragment.role = fragment_dict.get("role", "")
        target.link_commandFragments.append(fragment)
def parseTargetArchive(target, js):
    """Populate target.archive_* fields from the "archive" object, if any."""
    archive_dict = js.get("archive", {})
    if archive_dict == {}:
        return
    target.archive_lto = archive_dict.get("lto", False)
    for fragment_dict in archive_dict.get("commandFragments", []):
        fragment = zspdx.cmakefileapi.TargetCommandFragment()
        fragment.fragment = fragment_dict.get("fragment", "")
        fragment.role = fragment_dict.get("role", "")
        target.archive_commandFragments.append(fragment)
def parseTargetDependencies(target, js):
    """Populate target.dependencies from the "dependencies" array."""
    for dependency_dict in js.get("dependencies", []):
        dep = zspdx.cmakefileapi.TargetDependency()
        dep.id = dependency_dict.get("id", "")
        dep.backtrace = dependency_dict.get("backtrace", -1)
        target.dependencies.append(dep)
def parseTargetSources(target, js):
    """Populate target.sources from the "sources" array."""
    for source_dict in js.get("sources", []):
        src = zspdx.cmakefileapi.TargetSource()
        src.path = source_dict.get("path", "")
        # index fields are resolved into pointers later by linkTargetSource()
        src.compileGroupIndex = source_dict.get("compileGroupIndex", -1)
        src.sourceGroupIndex = source_dict.get("sourceGroupIndex", -1)
        src.isGenerated = source_dict.get("isGenerated", False)
        src.backtrace = source_dict.get("backtrace", -1)
        target.sources.append(src)
def parseTargetSourceGroups(target, js):
    """Populate target.sourceGroups from the "sourceGroups" array."""
    for sourceGroup_dict in js.get("sourceGroups", []):
        srcgrp = zspdx.cmakefileapi.TargetSourceGroup()
        srcgrp.name = sourceGroup_dict.get("name", "")
        srcgrp.sourceIndexes = sourceGroup_dict.get("sourceIndexes", [])
        target.sourceGroups.append(srcgrp)
def parseTargetCompileGroups(target, js):
    """Populate target.compileGroups from the "compileGroups" array.

    Each compile group carries its language, sysroot, compile-command
    fragments, include directories, precompiled headers and defines.
    """
    compileGroups_arr = js.get("compileGroups", [])
    for compileGroup_dict in compileGroups_arr:
        cmpgrp = zspdx.cmakefileapi.TargetCompileGroup()
        cmpgrp.sourceIndexes = compileGroup_dict.get("sourceIndexes", [])
        cmpgrp.language = compileGroup_dict.get("language", "")
        cmpgrp.sysroot = compileGroup_dict.get("sysroot", "")
        # only non-empty command fragments are kept
        commandFragments_arr = compileGroup_dict.get("compileCommandFragments", [])
        for commandFragment_dict in commandFragments_arr:
            fragment = commandFragment_dict.get("fragment", "")
            if fragment != "":
                cmpgrp.compileCommandFragments.append(fragment)
        includes_arr = compileGroup_dict.get("includes", [])
        for include_dict in includes_arr:
            grpInclude = zspdx.cmakefileapi.TargetCompileGroupInclude()
            grpInclude.path = include_dict.get("path", "")
            grpInclude.isSystem = include_dict.get("isSystem", False)
            grpInclude.backtrace = include_dict.get("backtrace", -1)
            cmpgrp.includes.append(grpInclude)
        precompileHeaders_arr = compileGroup_dict.get("precompileHeaders", [])
        for precompileHeader_dict in precompileHeaders_arr:
            grpHeader = zspdx.cmakefileapi.TargetCompileGroupPrecompileHeader()
            grpHeader.header = precompileHeader_dict.get("header", "")
            grpHeader.backtrace = precompileHeader_dict.get("backtrace", -1)
            cmpgrp.precompileHeaders.append(grpHeader)
        defines_arr = compileGroup_dict.get("defines", [])
        for define_dict in defines_arr:
            grpDefine = zspdx.cmakefileapi.TargetCompileGroupDefine()
            grpDefine.define = define_dict.get("define", "")
            grpDefine.backtrace = define_dict.get("backtrace", -1)
            cmpgrp.defines.append(grpDefine)
        target.compileGroups.append(cmpgrp)
def parseTargetBacktraceGraph(target, js):
    """Populate target.backtraceGraph_* from the "backtraceGraph" object."""
    graph_dict = js.get("backtraceGraph", {})
    if graph_dict == {}:
        return
    target.backtraceGraph_commands = graph_dict.get("commands", [])
    target.backtraceGraph_files = graph_dict.get("files", [])
    for node_dict in graph_dict.get("nodes", []):
        node = zspdx.cmakefileapi.TargetBacktraceGraphNode()
        node.file = node_dict.get("file", -1)
        node.line = node_dict.get("line", -1)
        node.command = node_dict.get("command", -1)
        node.parent = node_dict.get("parent", -1)
        target.backtraceGraph_nodes.append(node)
# Resolve index-based references into direct object pointers for every
# Config in the given Codemodel.
def linkCodemodel(cm):
    for cfg in cm.configurations:
        linkConfig(cfg)
# Resolve index-based references for all directories, projects, and
# targets contained in the given Config.
def linkConfig(cfg):
    for cfgDir in cfg.directories:
        linkConfigDir(cfg, cfgDir)
    for cfgPrj in cfg.projects:
        linkConfigProject(cfg, cfgPrj)
    for cfgTarget in cfg.configTargets:
        linkConfigTarget(cfg, cfgTarget)
# Resolve a ConfigDir's parent/project/children/targets indices into
# direct object pointers (an index of -1 means "none").
def linkConfigDir(cfg, cfgDir):
    cfgDir.parent = None if cfgDir.parentIndex == -1 else cfg.directories[cfgDir.parentIndex]
    cfgDir.project = None if cfgDir.projectIndex == -1 else cfg.projects[cfgDir.projectIndex]
    cfgDir.children = [cfg.directories[i] for i in cfgDir.childIndexes]
    cfgDir.targets = [cfg.configTargets[i] for i in cfgDir.targetIndexes]
# Resolve a ConfigProject's parent/children/directories/targets indices
# into direct object pointers (an index of -1 means "no parent").
def linkConfigProject(cfg, cfgPrj):
    cfgPrj.parent = None if cfgPrj.parentIndex == -1 else cfg.projects[cfgPrj.parentIndex]
    cfgPrj.children = [cfg.projects[i] for i in cfgPrj.childIndexes]
    cfgPrj.directories = [cfg.directories[i] for i in cfgPrj.directoryIndexes]
    cfgPrj.targets = [cfg.configTargets[i] for i in cfgPrj.targetIndexes]
# Resolve a ConfigTarget's directory/project indices, then resolve the
# index references inside its Target's sources and groups.
def linkConfigTarget(cfg, cfgTarget):
    cfgTarget.directory = None if cfgTarget.directoryIndex == -1 else cfg.directories[cfgTarget.directoryIndex]
    cfgTarget.project = None if cfgTarget.projectIndex == -1 else cfg.projects[cfgTarget.projectIndex]
    # and link target's sources and source groups
    # NOTE(review): assumes cfgTarget.target is not None (i.e. the
    # target had a jsonFile); confirm callers guarantee this.
    tgt = cfgTarget.target
    for ts in tgt.sources:
        linkTargetSource(tgt, ts)
    for tsg in tgt.sourceGroups:
        linkTargetSourceGroup(tgt, tsg)
    for tcg in tgt.compileGroups:
        linkTargetCompileGroup(tgt, tcg)
# Resolve a TargetSource's compileGroup/sourceGroup indices into direct
# object pointers (an index of -1 means "none").
def linkTargetSource(target, targetSrc):
    cgIdx = targetSrc.compileGroupIndex
    targetSrc.compileGroup = None if cgIdx == -1 else target.compileGroups[cgIdx]
    sgIdx = targetSrc.sourceGroupIndex
    targetSrc.sourceGroup = None if sgIdx == -1 else target.sourceGroups[sgIdx]
# Resolve a TargetSourceGroup's source indices into direct pointers.
def linkTargetSourceGroup(target, targetSrcGrp):
    targetSrcGrp.sources = [target.sources[i] for i in targetSrcGrp.sourceIndexes]
# Resolve a TargetCompileGroup's source indices into direct pointers.
def linkTargetCompileGroup(target, targetCmpGrp):
    targetCmpGrp.sources = [target.sources[i] for i in targetCmpGrp.sourceIndexes]
``` | /content/code_sandbox/scripts/west_commands/zspdx/cmakefileapijson.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,781 |
```python
#
'''Runner for flashing with dfu-util.'''
from collections import namedtuple
import sys
import time
from runners.core import ZephyrBinaryRunner, RunnerCaps, \
BuildConfiguration
# DfuSe (ST "DFU with extensions") settings: flash address plus the
# colon-separated modifier string passed to dfu-util's -s option.
DfuSeConfig = namedtuple('DfuSeConfig', ['address', 'options'])
class DfuUtilBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for dfu-util.'''
    def __init__(self, cfg, dev_id, alt, img, exe='dfu-util',
                 dfuse_config=None):
        """Set up the dfu-util command line and device-match pattern.

        Arguments:
          cfg: runner configuration
          dev_id: USB VID:PID of the device (may be None; checked in do_run)
          alt: interface alternate setting number or name
          img: binary image to flash
          exe: dfu-util executable
          dfuse_config: DfuSeConfig for DfuSe targets, or None
        """
        super().__init__(cfg)
        self.dev_id = dev_id # Used only for error checking in do_run
        self.alt = alt
        self.img = img
        self.cmd = [exe, '-d,{}'.format(dev_id)]
        # `alt` may be a numeric alternate setting or an interface name;
        # build the matching pattern for `dfu-util -l` output accordingly
        try:
            self.list_pattern = ', alt={},'.format(int(self.alt))
        except ValueError:
            self.list_pattern = ', name="{}",'.format(self.alt)
        if dfuse_config is None:
            self.dfuse = False
        else:
            self.dfuse = True
        self.dfuse_config = dfuse_config
        # set to True by ensure_device() if the user had to reset the
        # board into DFU mode; triggers the final reminder in do_run()
        self.reset = False
    @classmethod
    def name(cls):
        return 'dfu-util'
    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'}, dev_id=True, flash_addr=True)
    @classmethod
    def dev_id_help(cls) -> str:
        return 'USB VID:PID of the connected device.'
    @classmethod
    def do_add_parser(cls, parser):
        # Required:
        parser.add_argument("--alt", required=True,
                            help="interface alternate setting number or name")
        # Optional:
        parser.add_argument("--pid", dest='dev_id',
                            help=cls.dev_id_help())
        parser.add_argument("--img",
                            help="binary to flash, default is --bin-file")
        parser.add_argument("--dfuse", default=False, action='store_true',
                            help='''use the DfuSe protocol extensions
                            supported by STMicroelectronics
                            devices (if given, the image flash
                            address respects
                            CONFIG_FLASH_BASE_ADDRESS and
                            CONFIG_FLASH_LOAD_OFFSET)''')
        parser.add_argument("--dfuse-modifiers", default='leave',
                            help='''colon-separated list of additional
                            DfuSe modifiers for dfu-util's -s
                            option (default is
                            "-s <flash-address>:leave", which starts
                            execution immediately); requires
                            --dfuse
                            ''')
        parser.add_argument('--dfu-util', default='dfu-util',
                            help='dfu-util executable; defaults to "dfu-util"')
    @classmethod
    def do_create(cls, cfg, args):
        if args.img is None:
            args.img = cfg.bin_file
        if args.dfuse:
            args.dt_flash = True # --dfuse implies --dt-flash.
            build_conf = BuildConfiguration(cfg.build_dir)
            dcfg = DfuSeConfig(address=cls.get_flash_address(args, build_conf),
                               options=args.dfuse_modifiers)
        else:
            dcfg = None
        ret = DfuUtilBinaryRunner(cfg, args.dev_id, args.alt, args.img,
                                  exe=args.dfu_util, dfuse_config=dcfg)
        ret.ensure_device()
        return ret
    def ensure_device(self):
        # Poll until the device appears in `dfu-util -l` output,
        # prompting the user to reset the board into DFU mode if needed.
        if not self.find_device():
            self.reset = True
            print('Please reset your board to switch to DFU mode...')
            while not self.find_device():
                time.sleep(0.1)
    def find_device(self):
        # True if the configured alt setting shows up in `dfu-util -l`.
        cmd = list(self.cmd) + ['-l']
        output = self.check_output(cmd)
        output = output.decode(sys.getdefaultencoding())
        return self.list_pattern in output
    def do_run(self, command, **kwargs):
        """Flash self.img with dfu-util, using DfuSe options if configured."""
        if not self.dev_id:
            raise RuntimeError('Please specify a USB VID:PID with the '
                               '-i/--dev-id or --pid command-line switch.')
        self.require(self.cmd[0])
        self.ensure_output('bin')
        if not self.find_device():
            raise RuntimeError('device not found')
        cmd = list(self.cmd)
        if self.dfuse:
            # DfuSe targets take "-s <address>:<modifiers>"
            dcfg = self.dfuse_config
            addr_opts = hex(dcfg.address) + ':' + dcfg.options
            cmd.extend(['-s', addr_opts])
        cmd.extend(['-a', self.alt, '-D', self.img])
        self.check_call(cmd)
        # NOTE: `dcfg` is only defined when self.dfuse is true; the
        # short-circuit below relies on that same guard.
        if self.dfuse and 'leave' in dcfg.options.split(':'):
            # Normal DFU devices generally need to be reset to switch
            # back to the flashed program.
            #
            # DfuSe targets do as well, except when 'leave' is given
            # as an option.
            self.reset = False
        if self.reset:
            print('Now reset your board again to switch back to runtime mode.')
``` | /content/code_sandbox/scripts/west_commands/runners/dfu.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,017 |
```python
#
'''Runner for flashing ESP32 devices with esptool/espidf.'''
from os import path
from runners.core import ZephyrBinaryRunner, RunnerCaps
import os
import sys
class Esp32BinaryRunner(ZephyrBinaryRunner):
'''Runner front-end for espidf.'''
def __init__(self, cfg, device, boot_address, part_table_address,
app_address, erase=False, reset=False, baud=921600,
flash_size='detect', flash_freq='40m', flash_mode='dio',
espidf='espidf', bootloader_bin=None, partition_table_bin=None,
no_stub=False):
super().__init__(cfg)
self.elf = cfg.elf_file
self.app_bin = cfg.bin_file
self.erase = bool(erase)
self.reset = bool(reset)
self.device = device
self.boot_address = boot_address
self.part_table_address = part_table_address
self.app_address = app_address
self.baud = baud
self.flash_size = flash_size
self.flash_freq = flash_freq
self.flash_mode = flash_mode
self.espidf = espidf
self.bootloader_bin = bootloader_bin
self.partition_table_bin = partition_table_bin
self.no_stub = no_stub
@classmethod
def name(cls):
return 'esp32'
@classmethod
def capabilities(cls):
return RunnerCaps(commands={'flash'}, erase=True, reset=True)
@classmethod
def do_add_parser(cls, parser):
# Required
parser.add_argument('--esp-idf-path', required=True,
help='path to ESP-IDF')
# Optional
parser.add_argument('--esp-boot-address', default='0x1000',
help='bootloader load address')
parser.add_argument('--esp-partition-table-address', default='0x8000',
help='partition table load address')
parser.add_argument('--esp-app-address', default='0x10000',
help='application load address')
parser.add_argument('--esp-device', default=os.environ.get('ESPTOOL_PORT', None),
help='serial port to flash')
parser.add_argument('--esp-baud-rate', default='921600',
help='serial baud rate, default 921600')
parser.add_argument('--esp-monitor-baud', default='115200',
help='serial monitor baud rate, default 115200')
parser.add_argument('--esp-flash-size', default='detect',
help='flash size, default "detect"')
parser.add_argument('--esp-flash-freq', default='40m',
help='flash frequency, default "40m"')
parser.add_argument('--esp-flash-mode', default='dio',
help='flash mode, default "dio"')
parser.add_argument(
'--esp-tool',
help='''if given, complete path to espidf. default is to search for
it in [ESP_IDF_PATH]/tools/esptool_py/esptool.py''')
parser.add_argument('--esp-flash-bootloader',
help='Bootloader image to flash')
parser.add_argument('--esp-flash-partition_table',
help='Partition table to flash')
parser.add_argument('--esp-no-stub', default=False, action='store_true',
help='Disable launching the flasher stub, only talk to ROM bootloader')
parser.set_defaults(reset=True)
@classmethod
def do_create(cls, cfg, args):
if args.esp_tool:
espidf = args.esp_tool
else:
espidf = path.join(args.esp_idf_path, 'tools', 'esptool_py',
'esptool.py')
return Esp32BinaryRunner(
cfg, args.esp_device, boot_address=args.esp_boot_address,
part_table_address=args.esp_partition_table_address,
app_address=args.esp_app_address, erase=args.erase, reset=args.reset,
baud=args.esp_baud_rate, flash_size=args.esp_flash_size,
flash_freq=args.esp_flash_freq, flash_mode=args.esp_flash_mode,
espidf=espidf, bootloader_bin=args.esp_flash_bootloader,
partition_table_bin=args.esp_flash_partition_table,
no_stub=args.esp_no_stub)
def do_run(self, command, **kwargs):
self.require(self.espidf)
# Add Python interpreter
cmd_flash = [sys.executable, self.espidf, '--chip', 'auto']
if self.device is not None:
cmd_flash.extend(['--port', self.device])
if self.erase is True:
cmd_erase = cmd_flash + ['erase_flash']
self.check_call(cmd_erase)
if self.no_stub is True:
cmd_flash.extend(['--no-stub'])
cmd_flash.extend(['--baud', self.baud])
cmd_flash.extend(['--before', 'default_reset'])
if self.reset:
cmd_flash.extend(['--after', 'hard_reset', 'write_flash', '-u'])
cmd_flash.extend(['--flash_mode', self.flash_mode])
cmd_flash.extend(['--flash_freq', self.flash_freq])
cmd_flash.extend(['--flash_size', self.flash_size])
if self.bootloader_bin:
cmd_flash.extend([self.boot_address, self.bootloader_bin])
if self.partition_table_bin:
cmd_flash.extend([self.part_table_address, self.partition_table_bin])
cmd_flash.extend([self.app_address, self.app_bin])
else:
cmd_flash.extend([self.app_address, self.app_bin])
self.logger.info("Flashing esp32 chip on {} ({}bps)".
format(self.device, self.baud))
self.check_call(cmd_flash)
``` | /content/code_sandbox/scripts/west_commands/runners/esp32.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,195 |
```python
#
'''Runner for pyOCD .'''
import os
from os import path
from runners.core import ZephyrBinaryRunner, RunnerCaps, BuildConfiguration
DEFAULT_PYOCD_GDB_PORT = 3333
DEFAULT_PYOCD_TELNET_PORT = 4444
class PyOcdBinaryRunner(ZephyrBinaryRunner):
'''Runner front-end for pyOCD.'''
def __init__(self, cfg, target,
pyocd='pyocd',
dev_id=None, flash_addr=0x0, erase=False, flash_opts=None,
gdb_port=DEFAULT_PYOCD_GDB_PORT,
telnet_port=DEFAULT_PYOCD_TELNET_PORT, tui=False,
pyocd_config=None,
daparg=None, frequency=None, tool_opt=None):
super().__init__(cfg)
default = path.join(cfg.board_dir, 'support', 'pyocd.yaml')
if path.exists(default):
self.pyocd_config = default
else:
self.pyocd_config = None
self.target_args = ['-t', target]
self.pyocd = pyocd
self.flash_addr_args = ['-a', hex(flash_addr)] if flash_addr else []
self.erase = erase
self.gdb_cmd = [cfg.gdb] if cfg.gdb is not None else None
self.gdb_port = gdb_port
self.telnet_port = telnet_port
self.tui_args = ['-tui'] if tui else []
self.hex_name = cfg.hex_file
self.bin_name = cfg.bin_file
self.elf_name = cfg.elf_file
pyocd_config_args = []
if self.pyocd_config is not None:
pyocd_config_args = ['--config', self.pyocd_config]
self.pyocd_config_args = pyocd_config_args
board_args = []
if dev_id is not None:
board_args = ['-u', dev_id]
self.board_args = board_args
daparg_args = []
if daparg is not None:
daparg_args = ['-da', daparg]
self.daparg_args = daparg_args
frequency_args = []
if frequency is not None:
frequency_args = ['-f', frequency]
self.frequency_args = frequency_args
self.tool_opt_args = tool_opt or []
self.flash_extra = flash_opts if flash_opts else []
@classmethod
def name(cls):
return 'pyocd'
@classmethod
def capabilities(cls):
return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'},
dev_id=True, flash_addr=True, erase=True,
tool_opt=True)
@classmethod
def dev_id_help(cls) -> str:
return '''Device identifier. Use it to select the probe's unique ID
or substring thereof.'''
@classmethod
def do_add_parser(cls, parser):
parser.add_argument('--target', required=True,
help='target override')
parser.add_argument('--daparg',
help='Additional -da arguments to pyocd tool')
parser.add_argument('--pyocd', default='pyocd',
help='path to pyocd tool, default is pyocd')
parser.add_argument('--flash-opt', default=[], action='append',
help='''Additional options for pyocd flash,
e.g. --flash-opt="-e=chip" to chip erase''')
parser.add_argument('--frequency',
help='SWD clock frequency in Hz')
parser.add_argument('--gdb-port', default=DEFAULT_PYOCD_GDB_PORT,
help='pyocd gdb port, defaults to {}'.format(
DEFAULT_PYOCD_GDB_PORT))
parser.add_argument('--telnet-port', default=DEFAULT_PYOCD_TELNET_PORT,
help='pyocd telnet port, defaults to {}'.format(
DEFAULT_PYOCD_TELNET_PORT))
parser.add_argument('--tui', default=False, action='store_true',
help='if given, GDB uses -tui')
parser.add_argument('--board-id', dest='dev_id',
help='obsolete synonym for -i/--dev-id')
@classmethod
def tool_opt_help(cls) -> str:
return """Additional options for pyocd commander,
e.g. '--script=user.py'"""
@classmethod
def do_create(cls, cfg, args):
build_conf = BuildConfiguration(cfg.build_dir)
flash_addr = cls.get_flash_address(args, build_conf)
ret = PyOcdBinaryRunner(
cfg, args.target,
pyocd=args.pyocd,
flash_addr=flash_addr, erase=args.erase, flash_opts=args.flash_opt,
gdb_port=args.gdb_port, telnet_port=args.telnet_port, tui=args.tui,
dev_id=args.dev_id, daparg=args.daparg,
frequency=args.frequency,
tool_opt=args.tool_opt)
daparg = os.environ.get('PYOCD_DAPARG')
if not ret.daparg_args and daparg:
ret.logger.warning('PYOCD_DAPARG is deprecated; use --daparg')
ret.logger.debug('--daparg={} via PYOCD_DAPARG'.format(daparg))
ret.daparg_args = ['-da', daparg]
return ret
def port_args(self):
return ['-p', str(self.gdb_port), '-T', str(self.telnet_port)]
def do_run(self, command, **kwargs):
self.require(self.pyocd)
if command == 'flash':
self.flash(**kwargs)
else:
self.debug_debugserver(command, **kwargs)
def flash(self, **kwargs):
if self.hex_name is not None and os.path.isfile(self.hex_name):
fname = self.hex_name
elif self.bin_name is not None and os.path.isfile(self.bin_name):
self.logger.warning(
'hex file ({}) does not exist; falling back on .bin ({}). '.
format(self.hex_name, self.bin_name) +
'Consider enabling CONFIG_BUILD_OUTPUT_HEX.')
fname = self.bin_name
else:
raise ValueError(
'Cannot flash; no hex ({}) or bin ({}) files found. '.format(
self.hex_name, self.bin_name))
erase_method = 'chip' if self.erase else 'sector'
cmd = ([self.pyocd] +
['flash'] +
self.pyocd_config_args +
['-e', erase_method] +
self.flash_addr_args +
self.daparg_args +
self.target_args +
self.board_args +
self.frequency_args +
self.tool_opt_args +
self.flash_extra +
[fname])
self.logger.info('Flashing file: {}'.format(fname))
self.check_call(cmd)
def log_gdbserver_message(self):
self.logger.info('pyOCD GDB server running on port {}'.
format(self.gdb_port))
def debug_debugserver(self, command, **kwargs):
server_cmd = ([self.pyocd] +
['gdbserver'] +
self.daparg_args +
self.port_args() +
self.target_args +
self.board_args +
self.frequency_args +
self.tool_opt_args)
if command == 'debugserver':
self.log_gdbserver_message()
self.check_call(server_cmd)
else:
if self.gdb_cmd is None:
raise ValueError('Cannot debug; gdb is missing')
if self.elf_name is None:
raise ValueError('Cannot debug; elf is missing')
client_cmd = (self.gdb_cmd +
self.tui_args +
[self.elf_name] +
['-ex', 'target remote :{}'.format(self.gdb_port)])
if command == 'debug':
client_cmd += ['-ex', 'monitor halt',
'-ex', 'monitor reset',
'-ex', 'load']
self.require(client_cmd[0])
self.log_gdbserver_message()
self.run_server_and_client(server_cmd, client_cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/pyocd.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,716 |
```python
#
'''Runner base class for flashing with nrf tools.'''
import abc
from collections import deque
import os
from pathlib import Path
import shlex
import subprocess
import sys
from re import fullmatch, escape
from runners.core import ZephyrBinaryRunner, RunnerCaps
try:
from intelhex import IntelHex
except ImportError:
IntelHex = None
ErrNotAvailableBecauseProtection = 24
ErrVerify = 25
UICR_RANGES = {
'NRF53_FAMILY': {
'NRFDL_DEVICE_CORE_APPLICATION': (0x00FF8000, 0x00FF8800),
'NRFDL_DEVICE_CORE_NETWORK': (0x01FF8000, 0x01FF8800),
},
'NRF91_FAMILY': {
'NRFDL_DEVICE_CORE_APPLICATION': (0x00FF8000, 0x00FF8800),
}
}
class NrfBinaryRunner(ZephyrBinaryRunner):
'''Runner front-end base class for nrf tools.'''
def __init__(self, cfg, family, softreset, dev_id, erase=False,
reset=True, tool_opt=[], force=False, recover=False):
super().__init__(cfg)
self.hex_ = cfg.hex_file
if family and not family.endswith('_FAMILY'):
family = f'{family}_FAMILY'
self.family = family
self.softreset = softreset
self.dev_id = dev_id
self.erase = bool(erase)
self.reset = bool(reset)
self.force = force
self.recover = bool(recover)
self.tool_opt = []
for opts in [shlex.split(opt) for opt in tool_opt]:
self.tool_opt += opts
@classmethod
def capabilities(cls):
return RunnerCaps(commands={'flash'}, dev_id=True, erase=True,
reset=True, tool_opt=True)
@classmethod
def dev_id_help(cls) -> str:
return '''Device identifier. Use it to select the J-Link Serial Number
of the device connected over USB. '*' matches one or more
characters/digits'''
@classmethod
def do_add_parser(cls, parser):
parser.add_argument('--nrf-family',
choices=['NRF51', 'NRF52', 'NRF53', 'NRF54L',
'NRF54H', 'NRF91'],
help='''MCU family; still accepted for
compatibility only''')
parser.add_argument('--softreset', required=False,
action='store_true',
help='use reset instead of pinreset')
parser.add_argument('--snr', required=False, dest='dev_id',
help='obsolete synonym for -i/--dev-id')
parser.add_argument('--force', required=False,
action='store_true',
help='Flash even if the result cannot be guaranteed.')
parser.add_argument('--recover', required=False,
action='store_true',
help='''erase all user available non-volatile
memory and disable read back protection before
flashing (erases flash for both cores on nRF53)''')
parser.set_defaults(reset=True)
def ensure_snr(self):
if not self.dev_id or "*" in self.dev_id:
self.dev_id = self.get_board_snr(self.dev_id or "*")
self.dev_id = self.dev_id.lstrip("0")
@abc.abstractmethod
def do_get_boards(self):
''' Return an array of Segger SNRs '''
def get_boards(self):
snrs = self.do_get_boards()
if not snrs:
raise RuntimeError('Unable to find a board; '
'is the board connected?')
return snrs
@staticmethod
def verify_snr(snr):
if snr == '0':
raise RuntimeError('The Segger SNR obtained is 0; '
'is a debugger already connected?')
def get_board_snr(self, glob):
# Use nrfjprog or nrfutil to discover connected boards.
#
# If there's exactly one board connected, it's safe to assume
# the user wants that one. Otherwise, bail unless there are
# multiple boards and we are connected to a terminal, in which
# case use print() and input() to ask what the user wants.
re_glob = escape(glob).replace(r"\*", ".+")
snrs = [snr for snr in self.get_boards() if fullmatch(re_glob, snr)]
if len(snrs) == 0:
raise RuntimeError(
'There are no boards connected{}.'.format(
f" matching '{glob}'" if glob != "*" else ""))
elif len(snrs) == 1:
board_snr = snrs[0]
self.verify_snr(board_snr)
print("Using board {}".format(board_snr))
return board_snr
elif not sys.stdin.isatty():
raise RuntimeError(
f'refusing to guess which of {len(snrs)} '
'connected boards to use. (Interactive prompts '
'disabled since standard input is not a terminal.) '
'Please specify a serial number on the command line.')
snrs = sorted(snrs)
print('There are multiple boards connected{}.'.format(
f" matching '{glob}'" if glob != "*" else ""))
for i, snr in enumerate(snrs, 1):
print('{}. {}'.format(i, snr))
p = 'Please select one with desired serial number (1-{}): '.format(
len(snrs))
while True:
try:
value = input(p)
except EOFError:
sys.exit(0)
try:
value = int(value)
except ValueError:
continue
if 1 <= value <= len(snrs):
break
return snrs[value - 1]
def ensure_family(self):
# Ensure self.family is set.
if self.family is not None:
return
if self.build_conf.getboolean('CONFIG_SOC_SERIES_NRF51X'):
self.family = 'NRF51_FAMILY'
elif self.build_conf.getboolean('CONFIG_SOC_SERIES_NRF52X'):
self.family = 'NRF52_FAMILY'
elif self.build_conf.getboolean('CONFIG_SOC_SERIES_NRF53X'):
self.family = 'NRF53_FAMILY'
elif self.build_conf.getboolean('CONFIG_SOC_SERIES_NRF54LX'):
self.family = 'NRF54L_FAMILY'
elif self.build_conf.getboolean('CONFIG_SOC_SERIES_NRF54HX'):
self.family = 'NRF54H_FAMILY'
elif self.build_conf.getboolean('CONFIG_SOC_SERIES_NRF91X'):
self.family = 'NRF91_FAMILY'
else:
raise RuntimeError(f'unknown nRF; update {__file__}')
def hex_refers_region(self, region_start, region_end):
for segment_start, _ in self.hex_contents.segments():
if region_start <= segment_start <= region_end:
return True
return False
def hex_get_uicrs(self):
hex_uicrs = {}
if self.family in UICR_RANGES:
for uicr_core, uicr_range in UICR_RANGES[self.family].items():
if self.hex_refers_region(*uicr_range):
hex_uicrs[uicr_core] = uicr_range
return hex_uicrs
def flush(self, force=False):
try:
self.flush_ops(force=force)
except subprocess.CalledProcessError as cpe:
if cpe.returncode == ErrNotAvailableBecauseProtection:
if self.family == 'NRF53_FAMILY':
family_help = (
' Note: your target is an nRF53; all flash memory '
'for both the network and application cores will be '
'erased prior to reflashing.')
else:
family_help = (
' Note: this will recover and erase all flash memory '
'prior to reflashing.')
self.logger.error(
'Flashing failed because the target '
'must be recovered.\n'
' To fix, run "west flash --recover" instead.\n' +
family_help)
if cpe.returncode == ErrVerify:
# If there are data in the UICR region it is likely that the
# verify failed du to the UICR not been erased before, so giving
# a warning here will hopefully enhance UX.
if self.hex_get_uicrs():
self.logger.warning(
'The hex file contains data placed in the UICR, which '
'may require a full erase before reprogramming. Run '
'west flash again with --erase, or --recover.')
raise
def recover_target(self):
if self.family in ('NRF53_FAMILY', 'NRF54H_FAMILY'):
self.logger.info(
'Recovering and erasing flash memory for both the network '
'and application cores.')
else:
self.logger.info('Recovering and erasing all flash memory.')
# The network core of the nRF53 needs to be recovered first due to the
# fact that recovering it erases the flash of *both* cores. Since a
# recover operation unlocks the core and then flashes a small image that
# keeps the debug access port open, recovering the network core last
# would result in that small image being deleted from the app core.
# In the case of the 54H, the order is indifferent.
if self.family in ('NRF53_FAMILY', 'NRF54H_FAMILY'):
self.exec_op('recover', core='NRFDL_DEVICE_CORE_NETWORK')
self.exec_op('recover')
def program_hex(self):
# Get the command use to actually program self.hex_.
self.logger.info('Flashing file: {}'.format(self.hex_))
# What type of erase/core arguments should we pass to the tool?
core = None
if self.family == 'NRF54H_FAMILY':
erase_arg = 'ERASE_NONE'
if self.erase:
self.exec_op('erase', core='NRFDL_DEVICE_CORE_APPLICATION')
self.exec_op('erase', core='NRFDL_DEVICE_CORE_NETWORK')
# Manage SUIT artifacts.
# This logic should be executed only once per build.
# Use sysbuild board qualifiers to select the context, with which the artifacts will be programmed.
if self.build_conf.get('CONFIG_BOARD_QUALIFIERS') == self.sysbuild_conf.get('SB_CONFIG_BOARD_QUALIFIERS'):
mpi_hex_dir = Path(os.path.join(self.cfg.build_dir, 'zephyr'))
# Handle Manifest Provisioning Information
if self.build_conf.getboolean('CONFIG_SUIT_MPI_GENERATE'):
app_mpi_hex_file = os.fspath(
mpi_hex_dir / self.build_conf.get('CONFIG_SUIT_MPI_APP_AREA_PATH'))
rad_mpi_hex_file = os.fspath(
mpi_hex_dir / self.build_conf.get('CONFIG_SUIT_MPI_RAD_AREA_PATH'))
self.op_program(app_mpi_hex_file, 'ERASE_NONE', None, defer=True, core='NRFDL_DEVICE_CORE_APPLICATION')
self.op_program(rad_mpi_hex_file, 'ERASE_NONE', None, defer=True, core='NRFDL_DEVICE_CORE_NETWORK')
# Handle SUIT root manifest if application manifests are not used.
# If an application firmware is built, the root envelope is merged with other application manifests
# as well as the output HEX file.
if not self.build_conf.getboolean('CONFIG_SOC_NRF54H20_CPUAPP') and self.sysbuild_conf.get('SB_CONFIG_SUIT_ENVELOPE'):
app_root_envelope_hex_file = os.fspath(
mpi_hex_dir / 'suit_installed_envelopes_application_merged.hex')
self.op_program(app_root_envelope_hex_file, 'ERASE_NONE', None, defer=True, core='NRFDL_DEVICE_CORE_APPLICATION')
if self.build_conf.getboolean('CONFIG_SOC_NRF54H20_CPUAPP'):
if not self.erase and self.build_conf.getboolean('CONFIG_NRF_REGTOOL_GENERATE_UICR'):
self.exec_op('erase', core='NRFDL_DEVICE_CORE_APPLICATION',
option={'chip_erase_mode': 'ERASE_UICR',
'qspi_erase_mode': 'ERASE_NONE'})
core = 'NRFDL_DEVICE_CORE_APPLICATION'
elif self.build_conf.getboolean('CONFIG_SOC_NRF54H20_CPURAD'):
if not self.erase and self.build_conf.getboolean('CONFIG_NRF_REGTOOL_GENERATE_UICR'):
self.exec_op('erase', core='NRFDL_DEVICE_CORE_NETWORK',
option={'chip_erase_mode': 'ERASE_UICR',
'qspi_erase_mode': 'ERASE_NONE'})
core = 'NRFDL_DEVICE_CORE_NETWORK'
else:
if self.erase:
erase_arg = 'ERASE_ALL'
else:
if self.family == 'NRF52_FAMILY':
erase_arg = 'ERASE_PAGES_INCLUDING_UICR'
else:
erase_arg = 'ERASE_PAGES'
xip_ranges = {
'NRF52_FAMILY': (0x12000000, 0x19FFFFFF),
'NRF53_FAMILY': (0x10000000, 0x1FFFFFFF),
}
qspi_erase_opt = None
if self.family in xip_ranges:
xip_start, xip_end = xip_ranges[self.family]
if self.hex_refers_region(xip_start, xip_end):
qspi_erase_opt = 'ERASE_ALL'
# What tool commands do we need to flash this target?
if self.family == 'NRF53_FAMILY':
# nRF53 requires special treatment due to the extra coprocessor.
self.program_hex_nrf53(erase_arg, qspi_erase_opt)
else:
self.op_program(self.hex_, erase_arg, qspi_erase_opt, defer=True, core=core)
self.flush(force=False)
def program_hex_nrf53(self, erase_arg, qspi_erase_opt):
# program_hex() helper for nRF53.
# *********************** NOTE *******************************
# self.hex_ can contain code for both the application core and
# the network core.
#
# We can't assume, for example, that
# CONFIG_SOC_NRF5340_CPUAPP=y means self.hex_ only contains
# data for the app core's flash: the user can put arbitrary
# addresses into one of the files in HEX_FILES_TO_MERGE.
#
# Therefore, on this family, we may need to generate two new
# hex files, one for each core, and flash them individually
# with the correct '--coprocessor' arguments.
#
# Kind of hacky, but it works, and the tools are not capable of
# flashing to both cores at once. If self.hex_ only affects
# one core's flash, then we skip the extra work to save time.
# ************************************************************
# Address range of the network coprocessor's flash. From nRF5340 OPS.
# We should get this from DTS instead if multiple values are possible,
# but this is fine for now.
net_flash_start = 0x01000000
net_flash_end = 0x0103FFFF
# If there is nothing in the hex file for the network core,
# only the application core is programmed.
if not self.hex_refers_region(net_flash_start, net_flash_end):
self.op_program(self.hex_, erase_arg, qspi_erase_opt, defer=True,
core='NRFDL_DEVICE_CORE_APPLICATION')
# If there is some content that addresses a region beyond the network
# core flash range, two hex files are generated and the two cores
# are programmed one by one.
elif self.hex_contents.minaddr() < net_flash_start or \
self.hex_contents.maxaddr() > net_flash_end:
net_hex, app_hex = IntelHex(), IntelHex()
for start, end in self.hex_contents.segments():
if net_flash_start <= start <= net_flash_end:
net_hex.merge(self.hex_contents[start:end])
else:
app_hex.merge(self.hex_contents[start:end])
hex_path = Path(self.hex_)
hex_dir, hex_name = hex_path.parent, hex_path.name
net_hex_file = os.fspath(
hex_dir / f'GENERATED_CP_NETWORK_{hex_name}')
app_hex_file = os.fspath(
hex_dir / f'GENERATED_CP_APPLICATION_{hex_name}')
self.logger.info(
f'{self.hex_} targets both nRF53 coprocessors; '
f'splitting it into: {net_hex_file} and {app_hex_file}')
net_hex.write_hex_file(net_hex_file)
app_hex.write_hex_file(app_hex_file)
self.op_program(net_hex_file, erase_arg, None, defer=True,
core='NRFDL_DEVICE_CORE_NETWORK')
self.op_program(app_hex_file, erase_arg, qspi_erase_opt, defer=True,
core='NRFDL_DEVICE_CORE_APPLICATION')
# Otherwise, only the network core is programmed.
else:
self.op_program(self.hex_, erase_arg, None, defer=True,
core='NRFDL_DEVICE_CORE_NETWORK')
def reset_target(self):
if self.family == 'NRF52_FAMILY' and not self.softreset:
self.exec_op('pinreset-enable')
if self.softreset:
self.exec_op('reset', option="RESET_SYSTEM")
else:
self.exec_op('reset', option="RESET_PIN")
@abc.abstractmethod
def do_require(self):
''' Ensure the tool is installed '''
def op_program(self, hex_file, erase, qspi_erase, defer=False, core=None):
args = {'firmware': {'file': hex_file},
'chip_erase_mode': erase, 'verify': 'VERIFY_READ'}
if qspi_erase:
args['qspi_erase_mode'] = qspi_erase
self.exec_op('program', defer, core, **args)
def exec_op(self, op, defer=False, core=None, **kwargs):
_op = f'{op}'
op = {'operation': {'type': _op}}
if core:
op['core'] = core
op['operation'].update(kwargs)
self.logger.debug(f'defer: {defer} op: {op}')
if defer or not self.do_exec_op(op, force=False):
self.ops.append(op)
@abc.abstractmethod
def do_exec_op(self, op, force=False):
''' Execute an operation. Return True if executed, False if not.
Throws subprocess.CalledProcessError with the appropriate
returncode if a failure arises.'''
def flush_ops(self, force=True):
''' Execute any remaining ops in the self.ops array.
Throws subprocess.CalledProcessError with the appropriate
returncode if a failure arises.
Subclasses can override this method for special handling of
queued ops.'''
self.logger.debug('Flushing ops')
while self.ops:
self.do_exec_op(self.ops.popleft(), force)
def do_run(self, command, **kwargs):
self.do_require()
self.ensure_output('hex')
if IntelHex is None:
raise RuntimeError('Python dependency intelhex was missing; '
'see the getting started guide for details on '
'how to fix')
self.hex_contents = IntelHex()
try:
self.hex_contents.loadfile(self.hex_, format='hex')
except FileNotFoundError:
pass
self.ensure_snr()
self.ensure_family()
self.ops = deque()
if self.recover:
self.recover_target()
self.program_hex()
if self.reset:
self.reset_target()
# All done, now flush any outstanding ops
self.flush(force=True)
self.logger.info(f'Board with serial number {self.dev_id} '
'flashed successfully.')
``` | /content/code_sandbox/scripts/west_commands/runners/nrf_common.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 4,278 |
```python
#
'''Runner for NIOS II, based on quartus-flash.py and GDB.'''
from runners.core import ZephyrBinaryRunner, NetworkPortHelper, RunnerCaps
class Nios2BinaryRunner(ZephyrBinaryRunner):
'''Runner front-end for NIOS II.'''
# From the original shell script:
#
# "XXX [flash] only support[s] cases where the .elf is sent
# over the JTAG and the CPU directly boots from __start. CONFIG_XIP
# and CONFIG_INCLUDE_RESET_VECTOR must be disabled."
def __init__(self, cfg, quartus_py=None, cpu_sof=None, tui=False):
super().__init__(cfg)
self.hex_name = cfg.hex_file
self.elf_name = cfg.elf_file
self.cpu_sof = cpu_sof
self.quartus_py = quartus_py
self.gdb_cmd = [cfg.gdb] if cfg.gdb else None
self.tui_arg = ['-tui'] if tui else []
@classmethod
def name(cls):
return 'nios2'
@classmethod
def capabilities(cls):
return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'})
@classmethod
def do_add_parser(cls, parser):
# TODO merge quartus-flash.py script into this file.
parser.add_argument('--quartus-flash', required=True)
parser.add_argument('--cpu-sof', required=True,
help='path to the CPU .sof data')
parser.add_argument('--tui', default=False, action='store_true',
help='if given, GDB uses -tui')
@classmethod
def do_create(cls, cfg, args):
return Nios2BinaryRunner(cfg,
quartus_py=args.quartus_flash,
cpu_sof=args.cpu_sof,
tui=args.tui)
def do_run(self, command, **kwargs):
if command == 'flash':
self.flash(**kwargs)
else:
self.debug_debugserver(command, **kwargs)
def flash(self, **kwargs):
if self.quartus_py is None:
raise ValueError('Cannot flash; --quartus-flash not given.')
if self.cpu_sof is None:
raise ValueError('Cannot flash; --cpu-sof not given.')
self.ensure_output('hex')
self.logger.info('Flashing file: {}'.format(self.hex_name))
cmd = [self.quartus_py,
'--sof', self.cpu_sof,
'--kernel', self.hex_name]
self.require(cmd[0])
self.check_call(cmd)
def print_gdbserver_message(self, gdb_port):
self.logger.info('Nios II GDB server running on port {}'.
format(gdb_port))
def debug_debugserver(self, command, **kwargs):
# Per comments in the shell script, the NIOSII GDB server
# doesn't exit gracefully, so it's better to explicitly search
# for an unused port. The script picks a random value in
# between 1024 and 49151, but we'll start with the
# "traditional" 3333 choice.
gdb_start = 3333
nh = NetworkPortHelper()
gdb_port = nh.get_unused_ports([gdb_start])[0]
server_cmd = (['nios2-gdb-server',
'--tcpport', str(gdb_port),
'--stop', '--reset-target'])
self.require(server_cmd[0])
if command == 'debugserver':
self.print_gdbserver_message(gdb_port)
self.check_call(server_cmd)
else:
if self.elf_name is None:
raise ValueError('Cannot debug; elf is missing')
if self.gdb_cmd is None:
raise ValueError('Cannot debug; no gdb specified')
gdb_cmd = (self.gdb_cmd +
self.tui_arg +
[self.elf_name,
'-ex', 'target remote :{}'.format(gdb_port)])
self.require(gdb_cmd[0])
self.print_gdbserver_message(gdb_port)
self.run_server_and_client(server_cmd, gdb_cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/nios2.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 894 |
```python
#
import importlib
import logging
from runners.core import ZephyrBinaryRunner, MissingProgram
_logger = logging.getLogger('runners')
def _import_runner_module(runner_name):
try:
importlib.import_module(f'runners.{runner_name}')
except ImportError as ie:
# Runners are supposed to gracefully handle failures when they
# import anything outside of stdlib, but they sometimes do
# not. Catch ImportError to handle this.
_logger.warning(f'The module for runner "{runner_name}" '
f'could not be imported ({ie}). This most likely '
'means it is not handling its dependencies properly. '
'Please report this to the zephyr developers.')
# We import these here to ensure the ZephyrBinaryRunner subclasses are
# defined; otherwise, ZephyrBinaryRunner.get_runners() won't work.
_names = [
'blackmagicprobe',
'bossac',
'canopen_program',
'dediprog',
'dfu',
'esp32',
'ezflashcli',
'gd32isp',
'hifive1',
'intel_adsp',
'intel_cyclonev',
'jlink',
'linkserver',
'mdb',
'misc',
'native',
'nios2',
'nrfjprog',
'nrfutil',
'nsim',
'nxp_s32dbg',
'openocd',
'probe_rs',
'pyocd',
'renode',
'renode-robot',
'qemu',
'silabs_commander',
'spi_burn',
'stm32cubeprogrammer',
'stm32flash',
'teensy',
'trace32',
'uf2',
'xtensa',
# Keep this list sorted by runner name; don't add to the end.
]
for _name in _names:
_import_runner_module(_name)
def get_runner_cls(runner):
'''Get a runner's class object, given its name.'''
for cls in ZephyrBinaryRunner.get_runners():
if cls.name() == runner:
return cls
raise ValueError('unknown runner "{}"'.format(runner))
__all__ = ['ZephyrBinaryRunner', 'get_runner_cls']
``` | /content/code_sandbox/scripts/west_commands/runners/__init__.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 499 |
```python
#
'''Catch-all module for miscellaneous devices which can't use a
generic or widely used tool like J-Link, OpenOCD, etc.
Please use this sparingly and only when your setup is exotic and
you're willing to handle requests for help. E.g. if your "board" is a
core on a special-purpose SoC which requires a complicated script to
network boot.'''
from runners.core import ZephyrBinaryRunner, RunnerCaps
import argparse
class MiscFlasher(ZephyrBinaryRunner):
'''Runner for handling special purpose flashing commands.'''
def __init__(self, cfg, cmd, args):
super().__init__(cfg)
if not cmd:
# This is a board definition error, not a user error,
# so we can do it now and not in do_run().
raise ValueError('no command was given')
self.cmd = cmd
self.args = args
@classmethod
def name(cls):
return 'misc-flasher'
@classmethod
def capabilities(cls):
return RunnerCaps(commands={'flash'})
@classmethod
def do_add_parser(cls, parser):
parser.add_argument('cmd',
help='''command to run; it will be passed the
build directory as its first argument''')
parser.add_argument('args', nargs=argparse.REMAINDER,
help='''additional arguments to pass after the build
directory''')
@classmethod
def do_create(cls, cfg, args):
return MiscFlasher(cfg, args.cmd, args.args)
def do_run(self, *args, **kwargs):
self.require(self.cmd)
self.check_call([self.cmd, self.cfg.build_dir] + self.args)
``` | /content/code_sandbox/scripts/west_commands/runners/misc.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 363 |
```python
#
'''Dediprog (dpcmd) flash only runner for SPI chips.'''
import platform
import subprocess
from runners.core import ZephyrBinaryRunner, RunnerCaps
DPCMD_EXE = 'dpcmd.exe' if platform.system() == 'Windows' else 'dpcmd'
DEFAULT_MAX_RETRIES = 3
class DediProgBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for DediProg (dpcmd).'''

    def __init__(self, cfg, spi_image, vcc, retries):
        super().__init__(cfg)
        self.spi_image = spi_image
        self.vcc = vcc
        self.dpcmd_retries = retries

    @classmethod
    def name(cls):
        return 'dediprog'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'})

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--spi-image', required=True,
                            help='path to SPI image')
        parser.add_argument('--vcc',
                            help='VCC (0=3.5V, 1=2.5V, 2=1.8V)')
        parser.add_argument('--retries', default=5,
                            help='Number of retries (default 5)')

    @classmethod
    def do_create(cls, cfg, args):
        return DediProgBinaryRunner(cfg,
                                    spi_image=args.spi_image,
                                    vcc=args.vcc,
                                    retries=args.retries)

    def do_run(self, command, **kwargs):
        self.require(DPCMD_EXE)

        cmd_flash = [DPCMD_EXE, '--auto', self.spi_image]
        if self.vcc:
            cmd_flash.extend(['--vcc', self.vcc])
        # Allow to flash images smaller than flash device capacity
        cmd_flash.extend(['-x', 'ff'])
        cmd_flash.extend(['--silent', '--verify'])

        # --retries arrives as a string from argparse; fall back to the
        # module default if it is not a valid integer.
        try:
            max_retries = int(self.dpcmd_retries)
        except ValueError:
            max_retries = DEFAULT_MAX_RETRIES

        # dpcmd can fail transiently: attempt the flash up to
        # max_retries + 1 times, re-raising the final failure.
        for attempt in range(max_retries + 1):
            try:
                self.check_call(cmd_flash)
                break
            except subprocess.CalledProcessError:
                if attempt == max_retries:
                    raise
``` | /content/code_sandbox/scripts/west_commands/runners/dediprog.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 512 |
```python
#
'''ARC architecture-specific runners.'''
from os import path
from runners.core import ZephyrBinaryRunner, RunnerCaps
DEFAULT_ARC_GDB_PORT = 3333
DEFAULT_PROPS_FILE = 'nsim_em.props'
class NsimBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for the ARC nSIM.'''

    # This unusual 'flash' implementation matches the original shell script.
    #
    # It works by starting a GDB server in a separate session, connecting a
    # client to it to load the program, and running 'continue' within the
    # client to execute the application.
    #

    def __init__(self, cfg,
                 tui=False,
                 gdb_port=DEFAULT_ARC_GDB_PORT,
                 props=DEFAULT_PROPS_FILE):
        super().__init__(cfg)
        if cfg.gdb is None:
            self.gdb_cmd = None
        else:
            self.gdb_cmd = [cfg.gdb] + (['-tui'] if tui else [])
        self.nsim_cmd = ['nsimdrv']
        self.gdb_port = gdb_port
        self.props = props

    @classmethod
    def name(cls):
        return 'arc-nsim'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'})

    @classmethod
    def do_add_parser(cls, parser):
        # FIX: derive the help text from the actual defaults; the --props
        # help previously claimed a "nsim.props" default, which did not
        # match DEFAULT_PROPS_FILE ('nsim_em.props').
        parser.add_argument('--gdb-port', default=DEFAULT_ARC_GDB_PORT,
                            help=f'nsim gdb port, defaults to {DEFAULT_ARC_GDB_PORT}')
        parser.add_argument('--props', default=DEFAULT_PROPS_FILE,
                            help=f'nsim props file, defaults to {DEFAULT_PROPS_FILE}')

    @classmethod
    def do_create(cls, cfg, args):
        return NsimBinaryRunner(
            cfg,
            gdb_port=args.gdb_port,
            props=args.props)

    def do_run(self, command, **kwargs):
        self.require(self.nsim_cmd[0])
        # The props file is looked up in the board's support directory.
        kwargs['nsim-cfg'] = path.join(self.cfg.board_dir, 'support',
                                       self.props)

        if command == 'flash':
            self.do_flash(**kwargs)
        elif command == 'debug':
            self.do_debug(**kwargs)
        else:
            self.debugserver(**kwargs)

    def do_flash(self, **kwargs):
        # 'flash' simply runs the ELF in the simulator.
        config = kwargs['nsim-cfg']

        cmd = (self.nsim_cmd + ['-propsfile', config, self.cfg.elf_file])
        self.check_call(cmd)

    def do_debug(self, **kwargs):
        if self.gdb_cmd is None:
            raise ValueError('Cannot debug; gdb is missing')

        config = kwargs['nsim-cfg']

        server_cmd = (self.nsim_cmd + ['-gdb',
                                       '-port={}'.format(self.gdb_port),
                                       '-propsfile', config])
        gdb_cmd = (self.gdb_cmd +
                   ['-ex', 'target remote :{}'.format(self.gdb_port),
                    '-ex', 'load', self.cfg.elf_file])
        self.require(gdb_cmd[0])
        self.run_server_and_client(server_cmd, gdb_cmd)

    def debugserver(self, **kwargs):
        # 'debugserver'/'attach': start the simulator's GDB server and
        # leave connecting a client to the user.
        config = kwargs['nsim-cfg']

        cmd = (self.nsim_cmd +
               ['-gdb', '-port={}'.format(self.gdb_port),
                '-propsfile', config])

        self.check_call(cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/nsim.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 710 |
```python
#
'''Modified openocd and gdb runner for Cyclone V SoC DevKit.'''
import subprocess
import re
import os
from os import path
from pathlib import Path
from runners.core import ZephyrBinaryRunner, RunnerCaps
DEFAULT_OPENOCD_TCL_PORT = 6333
DEFAULT_OPENOCD_TELNET_PORT = 4444
DEFAULT_OPENOCD_GDB_PORT = 3333
DEFAULT_OPENOCD_RESET_HALT_CMD = 'reset halt'
class IntelCycloneVBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for openocd.'''

    def __init__(self, cfg, pre_init=None, reset_halt_cmd=DEFAULT_OPENOCD_RESET_HALT_CMD,
                 pre_load=None, load_cmd=None, verify_cmd=None, post_verify=None,
                 do_verify=False, do_verify_only=False,
                 tui=None, config=None, serial=None, use_elf=True,
                 no_halt=False, no_init=False, no_targets=False,
                 tcl_port=DEFAULT_OPENOCD_TCL_PORT,
                 telnet_port=DEFAULT_OPENOCD_TELNET_PORT,
                 gdb_port=DEFAULT_OPENOCD_GDB_PORT,
                 gdb_init=None, no_load=False):
        super().__init__(cfg)

        support = path.join(cfg.board_dir, 'support')

        # FIX: previously these three names were bound only inside the
        # 'if not config:' / 'path.exists(default)' branch below, so
        # passing --config (or a board without support/openocd.cfg)
        # raised UnboundLocalError at the self.gdb_cmds assignment.
        # Default them to None; the do_* methods already guard with
        # 'is not None'.
        gdb_commands = None
        gdb_commands2 = None
        gdb_commands_deb = None

        if not config:
            # Default config and GDB scripts live in the board's
            # support directory.
            default = path.join(support, 'openocd.cfg')
            default2 = path.join(support, 'download_all.gdb')
            default3 = path.join(support, 'appli_dl_cmd.gdb')
            default4 = path.join(support, 'appli_debug_cmd.gdb')
            if path.exists(default):
                config = [default]
                gdb_commands = [default2]
                gdb_commands2 = [default3]
                gdb_commands_deb = [default4]

        self.openocd_config = config
        self.gdb_cmds = gdb_commands
        self.gdb_cmds2 = gdb_commands2
        self.gdb_cmds_deb = gdb_commands_deb

        # Build the openocd '-s' search path from the support directory,
        # any extra config file directories, and cfg.openocd_search.
        search_args = []
        if path.exists(support):
            search_args.append('-s')
            search_args.append(support)
        if self.openocd_config is not None:
            for i in self.openocd_config:
                if path.exists(i) and not path.samefile(path.dirname(i), support):
                    search_args.append('-s')
                    search_args.append(path.dirname(i))
        if cfg.openocd_search is not None:
            for p in cfg.openocd_search:
                search_args.extend(['-s', p])
        self.openocd_cmd = [cfg.openocd or 'openocd'] + search_args
        # openocd doesn't cope with Windows path names, so convert
        # them to POSIX style just to be sure.
        self.elf_name = Path(cfg.elf_file).as_posix()
        self.pre_init = pre_init or []
        self.reset_halt_cmd = reset_halt_cmd
        self.pre_load = pre_load or []
        self.load_cmd = load_cmd
        self.verify_cmd = verify_cmd
        self.post_verify = post_verify or []
        self.do_verify = do_verify or False
        self.do_verify_only = do_verify_only or False
        self.tcl_port = tcl_port
        self.telnet_port = telnet_port
        self.gdb_port = gdb_port
        self.gdb_cmd = [cfg.gdb] if cfg.gdb else None
        self.tui_arg = ['-tui'] if tui else []
        self.halt_arg = [] if no_halt else ['-c halt']
        self.init_arg = [] if no_init else ['-c init']
        self.targets_arg = [] if no_targets else ['-c targets']
        self.serial = ['-c set _ZEPHYR_BOARD_SERIAL ' + serial] if serial else []
        self.use_elf = use_elf
        self.gdb_init = gdb_init
        self.load_arg = [] if no_load else ['-ex', 'load']

    @classmethod
    def name(cls):
        return 'intel_cyclonev'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash', 'debug', 'attach'},
                          dev_id=False, flash_addr=False, erase=False)

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--config', action='append',
                            help='''if given, override default config file;
                            may be given multiple times''')
        parser.add_argument('--serial', default="",
                            help='if given, selects FTDI instance by its serial number, defaults to empty')
        parser.add_argument('--use-elf', default=False, action='store_true',
                            help='if given, Elf file will be used for loading instead of HEX image')
        # Options for flashing:
        parser.add_argument('--cmd-pre-init', action='append',
                            help='''Command to run before calling init;
                            may be given multiple times''')
        parser.add_argument('--cmd-reset-halt', default=DEFAULT_OPENOCD_RESET_HALT_CMD,
                            help=f'''Command to run for resetting and halting the target,
                            defaults to "{DEFAULT_OPENOCD_RESET_HALT_CMD}"''')
        parser.add_argument('--cmd-pre-load', action='append',
                            help='''Command to run before flashing;
                            may be given multiple times''')
        parser.add_argument('--cmd-load',
                            help='''Command to load/flash binary
                            (required when flashing)''')
        parser.add_argument('--cmd-verify',
                            help='''Command to verify flashed binary''')
        parser.add_argument('--cmd-post-verify', action='append',
                            help='''Command to run after verification;
                            may be given multiple times''')
        parser.add_argument('--verify', action='store_true',
                            help='if given, verify after flash')
        parser.add_argument('--verify-only', action='store_true',
                            help='if given, do verify and verify only. No flashing')
        # Options for debugging:
        parser.add_argument('--tui', default=False, action='store_true',
                            help='if given, GDB uses -tui')
        parser.add_argument('--tcl-port', default=DEFAULT_OPENOCD_TCL_PORT,
                            help='openocd TCL port, defaults to 6333')
        parser.add_argument('--telnet-port',
                            default=DEFAULT_OPENOCD_TELNET_PORT,
                            help='openocd telnet port, defaults to 4444')
        parser.add_argument('--gdb-port', default=DEFAULT_OPENOCD_GDB_PORT,
                            help='openocd gdb port, defaults to 3333')
        parser.add_argument('--gdb-init', action='append',
                            help='if given, add GDB init commands')
        parser.add_argument('--no-halt', action='store_true',
                            help='if given, no halt issued in gdb server cmd')
        parser.add_argument('--no-init', action='store_true',
                            help='if given, no init issued in gdb server cmd')
        parser.add_argument('--no-targets', action='store_true',
                            help='if given, no target issued in gdb server cmd')
        parser.add_argument('--no-load', action='store_true',
                            help='if given, no load issued in gdb server cmd')

    @classmethod
    def do_create(cls, cfg, args):
        return IntelCycloneVBinaryRunner(
            cfg,
            pre_init=args.cmd_pre_init, reset_halt_cmd=args.cmd_reset_halt,
            pre_load=args.cmd_pre_load, load_cmd=args.cmd_load,
            verify_cmd=args.cmd_verify, post_verify=args.cmd_post_verify,
            do_verify=args.verify, do_verify_only=args.verify_only,
            tui=args.tui, config=args.config, serial=args.serial,
            use_elf=args.use_elf, no_halt=args.no_halt, no_init=args.no_init,
            no_targets=args.no_targets, tcl_port=args.tcl_port,
            telnet_port=args.telnet_port, gdb_port=args.gdb_port,
            gdb_init=args.gdb_init, no_load=args.no_load)

    def print_gdbserver_message(self):
        # NOTE(review): self.thread_info_enabled is read here (and in the
        # do_* methods) but never assigned in this class; it appears to be
        # carried over from the generic openocd runner this file is based
        # on. Confirm where it is supposed to be set.
        if not self.thread_info_enabled:
            thread_msg = '; no thread info available'
        elif self.supports_thread_info():
            thread_msg = '; thread info enabled'
        else:
            thread_msg = '; update OpenOCD software for thread info'
        self.logger.info('OpenOCD GDB server running on port '
                         f'{self.gdb_port}{thread_msg}')

    # pylint: disable=R0201
    def to_num(self, number):
        # Convert one version component (possibly suffixed with '+dev')
        # to an int; a '+dev' suffix counts as the next release.
        dev_match = re.search(r"^\d*\+dev", number)
        dev_version = not dev_match is None
        num_match = re.search(r"^\d*", number)
        num = int(num_match.group(0))
        if dev_version:
            num += 1
        return num

    def read_version(self):
        '''Return the openocd version as a [major, minor, rev] int list.'''
        self.require(self.openocd_cmd[0])
        # OpenOCD prints in stderr, need redirect to get output
        out = self.check_output([self.openocd_cmd[0], '--version'],
                                stderr=subprocess.STDOUT).decode()
        version_match = re.search(r"Open On-Chip Debugger (\d+.\d+.\d+)", out)
        version = version_match.group(1).split('.')
        return [self.to_num(i) for i in version]

    def supports_thread_info(self):
        # Zephyr rtos was introduced after 0.11.0
        (major, minor, rev) = self.read_version()
        return (major, minor, rev) > (0, 11, 0)

    def do_run(self, command, **kwargs):
        self.require(self.openocd_cmd[0])

        self.cfg_cmd = []
        if self.openocd_config is not None:
            for i in self.openocd_config:
                self.cfg_cmd.append('-f')
                self.cfg_cmd.append(i)

        # NOTE: both flash branches currently use the ELF-based flow.
        if command == 'flash' and self.use_elf:
            self.do_flash_elf(**kwargs)
        elif command == 'flash':
            self.do_flash_elf(**kwargs)
        elif command in ('attach', 'debug'):
            self.do_attach_debug(command, **kwargs)

    def do_flash_elf(self, **kwargs):
        if self.gdb_cmd is None:
            raise ValueError('Cannot debug; no gdb specified')
        if self.elf_name is None:
            raise ValueError('Cannot debug; no .elf specified')

        pre_init_cmd = []
        for i in self.pre_init:
            pre_init_cmd.append("-c")
            pre_init_cmd.append(i)
        pre_init_cmd.append("-q")
        if self.thread_info_enabled and self.supports_thread_info():
            pre_init_cmd.append("-c")
            pre_init_cmd.append("$_TARGETNAME configure -rtos Zephyr")
        server_cmd = (self.openocd_cmd + self.serial + self.cfg_cmd + #added mevalver
                      pre_init_cmd)
        temp_str = '--cd=' + os.environ.get('ZEPHYR_BASE') #Go to Zephyr base Dir
        gdb_cmd = (self.gdb_cmd + self.tui_arg +
                   [temp_str,'-ex', 'target extended-remote localhost:{}'.format(self.gdb_port) , '-batch']) #Execute First Script in Zephyr Base Dir
        gdb_cmd2 = (self.gdb_cmd + self.tui_arg +
                    ['-ex', 'target extended-remote localhost:{}'.format(self.gdb_port) , '-batch']) #Execute Second Script in Build Dir
        echo = ['echo']
        if self.gdb_init is not None:
            for i in self.gdb_init:
                gdb_cmd.append("-ex")
                gdb_cmd.append(i)
                gdb_cmd2.append("-ex")
                gdb_cmd2.append(i)
        if self.gdb_cmds is not None:
            for i in self.gdb_cmds:
                gdb_cmd.append("-x")
                gdb_cmd.append(i)
        if self.gdb_cmds2 is not None:
            for i in self.gdb_cmds2:
                gdb_cmd2.append("-x")
                gdb_cmd2.append(i)
        self.require(gdb_cmd[0])
        self.print_gdbserver_message()
        # Echo the commands first for visibility, then run the real
        # server/client pairs (first script from ZEPHYR_BASE, second
        # from the build directory).
        cmd1 = echo + server_cmd
        self.check_call(cmd1)
        cmd2 = echo + gdb_cmd
        self.check_call(cmd2)
        cmd3 = echo + gdb_cmd2
        self.check_call(cmd3)
        self.run_server_and_client(server_cmd, gdb_cmd)
        self.run_server_and_client(server_cmd, gdb_cmd2)

    def do_attach_debug(self, command, **kwargs):
        if self.gdb_cmd is None:
            raise ValueError('Cannot debug; no gdb specified')
        if self.elf_name is None:
            raise ValueError('Cannot debug; no .elf specified')

        pre_init_cmd = []
        for i in self.pre_init:
            pre_init_cmd.append("-c")
            pre_init_cmd.append(i)
        if self.thread_info_enabled and self.supports_thread_info():
            pre_init_cmd.append("-c")
            pre_init_cmd.append("$_TARGETNAME configure -rtos Zephyr")
        pre_init_cmd.append("-q")
        server_cmd = (self.openocd_cmd + self.serial + self.cfg_cmd +
                      pre_init_cmd)
        gdb_attach = (self.gdb_cmd + self.tui_arg +
                      ['-ex', 'target extended-remote :{}'.format(self.gdb_port),
                       self.elf_name, '-q'])
        temp_str = '--cd=' + os.environ.get('ZEPHYR_BASE') #Go to Zephyr base Dir
        gdb_cmd = (self.gdb_cmd + self.tui_arg +
                   [temp_str,'-ex', 'target extended-remote localhost:{}'.format(self.gdb_port) , '-batch']) #Execute First Script in Zephyr Base Dir
        gdb_cmd2 = (self.gdb_cmd + self.tui_arg +
                    ['-ex', 'target extended-remote :{}'.format(self.gdb_port) , '-batch']) #Execute Second Script in Build Dir
        if self.gdb_init is not None:
            for i in self.gdb_init:
                gdb_cmd.append("-ex")
                gdb_cmd.append(i)
                gdb_cmd2.append("-ex")
                gdb_cmd2.append(i)
        if self.gdb_cmds is not None:
            for i in self.gdb_cmds:
                gdb_cmd.append("-x")
                gdb_cmd.append(i)
        if self.gdb_cmds_deb is not None:
            for i in self.gdb_cmds_deb:
                gdb_cmd2.append("-x")
                gdb_cmd2.append(i)
        self.require(gdb_cmd[0])
        self.print_gdbserver_message()
        if command == 'attach':
            self.run_server_and_client(server_cmd, gdb_attach)
        elif command == 'debug':
            # 'debug' loads via both scripts, then drops into an
            # interactive attach session.
            self.run_server_and_client(server_cmd, gdb_cmd)
            self.run_server_and_client(server_cmd, gdb_cmd2)
            self.run_server_and_client(server_cmd, gdb_attach)
``` | /content/code_sandbox/scripts/west_commands/runners/intel_cyclonev.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,161 |
```python
#! /usr/bin/env python3
#
"""Zephyr binary runner core interfaces
This provides the core ZephyrBinaryRunner class meant for public use,
as well as some other helpers for concrete runner classes.
"""
import abc
import argparse
import errno
import logging
import os
import platform
import shlex
import shutil
import signal
import subprocess
import re
from dataclasses import dataclass, field
from functools import partial
from enum import Enum
from inspect import isabstract
from typing import Dict, List, NamedTuple, NoReturn, Optional, Set, Type, \
Union
# Turn on to enable just logging the commands that would be run (at
# info rather than debug level), without actually running them. This
# can break runners that are expecting output or if one command
# depends on another, so it's just for debugging.
_DRY_RUN = False
_logger = logging.getLogger('runners')
class _DebugDummyPopen:
def terminate(self):
pass
def wait(self):
pass
# Highest TCP port NetworkPortHelper will allocate (top of the IANA
# registered-port range; higher ports are the ephemeral range).
MAX_PORT = 49151
class NetworkPortHelper:
    '''Helper class for dealing with local IP network ports.'''

    def get_unused_ports(self, starting_from):
        '''Find unused network ports, starting at given values.

        starting_from is an iterable of ports the caller would like to use.

        The return value is an iterable of ports, in the same order, using
        the given values if they were unused, or the next sequentially
        available unused port otherwise.

        Ports may be bound between this call's check and actual usage, so
        callers still need to handle errors involving returned ports.'''
        wanted = list(starting_from)
        busy = self._used_now()
        chosen = []
        for candidate in wanted:
            port = candidate
            while port in busy:
                port += 1
                if port > MAX_PORT:
                    msg = "ports above {} are in use"
                    raise ValueError(msg.format(candidate))
            # Reserve the port so later requests in this call skip it.
            busy.add(port)
            chosen.append(port)
        return chosen

    def _used_now(self):
        # Dispatch on the host OS; each handler returns a set of ints.
        handlers = {
            'Windows': self._used_now_windows,
            'Linux': self._used_now_linux,
            'Darwin': self._used_now_darwin,
        }
        return handlers[platform.system()]()

    def _used_now_windows(self):
        return self._parser_windows(['netstat', '-a', '-n', '-p', 'tcp'])

    def _used_now_linux(self):
        return self._parser_linux(['ss', '-a', '-n', '-t'])

    def _used_now_darwin(self):
        return self._parser_darwin(['netstat', '-a', '-n', '-p', 'tcp'])

    @staticmethod
    def _parser_windows(cmd):
        lines = subprocess.check_output(cmd).split(b'\r\n')
        return {int(line.split()[1].rsplit(b':', 1)[1])
                for line in lines if line.startswith(b' TCP')}

    @staticmethod
    def _parser_linux(cmd):
        lines = subprocess.check_output(cmd).splitlines()[1:]
        return {int(line.split()[3].rsplit(b':', 1)[1]) for line in lines}

    @staticmethod
    def _parser_darwin(cmd):
        lines = subprocess.check_output(cmd).split(b'\n')
        return {int(line.split()[3].rsplit(b':', 1)[1])
                for line in lines if line.startswith(b'tcp')}
class BuildConfiguration:
    '''This helper class provides access to build-time configuration.

    Configuration options can be read as if the object were a dict,
    either object['CONFIG_FOO'] or object.get('CONFIG_FOO').

    Kconfig configuration values are available (parsed from .config).'''

    # Subclasses may override this to parse a different option namespace.
    config_prefix = 'CONFIG'

    def __init__(self, build_dir: str):
        self.build_dir = build_dir
        self.options: Dict[str, Union[str, int]] = {}
        self.path = os.path.join(self.build_dir, 'zephyr', '.config')
        self._parse()

    def __contains__(self, item):
        return item in self.options

    def __getitem__(self, item):
        return self.options[item]

    def get(self, option, *args):
        return self.options.get(option, *args)

    def getboolean(self, option):
        '''If a boolean option is explicitly set to y or n,
        returns its value. Otherwise, falls back to False.
        '''
        return self.options.get(option, False)

    def _parse(self):
        # One pattern for 'PREFIX_FOO=value' lines, one for the
        # '# PREFIX_FOO is not set' form Kconfig emits for disabled bools.
        set_re = re.compile(
            f'^(?P<option>{self.config_prefix}_[A-Za-z0-9_]+)=(?P<value>.*)$')
        unset_re = re.compile(
            f'^# (?P<option>{self.config_prefix}_[A-Za-z0-9_]+) is not set$')

        with open(self.path, 'r') as cfg_file:
            for line in cfg_file:
                match = set_re.match(line)
                if match:
                    self.options[match.group('option')] = \
                        self._decode_value(match.group('value').rstrip())
                    continue
                match = unset_re.match(line)
                if match:
                    # '# CONFIG_FOO is not set' means a boolean option is false.
                    self.options[match.group('option')] = False

    @staticmethod
    def _decode_value(value):
        '''Convert a raw Kconfig value string to str, bool, or int.'''
        if value.startswith('"') and value.endswith('"'):
            # A string literal should have the quotes stripped,
            # but otherwise be left as is.
            return value[1:-1]
        if value == 'y':
            # The character 'y' is a boolean option that is set to True.
            return True
        # Neither a string nor 'y', so try to parse it as an integer,
        # honoring a 0x prefix; otherwise keep the raw string.
        try:
            return int(value, 16 if value.startswith('0x') else 10)
        except ValueError:
            return value
class SysbuildConfiguration(BuildConfiguration):
    '''This helper class provides access to sysbuild-time configuration.

    Configuration options can be read as if the object were a dict,
    either object['SB_CONFIG_FOO'] or object.get('SB_CONFIG_FOO').

    Kconfig configuration values are available (parsed from .config).'''

    config_prefix = 'SB_CONFIG'

    def _parse(self):
        # Sysbuild is optional: a missing .config simply means there is
        # nothing to parse, not an error.
        if os.path.exists(self.path):
            super()._parse()
class MissingProgram(FileNotFoundError):
    '''FileNotFoundError subclass for missing program dependencies.

    No significant changes from the parent FileNotFoundError; this is
    useful for explicitly signaling that the file in question is a
    program that some class requires to proceed.

    The filename attribute contains the missing program.'''

    def __init__(self, program):
        err = errno.ENOENT
        super().__init__(err, os.strerror(err), program)
# Every command a runner may advertise in RunnerCaps.commands;
# RunnerCaps.__post_init__ rejects anything outside this set.
_RUNNERCAPS_COMMANDS = {'flash', 'debug', 'debugserver', 'attach', 'simulate', 'robot'}
@dataclass
class RunnerCaps:
    '''This class represents a runner class's capabilities.

    Each capability is represented as an attribute with the same
    name. Flag attributes are True or False.

    Available capabilities:

    - commands: set of supported commands; default is {'flash',
      'debug', 'debugserver', 'attach', 'simulate', 'robot'}.

    - dev_id: whether the runner supports device identifiers, in the form of an
      -i, --dev-id option. This is useful when the user has multiple debuggers
      connected to a single computer, in order to select which one will be used
      with the command provided.

    - flash_addr: whether the runner supports flashing to an
      arbitrary address. Default is False. If true, the runner
      must honor the --dt-flash option.

    - erase: whether the runner supports an --erase option, which
      does a mass-erase of the entire addressable flash on the target
      before flashing. On multi-core SoCs, this may only erase portions of
      flash specific the actual target core. (This option can be useful for
      things like clearing out old settings values or other subsystem state
      that may affect the behavior of the zephyr image. It is also sometimes
      needed by SoCs which have flash-like areas that can't be sector
      erased by the underlying tool before flashing; UICR on nRF SoCs
      is one example.)

    - reset: whether the runner supports a --reset option, which
      resets the device after a flash operation is complete.

    - extload: whether the runner supports a --extload option, which
      must be given one time and is passed on to the underlying tool
      that the runner wraps.

    - tool_opt: whether the runner supports a --tool-opt (-O) option, which
      can be given multiple times and is passed on to the underlying tool
      that the runner wraps.

    - file: whether the runner supports a --file option, which specifies
      exactly the file that should be used to flash, overriding any default
      discovered in the build directory.

    - hide_load_files: whether the elf/hex/bin file arguments should be hidden.
    '''

    # See the class docstring for what each capability means.  A fresh
    # set is built per instance via the factory so instances never share
    # the default commands set.
    commands: Set[str] = field(default_factory=lambda: set(_RUNNERCAPS_COMMANDS))
    dev_id: bool = False
    flash_addr: bool = False
    erase: bool = False
    reset: bool = False
    extload: bool = False
    tool_opt: bool = False
    file: bool = False
    hide_load_files: bool = False

    def __post_init__(self):
        # Catch typos early: every advertised command must be one the
        # west flash/debug machinery actually knows about.
        if not self.commands.issubset(_RUNNERCAPS_COMMANDS):
            raise ValueError(f'{self.commands=} contains invalid command')
def _missing_cap(cls: Type['ZephyrBinaryRunner'], option: str) -> NoReturn:
# Helper function that's called when an option was given on the
# command line that corresponds to a missing capability in the
# runner class cls.
raise ValueError(f"{cls.name()} doesn't support {option} option")
class FileType(Enum):
    # Kind of binary named by RunnerConfig.file / the -t/--file-type
    # option; OTHER is the catch-all default.
    OTHER = 0
    HEX = 1
    BIN = 2
    ELF = 3
class RunnerConfig(NamedTuple):
    '''Runner execution-time configuration.

    This is a common object shared by all runners. Individual runners
    can register specific configuration options using their
    do_add_parser() hooks.
    '''
    build_dir: str                  # application build directory
    board_dir: str                  # board definition directory
    elf_file: Optional[str]         # zephyr.elf path, or None
    exe_file: Optional[str]         # zephyr.exe path, or None
    hex_file: Optional[str]         # zephyr.hex path, or None
    bin_file: Optional[str]         # zephyr.bin path, or None
    uf2_file: Optional[str]         # zephyr.uf2 path, or None
    file: Optional[str]             # binary file path (provided by the user), or None
    file_type: Optional[FileType] = FileType.OTHER  # binary file type
    gdb: Optional[str] = None       # path to a usable gdb
    openocd: Optional[str] = None   # path to a usable openocd
    # NOTE(review): this [] default is a single list object shared by every
    # instance that does not pass openocd_search explicitly; harmless only
    # as long as it is never mutated — confirm callers treat it read-only.
    openocd_search: List[str] = []  # add these paths to the openocd search path
# Accepted spellings for the yes/no value of the --dt-flash option.
_YN_CHOICES = ['Y', 'y', 'N', 'n', 'yes', 'no', 'YES', 'NO']
class _DTFlashAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
if values.lower().startswith('y'):
namespace.dt_flash = True
else:
namespace.dt_flash = False
class _ToggleAction(argparse.Action):
def __call__(self, parser, args, ignored, option):
setattr(args, self.dest, not option.startswith('--no-'))
class DeprecatedAction(argparse.Action):
    # argparse action that logs a deprecation warning naming the
    # replacement option, then stores the value normally. The _cls and
    # _replacement attributes are attached after construction by
    # depr_action() below.

    def __call__(self, parser, namespace, values, option_string=None):
        _logger.warning(f'Argument {self.option_strings[0]} is deprecated' +
                        (f' for your runner {self._cls.name()}' if self._cls is not None else '') +
                        f', use {self._replacement} instead.')
        setattr(namespace, self.dest, values)
def depr_action(*args, cls=None, replacement=None, **kwargs):
    '''Build a DeprecatedAction, recording the runner class and the
    replacement option to mention in the deprecation warning.'''
    action = DeprecatedAction(*args, **kwargs)
    action._cls = cls
    action._replacement = replacement
    return action
class ZephyrBinaryRunner(abc.ABC):
'''Abstract superclass for binary runners (flashers, debuggers).
**Note**: this class's API has changed relatively rarely since it
as added, but it is not considered a stable Zephyr API, and may change
without notice.
With some exceptions, boards supported by Zephyr must provide
generic means to be flashed (have a Zephyr firmware binary
permanently installed on the device for running) and debugged
(have a breakpoint debugger and program loader on a host
workstation attached to a running target).
This is supported by four top-level commands managed by the
Zephyr build system:
- 'flash': flash a previously configured binary to the board,
start execution on the target, then return.
- 'debug': connect to the board via a debugging protocol, program
the flash, then drop the user into a debugger interface with
symbol tables loaded from the current binary, and block until it
exits.
- 'debugserver': connect via a board-specific debugging protocol,
then reset and halt the target. Ensure the user is now able to
connect to a debug server with symbol tables loaded from the
binary.
- 'attach': connect to the board via a debugging protocol, then drop
the user into a debugger interface with symbol tables loaded from
the current binary, and block until it exits. Unlike 'debug', this
command does not program the flash.
This class provides an API for these commands. Every subclass is
called a 'runner' for short. Each runner has a name (like
'pyocd'), and declares commands it can handle (like
'flash'). Boards (like 'nrf52dk/nrf52832') declare which runner(s)
are compatible with them to the Zephyr build system, along with
information on how to configure the runner to work with the board.
The build system will then place enough information in the build
directory to create and use runners with this class's create()
method, which provides a command line argument parsing API. You
can also create runners by instantiating subclasses directly.
In order to define your own runner, you need to:
1. Define a ZephyrBinaryRunner subclass, and implement its
abstract methods. You may need to override capabilities().
2. Make sure the Python module defining your runner class is
imported, e.g. by editing this package's __init__.py (otherwise,
get_runners() won't work).
3. Give your runner's name to the Zephyr build system in your
board's board.cmake.
Additional advice:
- If you need to import any non-standard-library modules, make sure
to catch ImportError and defer complaints about it to a RuntimeError
if one is missing. This avoids affecting users that don't require your
runner, while still making it clear what went wrong to users that do
require it that don't have the necessary modules installed.
- If you need to ask the user something (e.g. using input()), do it
in your create() classmethod, not do_run(). That ensures your
__init__() really has everything it needs to call do_run(), and also
avoids calling input() when not instantiating within a command line
application.
- Use self.logger to log messages using the standard library's
logging API; your logger is named "runner.<your-runner-name()>"
For command-line invocation from the Zephyr build system, runners
define their own argparse-based interface through the common
add_parser() (and runner-specific do_add_parser() it delegates
to), and provide a way to create instances of themselves from
a RunnerConfig and parsed runner-specific arguments via create().
Runners use a variety of host tools and configuration values, the
user interface to which is abstracted by this class. Each runner
subclass should take any values it needs to execute one of these
commands in its constructor. The actual command execution is
handled in the run() method.'''
def __init__(self, cfg: RunnerConfig):
    '''Initialize core runner state.'''
    self.cfg = cfg
    '''RunnerConfig for this instance.'''
    # The logger name embeds the concrete runner's name(), e.g.
    # 'runners.pyocd', so log output identifies which runner spoke.
    self.logger = logging.getLogger('runners.{}'.format(self.name()))
    '''logging.Logger for this instance.'''
@staticmethod
def get_runners() -> List[Type['ZephyrBinaryRunner']]:
    '''Get a list of all currently defined runner classes.'''
    # NOTE(review): this actually returns a set, not a list as the
    # annotation claims — callers only iterate it, but confirm before
    # relying on list semantics.
    def inheritors(klass):
        subclasses = set()
        work = [klass]
        while work:
            parent = work.pop()
            for child in parent.__subclasses__():
                if child not in subclasses:
                    # Collect only concrete runners, but keep walking
                    # through abstract intermediates to find their
                    # subclasses.
                    if not isabstract(child):
                        subclasses.add(child)
                    work.append(child)
        return subclasses

    return inheritors(ZephyrBinaryRunner)
@classmethod
@abc.abstractmethod
def name(cls) -> str:
    '''Return this runner's user-visible name.

    When choosing a name, pick something short and lowercase,
    based on the name of the tool (like openocd, jlink, etc.) or
    the target architecture/board (like xtensa etc.).'''
    # Abstract: every concrete runner must implement this; the value is
    # also used as the logger name suffix and the west sub-command key.
@classmethod
def capabilities(cls) -> RunnerCaps:
    '''Returns a RunnerCaps representing this runner's capabilities.

    This implementation returns the default capabilities.

    Subclasses should override appropriately if needed.'''
    # Default: all commands supported, every optional capability off.
    return RunnerCaps()
@classmethod
def add_parser(cls, parser):
    '''Adds a sub-command parser for this runner.

    The given object, parser, is a sub-command parser from the
    argparse module. For more details, refer to the documentation
    for argparse.ArgumentParser.add_subparsers().

    The lone common optional argument is:

    * --dt-flash (if the runner capabilities includes flash_addr)

    Runner-specific options are added through the do_add_parser()
    hook.'''
    # Unfortunately, the parser argument's type is not documented
    # in typeshed, so we can't type annotate much here.

    # Common options that depend on runner capabilities. If a
    # capability is not supported, the option string or strings
    # are added anyway, to prevent an individual runner class from
    # using them to mean something else.
    caps = cls.capabilities()

    if caps.dev_id:
        parser.add_argument('-i', '--dev-id',
                            dest='dev_id',
                            help=cls.dev_id_help())
    else:
        # Registered but hidden; create() rejects it if actually given.
        parser.add_argument('-i', '--dev-id', help=argparse.SUPPRESS)

    if caps.flash_addr:
        parser.add_argument('--dt-flash', default='n', choices=_YN_CHOICES,
                            action=_DTFlashAction,
                            help='''If 'yes', try to use flash address
                            information from devicetree when flash
                            addresses are unknown (e.g. when flashing a .bin)''')
    else:
        parser.add_argument('--dt-flash', help=argparse.SUPPRESS)

    if caps.file:
        parser.add_argument('-f', '--file',
                            dest='file',
                            help="path to binary file")
        parser.add_argument('-t', '--file-type',
                            dest='file_type',
                            help="type of binary file")
    else:
        parser.add_argument('-f', '--file', help=argparse.SUPPRESS)
        parser.add_argument('-t', '--file-type', help=argparse.SUPPRESS)

    if caps.hide_load_files:
        parser.add_argument('--elf-file', help=argparse.SUPPRESS)
        parser.add_argument('--hex-file', help=argparse.SUPPRESS)
        parser.add_argument('--bin-file', help=argparse.SUPPRESS)
    else:
        # When the runner supports -f/--file, the per-format options
        # remain accepted but warn via DeprecatedAction.
        parser.add_argument('--elf-file',
                            metavar='FILE',
                            action=(partial(depr_action, cls=cls, replacement='-f/--file') if caps.file else None),
                            help='path to zephyr.elf' if not caps.file else 'Deprecated, use -f/--file instead.')
        parser.add_argument('--hex-file',
                            metavar='FILE',
                            action=(partial(depr_action, cls=cls, replacement='-f/--file') if caps.file else None),
                            help='path to zephyr.hex' if not caps.file else 'Deprecated, use -f/--file instead.')
        parser.add_argument('--bin-file',
                            metavar='FILE',
                            action=(partial(depr_action, cls=cls, replacement='-f/--file') if caps.file else None),
                            help='path to zephyr.bin' if not caps.file else 'Deprecated, use -f/--file instead.')

    parser.add_argument('--erase', '--no-erase', nargs=0,
                        action=_ToggleAction,
                        help=("mass erase flash before loading, or don't. "
                              "Default action depends on each specific runner."
                              if caps.erase else argparse.SUPPRESS))
    parser.add_argument('--reset', '--no-reset', nargs=0,
                        action=_ToggleAction,
                        help=("reset device after flashing, or don't. "
                              "Default action depends on each specific runner."
                              if caps.reset else argparse.SUPPRESS))
    parser.add_argument('--extload', dest='extload',
                        help=(cls.extload_help() if caps.extload
                              else argparse.SUPPRESS))
    parser.add_argument('-O', '--tool-opt', dest='tool_opt',
                        default=[], action='append',
                        help=(cls.tool_opt_help() if caps.tool_opt
                              else argparse.SUPPRESS))

    # Runner-specific options.
    cls.do_add_parser(parser)
    # Subclass hook: invoked at the end of add_parser() so each concrete
    # runner can register its own options on the shared parser.
    @classmethod
    @abc.abstractmethod
    def do_add_parser(cls, parser):
        '''Hook for adding runner-specific options.

        :param parser: argparse parser to add arguments to.
        '''
    @classmethod
    def create(cls, cfg: RunnerConfig,
               args: argparse.Namespace) -> 'ZephyrBinaryRunner':
        '''Create an instance from command-line arguments.

        - ``cfg``: runner configuration (pass to superclass __init__)
        - ``args``: arguments parsed from execution environment, as
          specified by ``add_parser()``.

        Rejects any option the concrete runner's capabilities() does not
        declare support for, then delegates construction to do_create().'''
        caps = cls.capabilities()
        # _missing_cap() (defined earlier in this file) reports an option
        # the selected runner does not support.
        if args.dev_id and not caps.dev_id:
            _missing_cap(cls, '--dev-id')
        if args.dt_flash and not caps.flash_addr:
            _missing_cap(cls, '--dt-flash')
        if args.erase and not caps.erase:
            _missing_cap(cls, '--erase')
        if args.reset and not caps.reset:
            _missing_cap(cls, '--reset')
        if args.extload and not caps.extload:
            _missing_cap(cls, '--extload')
        if args.tool_opt and not caps.tool_opt:
            _missing_cap(cls, '--tool-opt')
        if args.file and not caps.file:
            _missing_cap(cls, '--file')
        # --file-type only makes sense together with --file.
        if args.file_type and not args.file:
            raise ValueError("--file-type requires --file")
        if args.file_type and not caps.file:
            _missing_cap(cls, '--file-type')
        ret = cls.do_create(cfg, args)
        # Surface requested side-effect options in the log.
        if args.erase:
            ret.logger.info('mass erase requested')
        if args.reset:
            ret.logger.info('reset after flashing requested')
        return ret
    # Subclass hook: called by create() after capability validation.
    @classmethod
    @abc.abstractmethod
    def do_create(cls, cfg: RunnerConfig,
                  args: argparse.Namespace) -> 'ZephyrBinaryRunner':
        '''Hook for instance creation from command line arguments.'''
@staticmethod
def get_flash_address(args: argparse.Namespace,
build_conf: BuildConfiguration,
default: int = 0x0) -> int:
'''Helper method for extracting a flash address.
If args.dt_flash is true, returns the address obtained from
ZephyrBinaryRunner.flash_address_from_build_conf(build_conf).
Otherwise (when args.dt_flash is False), the default value is
returned.'''
if args.dt_flash:
return ZephyrBinaryRunner.flash_address_from_build_conf(build_conf)
else:
return default
@staticmethod
def flash_address_from_build_conf(build_conf: BuildConfiguration):
'''If CONFIG_HAS_FLASH_LOAD_OFFSET is n in build_conf,
return the CONFIG_FLASH_BASE_ADDRESS value. Otherwise, return
CONFIG_FLASH_BASE_ADDRESS + CONFIG_FLASH_LOAD_OFFSET.
'''
if build_conf.getboolean('CONFIG_HAS_FLASH_LOAD_OFFSET'):
return (build_conf['CONFIG_FLASH_BASE_ADDRESS'] +
build_conf['CONFIG_FLASH_LOAD_OFFSET'])
else:
return build_conf['CONFIG_FLASH_BASE_ADDRESS']
    def run(self, command: str, **kwargs):
        '''Runs command ('flash', 'debug', 'debugserver', 'attach').
        This is the main entry point to this runner.

        Raises ValueError when the concrete runner's capabilities() does
        not list 'command'; otherwise delegates to do_run().'''
        caps = self.capabilities()
        if command not in caps.commands:
            raise ValueError('runner {} does not implement command {}'.format(
                self.name(), command))
        self.do_run(command, **kwargs)
    # Subclass hook: run() validates the command against capabilities()
    # before delegating here.
    @abc.abstractmethod
    def do_run(self, command: str, **kwargs):
        '''Concrete runner; run() delegates to this. Implement in subclasses.
        In case of an unsupported command, raise a ValueError.'''
    @property
    def build_conf(self) -> BuildConfiguration:
        '''Get a BuildConfiguration for the build directory.'''
        # Parsed lazily on first access, then cached on the instance.
        if not hasattr(self, '_build_conf'):
            self._build_conf = BuildConfiguration(self.cfg.build_dir)
        return self._build_conf
    @property
    def sysbuild_conf(self) -> SysbuildConfiguration:
        '''Get a SysbuildConfiguration for the sysbuild directory.'''
        # The sysbuild directory is the parent of the application build
        # directory; parsed lazily and cached like build_conf.
        if not hasattr(self, '_sysbuild_conf'):
            self._sysbuild_conf = SysbuildConfiguration(os.path.dirname(self.cfg.build_dir))
        return self._sysbuild_conf
    @property
    def thread_info_enabled(self) -> bool:
        '''Returns True if self.build_conf has
        CONFIG_DEBUG_THREAD_INFO enabled.
        '''
        # Used by debug-capable runners to decide whether to turn on
        # RTOS-awareness in their debug servers.
        return self.build_conf.getboolean('CONFIG_DEBUG_THREAD_INFO')
    @classmethod
    def dev_id_help(cls) -> str:
        ''' Get the ArgParse help text for the --dev-id option.

        Subclasses may override to describe their identifier format.'''
        return '''Device identifier. Use it to select
                  which debugger, device, node or instance to
                  target when multiple ones are available or
                  connected.'''
    @classmethod
    def extload_help(cls) -> str:
        ''' Get the ArgParse help text for the --extload option.

        Only meaningful for runners whose capabilities() set extload.'''
        return '''External loader to be used by stm32cubeprogrammer
                  to program the targeted external memory.
                  The runner requires the external loader (*.stldr) filename.
                  This external loader (*.stldr) must be located within
                  STM32CubeProgrammer/bin/ExternalLoader directory.'''
    @classmethod
    def tool_opt_help(cls) -> str:
        ''' Get the ArgParse help text for the --tool-opt option.

        Subclasses may override to name the underlying tool.'''
        return '''Option to pass on to the underlying tool used
                  by this runner. This can be given multiple times;
                  the resulting arguments will be given to the tool
                  in the order they appear on the command line.'''
@staticmethod
def require(program: str, path: Optional[str] = None) -> str:
'''Require that a program is installed before proceeding.
:param program: name of the program that is required,
or path to a program binary.
:param path: PATH where to search for the program binary.
By default check on the system PATH.
If ``program`` is an absolute path to an existing program
binary, this call succeeds. Otherwise, try to find the program
by name on the system PATH or in the given PATH, if provided.
If the program can be found, its path is returned.
Otherwise, raises MissingProgram.'''
ret = shutil.which(program, path=path)
if ret is None:
raise MissingProgram(program)
return ret
    def run_server_and_client(self, server, client, **kwargs):
        '''Run a server that ignores SIGINT, and a client that handles it.
        This routine portably:
        - creates a Popen object for the ``server`` command which ignores
          SIGINT
        - runs ``client`` in a subprocess while temporarily ignoring SIGINT
        - cleans up the server after the client exits.
        - the keyword arguments, if any, will be passed down to both server and
          client subprocess calls
        It's useful to e.g. open a GDB server and client.'''
        server_proc = self.popen_ignore_int(server, **kwargs)
        try:
            self.run_client(client, **kwargs)
        finally:
            # Tear the server down even when the client exits with an error.
            server_proc.terminate()
            server_proc.wait()
    def run_client(self, client, **kwargs):
        '''Run a client that handles SIGINT.'''
        # Ignore SIGINT in this (parent) process while the client runs, so
        # Ctrl-C reaches the client; restore the old handler afterwards.
        previous = signal.signal(signal.SIGINT, signal.SIG_IGN)
        try:
            self.check_call(client, **kwargs)
        finally:
            signal.signal(signal.SIGINT, previous)
def _log_cmd(self, cmd: List[str]):
escaped = ' '.join(shlex.quote(s) for s in cmd)
if not _DRY_RUN:
self.logger.debug(escaped)
else:
self.logger.info(escaped)
def call(self, cmd: List[str], **kwargs) -> int:
'''Subclass subprocess.call() wrapper.
Subclasses should use this method to run command in a
subprocess and get its return code, rather than
using subprocess directly, to keep accurate debug logs.
'''
self._log_cmd(cmd)
if _DRY_RUN:
return 0
return subprocess.call(cmd, **kwargs)
def check_call(self, cmd: List[str], **kwargs):
'''Subclass subprocess.check_call() wrapper.
Subclasses should use this method to run command in a
subprocess and check that it executed correctly, rather than
using subprocess directly, to keep accurate debug logs.
'''
self._log_cmd(cmd)
if _DRY_RUN:
return
subprocess.check_call(cmd, **kwargs)
def check_output(self, cmd: List[str], **kwargs) -> bytes:
'''Subclass subprocess.check_output() wrapper.
Subclasses should use this method to run command in a
subprocess and check that it executed correctly, rather than
using subprocess directly, to keep accurate debug logs.
'''
self._log_cmd(cmd)
if _DRY_RUN:
return b''
return subprocess.check_output(cmd, **kwargs)
    def popen_ignore_int(self, cmd: List[str], **kwargs) -> subprocess.Popen:
        '''Spawn a child command, ensuring it ignores SIGINT.
        The returned subprocess.Popen object must be manually terminated.'''
        cflags = 0
        preexec = None
        system = platform.system()
        if system == 'Windows':
            # A new process group detaches the child from this console's
            # Ctrl-C events.
            # We can't type check this line on Unix operating systems:
            # mypy thinks the subprocess module has no such attribute.
            cflags |= subprocess.CREATE_NEW_PROCESS_GROUP # type: ignore
        elif system in {'Linux', 'Darwin'}:
            # setsid starts the child in a new session, so the terminal's
            # SIGINT is not delivered to it.
            # We can't type check this on Windows for the same reason.
            preexec = os.setsid # type: ignore
        self._log_cmd(cmd)
        if _DRY_RUN:
            return _DebugDummyPopen() # type: ignore
        return subprocess.Popen(cmd, creationflags=cflags, preexec_fn=preexec, **kwargs)
def ensure_output(self, output_type: str) -> None:
'''Ensure self.cfg has a particular output artifact.
For example, ensure_output('bin') ensures that self.cfg.bin_file
refers to an existing file. Errors out if it's missing or undefined.
:param output_type: string naming the output type
'''
output_file = getattr(self.cfg, f'{output_type}_file', None)
if output_file is None:
err = f'{output_type} file location is unknown.'
elif not os.path.isfile(output_file):
err = f'{output_file} does not exist.'
else:
return
if output_type in ('elf', 'hex', 'bin', 'uf2'):
err += f' Try enabling CONFIG_BUILD_OUTPUT_{output_type.upper()}.'
# RuntimeError avoids a stack trace saved in run_common.
raise RuntimeError(err)
``` | /content/code_sandbox/scripts/west_commands/runners/core.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 7,035 |
```python
#
'''Runner for debugging with xt-gdb.'''
from os import path
from runners.core import ZephyrBinaryRunner, RunnerCaps
class XtensaBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for xt-gdb.'''

    @classmethod
    def name(cls):
        return 'xtensa'

    @classmethod
    def capabilities(cls):
        # Debugging is the only operation this runner supports.
        return RunnerCaps(commands={'debug'})

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--xcc-tools', required=True,
                            help='path to XTensa tools')

    @classmethod
    def do_create(cls, cfg, args):
        # Override any GDB with the one provided by the XTensa tools.
        xt_gdb = path.join(args.xcc_tools, 'bin', 'xt-gdb')
        cfg.gdb = xt_gdb
        return XtensaBinaryRunner(cfg)

    def do_run(self, command, **kwargs):
        # Launch xt-gdb on the build's ELF file.
        cmd = [self.cfg.gdb, self.cfg.elf_file]
        self.require(cmd[0])
        self.check_call(cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/xtensa.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 219 |
```python
#
'''Runner for flashing with the Intel ADSP boards.'''
import argparse
import os
import sys
import re
import hashlib
import random
import shutil
from runners.core import ZephyrBinaryRunner, RunnerCaps
from zephyr_ext_common import ZEPHYR_BASE
DEFAULT_CAVSTOOL='soc/intel/intel_adsp/tools/cavstool_client.py'
class SignParamError(argparse.Action):
    'User-friendly feedback when trying to sign with west flash'
    # argparse calls __call__ when the option appears on the command line;
    # this always errors out, pointing users at "west sign" instead.
    def __call__(self, parser, namespace, values, option_string=None):
        parser.error(f'Cannot use "west flash {option_string} ..." any more. ' +
                     '"west sign" is now called from CMake, see "west sign -h"')
class IntelAdspBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for the intel ADSP boards.

    Flashing is done by shipping the signed .ri firmware to a remote host
    running cavstool_client.py and optionally streaming its log output.
    '''
    def __init__(self,
                 cfg,
                 remote_host,
                 pty,
                 tool_opt,
                 ):
        super().__init__(cfg)
        self.remote_host = remote_host
        # Signed firmware image produced by the build.
        self.bin_fw = os.path.join(cfg.build_dir, 'zephyr', 'zephyr.ri')
        self.cavstool = os.path.join(ZEPHYR_BASE, DEFAULT_CAVSTOOL)
        self.platform = os.path.basename(cfg.board_dir)
        self.pty = pty
        self.tool_opt_args = tool_opt
    @classmethod
    def name(cls):
        return 'intel_adsp'
    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'}, tool_opt=True)
    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--remote-host',
                            help='hostname of the remote targeting ADSP board')
        parser.add_argument('--pty', nargs='?', const="remote-host", type=str,
                            help=''''Capture the output of cavstool.py running on --remote-host \
                            and stream it remotely to west's standard output.''')
        # The old signing options moved to "west sign"; registering them
        # with SignParamError makes their use fail with a clear message.
        for old_sign_param in [ '--rimage-tool', '--config-dir', '--default-key', '--key']:
            parser.add_argument(old_sign_param, action=SignParamError,
                                help='do not use, "west sign" is now called from CMake, see "west sign -h"')
    @classmethod
    def tool_opt_help(cls) -> str:
        return """Additional options for run/request service tool,
        e.g. '--lock' """
    @classmethod
    def do_create(cls, cfg, args):
        return IntelAdspBinaryRunner(cfg,
                                    remote_host=args.remote_host,
                                    pty=args.pty,
                                    tool_opt=args.tool_opt,
                                    )
    def do_run(self, command, **kwargs):
        '''Flash when the board looks like an ADSP platform, else bail out.'''
        self.logger.info('Starting Intel ADSP runner')
        if re.search("adsp", self.platform):
            self.require(self.cavstool)
            self.flash(**kwargs)
        else:
            self.logger.error("No suitable platform for running")
            sys.exit(1)
    def flash(self, **kwargs):
        '''Send the signed .ri firmware to the remote host and run it.

        A random md5-based suffix is appended to the file name so repeated
        flashes do not collide on the remote side.
        '''
        hash_object = hashlib.md5(self.bin_fw.encode())
        random_str = f"{random.getrandbits(64)}".encode()
        hash_object.update(random_str)
        send_bin_fw = str(self.bin_fw + "." + hash_object.hexdigest())
        shutil.copy(self.bin_fw, send_bin_fw)
        # Copy the zephyr to target remote ADSP host and run
        self.run_cmd = ([f'{self.cavstool}','-s', f'{self.remote_host}', f'{send_bin_fw}'])
        # Add the extra tool options to run/request service tool
        if self.tool_opt_args:
            self.run_cmd = self.run_cmd + self.tool_opt_args
        self.logger.debug(f"rcmd: {self.run_cmd}")
        self.check_call(self.run_cmd)
        # If the self.pty is assigned, the log will output to stdout
        # directly. That means you don't have to execute the command:
        #
        #   cavstool_client.py -s {host}:{port} -l
        #
        # to get the result later separately.
        if self.pty is not None:
            if self.pty == 'remote-host':
                self.log_cmd = ([f'{self.cavstool}','-s', f'{self.remote_host}', '-l'])
            else:
                self.log_cmd = ([f'{self.cavstool}','-s', f'{self.pty}', '-l'])
            self.logger.debug(f"rcmd: {self.log_cmd}")
            self.check_call(self.log_cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/intel_adsp.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 975 |
```python
#
#
# pylint: disable=duplicate-code
'''Runner for openocd.'''
import subprocess
import re
from os import path
from pathlib import Path
from zephyr_ext_common import ZEPHYR_BASE
try:
from elftools.elf.elffile import ELFFile
except ImportError:
pass
from runners.core import ZephyrBinaryRunner, RunnerCaps
# Default ports for openocd's TCL RPC, telnet, and GDB servers.
DEFAULT_OPENOCD_TCL_PORT = 6333
DEFAULT_OPENOCD_TELNET_PORT = 4444
DEFAULT_OPENOCD_GDB_PORT = 3333
# Default command used to reset and halt the target before load/verify.
DEFAULT_OPENOCD_RESET_HALT_CMD = 'reset init'
# Default Tcl variable naming the target in openocd config files.
DEFAULT_OPENOCD_TARGET_HANDLE = "_TARGETNAME"
class OpenOcdBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for openocd.'''
    def __init__(self, cfg, pre_init=None, reset_halt_cmd=DEFAULT_OPENOCD_RESET_HALT_CMD,
                 pre_load=None, load_cmd=None, verify_cmd=None, post_verify=None,
                 do_verify=False, do_verify_only=False,
                 tui=None, config=None, serial=None, use_elf=None,
                 no_halt=False, no_init=False, no_targets=False,
                 tcl_port=DEFAULT_OPENOCD_TCL_PORT,
                 telnet_port=DEFAULT_OPENOCD_TELNET_PORT,
                 gdb_port=DEFAULT_OPENOCD_GDB_PORT,
                 gdb_client_port=DEFAULT_OPENOCD_GDB_PORT,
                 gdb_init=None, no_load=False,
                 target_handle=DEFAULT_OPENOCD_TARGET_HANDLE):
        super().__init__(cfg)
        if not path.exists(cfg.board_dir):
            # try to find the board support in-tree
            cfg_board_path = path.normpath(cfg.board_dir)
            _temp_path = cfg_board_path.split("boards/")[1]
            support = path.join(ZEPHYR_BASE, "boards", _temp_path, 'support')
        else:
            support = path.join(cfg.board_dir, 'support')
        # Default to the board's support/openocd.cfg when no --config given.
        if not config:
            default = path.join(support, 'openocd.cfg')
            if path.exists(default):
                config = [default]
        self.openocd_config = config
        # '-s <dir>' entries add script search paths for openocd: the
        # board support dir, each config file's dir, and cfg.openocd_search.
        search_args = []
        if path.exists(support):
            search_args.append('-s')
            search_args.append(support)
        if self.openocd_config is not None:
            for i in self.openocd_config:
                if path.exists(i) and not path.samefile(path.dirname(i), support):
                    search_args.append('-s')
                    search_args.append(path.dirname(i))
        if cfg.openocd_search is not None:
            for p in cfg.openocd_search:
                search_args.extend(['-s', p])
        self.openocd_cmd = [cfg.openocd or 'openocd'] + search_args
        # openocd doesn't cope with Windows path names, so convert
        # them to POSIX style just to be sure.
        self.elf_name = Path(cfg.elf_file).as_posix() if cfg.elf_file else None
        self.pre_init = pre_init or []
        self.reset_halt_cmd = reset_halt_cmd
        self.pre_load = pre_load or []
        self.load_cmd = load_cmd
        self.verify_cmd = verify_cmd
        self.post_verify = post_verify or []
        self.do_verify = do_verify or False
        self.do_verify_only = do_verify_only or False
        self.tcl_port = tcl_port
        self.telnet_port = telnet_port
        self.gdb_port = gdb_port
        self.gdb_client_port = gdb_client_port
        self.gdb_cmd = [cfg.gdb] if cfg.gdb else None
        # Pre-built openocd/gdb argument fragments, empty when disabled.
        self.tui_arg = ['-tui'] if tui else []
        self.halt_arg = [] if no_halt else ['-c halt']
        self.init_arg = [] if no_init else ['-c init']
        self.targets_arg = [] if no_targets else ['-c targets']
        self.serial = ['-c set _ZEPHYR_BOARD_SERIAL ' + serial] if serial else []
        self.use_elf = use_elf
        self.gdb_init = gdb_init
        self.load_arg = [] if no_load else ['-ex', 'load']
        self.target_handle = target_handle
    @classmethod
    def name(cls):
        '''Runner name as used on the west command line.'''
        return 'openocd'
    @classmethod
    def capabilities(cls):
        '''This runner supports flashing and all debug-related commands.'''
        return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'})
    @classmethod
    def do_add_parser(cls, parser):
        '''Register openocd-specific configuration, flash, and debug options.'''
        parser.add_argument('--config', action='append',
                            help='''if given, override default config file;
                            may be given multiple times''')
        parser.add_argument('--serial', default="",
                            help='if given, selects FTDI instance by its serial number, defaults to empty')
        parser.add_argument('--use-elf', default=False, action='store_true',
                            help='if given, Elf file will be used for loading instead of HEX image')
        # Options for flashing:
        parser.add_argument('--cmd-pre-init', action='append',
                            help='''Command to run before calling init;
                            may be given multiple times''')
        parser.add_argument('--cmd-reset-halt', default=DEFAULT_OPENOCD_RESET_HALT_CMD,
                            help=f'''Command to run for resetting and halting the target,
                            defaults to "{DEFAULT_OPENOCD_RESET_HALT_CMD}"''')
        parser.add_argument('--cmd-pre-load', action='append',
                            help='''Command to run before flashing;
                            may be given multiple times''')
        parser.add_argument('--cmd-load',
                            help='''Command to load/flash binary
                            (required when flashing)''')
        parser.add_argument('--cmd-verify',
                            help='''Command to verify flashed binary''')
        parser.add_argument('--cmd-post-verify', action='append',
                            help='''Command to run after verification;
                            may be given multiple times''')
        parser.add_argument('--verify', action='store_true',
                            help='if given, verify after flash')
        parser.add_argument('--verify-only', action='store_true',
                            help='if given, do verify and verify only. No flashing')
        # Options for debugging:
        parser.add_argument('--tui', default=False, action='store_true',
                            help='if given, GDB uses -tui')
        parser.add_argument('--tcl-port', default=DEFAULT_OPENOCD_TCL_PORT,
                            help='openocd TCL port, defaults to 6333')
        parser.add_argument('--telnet-port',
                            default=DEFAULT_OPENOCD_TELNET_PORT,
                            help='openocd telnet port, defaults to 4444')
        parser.add_argument('--gdb-port', default=DEFAULT_OPENOCD_GDB_PORT,
                            help='openocd gdb port, defaults to 3333')
        parser.add_argument('--gdb-client-port', default=DEFAULT_OPENOCD_GDB_PORT,
                            help='''openocd gdb client port if multiple ports come
                            up, defaults to 3333''')
        parser.add_argument('--gdb-init', action='append',
                            help='if given, add GDB init commands')
        parser.add_argument('--no-halt', action='store_true',
                            help='if given, no halt issued in gdb server cmd')
        parser.add_argument('--no-init', action='store_true',
                            help='if given, no init issued in gdb server cmd')
        parser.add_argument('--no-targets', action='store_true',
                            help='if given, no target issued in gdb server cmd')
        parser.add_argument('--no-load', action='store_true',
                            help='if given, no load issued in gdb server cmd')
        parser.add_argument('--target-handle', default=DEFAULT_OPENOCD_TARGET_HANDLE,
                            help=f'''Internal handle used in openocd targets cfg
                            files, defaults to "{DEFAULT_OPENOCD_TARGET_HANDLE}".
                            ''')
    @classmethod
    def do_create(cls, cfg, args):
        '''Build an OpenOcdBinaryRunner from parsed command line arguments.'''
        return OpenOcdBinaryRunner(
            cfg,
            pre_init=args.cmd_pre_init, reset_halt_cmd=args.cmd_reset_halt,
            pre_load=args.cmd_pre_load, load_cmd=args.cmd_load,
            verify_cmd=args.cmd_verify, post_verify=args.cmd_post_verify,
            do_verify=args.verify, do_verify_only=args.verify_only,
            tui=args.tui, config=args.config, serial=args.serial,
            use_elf=args.use_elf, no_halt=args.no_halt, no_init=args.no_init,
            no_targets=args.no_targets, tcl_port=args.tcl_port,
            telnet_port=args.telnet_port, gdb_port=args.gdb_port,
            gdb_client_port=args.gdb_client_port, gdb_init=args.gdb_init,
            no_load=args.no_load, target_handle=args.target_handle)
    def print_gdbserver_message(self):
        '''Log the GDB server port and whether thread info is usable.'''
        if not self.thread_info_enabled:
            thread_msg = '; no thread info available'
        elif self.supports_thread_info():
            thread_msg = '; thread info enabled'
        else:
            thread_msg = '; update OpenOCD software for thread info'
        self.logger.info('OpenOCD GDB server running on port '
                         f'{self.gdb_port}{thread_msg}')
# pylint: disable=R0201
def to_num(self, number):
dev_match = re.search(r"^\d*\+dev", number)
dev_version = not dev_match is None
num_match = re.search(r"^\d*", number)
num = int(num_match.group(0))
if dev_version:
num += 1
return num
    def read_version(self):
        '''Return the installed openocd version as [major, minor, rev] ints.'''
        self.require(self.openocd_cmd[0])
        # OpenOCD prints in stderr, need redirect to get output
        out = self.check_output([self.openocd_cmd[0], '--version'],
                                stderr=subprocess.STDOUT).decode()
        version_match = re.search(r"Open On-Chip Debugger (\d+.\d+.\d+)", out)
        version = version_match.group(1).split('.')
        return [self.to_num(i) for i in version]
    def supports_thread_info(self):
        '''True when the installed openocd is newer than 0.11.0.'''
        # Zephyr rtos was introduced after 0.11.0
        (major, minor, rev) = self.read_version()
        return (major, minor, rev) > (0, 11, 0)
def do_run(self, command, **kwargs):
self.require(self.openocd_cmd[0])
if globals().get('ELFFile') is None:
raise RuntimeError(
'elftools missing; please "pip3 install elftools"')
self.cfg_cmd = []
if self.openocd_config is not None:
for i in self.openocd_config:
self.cfg_cmd.append('-f')
self.cfg_cmd.append(i)
if command == 'flash' and self.use_elf:
self.do_flash_elf(**kwargs)
elif command == 'flash':
self.do_flash(**kwargs)
elif command in ('attach', 'debug'):
self.do_attach_debug(command, **kwargs)
elif command == 'load':
self.do_load(**kwargs)
else:
self.do_debugserver(**kwargs)
    def do_flash(self, **kwargs):
        '''Flash the hex image using the configured load/verify commands.'''
        self.ensure_output('hex')
        if self.load_cmd is None:
            raise ValueError('Cannot flash; load command is missing')
        if self.verify_cmd is None:
            raise ValueError('Cannot flash; verify command is missing')
        # openocd doesn't cope with Windows path names, so convert
        # them to POSIX style just to be sure.
        hex_name = Path(self.cfg.hex_file).as_posix()
        self.logger.info('Flashing file: {}'.format(hex_name))
        # Expand the user-supplied command lists into '-c <cmd>' pairs.
        pre_init_cmd = []
        pre_load_cmd = []
        post_verify_cmd = []
        for i in self.pre_init:
            pre_init_cmd.append("-c")
            pre_init_cmd.append(i)
        for i in self.pre_load:
            pre_load_cmd.append("-c")
            pre_load_cmd.append(i)
        for i in self.post_verify:
            post_verify_cmd.append("-c")
            post_verify_cmd.append(i)
        load_image = []
        if not self.do_verify_only:
            load_image = ['-c', self.reset_halt_cmd,
                          '-c', self.load_cmd + ' ' + hex_name]
        verify_image = []
        if self.do_verify or self.do_verify_only:
            verify_image = ['-c', self.reset_halt_cmd,
                            '-c', self.verify_cmd + ' ' + hex_name]
        # Assemble one openocd invocation that loads/verifies and then
        # restarts the target before shutting openocd down.
        cmd = (self.openocd_cmd + self.serial + self.cfg_cmd +
               pre_init_cmd + self.init_arg + self.targets_arg +
               pre_load_cmd + load_image +
               verify_image +
               post_verify_cmd +
               ['-c', 'reset run',
                '-c', 'shutdown'])
        self.check_call(cmd)
    def do_flash_elf(self, **kwargs):
        '''Flash using the ELF image via load_image/verify_image.'''
        if self.elf_name is None:
            # NOTE(review): message says 'debug' but this is the flash path.
            raise ValueError('Cannot debug; no .elf specified')
        # Extract entry point address from Elf to use it later with
        # "resume" command of OpenOCD.
        with open(self.elf_name, 'rb') as f:
            ep_addr = f"0x{ELFFile(f).header['e_entry']:016x}"
        pre_init_cmd = []
        for i in self.pre_init:
            pre_init_cmd.append("-c")
            pre_init_cmd.append(i)
        pre_load_cmd = []
        load_image = []
        if not self.do_verify_only:
            for i in self.pre_load:
                pre_load_cmd.append("-c")
                pre_load_cmd.append(i)
            load_image = ['-c', 'load_image ' + self.elf_name]
        verify_image = []
        post_verify_cmd = []
        if self.do_verify or self.do_verify_only:
            verify_image = ['-c', 'verify_image ' + self.elf_name]
            for i in self.post_verify:
                post_verify_cmd.append("-c")
                post_verify_cmd.append(i)
        # Resume execution from the ELF entry point, then exit openocd.
        prologue = ['-c', 'resume ' + ep_addr,
                    '-c', 'shutdown']
        cmd = (self.openocd_cmd + self.serial + self.cfg_cmd +
               pre_init_cmd + self.init_arg + self.targets_arg +
               pre_load_cmd + ['-c', self.reset_halt_cmd] +
               load_image +
               verify_image + post_verify_cmd +
               prologue)
        self.check_call(cmd)
    def do_attach_debug(self, command, **kwargs):
        '''Start an openocd GDB server and attach a GDB client to it.'''
        if self.gdb_cmd is None:
            raise ValueError('Cannot debug; no gdb specified')
        if self.elf_name is None:
            raise ValueError('Cannot debug; no .elf specified')
        pre_init_cmd = []
        for i in self.pre_init:
            pre_init_cmd.append("-c")
            pre_init_cmd.append(i)
        # Enable Zephyr RTOS awareness when thread info is available.
        if self.thread_info_enabled and self.supports_thread_info():
            pre_init_cmd.append("-c")
            rtos_command = '${} configure -rtos Zephyr'.format(self.target_handle)
            pre_init_cmd.append(rtos_command)
        server_cmd = (self.openocd_cmd + self.serial + self.cfg_cmd +
                      ['-c', 'tcl_port {}'.format(self.tcl_port),
                       '-c', 'telnet_port {}'.format(self.telnet_port),
                       '-c', 'gdb_port {}'.format(self.gdb_port)] +
                      pre_init_cmd + self.init_arg + self.targets_arg +
                      self.halt_arg)
        gdb_cmd = (self.gdb_cmd + self.tui_arg +
                   ['-ex', 'target extended-remote :{}'.format(self.gdb_client_port),
                    self.elf_name])
        # 'debug' (unlike 'attach') loads the image into the target first.
        if command == 'debug':
            gdb_cmd.extend(self.load_arg)
        if self.gdb_init is not None:
            for i in self.gdb_init:
                gdb_cmd.append("-ex")
                gdb_cmd.append(i)
        self.require(gdb_cmd[0])
        self.print_gdbserver_message()
        self.run_server_and_client(server_cmd, gdb_cmd)
    def do_debugserver(self, **kwargs):
        '''Start a standalone openocd GDB server (no client attached).'''
        pre_init_cmd = []
        for i in self.pre_init:
            pre_init_cmd.append("-c")
            pre_init_cmd.append(i)
        # Enable Zephyr RTOS awareness when thread info is available.
        if self.thread_info_enabled and self.supports_thread_info():
            pre_init_cmd.append("-c")
            rtos_command = '${} configure -rtos Zephyr'.format(self.target_handle)
            pre_init_cmd.append(rtos_command)
        cmd = (self.openocd_cmd + self.cfg_cmd +
               ['-c', 'tcl_port {}'.format(self.tcl_port),
                '-c', 'telnet_port {}'.format(self.telnet_port),
                '-c', 'gdb_port {}'.format(self.gdb_port)] +
               pre_init_cmd + self.init_arg + self.targets_arg +
               ['-c', self.reset_halt_cmd])
        self.print_gdbserver_message()
        self.check_call(cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/openocd.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,552 |
```python
#
'''Runner for flashing with nrfutil.'''
import json
import os
from pathlib import Path
import sys
import subprocess
from runners.core import _DRY_RUN
from runners.nrf_common import NrfBinaryRunner
class NrfUtilBinaryRunner(NrfBinaryRunner):
    '''Runner front-end for nrfutil.'''
    def __init__(self, cfg, family, softreset, dev_id, erase=False,
                 reset=True, tool_opt=None, force=False, recover=False):
        '''Initialize the runner; all arguments are forwarded to
        NrfBinaryRunner. ``tool_opt`` is a list of extra nrfutil options
        (None means none).'''
        # A mutable default argument ([]) would be shared across all
        # instances; use a None sentinel and create a fresh list instead.
        if tool_opt is None:
            tool_opt = []
        super().__init__(cfg, family, softreset, dev_id, erase, reset,
                         tool_opt, force, recover)
        # Queue of pending batch operations and the next operation id.
        self._ops = []
        self._op_id = 1
    @classmethod
    def name(cls):
        '''Runner name as used on the west command line.'''
        return 'nrfutil'
    @classmethod
    def tool_opt_help(cls) -> str:
        '''Help text shown for --tool-opt.'''
        return 'Additional options for nrfutil, e.g. "--log-level"'
    @classmethod
    def do_create(cls, cfg, args):
        '''Build an NrfUtilBinaryRunner from parsed command line arguments.'''
        return NrfUtilBinaryRunner(cfg, args.nrf_family, args.softreset,
                                   args.dev_id, erase=args.erase,
                                   reset=args.reset,
                                   tool_opt=args.tool_opt, force=args.force,
                                   recover=args.recover)
    def _exec(self, args):
        '''Run 'nrfutil --json device <args>' and return the parsed JSON
        objects it printed, one per output line.

        For 'x-execute-batch' invocations, progress messages are logged
        and a CalledProcessError is raised when the batch reports an
        error. In dry-run mode nothing is executed and {} is returned
        (note: real runs return a list).
        '''
        jout_all = []
        cmd = ['nrfutil', '--json', 'device'] + args
        self._log_cmd(cmd)
        if _DRY_RUN:
            return {}
        with subprocess.Popen(cmd, stdout=subprocess.PIPE) as p:
            # nrfutil emits one JSON object per line on stdout.
            for line in iter(p.stdout.readline, b''):
                jout = json.loads(line.decode(sys.getdefaultencoding()))
                jout_all.append(jout)
                if 'x-execute-batch' in args:
                    if jout['type'] == 'batch_update':
                        pld = jout['data']['data']
                        # Log each task as it starts (progress hits 0%).
                        if (
                            pld['type'] == 'task_progress' and
                            pld['data']['progress']['progressPercentage'] == 0
                        ):
                            self.logger.info(pld['data']['progress']['description'])
                    elif jout['type'] == 'batch_end' and jout['data']['error']:
                        raise subprocess.CalledProcessError(
                            jout['data']['error']['code'], cmd
                        )
        return jout_all
    def do_get_boards(self):
        '''Return serial numbers of connected devices with J-Link support.'''
        out = self._exec(['list'])
        devs = []
        # The device list arrives in the final 'task_end' message.
        for o in out:
            if o['type'] == 'task_end':
                devs = o['data']['data']['devices']
        snrs = [dev['serialNumber'] for dev in devs if dev['traits']['jlink']]
        self.logger.debug(f'Found boards: {snrs}')
        return snrs
    def do_require(self):
        '''Ensure the 'nrfutil' executable is available on PATH.'''
        self.require('nrfutil')
def _insert_op(self, op):
op['operationId'] = f'{self._op_id}'
self._op_id += 1
self._ops.append(op)
def _exec_batch(self):
# prepare the dictionary and convert to JSON
batch = json.dumps({'family': f'{self.family}',
'operations': [op for op in self._ops]},
indent=4) + '\n'
hex_dir = Path(self.hex_).parent
json_file = os.fspath(hex_dir / f'generated_nrfutil_batch.json')
with open(json_file, "w") as f:
f.write(batch)
# reset first in case an exception is thrown
self._ops = []
self._op_id = 1
self.logger.debug(f'Executing batch in: {json_file}')
self._exec(['x-execute-batch', '--batch-path', f'{json_file}',
'--serial-number', f'{self.dev_id}'])
    def do_exec_op(self, op, force=False):
        '''Queue 'op'; with 'force', execute it immediately as a
        single-operation batch. Returns True when the op was executed.'''
        self.logger.debug(f'Executing op: {op}')
        if force:
            # A forced op must not mix with previously queued ones.
            if len(self._ops) != 0:
                raise RuntimeError(f'Forced exec with {len(self._ops)} ops')
            self._insert_op(op)
            self._exec_batch()
            return True
        # Defer by default
        return False
    def flush_ops(self, force=True):
        '''Drain the inherited self.ops queue into a batch and execute it.
        No-op unless 'force' is true.'''
        if not force:
            return
        while self.ops:
            self._insert_op(self.ops.popleft())
        self._exec_batch()
``` | /content/code_sandbox/scripts/west_commands/runners/nrfutil.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 922 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.