text stringlengths 9 39.2M | dir stringlengths 25 226 | lang stringclasses 163 values | created_date timestamp[s] | updated_date timestamp[s] | repo_name stringclasses 751 values | repo_full_name stringclasses 752 values | star int64 1.01k 183k | len_tokens int64 1 18.5M |
|---|---|---|---|---|---|---|---|---|
```python
#
'''Runner for flashing with nrfjprog.'''
import subprocess
import sys
from runners.nrf_common import ErrNotAvailableBecauseProtection, ErrVerify, \
NrfBinaryRunner
# path_to_url
UnavailableOperationBecauseProtectionError = 16
VerifyError = 55
class NrfJprogBinaryRunner(NrfBinaryRunner):
    '''Runner front-end for nrfjprog.

    Translates generic nRF operations (as produced by the shared
    NrfBinaryRunner logic) into nrfjprog command-line invocations.
    '''

    def __init__(self, cfg, family, softreset, dev_id, erase=False,
                 reset=True, tool_opt=None, force=False, recover=False,
                 qspi_ini=None):
        # tool_opt previously defaulted to a shared mutable list ([]),
        # which aliases state across instances; use None as the sentinel.
        super().__init__(cfg, family, softreset, dev_id, erase, reset,
                         [] if tool_opt is None else tool_opt,
                         force, recover)
        # Optional .ini file with QSPI configuration, forwarded to
        # nrfjprog via --qspiini during 'program' operations.
        self.qspi_ini = qspi_ini

    @classmethod
    def name(cls):
        return 'nrfjprog'

    @classmethod
    def tool_opt_help(cls) -> str:
        return 'Additional options for nrfjprog, e.g. "--clockspeed"'

    @classmethod
    def do_create(cls, cfg, args):
        return NrfJprogBinaryRunner(cfg, args.nrf_family, args.softreset,
                                    args.dev_id, erase=args.erase,
                                    reset=args.reset,
                                    tool_opt=args.tool_opt, force=args.force,
                                    recover=args.recover,
                                    qspi_ini=args.qspi_ini)

    @classmethod
    def do_add_parser(cls, parser):
        super().do_add_parser(parser)
        parser.add_argument('--qspiini', required=False, dest='qspi_ini',
                            help='path to an .ini file with qspi configuration')

    def do_get_boards(self):
        '''Return serial numbers of all connected debug probes.'''
        snrs = self.check_output(['nrfjprog', '--ids'])
        return snrs.decode(sys.getdefaultencoding()).strip().splitlines()

    def do_require(self):
        self.require('nrfjprog')

    def do_exec_op(self, op, force=False):
        '''Execute one operation dict via nrfjprog.

        Raises subprocess.CalledProcessError on tool failure; nrfjprog's
        own exit codes are mapped to the shared runner error codes before
        re-raising so callers see tool-independent values.
        '''
        self.logger.debug(f'Executing op: {op}')
        # Translate the op
        families = {'NRF51_FAMILY': 'NRF51', 'NRF52_FAMILY': 'NRF52',
                    'NRF53_FAMILY': 'NRF53', 'NRF54L_FAMILY': 'NRF54L',
                    'NRF91_FAMILY': 'NRF91'}
        cores = {'NRFDL_DEVICE_CORE_APPLICATION': 'CP_APPLICATION',
                 'NRFDL_DEVICE_CORE_NETWORK': 'CP_NETWORK'}
        core_opt = ['--coprocessor', cores[op['core']]] \
                   if op.get('core') else []
        cmd = ['nrfjprog']
        _op = op['operation']
        op_type = _op['type']
        # options that are an empty dict must use "in" instead of get()
        if op_type == 'pinreset-enable':
            cmd.append('--pinresetenable')
        elif op_type == 'program':
            cmd.append('--program')
            cmd.append(_op['firmware']['file'])
            erase = _op['chip_erase_mode']
            if erase == 'ERASE_ALL':
                cmd.append('--chiperase')
            elif erase == 'ERASE_PAGES':
                cmd.append('--sectorerase')
            elif erase == 'ERASE_PAGES_INCLUDING_UICR':
                cmd.append('--sectoranduicrerase')
            elif erase == 'NO_ERASE':
                pass
            else:
                raise RuntimeError(f'Invalid erase mode: {erase}')
            if _op.get('qspi_erase_mode'):
                # In the future there might be multiple QSPI erase modes
                cmd.append('--qspisectorerase')
            if _op.get('verify'):
                # In the future there might be multiple verify modes
                cmd.append('--verify')
            if self.qspi_ini:
                cmd.append('--qspiini')
                cmd.append(self.qspi_ini)
        elif op_type == 'recover':
            cmd.append('--recover')
        elif op_type == 'reset':
            if _op['option'] == 'RESET_SYSTEM':
                cmd.append('--reset')
            if _op['option'] == 'RESET_PIN':
                cmd.append('--pinreset')
        elif op_type == 'erasepage':
            cmd.append('--erasepage')
            cmd.append(f"0x{_op['page']:08x}")
        else:
            raise RuntimeError(f'Invalid operation: {op_type}')
        try:
            self.check_call(cmd + ['-f', families[self.family]] + core_opt +
                            ['--snr', self.dev_id] + self.tool_opt)
        except subprocess.CalledProcessError as cpe:
            # Translate error codes
            if cpe.returncode == UnavailableOperationBecauseProtectionError:
                cpe.returncode = ErrNotAvailableBecauseProtection
            elif cpe.returncode == VerifyError:
                cpe.returncode = ErrVerify
            raise cpe
        return True
``` | /content/code_sandbox/scripts/west_commands/runners/nrfjprog.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,012 |
```python
'''Runner for Lauterbach TRACE32.'''
import argparse
import os
import platform
import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import List, Optional
from runners.core import BuildConfiguration, RunnerCaps, RunnerConfig, ZephyrBinaryRunner
DEFAULT_T32_CONFIG = Path('config.t32')
class TRACE32BinaryRunner(ZephyrBinaryRunner):
    '''
    Runner front-end for Lauterbach TRACE32.

    The runner is a wrapper around Lauterbach TRACE32 PowerView. It executes a Lauterbach Practice
    script (.cmm) after launching the debugger, which should be located at
    zephyr/boards/<board>/support/<command>.cmm, where <board> is the board directory and <command>
    is the name of the west runner command executed (e.g. flash or debug). Extra arguments can be
    passed to the startup script by using the command line option --startup-args.
    '''

    def __init__(self,
                 cfg: RunnerConfig,
                 t32_cfg: Path,
                 arch: str,
                 startup_args: Optional[List[str]] = None,
                 timeout: int = 60) -> None:
        # Zero-argument super() is the modern form of
        # super(TRACE32BinaryRunner, self).
        super().__init__(cfg)
        self.arch = arch
        self.t32_cfg = t32_cfg
        # Resolved lazily in do_run(), once T32_DIR and host OS are known.
        self.t32_exec: Optional[Path] = None
        self.startup_dir = Path(cfg.board_dir) / 'support'
        self.startup_args = startup_args
        self.timeout = timeout

    @classmethod
    def name(cls) -> str:
        return 'trace32'

    @classmethod
    def capabilities(cls) -> RunnerCaps:
        return RunnerCaps(commands={'flash', 'debug'})

    @classmethod
    def do_add_parser(cls, parser: argparse.ArgumentParser) -> None:
        parser.add_argument('--arch',
                            default='auto',
                            choices=('auto', 'arm', 'riscv', 'xtensa'),
                            help='Target architecture. Set to "auto" to select the architecture '
                                 'based on CONFIG_ARCH value')
        parser.add_argument('--config',
                            default=DEFAULT_T32_CONFIG,
                            type=Path,
                            help='Override TRACE32 configuration file path. Can be a relative path '
                                 'to T32_DIR environment variable, or an absolute path')
        parser.add_argument('--startup-args',
                            nargs='*',
                            help='Arguments to pass to the start-up script')
        parser.add_argument('--timeout',
                            default=60,
                            type=int,
                            help='Timeout, in seconds, of the flash operation')

    @classmethod
    def do_create(cls, cfg: RunnerConfig, args: argparse.Namespace) -> 'TRACE32BinaryRunner':
        build_conf = BuildConfiguration(cfg.build_dir)
        if args.arch == 'auto':
            # CONFIG_ARCH value is quoted in the build configuration.
            arch = build_conf.get('CONFIG_ARCH').replace('"', '')
            # there is a single binary for all ARM architectures
            arch = arch.replace('arm64', 'arm')
        else:
            arch = args.arch
        return TRACE32BinaryRunner(cfg, args.config, arch, startup_args=args.startup_args,
                                   timeout=args.timeout)

    def do_run(self, command, **kwargs) -> None:
        '''Validate the TRACE32 installation/config, then dispatch.'''
        t32_dir = os.environ.get('T32_DIR')
        if not t32_dir:
            raise RuntimeError('T32_DIR environment variable undefined')

        if platform.system() == 'Windows':
            os_name = 'windows64'
            suffix = '.exe'
        elif platform.system() == 'Linux':
            os_name = 'pc_linux64'
            suffix = ''
        else:
            raise RuntimeError('Host OS not supported by this runner')

        # Executable name encodes the target architecture, e.g. t32marm.
        self.t32_exec = Path(t32_dir) / 'bin' / os_name / f't32m{self.arch}{suffix}'
        if not self.t32_exec.exists():
            raise RuntimeError(f'Cannot find Lauterbach executable at {self.t32_exec}')

        # Relative config paths are resolved against T32_DIR.
        if not self.t32_cfg.is_absolute():
            self.t32_cfg = Path(t32_dir) / self.t32_cfg
        if not self.t32_cfg.exists():
            raise RuntimeError(f'Cannot find Lauterbach configuration at {self.t32_cfg}')

        startup_script = self.startup_dir / f'{command}.cmm'
        if not startup_script.exists():
            raise RuntimeError(f'Cannot find start-up script at {startup_script}')

        if command == 'flash':
            self.flash(**kwargs)
        elif command == 'debug':
            self.debug(**kwargs)

    def flash(self, **kwargs) -> None:
        with TemporaryDirectory(suffix='t32') as tmp_dir:
            # use a temporary config file, based on the provided configuration,
            # to hide the TRACE32 software graphical interface
            cfg_content = f'{self.t32_cfg.read_text()}\n\nSCREEN=OFF\n'
            tmp_cfg = Path(tmp_dir) / DEFAULT_T32_CONFIG.name
            tmp_cfg.write_text(cfg_content)
            cmd = self.get_launch_command('flash', cfg=tmp_cfg)
            self.logger.info(f'Launching TRACE32: {" ".join(cmd)}')
            try:
                self.check_call(cmd, timeout=self.timeout)
                self.logger.info('Finished')
            except subprocess.TimeoutExpired:
                # NOTE(review): timeout is reported but deliberately not
                # re-raised, so a hung flash exits "successfully" from
                # west's point of view — confirm this is intended.
                self.logger.error(f'Timed out after {self.timeout} seconds')

    def debug(self, **kwargs) -> None:
        cmd = self.get_launch_command('debug')
        self.logger.info(f'Launching TRACE32: {" ".join(cmd)}')
        self.check_call(cmd)

    def get_launch_command(self, command_name: str,
                           cfg: Optional[Path] = None) -> List[str]:
        '''Build the PowerView command line: -c <config>, -s <script>,
        followed by any user-supplied start-up arguments.'''
        cmd = [
            str(self.t32_exec),
            '-c', str(cfg if cfg else self.t32_cfg),
            '-s', str(self.startup_dir / f'{command_name}.cmm')
        ]
        if self.startup_args:
            cmd.extend(self.startup_args)
        return cmd
``` | /content/code_sandbox/scripts/west_commands/runners/trace32.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,244 |
```python
'''GigaDevice ISP tool (gd32isp) runner for serial boot ROM'''
from runners.core import ZephyrBinaryRunner, RunnerCaps
DEFAULT_GD32ISP_CLI = 'GD32_ISP_Console'
DEFAULT_GD32ISP_PORT = '/dev/ttyUSB0'
DEFAULT_GD32ISP_SPEED = '57600'
DEFAULT_GD32ISP_ADDR = '0x08000000'
class Gd32ispBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for gd32isp.

    Flashes a .bin image through the GigaDevice ISP console tool over a
    serial connection to the device's boot ROM.
    '''

    def __init__(self, cfg, device,
                 isp=DEFAULT_GD32ISP_CLI,
                 port=DEFAULT_GD32ISP_PORT,
                 speed=DEFAULT_GD32ISP_SPEED,
                 addr=DEFAULT_GD32ISP_ADDR):
        super().__init__(cfg)
        self.device = device
        self.isp = isp
        self.port = port
        self.speed = speed
        self.addr = addr

    @classmethod
    def name(cls):
        return 'gd32isp'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'})

    @classmethod
    def do_add_parser(cls, parser):
        # Required:
        parser.add_argument('--device', required=True,
                            help='device part number')
        # Optional:
        parser.add_argument('--isp', default=DEFAULT_GD32ISP_CLI,
                            help='path to gd32 isp console program')
        parser.add_argument('--port', default=DEFAULT_GD32ISP_PORT,
                            help='serial port to use, default is ' +
                                 str(DEFAULT_GD32ISP_PORT))
        parser.add_argument('--speed', default=DEFAULT_GD32ISP_SPEED,
                            help='serial port speed to use, default is ' +
                                 DEFAULT_GD32ISP_SPEED)
        parser.add_argument('--addr', default=DEFAULT_GD32ISP_ADDR,
                            help='flash address, default is ' +
                                 DEFAULT_GD32ISP_ADDR)

    @classmethod
    def do_create(cls, cfg, args):
        return Gd32ispBinaryRunner(cfg,
                                   device=args.device,
                                   isp=args.isp,
                                   port=args.port,
                                   speed=args.speed,
                                   addr=args.addr)

    def do_run(self, command, **kwargs):
        self.require(self.isp)
        self.ensure_output('bin')
        # Assemble the ISP console invocation option group by option group:
        # connect, serial settings, device selection, erase-all, then
        # download of the binary at the configured flash address.
        flash_cmd = [self.isp, '-c']
        flash_cmd += ['--pn', self.port]
        flash_cmd += ['--br', self.speed]
        flash_cmd += ['--sb', '1']
        flash_cmd += ['-i', self.device]
        flash_cmd += ['-e', '--all']
        flash_cmd += ['-d', '--a', self.addr]
        flash_cmd += ['--fn', self.cfg.bin_file]
        self.check_call(flash_cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/gd32isp.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 558 |
```python
# Modified 2018 Tavish Naruka <tavishnaruka@gmail.com>
#
'''Runner for flashing with Black Magic Probe.'''
# path_to_url
import glob
import os
import signal
import sys
from pathlib import Path
from runners.core import ZephyrBinaryRunner, RunnerCaps
try:
import serial.tools.list_ports
MISSING_REQUIREMENTS = False
except ImportError:
MISSING_REQUIREMENTS = True
# Default path for linux, based on the project udev.rules file.
DEFAULT_LINUX_BMP_PATH = '/dev/ttyBmpGdb'
# Interface descriptor for the GDB port as defined in the BMP firmware.
BMP_GDB_INTERFACE = 'Black Magic GDB Server'
# Product string as defined in the BMP firmware.
BMP_GDB_PRODUCT = "Black Magic Probe"
# BMP vendor and product ID.
BMP_GDB_VID = 0x1d50
BMP_GDB_PID = 0x6018
LINUX_SERIAL_GLOB = '/dev/ttyACM*'
DARWIN_SERIAL_GLOB = '/dev/cu.usbmodem*'
def blackmagicprobe_gdb_serial_linux():
    '''Guess the GDB port on Linux platforms.'''
    # The stable udev-provided device node wins when present.
    if os.path.exists(DEFAULT_LINUX_BMP_PATH):
        return DEFAULT_LINUX_BMP_PATH
    # Next, look for the BMP GDB interface via pyserial, if available.
    if not MISSING_REQUIREMENTS:
        for candidate in serial.tools.list_ports.comports():
            if candidate.interface == BMP_GDB_INTERFACE:
                return candidate.device
    # Last resort: the lexicographically first ttyACM device.
    matches = glob.glob(LINUX_SERIAL_GLOB)
    if not matches:
        raise RuntimeError(
            f'cannot find any valid port matching {LINUX_SERIAL_GLOB}')
    return min(matches)
def blackmagicprobe_gdb_serial_darwin():
    '''Guess the GDB port on Darwin platforms.'''
    # Prefer a port whose USB product description names the probe.
    if not MISSING_REQUIREMENTS:
        candidates = [
            p.device
            for p in serial.tools.list_ports.comports()
            if p.description and p.description.startswith(BMP_GDB_PRODUCT)
        ]
        if candidates:
            return min(candidates)
    # Fall back to the first usbmodem character device.
    matches = glob.glob(DARWIN_SERIAL_GLOB)
    if not matches:
        raise RuntimeError(
            f'cannot find any valid port matching {DARWIN_SERIAL_GLOB}')
    return min(matches)
def blackmagicprobe_gdb_serial_win32():
    '''Guess the GDB port on Windows platforms.'''
    # Match the probe by its USB vendor/product ID pair.
    if not MISSING_REQUIREMENTS:
        candidates = sorted(
            p.device
            for p in serial.tools.list_ports.comports()
            if (p.vid, p.pid) == (BMP_GDB_VID, BMP_GDB_PID)
        )
        if candidates:
            return candidates[0]
    # No way to guess further on Windows; assume the first COM port.
    return 'COM1'
def blackmagicprobe_gdb_serial(port):
    '''Guess the GDB port for the probe.
    Return the port to use, in order of priority:
    - the port specified manually
    - the port in the BMP_GDB_SERIAL environment variable
    - a guessed one depending on the host
    '''
    # Explicit user choice always wins.
    if port:
        return port
    env_port = os.environ.get('BMP_GDB_SERIAL')
    if env_port is not None:
        return env_port
    # Otherwise dispatch on the host OS.
    host = sys.platform
    if host.startswith('linux'):
        return blackmagicprobe_gdb_serial_linux()
    if host.startswith('darwin'):
        return blackmagicprobe_gdb_serial_darwin()
    if host.startswith('win32'):
        return blackmagicprobe_gdb_serial_win32()
    raise RuntimeError(f'unsupported platform: {host}')
class BlackMagicProbeRunner(ZephyrBinaryRunner):
    '''Runner front-end for Black Magic probe.

    All operations (flash/attach/debug) are performed by launching the
    configured GDB client with a batch of '-ex' commands that talk to
    the probe's built-in GDB server over a serial port.
    '''
    def __init__(self, cfg, gdb_serial, connect_rst=False):
        super().__init__(cfg)
        # GDB is kept as a one-element list (or None when unconfigured)
        # so command lines can be built by plain list concatenation.
        self.gdb = [cfg.gdb] if cfg.gdb else None
        # as_posix() because gdb doesn't recognize backslashes as path
        # separators for the 'load' command we execute in bmp_flash().
        #
        # path_to_url
        self.elf_file = Path(cfg.elf_file).as_posix()
        if cfg.hex_file is not None:
            self.hex_file = Path(cfg.hex_file).as_posix()
        else:
            self.hex_file = None
        # Resolve the serial port up front (manual > env var > guessed).
        self.gdb_serial = blackmagicprobe_gdb_serial(gdb_serial)
        self.logger.info(f'using GDB serial: {self.gdb_serial}')
        if connect_rst:
            # Issue both the current (connect_rst) and legacy
            # (connect_srst) monitor command spellings so either BMP
            # firmware vocabulary is accepted.
            self.connect_rst_enable_arg = [
                '-ex', "monitor connect_rst enable",
                '-ex', "monitor connect_srst enable",
            ]
            self.connect_rst_disable_arg = [
                '-ex', "monitor connect_rst disable",
                '-ex', "monitor connect_srst disable",
            ]
        else:
            self.connect_rst_enable_arg = []
            self.connect_rst_disable_arg = []
    @classmethod
    def name(cls):
        return 'blackmagicprobe'
    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash', 'debug', 'attach'})
    @classmethod
    def do_create(cls, cfg, args):
        return BlackMagicProbeRunner(cfg, args.gdb_serial, args.connect_rst)
    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--gdb-serial', help='GDB serial port')
        parser.add_argument('--connect-rst', '--connect-srst', action='store_true',
                            help='Assert SRST during connect? (default: no)')
    def bmp_flash(self, command, **kwargs):
        '''Flash the target via GDB's 'load' command.

        Prefers a signed hex image ('*.signed.hex') over the ELF when one
        exists; otherwise loads the ELF file.
        '''
        # if hex file is present and signed, use it else use elf file
        if self.hex_file:
            split = self.hex_file.split('.')
            # eg zephyr.signed.hex
            if len(split) >= 3 and split[-2] == 'signed':
                flash_file = self.hex_file
            else:
                flash_file = self.elf_file
        else:
            flash_file = self.elf_file
        if flash_file is None:
            raise ValueError('Cannot flash; elf file is missing')
        command = (self.gdb +
                   ['-ex', "set confirm off",
                    '-ex', "target extended-remote {}".format(
                        self.gdb_serial)] +
                   self.connect_rst_enable_arg +
                   ['-ex', "monitor swdp_scan",
                    '-ex', "attach 1",
                    '-ex', "load {}".format(flash_file),
                    '-ex', "kill",
                    '-ex', "quit",
                    '-silent'])
        self.check_call(command)
    def check_call_ignore_sigint(self, command):
        '''Run command with SIGINT ignored, restoring the previous handler
        afterwards, so Ctrl-C reaches GDB instead of killing this runner.'''
        previous = signal.signal(signal.SIGINT, signal.SIG_IGN)
        try:
            self.check_call(command)
        finally:
            signal.signal(signal.SIGINT, previous)
    def bmp_attach(self, command, **kwargs):
        '''Attach to a running target without resetting or loading it.'''
        if self.elf_file is None:
            command = (self.gdb +
                       ['-ex', "set confirm off",
                        '-ex', "target extended-remote {}".format(
                            self.gdb_serial)] +
                       self.connect_rst_disable_arg +
                       ['-ex', "monitor swdp_scan",
                        '-ex', "attach 1"])
        else:
            # Same command sequence plus the ELF for symbol information.
            command = (self.gdb +
                       ['-ex', "set confirm off",
                        '-ex', "target extended-remote {}".format(
                            self.gdb_serial)] +
                       self.connect_rst_disable_arg +
                       ['-ex', "monitor swdp_scan",
                        '-ex', "attach 1",
                        '-ex', "file {}".format(self.elf_file)])
        self.check_call_ignore_sigint(command)
    def bmp_debug(self, command, **kwargs):
        '''Load the ELF onto the target and start an interactive session.'''
        if self.elf_file is None:
            raise ValueError('Cannot debug; elf file is missing')
        command = (self.gdb +
                   ['-ex', "set confirm off",
                    '-ex', "target extended-remote {}".format(
                        self.gdb_serial)] +
                   self.connect_rst_enable_arg +
                   ['-ex', "monitor swdp_scan",
                    '-ex', "attach 1",
                    '-ex', "file {}".format(self.elf_file),
                    '-ex', "load {}".format(self.elf_file)])
        self.check_call_ignore_sigint(command)
    def do_run(self, command, **kwargs):
        if self.gdb is None:
            raise ValueError('Cannot execute; gdb not specified')
        self.require(self.gdb[0])
        if command == 'flash':
            self.bmp_flash(command, **kwargs)
        elif command == 'debug':
            self.bmp_debug(command, **kwargs)
        elif command == 'attach':
            self.bmp_attach(command, **kwargs)
        else:
            # NOTE(review): unknown commands fall back to flashing —
            # capabilities() should make this unreachable, but confirm
            # the fallback is intentional rather than raising.
            self.bmp_flash(command, **kwargs)
``` | /content/code_sandbox/scripts/west_commands/runners/blackmagicprobe.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,815 |
```python
#
'''Runner stub for renode-test.'''
import subprocess
from runners.core import ZephyrBinaryRunner, RunnerCaps
class RenodeRobotRunner(ZephyrBinaryRunner):
    '''Place-holder for Renode runner customizations.

    Wraps the `renode-test` tool to run Robot Framework test suites in
    Renode simulation.
    '''

    def __init__(self, cfg, args):
        super().__init__(cfg)
        self.testsuite = args.testsuite
        self.renode_robot_arg = args.renode_robot_arg
        self.renode_robot_help = args.renode_robot_help

    @classmethod
    def name(cls):
        return 'renode-robot'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'robot'}, hide_load_files=True)

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--testsuite',
                            metavar='SUITE',
                            action='append',
                            help='path to Robot test suite')
        parser.add_argument('--renode-robot-arg',
                            metavar='ARG',
                            action='append',
                            help='additional argument passed to renode-test')
        parser.add_argument('--renode-robot-help',
                            default=False,
                            action='store_true',
                            help='print all possible `renode-test` arguments')

    @classmethod
    def do_create(cls, cfg, args):
        return RenodeRobotRunner(cfg, args)

    def do_run(self, command, **kwargs):
        self.run_test(**kwargs)

    def run_test(self, **kwargs):
        '''Build and execute the renode-test command line.

        Raises subprocess.CalledProcessError if renode-test fails.
        '''
        cmd = ['renode-test']
        # Idiomatic truthiness instead of "is True"/"is not None", and
        # list.extend() instead of manual append loops.
        if self.renode_robot_help:
            cmd.append('--help')
        else:
            if self.renode_robot_arg:
                cmd.extend(self.renode_robot_arg)
            if self.testsuite:
                cmd.extend(self.testsuite)
            else:
                self.logger.error("No Robot testsuite passed to renode-test! Use the `--testsuite` argument to provide one.")
        subprocess.run(cmd, check=True)
``` | /content/code_sandbox/scripts/west_commands/runners/renode-robot.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 414 |
```python
'''Runner for probe-rs.'''
from runners.core import ZephyrBinaryRunner, RunnerCaps
class ProbeRsBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for probe-rs.

    Flashes the ELF image with `probe-rs download` and then resets the
    target with `probe-rs reset`.
    '''

    def __init__(self, cfg, chip,
                 probe_rs='probe-rs',
                 dev_id=None,
                 erase=False,
                 tool_opt=None):
        super().__init__(cfg)
        self.probe_rs = probe_rs
        self.erase = erase
        # Arguments shared by every probe-rs subcommand invocation.
        common = ['--chip', chip]
        if dev_id is not None:
            common.extend(['--probe', dev_id])
        if tool_opt is not None:
            common.extend(tool_opt)
        self.args = common
        self.elf_name = cfg.elf_file

    @classmethod
    def name(cls):
        return 'probe-rs'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'},
                          dev_id=True,
                          erase=True,
                          tool_opt=True)

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--chip', required=True,
                            help='chip name')
        parser.add_argument('--probe-rs', default='probe-rs',
                            help='path to probe-rs tool, default is probe-rs')

    @classmethod
    def dev_id_help(cls) -> str:
        return '''select a specific probe, in the form `VID:PID:<Serial>`'''

    @classmethod
    def tool_opt_help(cls) -> str:
        return '''additional options for probe-rs,
        e.g. --chip-description-path=/path/to/chip.yml'''

    @classmethod
    def do_create(cls, cfg, args):
        return ProbeRsBinaryRunner(cfg, args.chip,
                                   probe_rs=args.probe_rs,
                                   dev_id=args.dev_id,
                                   erase=args.erase,
                                   tool_opt=args.tool_opt)

    def do_run(self, command, **kwargs):
        self.require(self.probe_rs)
        if command == 'flash':
            self.do_flash(**kwargs)

    def do_flash(self, **kwargs):
        # Optional full-chip erase precedes the image in the download call.
        extra = ['--chip-erase'] if self.erase else []
        self.check_call(
            [self.probe_rs, 'download'] + self.args + extra + [self.elf_name])
        self.check_call([self.probe_rs, 'reset'] + self.args)
``` | /content/code_sandbox/scripts/west_commands/runners/probe_rs.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 498 |
```python
"""This file provides a ZephyrBinaryRunner that launches GDB and enables
flashing (running) a native application."""
import argparse
from runners.core import ZephyrBinaryRunner, RunnerCaps, RunnerConfig
DEFAULT_GDB_PORT = 3333
class NativeSimBinaryRunner(ZephyrBinaryRunner):
    """Runs the ELF binary under GDB.

    'flash' simply executes the native binary; 'debug' runs it under GDB;
    'debugserver' exposes it through gdbserver on a TCP port.
    """

    def __init__(self, cfg,
                 tui=False,
                 gdb_port=DEFAULT_GDB_PORT):
        super().__init__(cfg)
        self.gdb_port = gdb_port

        # Validate required config fields before building the GDB command
        # (the previous code set gdb_cmd = None and then raised anyway).
        if cfg.gdb is None:
            raise ValueError("The provided RunnerConfig is missing the required field 'gdb'.")
        if cfg.exe_file is None:
            raise ValueError("The provided RunnerConfig is missing the required field 'exe_file'.")

        self.gdb_cmd = [cfg.gdb] + (['-tui'] if tui else [])

    @classmethod
    def name(cls):
        return 'native'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'debug', 'debugserver', 'flash'})

    @classmethod
    def do_add_parser(cls, parser: argparse.ArgumentParser):
        parser.add_argument('--tui', default=False, action='store_true',
                            help='if given, GDB uses -tui')
        # type=int keeps args.gdb_port consistent with the int default.
        parser.add_argument('--gdb-port', default=DEFAULT_GDB_PORT, type=int,
                            help='gdb port, defaults to {}'.format(
                                DEFAULT_GDB_PORT))

    @classmethod
    def do_create(cls, cfg: RunnerConfig, args: argparse.Namespace) -> ZephyrBinaryRunner:
        return NativeSimBinaryRunner(cfg,
                                     tui=args.tui,
                                     gdb_port=args.gdb_port)

    def do_run(self, command: str, **kwargs):
        if command == 'flash':
            self.do_flash(**kwargs)
        elif command == 'debug':
            self.do_debug(**kwargs)
        elif command == 'debugserver':
            self.do_debugserver(**kwargs)
        else:
            # Unreachable given capabilities(); raise instead of
            # 'assert False', which disappears under python -O.
            raise ValueError(f'unsupported command: {command}')

    def do_flash(self, **kwargs):
        # "Flashing" a native build just means running the executable.
        cmd = [self.cfg.exe_file]
        self.check_call(cmd)

    def do_debug(self, **kwargs):
        # Clues to debug missing RunnerConfig values (in context of `west debug`):
        # build/zephyr/runners.yaml is missing `gdb` or `elf_file`.
        # board.cmake should have `board_finalize_runner_args(native)`.
        # build/CMakeCache.txt should have `CMAKE_GDB`.
        cmd = (self.gdb_cmd + ['--quiet', self.cfg.exe_file])
        self.check_call(cmd)

    def do_debugserver(self, **kwargs):
        cmd = (['gdbserver', ':{}'.format(self.gdb_port), self.cfg.exe_file])
        self.check_call(cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/native.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 612 |
```python
#
#
# Based on jlink.py
'''Runner for debugging with NXP's LinkServer.'''
import logging
import os
import shlex
import subprocess
import sys
from runners.core import ZephyrBinaryRunner, RunnerCaps
DEFAULT_LINKSERVER_EXE = 'Linkserver.exe' if sys.platform == 'win32' else 'LinkServer'
DEFAULT_LINKSERVER_GDB_PORT = 3333
DEFAULT_LINKSERVER_SEMIHOST_PORT = 3334
class LinkServerBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for NXP Linkserver'''

    def __init__(self, cfg, device, core,
                 linkserver=DEFAULT_LINKSERVER_EXE,
                 dt_flash=True, erase=True,
                 probe='#1',
                 gdb_host='',
                 gdb_port=DEFAULT_LINKSERVER_GDB_PORT,
                 semihost_port=DEFAULT_LINKSERVER_SEMIHOST_PORT,
                 override=None,
                 tui=False, tool_opt=None):
        # override/tool_opt previously used shared mutable list defaults;
        # None is now the sentinel (behavior is unchanged for callers).
        super().__init__(cfg)
        self.file = cfg.file
        self.file_type = cfg.file_type
        self.hex_name = cfg.hex_file
        self.bin_name = cfg.bin_file
        self.elf_name = cfg.elf_file
        self.gdb_cmd = cfg.gdb if cfg.gdb else None
        self.device = device
        self.core = core
        self.linkserver = linkserver
        self.dt_flash = dt_flash
        self.erase = erase
        self.probe = probe
        self.gdb_host = gdb_host
        self.gdb_port = gdb_port
        self.semihost_port = semihost_port
        self.tui_arg = ['-tui'] if tui else []
        self.override = override if override is not None else []
        self.override_cli = self._build_override_cli()
        # Each tool_opt entry may itself contain several shell words.
        self.tool_opt = []
        for opt in (tool_opt if tool_opt is not None else []):
            self.tool_opt += shlex.split(opt)

    @classmethod
    def name(cls):
        return 'linkserver'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'},
                          dev_id=True, flash_addr=True, erase=True,
                          tool_opt=True, file=True)

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--device', required=True, help='device name')
        parser.add_argument('--core', required=False, help='core of the device')
        parser.add_argument('--probe', default='#1',
                            help='interface to use (index, or serial number, default is #1')
        parser.add_argument('--tui', default=False, action='store_true',
                            help='if given, GDB uses -tui')
        parser.add_argument('--gdb-port', default=DEFAULT_LINKSERVER_GDB_PORT,
                            help='gdb port to open, defaults to {}'.format(
                                DEFAULT_LINKSERVER_GDB_PORT))
        parser.add_argument('--semihost-port', default=DEFAULT_LINKSERVER_SEMIHOST_PORT,
                            help='semihost port to open, defaults to the empty string '
                                 'and runs a gdb server')
        # keep this, we have to assume that the default 'commander' is on PATH
        parser.add_argument('--linkserver', default=DEFAULT_LINKSERVER_EXE,
                            help=f'''LinkServer executable, default is
                            {DEFAULT_LINKSERVER_EXE}''')
        # user may need to override settings.
        parser.add_argument('--override', required=False, action='append',
                            help='configuration overrides as defined by '
                                 'linkserver. Example: /device/memory/0/location=0xcafecafe')

    @classmethod
    def do_create(cls, cfg, args):
        return LinkServerBinaryRunner(cfg, args.device, args.core,
                                      linkserver=args.linkserver,
                                      dt_flash=args.dt_flash,
                                      erase=args.erase,
                                      probe=args.probe,
                                      semihost_port=args.semihost_port,
                                      gdb_port=args.gdb_port,
                                      override=args.override,
                                      tui=args.tui, tool_opt=args.tool_opt)

    @property
    def linkserver_version_str(self):
        '''LinkServer version string (e.g. "v1.3.15"), queried once and cached.'''
        if not hasattr(self, '_linkserver_version'):
            linkserver_version_cmd = [self.linkserver, "-v"]
            ls_output = self.check_output(linkserver_version_cmd)
            # BUGFIX: cache under '_linkserver_version', the attribute the
            # hasattr() guard actually tests; it previously stored
            # 'linkserver_version', so "LinkServer -v" ran on every access.
            self._linkserver_version = str(ls_output.split()[1].decode()).lower()
        return self._linkserver_version

    def do_run(self, command, **kwargs):
        self.linkserver = self.require(self.linkserver)
        self.logger.info(f'LinkServer: {self.linkserver}, version {self.linkserver_version_str}')
        if command == 'flash':
            self.flash(**kwargs)
        else:
            if self.core is not None:
                _cmd_core = ["-c", self.core]
            else:
                _cmd_core = []
            linkserver_cmd = ([self.linkserver] +
                              ["gdbserver"] +
                              ["--probe", str(self.probe)] +
                              ["--gdb-port", str(self.gdb_port)] +
                              ["--semihost-port", str(self.semihost_port)] +
                              _cmd_core +
                              self.override_cli +
                              [self.device])
            self.logger.debug(f'LinkServer cmd: + {linkserver_cmd}')
            if command in ('debug', 'attach'):
                if self.elf_name is None or not os.path.isfile(self.elf_name):
                    raise ValueError('Cannot debug; elf file required')
                gdb_cmd = ([self.gdb_cmd] +
                           self.tui_arg +
                           [self.elf_name] +
                           ['-ex', 'target remote {}:{}'.format(self.gdb_host, self.gdb_port)])
                if command == 'debug':
                    gdb_cmd += ['-ex', 'load', '-ex', 'monitor reset']
                if command == 'attach':
                    linkserver_cmd += ['--attach']
                self.run_server_and_client(linkserver_cmd, gdb_cmd)
            elif command == 'debugserver':
                if self.gdb_host:
                    raise ValueError('Cannot run debugserver with --gdb-host')
                self.check_call(linkserver_cmd)

    def do_erase(self, **kwargs):
        '''Erase the device flash with "LinkServer flash <device> erase".'''
        linkserver_cmd = ([self.linkserver, "flash"] + ["--probe", str(self.probe)] +
                          [self.device] + ["erase"])
        self.logger.debug("flash erase command = " + str(linkserver_cmd))
        self.check_call(linkserver_cmd)

    def _build_override_cli(self):
        '''Turn each override string into a "-o <override>" option pair.'''
        override_cli = []
        for ov in self.override:
            override_cli += ["-o", str(ov)]
        return override_cli

    def flash(self, **kwargs):
        linkserver_cmd = ([self.linkserver, "flash"] + ["--probe", str(self.probe)] +
                          self.override_cli + [self.device])
        self.logger.debug(f'LinkServer cmd: + {linkserver_cmd}')
        if self.erase:
            self.do_erase()
        # Use .hex or .bin, preferring .hex over .bin
        # BUGFIX: supports_hex is a method and must be called; the bare
        # attribute reference was always truthy, so the .bin fallback
        # (and its flash-address handling) was unreachable.
        if self.supports_hex() and self.hex_name is not None and os.path.isfile(self.hex_name):
            flash_cmd = (["load", self.hex_name])
        elif self.bin_name is not None and os.path.isfile(self.bin_name):
            if self.dt_flash:
                load_addr = self.flash_address_from_build_conf(self.build_conf)
            else:
                self.logger.critical("no load flash address could be found...")
                raise RuntimeError("no load flash address could be found...")
            flash_cmd = (["load", "--addr", str(load_addr), self.bin_name])
        else:
            err = 'Cannot flash; no hex ({}) or bin ({}) file found.'
            raise ValueError(err.format(self.hex_name, self.bin_name))
        # Flash the selected file
        linkserver_cmd = linkserver_cmd + flash_cmd
        self.logger.debug("flash command = " + str(linkserver_cmd))
        kwargs = {}
        if not self.logger.isEnabledFor(logging.DEBUG):
            # Older LinkServer versions print progress noise on stderr,
            # newer ones on stdout; silence the appropriate stream.
            if self.linkserver_version_str < "v1.3.15":
                kwargs['stderr'] = subprocess.DEVNULL
            else:
                kwargs['stdout'] = subprocess.DEVNULL
        self.check_call(linkserver_cmd, **kwargs)

    def supports_hex(self):
        # v1.5.30 has added flash support for Intel Hex files.
        return self.linkserver_version_str >= "v1.5.30"
``` | /content/code_sandbox/scripts/west_commands/runners/linkserver.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,760 |
```python
#
import re
import os
import time
import subprocess
from runners.core import ZephyrBinaryRunner, RunnerCaps, BuildConfiguration
class SpiBurnBinaryRunner(ZephyrBinaryRunner):
'''Runner front-end for SPI_burn.'''
def __init__(self, cfg, addr, spiburn, iceman, timeout, gdb_port, gdb_ex, erase=False):
super().__init__(cfg)
self.spiburn = spiburn
self.iceman = iceman
self.addr = addr
self.timeout = int(timeout)
self.erase = bool(erase)
self.gdb_port = gdb_port
self.gdb_ex = gdb_ex
@classmethod
def name(cls):
return 'spi_burn'
@classmethod
def capabilities(cls):
return RunnerCaps(commands={'flash', 'debug'}, erase=True, flash_addr=True)
@classmethod
def do_add_parser(cls, parser):
parser.add_argument('--addr', default='0x0',
help='start flash address to write')
parser.add_argument('--timeout', default=10,
help='ICEman connection establishing timeout in seconds')
parser.add_argument('--telink-tools-path', help='path to Telink flash tools')
parser.add_argument('--gdb-port', default='1111', help='Port to connect for gdb-client')
parser.add_argument('--gdb-ex', default='', nargs='?', help='Additional gdb commands to run')
@classmethod
def do_create(cls, cfg, args):
if args.telink_tools_path:
spiburn = f'{args.telink_tools_path}/flash/bin/SPI_burn'
iceman = f'{args.telink_tools_path}/ice/ICEman'
else:
# If telink_tools_path arg is not specified then pass to tools shall be specified in PATH
spiburn = 'SPI_burn'
iceman = 'ICEman'
# Get flash address offset
if args.dt_flash == 'y':
build_conf = BuildConfiguration(cfg.build_dir)
address = hex(cls.get_flash_address(args, build_conf) - build_conf['CONFIG_FLASH_BASE_ADDRESS'])
else:
address = args.addr
return SpiBurnBinaryRunner(cfg, address, spiburn, iceman, args.timeout, args.gdb_port, args.gdb_ex, args.erase)
def do_run(self, command, **kwargs):
self.require(self.spiburn)
# Find path to ICEman with require call
self.iceman_path = self.require(self.iceman)
if command == "flash":
self._flash()
elif command == "debug":
self._debug()
else:
self.logger.error(f'{command} not supported!')
def start_iceman(self):
# Start ICEman as background process
self.ice_process = self.popen_ignore_int(["./ICEman", '-Z', 'v5', '-l', 'aice_sdp.cfg'],
cwd=os.path.dirname(self.iceman_path),
stdout=subprocess.PIPE)
# Wait till it ready or exit by timeout
start = time.time()
while True:
out = self.ice_process.stdout.readline()
if b'ICEman is ready to use.' in out:
break
if time.time() - start > self.timeout:
raise RuntimeError("TIMEOUT: ICEman is not ready")
def stop_iceman(self):
# Kill ICEman subprocess
self.ice_process.terminate()
def _flash(self):
try:
# Start ICEman
self.start_iceman()
# Compose flash command
cmd_flash = [self.spiburn, '--addr', str(self.addr), '--image', self.cfg.bin_file]
if self.erase:
cmd_flash += ["--erase-all"]
# Run SPI burn flash tool
self.check_call(cmd_flash)
finally:
self.stop_iceman()
    def _debug(self):
        '''Run a gdb client against the gdb server provided by ICEman.'''
        try:
            # Start ICEman
            self.start_iceman()
            # Split the user-supplied "-ex <cmd> -ex <cmd>" string back into
            # argv entries; the capture group keeps the '-ex' tokens in the
            # result, and [1:] drops the leading empty string.
            gdb_ex = re.split("(-ex) ", self.gdb_ex)[1::]
            # Compose gdb command: connect to ICEman's gdb port first, then
            # run any additional user commands.
            client_cmd = [self.cfg.gdb, self.cfg.elf_file, '-ex', f'target remote :{self.gdb_port}'] + gdb_ex
            # Run gdb
            self.run_client(client_cmd)
        finally:
            self.stop_iceman()
``` | /content/code_sandbox/scripts/west_commands/runners/spi_burn.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 939 |
```python
#
'''Runner for performing program download over CANopen (DSP 302-3).'''
import argparse
import os
import time
from runners.core import ZephyrBinaryRunner, RunnerCaps
try:
import canopen
from progress.bar import Bar
MISSING_REQUIREMENTS = False
except ImportError:
MISSING_REQUIREMENTS = True
# Default Python-CAN context to use, see python-can documentation for details
DEFAULT_CAN_CONTEXT = 'default'
# Default program number
DEFAULT_PROGRAM_NUMBER = 1
# Program download buffer size in bytes
PROGRAM_DOWNLOAD_BUFFER_SIZE = 1024
# Program download chunk size in bytes
PROGRAM_DOWNLOAD_CHUNK_SIZE = PROGRAM_DOWNLOAD_BUFFER_SIZE // 2
# Default timeouts and retries
DEFAULT_TIMEOUT = 10.0 # seconds
DEFAULT_SDO_TIMEOUT = 1 # seconds
DEFAULT_SDO_RETRIES = 1
# Object dictionary indexes
H1F50_PROGRAM_DATA = 0x1F50
H1F51_PROGRAM_CTRL = 0x1F51
H1F56_PROGRAM_SWID = 0x1F56
H1F57_FLASH_STATUS = 0x1F57
# Program control commands
PROGRAM_CTRL_STOP = 0x00
PROGRAM_CTRL_START = 0x01
PROGRAM_CTRL_RESET = 0x02
PROGRAM_CTRL_CLEAR = 0x03
PROGRAM_CTRL_ZEPHYR_CONFIRM = 0x80
class ToggleAction(argparse.Action):
    '''argparse action for paired --flag/--no-flag options.

    Stores True when the option is given in its plain form and False when
    it is given with the '--no-' prefix.
    '''
    def __call__(self, parser, namespace, values, option_string=None):
        enabled = not option_string.startswith('--no-')
        setattr(namespace, self.dest, enabled)
class CANopenBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for CANopen program download (CiA 302-3).'''
    def __init__(self, cfg, dev_id, can_context=DEFAULT_CAN_CONTEXT,
                 program_number=DEFAULT_PROGRAM_NUMBER, confirm=True,
                 confirm_only=True, timeout=DEFAULT_TIMEOUT,
                 sdo_retries=DEFAULT_SDO_RETRIES, sdo_timeout=DEFAULT_SDO_TIMEOUT,
                 block_transfer=False):
        '''Create the runner.

        :param dev_id: CANopen node ID of the target.
        :param confirm: confirm the program after starting it.
        :param confirm_only: only confirm, skip the download entirely.
        :param timeout: seconds to wait for flash status / boot-up.
        '''
        if MISSING_REQUIREMENTS:
            raise RuntimeError('one or more Python dependencies were missing; '
                               "see the getting started guide for details on "
                               "how to fix")
        super().__init__(cfg)
        self.dev_id = dev_id # Only use for error checking in do_run()
        self.bin_file = cfg.bin_file
        self.confirm = confirm
        self.confirm_only = confirm_only
        self.timeout = timeout
        # All CANopen traffic goes through this helper object.
        self.downloader = CANopenProgramDownloader(logger=self.logger,
                                                   node_id=dev_id,
                                                   can_context=can_context,
                                                   program_number=program_number,
                                                   sdo_retries=sdo_retries,
                                                   sdo_timeout=sdo_timeout,
                                                   block_transfer=block_transfer)
    @classmethod
    def name(cls):
        '''Runner identifier used on the west command line.'''
        return 'canopen'
    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'}, dev_id=True, flash_addr=False)
    @classmethod
    def dev_id_help(cls) -> str:
        return 'CANopen Node ID.'
    @classmethod
    def do_add_parser(cls, parser):
        '''Register CANopen-specific command line options.'''
        # Optional:
        parser.add_argument('--node-id', dest='dev_id',
                            help=cls.dev_id_help())
        parser.add_argument('--can-context', default=DEFAULT_CAN_CONTEXT,
                            help=f'Python-CAN context to use (default: {DEFAULT_CAN_CONTEXT})')
        parser.add_argument('--program-number', type=int, default=DEFAULT_PROGRAM_NUMBER,
                            help=f'program number (default: {DEFAULT_PROGRAM_NUMBER})')
        parser.add_argument('--confirm', '--no-confirm',
                            dest='confirm', nargs=0,
                            action=ToggleAction,
                            help='confirm after starting? (default: yes)')
        parser.add_argument('--confirm-only', default=False, action='store_true',
                            help='confirm only, no program download (default: no)')
        parser.add_argument('--timeout', type=float, default=DEFAULT_TIMEOUT,
                            help=f'Timeout in seconds (default: {DEFAULT_TIMEOUT})')
        parser.add_argument('--sdo-retries', type=int, default=DEFAULT_SDO_RETRIES,
                            help=f'CANopen SDO request retries (default: {DEFAULT_SDO_RETRIES})')
        parser.add_argument('--sdo-timeout', type=float, default=DEFAULT_SDO_TIMEOUT,
                            help=f'''CANopen SDO response timeout in seconds
                            (default: {DEFAULT_SDO_TIMEOUT})''')
        parser.add_argument('--block-transfer', default=False, action='store_true',
                            help='Use SDO block transfers (experimental, default: no)')
        parser.set_defaults(confirm=True)
    @classmethod
    def do_create(cls, cfg, args):
        '''Instantiate the runner from parsed arguments.'''
        return CANopenBinaryRunner(cfg, int(args.dev_id),
                                   can_context=args.can_context,
                                   program_number=args.program_number,
                                   confirm=args.confirm,
                                   confirm_only=args.confirm_only,
                                   timeout=args.timeout,
                                   sdo_retries=args.sdo_retries,
                                   sdo_timeout=args.sdo_timeout,
                                   block_transfer=args.block_transfer)
    def do_run(self, command, **kwargs):
        if not self.dev_id:
            raise RuntimeError('Please specify a CANopen node ID with the '
                               '-i/--dev-id or --node-id command-line switch.')
        if command == 'flash':
            self.flash(**kwargs)
    def flash(self, **kwargs):
        '''Download program to flash over CANopen.

        Sequence: check flash status (read SWID if OK), enter
        pre-operational, optionally just confirm, otherwise stop and clear
        the old program, download the new one, verify flash status, start
        it, wait for boot-up, and optionally confirm. The step order
        follows the program-control state machine and must not be changed.
        '''
        self.ensure_output('bin')
        self.logger.info('Using Node ID %d, program number %d',
                         self.downloader.node_id,
                         self.downloader.program_number)
        self.downloader.connect()
        status = self.downloader.wait_for_flash_status_ok(self.timeout)
        if status == 0:
            self.downloader.swid()
        else:
            self.logger.warning('Flash status 0x{:02x}, '
                                'skipping software identification'.format(status))
        self.downloader.enter_pre_operational()
        if self.confirm_only:
            # Confirm-only mode: no download, just mark the running program
            # as confirmed and disconnect.
            self.downloader.zephyr_confirm_program()
            self.downloader.disconnect()
            return
        if self.bin_file is None:
            raise ValueError('Cannot download program; bin_file is missing')
        self.downloader.stop_program()
        self.downloader.clear_program()
        self.downloader.wait_for_flash_status_ok(self.timeout)
        self.downloader.download(self.bin_file)
        status = self.downloader.wait_for_flash_status_ok(self.timeout)
        if status != 0:
            raise ValueError('Program download failed: '
                             'flash status 0x{:02x}'.format(status))
        self.downloader.swid()
        self.downloader.start_program()
        self.downloader.wait_for_bootup(self.timeout)
        self.downloader.swid()
        if self.confirm:
            self.downloader.enter_pre_operational()
            self.downloader.zephyr_confirm_program()
        self.downloader.disconnect()
class CANopenProgramDownloader(object):
    '''CANopen program downloader.

    Wraps a python-canopen Network/Node pair and exposes the program
    download protocol objects (0x1F50..0x1F57) as simple methods. All
    low-level failures are re-raised as ValueError with the original
    exception chained as the cause.
    '''
    def __init__(self, logger, node_id, can_context=DEFAULT_CAN_CONTEXT,
                 program_number=DEFAULT_PROGRAM_NUMBER,
                 sdo_retries=DEFAULT_SDO_RETRIES, sdo_timeout=DEFAULT_SDO_TIMEOUT,
                 block_transfer=False):
        '''Create the downloader.

        :param logger: logger for progress/diagnostic messages.
        :param node_id: CANopen node ID of the target.
        :param program_number: subindex into the 0x1F5x arrays.
        '''
        super().__init__()
        self.logger = logger
        self.node_id = node_id
        self.can_context = can_context
        self.program_number = program_number
        self.network = canopen.Network()
        self.node = self.network.add_node(self.node_id,
                                          self.create_object_dictionary())
        # SDO shortcuts for the per-program subindexes.
        self.data_sdo = self.node.sdo[H1F50_PROGRAM_DATA][self.program_number]
        self.ctrl_sdo = self.node.sdo[H1F51_PROGRAM_CTRL][self.program_number]
        self.swid_sdo = self.node.sdo[H1F56_PROGRAM_SWID][self.program_number]
        self.flash_sdo = self.node.sdo[H1F57_FLASH_STATUS][self.program_number]
        self.node.sdo.MAX_RETRIES = sdo_retries
        self.node.sdo.RESPONSE_TIMEOUT = sdo_timeout
        self.block_transfer = block_transfer
    def connect(self):
        '''Connect to CAN network'''
        try:
            self.network.connect(context=self.can_context)
        except Exception as err:
            # Was a bare 'except:', which also swallowed KeyboardInterrupt
            # and hid the underlying cause.
            raise ValueError('Unable to connect to CAN network') from err
    def disconnect(self):
        '''Disconnect from CAN network'''
        self.network.disconnect()
    def enter_pre_operational(self):
        '''Enter pre-operational NMT state'''
        self.logger.info("Entering pre-operational mode")
        try:
            self.node.nmt.state = 'PRE-OPERATIONAL'
        except Exception as err:
            raise ValueError('Failed to enter pre-operational mode') from err
    def _ctrl_program(self, cmd):
        '''Write program control command to CANopen object dictionary (0x1f51)'''
        try:
            self.ctrl_sdo.raw = cmd
        except Exception as err:
            raise ValueError('Unable to write control command 0x{:02x}'.format(cmd)) from err
    def stop_program(self):
        '''Write stop control command to CANopen object dictionary (0x1f51)'''
        self.logger.info('Stopping program')
        self._ctrl_program(PROGRAM_CTRL_STOP)
    def start_program(self):
        '''Write start control command to CANopen object dictionary (0x1f51)'''
        self.logger.info('Starting program')
        self._ctrl_program(PROGRAM_CTRL_START)
    def clear_program(self):
        '''Write clear control command to CANopen object dictionary (0x1f51)'''
        self.logger.info('Clearing program')
        self._ctrl_program(PROGRAM_CTRL_CLEAR)
    def zephyr_confirm_program(self):
        '''Write confirm control command to CANopen object dictionary (0x1f51)'''
        self.logger.info('Confirming program')
        self._ctrl_program(PROGRAM_CTRL_ZEPHYR_CONFIRM)
    def swid(self):
        '''Read software identification from CANopen object dictionary (0x1f56)'''
        try:
            swid = self.swid_sdo.raw
        except Exception as err:
            raise ValueError('Failed to read software identification') from err
        self.logger.info('Program software identification: 0x{:08x}'.format(swid))
        return swid
    def flash_status(self):
        '''Read flash status identification'''
        try:
            status = self.flash_sdo.raw
        except Exception as err:
            raise ValueError('Failed to read flash status identification') from err
        return status
    def download(self, bin_file):
        '''Download program to CANopen object dictionary (0x1f50)'''
        self.logger.info('Downloading program: %s', bin_file)
        infile = None
        outfile = None
        progress = None
        try:
            size = os.path.getsize(bin_file)
            infile = open(bin_file, 'rb')
            outfile = self.data_sdo.open('wb', buffering=PROGRAM_DOWNLOAD_BUFFER_SIZE,
                                         size=size, block_transfer=self.block_transfer)
            progress = Bar('%(percent)d%%', max=size, suffix='%(index)d/%(max)dB')
            while True:
                chunk = infile.read(PROGRAM_DOWNLOAD_CHUNK_SIZE)
                if not chunk:
                    break
                outfile.write(chunk)
                progress.next(n=len(chunk))
        except Exception as err:
            raise ValueError('Failed to download program') from err
        finally:
            # Guard each cleanup: the original code referenced these names
            # unconditionally, raising UnboundLocalError (and masking the
            # real error) when setup failed before they were assigned.
            if progress is not None:
                progress.finish()
            if infile is not None:
                infile.close()
            if outfile is not None:
                outfile.close()
    def wait_for_bootup(self, timeout=DEFAULT_TIMEOUT):
        '''Wait for boot-up message reception'''
        self.logger.info('Waiting for boot-up message...')
        try:
            self.node.nmt.wait_for_bootup(timeout=timeout)
        except Exception as err:
            raise ValueError('Timeout waiting for boot-up message') from err
    def wait_for_flash_status_ok(self, timeout=DEFAULT_TIMEOUT):
        '''Poll flash status (0x1f57) until it is 0 or *timeout* elapses.

        Returns the last status read (0 on success). Pacing between polls
        is provided by the SDO response timeout itself.
        '''
        self.logger.info('Waiting for flash status ok')
        end_time = time.time() + timeout
        while True:
            status = self.flash_status()
            if status == 0 or time.time() > end_time:
                return status
    @staticmethod
    def create_object_dictionary():
        '''Create a synthetic CANopen object dictionary for program download'''
        objdict = canopen.objectdictionary.ObjectDictionary()
        # (index, name, data type) for each program-download object.
        # NOTE(review): only subindex 1 is created for each array; program
        # numbers other than 1 presumably need more subindexes — confirm.
        entries = (
            (0x1f50, 'Program data', canopen.objectdictionary.DOMAIN),
            (0x1f51, 'Program control', canopen.objectdictionary.UNSIGNED8),
            (0x1f56, 'Program software ID', canopen.objectdictionary.UNSIGNED32),
            (0x1f57, 'Flash error ID', canopen.objectdictionary.UNSIGNED32),
        )
        for index, obj_name, data_type in entries:
            array = canopen.objectdictionary.Array(obj_name, index)
            member = canopen.objectdictionary.Variable('', index, subindex=1)
            member.data_type = data_type
            array.add_member(member)
            objdict.add_object(array)
        return objdict
``` | /content/code_sandbox/scripts/west_commands/runners/canopen_program.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,787 |
```python
#
'''Runner for flashing with stm32flash.'''
from os import path
import platform
from runners.core import ZephyrBinaryRunner, RunnerCaps
DEFAULT_DEVICE = '/dev/ttyUSB0'
if platform.system() == 'Darwin':
DEFAULT_DEVICE = '/dev/tty.SLAB_USBtoUART'
class Stm32flashBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for stm32flash.'''
    def __init__(self, cfg, device, action='write', baud=57600,
                 force_binary=False, start_addr=0, exec_addr=None,
                 serial_mode='8e1', reset=False, verify=False):
        '''Store the stm32flash invocation parameters; see do_add_parser()
        for the meaning of each option.'''
        super().__init__(cfg)
        self.device = device
        self.action = action
        self.baud = baud
        self.force_binary = force_binary
        self.start_addr = start_addr
        self.exec_addr = exec_addr
        self.serial_mode = serial_mode
        self.reset = reset
        self.verify = verify
    @classmethod
    def name(cls):
        return 'stm32flash'
    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'}, reset=True)
    @classmethod
    def do_add_parser(cls, parser):
        '''Register stm32flash specific command line options.'''
        # required argument(s)
        # none for now
        # optional argument(s)
        parser.add_argument('--device', default=DEFAULT_DEVICE, required=False,
                            help='serial port to flash, default \'' + DEFAULT_DEVICE + '\'')
        parser.add_argument('--action', default='write', required=False,
                            choices=['erase', 'info', 'start', 'write'],
                            help='erase / get device info / start execution / write flash')
        parser.add_argument('--baud-rate', default='57600', required=False,
                            choices=['1200', '1800', '2400', '4800', '9600', '19200',
                                     '38400', '57600', '115200', '230400', '256000', '460800',
                                     '500000', '576000', '921600', '1000000', '1500000', '2000000'],
                            help='serial baud rate, default \'57600\'')
        parser.add_argument('--force-binary', required=False, action='store_true',
                            help='force the binary parser')
        parser.add_argument('--start-addr', default=0, required=False,
                            help='specify start address for write operation, default \'0\'')
        parser.add_argument('--execution-addr', default=None, required=False,
                            help='start execution at specified address, default \'0\' \
                            which means start of flash')
        parser.add_argument('--serial-mode', default='8e1', required=False,
                            help='serial port mode, default \'8e1\'')
        parser.add_argument('--verify', default=False, required=False, action='store_true',
                            help='verify writes, default False')
        parser.set_defaults(reset=False)
    @classmethod
    def do_create(cls, cfg, args):
        '''Instantiate the runner from parsed arguments.'''
        return Stm32flashBinaryRunner(cfg, device=args.device, action=args.action,
                                      baud=args.baud_rate, force_binary=args.force_binary,
                                      start_addr=args.start_addr, exec_addr=args.execution_addr,
                                      serial_mode=args.serial_mode, reset=args.reset, verify=args.verify)
    def do_run(self, command, **kwargs):
        '''Compose and run the stm32flash command for self.action.'''
        self.require('stm32flash')
        self.ensure_output('bin')
        bin_name = self.cfg.bin_file
        bin_size = path.getsize(bin_name)
        cmd_flash = ['stm32flash', '-b', self.baud,
                     '-m', self.serial_mode]
        action = self.action.lower()
        if action == 'info':
            # show device information and exit
            msg_text = "get device info from {}".format(self.device)
        elif action == 'erase':
            # erase flash
            # Round the binary size up to the next 4 KiB page boundary.
            # NOTE(review): when bin_size is already page-aligned this adds
            # a full extra page (same behavior as the commented formula).
            #size_aligned = (int(bin_size) >> 12) + 1 << 12
            size_aligned = (int(bin_size) & 0xfffff000) + 4096
            # NOTE(review): message says 'bit' but size_aligned is in bytes.
            msg_text = "erase {} bit starting at {}".format(size_aligned, self.start_addr)
            cmd_flash.extend([
                '-S', str(self.start_addr) + ":" + str(size_aligned), '-o'])
        elif action == 'start':
            # start execution
            msg_text = "start code execution at {}".format(self.exec_addr)
            if self.exec_addr:
                # NOTE(review): exec_addr comes from argparse as a string, so
                # the '== 0' comparison never matches; only '0x0' (any case)
                # triggers the "(flash start)" note.
                if self.exec_addr == 0 or self.exec_addr.lower() == '0x0':
                    msg_text += " (flash start)"
            else:
                self.exec_addr = 0
            cmd_flash.extend([
                '-g', str(self.exec_addr)])
        elif action == 'write':
            # flash binary file
            msg_text = "write {} bytes starting at {}".format(bin_size, self.start_addr)
            cmd_flash.extend([
                '-S', str(self.start_addr) + ":" + str(bin_size),
                '-w', bin_name])
            if self.exec_addr:
                cmd_flash.extend(['-g', self.exec_addr])
            if self.force_binary:
                cmd_flash.extend(['-f'])
            if self.reset:
                cmd_flash.extend(['-R'])
            if self.verify:
                cmd_flash.extend(['-v'])
        else:
            msg_text = "invalid action \'{}\' passed!".format(action)
            self.logger.error('Invalid action \'{}\' passed!'.format(action))
            return -1
        # The serial device is always the last argument.
        cmd_flash.extend([self.device])
        self.logger.info("Board: " + msg_text)
        self.check_call(cmd_flash)
        self.logger.info('Board: finished \'{}\' .'.format(action))
``` | /content/code_sandbox/scripts/west_commands/runners/stm32flash.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,180 |
```python
#
# Based on J-Link runner
"""
Runner that implements flashing with SiLabs Simplicity Commander binary tool.
See SiLabs UG162: "Simplicity Commander Reference Guide" for more info.
"""
import os
import shlex
from runners.core import ZephyrBinaryRunner, RunnerCaps, FileType
DEFAULT_APP = 'commander'
class SiLabsCommanderBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for SiLabs Simplicity Commander ("commander").'''

    def __init__(self, cfg, device, dev_id, commander, dt_flash, erase, speed, tool_opt):
        super().__init__(cfg)
        self.file = cfg.file
        self.file_type = cfg.file_type
        self.hex_name = cfg.hex_file
        self.bin_name = cfg.bin_file
        self.elf_name = cfg.elf_file
        self.device = device
        self.dev_id = dev_id
        self.commander = commander
        self.dt_flash = dt_flash
        self.erase = erase
        self.speed = speed
        # Flatten the whitespace-separated option strings into one argv list.
        self.tool_opt = [tok for opt in tool_opt for tok in shlex.split(opt)]

    @classmethod
    def name(cls):
        return 'silabs_commander'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'},
                          dev_id=True, flash_addr=True, erase=True,
                          tool_opt=True, file=True)

    @classmethod
    def dev_id_help(cls) -> str:
        return '''Device identifier. Use it to select the J-Link Serial Number
           of the device connected over USB.'''

    @classmethod
    def tool_opt_help(cls) -> str:
        return "Additional options for Simplicity Commander, e.g. '--noreset'"

    @classmethod
    def do_add_parser(cls, parser):
        # Required:
        parser.add_argument('--device', required=True,
                            help='device part number')
        # Optional:
        parser.add_argument('--commander', default=DEFAULT_APP,
                            help='path to Simplicity Commander executable')
        parser.add_argument('--speed', default=None,
                            help='JTAG/SWD speed to use')

    @classmethod
    def do_create(cls, cfg, args):
        return SiLabsCommanderBinaryRunner(
            cfg, args.device,
            dev_id=args.dev_id,
            commander=args.commander,
            dt_flash=args.dt_flash,
            erase=args.erase,
            speed=args.speed,
            tool_opt=args.tool_opt)

    def do_run(self, command, **kwargs):
        '''Select the artifact to flash and invoke "commander flash".'''
        self.require(self.commander)

        options = ['--device', self.device]
        if self.erase:
            options.append('--masserase')
        if self.dev_id:
            options += ['--serialno', self.dev_id]
        if self.speed is not None:
            options += ['--speed', self.speed]

        # Base address, only used for raw binary images.
        if self.dt_flash:
            flash_addr = self.flash_address_from_build_conf(self.build_conf)
        else:
            flash_addr = 0

        if self.file is not None:
            # use file provided by the user
            if not os.path.isfile(self.file):
                raise ValueError(f'Cannot flash; file ({self.file}) not found')
            flash_file = self.file
            if self.file_type == FileType.HEX:
                flash_args = [flash_file]
            elif self.file_type == FileType.BIN:
                flash_args = ['--binary', '--address', f'0x{flash_addr:x}', flash_file]
            else:
                raise ValueError('Cannot flash; this runner only supports hex and bin files')
        # otherwise use the buildsystem artifacts, preferring .hex over .bin
        elif self.hex_name is not None and os.path.isfile(self.hex_name):
            flash_file = self.hex_name
            flash_args = [flash_file]
        elif self.bin_name is not None and os.path.isfile(self.bin_name):
            flash_file = self.bin_name
            flash_args = ['--binary', '--address', f'0x{flash_addr:x}', flash_file]
        else:
            raise ValueError(f'Cannot flash; no hex ({self.hex_name}) or bin ({self.bin_name}) files found.')

        args = [self.commander, 'flash'] + options + self.tool_opt + flash_args
        self.logger.info('Flashing file: {}'.format(flash_file))
        self.check_call(args)
```python
#
'''Runner stub for Renode.'''
import subprocess
from runners.core import ZephyrBinaryRunner, RunnerCaps
class RenodeRunner(ZephyrBinaryRunner):
    '''Place-holder for Renode runner customizations.'''

    def __init__(self, cfg, args):
        super().__init__(cfg)
        self.renode_arg = args.renode_arg
        self.renode_command = args.renode_command
        self.renode_help = args.renode_help

    @classmethod
    def name(cls):
        return 'renode'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'simulate'}, hide_load_files=True)

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--renode-arg',
                            metavar='ARG',
                            action='append',
                            help='additional argument passed to Renode; `--help` will print all possible arguments')
        parser.add_argument('--renode-command',
                            metavar='COMMAND',
                            action='append',
                            help='additional command passed to Renode\'s simulation')
        parser.add_argument('--renode-help',
                            default=False,
                            action='store_true',
                            help='print all possible `Renode` arguments')

    @classmethod
    def do_create(cls, cfg, args):
        return RenodeRunner(cfg, args)

    def do_run(self, command, **kwargs):
        self.run_test(**kwargs)

    def run_test(self, **kwargs):
        '''Assemble the Renode command line and run it to completion.'''
        invocation = ['renode']
        if self.renode_help:
            # --renode-help short-circuits everything else.
            invocation.append('--help')
        else:
            invocation.extend(self.renode_arg or [])
            for sim_cmd in self.renode_command or []:
                invocation.extend(['-e', sim_cmd])
        subprocess.run(invocation, check=True)
``` | /content/code_sandbox/scripts/west_commands/runners/renode.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 389 |
```python
#
'''Runners for Synopsys Metaware Debugger(mdb).'''
import shutil
import os
from os import path
from runners.core import ZephyrBinaryRunner, RunnerCaps
# normally we should create class with common functionality inherited from
# ZephyrBinaryRunner and inherit MdbNsimBinaryRunner and MdbHwBinaryRunner
# from it. However as we do lookup for runners with
# ZephyrBinaryRunner.__subclasses__() such sub-sub-classes won't be found.
# So, we move all common functionality to helper functions instead.
def is_simulation_run(mdb_runner):
    '''True when the runner targets the nSIM simulator rather than hardware.'''
    # An empty nsim_args string denotes a hardware target.
    return bool(mdb_runner.nsim_args)
def is_hostlink_used(mdb_runner):
    '''True when the build enables CONFIG_UART_HOSTLINK, i.e. console IO is
    serviced through the attached MDB process.'''
    return mdb_runner.build_conf.getboolean('CONFIG_UART_HOSTLINK')
def is_flash_cmd_need_exit_immediately(mdb_runner):
    '''Whether MDB may quit right after launching the program.

    nSIM runs need MDB alive to host the simulator, and hostlink builds
    need it attached to service hostlink IO; only plain hardware targets
    can be left running with MDB gone.
    '''
    return not (is_simulation_run(mdb_runner) or is_hostlink_used(mdb_runner))
def smp_core_order(mdb_runner, id):
    '''Map a zero-based core index to the MDB start order for the target.'''
    if not is_simulation_run(mdb_runner):
        # HW targets: start the primary core last (reversed order) so that
        # ARConnect initialization does not interfere with secondary-core
        # startup.
        return mdb_runner.cores - 1 - id
    # Simulation targets: direct order (core 0 first), keeping arcnum
    # (core ID) aligned with ARConnect ID in instruction traces.
    return id
def mdb_do_run(mdb_runner, command):
    '''Compose and run the mdb64 command line for 'flash' or 'debug'.

    Shared by MdbNsimBinaryRunner and MdbHwBinaryRunner (see the comment
    above about why plain functions are used instead of a base class).
    '''
    commander = "mdb64"
    mdb_runner.require(commander)
    mdb_basic_options = ['-nooptions', '-nogoifmain', '-toggle=include_local_symbols=1']
    # remove previous .sc.project folder which has temporary settings
    # for MDB. This is useful for troubleshooting situations with
    # unexpected behavior of the debugger
    mdb_cfg_dir = path.join(mdb_runner.build_dir, '.sc.project')
    if path.exists(mdb_cfg_dir):
        shutil.rmtree(mdb_cfg_dir)
    # nsim
    if is_simulation_run(mdb_runner):
        mdb_target = ['-nsim', '@' + mdb_runner.nsim_args]
    # hardware target
    else:
        if mdb_runner.jtag == 'digilent':
            mdb_target = ['-digilent']
            if mdb_runner.dig_device: mdb_target += [mdb_runner.dig_device]
        else:
            # \todo: add support of other debuggers
            raise ValueError('unsupported jtag adapter {}'.format(mdb_runner.jtag))
    # NOTE(review): 'mdb_run' is only assigned for 'flash'/'debug'; any other
    # command would raise NameError below — confirm callers never pass one.
    if command == 'flash':
        if is_flash_cmd_need_exit_immediately(mdb_runner):
            mdb_run = ['-run', '-cmd=-nowaitq run', '-cmd=quit', '-cl']
        else:
            mdb_run = ['-run', '-cl']
    elif command == 'debug':
        # use mdb gui to debug
        mdb_run = ['-OKN']
    if mdb_runner.cores == 1:
        # single core's mdb command is different with multicores
        mdb_cmd = [commander] + mdb_basic_options + mdb_target + mdb_run + [mdb_runner.elf_name]
    elif 1 < mdb_runner.cores <= 12:
        mdb_multifiles = '-multifiles='
        for i in range(mdb_runner.cores):
            mdb_sub_cmd = [commander] + ['-pset={}'.format(i + 1), '-psetname=core{}'.format(i)]
            # -prop=download=2 is used for SMP application debug, only the 1st core
            # will download the shared image.
            if i > 0: mdb_sub_cmd += ['-prop=download=2']
            mdb_sub_cmd += mdb_basic_options + mdb_target + [mdb_runner.elf_name]
            mdb_runner.check_call(mdb_sub_cmd, cwd=mdb_runner.build_dir)
            mdb_multifiles += ('core{}' if i == 0 else ',core{}').format(smp_core_order(mdb_runner, i))
        # to enable multi-core aware mode for use with the MetaWare debugger,
        # need to set the NSIM_MULTICORE environment variable to a non-zero value
        if is_simulation_run(mdb_runner):
            os.environ["NSIM_MULTICORE"] = '1'
        mdb_cmd = [commander] + [mdb_multifiles] + mdb_run
    else:
        raise ValueError('unsupported cores {}'.format(mdb_runner.cores))
    mdb_runner.call(mdb_cmd, cwd=mdb_runner.build_dir)
class MdbNsimBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for nSIM via mdb.'''

    def __init__(self, cfg, cores=1, nsim_args=''):
        super().__init__(cfg)
        self.jtag = ''
        self.cores = int(cores)
        # Resolve the nSIM props file relative to the board support dir.
        if nsim_args:
            self.nsim_args = path.join(cfg.board_dir, 'support', nsim_args)
        else:
            self.nsim_args = ''
        self.elf_name = cfg.elf_file
        self.build_dir = cfg.build_dir
        # Simulated targets have no debug probe.
        self.dig_device = ''

    @classmethod
    def name(cls):
        return 'mdb-nsim'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash', 'debug'})

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--cores', default=1,
                            help='''choose the cores that target has, e.g.
                            --cores=1''')
        parser.add_argument('--nsim_args', default='',
                            help='''if given, arguments for nsim simulator
                            through mdb which should be in
                            <board_dir>/support, e.g. --nsim-args=
                            mdb_em.args''')

    @classmethod
    def do_create(cls, cfg, args):
        return MdbNsimBinaryRunner(cfg,
                                   cores=args.cores,
                                   nsim_args=args.nsim_args)

    def do_run(self, command, **kwargs):
        mdb_do_run(self, command)
class MdbHwBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for mdb.'''

    def __init__(self, cfg, cores=1, jtag='digilent', dig_device=''):
        super().__init__(cfg)
        self.jtag = jtag
        self.cores = int(cores)
        # Hardware targets never use nSIM arguments.
        self.nsim_args = ''
        self.elf_name = cfg.elf_file
        # Pre-format the digilent device selection property, if any.
        self.dig_device = f'-prop=dig_device={dig_device}' if dig_device else ''
        self.build_dir = cfg.build_dir

    @classmethod
    def name(cls):
        return 'mdb-hw'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash', 'debug'})

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--jtag', default='digilent',
                            help='''choose the jtag interface for hardware
                            targets, e.g. --jtag=digilent for digilent
                            jtag adapter''')
        parser.add_argument('--cores', default=1,
                            help='''choose the number of cores that target has,
                            e.g. --cores=1''')
        parser.add_argument('--dig-device', default='',
                            help='''choose the specific digilent device to
                            connect, this is useful when multiple
                            targets are connected''')

    @classmethod
    def do_create(cls, cfg, args):
        return MdbHwBinaryRunner(cfg,
                                 cores=args.cores,
                                 jtag=args.jtag,
                                 dig_device=args.dig_device)

    def do_run(self, command, **kwargs):
        mdb_do_run(self, command)
``` | /content/code_sandbox/scripts/west_commands/runners/mdb.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,698 |
```python
#
'''Runner for debugging with J-Link.'''
import argparse
import ipaddress
import logging
import os
from pathlib import Path
import shlex
import subprocess
import sys
import tempfile
from runners.core import ZephyrBinaryRunner, RunnerCaps, FileType
try:
import pylink
from pylink.library import Library
MISSING_REQUIREMENTS = False
except ImportError:
MISSING_REQUIREMENTS = True
DEFAULT_JLINK_EXE = 'JLink.exe' if sys.platform == 'win32' else 'JLinkExe'
DEFAULT_JLINK_GDB_PORT = 2331
def is_ip(ip):
    '''Return True if *ip* parses as a valid IPv4 or IPv6 address.'''
    try:
        ipaddress.ip_address(ip)
        return True
    except ValueError:
        return False
class ToggleAction(argparse.Action):
    '''argparse action: store True for '--flag', False for '--no-flag'.'''
    def __call__(self, parser, args, ignored, option):
        enabled = not option.startswith('--no-')
        setattr(args, self.dest, enabled)
class JLinkBinaryRunner(ZephyrBinaryRunner):
'''Runner front-end for the J-Link GDB server.'''
    def __init__(self, cfg, device, dev_id=None,
                 commander=DEFAULT_JLINK_EXE,
                 dt_flash=True, erase=True, reset=False,
                 iface='swd', speed='auto',
                 loader=None,
                 gdbserver='JLinkGDBServer',
                 gdb_host='',
                 gdb_port=DEFAULT_JLINK_GDB_PORT,
                 tui=False, tool_opt=[]):
        '''Store J-Link invocation parameters.

        :param device: J-Link device name (required).
        :param dev_id: serial number or IP of the probe, if any.
        :param tool_opt: extra JLink Commander options, each split with shlex.
        '''
        # NOTE(review): tool_opt=[] is a mutable default; it is only read
        # below (never mutated), so this is safe but worth cleaning up.
        super().__init__(cfg)
        self.file = cfg.file
        self.file_type = cfg.file_type
        self.hex_name = cfg.hex_file
        self.bin_name = cfg.bin_file
        self.elf_name = cfg.elf_file
        self.gdb_cmd = [cfg.gdb] if cfg.gdb else None
        self.device = device
        self.dev_id = dev_id
        self.commander = commander
        self.dt_flash = dt_flash
        self.erase = erase
        self.reset = reset
        self.gdbserver = gdbserver
        self.iface = iface
        self.speed = speed
        self.gdb_host = gdb_host
        self.gdb_port = gdb_port
        self.tui_arg = ['-tui'] if tui else []
        self.loader = loader
        # Flatten whitespace-separated option strings into a single list.
        self.tool_opt = []
        for opts in [shlex.split(opt) for opt in tool_opt]:
            self.tool_opt += opts
    @classmethod
    def name(cls):
        '''Runner identifier used on the west command line.'''
        return 'jlink'
    @classmethod
    def capabilities(cls):
        '''Declare supported commands and option groups.'''
        return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'},
                          dev_id=True, flash_addr=True, erase=True, reset=True,
                          tool_opt=True, file=True)
    @classmethod
    def dev_id_help(cls) -> str:
        # Help text shown for the generic -i/--dev-id option.
        return '''Device identifier. Use it to select the J-Link Serial Number
                  of the device connected over USB. If the J-Link is connected over ip,
                  the Device identifier is the ip.'''
    @classmethod
    def tool_opt_help(cls) -> str:
        # Help text shown for the generic --tool-opt option.
        return "Additional options for JLink Commander, e.g. '-autoconnect 1'"
    @classmethod
    def do_add_parser(cls, parser):
        '''Register J-Link specific command line options.'''
        # Required:
        parser.add_argument('--device', required=True, help='device name')
        # Optional:
        parser.add_argument('--loader', required=False, dest='loader',
                            help='specifies a loader type')
        parser.add_argument('--id', required=False, dest='dev_id',
                            help='obsolete synonym for -i/--dev-id')
        parser.add_argument('--iface', default='swd',
                            help='interface to use, default is swd')
        parser.add_argument('--speed', default='auto',
                            help='interface speed, default is autodetect')
        parser.add_argument('--tui', default=False, action='store_true',
                            help='if given, GDB uses -tui')
        parser.add_argument('--gdbserver', default='JLinkGDBServer',
                            help='GDB server, default is JLinkGDBServer')
        parser.add_argument('--gdb-host', default='',
                            help='custom gdb host, defaults to the empty string '
                            'and runs a gdb server')
        parser.add_argument('--gdb-port', default=DEFAULT_JLINK_GDB_PORT,
                            help='pyocd gdb port, defaults to {}'.format(
                                DEFAULT_JLINK_GDB_PORT))
        parser.add_argument('--commander', default=DEFAULT_JLINK_EXE,
                            help=f'''J-Link Commander, default is
                            {DEFAULT_JLINK_EXE}''')
        # Kept for backwards compatibility; --reset/--no-reset is preferred.
        parser.add_argument('--reset-after-load', '--no-reset-after-load',
                            dest='reset', nargs=0,
                            action=ToggleAction,
                            help='obsolete synonym for --reset/--no-reset')
        parser.set_defaults(reset=False)
    @classmethod
    def do_create(cls, cfg, args):
        '''Instantiate the runner from parsed arguments.'''
        return JLinkBinaryRunner(cfg, args.device,
                                 dev_id=args.dev_id,
                                 commander=args.commander,
                                 dt_flash=args.dt_flash,
                                 erase=args.erase,
                                 reset=args.reset,
                                 iface=args.iface, speed=args.speed,
                                 gdbserver=args.gdbserver,
                                 loader=args.loader,
                                 gdb_host=args.gdb_host,
                                 gdb_port=args.gdb_port,
                                 tui=args.tui, tool_opt=args.tool_opt)
def print_gdbserver_message(self):
if not self.thread_info_enabled:
thread_msg = '; no thread info available'
elif self.supports_thread_info:
thread_msg = '; thread info enabled'
else:
thread_msg = '; update J-Link software for thread info'
self.logger.info('J-Link GDB server running on port '
f'{self.gdb_port}{thread_msg}')
    @property
    def jlink_version(self):
        '''The J-Link software version as a (major, minor, rev) int tuple.'''
        # Get the J-Link version as a (major, minor, rev) tuple of integers.
        #
        # J-Link's command line tools provide neither a standalone
        # "--version" nor help output that contains the version. Hack
        # around this deficiency by using the third-party pylink library
        # to load the shared library distributed with the tools, which
        # provides an API call for getting the version.
        if not hasattr(self, '_jlink_version'):
            # pylink 0.14.0/0.14.1 exposes JLink SDK DLL (libjlinkarm) in
            # JLINK_SDK_STARTS_WITH, while other versions use JLINK_SDK_NAME
            if pylink.__version__ in ('0.14.0', '0.14.1'):
                sdk = Library.JLINK_SDK_STARTS_WITH
            else:
                sdk = Library.JLINK_SDK_NAME
            plat = sys.platform
            if plat.startswith('win32'):
                libname = Library.get_appropriate_windows_sdk_name() + '.dll'
            elif plat.startswith('linux'):
                libname = sdk + '.so'
            elif plat.startswith('darwin'):
                libname = sdk + '.dylib'
            else:
                self.logger.warning(f'unknown platform {plat}; assuming UNIX')
                libname = sdk + '.so'
            # Load the library that sits next to the commander executable,
            # so the probed version matches the tool actually used.
            lib = Library(dllpath=os.fspath(Path(self.commander).parent /
                                            libname))
            version = int(lib.dll().JLINKARM_GetDLLVersion())
            self.logger.debug('JLINKARM_GetDLLVersion()=%s', version)
            # The return value is an int with 2 decimal digits per
            # version subfield.
            self._jlink_version = (version // 10000,
                                   (version // 100) % 100,
                                   version % 100)
        return self._jlink_version
    @property
    def jlink_version_str(self):
        """Human-readable J-Link version, e.g. (7, 70, 4) -> '7.70d'."""
        # Converts the numeric revision tuple to something human-readable.
        if not hasattr(self, '_jlink_version_str'):
            major, minor, rev = self.jlink_version
            # rev 1 maps to 'a', 2 to 'b', ...; rev 0 has no suffix.
            rev_str = chr(ord('a') + rev - 1) if rev else ''
            self._jlink_version_str = f'{major}.{minor:02}{rev_str}'
        return self._jlink_version_str
    @property
    def supports_nogui(self):
        # -nogui was introduced in J-Link Commander v6.80
        return self.jlink_version >= (6, 80, 0)
    @property
    def supports_thread_info(self):
        # RTOSPlugin_Zephyr was introduced in 7.11b
        return self.jlink_version >= (7, 11, 2)
    @property
    def supports_loader(self):
        # Loader selection via '-device NAME?LOADER' requires v7.70d.
        return self.jlink_version >= (7, 70, 4)
    def do_run(self, command, **kwargs):
        '''Dispatch 'flash', 'debugserver', 'debug' or 'attach'.

        Builds the GDB server command line up front; 'flash' delegates to
        self.flash(), 'debugserver' runs the server standalone, and the
        remaining commands attach a GDB client (spawning a local server
        unless --gdb-host points at a remote one).
        '''
        if MISSING_REQUIREMENTS:
            raise RuntimeError('one or more Python dependencies were missing; '
                               "see the getting started guide for details on "
                               "how to fix")
        # Convert commander to a real absolute path. We need this to
        # be able to find the shared library that tells us what
        # version of the tools we're using.
        self.commander = os.fspath(
            Path(self.require(self.commander)).resolve())
        self.logger.info(f'JLink version: {self.jlink_version_str}')
        rtos = self.thread_info_enabled and self.supports_thread_info
        plugin_dir = os.fspath(Path(self.commander).parent / 'GDBServer' /
                               'RTOSPlugin_Zephyr')
        big_endian = self.build_conf.getboolean('CONFIG_BIG_ENDIAN')
        # '-select ip=…' when the device id looks like an IP, '-select usb…'
        # otherwise; '-singlerun' makes the server exit when GDB disconnects.
        server_cmd = ([self.gdbserver] +
                      ['-select',
                       ('ip' if is_ip(self.dev_id) else 'usb') +
                       (f'={self.dev_id}' if self.dev_id else ''),
                       '-port', str(self.gdb_port),
                       '-if', self.iface,
                       '-speed', self.speed,
                       '-device', self.device,
                       '-silent',
                       '-endian', 'big' if big_endian else 'little',
                       '-singlerun'] +
                      (['-nogui'] if self.supports_nogui else []) +
                      (['-rtos', plugin_dir] if rtos else []) +
                      self.tool_opt)
        if command == 'flash':
            self.flash(**kwargs)
        elif command == 'debugserver':
            if self.gdb_host:
                raise ValueError('Cannot run debugserver with --gdb-host')
            self.require(self.gdbserver)
            self.print_gdbserver_message()
            self.check_call(server_cmd)
        else:
            # 'debug' or 'attach': both need GDB and an ELF file.
            if self.gdb_cmd is None:
                raise ValueError('Cannot debug; gdb is missing')
            if self.file is not None:
                if self.file_type != FileType.ELF:
                    raise ValueError('Cannot debug; elf file required')
                elf_name = self.file
            elif self.elf_name is None:
                raise ValueError('Cannot debug; elf is missing')
            else:
                elf_name = self.elf_name
            client_cmd = (self.gdb_cmd +
                          self.tui_arg +
                          [elf_name] +
                          ['-ex', 'target remote {}:{}'.format(self.gdb_host, self.gdb_port)])
            # 'debug' additionally loads the image; 'attach' does not.
            if command == 'debug':
                client_cmd += ['-ex', 'monitor halt',
                               '-ex', 'monitor reset',
                               '-ex', 'load']
            if self.reset:
                client_cmd += ['-ex', 'monitor reset']
            if not self.gdb_host:
                self.require(self.gdbserver)
                self.print_gdbserver_message()
                self.run_server_and_client(server_cmd, client_cmd)
            else:
                self.run_client(client_cmd)
def flash(self, **kwargs):
loader_details = ""
lines = [
'ExitOnError 1', # Treat any command-error as fatal
'r', # Reset and halt the target
'BE' if self.build_conf.getboolean('CONFIG_BIG_ENDIAN') else 'LE'
]
if self.erase:
lines.append('erase') # Erase all flash sectors
# Get the build artifact to flash
if self.file is not None:
# use file provided by the user
if not os.path.isfile(self.file):
err = 'Cannot flash; file ({}) not found'
raise ValueError(err.format(self.file))
flash_file = self.file
if self.file_type == FileType.HEX:
flash_cmd = f'loadfile "{self.file}"'
elif self.file_type == FileType.BIN:
if self.dt_flash:
flash_addr = self.flash_address_from_build_conf(self.build_conf)
else:
flash_addr = 0
flash_cmd = f'loadfile "{self.file}" 0x{flash_addr:x}'
else:
err = 'Cannot flash; jlink runner only supports hex and bin files'
raise ValueError(err)
else:
# Use hex, bin or elf file provided by the buildsystem.
# Preferring .hex over .bin and .elf
if self.hex_name is not None and os.path.isfile(self.hex_name):
flash_file = self.hex_name
flash_cmd = f'loadfile "{self.hex_name}"'
# Preferring .bin over .elf
elif self.bin_name is not None and os.path.isfile(self.bin_name):
if self.dt_flash:
flash_addr = self.flash_address_from_build_conf(self.build_conf)
else:
flash_addr = 0
flash_file = self.bin_name
flash_cmd = f'loadfile "{self.bin_name}" 0x{flash_addr:x}'
elif self.elf_name is not None and os.path.isfile(self.elf_name):
flash_file = self.elf_name
flash_cmd = f'loadfile "{self.elf_name}"'
else:
err = 'Cannot flash; no hex ({}), bin ({}) or elf ({}) files found.'
raise ValueError(err.format(self.hex_name, self.bin_name, self.elf_name))
# Flash the selected build artifact
lines.append(flash_cmd)
if self.reset:
lines.append('r') # Reset and halt the target
lines.append('g') # Start the CPU
# Reset the Debug Port CTRL/STAT register
# Under normal operation this is done automatically, but if other
# JLink tools are running, it is not performed.
# The J-Link scripting layer chains commands, meaning that writes are
# not actually performed until after the next operation. After writing
# the register, read it back to perform this flushing.
lines.append('writeDP 1 0')
lines.append('readDP 1')
lines.append('q') # Close the connection and quit
self.logger.debug('JLink commander script:\n' +
'\n'.join(lines))
# Don't use NamedTemporaryFile: the resulting file can't be
# opened again on Windows.
with tempfile.TemporaryDirectory(suffix='jlink') as d:
fname = os.path.join(d, 'runner.jlink')
with open(fname, 'wb') as f:
f.writelines(bytes(line + '\n', 'utf-8') for line in lines)
if self.supports_loader and self.loader:
loader_details = "?" + self.loader
cmd = ([self.commander] +
(['-IP', f'{self.dev_id}'] if is_ip(self.dev_id) else (['-USB', f'{self.dev_id}'] if self.dev_id else [])) +
(['-nogui', '1'] if self.supports_nogui else []) +
['-if', self.iface,
'-speed', self.speed,
'-device', self.device + loader_details,
'-CommanderScript', fname] +
(['-nogui', '1'] if self.supports_nogui else []) +
self.tool_opt)
self.logger.info('Flashing file: {}'.format(flash_file))
kwargs = {}
if not self.logger.isEnabledFor(logging.DEBUG):
kwargs['stdout'] = subprocess.DEVNULL
self.check_call(cmd, **kwargs)
``` | /content/code_sandbox/scripts/west_commands/runners/jlink.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,382 |
```python
#
'''HiFive1-specific (flash only) runner.'''
from os import path
from runners.core import ZephyrBinaryRunner, RunnerCaps
class HiFive1BinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for the HiFive1 board, using openocd.'''
    def __init__(self, cfg):
        super().__init__(cfg)
        # Board-specific openocd configuration shipped in the board dir.
        self.openocd_config = path.join(cfg.board_dir, 'support', 'openocd.cfg')
    @classmethod
    def name(cls):
        return 'hifive1'
    @classmethod
    def capabilities(cls):
        # Flash only; debugging is not handled by this runner.
        return RunnerCaps(commands={'flash'})
    @classmethod
    def do_add_parser(cls, parser):
        # No runner-specific command line options.
        pass
    @classmethod
    def do_create(cls, cfg, args):
        if cfg.gdb is None:
            raise ValueError('--gdb not provided at command line')
        return HiFive1BinaryRunner(cfg)
    def do_run(self, command, **kwargs):
        '''Flash by driving a 'load' through GDB attached to openocd.'''
        self.require(self.cfg.openocd)
        self.require(self.cfg.gdb)
        server_cmd = [self.cfg.openocd, '-f', self.openocd_config]
        gdb_script = ['set remotetimeout 240',
                      'target extended-remote localhost:3333',
                      'load',
                      'quit']
        client_cmd = [self.cfg.gdb, self.cfg.elf_file, '--batch']
        for gdb_stmt in gdb_script:
            client_cmd.extend(['-ex', gdb_stmt])
        self.run_server_and_client(server_cmd, client_cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/hifive1.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 316 |
```python
#
'''bossac-specific runner (flash only) for Atmel SAM microcontrollers.'''
import os
import pathlib
import pickle
import platform
import subprocess
import sys
import time
from runners.core import ZephyrBinaryRunner, RunnerCaps
# On macOS there is no sensible fixed default; the port is chosen
# interactively at run time. Elsewhere, default to the first CDC-ACM device.
DEFAULT_BOSSAC_PORT = None if platform.system() == 'Darwin' else '/dev/ttyACM0'
DEFAULT_BOSSAC_SPEED = '115200'
class BossacBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for bossac.'''
    def __init__(self, cfg, bossac='bossac', port=DEFAULT_BOSSAC_PORT,
                 speed=DEFAULT_BOSSAC_SPEED, boot_delay=0):
        """Remember the bossac executable, serial port/speed and boot delay."""
        super().__init__(cfg)
        self.bossac = bossac
        self.port = port
        self.speed = speed
        self.boot_delay = boot_delay
    @classmethod
    def name(cls):
        return 'bossac'
    @classmethod
    def capabilities(cls):
        # Flash only; no debug/attach support.
        return RunnerCaps(commands={'flash'})
    @classmethod
    def do_add_parser(cls, parser):
        '''Register bossac-specific command line arguments.'''
        parser.add_argument('--bossac', default='bossac',
                            help='path to bossac, default is bossac')
        parser.add_argument('--bossac-port', default=DEFAULT_BOSSAC_PORT,
                            help='serial port to use, default is ' +
                            str(DEFAULT_BOSSAC_PORT))
        parser.add_argument('--speed', default=DEFAULT_BOSSAC_SPEED,
                            help='serial port speed to use, default is ' +
                            DEFAULT_BOSSAC_SPEED)
        parser.add_argument('--delay', default=0, type=float,
                            help='''delay in seconds (may be a floating
                            point number) to wait between putting the board
                            into bootloader mode and running bossac;
                            default is no delay''')
    @classmethod
    def do_create(cls, cfg, args):
        '''Build a BossacBinaryRunner from parsed arguments.'''
        return BossacBinaryRunner(cfg, bossac=args.bossac,
                                  port=args.bossac_port, speed=args.speed,
                                  boot_delay=args.delay)
    def read_help(self):
        """Run bossac --help and return the output as a list of lines"""
        self.require(self.bossac)
        try:
            # BOSSA > 1.9.1 returns OK
            out = self.check_output([self.bossac, '--help']).decode()
        except subprocess.CalledProcessError as ex:
            # BOSSA <= 1.9.1 returns an error
            out = ex.output.decode()
        return out.split('\n')
    def supports(self, flag):
        """Check if bossac supports a flag by searching the help"""
        for line in self.read_help():
            if flag in line:
                return True
        return False
    def is_extended_samba_protocol(self):
        # True when Kconfig selects one of the extended SAM-BA bootloader
        # variants (Arduino or Adafruit UF2).
        ext_samba_versions = ['CONFIG_BOOTLOADER_BOSSA_ARDUINO',
                              'CONFIG_BOOTLOADER_BOSSA_ADAFRUIT_UF2']
        for x in ext_samba_versions:
            if self.build_conf.getboolean(x):
                return True
        return False
    def is_partition_enabled(self):
        # Whether flashing should honor the devicetree code partition.
        return self.build_conf.getboolean('CONFIG_USE_DT_CODE_PARTITION')
    def get_chosen_code_partition_node(self):
        """Return the EDT node chosen as zephyr,code-partition, or None."""
        # Get the EDT Node corresponding to the zephyr,code-partition
        # chosen DT node
        # Ensure the build directory has a compiled DTS file
        # where we expect it to be.
        b = pathlib.Path(self.cfg.build_dir)
        edt_pickle = b / 'zephyr' / 'edt.pickle'
        if not edt_pickle.is_file():
            error_msg = "can't load devicetree; expected to find:" \
                        + str(edt_pickle)
            raise RuntimeError(error_msg)
        # Load the devicetree.
        try:
            with open(edt_pickle, 'rb') as f:
                edt = pickle.load(f)
        except ModuleNotFoundError:
            # The pickle references edtlib classes; unpickling fails when
            # that module is not importable in this environment.
            error_msg = "could not load devicetree, something may be wrong " \
                        + "with the python environment"
            raise RuntimeError(error_msg)
        return edt.chosen_node('zephyr,code-partition')
    def get_board_name(self):
        # Board name for error messages; placeholder when unconfigured.
        if 'CONFIG_BOARD' not in self.build_conf:
            return '<board>'
        return self.build_conf['CONFIG_BOARD']
    def get_dts_img_offset(self):
        # Image offset from Kconfig (CONFIG_FLASH_LOAD_OFFSET), unless the
        # legacy bootloader mode is selected, in which case it is always 0.
        if self.build_conf.getboolean('CONFIG_BOOTLOADER_BOSSA_LEGACY'):
            return 0
        if self.build_conf.getboolean('CONFIG_HAS_FLASH_LOAD_OFFSET'):
            return self.build_conf['CONFIG_FLASH_LOAD_OFFSET']
        return 0
    def get_image_offset(self, supports_offset):
        """Validates and returns the flash offset"""
        dts_img_offset = self.get_dts_img_offset()
        # NOTE(review): the offset is parsed with base 16 here; this assumes
        # CONFIG_FLASH_LOAD_OFFSET stringifies as a hex value -- confirm.
        if int(str(dts_img_offset), 16) > 0:
            if not supports_offset:
                old_sdk = 'This version of BOSSA does not support the' \
                          ' --offset flag. Please upgrade to a newer Zephyr' \
                          ' SDK version >= 0.12.0.'
                raise RuntimeError(old_sdk)
            return dts_img_offset
        return None
    def is_gnu_coreutils_stty(self):
        """Return True when the installed stty is the GNU coreutils one."""
        try:
            result = subprocess.run(['stty', '--version'], capture_output=True, text=True, check=True)
            return 'coreutils' in result.stdout
        except subprocess.CalledProcessError:
            # BSD/macOS stty rejects --version.
            return False
    def set_serial_config(self):
        '''Configure the serial port before invoking bossac.'''
        if platform.system() == 'Linux' or platform.system() == 'Darwin':
            self.require('stty')
            # GNU coreutils uses a capital F flag for 'file'
            flag = '-F' if self.is_gnu_coreutils_stty() else '-f'
            if self.is_extended_samba_protocol():
                # 1200 baud is presumably the conventional 'touch' that makes
                # extended SAM-BA bootloaders enter bootloader mode.
                self.speed = '1200'
            cmd_stty = ['stty', flag, self.port, 'raw', 'ispeed', self.speed,
                        'ospeed', self.speed, 'cs8', '-cstopb', 'ignpar',
                        'eol', '255', 'eof', '255']
            self.check_call(cmd_stty)
            self.magic_delay()
    def magic_delay(self):
        '''There can be a time lag between the board resetting into
        bootloader mode (done via stty above) and the OS enumerating
        the USB device again. This function lets users tune a magic
        delay for their system to handle this case. By default,
        we don't wait.
        '''
        if self.boot_delay > 0:
            time.sleep(self.boot_delay)
    def make_bossac_cmd(self):
        '''Assemble the bossac command line, including the flash offset.'''
        self.ensure_output('bin')
        cmd_flash = [self.bossac, '-p', self.port, '-R', '-e', '-w', '-v',
                     '-b', self.cfg.bin_file]
        dt_chosen_code_partition_nd = self.get_chosen_code_partition_node()
        if self.is_partition_enabled():
            if dt_chosen_code_partition_nd is None:
                error_msg = 'The device tree zephyr,code-partition chosen' \
                            ' node must be defined.'
                raise RuntimeError(error_msg)
            offset = self.get_image_offset(self.supports('--offset'))
            if offset is not None and int(str(offset), 16) > 0:
                cmd_flash += ['-o', '%s' % offset]
        elif dt_chosen_code_partition_nd is not None:
            error_msg = 'There is no CONFIG_USE_DT_CODE_PARTITION Kconfig' \
                        ' defined at ' + self.get_board_name() + \
                        '_defconfig file.\n This means that' \
                        ' zephyr,code-partition device tree node should not' \
                        ' be defined. Check Zephyr SAM-BA documentation.'
            raise RuntimeError(error_msg)
        return cmd_flash
    def get_darwin_serial_device_list(self):
        """
        Get a list of candidate serial ports on Darwin by querying the IOKit
        registry.
        """
        import plistlib
        ioreg_out = self.check_output(['ioreg', '-r', '-c', 'IOSerialBSDClient',
                                       '-k', 'IOCalloutDevice', '-a'])
        serial_ports = plistlib.loads(ioreg_out, fmt=plistlib.FMT_XML)
        return [port["IOCalloutDevice"] for port in serial_ports]
    def get_darwin_user_port_choice(self):
        """
        Ask the user to select the serial port from a set of candidate ports
        retrieved from IOKit on Darwin.
        Modelled on get_board_snr() in the nrfjprog runner.
        """
        devices = self.get_darwin_serial_device_list()
        if len(devices) == 0:
            raise RuntimeError('No candidate serial ports were found!')
        elif len(devices) == 1:
            print('Using only serial device on the system: ' + devices[0])
            return devices[0]
        elif not sys.stdin.isatty():
            raise RuntimeError('Refusing to guess which serial port to use: '
                               f'there are {len(devices)} available. '
                               '(Interactive prompts disabled since standard '
                               'input is not a terminal - please specify a '
                               'port using --bossac-port instead)')
        print('There are multiple serial ports available on this system:')
        for i, device in enumerate(devices, 1):
            print(f'    {i}. {device}')
        p = f'Please select one (1-{len(devices)}, or EOF to exit): '
        while True:
            try:
                value = input(p)
            except EOFError:
                sys.exit(0)
            try:
                value = int(value)
            except ValueError:
                # Not a number; prompt again.
                continue
            if 1 <= value <= len(devices):
                break
        return devices[value - 1]
    def do_run(self, command, **kwargs):
        '''Flash the board (refusing to run under WSL).'''
        if platform.system() == 'Linux':
            # Detect Windows Subsystem for Linux, where USB serial devices
            # are not directly accessible.
            if 'microsoft' in platform.uname().release.lower() or \
                os.getenv('WSL_DISTRO_NAME') is not None or \
                os.getenv('WSL_INTEROP') is not None:
                msg = 'CAUTION: BOSSAC runner not supported on WSL!'
                raise RuntimeError(msg)
        elif platform.system() == 'Darwin' and self.port is None:
            self.port = self.get_darwin_user_port_choice()
        self.require(self.bossac)
        self.set_serial_config()
        self.check_call(self.make_bossac_cmd())
``` | /content/code_sandbox/scripts/west_commands/runners/bossac.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,217 |
```python
#
"""Runner for flashing with STM32CubeProgrammer CLI, the official programming
utility from ST Microelectronics.
"""
import argparse
from pathlib import Path
import platform
import os
import shlex
import shutil
from typing import List, Optional, ClassVar, Dict
from runners.core import ZephyrBinaryRunner, RunnerCaps, RunnerConfig
class STM32CubeProgrammerBinaryRunner(ZephyrBinaryRunner):
    """Runner front-end for STM32CubeProgrammer CLI."""
    _RESET_MODES: ClassVar[Dict[str, str]] = {
        "sw": "SWrst",
        "hw": "HWrst",
        "core": "Crst",
    }
    """Reset mode argument mappings."""
    def __init__(
        self,
        cfg: RunnerConfig,
        port: str,
        frequency: Optional[int],
        reset_mode: Optional[str],
        conn_modifiers: Optional[str],
        cli: Optional[Path],
        use_elf: bool,
        erase: bool,
        extload: Optional[str],
        tool_opt: List[str],
    ) -> None:
        """Store connection parameters and locate the CLI tool."""
        super().__init__(cfg)
        self._port = port
        self._frequency = frequency
        self._reset_mode = reset_mode
        self._conn_modifiers = conn_modifiers
        self._cli = (
            cli or STM32CubeProgrammerBinaryRunner._get_stm32cubeprogrammer_path()
        )
        self._use_elf = use_elf
        self._erase = erase
        if extload:
            # External loaders ship in 'ExternalLoader' next to the CLI's
            # bin directory; '-el' expects an absolute path.
            p = STM32CubeProgrammerBinaryRunner._get_stm32cubeprogrammer_path().parent.resolve() / 'ExternalLoader'
            self._extload = ['-el', str(p / extload)]
        else:
            self._extload = []
        self._tool_opt: List[str] = list()
        for opts in [shlex.split(opt) for opt in tool_opt]:
            self._tool_opt += opts
        # add required library loader path to the environment (Linux only)
        if platform.system() == "Linux":
            os.environ["LD_LIBRARY_PATH"] = str(self._cli.parent / ".." / "lib")
    @staticmethod
    def _get_stm32cubeprogrammer_path() -> Path:
        """Obtain path of the STM32CubeProgrammer CLI tool."""
        if platform.system() == "Linux":
            # Prefer a CLI found on PATH; fall back to the default
            # per-user installation location.
            cmd = shutil.which("STM32_Programmer_CLI")
            if cmd is not None:
                return Path(cmd)
            return (
                Path.home()
                / "STMicroelectronics"
                / "STM32Cube"
                / "STM32CubeProgrammer"
                / "bin"
                / "STM32_Programmer_CLI"
            )
        if platform.system() == "Windows":
            # Check the 32-bit Program Files first, then the 64-bit one.
            cli = (
                Path("STMicroelectronics")
                / "STM32Cube"
                / "STM32CubeProgrammer"
                / "bin"
                / "STM32_Programmer_CLI.exe"
            )
            x86_path = Path(os.environ["PROGRAMFILES(X86)"]) / cli
            if x86_path.exists():
                return x86_path
            return Path(os.environ["PROGRAMW6432"]) / cli
        if platform.system() == "Darwin":
            return (
                Path("/Applications")
                / "STMicroelectronics"
                / "STM32Cube"
                / "STM32CubeProgrammer"
                / "STM32CubeProgrammer.app"
                / "Contents"
                / "MacOs"
                / "bin"
                / "STM32_Programmer_CLI"
            )
        raise NotImplementedError("Could not determine STM32_Programmer_CLI path")
    @classmethod
    def name(cls):
        return "stm32cubeprogrammer"
    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={"flash"}, erase=True, extload=True, tool_opt=True)
    @classmethod
    def do_add_parser(cls, parser):
        """Register STM32CubeProgrammer-specific command line arguments."""
        parser.add_argument(
            "--port",
            type=str,
            required=True,
            help="Interface identifier, e.g. swd, jtag, /dev/ttyS0...",
        )
        parser.add_argument(
            "--frequency", type=int, required=False, help="Programmer frequency in KHz"
        )
        parser.add_argument(
            "--reset-mode",
            type=str,
            required=False,
            choices=["sw", "hw", "core"],
            help="Reset mode",
        )
        parser.add_argument(
            "--conn-modifiers",
            type=str,
            required=False,
            help="Additional options for the --connect argument",
        )
        parser.add_argument(
            "--cli",
            type=Path,
            required=False,
            help="STM32CubeProgrammer CLI tool path",
        )
        parser.add_argument(
            "--use-elf",
            action="store_true",
            required=False,
            help="Use ELF file when flashing instead of HEX file",
        )
    @classmethod
    def extload_help(cls) -> str:
        return "External Loader for STM32_Programmer_CLI"
    @classmethod
    def tool_opt_help(cls) -> str:
        return "Additional options for STM32_Programmer_CLI"
    @classmethod
    def do_create(
        cls, cfg: RunnerConfig, args: argparse.Namespace
    ) -> "STM32CubeProgrammerBinaryRunner":
        """Build a runner instance from parsed command line arguments."""
        return STM32CubeProgrammerBinaryRunner(
            cfg,
            port=args.port,
            frequency=args.frequency,
            reset_mode=args.reset_mode,
            conn_modifiers=args.conn_modifiers,
            cli=args.cli,
            use_elf=args.use_elf,
            erase=args.erase,
            extload=args.extload,
            tool_opt=args.tool_opt,
        )
    def do_run(self, command: str, **kwargs):
        """Dispatch the requested command; only 'flash' is supported."""
        if command == "flash":
            self.flash(**kwargs)
    def flash(self, **kwargs) -> None:
        """Erase (optionally), then download and start the firmware image."""
        self.require(str(self._cli))
        # prepare base command
        cmd = [str(self._cli)]
        connect_opts = f"port={self._port}"
        if self._frequency:
            connect_opts += f" freq={self._frequency}"
        if self._reset_mode:
            reset_mode = STM32CubeProgrammerBinaryRunner._RESET_MODES[self._reset_mode]
            connect_opts += f" reset={reset_mode}"
        if self._conn_modifiers:
            connect_opts += f" {self._conn_modifiers}"
        cmd += ["--connect", connect_opts]
        cmd += self._tool_opt
        if self._extload:
            # external loader to come after the tool option in STM32CubeProgrammer
            cmd += self._extload
        # erase first if requested
        if self._erase:
            self.check_call(cmd + ["--erase", "all"])
        # flash image and run application
        dl_file = self.cfg.elf_file if self._use_elf else self.cfg.hex_file
        if dl_file is None:
            raise RuntimeError('cannot flash; no download file was specified')
        elif not os.path.isfile(dl_file):
            raise RuntimeError(f'download file {dl_file} does not exist')
        self.check_call(cmd + ["--download", dl_file, "--start"])
``` | /content/code_sandbox/scripts/west_commands/runners/stm32cubeprogrammer.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,509 |
```python
#
'''UF2 runner (flash only) for UF2 compatible bootloaders.'''
from pathlib import Path
from shutil import copy
from runners.core import ZephyrBinaryRunner, RunnerCaps
try:
import psutil # pylint: disable=unused-import
MISSING_PSUTIL = False
except ImportError:
# This can happen when building the documentation for the
# runners package if psutil is not on sys.path. This is fine
# to ignore in that case.
MISSING_PSUTIL = True
class UF2BinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for copying to UF2 USB-MSC mounts.'''
    def __init__(self, cfg, board_id=None):
        """Optionally restrict flashing to a specific Board-ID."""
        super().__init__(cfg)
        self.board_id = board_id
    @classmethod
    def name(cls):
        return 'uf2'
    @classmethod
    def capabilities(cls):
        # Flash only; copying to a mount cannot debug.
        return RunnerCaps(commands={'flash'})
    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--board-id', dest='board_id',
                            help='Board-ID value to match from INFO_UF2.TXT')
    @classmethod
    def do_create(cls, cfg, args):
        return UF2BinaryRunner(cfg, board_id=args.board_id)
    @staticmethod
    def get_uf2_info_path(part) -> Path:
        """Path of the INFO_UF2.TXT marker file on a mounted partition."""
        return Path(part.mountpoint) / "INFO_UF2.TXT"
    @staticmethod
    def is_uf2_partition(part):
        """True when the partition is FAT-formatted and carries INFO_UF2.TXT."""
        try:
            return ((part.fstype in ['vfat', 'FAT', 'msdos']) and
                    UF2BinaryRunner.get_uf2_info_path(part).is_file())
        except PermissionError:
            # Unreadable mounts (e.g. other users') are simply not candidates.
            return False
    @staticmethod
    def get_uf2_info(part):
        """Parse INFO_UF2.TXT into a dict of 'Key: Value' entries."""
        lines = UF2BinaryRunner.get_uf2_info_path(part).read_text().splitlines()
        lines = lines[1:] # Skip the first summary line
        def split_uf2_info(line: str):
            k, _, val = line.partition(':')
            return k.strip(), val.strip()
        return {k: v for k, v in (split_uf2_info(line) for line in lines) if k and v}
    def match_board_id(self, part):
        """True when the partition's Board-ID equals the requested one."""
        info = self.get_uf2_info(part)
        return info.get('Board-ID') == self.board_id
    def get_uf2_partitions(self):
        """Return mounted UF2 partitions, filtered by Board-ID when given."""
        parts = [part for part in psutil.disk_partitions() if self.is_uf2_partition(part)]
        if (self.board_id is not None) and parts:
            parts = [part for part in parts if self.match_board_id(part)]
            if not parts:
                self.logger.warning("Discovered UF2 partitions don't match Board-ID '%s'",
                                    self.board_id)
        return parts
    def copy_uf2_to_partition(self, part):
        """Copy the build's .uf2 artifact onto the partition's mount point."""
        self.ensure_output('uf2')
        copy(self.cfg.uf2_file, part.mountpoint)
    def do_run(self, command, **kwargs):
        '''Flash by copying the UF2 file to exactly one matching mount.'''
        if MISSING_PSUTIL:
            raise RuntimeError(
                'could not import psutil; something may be wrong with the '
                'python environment')
        partitions = self.get_uf2_partitions()
        if not partitions:
            raise RuntimeError('No matching UF2 partitions found')
        if len(partitions) > 1:
            raise RuntimeError('More than one matching UF2 partitions found')
        part = partitions[0]
        self.logger.info("Copying UF2 file to '%s'", part.mountpoint)
        self.copy_uf2_to_partition(part)
``` | /content/code_sandbox/scripts/west_commands/runners/uf2.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 748 |
```python
#
'''Runner for flashing with ezFlashCLI.'''
import shlex
from runners.core import ZephyrBinaryRunner, RunnerCaps
DEFAULT_EZFLASHCLI = "ezFlashCLI"
class EzFlashCliBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for ezFlashCLI'''
    def __init__(self, cfg, tool, dev_id=None, tool_opt=None, erase=False, reset=True):
        """Store the tool path, J-Link serial id and flash/reset options.

        'tool_opt' defaults to None (not a mutable []) to avoid the shared
        mutable-default pitfall; passing a list works exactly as before.
        """
        super().__init__(cfg)
        self.bin_ = cfg.bin_file
        self.tool = tool
        self.dev_id = dev_id
        self.erase = bool(erase)
        self.reset = bool(reset)
        self.tool_opt = []
        for opts in [shlex.split(opt) for opt in (tool_opt or [])]:
            self.tool_opt += opts
    @classmethod
    def name(cls):
        return 'ezflashcli'
    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'}, dev_id=True, tool_opt=True, erase=True, reset=True)
    @classmethod
    def dev_id_help(cls) -> str:
        return '''Device identifier. Use it to select the J-Link Serial Number
                  of the device connected over USB.'''
    @classmethod
    def tool_opt_help(cls) -> str:
        return "Additional options for ezFlashCLI e.g. '--verbose'"
    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--tool', default=DEFAULT_EZFLASHCLI,
                            help='ezFlashCLI path, default is '
                            f'{DEFAULT_EZFLASHCLI}')
        parser.set_defaults(reset=True)
    @classmethod
    def do_create(cls, cfg, args):
        # Bug fix: args.reset was previously dropped here, so --no-reset was
        # silently ignored; forward it to the constructor.
        return EzFlashCliBinaryRunner(cfg, tool=args.tool, dev_id=args.dev_id,
                                      tool_opt=args.tool_opt, erase=args.erase,
                                      reset=args.reset)
    def needs_product_header(self):
        # Applications linked to code partition are meant to be run by MCUboot
        # and do not require product header. Other applications and MCUboot itself
        # are run by internal bootloader and thus require valid product header.
        is_mcuboot = self.build_conf.getboolean('CONFIG_MCUBOOT')
        uses_code_partition = self.build_conf.getboolean('CONFIG_USE_DT_CODE_PARTITION')
        return is_mcuboot or not uses_code_partition
    def get_options(self):
        """Common tool arguments: J-Link serial selection plus user options."""
        device_args = []
        if self.dev_id is not None:
            device_args = ['-j', f'{self.dev_id}']
        return device_args + self.tool_opt
    def program_bin(self):
        """Optionally erase, then write the .bin image to flash."""
        options = self.get_options()
        if self.erase:
            self.logger.info("Erasing flash...")
            self.check_call([self.tool] + options + ["erase_flash"])
        self.logger.info(f"Flashing {self.bin_}...")
        if self.needs_product_header():
            # Write product header and application image at fixed offset as required
            # by internal bootloader.
            self.check_call([self.tool] + options + ["image_flash", self.bin_])
        else:
            load_offset = self.build_conf['CONFIG_FLASH_LOAD_OFFSET']
            self.check_call([self.tool] + options + ["write_flash", f'0x{load_offset:x}', self.bin_])
    def reset_device(self):
        """Restart execution on the target ('go')."""
        self.logger.info("Resetting...")
        options = self.get_options()
        self.check_call([self.tool] + options + ["go"])
    def do_run(self, command, **kwargs):
        '''Flash the binary and, unless --no-reset was given, restart it.'''
        self.require(self.tool)
        self.ensure_output('bin')
        self.program_bin()
        if self.reset:
            self.reset_device()
``` | /content/code_sandbox/scripts/west_commands/runners/ezflashcli.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 740 |
```python
#
'''Runner for teensy .'''
import os
import subprocess
from runners.core import ZephyrBinaryRunner
class TeensyBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for teensy.'''
    def __init__(self, cfg, mcu, teensy_loader):
        super().__init__(cfg)
        # Pre-built '--mcu NAME' argument pair for teensy_loader_cli.
        self.mcu_args = ['--mcu', mcu]
        self.teensy_loader = teensy_loader
        self.hex_name = cfg.hex_file
    @classmethod
    def name(cls):
        return 'teensy'
    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--mcu', required=True,
                            help='Teensy mcu target')
        parser.add_argument('--teensy', default='teensy_loader_cli',
                            help='path to teensy cli tool, default is teensy_loader_cli')
    @classmethod
    def do_create(cls, cfg, args):
        return TeensyBinaryRunner(cfg, args.mcu, teensy_loader=args.teensy)
    def do_run(self, command):
        self.require(self.teensy_loader)
        if command == 'flash':
            self.flash()
    def flash(self):
        '''Flash the hex artifact produced by the build system.'''
        have_hex = self.hex_name is not None and os.path.isfile(self.hex_name)
        if not have_hex:
            raise ValueError(
                'Cannot flash; no hex ({}) file found. '.format(self.hex_name))
        cmd = [self.teensy_loader, *self.mcu_args, self.hex_name]
        self.logger.info('Flashing file: {}'.format(self.hex_name))
        try:
            self.check_output(cmd)
        except subprocess.CalledProcessError as err:
            self.logger.error("Failure %i" % err.returncode)
        else:
            self.logger.info('Success')
``` | /content/code_sandbox/scripts/west_commands/runners/teensy.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 386 |
```python
"""
Runner for NXP S32 Debug Probe.
"""
import argparse
import os
import platform
import re
import shlex
import subprocess
import sys
import tempfile
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Optional, Union
from runners.core import (BuildConfiguration, RunnerCaps, RunnerConfig,
ZephyrBinaryRunner)
# Windows device class name used when enumerating probes with pnputil.
NXP_S32DBG_USB_CLASS = 'NXP Probes'
# USB vendor/product IDs used to match the probe with lsusb on Linux.
NXP_S32DBG_USB_VID = 0x15a2
NXP_S32DBG_USB_PID = 0x0067
@dataclass
class NXPS32DebugProbeConfig:
    """NXP S32 Debug Probe configuration parameters."""
    # Probe connection string, "s32dbg[:<address>]"
    conn_str: str = 's32dbg'
    # GTA server TCP port
    server_port: int = 45000
    # JTAG interface speed
    speed: int = 16000
    # Seconds to wait for remote target responses
    remote_timeout: int = 30
    # Reset behavior; 'default' presumably selects the probe's default -- TODO confirm
    reset_type: Optional[str] = 'default'
    # Delay (seconds) applied around reset; 0 means none
    reset_delay: int = 0
class NXPS32DebugProbeRunner(ZephyrBinaryRunner):
"""Runner front-end for NXP S32 Debug Probe."""
def __init__(self,
runner_cfg: RunnerConfig,
probe_cfg: NXPS32DebugProbeConfig,
core_name: str,
soc_name: str,
soc_family_name: str,
start_all_cores: bool,
s32ds_path: Optional[str] = None,
tool_opt: Optional[List[str]] = None) -> None:
super(NXPS32DebugProbeRunner, self).__init__(runner_cfg)
self.elf_file: str = runner_cfg.elf_file or ''
self.probe_cfg: NXPS32DebugProbeConfig = probe_cfg
self.core_name: str = core_name
self.soc_name: str = soc_name
self.soc_family_name: str = soc_family_name
self.start_all_cores: bool = start_all_cores
self.s32ds_path_override: Optional[str] = s32ds_path
self.tool_opt: List[str] = []
if tool_opt:
for opt in tool_opt:
self.tool_opt.extend(shlex.split(opt))
build_cfg = BuildConfiguration(runner_cfg.build_dir)
self.arch = build_cfg.get('CONFIG_ARCH').replace('"', '')
    @classmethod
    def name(cls) -> str:
        """Runner name as used on the west command line."""
        return 'nxp_s32dbg'
    @classmethod
    def capabilities(cls) -> RunnerCaps:
        """Debug-oriented runner: no 'flash' support."""
        return RunnerCaps(commands={'debug', 'debugserver', 'attach'},
                          dev_id=True, tool_opt=True)
    @classmethod
    def dev_id_help(cls) -> str:
        return '''Debug probe connection string as in "s32dbg[:<address>]"
                  where <address> can be the IP address if TAP is available via Ethernet,
                  the serial ID of the probe or empty if TAP is available via USB.'''
    @classmethod
    def tool_opt_help(cls) -> str:
        return '''Additional options for GDB client when used with "debug" or "attach" commands
                  or for GTA server when used with "debugserver" command.'''
    @classmethod
    def do_add_parser(cls, parser: argparse.ArgumentParser) -> None:
        """Register NXP S32 Debug Probe specific command line arguments."""
        parser.add_argument('--core-name',
                            required=True,
                            help='Core name as supported by the debug probe (e.g. "R52_0_0")')
        parser.add_argument('--soc-name',
                            required=True,
                            help='SoC name as supported by the debug probe (e.g. "S32Z270")')
        parser.add_argument('--soc-family-name',
                            required=True,
                            help='SoC family name as supported by the debug probe (e.g. "s32z2e2")')
        parser.add_argument('--start-all-cores',
                            action='store_true',
                            help='Start all SoC cores and not just the one being debugged. '
                                 'Use together with "debug" command.')
        parser.add_argument('--s32ds-path',
                            help='Override the path to NXP S32 Design Studio installation. '
                                 'By default, this runner will try to obtain it from the system '
                                 'path, if available.')
        parser.add_argument('--server-port',
                            default=NXPS32DebugProbeConfig.server_port,
                            type=int,
                            help='GTA server port')
        parser.add_argument('--speed',
                            default=NXPS32DebugProbeConfig.speed,
                            type=int,
                            help='JTAG interface speed')
        parser.add_argument('--remote-timeout',
                            default=NXPS32DebugProbeConfig.remote_timeout,
                            type=int,
                            help='Number of seconds to wait for the remote target responses')
@classmethod
def do_create(cls, cfg: RunnerConfig, args: argparse.Namespace) -> 'NXPS32DebugProbeRunner':
probe_cfg = NXPS32DebugProbeConfig(args.dev_id,
server_port=args.server_port,
speed=args.speed,
remote_timeout=args.remote_timeout)
return NXPS32DebugProbeRunner(cfg, probe_cfg, args.core_name, args.soc_name,
args.soc_family_name, args.start_all_cores,
s32ds_path=args.s32ds_path, tool_opt=args.tool_opt)
@staticmethod
def find_usb_probes() -> List[str]:
"""Return a list of debug probe serial numbers connected via USB to this host."""
# use system's native commands to enumerate and retrieve the USB serial ID
# to avoid bloating this runner with third-party dependencies that often
# require priviledged permissions to access the device info
macaddr_pattern = r'(?:[0-9a-f]{2}[:]){5}[0-9a-f]{2}'
if platform.system() == 'Windows':
cmd = f'pnputil /enum-devices /connected /class "{NXP_S32DBG_USB_CLASS}"'
serialid_pattern = f'instance id: +usb\\\\.*\\\\({macaddr_pattern})'
else:
cmd = f'lsusb -v -d {NXP_S32DBG_USB_VID:x}:{NXP_S32DBG_USB_PID:x}'
serialid_pattern = f'iserial +.*({macaddr_pattern})'
try:
outb = subprocess.check_output(shlex.split(cmd), stderr=subprocess.DEVNULL)
out = outb.decode('utf-8').strip().lower()
except subprocess.CalledProcessError:
raise RuntimeError('error while looking for debug probes connected')
devices: List[str] = []
if out and 'no devices were found' not in out:
devices = re.findall(serialid_pattern, out)
return sorted(devices)
@classmethod
def select_probe(cls) -> str:
"""
Find debugger probes connected and return the serial number of the one selected.
If there are multiple debugger probes connected and this runner is being executed
in a interactive prompt, ask the user to select one of the probes.
"""
probes_snr = cls.find_usb_probes()
if not probes_snr:
raise RuntimeError('there are no debug probes connected')
elif len(probes_snr) == 1:
return probes_snr[0]
else:
if not sys.stdin.isatty():
raise RuntimeError(
f'refusing to guess which of {len(probes_snr)} connected probes to use '
'(Interactive prompts disabled since standard input is not a terminal). '
'Please specify a device ID on the command line.')
print('There are multiple debug probes connected')
for i, probe in enumerate(probes_snr, 1):
print(f'{i}. {probe}')
prompt = f'Please select one with desired serial number (1-{len(probes_snr)}): '
while True:
try:
value: int = int(input(prompt))
except EOFError:
sys.exit(0)
except ValueError:
continue
if 1 <= value <= len(probes_snr):
break
return probes_snr[value - 1]
@property
def runtime_environment(self) -> Optional[Dict[str, str]]:
"""Execution environment used for the client process."""
if platform.system() == 'Windows':
python_lib = (self.s32ds_path / 'S32DS' / 'build_tools' / 'msys32'
/ 'mingw32' / 'lib' / 'python2.7')
return {
**os.environ,
'PYTHONPATH': f'{python_lib}{os.pathsep}{python_lib / "site-packages"}'
}
return None
@property
def script_globals(self) -> Dict[str, Optional[Union[str, int]]]:
"""Global variables required by the debugger scripts."""
return {
'_PROBE_IP': self.probe_cfg.conn_str,
'_JTAG_SPEED': self.probe_cfg.speed,
'_GDB_SERVER_PORT': self.probe_cfg.server_port,
'_RESET_TYPE': self.probe_cfg.reset_type,
'_RESET_DELAY': self.probe_cfg.reset_delay,
'_REMOTE_TIMEOUT': self.probe_cfg.remote_timeout,
'_CORE_NAME': f'{self.soc_name}_{self.core_name}',
'_SOC_NAME': self.soc_name,
'_IS_LOGGING_ENABLED': False,
'_FLASH_NAME': None, # not supported
'_SECURE_TYPE': None, # not supported
'_SECURE_KEY': None, # not supported
}
def server_commands(self) -> List[str]:
"""Get launch commands to start the GTA server."""
server_exec = str(self.s32ds_path / 'S32DS' / 'tools' / 'S32Debugger'
/ 'Debugger' / 'Server' / 'gta' / 'gta')
cmd = [server_exec, '-p', str(self.probe_cfg.server_port)]
return cmd
def client_commands(self) -> List[str]:
"""Get launch commands to start the GDB client."""
if self.arch == 'arm':
client_exec_name = 'arm-none-eabi-gdb-py'
elif self.arch == 'arm64':
client_exec_name = 'aarch64-none-elf-gdb-py'
else:
raise RuntimeError(f'architecture {self.arch} not supported')
client_exec = str(self.s32ds_path / 'S32DS' / 'tools' / 'gdb-arm'
/ 'arm32-eabi' / 'bin' / client_exec_name)
cmd = [client_exec]
return cmd
def get_script(self, name: str) -> Path:
"""
Get the file path of a debugger script with the given name.
:param name: name of the script, without the SoC family name prefix
:returns: path to the script
:raises RuntimeError: if file does not exist
"""
script = (self.s32ds_path / 'S32DS' / 'tools' / 'S32Debugger' / 'Debugger' / 'scripts'
/ self.soc_family_name / f'{self.soc_family_name}_{name}.py')
if not script.exists():
raise RuntimeError(f'script not found: {script}')
return script
def do_run(self, command: str, **kwargs) -> None:
"""
Execute the given command.
:param command: command name to execute
:raises RuntimeError: if target architecture or host OS is not supported
:raises MissingProgram: if required tools are not found in the host
"""
if platform.system() not in ('Windows', 'Linux'):
raise RuntimeError(f'runner not supported on {platform.system()} systems')
if self.arch not in ('arm', 'arm64'):
raise RuntimeError(f'architecture {self.arch} not supported')
app_name = 's32ds' if platform.system() == 'Windows' else 's32ds.sh'
self.s32ds_path = Path(self.require(app_name, path=self.s32ds_path_override)).parent
if not self.probe_cfg.conn_str:
self.probe_cfg.conn_str = f's32dbg:{self.select_probe()}'
self.logger.info(f'using debug probe {self.probe_cfg.conn_str}')
if command in ('attach', 'debug'):
self.ensure_output('elf')
self.do_attach_debug(command, **kwargs)
else:
self.do_debugserver(**kwargs)
def do_attach_debug(self, command: str, **kwargs) -> None:
"""
Launch the GTA server and GDB client to start a debugging session.
:param command: command name to execute
"""
gdb_script: List[str] = []
# setup global variables required for the scripts before sourcing them
for name, val in self.script_globals.items():
gdb_script.append(f'py {name} = {repr(val)}')
# load platform-specific debugger script
if command == 'debug':
if self.start_all_cores:
startup_script = self.get_script('generic_bareboard_all_cores')
else:
startup_script = self.get_script('generic_bareboard')
else:
startup_script = self.get_script('attach')
gdb_script.append(f'source {startup_script}')
# executes the SoC and board initialization sequence
if command == 'debug':
gdb_script.append('py board_init()')
# initializes the debugger connection to the core specified
gdb_script.append('py core_init()')
gdb_script.append(f'file {Path(self.elf_file).as_posix()}')
if command == 'debug':
gdb_script.append('load')
with tempfile.TemporaryDirectory(suffix='nxp_s32dbg') as tmpdir:
gdb_cmds = Path(tmpdir) / 'runner.nxp_s32dbg'
gdb_cmds.write_text('\n'.join(gdb_script), encoding='utf-8')
self.logger.debug(gdb_cmds.read_text(encoding='utf-8'))
server_cmd = self.server_commands()
client_cmd = self.client_commands()
client_cmd.extend(['-x', gdb_cmds.as_posix()])
client_cmd.extend(self.tool_opt)
self.run_server_and_client(server_cmd, client_cmd, env=self.runtime_environment)
def do_debugserver(self, **kwargs) -> None:
"""Start the GTA server on a given port with the given extra parameters from cli."""
server_cmd = self.server_commands()
server_cmd.extend(self.tool_opt)
self.check_call(server_cmd)
``` | /content/code_sandbox/scripts/west_commands/runners/nxp_s32dbg.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,049 |
```shell
#compdef west
# Ensure this works also when being source-ed
compdef _west west
typeset -A -g _opt_args
_west_cmds() {
local -a builtin_cmds=(
'init[create a west workspace]'
'update[update projects described in west manifest]'
'list[print information about projects]'
'manifest[manage the west manifest]'
'diff["git diff" for one or more projects]'
'status["git status" for one or more projects]'
'forall[run a command in one or more local projects]'
'config[get or set config file values]'
'topdir[print the top level directory of the workspace]'
'help[get help for west or a command]'
)
local -a zephyr_ext_cmds=(
'completion[display shell completion scripts]'
'boards[display information about supported boards]'
'build[compile a Zephyr application]'
'sign[sign a Zephyr binary for bootloader chain-loading]'
'flash[flash and run a binary on a board]'
'debug[flash and interactively debug a Zephyr application]'
'debugserver[connect to board and launch a debug server]'
'attach[interactively debug a board]'
'zephyr-export[export Zephyr installation as a CMake config package]'
'spdx[create SPDX bill of materials]'
'blobs[work with binary blobs]'
)
local -a all_cmds=(${builtin_cmds} ${zephyr_ext_cmds})
if [[ -v WEST_COMP_CHECK_WORKSPACE ]]; then
west topdir &>/dev/null
if [ $? -eq 0 ]; then
_values "west command" $all_cmds
else
_values "west command" $builtin_cmds
fi
else
_values "west command" $all_cmds
fi
}
# Completion script for Zephyr's meta-tool, west
_west() {
# Global options for all commands
local -a global_opts=(
# (: * -) as exclusion list means exclude everything else
'(: * -)'{-h,--help}'[show help]'
# An exclusion list with the very option means only allow once
{-v,--verbose}'[enable verbosity]'
'(: * -)'{-V,--version}'[print version]'
'(-z --zephyr-base)'{-z,--zephyr-base}'[zephyr base folder]:zephyr base folder:_directories'
)
typeset -A opt_args
local curcontext="$curcontext" context state state_descr line
local -a orig_words
orig_words=( ${words[@]} )
_arguments -S -C \
$global_opts \
"1: :->cmds" \
"*::arg:->args" \
case "$state" in
cmds)
_west_cmds
;;
args)
_opt_args=( ${(@kv)opt_args} )
_call_function ret _west_$line[1]
;;
esac
}
__west_x()
{
west 2>/dev/null "$@"
}
_get_west_projs() {
local extra_args
[[ -v _opt_args[-z] ]] && extra_args="-z $_opt_args[-z]"
[[ -v _opt_args[--zephyr-base] ]] && extra_args="-z $_opt_args[--zephyr-base]"
_west_projs=($(__west_x $extra_args list --format={name}))
_describe 'projs' _west_projs
}
_get_west_boards() {
_west_boards=( $(__west_x boards --format='{name}|{qualifiers}') )
for i in {1..${#_west_boards[@]}}; do
local name="${_west_boards[$i]%%|*}"
local transformed_board="${_west_boards[$i]//|//}"
_west_boards[$i]="${transformed_board//,/ ${name}/}"
done
_west_boards=(${(@s/ /)_west_boards})
_describe 'boards' _west_boards
}
_west_init() {
local -a opts=(
'(-l --local)'{--mr,--manifest-rev}'[manifest revision]:manifest rev:'
{--mf,--manifest-file}'[manifest file]:manifest file:'
'(-l --local)'{-m,--manifest}'[manifest URL]:manifest URL:_directories'
'(-m --manifest --mr --manifest-rev)'{-l,--local}'[use local directory as manifest repository]:local manifest repository directory:_directories'
)
_arguments -S $opts \
"1:workspace directory:"
}
_west_update() {
local -a opts=(
'--stats[print performance stats]'
'--name-cache[name-based cache]:name cache folder:_directories'
'--path-cache[path-based cache]:path cache folder:_directories'
{-f,--fetch}'[fetch strategy]:fetch strategy:(always smart)'
{-o,--fetch-opt}'[fetch options]:fetch options:'
{-n,--narrow}'[narrow fetch]'
{-k,--keep-descendants}'[keep manifest-rev descendants checked out]'
{-r,--rebase}'[rebase checked out branch onto the new manifest-rev]'
)
_arguments -S $opts \
"1:west proj:_get_west_projs"
}
_west_list() {
local -a opts=(
{-a,--all}'[include inactive projects]'
'--manifest-path-from-yaml[print performance stats]'
{-f,--format}'[format string]:format string:'
)
_arguments -S $opts \
"1:west proj:_get_west_projs"
}
_west_manifest() {
local -a opts=(
'--resolve[resolve into single manifest]'
'--freeze[resolve into single manifest, with SHAs]'
'--validate[silently validate manifest]'
'--path[print the path to the top level manifest file]'
{-o,--out}'[output file]:output file:_files'
)
_arguments -S $opts
}
_west_diff() {
local -a opts=(
{-a,--all}'[include inactive projects]'
)
_arguments -S $opts \
"1:west proj:_get_west_projs"
}
_west_status() {
local -a opts=(
{-a,--all}'[include inactive projects]'
)
_arguments -S $opts \
"1:west proj:_get_west_projs"
}
_west_forall() {
local -a opts=(
'-c[command to execute]:command:'
{-a,--all}'[include inactive projects]'
)
_arguments -S $opts \
"1:west proj:_get_west_projs"
}
_west_config() {
local -a opts=(
{-l,--list}'[list all options and values]'
{-d,--delete}'[delete an option in one config file]'
{-D,--delete-all}"[delete an option everywhere it\'s set]"
+ '(mutex)'
'--system[system-wide file]'
'--global[global user-wide file]'
"--local[this workspace\'s file]"
)
_arguments -S $opts
}
_west_help() {
_west_cmds
}
_west_completion() {
_arguments -S "1:shell:(bash zsh fish)"
}
_west_boards() {
local -a opts=(
{-f,--format}'[format string]:format string:'
{-n,--name}'[name regex]:regex:'
'*--arch-root[Add an arch root]:arch root:_directories'
'*--board-root[Add a board root]:board root:_directories'
'*--soc-root[Add a soc root]:soc root:_directories'
)
_arguments -S $opts
}
_west_build() {
local -a opts=(
'(-b --board)'{-b,--board}'[board to build for]:board:_get_west_boards'
'(-d --build-dir)'{-d,--build-dir}'[build directory to create or use]:build dir:_directories'
'(-f --force)'{-f,--force}'[ignore errors and continue]'
'--sysbuild[create multi-domain build system]'
'--no-sysbuild[do not create multi-domain build system]'
'(-c --cmake)'{-c,--cmake}'[force a cmake run]'
'--cmake-only[just run cmake]'
'--domain[execute build tool (make or ninja) for a given domain]:domain:'
'(-t --target)'{-t,--target}'[run build system target]:target:'
'(-T --test-item)'{-T,--test-item}'[Build based on test data in .yml]:test item:'
{-o,--build-opt}'[options to pass to build tool (make or ninja)]:tool opt:'
'(-n --just-print --dry-run --recon)'{-n,--just-print,--dry-run,--recon}"[just print build commands, don't run them]"
'(-p --pristine)'{-p,--pristine}'[pristine build setting]:pristine:(auto always never)'
)
_arguments -S $opts \
"1:source_dir:_directories"
}
_west_sign() {
local -a opts=(
'(-d --build-dir)'{-d,--build-dir}'[build directory to create or use]:build dir:_directories'
'(-q --quiet)'{-q,--quiet}'[suppress non-error output]'
'(-f --force)'{-f,--force}'[ignore errors and continue]'
'(-t --tool)'{-t,--tool}'[image signing tool name]:tool:(imgtool rimage)'
'(-p --tool-path)'{-p,--tool-path}'[path to the tool]:tool path:_directories'
'(-D --tool-data)'{-D,--tool-data}'[path to tool data]:tool data path:_directories'
'(--no-bin)--bin[produce a signed bin file]'
'(--bin)--no-bin[do not produce a signed bin file]'
'(-B --sbin)'{-B,--sbin}'[signed .bin filename]:bin filename:_files'
'(--no-hex)--hex[produce a signed hex file]'
'(--hex)--no-hex[do not produce a signed hex file]'
'(-H --shex)'{-H,--shex}'[signed .hex filename]:hex filename:_files'
)
_arguments -S $opts
}
typeset -a -g _west_runner_opts=(
'(-H --context)'{-H,--context}'[print runner-specific options]'
'--board-dir[board directory]:board dir:_directories'
'(-f --file)'{-f,--file}'[path to binary]:path to binary:_files'
'(-t --file-type)'{-t,--file-type}'[type of binary]:type of binary:(hex bin elf)'
'--elf-file[path to zephyr.elf]:path to zephyr.elf:_files'
'--hex-file[path to zephyr.hex]:path to zephyr.hex:_files'
'--bin-file[path to zephyr.bin]:path to zephyr.bin:_files'
'--gdb[path to GDB]:path to GDB:_files'
'--openocd[path to openocd]:path to openocd:_files'
'--openocd-search[path to add to openocd search path]:openocd search:_directories'
)
_west_flash() {
local -a opts=(
'(-d --build-dir)'{-d,--build-dir}'[build directory to create or use]:build dir:_directories'
'(-r --runner)'{-r,--runner}'[override default runner from build-dir]:runner:'
'--skip-rebuild[do not refresh cmake dependencies first]'
'--domain[execute build tool (make or ninja) for a given domain]:domain:'
)
local -a all_opts=(${_west_runner_opts} ${opts})
_arguments -S $all_opts
}
_west_debug() {
_west_flash
}
_west_debugserver() {
_west_flash
}
_west_attach() {
_west_flash
}
_west_spdx() {
local -a opts=(
'(-i --init)'{-i,--init}'[initialize CMake file-based API]'
'(-d --build-dir)'{-d,--build-dir}'[build directory to create or use]:build dir:_directories'
'(-n --namespace-prefix)'{-n,--namespace-prefix}'[namespace prefix]:namespace prefix:'
'(-s --spdx-dir)'{-s,--spdx-dir}'[SPDX output directory]:spdx output dir:_directories'
'--analyze-includes[also analyze included header files]'
'--include-sdk[also generate SPDX document for SDK]'
)
_arguments -S $opts
}
_west_blobs() {
local -a blob_cmds=(
'list[list binary blobs]'
'fetch[fetch binary blobs]'
'clean[clean working tree of binary blobs]'
)
local line state
_arguments -S -C \
"1: :->cmds" \
"*::arg:->args" \
case "$state" in
cmds)
_values "west blob cmds" $blob_cmds
;;
args)
_opt_args=( ${(@kv)opt_args} )
_call_function ret _west_blob_$line[1]
;;
esac
}
_west_blob_list () {
local -a opts=(
{-f,--format}'[format string]:format string:'
)
_arguments -S $opts \
"1:west proj:_get_west_projs"
}
_west_blob_fetch () {
_arguments -S "1:west proj:_get_west_projs"
}
_west_blob_clean () {
_arguments -S "1:west proj:_get_west_projs"
}
# don't run the completion function when being source-ed or eval-ed
if [ "$funcstack[1]" = "_west" ]; then
_west
fi
``` | /content/code_sandbox/scripts/west_commands/completion/west-completion.zsh | shell | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,110 |
```fish
# check if we are currently in a west workspace
# this is used to filter which command to show
#
# return 0 if in west workspace
# return 1 else
function __zephyr_west_check_if_in_workspace
    # 'west topdir' succeeds only when run from inside a workspace
    if west topdir &>/dev/null
        return 0
    end
    return 1
end
# exclude the caller if one of the arguments is present in the command line
#
# return 1 if one of the arguments is present in the command line
# return 0 else
function __zephyr_west_exclude
    set -l words (commandline -opc)
    for excluded in $argv
        # string equality, same as the pairwise 'test $t = $a' comparison
        if contains -- $excluded $words
            return 1
        end
    end
    return 0
end
# function used to have a maximum number of arguments
#
# argv[1] is the maximum number of arguments
# argv[n] are the arguments to count; if not specified, all arguments after
# 'west <command>' on the command line are counted
#
# return 1 if the command line contains more than $argv[1] elements from $argv[n...]
# return 0 else
function __zephyr_west_max_args
    set -l tokens (commandline -opc)
    set -l argc (count $argv)
    set -l max $argv[1]
    set -l counter 0

    if test $argc -eq 1
        # no explicit arguments given: count everything after 'west <command>'
        if test (math (count $tokens) - 2) -ge $max
            return 1
        else
            return 0
        end
    end

    for idx in (seq 2 $argc)
        if contains $argv[idx] $tokens
            set counter (math $counter + 1)
        end
    end

    # BUGFIX: the comparison was 'if $counter -ge $max', which makes fish try
    # to run the value of $counter as a command; it must go through 'test'
    if test $counter -ge $max
        return 1
    end
    return 0
end
# alias of '__fish_complete_directories' but set the arguments to ''
# (presumably to suppress the default description label — confirm against
# the __fish_complete_directories signature)
function __zephyr_west_complete_directories
    __fish_complete_directories '' ''
end
# check if a given token is the last one in the command line
#
# return 0 if one of the given argument is the last token
# return 1 else
function __zephyr_west_is_last_token
    set -l cmd_tokens (commandline -opc)
    set -l last "$cmd_tokens[-1]"
    for candidate in $argv
        # prefix regex match, exactly as the original '"$token*"' pattern
        if string match -qr -- "$candidate*" "$last"
            return 0
        end
    end
    return 1
end
# function similar to '__fish_use_subcommand' but with special cases
#
# return 0 when no subcommand has been typed yet (so subcommand names should
# be offered), 1 otherwise
function __zephyr_west_use_subcommand
    set -l tokens (commandline -opc)
    for idx in (seq 2 (count $tokens))
        switch $tokens[$idx]
            case '-*'
                # options before the subcommand are skipped
                continue
            case '*'
                if test $idx -ge 3
                    set -l prv_idx (math $idx - 1)
                    switch $tokens[$prv_idx]
                        # this option can be placed before the subcommand and requires a folder;
                        # if we don't skip its value here, the folder would be caught as a
                        # subcommand and the subcommands would not be completed
                        case '-z' '--zephyr-base'
                            continue
                    end
                end
        end
        # a non-option token (not a -z value) is the subcommand: stop offering
        return 1
    end
    return 0
end
# function similar to '__fish_seen_subcommand_from' but with special cases
function __zephyr_west_seen_subcommand_from
    set -l cmd_tokens (commandline -opc)
    set -e cmd_tokens[1]

    # special case:
    # we don't want the command completion when doing `west help <cmd>`
    if contains -- "help" $cmd_tokens
        return 1
    end

    # succeed as soon as any candidate subcommand appears on the command line
    for candidate in $argv
        if contains -- $candidate $cmd_tokens
            return 0
        end
    end
    return 1
end
# return the list of projects
#
# honors a '-z/--zephyr-base' option already typed on the command line so the
# projects of that workspace are listed
function __zephyr_west_complete_projects
    set -l tokens (commandline -opc)
    set -l zephyr_base ""
    set -l projects

    for idx in (seq 1 (count $tokens))
        if test \("$tokens[$idx]" = "-z"\) -o \("$tokens[$idx]" = "--zephyr-base"\)
            # BUGFIX: 'set -q $tokens[...]' queried a *variable* named after the
            # token value, and 'set $zephyr_base ...' used the (empty) value as
            # the variable name — grab the option's value token directly instead
            set -l next_idx (math $idx + 1)
            if test $next_idx -le (count $tokens)
                set zephyr_base $tokens[$next_idx]
                break
            end
        end
    end

    if test "$zephyr_base" != ""
        # BUGFIX: pass option and value as separate arguments; the original
        # passed a single "-z <path>" string, which west does not understand
        set projects (west -z "$zephyr_base" list --format="{name}")
    else
        set projects (west list --format="{name}")
    end

    printf "%s\n" $projects
end
# return the list of available west commands, one "name<TAB>description" per line
#
# extension commands are only offered when run from inside a workspace
function __zephyr_west_complete_help
    # flat list of alternating "<name>" "<description>" pairs
    # NOTE(review): 'init' is described as "create a west workspace" elsewhere
    # in west's completions — consider aligning the wording
    set -l builtin_cmds "init" "create a west repository" \
        "update" "update projects described in west manifest" \
        "list" "print information about projects" \
        "manifest" "manage the west manifest" \
        "diff" '"git diff" for one or more projects' \
        "status" '"git status" for one or more projects' \
        "forall" "run a command in one or more local projects" \
        "config" "get or set config file values" \
        "topdir" "print the top level directory of the workspace" \
        "help" "get help for west or a command"
    set -l nb_builtin_cmds (count $builtin_cmds)

    set -l ext_cmds "completion" "display shell completion scripts" \
        "boards" "display information about supported boards" \
        "build" "compile a Zephyr application" \
        "sign" "sign a Zephyr binary for bootloader chain-loading" \
        "flash" "flash and run a binary on a board" \
        "debug" "flash and interactively debug a Zephyr application" \
        "debugserver" "connect to board and launch a debug server" \
        "attach" "interactively debug a board" \
        "zephyr-export" "export Zephyr installation as a CMake config package" \
        "spdx" "create SPDX bill of materials" \
        "blobs" "work with binary blobs"
    set -l nb_ext_cmds (count $ext_cmds)

    if __zephyr_west_check_if_in_workspace
        # step by 2: idx is the name, idx+1 its description
        for idx in (seq 1 2 $nb_ext_cmds)
            set -l desc_idx (math $idx + 1)
            printf "%s\n" $ext_cmds[$idx]\t"$ext_cmds[$desc_idx]"
        end
    end

    for idx in (seq 1 2 $nb_builtin_cmds)
        set -l desc_idx (math $idx + 1)
        printf "%s\n" $builtin_cmds[$idx]\t"$builtin_cmds[$desc_idx]"
    end
end
# print the path of the west workspace top directory (the one holding '.west')
#
# argv[1] (optional): directory to start the search from (defaults to $PWD)
# argv[2] (optional): 1 to fall back to $ZEPHYR_BASE when nothing is found
function __zephyr_west_topdir
    set -l cwd (pwd)
    set -l fallback 1
    if test (count $argv) -eq 2
        set cwd $argv[1]
        set fallback $argv[2]
    end

    # walk up the directory tree, looking for a '.west' folder at each level
    set -l cwd_split (string split '/' $cwd)
    while true
        set -l tmp_path (path normalize /(string join "/" $cwd_split))
        if test -d $tmp_path/.west
            echo "$tmp_path"
            return
        end
        if test -z "$tmp_path" -o $tmp_path = "/"
            break
        end
        set -e cwd_split[-1]
        set tmp_path (string join "/" $cwd_split)
    end

    # not found: retry once from ZEPHYR_BASE, disabling further fallback
    # BUGFIX: the recursion called 'west-topdir', a nonexistent command;
    # it must call this function, '__zephyr_west_topdir'
    if test $fallback -eq 1 -a -n "$ZEPHYR_BASE"
        __zephyr_west_topdir "$ZEPHYR_BASE" 0
    end
end
# print the absolute path of the workspace's manifest file
#
# parses '<topdir>/.west/config' by hand: tracks whether we are inside the
# '[manifest]' section and collects its 'path' and 'file' keys; prints
# nothing if either key is missing
function __zephyr_west_manifest_path
    set -l west_topdir (__zephyr_west_topdir)
    set -l config (cat $west_topdir/.west/config)
    set -l manifest_path ""
    set -l manifest_file ""
    set -l in_manifest_group 0

    for line in $config
        if string match -rq '^\s*\[manifest\]\s*$' $line
            set in_manifest_group 1
            continue
        else if string match -rq '^\[.*\]$' $line
            # any other section header terminates the [manifest] section
            set in_manifest_group 0
            continue
        end
        if test $in_manifest_group -eq 1
            # capture group 2 of 'string match -r' is the key's value
            set -l tmp_manifest_path (string match -r '^path\s*=\s*(\S*)\s*$' $line)[2]
            if test $status -eq 0
                set manifest_path "$tmp_manifest_path"
                continue
            end
            set -l tmp_manifest_file (string match -r '^file\s*=\s*(\S*)\s*$' $line)[2]
            if test $status -eq 0
                set manifest_file "$tmp_manifest_file"
                continue
            end
        end
    end

    if test -z "$manifest_path" -o -z "$manifest_file"
        return
    end

    echo (path normalize "$west_topdir"/"$manifest_path"/"$manifest_file")
end
# print the completion-cache directory for the given manifest path
# (keyed by the md5 of the path, stored under <topdir>/.west/fish/)
function __zephyr_west_get_cache_dir
    set -l hashed_path (echo "$argv[1]" | md5sum | string trim --chars=' -')
    echo (__zephyr_west_topdir)/.west/fish/$hashed_path
end
# print board completions as "name/qualifier[<TAB>vendor]" lines
#
# results are cached in the workspace (see __zephyr_west_get_cache_dir); the
# cache is invalidated when the manifest repository HEAD (or, for non-git
# manifests, the manifest file hash) changes
function __zephyr_west_complete_board
    set -l is_cache_valid 1 # 0: invalid; 1: valid
    set -l manifest_file (__zephyr_west_manifest_path)
    set -l manifest_dir (path dirname "$manifest_file")
    set -l cache_folder (__zephyr_west_get_cache_dir "$manifest_file")
    set -l cache_file $cache_folder/fish_boards_completion.cache

    set -l manifest_hash (git --work-tree "$manifest_dir" log -1 --pretty=format:'%H' 2> /dev/null)
    if test $status -ne 0
        # if the manifest folder is not a git repo, use the hash of the manifest file
        # BUGFIX: this hashed "$manifest_path", a variable that does not exist
        # in this function — the local variable is $manifest_file
        set manifest_hash (md5sum "$manifest_file")
    end

    if test ! -f $cache_file
        mkdir -p $cache_folder
        touch $cache_file
        set is_cache_valid 0
    else
        # the first line of the cache records the hash it was generated for
        set -l cache_manifest_hash (head -n 1 $cache_file)
        if test -z "$manifest_hash" -o -z "$cache_manifest_hash" -o "$manifest_hash" != "$cache_manifest_hash"
            set is_cache_valid 0
        end
    end

    if test $is_cache_valid -eq 0
        set -l boards (west boards --format="{name}|{qualifiers}|{vendor}" 2> /dev/null)
        if test $status -eq 0
            echo $manifest_hash > $cache_file
        end
        for board in $boards
            set -l split_b (string split "|" $board)
            set -l name $split_b[1]
            set -l qualifiers $split_b[2]
            set -l vendor $split_b[3]
            if test $vendor != "None"
                # emit one "name/qualifier<TAB>vendor" entry per qualifier
                for qualifier in (string split "," $qualifiers)
                    printf "%s\t%s\n" $name/$qualifier $vendor >> $cache_file
                end
            else
                for qualifier in (string split "," $qualifiers)
                    printf "%s\n" $name/$qualifier >> $cache_file
                end
            end
        end
    end

    # skip the hash header line
    tail -n +2 $cache_file
end
# disable file completion; if an option needs it, it should use '--force-files'
complete -c west -f

# global options (usable before the subcommand)
complete -c west -n "__zephyr_west_exclude -h --help" -o h -l help -d "show help"
complete -c west -o v -l verbose -d "enable verbosity"
complete -c west -n "__zephyr_west_exclude -V --version" -o V -l version -d "print version"
complete -c west -n "__zephyr_west_exclude -z --zephyr-base; or __zephyr_west_is_last_token -z --zephyr-base" -o z -l zephyr-base -xa "(__zephyr_west_complete_directories)" -d "zephyr base folder"

# init
complete -c west -n __zephyr_west_use_subcommand -ra init -d "create a west workspace"
complete -c west -n "__zephyr_west_seen_subcommand_from init" -ra "(__zephyr_west_complete_directories)"
complete -c west -n "__zephyr_west_seen_subcommand_from init; and __zephyr_west_exclude -l --local" -l mr -l manifest-rev -r -d "manifest revision"
complete -c west -n "__zephyr_west_seen_subcommand_from init" -l mf -l manifest-file -r -d "manifest file"
complete -c west -n "__zephyr_west_seen_subcommand_from init; and __zephyr_west_exclude -l --local" -o m -l manifest -ra "(__zephyr_west_complete_directories)" -d "manifest URL"
complete -c west -n "__zephyr_west_seen_subcommand_from init; and __zephyr_west_exclude -m --manifest --mr --manifest-rev" -o l -l local -ra "(__zephyr_west_complete_directories)" -d "use local directory as manifest repository"

# update
complete -c west -n __zephyr_west_use_subcommand -ra update -d "update projects described in west manifest"
complete -c west -n "__zephyr_west_seen_subcommand_from update" -ra "(__zephyr_west_complete_projects)"
complete -c west -n "__zephyr_west_seen_subcommand_from update" -l stats -d "print performance stats"
complete -c west -n "__zephyr_west_seen_subcommand_from update" -l name-cache -ra "(__zephyr_west_complete_directories)" -d "name-based cache"
complete -c west -n "__zephyr_west_seen_subcommand_from update" -l path-cache -ra "(__zephyr_west_complete_directories)" -d "path-based cache"
complete -c west -n "__zephyr_west_seen_subcommand_from update" -o f -l fetch -ra "always smart" -d "fetch strategy"
complete -c west -n "__zephyr_west_seen_subcommand_from update" -o o -l fetch-opt -d "fetch options"
complete -c west -n "__zephyr_west_seen_subcommand_from update" -o n -l narrow -d "narrow fetch"
complete -c west -n "__zephyr_west_seen_subcommand_from update" -o k -l keep-descendants -d "keep manifest-rev descendants checked out"
complete -c west -n "__zephyr_west_seen_subcommand_from update" -o r -l rebase -d "rebase checked out branch onto the new manifest-rev"

# list
# ('blobs' also has a 'list' subcommand, hence the extra guard)
complete -c west -n __zephyr_west_use_subcommand -ra list -d "print information about projects"
complete -c west -n "__zephyr_west_seen_subcommand_from list; and not __fish_seen_subcommand_from blobs" -ra "(__zephyr_west_complete_projects)"
complete -c west -n "__zephyr_west_seen_subcommand_from list; and not __fish_seen_subcommand_from blobs" -o a -l all -d "include inactive projects"
# NOTE(review): "print performance stats" looks copy-pasted from 'update --stats';
# the --manifest-path-from-yaml description is probably wrong — confirm
complete -c west -n "__zephyr_west_seen_subcommand_from list; and not __fish_seen_subcommand_from blobs" -l manifest-path-from-yaml -d "print performance stats"
complete -c west -n "__zephyr_west_seen_subcommand_from list; and not __fish_seen_subcommand_from blobs" -o f -l format -d "format string"
# manifest
complete -c west -n __zephyr_west_use_subcommand -ra manifest -d "manage the west manifest"
complete -c west -n "__zephyr_west_seen_subcommand_from manifest" -l resolve -d "resolve into single manifest"
complete -c west -n "__zephyr_west_seen_subcommand_from manifest" -l freeze -d "resolve into single manifest, with SHAs"
complete -c west -n "__zephyr_west_seen_subcommand_from manifest" -l validate -d "silently validate manifest"
complete -c west -n "__zephyr_west_seen_subcommand_from manifest" -l path -d "print the path to the top level manifest file"
# FIX: 'west manifest' spells this option '-o/--out', not '--output'
complete -c west -n "__zephyr_west_seen_subcommand_from manifest" -o o -l out -rF -d "output file"
# diff
complete -c west -n __zephyr_west_use_subcommand -ra diff -d '"git diff" for one or more projects'
complete -c west -n "__zephyr_west_seen_subcommand_from diff" -ra "(__zephyr_west_complete_projects)"
complete -c west -n "__zephyr_west_seen_subcommand_from diff" -o a -l all -d "include inactive projects"

# status
complete -c west -n __zephyr_west_use_subcommand -ra status -d '"git status" for one or more projects'
complete -c west -n "__zephyr_west_seen_subcommand_from status" -ra "(__zephyr_west_complete_projects)"
complete -c west -n "__zephyr_west_seen_subcommand_from status" -o a -l all -d "include inactive projects"

# forall
complete -c west -n __zephyr_west_use_subcommand -ra forall -d "run a command in one or more local projects"
complete -c west -n "__zephyr_west_seen_subcommand_from forall" -ra "(__zephyr_west_complete_projects)"
complete -c west -n "__zephyr_west_seen_subcommand_from forall" -o c -x -d "command to execute"
complete -c west -n "__zephyr_west_seen_subcommand_from forall" -o a -l all -d "include inactive projects"
complete -c west -n "__zephyr_west_seen_subcommand_from forall" -o g -l group -x -d "run command on projects in one of the group"

# config
complete -c west -n __zephyr_west_use_subcommand -ra config -d "get or set config file values"
complete -c west -n "__zephyr_west_seen_subcommand_from config" -o l -l list -d "list all options and values"
complete -c west -n "__zephyr_west_seen_subcommand_from config" -o d -l delete -d "delete an option in one config file"
complete -c west -n "__zephyr_west_seen_subcommand_from config" -o D -l delete-all -d "delete an option everywhere it's set"
complete -c west -n "__zephyr_west_seen_subcommand_from config" -l system -d "system-wide file"
complete -c west -n "__zephyr_west_seen_subcommand_from config" -l global -d "global user-wide"
complete -c west -n "__zephyr_west_seen_subcommand_from config" -l local -d "this workspace's file"

# topdir
complete -c west -n __zephyr_west_use_subcommand -a topdir -d "print the top level directory of the workspace"

# help
complete -c west -n __zephyr_west_use_subcommand -ra help -d "get help for west or a command"
complete -c west -n "__fish_seen_subcommand_from help; and __zephyr_west_max_args 1" -ra "(__zephyr_west_complete_help)"

# completion (extension command: only offered inside a workspace)
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra completion -d "display shell completion scripts"
complete -c west -n "__zephyr_west_seen_subcommand_from completion; and __zephyr_west_max_args 1" -ra "bash zsh fish"

# boards (extension command: only offered inside a workspace)
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra boards -d "display information about supported boards"
complete -c west -n "__zephyr_west_seen_subcommand_from boards" -o f -l format -d "format string"
complete -c west -n "__zephyr_west_seen_subcommand_from boards" -o n -l name -d "name regex"
complete -c west -n "__zephyr_west_seen_subcommand_from boards" -l arch-root -xa "(__zephyr_west_complete_directories)" -d "add an arch root"
complete -c west -n "__zephyr_west_seen_subcommand_from boards" -l board-root -xa "(__zephyr_west_complete_directories)" -d "add a board root"
complete -c west -n "__zephyr_west_seen_subcommand_from boards" -l soc-root -xa "(__zephyr_west_complete_directories)" -d "add a soc root"
complete -c west -n "__zephyr_west_seen_subcommand_from boards" -l board -xa "(__zephyr_west_complete_board)" -d "lookup the specific board"
complete -c west -n "__zephyr_west_seen_subcommand_from boards" -l board-dir -xa "(__zephyr_west_complete_directories)" -d "only look for boards in this directory"
# build
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra build -d "compile a Zephyr application"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -ra "(__zephyr_west_complete_directories)"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -o b -l board -xa "(__zephyr_west_complete_board)"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -o d -l build-dir -xa "(__zephyr_west_complete_directories)" -d "build directory to create or use"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -o f -l force -d "ignore errors and continue"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -l sysbuild -d "create multi-domain build system"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -l no-sysbuild -d "do not create multi-domain build system"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -o c -l cmake -d "force a cmake run"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -l domain -d "execute build tool (make or ninja) for a given domain"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -o t -l target -d "run build system target"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -o T -l test-item -d "build based on test data in .yml"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -o o -l build-opt -d "options to pass to build tool (make or ninja)"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -o n -l just-print -l dry-run -l recon -d "just print build commands, don't run them"
complete -c west -n "__zephyr_west_seen_subcommand_from build" -o p -l pristine -ra "auto always never" -d "pristine build setting"
# sign
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra sign -d "sign a Zephyr binary for bootloader chain-loading"
complete -c west -n "__zephyr_west_seen_subcommand_from sign" -o d -l build-dir -ra "(__zephyr_west_complete_directories)" -d "build directory to create or use"
complete -c west -n "__zephyr_west_seen_subcommand_from sign" -o q -l quiet -d "suppress non-error output"
complete -c west -n "__zephyr_west_seen_subcommand_from sign" -o f -l force -d "ignore errors and continue"
complete -c west -n "__zephyr_west_seen_subcommand_from sign" -o t -l tool -ra "imgtool rimage" -d "image signing tool name"
complete -c west -n "__zephyr_west_seen_subcommand_from sign" -o p -l tool-path -ra "(__zephyr_west_complete_directories)" -d "path to the tool"
complete -c west -n "__zephyr_west_seen_subcommand_from sign" -o P -l tool-data -ra "(__zephyr_west_complete_directories)" -d "path to tool data"
complete -c west -n "__zephyr_west_seen_subcommand_from sign; and __zephyr_west_exclude --no-bin" -l bin -d "produce a signed bin file"
complete -c west -n "__zephyr_west_seen_subcommand_from sign; and __zephyr_west_exclude --bin" -l no-bin -d "do not produce a signed bin file"
complete -c west -n "__zephyr_west_seen_subcommand_from sign" -o B -l sbin -rF -d "signed .bin filename"
complete -c west -n "__zephyr_west_seen_subcommand_from sign; and __zephyr_west_exclude --no-hex" -l hex -d "produce a signed hex file"
complete -c west -n "__zephyr_west_seen_subcommand_from sign; and __zephyr_west_exclude --hex" -l no-hex -d "do not produce a signed hex file"
complete -c west -n "__zephyr_west_seen_subcommand_from sign" -o H -l shex -rF -d "signed .hex filename"
# flash
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra flash -d "flash and run a binary on a board"
# debug
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra debug -d "flash and interactively debug a Zephyr application"
# debugserver
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra debugserver -d "connect to board and launch a debug server"
# attach
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra attach -d "interactively debug a board"
## flash, debug, debugserver, attach
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -o d -l build-dir -ra "(__zephyr_west_complete_directories)" -d "build directory to create or use"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -o r -l runner -r -d "override default runner from build-dir"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -l skip-rebuild -d "do not refresh cmake dependencies first"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -l domain -r -d "execute build tool (make or ninja) for a given domain"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -o H -l context -d "print runner-specific options"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -l board-dir -ra "(__zephyr_west_complete_directories)" -d "board directory"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -o f -l file -Fr -d "path to binary"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -o t -l file-type -ra "hex bin elf" -d "type of binary"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -l elf-file -rka "(__fish_complete_suffix .elf)" -d "path to zephyr.elf"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -l hex-file -rka "(__fish_complete_suffix .hex)" -d "path to zephyr.hex"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -l bin-file -rka "(__fish_complete_suffix .bin)" -d "path to zephyr.bin"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -l gdb -Fr -d "path to GDB"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -l openocd -Fr -d "path to openocd"
complete -c west -n "__zephyr_west_seen_subcommand_from flash debug debugserver attach" -l openocd-search -ra "(__zephyr_west_complete_directories)" -d "path to add to openocd search path"
# zephyr-export
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra zephyr-export -d "export Zephyr installation as a CMake config package"
# spdx
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra spdx -d "create SPDX bill of materials"
complete -c west -n "__zephyr_west_seen_subcommand_from spdx" -o i -l init -d "initialize CMake file-based API"
complete -c west -n "__zephyr_west_seen_subcommand_from spdx" -o d -l build-dir -ra "(__zephyr_west_complete_directories)" -d "build directory to create or use"
complete -c west -n "__zephyr_west_seen_subcommand_from spdx" -o n -l namespace-prefix -rf -d "namespace prefix"
complete -c west -n "__zephyr_west_seen_subcommand_from spdx" -o s -l spdx-dir -ra "(__zephyr_west_complete_directories)" -d "SPDX output directory"
complete -c west -n "__zephyr_west_seen_subcommand_from spdx" -l analyze-includes -d "also analyze included header files"
complete -c west -n "__zephyr_west_seen_subcommand_from spdx" -l include-sdk -d "also generate SPDX document for SDK"
# blobs
complete -c west -n "__zephyr_west_use_subcommand; and __zephyr_west_check_if_in_workspace" -ra blobs -d "work with binary blobs"
complete -c west -n "__zephyr_west_seen_subcommand_from blobs; and not __fish_seen_subcommand_from list fetch clean" -ra "list\t'list binary blobs' fetch\t'fetch binary blobs' clean\t'clean working tree of binary blobs'"
complete -c west -n "__zephyr_west_seen_subcommand_from blobs; and __fish_seen_subcommand_from list fetch clean" -ra "(__zephyr_west_complete_projects)"
complete -c west -n "__zephyr_west_seen_subcommand_from blobs; and not __fish_seen_subcommand_from fetch clean" -o f -l format -r -d "format string"
``` | /content/code_sandbox/scripts/west_commands/completion/west-completion.fish | fish | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 7,194 |
```unknown
#!/usr/bin/env python3
#
#
# diffconfig - a tool to compare .config files.
#
# originally written in 2006 by Matt Mackall
# (at least, this was in his bloatwatch source code)
# last worked on 2008 by Tim Bird
#
import sys, os
def usage():
    """Print the diffconfig usage text to stdout, then exit with status 0."""
    help_text = """Usage: diffconfig [-h] [-m] [<config1> <config2>]
Diffconfig is a simple utility for comparing two .config files.
Using standard diff to compare .config files often includes extraneous and
distracting information. This utility produces sorted output with only the
changes in configuration values between the two files.
Added and removed items are shown with a leading plus or minus, respectively.
Changed items show the old and new values on a single line.
If -m is specified, then output will be in "merge" style, which has the
changed and new values in kernel config option format.
If no config files are specified, .config and .config.old are used.
Example usage:
$ diffconfig .config config-with-some-changes
-EXT2_FS_XATTR n
-EXT2_FS_XIP n
CRAMFS n -> y
EXT2_FS y -> n
LOG_BUF_SHIFT 14 -> 16
PRINTK_TIME n -> y
"""
    print(help_text)
    sys.exit(0)
# returns a dictionary of name/value pairs for config items in the file
def readconfig(config_file):
    """Parse an iterable of .config lines into a {name: value} dict.

    Set options appear as ``CONFIG_<name>=<value>`` and unset options as
    ``# CONFIG_<name> is not set``; the latter are recorded with value "n".
    The ``CONFIG_`` prefix is stripped from the returned keys.
    """
    d = {}
    for line in config_file:
        # Strip only the newline: the old ``line[:-1]`` unconditionally
        # dropped the last character, corrupting the value on a final line
        # that has no trailing newline.
        line = line.rstrip("\n")
        if line.startswith("CONFIG_"):
            name, val = line[7:].split("=", 1)
            d[name] = val
        if line.endswith(" is not set"):
            # "# CONFIG_" is 9 chars; the option name sits between that
            # prefix and the " is not set" suffix.
            d[line[9:-11]] = "n"
    return d
def print_config(op, config, value, new_value):
    """Emit one line describing a single config difference.

    In merge style, prints the option in kernel .config syntax (options with
    no new value are skipped).  Otherwise prints a removed ("-"), added
    ("+"), or changed ("old -> new") entry.
    """
    global merge_style
    if merge_style:
        if not new_value:
            return
        if new_value == "n":
            print("# CONFIG_%s is not set" % config)
        else:
            print("CONFIG_%s=%s" % (config, new_value))
        return
    if op == "-":
        print("-%s %s" % (config, value))
    elif op == "+":
        print("+%s %s" % (config, new_value))
    else:
        print(" %s %s -> %s" % (config, value, new_value))
def main():
    """Compare two .config files and print their differences, sorted by name.

    With no file arguments, compares .config.old against .config (honoring
    the KBUILD_OUTPUT build directory).  Sets the module-global merge_style
    flag from the -m option before printing.
    """
    global merge_style

    # parse command line args
    if ("-h" in sys.argv or "--help" in sys.argv):
        usage()

    merge_style = 0
    if "-m" in sys.argv:
        merge_style = 1
        sys.argv.remove("-m")

    argc = len(sys.argv)
    if not (argc == 1 or argc == 3):
        print("Error: incorrect number of arguments or unrecognized option")
        usage()

    if argc == 1:
        # if no filenames given, assume .config and .config.old
        build_dir = ""
        if "KBUILD_OUTPUT" in os.environ:
            build_dir = os.environ["KBUILD_OUTPUT"] + "/"
        configa_filename = build_dir + ".config.old"
        configb_filename = build_dir + ".config"
    else:
        configa_filename = sys.argv[1]
        configb_filename = sys.argv[2]

    try:
        # "with" ensures the descriptors are closed; the original leaked
        # both open file objects.
        with open(configa_filename) as fa:
            a = readconfig(fa)
        with open(configb_filename) as fb:
            b = readconfig(fb)
    except OSError as e:
        # IOError has been an alias of OSError since Python 3.3 (PEP 3151);
        # "except X as e" replaces the archaic sys.exc_info() dance.
        print("I/O error[%s]: %s\n" % (e.args[0], e.args[1]))
        usage()

    # print items in a but not b (accumulate, sort and print)
    old = sorted(config for config in a if config not in b)
    for config in old:
        print_config("-", config, a[config], None)
        del a[config]

    # print items that changed (accumulate, sort, and print)
    changed = []
    for config in a:
        if a[config] != b[config]:
            changed.append(config)
        else:
            del b[config]
    changed.sort()
    for config in changed:
        print_config("->", config, a[config], b[config])
        del b[config]

    # now print items in b but not in a
    # (items from b that were in a were removed above)
    for config in sorted(b):
        print_config("+", config, None, b[config])


# Guard the entry point so importing this module (e.g. for testing) does
# not immediately run the comparison.
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/kconfig/diffconfig | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 988 |
```shell
# Bash auto-completion for west subcommands and flags. To initialize, run
#
# source west-completion.bash
#
# To make it persistent, add it to e.g. your .bashrc.
# Remember the user's extglob setting so it can be restored at the end of
# this file, then enable extglob (required by the option-pattern case
# statements used throughout the completion functions).
__west_previous_extglob_setting=$(shopt -p extglob)
shopt -s extglob
# The following function is based on code from:
#
# bash_completion - programmable completion functions for bash 3.2+
#
# 2009-2010, Bash Completion Maintainers
# <bash-completion-devel@lists.alioth.debian.org>
#
# This program is free software; you can redistribute it and/or modify
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#
# along with this program; if not, see <path_to_url
#
# The latest version of this software can be obtained here:
#
# path_to_url
#
# RELEASE: 2.x
# This function can be used to access a tokenized list of words
# on the command line:
#
# __git_reassemble_comp_words_by_ref '=:'
# if test "${words_[cword_-1]}" = -w
# then
# ...
# fi
#
# The argument should be a collection of characters from the list of
# word completion separators (COMP_WORDBREAKS) to treat as ordinary
# characters.
#
# This is roughly equivalent to going back in time and setting
# COMP_WORDBREAKS to exclude those characters. The intent is to
# make option types like --date=<type> and <rev>:<path> easy to
# recognize by treating each shell word as a single token.
#
# It is best not to set COMP_WORDBREAKS directly because the value is
# shared with other completion scripts. By the time the completion
# function gets called, COMP_WORDS has already been populated so local
# changes to COMP_WORDBREAKS have no effect.
#
# Output: words_, cword_, cur_.
__west_reassemble_comp_words_by_ref()
{
local exclude i j first
# Which word separators to exclude?
exclude="${1//[^$COMP_WORDBREAKS]}"
cword_=$COMP_CWORD
if [ -z "$exclude" ]; then
words_=("${COMP_WORDS[@]}")
return
fi
# List of word completion separators has shrunk;
# re-assemble words to complete.
for ((i=0, j=0; i < ${#COMP_WORDS[@]}; i++, j++)); do
# Append each nonempty word consisting of just
# word separator characters to the current word.
first=t
while
[ $i -gt 0 ] &&
[ -n "${COMP_WORDS[$i]}" ] &&
# word consists of excluded word separators
[ "${COMP_WORDS[$i]//[^$exclude]}" = "${COMP_WORDS[$i]}" ]
do
# Attach to the previous token,
# unless the previous token is the command name.
if [ $j -ge 2 ] && [ -n "$first" ]; then
((j--))
fi
first=
words_[$j]=${words_[j]}${COMP_WORDS[i]}
if [ $i = $COMP_CWORD ]; then
cword_=$j
fi
if (($i < ${#COMP_WORDS[@]} - 1)); then
((i++))
else
# Done.
return
fi
done
words_[$j]=${words_[j]}${COMP_WORDS[i]}
if [ $i = $COMP_CWORD ]; then
cword_=$j
fi
done
}
if ! type _get_comp_words_by_ref >/dev/null 2>&1; then
_get_comp_words_by_ref ()
{
local exclude cur_ words_ cword_
if [ "$1" = "-n" ]; then
exclude=$2
shift 2
fi
__west_reassemble_comp_words_by_ref "$exclude"
cur_=${words_[cword_]}
while [ $# -gt 0 ]; do
case "$1" in
cur)
cur=$cur_
;;
prev)
prev=${words_[$cword_-1]}
;;
words)
words=("${words_[@]}")
;;
cword)
cword=$cword_
;;
esac
shift
done
}
fi
if ! type _tilde >/dev/null 2>&1; then
# Perform tilde (~) completion
# @return True (0) if completion needs further processing,
# False (> 0) if tilde is followed by a valid username, completions
# are put in COMPREPLY and no further processing is necessary.
_tilde()
{
local result=0
if [[ $1 == \~* && $1 != */* ]]; then
# Try generate ~username completions
COMPREPLY=( $( compgen -P '~' -u -- "${1#\~}" ) )
result=${#COMPREPLY[@]}
# 2>/dev/null for direct invocation, e.g. in the _tilde unit test
[[ $result -gt 0 ]] && compopt -o filenames 2>/dev/null
fi
return $result
}
fi
if ! type _quote_readline_by_ref >/dev/null 2>&1; then
# This function quotes the argument in a way so that readline dequoting
# results in the original argument. This is necessary for at least
# `compgen' which requires its arguments quoted/escaped:
#
# $ ls "a'b/"
# c
# $ compgen -f "a'b/" # Wrong, doesn't return output
# $ compgen -f "a\'b/" # Good
# a\'b/c
#
# See also:
# - path_to_url
# - path_to_url
# debian.org/msg01944.html
# @param $1 Argument to quote
# @param $2 Name of variable to return result to
_quote_readline_by_ref()
{
if [ -z "$1" ]; then
# avoid quoting if empty
printf -v $2 %s "$1"
elif [[ $1 == \'* ]]; then
# Leave out first character
printf -v $2 %s "${1:1}"
elif [[ $1 == \~* ]]; then
# avoid escaping first ~
printf -v $2 \~%q "${1:1}"
else
printf -v $2 %q "$1"
fi
# Replace double escaping ( \\ ) by single ( \ )
# This happens always when argument is already escaped at cmdline,
# and passed to this function as e.g.: file\ with\ spaces
[[ ${!2} == *\\* ]] && printf -v $2 %s "${1//\\\\/\\}"
# If result becomes quoted like this: $'string', re-evaluate in order to
# drop the additional quoting. See also: path_to_url
# bash-completion-devel@lists.alioth.debian.org/msg01942.html
[[ ${!2} == \$* ]] && eval $2=${!2}
} # _quote_readline_by_ref()
fi
# This function turns on "-o filenames" behavior dynamically. It is present
# for bash < 4 reasons. See path_to_url#64 for info about
# the bash < 4 compgen hack.
_compopt_o_filenames()
{
# We test for compopt availability first because directly invoking it on
# bash < 4 at this point may cause terminal echo to be turned off for some
# reason, see path_to_url for more info.
type compopt &>/dev/null && compopt -o filenames 2>/dev/null || \
compgen -f /non-existing-dir/ >/dev/null
}
if ! type _filedir >/dev/null 2>&1; then
# This function performs file and directory completion. It's better than
# simply using 'compgen -f', because it honours spaces in filenames.
# @param $1 If `-d', complete only on directories. Otherwise filter/pick only
# completions with `.$1' and the uppercase version of it as file
# extension.
#
_filedir()
{
local IFS=$'\n'
_tilde "$cur" || return
local -a toks
local x tmp
x=$( compgen -d -- "$cur" ) &&
while read -r tmp; do
toks+=( "$tmp" )
done <<< "$x"
if [[ "$1" != -d ]]; then
local quoted
_quote_readline_by_ref "$cur" quoted
# Munge xspec to contain uppercase version too
# path_to_url
local xspec=${1:+"!*.@($1|${1^^})"}
x=$( compgen -f -X "$xspec" -- $quoted ) &&
while read -r tmp; do
toks+=( "$tmp" )
done <<< "$x"
# Try without filter if it failed to produce anything and configured to
[[ -n ${COMP_FILEDIR_FALLBACK:-} && -n "$1" && ${#toks[@]} -lt 1 ]] && \
x=$( compgen -f -- $quoted ) &&
while read -r tmp; do
toks+=( "$tmp" )
done <<< "$x"
fi
if [[ ${#toks[@]} -ne 0 ]]; then
# 2>/dev/null for direct invocation, e.g. in the _filedir unit test
_compopt_o_filenames
COMPREPLY+=( "${toks[@]}" )
fi
} # _filedir()
fi
# Misc helpers taken from Docker:
# path_to_url
# __west_pos_first_nonflag finds the position of the first word that is neither
# option nor an option's argument. If there are options that require arguments,
# you should pass a glob describing those options, e.g. "--option1|-o|--option2"
# Use this function to restrict completions to exact positions after the argument list.
__west_pos_first_nonflag()
{
local argument_flags=$1
local counter=$((${subcommand_pos:-${command_pos}} + 1))
while [ "$counter" -le "$cword" ]; do
if [ -n "$argument_flags" ] && eval "case '${words[$counter]}' in $argument_flags) true ;; *) false ;; esac"; then
(( counter++ ))
# eat "=" in case of --option=arg syntax
[ "${words[$counter]}" = "=" ] && (( counter++ ))
else
case "${words[$counter]}" in
-*)
;;
*)
break
;;
esac
fi
# Bash splits words at "=", retaining "=" as a word, examples:
# "--debug=false" => 3 words, "--log-opt syslog-facility=daemon" => 4 words
while [ "${words[$counter + 1]}" = "=" ] ; do
counter=$(( counter + 2))
done
(( counter++ ))
done
echo $counter
}
# __west_map_key_of_current_option returns `key` if we are currently completing the
# value of a map option (`key=value`) which matches the extglob given as an argument.
# This function is needed for key-specific completions.
__west_map_key_of_current_option()
{
local glob="$1"
local key glob_pos
if [ "$cur" = "=" ] ; then # key= case
key="$prev"
glob_pos=$((cword - 2))
elif [[ $cur == *=* ]] ; then # key=value case (OSX)
key=${cur%=*}
glob_pos=$((cword - 1))
elif [ "$prev" = "=" ] ; then
key=${words[$cword - 2]} # key=value case
glob_pos=$((cword - 3))
else
return
fi
[ "${words[$glob_pos]}" = "=" ] && ((glob_pos--)) # --option=key=value syntax
[[ ${words[$glob_pos]} == @($glob) ]] && echo "$key"
}
# __west_value_of_option returns the value of the first option matching `option_glob`.
# Valid values for `option_glob` are option names like `--log-level` and globs like
# `--log-level|-l`
# Only positions between the command and the current word are considered.
__west_value_of_option()
{
local option_extglob=$(__west_to_extglob "$1")
local counter=$((command_pos + 1))
while [ "$counter" -lt "$cword" ]; do
case ${words[$counter]} in
$option_extglob )
echo "${words[$counter + 1]}"
break
;;
esac
(( counter++ ))
done
}
# __west_to_alternatives transforms a multiline list of strings into a single line
# string with the words separated by `|`.
# This is used to prepare arguments to __west_pos_first_nonflag().
__west_to_alternatives()
{
	# Word-split the argument with the default IFS first, then join the
	# resulting words with '|' via the array expansion under IFS='|'.
	local words=( $1 )
	local IFS='|'
	local joined="${words[*]}"
	echo "$joined"
}
# __west_to_extglob transforms a multiline list of options into an extglob pattern
# suitable for use in case statements.
__west_to_extglob()
{
	# Wrap the '|'-joined alternatives in the @(...) extglob form.
	echo "@($(__west_to_alternatives "$1"))"
}
__set_comp_dirs()
{
	# Directory-only completion; delegates to bash-completion's _filedir
	# (or the fallback defined above when bash-completion is absent).
	_filedir -d
}

__set_comp_files()
{
	# File and directory completion; delegates to _filedir.
	_filedir
}
# Sets completions for $cur, from the possibilities in $1..n
__set_comp()
{
	# "$*" joins every candidate word into a single -W wordlist argument
	# (equivalent to the "${*:1}" form).
	COMPREPLY=( $(compgen -W "$*" -- "$cur") )
}
__west_x()
{
	# Run west with stderr discarded, so errors (e.g. not in a workspace)
	# do not pollute the completion output.
	west 2>/dev/null "$@"
}
__set_comp_west_projs()
{
	# Complete with the project names reported by "west list".
	__set_comp "$(__west_x list --format={name} "$@")"
}
__set_comp_west_boards()
{
	# Each entry from "west boards" is "name|qualifiers"; rewrite it so the
	# qualifiers become "name/qualifier" completion candidates.
	boards=( $(__west_x boards --format='{name}|{qualifiers}' "$@") )
	for i in ${!boards[@]}; do
		name="${boards[$i]%%|*}"
		# Turn the '|' separator into '/' ...
		transformed_board="${boards[$i]//|//}"
		# ...then expand comma-separated qualifiers, prefixing each with
		# "name/" (assumes qualifiers are comma-separated — matches the
		# {qualifiers} format string above).
		boards[$i]="${transformed_board//,/\ ${name}\/}"
	done
	__set_comp ${boards[@]}
}
__set_comp_west_shields()
{
	# Complete with the shield names reported by "west shields".
	__set_comp "$(__west_x shields "$@")"
}
__comp_west_west()
{
case "$prev" in
--zephyr-base|-z)
__set_comp_dirs
return
;;
# We don't know how to autocomplete any others
$(__west_to_extglob "$global_args_opts") )
return
;;
esac
case "$cur" in
-*)
__set_comp $global_bool_opts $global_args_opts
;;
*)
local counter=$( __west_pos_first_nonflag "$(__west_to_extglob "$global_args_opts")" )
if [ "$cword" -eq "$counter" ]; then
__set_comp ${cmds[*]}
fi
;;
esac
}
__comp_west_init()
{
local dir_opts="
--manifest -m
--local -l
"
local bool_opts="
--manifest-rev --mr
--manifest-file --mf
"
all_opts="$dir_opts $bool_opts"
case "$prev" in
$(__west_to_extglob "$dir_opts") )
__set_comp_dirs
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
esac
}
__comp_west_update()
{
local bool_opts="
--stats
--narrow -n
--keep-descendants -k
--rebase -r
"
local dir_opts="
--name-cache
--path-cache
"
local other_opts="
--fetch -f
--fetch-opt -o
"
all_opts="$dir_opts $bool_opts $other_opts"
case "$prev" in
# We don't know how to autocomplete those
$(__west_to_extglob "$other_opts") )
return
;;
$(__west_to_extglob "$dir_opts") )
__set_comp_dirs
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
*)
__set_comp_west_projs
;;
esac
}
__comp_west_list()
{
local other_opts="
--format -f
"
local bool_opts="
--all -a
"
all_opts="$other_opts $bool_opts"
case "$prev" in
# We don't know how to autocomplete those
$(__west_to_extglob "$other_opts") )
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
*)
__set_comp_west_projs
;;
esac
}
__comp_west_manifest()
{
local bool_opts="
--resolve
--freeze
--validate
--path
"
local file_opts="
--out -o
"
all_opts="$bool_opts $file_opts"
case "$prev" in
$(__west_to_extglob "$file_opts") )
__set_comp_files
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
esac
}
__comp_west_diff()
{
	# Completion for "west diff": only boolean flags plus project names.
	local bool_opts="
		--all -a
	"

	case "$cur" in
		-*)
			# Completing an option
			__set_comp $bool_opts
			;;
		*)
			# Positional arguments are manifest project names
			__set_comp_west_projs
			;;
	esac
}
__comp_west_status()
{
	# Completion for "west status": mirrors __comp_west_diff.
	local bool_opts="
		--all -a
	"

	case "$cur" in
		-*)
			# Completing an option
			__set_comp $bool_opts
			;;
		*)
			# Positional arguments are manifest project names
			__set_comp_west_projs
			;;
	esac
}
__comp_west_forall()
{
local bool_opts="
--all -a
"
local other_opts="
-c
"
all_opts="$bool_opts $other_opts"
case "$prev" in
# We don't know how to autocomplete those
$(__west_to_extglob "$other_opts") )
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
*)
__set_comp_west_projs
;;
esac
}
__comp_west_config()
{
	# Completion for "west config": only flag completion is offered;
	# option names/values themselves are free-form.
	local bool_opts="
		--list -l
		--delete -d
		--delete-all -D
		--global
		--local
		--system
	"

	case "$cur" in
		-*)
			__set_comp $bool_opts
			;;
	esac
}
__comp_west_help()
{
	# Completion for "west help": the single positional argument is any
	# west command name.
	case "$cur" in
		*)
			local counter=$( __west_pos_first_nonflag "$(__west_to_extglob "$global_args_opts")" )
			# Offer command names only at the first positional slot.
			if [ "$cword" -eq "$counter" ]; then
				__set_comp ${cmds[*]}
			fi
			;;
	esac
}
# Zephyr extension commands
__comp_west_completion()
{
	# Completion for "west completion": the single positional argument is
	# the target shell.
	case "$cur" in
		*)
			local counter=$( __west_pos_first_nonflag "$(__west_to_extglob "$global_args_opts")" )
			# Offer shell names only at the first positional slot.
			if [ "$cword" -eq "$counter" ]; then
				__set_comp "bash zsh fish"
			fi
			;;
	esac
}
__comp_west_boards()
{
local other_opts="
--format -f
--name -n
"
local dir_opts="
--arch-root
--board-root
--soc-root
"
all_opts="$dir_opts $other_opts"
case "$prev" in
$(__west_to_extglob "$other_opts") )
# We don't know how to autocomplete these.
return
;;
$(__west_to_extglob "$dir_opts") )
__set_comp_dirs
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
esac
}
__comp_west_shields()
{
local other_opts="
--format -f
--name -n
"
local dir_opts="
--board-root
"
all_opts="$dir_opts $other_opts"
case "$prev" in
$(__west_to_extglob "$other_opts") )
# We don't know how to autocomplete these.
return
;;
$(__west_to_extglob "$dir_opts") )
__set_comp_dirs
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
esac
}
__comp_west_build()
{
local bool_opts="
--cmake -c
--cmake-only
-n --just-print --dry-run --recon
--force -f
--sysbuild
--no-sysbuild
"
local special_opts="
--board -b
--snippet -S
--shield
--pristine -p
"
local dir_opts="
--build-dir -d
"
local other_opts="
--target -t
--test-item -T
--build-opt -o
"
all_opts="$bool_opts $special_opts $dir_opts $other_opts"
case "$prev" in
--board|-b)
__set_comp_west_boards
return
;;
--shield)
__set_comp_west_shields
return
;;
--pristine|-p)
__set_comp "auto always never"
return
;;
$(__west_to_extglob "$dir_opts") )
__set_comp_dirs
return
;;
# We don't know how to autocomplete those
$(__west_to_extglob "$other_opts") )
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
*)
__set_comp_dirs
;;
esac
}
__comp_west_sign()
{
local bool_opts="
--quiet -q
--force -f
--bin --no-bin
--hex --no-hex
"
local special_opts="
--tool -t
"
local dir_opts="
--build-dir -d
--tool-path -p
--tool-data -D
"
local file_opts="
--sbin -B
--shex -H
"
all_opts="$bool_opts $special_opts $dir_opts $file_opts"
case "$prev" in
$(__west_to_extglob "$dir_opts") )
__set_comp_dirs
return
;;
--tool|-t)
__set_comp "imgtool rimage"
return
;;
$(__west_to_extglob "$file_opts") )
__set_comp_files
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
esac
}
__comp_west_runner_cmd()
{
# Common arguments for runners
local bool_opts="
--context -H
--skip-rebuild
"
local dir_opts="
--board-dir
--openocd-search
--build-dir -d
"
local file_opts="
--file -f
--file-type -t
--elf-file
--hex-file
--bin-file
--gdb
--openocd
"
local other_opts="
--runner -r
--domain
--dev-id -i
"
all_opts="$bool_opts $other_opts $dir_opts $file_opts"
case "$prev" in
$(__west_to_extglob "$dir_opts") )
__set_comp_dirs
return
;;
$(__west_to_extglob "$file_opts") )
__set_comp_files
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
esac
}
# flash/debug/debugserver/attach all share the common runner options
# handled by __comp_west_runner_cmd above.
__comp_west_flash()
{
	__comp_west_runner_cmd
}

__comp_west_debug()
{
	__comp_west_runner_cmd
}

__comp_west_debugserver()
{
	__comp_west_runner_cmd
}

__comp_west_attach()
{
	__comp_west_runner_cmd
}
__comp_west_spdx()
{
local bool_opts="
--init -i
--analyze-includes
--include-sdk
"
local dir_opts="
--build-dir -d
--namespace-prefix -n
--spdx-dir -s
"
local other_opts="
--namespace-prefix -n
"
all_opts="$bool_opts $other_opts $dir_opts"
case "$prev" in
$(__west_to_extglob "$dir_opts") )
__set_comp_dirs
return
;;
# We don't know how to autocomplete those
$(__west_to_extglob "$other_opts") )
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
esac
}
__comp_west_blobs()
{
local other_opts="
--format -f
"
case "$prev" in
# We don't know how to autocomplete those
$(__west_to_extglob "$other_opts") )
return
;;
blobs)
__set_comp "list fetch clean"
return
;;
esac
case "$cur" in
-*)
__set_comp $other_opts
;;
*)
__set_comp_west_projs
;;
esac
}
__comp_west_twister()
{
local bool_opts="
--aggressive-no-clean
--all -l
--all-deltas -D
--allow-installed-plugin
--build-only -b
--clobber-output -c
--cmake-only
--coverage -C
--create-rom-ram-report
--detailed-skipped-report
--detailed-test-id
--device-flash-with-test
--device-testing
--disable-suite-name-check
--disable-unrecognized-section-test
--disable-warnings-as-errors -W
--dry-run -y
--emulation-only
--enable-asan
--enable-coverage
--enable-lsan
--enable-size-report
--enable-slow -S
--enable-slow-only
--enable-ubsan
--enable-valgrind
--flash-before
--footprint-from-buildlog
--force-color
--force-platform -K
--force-toolchain
--ignore-platform-key
--inline-logs -i
--integration -G
--last-metrics -m
--list-tags
--list-tests
--make -k
--ninja -N
--no-clean -n
--no-detailed-test-id
--no-update -u
--only-failed -f
--overflow-as-errors
--persistent-hardware-map
--platform-reports
--prep-artifacts-for-testing
--quarantine-verify
--retry-build-errors
--short-build-path
--show-footprint
--shuffle-tests
--test-only
--test-tree
--timestamps
--verbose -v
"
local dir_opts="
--alt-config-root
--board-root -A
--coverage-basedir
--outdir -O
--report-dir -o
--testsuite-root -T
"
local file_opts="
--compare-report
--device-serial
--device-serial-pty
--gcov-tool
--generate-hardware-map
--hardware-map
--load-tests -F
--log-file
--package-artifacts
--pre-script
--quarantine-list
--save-tests -E
--size -z
--test-config
"
local special_opts="
--coverage-platform
--coverage-tool
--exclude-platform -P
--filter
--platform -p
--runtime-artifact-cleanup -M
"
local other_opts="
--arch -a
--coverage-formats
--device-flash-timeout
--device-serial-baud
--exclude-tag -e
--extra-args -x
--fixture -X
--footprint-threshold -H
--jobs -j
--level
--pytest-args
--report-name
--report-suffix
--retry-failed
--retry-interval
--scenario --test -s
--seed
--shuffle-tests-seed
--sub-test
--subset -B
--tag -t
--timeout-multiplier
--vendor
--west-flash
--west-runner
"
all_opts="$bool_opts $dir_opts $file_opts $special_opts $other_opts"
case "$prev" in
--platform|-p|--exclude-platform|-P|--coverage-platform)
__set_comp_west_boards
return
;;
--coverage-tool)
__set_comp "gcovr lcov"
return
;;
--filter)
__set_comp "buildable runnable"
return
;;
--runtime-artifact-cleanup|-M)
__set_comp "all pass"
return
;;
$(__west_to_extglob "$dir_opts") )
__set_comp_dirs
return
;;
$(__west_to_extglob "$file_opts") )
__set_comp_files
return
;;
# We don't know how to autocomplete those
$(__west_to_extglob "$other_opts") )
return
;;
esac
case "$cur" in
-*)
__set_comp $all_opts
;;
esac
}
# Top-level completion entry point for 'west' (registered via 'complete'
# below). Determines which west subcommand, if any, appears on the command
# line so far, then dispatches to the matching __comp_west_<command>
# completion function when one is defined.
__comp_west()
{
	# Save the user's extglob setting so it can be restored on exit;
	# extglob is needed for the $(...)-generated case patterns below
	local previous_extglob_setting=$(shopt -p extglob)
	shopt -s extglob

	# Reset to default, to make sure compgen works properly
	local IFS=$' \t\n'

	# Commands built into west itself
	local builtin_cmds=(
		init
		update
		list
		manifest
		diff
		status
		forall
		config
		topdir
		help
	)

	# Extension commands provided by the Zephyr repository
	local zephyr_ext_cmds=(
		completion
		boards
		shields
		build
		sign
		flash
		debug
		debugserver
		attach
		zephyr-export
		spdx
		blobs
		twister
	)

	local cmds=(${builtin_cmds[*]} ${zephyr_ext_cmds[*]})

	# Global options for all commands
	local global_bool_opts="
		--help -h
		--verbose -v
		--version -V
	"
	local global_args_opts="
		--zephyr-base -z
	"

	COMPREPLY=()
	local cur words cword prev
	# _get_comp_words_by_ref comes from the bash-completion package;
	# '-n :' keeps ':' out of the word-break characters
	_get_comp_words_by_ref -n : cur words cword prev

	# Walk the words typed so far, skipping global options (and the
	# argument of options that take one), to find the subcommand
	local command='west' command_pos=0
	local counter=1
	while [ "$counter" -lt "$cword" ]; do
		case "${words[$counter]}" in
			west)
				return 0
				;;
			# Global option that takes an argument: skip the argument too
			$(__west_to_extglob "$global_args_opts") )
				(( counter++ ))
				;;
			-*)
				;;
			=)
				(( counter++ ))
				;;
			*)
				# First non-option word is the subcommand
				command="${words[$counter]}"
				command_pos=$counter
				break
				;;
		esac
		(( counter++ ))
	done

	# Construct the function name to be called
	local completions_func=__comp_west_${command//-/_}
	#echo "comp_func: ${completions_func}"
	declare -F $completions_func >/dev/null && $completions_func

	# Restore the user's extglob setting
	eval "$previous_extglob_setting"

	return 0
}
# Restore the extglob setting saved at the top of the file, drop the
# temporary variable, and register the completion handler for 'west'
eval "$__west_previous_extglob_setting"
unset __west_previous_extglob_setting
complete -F __comp_west west
``` | /content/code_sandbox/scripts/west_commands/completion/west-completion.bash | shell | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 7,498 |
```python
#!/usr/bin/env python3
"""
Linter for the Zephyr Kconfig files. Pass --help to see
available checks. By default, all checks are enabled.
Some of the checks rely on heuristics and can get tripped up
by things like preprocessor magic, so manual checking is
still needed. 'git grep' is handy.
Requires west, because the checks need to see Kconfig files
and source code from modules.
"""
import argparse
import os
import re
import shlex
import subprocess
import sys
import tempfile
TOP_DIR = os.path.join(os.path.dirname(__file__), "..", "..")
sys.path.insert(0, os.path.join(TOP_DIR, "scripts", "kconfig"))
import kconfiglib
def main():
    """Entry point: run the selected Kconfig lint checks (all by default)."""
    init_kconfig()
    args = parse_args()

    # args.checks is None when no check flags were passed; fall back to the
    # full set of checks in that case
    selected = args.checks or (check_always_n,
                               check_unused,
                               check_pointless_menuconfigs,
                               check_defconfig_only_definition,
                               check_missing_config_prefix)

    for i, check in enumerate(selected):
        # Blank line between the reports of consecutive checks
        if i:
            print()
        check()
def parse_args():
    # Parses the command line. Each check flag appends its check function to
    # args.checks ('action="append_const"'), so args.checks ends up as a list
    # of the selected check functions, or None when no flags were given (in
    # which case the caller runs every check).

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=__doc__, allow_abbrev=False)

    parser.add_argument(
        "-n", "--check-always-n",
        action="append_const", dest="checks", const=check_always_n,
        help="""\
List symbols that can never be anything but n/empty. These
are detected as symbols with no prompt or defaults that
aren't selected or implied.
""")

    parser.add_argument(
        "-u", "--check-unused",
        action="append_const", dest="checks", const=check_unused,
        help="""\
List symbols that might be unused.
Heuristic:
- Isn't referenced in Kconfig
- Isn't referenced as CONFIG_<NAME> outside Kconfig
(besides possibly as CONFIG_<NAME>=<VALUE>)
- Isn't selecting/implying other symbols
- Isn't a choice symbol
C preprocessor magic can trip up this check.""")

    parser.add_argument(
        "-m", "--check-pointless-menuconfigs",
        action="append_const", dest="checks", const=check_pointless_menuconfigs,
        help="""\
List symbols defined with 'menuconfig' where the menu is
empty due to the symbol not being followed by stuff that
depends on it""")

    parser.add_argument(
        "-d", "--check-defconfig-only-definition",
        action="append_const", dest="checks", const=check_defconfig_only_definition,
        help="""\
List symbols that are only defined in Kconfig.defconfig
files. A common base definition should probably be added
somewhere for such symbols, and the type declaration ('int',
'hex', etc.) removed from Kconfig.defconfig.""")

    parser.add_argument(
        "-p", "--check-missing-config-prefix",
        action="append_const", dest="checks", const=check_missing_config_prefix,
        help="""\
Look for references like
#if MACRO
#if(n)def MACRO
defined(MACRO)
IS_ENABLED(MACRO)
where MACRO is the name of a defined Kconfig symbol but
doesn't have a CONFIG_ prefix. Could be a typo.
Macros that are #define'd somewhere are not flagged.""")

    return parser.parse_args()
def check_always_n():
    """Report symbols that can never be anything but n/empty."""
    print_header("Symbols that can't be anything but n/empty")

    for sym in kconf.unique_defined_syms:
        # Without a prompt, a default, or a select/imply from elsewhere,
        # nothing can ever give the symbol a value
        dead = (not has_prompt(sym)
                and not is_selected_or_implied(sym)
                and not has_defaults(sym))
        if dead:
            print(name_and_locs(sym))
def check_unused():
    """Report symbols that appear to be unreferenced anywhere."""
    print_header("Symbols that look unused")
    used_names = referenced_sym_names()

    for sym in kconf.unique_defined_syms:
        # Choice symbols and symbols that select/imply others are "used"
        # even without a textual reference
        if sym.choice or is_selecting_or_implying(sym):
            continue
        if sym.name not in used_names:
            print(name_and_locs(sym))
def check_pointless_menuconfigs():
    """Report 'menuconfig' symbols whose generated menus are empty."""
    print_header("menuconfig symbols with empty menus")

    for node in kconf.node_iter():
        # node.list is the first node inside the menu; empty/None means the
        # 'menuconfig' created no submenu
        if node.is_menuconfig and isinstance(node.item, kconfiglib.Symbol) \
                and not node.list:
            print("{0.item.name:40} {0.filename}:{0.linenr}".format(node))
def check_defconfig_only_definition():
    """Report symbols defined solely in Kconfig.defconfig files."""
    print_header("Symbols only defined in Kconfig.defconfig files")

    for sym in kconf.unique_defined_syms:
        # Flag the symbol unless at least one of its definition locations
        # is outside a defconfig file
        if not any("defconfig" not in node.filename for node in sym.nodes):
            print(name_and_locs(sym))
def check_missing_config_prefix():
    """Report references to Kconfig symbols missing a CONFIG_ prefix.

    Greps all west modules for '#if <symbol>', '#if(n)def <symbol>',
    'defined(<symbol>' and 'IS_ENABLED(<symbol>' where <symbol> is the name
    of a defined Kconfig symbol without the CONFIG_ prefix -- usually a
    typo. Symbol names that are also #define'd somewhere are skipped to
    avoid false positives.
    """
    print_header("Symbol references that might be missing a CONFIG_ prefix")

    # Paths to modules
    modpaths = run(("west", "list", "-f{abspath}")).splitlines()

    # Gather #define'd macros that might overlap with symbol names, so that
    # they don't trigger false positives
    defined = set()
    for modpath in modpaths:
        regex = r"#\s*define\s+([A-Z0-9_]+)\b"
        defines = run(("git", "grep", "--extended-regexp", regex),
                      cwd=modpath, check=False)
        # Could pass --only-matching to git grep as well, but it was added
        # pretty recently (2018)
        defined.update(re.findall(regex, defines))

    # Filter out symbols whose names are #define'd too. Preserve definition
    # order to make the output consistent.
    syms = [sym for sym in kconf.unique_defined_syms
            if sym.name not in defined]

    # grep for symbol references in #ifdef/defined() that are missing a CONFIG_
    # prefix. Work around an "argument list too long" error from 'git grep' by
    # checking symbols in batches.
    for batch in split_list(syms, 200):
        # grep for '#if <symbol>', '#if(n)def <symbol>', 'defined(<symbol>',
        # and 'IS_ENABLED(<symbol>', with a missing CONFIG_ prefix.
        #
        # Bug fix: '(?:n?def)' is made optional ('(?:n?def)?') so that plain
        # '#if <symbol>' matches too, as advertised in the --help text;
        # previously only #ifdef/#ifndef were caught.
        regex = r"(?:#\s*if(?:n?def)?\s+|\bdefined\s*\(\s*|IS_ENABLED\(\s*)(?:" + \
                "|".join(sym.name for sym in batch) + r")\b"
        cmd = ("git", "grep", "--line-number", "-I", "--perl-regexp", regex)

        for modpath in modpaths:
            print(run(cmd, cwd=modpath, check=False), end="")
def split_list(lst, batch_size):
    # Generator helper for check_missing_config_prefix(): yields consecutive
    # slices of 'lst' holding at most 'batch_size' elements each (the final
    # slice may be shorter)
    start = 0
    while start < len(lst):
        yield lst[start:start + batch_size]
        start += batch_size
def print_header(s):
    # Prints 's' underlined by a row of '=' characters of matching length
    underline = "=" * len(s)
    print("{}\n{}".format(s, underline))
def init_kconfig():
    """Parse the top-level Kconfig, storing the result in the global kconf."""
    global kconf

    # Configure Kconfiglib via the environment for a "generic" parse that
    # pulls in all boards, SoCs, and architectures
    settings = {
        "srctree": TOP_DIR,
        "CMAKE_BINARY_DIR": modules_file_dir(),
        "KCONFIG_DOC_MODE": "1",
        "ZEPHYR_BASE": TOP_DIR,
        "SOC_DIR": "soc",
        "ARCH_DIR": "arch",
        "BOARD_DIR": "boards/*/*",
        "ARCH": "*",
    }
    os.environ.update(settings)

    kconf = kconfiglib.Kconfig(suppress_traceback=True)
def modules_file_dir():
    # Creates Kconfig.modules (which sources the Kconfig files of all west
    # modules) in a temporary directory and returns the directory's path
    tmpdir = tempfile.mkdtemp()
    script = os.path.join("scripts", "zephyr_module.py")
    run((script, "--kconfig-out", os.path.join(tmpdir, "Kconfig.modules")))
    return tmpdir
def referenced_sym_names():
    # Returns the names of all symbols referenced inside and outside the
    # Kconfig files (that we can detect), without any "CONFIG_" prefix
    inside = referenced_in_kconfig()
    outside = referenced_outside_kconfig()
    return inside | outside
def referenced_in_kconfig():
    # Returns the names of all symbols referenced inside the Kconfig files
    names = set()
    for node in kconf.node_iter():
        for ref in node.referenced:
            if isinstance(ref, kconfiglib.Symbol):
                names.add(ref.name)
    return names
def referenced_outside_kconfig():
    # Returns the names of all symbols referenced outside the Kconfig files,
    # with the "CONFIG_" prefix stripped
    regex = r"\bCONFIG_[A-Z0-9_]+\b"

    res = set()

    # 'git grep' all modules
    for modpath in run(("west", "list", "-f{abspath}")).splitlines():
        grep_out = run(("git", "grep", "-h", "-I", "--extended-regexp", regex),
                       cwd=modpath)
        for line in grep_out.splitlines():
            # Don't record lines starting with "CONFIG_FOO=" or "# CONFIG_FOO="
            # as references, so that symbols that are only assigned in .config
            # files are not included
            if re.match(r"[\s#]*CONFIG_[A-Z0-9_]+=.*", line):
                continue

            # Could pass --only-matching to git grep as well, but it was added
            # pretty recently (2018)
            for match in re.findall(regex, line):
                res.add(match[len("CONFIG_"):])

    return res
def has_prompt(sym):
    # True if at least one definition location gives 'sym' a prompt
    for node in sym.nodes:
        if node.prompt:
            return True
    return False
def is_selected_or_implied(sym):
    # True if anything selects or implies 'sym'. An untouched (weak) reverse
    # dependency is the constant "n" symbol, so anything else means some
    # select/imply targets the symbol.
    n = kconf.n
    return not (sym.rev_dep is n and sym.weak_rev_dep is n)
def has_defaults(sym):
    # True if 'sym' has at least one 'default' property
    return len(sym.defaults) > 0
def is_selecting_or_implying(sym):
    # Truthy if 'sym' selects or implies other symbols (mirrors the original
    # 'selects or implies' short-circuit, returning the operand itself)
    if sym.selects:
        return sym.selects
    return sym.implies
def name_and_locs(sym):
    # One-line summary for 'sym': the name padded to 40 columns, then a
    # comma-separated list of "<file>:<line>" definition locations
    locs = ", ".join("{}:{}".format(node.filename, node.linenr)
                     for node in sym.nodes)
    return "{:40} {}".format(sym.name, locs)
def run(cmd, cwd=TOP_DIR, check=True):
    # Runs 'cmd' with subprocess, returning the decoded stdout output. 'cwd' is
    # the working directory. It defaults to the top-level Zephyr directory.
    # Exits with an error if the command exits with a non-zero return code if
    # 'check' is True. Warns (on stderr) if the command wrote to stderr, even
    # when it succeeded.

    # Shell-quoted form of the command, used in error/warning messages
    cmd_s = " ".join(shlex.quote(word) for word in cmd)

    try:
        process = subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
    except OSError as e:
        # err() exits the program, so 'process' is never used uninitialized
        err("Failed to run '{}': {}".format(cmd_s, e))

    stdout, stderr = process.communicate()
    # errors="ignore" temporarily works around
    # path_to_url
    stdout = stdout.decode("utf-8", errors="ignore")
    stderr = stderr.decode("utf-8")
    if check and process.returncode:
        err("""\
'{}' exited with status {}.
===stdout===
{}
===stderr===
{}""".format(cmd_s, process.returncode, stdout, stderr))

    if stderr:
        warn("'{}' wrote to stderr:\n{}".format(cmd_s, stderr))

    return stdout
def err(msg):
    # Exits the program with a nonzero status, printing 'msg' prefixed with
    # the program name
    sys.exit("{}error: {}".format(executable(), msg))
def warn(msg):
    # Prints 'msg' to stderr, prefixed with the program name
    print("{}warning: {}".format(executable(), msg), file=sys.stderr)
def executable():
    # Returns "<program name>: " for message prefixes, or "" when
    # sys.argv[0] is the empty string
    prog = sys.argv[0]  # Empty string if missing
    return "{}: ".format(prog) if prog else ""
# Allow use both as a script and as an importable module
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/kconfig/lint.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,473 |
```python
#!/usr/bin/env python3
"""
Overview
========
A curses-based Python 2/3 menuconfig implementation. The interface should feel
familiar to people used to mconf ('make menuconfig').
Supports the same keys as mconf, and also supports a set of keybindings
inspired by Vi:
J/K : Down/Up
L : Enter menu/Toggle item
H : Leave menu
Ctrl-D/U: Page Down/Page Up
G/End : Jump to end of list
g/Home : Jump to beginning of list
[Space] toggles values if possible, and enters menus otherwise. [Enter] works
the other way around.
The mconf feature where pressing a key jumps to a menu entry with that
character in it in the current menu isn't supported. A jump-to feature for
jumping directly to any symbol (including invisible symbols), choice, menu or
comment (as in a Kconfig 'comment "Foo"') is available instead.
A few different modes are available:
F: Toggle show-help mode, which shows the help text of the currently selected
item in the window at the bottom of the menu display. This is handy when
browsing through options.
C: Toggle show-name mode, which shows the symbol name before each symbol menu
entry
A: Toggle show-all mode, which shows all items, including currently invisible
items and items that lack a prompt. Invisible items are drawn in a different
style to make them stand out.
Running
=======
menuconfig.py can be run either as a standalone executable or by calling the
menuconfig() function with an existing Kconfig instance. The second option is a
bit inflexible in that it will still load and save .config, etc.
When run in standalone mode, the top-level Kconfig file to load can be passed
as a command-line argument. With no argument, it defaults to "Kconfig".
The KCONFIG_CONFIG environment variable specifies the .config file to load (if
it exists) and save. If KCONFIG_CONFIG is unset, ".config" is used.
When overwriting a configuration file, the old version is saved to
<filename>.old (e.g. .config.old).
$srctree is supported through Kconfiglib.
Color schemes
=============
It is possible to customize the color scheme by setting the MENUCONFIG_STYLE
environment variable. For example, setting it to 'aquatic' will enable an
alternative, less yellow, more 'make menuconfig'-like color scheme, contributed
by Mitja Horvat (pinkfluid).
This is the current list of built-in styles:
- default classic Kconfiglib theme with a yellow accent
- monochrome colorless theme (uses only bold and standout attributes),
this style is used if the terminal doesn't support colors
- aquatic blue-tinted style loosely resembling the lxdialog theme
It is possible to customize the current style by changing colors of UI
elements on the screen. This is the list of elements that can be stylized:
- path Top row in the main display, with the menu path
- separator Separator lines between windows. Also used for the top line
in the symbol information display.
- list List of items, e.g. the main display
- selection Style for the selected item
- inv-list Like list, but for invisible items. Used in show-all mode.
- inv-selection Like selection, but for invisible items. Used in show-all
mode.
- help Help text windows at the bottom of various fullscreen
dialogs
- show-help Window showing the help text in show-help mode
- frame Frame around dialog boxes
- body Body of dialog boxes
- edit Edit box in pop-up dialogs
- jump-edit Edit box in jump-to dialog
- text Symbol information text
The color definition is a comma separated list of attributes:
- fg:COLOR Set the foreground/background colors. COLOR can be one of
* or * the basic 16 colors (black, red, green, yellow, blue,
- bg:COLOR magenta, cyan, white and brighter versions, for example,
brightred). On terminals that support more than 8 colors,
you can also directly put in a color number, e.g. fg:123
(hexadecimal and octal constants are accepted as well).
Colors outside the range -1..curses.COLORS-1 (which is
terminal-dependent) are ignored (with a warning). The COLOR
can be also specified using a RGB value in the HTML
notation, for example #RRGGBB. If the terminal supports
color changing, the color is rendered accurately.
Otherwise, the visually nearest color is used.
If the background or foreground color of an element is not
specified, it defaults to -1, representing the default
terminal foreground or background color.
Note: On some terminals a bright version of the color
implies bold.
- bold Use bold text
- underline Use underline text
- standout Standout text attribute (reverse color)
More often than not, some UI elements share the same color definition. In such
cases the right value may specify an UI element from which the color definition
will be copied. For example, "separator=help" will apply the current color
definition for "help" to "separator".
A keyword without the '=' is assumed to be a style template. The template name
is looked up in the built-in styles list and the style definition is expanded
in-place. With this, built-in styles can be used as basis for new styles.
For example, take the aquatic theme and give it a red selection bar:
MENUCONFIG_STYLE="aquatic selection=fg:white,bg:red"
If there's an error in the style definition or if a missing style is assigned
to, the assignment will be ignored, along with a warning being printed on
stderr.
The 'default' theme is always implicitly parsed first, so the following two
settings have the same effect:
MENUCONFIG_STYLE="selection=fg:white,bg:red"
MENUCONFIG_STYLE="default selection=fg:white,bg:red"
If the terminal doesn't support colors, the 'monochrome' theme is used, and
MENUCONFIG_STYLE is ignored. The assumption is that the environment is broken
somehow, and that the important thing is to get something usable.
Other features
==============
- Seamless terminal resizing
- No dependencies on *nix, as the 'curses' module is in the Python standard
library
- Unicode text entry
- Improved information screen compared to mconf:
* Expressions are split up by their top-level &&/|| operands to improve
readability
* Undefined symbols in expressions are pointed out
* Menus and comments have information displays
* Kconfig definitions are printed
* The include path is shown, listing the locations of the 'source'
statements that included the Kconfig file of the symbol (or other
item)
Limitations
===========
Doesn't work out of the box on Windows, but can be made to work with
pip install windows-curses
See the path_to_url repository.
"""
from __future__ import print_function
import os
import sys
_IS_WINDOWS = os.name == "nt" # Are we running on Windows?
try:
import curses
except ImportError as e:
if not _IS_WINDOWS:
raise
sys.exit("""\
menuconfig failed to import the standard Python 'curses' library. Try
installing a package like windows-curses
(path_to_url by running this command
in cmd.exe:
pip install windows-curses
Starting with Kconfiglib 13.0.0, windows-curses is no longer automatically
installed when installing Kconfiglib via pip on Windows (because it breaks
installation on MSYS2).
Exception:
{}: {}""".format(type(e).__name__, e))
import errno
import locale
import re
import textwrap
from kconfiglib import Symbol, Choice, MENU, COMMENT, MenuNode, \
BOOL, TRISTATE, STRING, INT, HEX, \
AND, OR, \
expr_str, expr_value, split_expr, \
standard_sc_expr_str, \
TRI_TO_STR, TYPE_TO_STR, \
standard_kconfig, standard_config_filename
#
# Configuration variables
#
# If True, try to change LC_CTYPE to a UTF-8 locale if it is set to the C
# locale (which implies ASCII). This fixes curses Unicode I/O issues on systems
# with bad defaults. ncurses configures itself from the locale settings.
#
# Related PEP: path_to_url
_CHANGE_C_LC_CTYPE_TO_UTF8 = True
# How many steps an implicit submenu will be indented. Implicit submenus are
# created when an item depends on the symbol before it. Note that symbols
# defined with 'menuconfig' create a separate menu instead of indenting.
_SUBMENU_INDENT = 4
# Number of steps for Page Up/Down to jump
_PG_JUMP = 6
# Height of the help window in show-help mode
_SHOW_HELP_HEIGHT = 8
# How far the cursor needs to be from the edge of the window before it starts
# to scroll. Used for the main menu display, the information display, the
# search display, and for text boxes.
_SCROLL_OFFSET = 5
# Minimum width of dialogs that ask for text input
_INPUT_DIALOG_MIN_WIDTH = 30
# Number of arrows pointing up/down to draw when a window is scrolled
_N_SCROLL_ARROWS = 14
# Lines of help text shown at the bottom of the "main" display
_MAIN_HELP_LINES = """
[Space/Enter] Toggle/enter [ESC] Leave menu [S] Save
[O] Load [?] Symbol info [/] Jump to symbol
[F] Toggle show-help mode [C] Toggle show-name mode [A] Toggle show-all mode
[Q] Quit (prompts for save) [D] Save minimal config (advanced)
"""[1:-1].split("\n")
# Lines of help text shown at the bottom of the information dialog
_INFO_HELP_LINES = """
[ESC/q] Return to menu [/] Jump to symbol
"""[1:-1].split("\n")
# Lines of help text shown at the bottom of the search dialog
_JUMP_TO_HELP_LINES = """
Type text to narrow the search. Regexes are supported (via Python's 're'
module). The up/down cursor keys step in the list. [Enter] jumps to the
selected symbol. [ESC] aborts the search. Type multiple space-separated
strings/regexes to find entries that match all of them. Type Ctrl-F to
view the help of the selected item without leaving the dialog.
"""[1:-1].split("\n")
#
# Styling
#
_STYLES = {
"default": """
path=fg:black,bg:white,bold
separator=fg:black,bg:yellow,bold
list=fg:black,bg:white
selection=fg:white,bg:blue,bold
inv-list=fg:red,bg:white
inv-selection=fg:red,bg:blue
help=path
show-help=list
frame=fg:black,bg:yellow,bold
body=fg:white,bg:black
edit=fg:white,bg:blue
jump-edit=edit
text=list
""",
# This style is forced on terminals that do no support colors
"monochrome": """
path=bold
separator=bold,standout
list=
selection=bold,standout
inv-list=bold
inv-selection=bold,standout
help=bold
show-help=
frame=bold,standout
body=
edit=standout
jump-edit=
text=
""",
# Blue-tinted style loosely resembling lxdialog
"aquatic": """
path=fg:white,bg:blue
separator=fg:white,bg:cyan
help=path
frame=fg:white,bg:cyan
body=fg:white,bg:blue
edit=fg:black,bg:white
"""
}
_NAMED_COLORS = {
# Basic colors
"black": curses.COLOR_BLACK,
"red": curses.COLOR_RED,
"green": curses.COLOR_GREEN,
"yellow": curses.COLOR_YELLOW,
"blue": curses.COLOR_BLUE,
"magenta": curses.COLOR_MAGENTA,
"cyan": curses.COLOR_CYAN,
"white": curses.COLOR_WHITE,
# Bright versions
"brightblack": curses.COLOR_BLACK + 8,
"brightred": curses.COLOR_RED + 8,
"brightgreen": curses.COLOR_GREEN + 8,
"brightyellow": curses.COLOR_YELLOW + 8,
"brightblue": curses.COLOR_BLUE + 8,
"brightmagenta": curses.COLOR_MAGENTA + 8,
"brightcyan": curses.COLOR_CYAN + 8,
"brightwhite": curses.COLOR_WHITE + 8,
# Aliases
"purple": curses.COLOR_MAGENTA,
"brightpurple": curses.COLOR_MAGENTA + 8,
}
def _rgb_to_6cube(rgb):
# Converts an 888 RGB color to a 3-tuple (nice in that it's hashable)
# representing the closest xterm 256-color 6x6x6 color cube color.
#
# The xterm 256-color extension uses a RGB color palette with components in
# the range 0-5 (a 6x6x6 cube). The catch is that the mapping is nonlinear.
# Index 0 in the 6x6x6 cube is mapped to 0, index 1 to 95, then 135, 175,
# etc., in increments of 40. See the links below:
#
# path_to_url
# path_to_url
# 48 is the middle ground between 0 and 95.
return tuple(0 if x < 48 else int(round(max(1, (x - 55)/40))) for x in rgb)
def _6cube_to_rgb(r6g6b6):
# Returns the 888 RGB color for a 666 xterm color cube index
return tuple(0 if x == 0 else 40*x + 55 for x in r6g6b6)
def _rgb_to_gray(rgb):
# Converts an 888 RGB color to the index of an xterm 256-color grayscale
# color with approx. the same perceived brightness
# Calculate the luminance (gray intensity) of the color. See
# path_to_url
# and
# path_to_url#color-contrast
luma = 0.299*rgb[0] + 0.587*rgb[1] + 0.114*rgb[2]
# Closest index in the grayscale palette, which starts at RGB 0x080808,
# with stepping 0x0A0A0A
index = int(round((luma - 8)/10))
# Clamp the index to 0-23, corresponding to 232-255
return max(0, min(index, 23))
def _gray_to_rgb(index):
# Convert a grayscale index to its closet single RGB component
return 3*(10*index + 8,) # Returns a 3-tuple
# Obscure Python: we never pass a value for rgb2index, so it keeps pointing
# to the same dict across calls. This avoids a global.
def _alloc_rgb(rgb, rgb2index={}):
    # Initializes a new xterm palette entry with the 888 RGB color 'rgb' and
    # returns its index. If the color was allocated before, the existing
    # entry's index is returned instead.
    #
    # ncurses is palette-based, so new colors are created by overwriting
    # palette entries via curses.init_color().
    try:
        return rgb2index[rgb]
    except KeyError:
        pass

    # Skip colors 0-15: many terminals let the user customize them, and
    # their RGB values can't be queried, so they're left untouched. Colors
    # above 255 are never reached (the allocation fails first).
    color_index = 16 + len(rgb2index)
    if color_index >= 256:
        _warn("Unable to allocate new RGB color ", rgb, ". Too many colors "
              "allocated.")
        return 0

    # curses wants each component scaled from the range 0-255 up to 0-1000
    curses.init_color(color_index, *(int(round(1000*x/255)) for x in rgb))
    rgb2index[rgb] = color_index
    return color_index
def _color_from_num(num):
    # Returns the index of a color that renders like color 'num' from the
    # xterm 256-color palette (possibly a freshly allocated palette entry
    # with the same RGB value, if colors are being redefined).

    # _alloc_rgb() never touches colors 0-15 or anything above 255, so those
    # pass through unchanged. The same holds when the terminal can't change
    # color definitions or has fewer than 256 colors, where _alloc_rgb()
    # touches nothing at all.
    if num < 16 or num > 255 or not curses.can_change_color() or \
       curses.COLORS < 256:
        return num

    # Emulate the xterm 256-color palette by allocating new colors instead
    # of returning color numbers directly
    if num >= 232:
        # Grayscale ramp entry (232-255)
        return _alloc_rgb(_gray_to_rgb(num - 232))

    # 6x6x6 color cube entry (16-231): decode the cube coordinates
    cube = num - 16
    return _alloc_rgb(_6cube_to_rgb(((cube//36)%6, (cube//6)%6, cube%6)))
def _color_from_rgb(rgb):
    # Returns the index of a color matching the 888 RGB color 'rgb', either
    # exactly or approximately depending on terminal capabilities

    def dist(c1, c2):
        # Squared Euclidean distance between two RGB colors
        return sum((a - b)**2 for a, b in zip(c1, c2))

    if curses.COLORS >= 256:
        # Assume we're dealing with xterm's 256-color extension

        if curses.can_change_color():
            # Best case -- the terminal supports changing palette entries
            # via curses.init_color(). Initialize an unused palette entry
            # and return it.
            return _alloc_rgb(rgb)

        # Second best case -- pick the closer of the nearest 6x6x6 cube
        # color and the nearest grayscale ramp color
        cube = _rgb_to_6cube(rgb)
        gray = _rgb_to_gray(rgb)

        if dist(rgb, _6cube_to_rgb(cube)) < dist(rgb, _gray_to_rgb(gray)):
            # Color number for the 6-cube index triplet
            return 16 + 36*cube[0] + 6*cube[1] + cube[2]

        # Use the color from the gray palette
        return 232 + gray

    # Terminal not in xterm 256-color mode: brute-force the closest color in
    # the palette. This is probably the best we can do, or is it? Submit
    # patches. :)
    best_color = -1
    best_dist = float('inf')
    for candidate in range(curses.COLORS):
        # ncurses reports components in the range 0..1000; rescale to 0..255
        cand_rgb = tuple(int(round(255*c/1000))
                         for c in curses.color_content(candidate))
        cand_dist = dist(rgb, cand_rgb)
        if cand_dist < best_dist:
            best_dist = cand_dist
            best_color = candidate

    return best_color
def _parse_style(style_str, parsing_default):
    # Parses a style string of whitespace-separated words. Words of the form
    # '<element>=<style>' assign a style; any other word names a built-in
    # style template whose assignments are expanded in place.
    #
    # 'parsing_default' is True while the implicit 'default'/'monochrome'
    # template is being parsed, which suppresses unknown-key warnings.
    for word in style_str.split():
        if "=" not in word:
            # Not an assignment: treat the word as a style template
            # reference. Unknown templates are ignored (with a warning) for
            # backwards/forwards compatibility.
            if word in _STYLES:
                # Recursively parse the style template
                _parse_style(_STYLES[word], parsing_default)
            else:
                _warn("Ignoring non-existent style template", word)
            continue

        key, data = word.split("=", 1)

        # The 'default' style template is assumed to define all keys. We
        # still run _style_to_curses() for non-existing keys, so that errors
        # to the right of '=' get warnings for those too.
        if key not in _style and not parsing_default:
            _warn("Ignoring non-existent style", key)

        if data in _style:
            # Right-hand side names another element: copy its style
            _style[key] = _style[data]
        else:
            _style[key] = _style_to_curses(data)
# Dictionary mapping element types to the curses attributes used to display
# them
_style = {}
def _style_to_curses(style_def):
    # Parses one style definition (the part right of '=' in
    # '<element>=<style>'): a comma-separated list of 'fg:COLOR', 'bg:COLOR',
    # 'bold', 'standout', and 'underline' fields. Unknown fields and bad
    # colors are ignored with a warning. Returns the combined curses
    # attribute for the definition.

    def parse_color(color_def):
        # Parses a single 'fg:...'/'bg:...' field, returning a curses color
        # number, or -1 (the default terminal fg/bg color) on errors
        color_def = color_def.split(":", 1)[1]

        # HTML format, #RRGGBB
        if re.match("#[A-Fa-f0-9]{6}", color_def):
            return _color_from_rgb((
                int(color_def[1:3], 16),
                int(color_def[3:5], 16),
                int(color_def[5:7], 16)))

        if color_def in _NAMED_COLORS:
            color_num = _color_from_num(_NAMED_COLORS[color_def])
        else:
            try:
                # Base 0 accepts decimal as well as 0x/0o/0b literals
                color_num = _color_from_num(int(color_def, 0))
            except ValueError:
                _warn("Ignoring color", color_def, "that's neither "
                      "predefined nor a number")
                return -1

        if not -1 <= color_num < curses.COLORS:
            _warn("Ignoring color {}, which is outside the range "
                  "-1..curses.COLORS-1 (-1..{})"
                  .format(color_def, curses.COLORS - 1))
            return -1

        return color_num

    fg_color = -1
    bg_color = -1
    attrs = 0

    if style_def:
        for field in style_def.split(","):
            if field.startswith("fg:"):
                fg_color = parse_color(field)
            elif field.startswith("bg:"):
                bg_color = parse_color(field)
            elif field == "bold":
                # A_BOLD tends to produce faint and hard-to-read text on the
                # Windows console, especially with the old color scheme, before
                # the introduction of
                # path_to_url
                attrs |= curses.A_NORMAL if _IS_WINDOWS else curses.A_BOLD
            elif field == "standout":
                attrs |= curses.A_STANDOUT
            elif field == "underline":
                attrs |= curses.A_UNDERLINE
            else:
                _warn("Ignoring unknown style attribute", field)

    return _style_attr(fg_color, bg_color, attrs)
def _init_styles():
    # Populates the global _style dict from the built-in themes and the
    # MENUCONFIG_STYLE environment variable

    if not curses.has_colors():
        # Colorless terminal: force the 'monochrome' theme and ignore
        # MENUCONFIG_STYLE. The assumption is that the environment is broken
        # somehow, and that the important thing is to get something usable.
        _parse_style("monochrome", True)
        return

    try:
        curses.use_default_colors()
    except curses.error:
        # Ignore errors on funky terminals that support colors but not
        # using default colors. Worst it can do is break transparency and
        # the like. Ran across this with the MSYS2/winpty setup in
        # path_to_url though there
        # seems to be a lot of general brokenness there.
        pass

    # Use the 'default' theme as the base, and add any user-defined style
    # settings from the environment
    _parse_style("default", True)
    if "MENUCONFIG_STYLE" in os.environ:
        _parse_style(os.environ["MENUCONFIG_STYLE"], False)
# color_attribs caches the color pairs we've already created, indexed by a
# (<foreground color>, <background color>) tuple.
#
# Obscure Python: we never pass a value for color_attribs, so it keeps
# pointing to the same dict across calls. This avoids a global.
def _style_attr(fg_color, bg_color, attribs, color_attribs={}):
    # Returns a curses attribute combining the given foreground/background
    # colors with the attribute bits in 'attribs', creating a new color pair
    # for previously unseen color combinations.
    #
    # Simply returns 'attribs' if the terminal doesn't support colors.
    if not curses.has_colors():
        return attribs

    try:
        pair_attr = color_attribs[(fg_color, bg_color)]
    except KeyError:
        # Color pair number 0 is hardcoded and cannot be changed, hence
        # the +1s
        pair_number = len(color_attribs) + 1
        curses.init_pair(pair_number, fg_color, bg_color)
        pair_attr = curses.color_pair(pair_number)
        color_attribs[(fg_color, bg_color)] = pair_attr

    return pair_attr | attribs
#
# Main application
#
def _main():
    # Script entry point: parse the standard Kconfiglib command line (top
    # Kconfig filename etc.) and launch the configuration interface
    menuconfig(standard_kconfig(__doc__))
def menuconfig(kconf):
    """
    Launches the configuration interface, returning after the user exits.

    kconf:
      Kconfig instance to be configured
    """
    global _kconf
    global _conf_filename
    global _conf_changed
    global _minconf_filename
    global _show_all

    _kconf = kconf

    # Filename to save configuration to (KCONFIG_CONFIG or ".config")
    _conf_filename = standard_config_filename()

    # Load existing configuration and set _conf_changed True if it is outdated
    _conf_changed = _load_config()

    # Filename to save minimal configuration to
    _minconf_filename = "defconfig"

    # Any visible items in the top menu?
    _show_all = False
    if not _shown_nodes(kconf.top_node):
        # Nothing visible. Start in show-all mode and try again.
        _show_all = True
        if not _shown_nodes(kconf.top_node):
            # Give up. The implementation relies on always having a selected
            # node.
            print("Empty configuration -- nothing to configure.\n"
                  "Check that environment variables are set properly.")
            return

    # Disable warnings. They get mangled in curses mode, and we deal with
    # errors ourselves.
    kconf.warn = False

    # Make curses use the locale settings specified in the environment
    locale.setlocale(locale.LC_ALL, "")

    # Try to fix Unicode issues on systems with bad defaults
    if _CHANGE_C_LC_CTYPE_TO_UTF8:
        _change_c_lc_ctype_to_utf8()

    # Get rid of the delay between pressing ESC and jumping to the parent menu,
    # unless the user has set ESCDELAY (see ncurses(3)). This makes the UI much
    # smoother to work with.
    #
    # Note: This is strictly pretty iffy, since escape codes for e.g. cursor
    # keys start with ESC, but I've never seen it cause problems in practice
    # (probably because it's unlikely that the escape code for a key would get
    # split up across read()s, at least with a terminal emulator). Please
    # report if you run into issues. Some suitable small default value could be
    # used here instead in that case. Maybe it's silly to not put in the
    # smallest imperceptible delay here already, though I don't like guessing.
    #
    # (From a quick glance at the ncurses source code, ESCDELAY might only be
    # relevant for mouse events there, so maybe escapes are assumed to arrive
    # in one piece already...)
    os.environ.setdefault("ESCDELAY", "0")

    # Enter curses mode. _menuconfig() returns a string to print on exit, after
    # curses has been de-initialized.
    print(curses.wrapper(_menuconfig))
def _load_config():
    # Loads any existing .config file. See the Kconfig.load_config()
    # docstring.
    #
    # Returns True if .config is missing or outdated. We always prompt for
    # saving the configuration in that case.

    print(_kconf.load_config())

    # A missing .config always counts as "needs saving"; otherwise ask
    # _needs_save() whether the loaded file is outdated
    return True if not os.path.exists(_conf_filename) else _needs_save()
def _needs_save():
    # Returns True if a just-loaded .config file is outdated (would get
    # modified when saving)

    if _kconf.missing_syms:
        # The .config assigned values to symbols that aren't defined
        return True

    for sym in _kconf.unique_defined_syms:
        if sym.user_value is None:
            if sym.config_string:
                # Symbol that would be written out but had no .config entry
                return True
        else:
            # Compare against the tristate value for bool/tristate symbols,
            # and against the string value for everything else
            cur = sym.tri_value if sym.orig_type in (BOOL, TRISTATE) else \
                sym.str_value
            if cur != sym.user_value:
                # Written symbol whose value changed since loading
                return True

    # No need to prompt for save
    return False
# Global variables used below:
#
# _stdscr:
# stdscr from curses
#
# _cur_menu:
# Menu node of the menu (or menuconfig symbol, or choice) currently being
# shown
#
# _shown:
# List of items in _cur_menu that are shown (ignoring scrolling). In
# show-all mode, this list contains all items in _cur_menu. Otherwise, it
# contains just the visible items.
#
# _sel_node_i:
# Index in _shown of the currently selected node
#
# _menu_scroll:
# Index in _shown of the top row of the main display
#
# _parent_screen_rows:
# List/stack of the row numbers that the selections in the parent menus
# appeared on. This is used to prevent the scrolling from jumping around
# when going in and out of menus.
#
# _show_help/_show_name/_show_all:
# If True, the corresponding mode is on. See the module docstring.
#
# _conf_filename:
# File to save the configuration to
#
# _minconf_filename:
# File to save minimal configurations to
#
# _conf_changed:
# True if the configuration has been changed. If False, we don't bother
# showing the save-and-quit dialog.
#
# We reset this to False whenever the configuration is saved explicitly
# from the save dialog.
def _menuconfig(stdscr):
    # Logic for the main display, with the list of symbols, etc.
    #
    # Runs the keyboard-driven event loop until the user quits. Returns a
    # string for menuconfig() to print after curses has been de-initialized.

    global _stdscr
    global _conf_filename
    global _conf_changed
    global _minconf_filename
    global _show_help
    global _show_name

    _stdscr = stdscr

    _init()

    while True:
        _draw_main()
        curses.doupdate()

        c = _getch_compat(_menu_win)

        if c == curses.KEY_RESIZE:
            _resize_main()
        elif c in (curses.KEY_DOWN, "j", "J"):
            _select_next_menu_entry()
        elif c in (curses.KEY_UP, "k", "K"):
            _select_prev_menu_entry()
        elif c in (curses.KEY_NPAGE, "\x04"):  # Page Down/Ctrl-D
            # Keep it simple. This way we get sane behavior for small windows,
            # etc., for free.
            for _ in range(_PG_JUMP):
                _select_next_menu_entry()
        elif c in (curses.KEY_PPAGE, "\x15"):  # Page Up/Ctrl-U
            for _ in range(_PG_JUMP):
                _select_prev_menu_entry()
        elif c in (curses.KEY_END, "G"):
            _select_last_menu_entry()
        elif c in (curses.KEY_HOME, "g"):
            _select_first_menu_entry()
        elif c == " ":
            # Toggle the node if possible
            sel_node = _shown[_sel_node_i]
            if not _change_node(sel_node):
                _enter_menu(sel_node)
        elif c in (curses.KEY_RIGHT, "\n", "l", "L"):
            # Enter the node if possible
            sel_node = _shown[_sel_node_i]
            if not _enter_menu(sel_node):
                _change_node(sel_node)
        elif c in ("n", "N"):
            _set_sel_node_tri_val(0)
        elif c in ("m", "M"):
            _set_sel_node_tri_val(1)
        elif c in ("y", "Y"):
            _set_sel_node_tri_val(2)
        elif c in (curses.KEY_LEFT, curses.KEY_BACKSPACE, _ERASE_CHAR,
                   "\x1B", "h", "H"):  # \x1B = ESC
            # ESC at the top level asks about quitting instead of moving up
            # a menu
            if c == "\x1B" and _cur_menu is _kconf.top_node:
                res = _quit_dialog()
                if res:
                    return res
            else:
                _leave_menu()
        elif c in ("o", "O"):
            _load_dialog()
        elif c in ("s", "S"):
            filename = _save_dialog(_kconf.write_config, _conf_filename,
                                    "configuration")
            if filename:
                _conf_filename = filename
                _conf_changed = False
        elif c in ("d", "D"):
            filename = _save_dialog(_kconf.write_min_config, _minconf_filename,
                                    "minimal configuration")
            if filename:
                _minconf_filename = filename
        elif c == "/":
            _jump_to_dialog()
            # The terminal might have been resized while the fullscreen jump-to
            # dialog was open
            _resize_main()
        elif c == "?":
            _info_dialog(_shown[_sel_node_i], False)
            # The terminal might have been resized while the fullscreen info
            # dialog was open
            _resize_main()
        elif c in ("f", "F"):
            _show_help = not _show_help
            _set_style(_help_win, "show-help" if _show_help else "help")
            _resize_main()
        elif c in ("c", "C"):
            _show_name = not _show_name
        elif c in ("a", "A"):
            _toggle_show_all()
        elif c in ("q", "Q"):
            res = _quit_dialog()
            if res:
                return res
def _quit_dialog():
    # Pops up the save-before-quit dialog. Returns a message to print after
    # leaving curses mode, or None if quitting was canceled.

    if not _conf_changed:
        return "No changes to save (for '{}')".format(_conf_filename)

    while True:
        c = _key_dialog(
            "Quit",
            " Save configuration?\n"
            "\n"
            "(Y)es (N)o (C)ancel",
            "ync")

        if c == "n":
            return "Configuration ({}) was not saved".format(_conf_filename)

        if c == "y":
            # _try_save() returns a message to print on success and None on
            # failure. On failure, loop and ask again.
            msg = _try_save(_kconf.write_config, _conf_filename,
                            "configuration")
            if msg:
                return msg
        elif c is None or c == "c":
            # Canceled (ESC or 'c')
            return None
def _init():
    # Initializes the main display with the list of symbols, etc. Also does
    # misc. global initialization that needs to happen after initializing
    # curses.

    global _ERASE_CHAR

    global _path_win
    global _top_sep_win
    global _menu_win
    global _bot_sep_win
    global _help_win

    global _parent_screen_rows
    global _cur_menu
    global _shown
    global _sel_node_i
    global _menu_scroll

    global _show_help
    global _show_name

    # Looking for this in addition to KEY_BACKSPACE (which is unreliable) makes
    # backspace work with TERM=vt100. That makes it likely to work in sane
    # environments.
    _ERASE_CHAR = curses.erasechar()
    if sys.version_info[0] >= 3:
        # erasechar() returns a one-byte bytes object on Python 3. This sets
        # _ERASE_CHAR to a blank string if it can't be decoded, which should be
        # harmless.
        _ERASE_CHAR = _ERASE_CHAR.decode("utf-8", "ignore")

    # Color/attribute styles must be set up before any _styled_win() call
    _init_styles()

    # Hide the cursor
    _safe_curs_set(0)

    # Initialize windows

    # Top row, with menu path
    _path_win = _styled_win("path")

    # Separator below menu path, with title and arrows pointing up
    _top_sep_win = _styled_win("separator")

    # List of menu entries with symbols, etc.
    _menu_win = _styled_win("list")
    _menu_win.keypad(True)

    # Row below menu list, with arrows pointing down
    _bot_sep_win = _styled_win("separator")

    # Help window with keys at the bottom. Shows help texts in show-help mode.
    _help_win = _styled_win("help")

    # The rows we'd like the nodes in the parent menus to appear on. This
    # prevents the scroll from jumping around when going in and out of menus.
    _parent_screen_rows = []

    # Initial state: at the top menu, first node selected, no scroll
    _cur_menu = _kconf.top_node
    _shown = _shown_nodes(_cur_menu)
    _sel_node_i = _menu_scroll = 0

    _show_help = _show_name = False

    # Give windows their initial size
    _resize_main()
def _resize_main():
    # Resizes the main display, with the list of symbols, etc., to fill the
    # terminal

    global _menu_scroll

    scr_h, scr_w = _stdscr.getmaxyx()

    # The three single-row windows always span the full width
    _path_win.resize(1, scr_w)
    _top_sep_win.resize(1, scr_w)
    _bot_sep_win.resize(1, scr_w)

    if _show_help:
        help_h = _SHOW_HELP_HEIGHT
    else:
        help_h = len(_MAIN_HELP_LINES)

    # Three rows are taken by the path row and the two separators
    menu_h = scr_h - help_h - 3

    if menu_h >= 1:
        _menu_win.resize(menu_h, scr_w)
        _help_win.resize(help_h, scr_w)

        # Stack the windows top to bottom
        _top_sep_win.mvwin(1, 0)
        _menu_win.mvwin(2, 0)
        _bot_sep_win.mvwin(2 + menu_h, 0)
        _help_win.mvwin(3 + menu_h, 0)
    else:
        # Degenerate case. Give up on nice rendering and just prevent errors.
        menu_h = 1
        _menu_win.resize(1, scr_w)
        _help_win.resize(1, scr_w)
        for win in (_top_sep_win, _menu_win, _bot_sep_win, _help_win):
            win.mvwin(0, 0)

    # Adjust the scroll so that the selected node is still within the window,
    # if needed
    if _sel_node_i - _menu_scroll >= menu_h:
        _menu_scroll = _sel_node_i - menu_h + 1
def _height(win):
# Returns the height of 'win'
return win.getmaxyx()[0]
def _width(win):
# Returns the width of 'win'
return win.getmaxyx()[1]
def _enter_menu(menu):
    # Makes 'menu' the currently displayed menu. In addition to actual 'menu's,
    # "menu" here includes choices and symbols defined with the 'menuconfig'
    # keyword.
    #
    # Returns False if 'menu' can't be entered.

    global _cur_menu
    global _shown
    global _sel_node_i
    global _menu_scroll

    if not menu.is_menuconfig:
        # Not a menu
        return False

    sub_nodes = _shown_nodes(menu)
    if not sub_nodes:
        # Never enter empty menus. We depend on having a current node.
        return False

    # Remember the screen row of the current node so that it can be put back
    # in the same place when this menu is left again
    _parent_screen_rows.append(_sel_node_i - _menu_scroll)

    # Jump into the menu
    _cur_menu = menu
    _shown = sub_nodes
    _sel_node_i = _menu_scroll = 0

    if isinstance(menu.item, Choice):
        _select_selected_choice_sym()

    return True
def _select_selected_choice_sym():
    # Moves the cursor onto the currently selected (y-valued) choice symbol,
    # if any. Does nothing if the choice has no selection (is not visible/in
    # y mode).

    global _sel_node_i

    selection = _cur_menu.item.selection
    if selection:
        # A choice symbol can be defined in multiple locations, so look
        # through all of its menu nodes for one that is currently shown
        for node in selection.nodes:
            if node in _shown:
                _sel_node_i = _shown.index(node)
                _center_vertically()
                return
def _jump_to(node):
    # Jumps directly to the menu node 'node'. May turn on show-all mode if
    # 'node' would not be visible otherwise.

    global _cur_menu
    global _shown
    global _sel_node_i
    global _menu_scroll
    global _show_all
    global _parent_screen_rows

    # Clear remembered menu locations. We might not even have been in the
    # parent menus before.
    _parent_screen_rows = []

    # Remember the show-all state so we can tell below whether the jump
    # itself forced it on
    old_show_all = _show_all
    jump_into = (isinstance(node.item, Choice) or node.item == MENU) and \
                node.list

    # If we're jumping to a non-empty choice or menu, jump to the first entry
    # in it instead of jumping to its menu node
    if jump_into:
        _cur_menu = node
        node = node.list
    else:
        _cur_menu = _parent_menu(node)

    _shown = _shown_nodes(_cur_menu)
    if node not in _shown:
        # The node wouldn't be shown. Turn on show-all to show it.
        _show_all = True
        _shown = _shown_nodes(_cur_menu)

    _sel_node_i = _shown.index(node)

    if jump_into and not old_show_all and _show_all:
        # If we're jumping into a choice or menu and were forced to turn on
        # show-all because the first entry wasn't visible, try turning it off.
        # That will land us at the first visible node if there are visible
        # nodes, and is a no-op otherwise.
        _toggle_show_all()

    _center_vertically()

    # If we're jumping to a non-empty choice, jump to the selected symbol, if
    # any
    if jump_into and isinstance(_cur_menu.item, Choice):
        _select_selected_choice_sym()
def _leave_menu():
    # Jumps to the parent menu of the current menu. Does nothing if we're in
    # the top menu.

    global _cur_menu
    global _shown
    global _sel_node_i
    global _menu_scroll

    if _cur_menu is _kconf.top_node:
        return

    # Jump to the parent menu, with the menu we just left selected
    parent = _parent_menu(_cur_menu)
    _shown = _shown_nodes(parent)
    _sel_node_i = _shown.index(_cur_menu)
    _cur_menu = parent

    if not _parent_screen_rows:
        # No saved parent menu locations, meaning we jumped directly to some
        # node earlier
        _center_vertically()
        return

    # Try to make the menu entry appear on the same row on the screen as it
    # did before we entered the menu. The terminal might have shrunk since
    # we were last in the parent menu.
    row = min(_parent_screen_rows.pop(), _height(_menu_win) - 1)
    _menu_scroll = max(_sel_node_i - row, 0)
def _select_next_menu_entry():
    # Selects the menu entry after the current one, adjusting the scroll if
    # necessary. Does nothing if we're already at the last menu entry.

    global _sel_node_i
    global _menu_scroll

    if _sel_node_i >= len(_shown) - 1:
        # Already at the bottom
        return

    # Move to the following node
    _sel_node_i += 1

    # If the new node is sufficiently close to the edge of the menu window
    # (as determined by _SCROLL_OFFSET), increase the scroll by one. This
    # gives nice and non-jumpy behavior even when
    # _SCROLL_OFFSET >= _height(_menu_win).
    if _sel_node_i >= _menu_scroll + _height(_menu_win) - _SCROLL_OFFSET and \
       _menu_scroll < _max_scroll(_shown, _menu_win):
        _menu_scroll += 1
def _select_prev_menu_entry():
    # Selects the menu entry before the current one, adjusting the scroll if
    # necessary. Does nothing if we're already at the first menu entry.

    global _sel_node_i
    global _menu_scroll

    if _sel_node_i == 0:
        # Already at the top
        return

    # Move to the preceding node
    _sel_node_i -= 1

    # See _select_next_menu_entry()
    if _sel_node_i < _menu_scroll + _SCROLL_OFFSET:
        _menu_scroll = max(_menu_scroll - 1, 0)
def _select_last_menu_entry():
    # Selects the last menu entry in the current menu

    global _sel_node_i
    global _menu_scroll

    # Scroll so that the end of the list is on screen, with the last node
    # selected
    _menu_scroll = _max_scroll(_shown, _menu_win)
    _sel_node_i = len(_shown) - 1
def _select_first_menu_entry():
    # Selects the first menu entry in the current menu

    global _sel_node_i
    global _menu_scroll

    # Back to the top, no scroll
    _menu_scroll = 0
    _sel_node_i = 0
def _toggle_show_all():
    # Toggles show-all mode on/off. If turning it off would give no visible
    # items in the current menu, it is left on.

    global _show_all
    global _shown
    global _sel_node_i
    global _menu_scroll

    # Row on the screen the cursor is on. Preferably we want the same row to
    # stay highlighted.
    old_row = _sel_node_i - _menu_scroll

    _show_all = not _show_all
    # Nodes that would be shown after toggling _show_all
    new_shown = _shown_nodes(_cur_menu)

    # Find a good node to select, since the selected node might disappear
    # when show-all mode is turned off. Prefer the previously selected node
    # itself, then the closest remaining node before it, then the closest
    # remaining node after it.
    keep = next((node for node in _shown[_sel_node_i::-1]
                 if node in new_shown), None)
    if keep is None:
        keep = next((node for node in _shown[_sel_node_i + 1:]
                     if node in new_shown), None)
    if keep is None:
        # No visible nodes at all, meaning show-all was turned off inside an
        # invisible menu. Don't allow that, as the implementation relies on
        # always having a selected node.
        _show_all = True
        return

    _sel_node_i = new_shown.index(keep)
    _shown = new_shown

    # Try to make the cursor stay on the same row in the menu window. This
    # might be impossible if too many nodes have disappeared above the node.
    _menu_scroll = max(_sel_node_i - old_row, 0)
def _center_vertically():
    # Centers the selected node vertically, if possible

    global _menu_scroll

    # Scroll that would put the selection in the middle of the window,
    # clamped to the valid scroll range
    centered = _sel_node_i - _height(_menu_win)//2
    _menu_scroll = min(max(centered, 0), _max_scroll(_shown, _menu_win))
def _draw_main():
    # Draws the "main" display, with the list of symbols, the header, and the
    # footer.
    #
    # This could be optimized to only update the windows that have actually
    # changed, but keep it simple for now and let curses sort it out.
    #
    # NOTE: The order in which the windows are drawn is significant -- see
    # the comments on the arrows/title and on the menu path below.

    term_width = _width(_stdscr)

    #
    # Update the separator row below the menu path
    #

    _top_sep_win.erase()

    # Draw arrows pointing up if the symbol window is scrolled down. Draw them
    # before drawing the title, so the title ends up on top for small windows.
    if _menu_scroll > 0:
        _safe_hline(_top_sep_win, 0, 4, curses.ACS_UARROW, _N_SCROLL_ARROWS)

    # Add the 'mainmenu' text as the title, centered at the top
    _safe_addstr(_top_sep_win,
                 0, max((term_width - len(_kconf.mainmenu_text))//2, 0),
                 _kconf.mainmenu_text)

    _top_sep_win.noutrefresh()

    # Note: The menu path at the top is deliberately updated last. See below.

    #
    # Update the symbol window
    #

    _menu_win.erase()

    # Draw the _shown nodes starting from index _menu_scroll up to either as
    # many as fit in the window, or to the end of _shown
    for i in range(_menu_scroll,
                   min(_menu_scroll + _height(_menu_win), len(_shown))):
        node = _shown[i]

        # The 'not _show_all' test avoids showing invisible items in red
        # outside show-all mode, which could look confusing/broken. Invisible
        # symbols show up outside show-all mode if an invisible symbol has
        # visible children in an implicit (indented) menu.
        if _visible(node) or not _show_all:
            style = _style["selection" if i == _sel_node_i else "list"]
        else:
            style = _style["inv-selection" if i == _sel_node_i else "inv-list"]

        _safe_addstr(_menu_win, i - _menu_scroll, 0, _node_str(node), style)

    _menu_win.noutrefresh()

    #
    # Update the bottom separator window
    #

    _bot_sep_win.erase()

    # Draw arrows pointing down if the symbol window is scrolled up
    if _menu_scroll < _max_scroll(_shown, _menu_win):
        _safe_hline(_bot_sep_win, 0, 4, curses.ACS_DARROW, _N_SCROLL_ARROWS)

    # Indicate when show-name/show-help/show-all mode is enabled
    enabled_modes = []
    if _show_help:
        enabled_modes.append("show-help (toggle with [F])")
    if _show_name:
        enabled_modes.append("show-name")
    if _show_all:
        enabled_modes.append("show-all")
    if enabled_modes:
        s = " and ".join(enabled_modes) + " mode enabled"
        _safe_addstr(_bot_sep_win, 0, max(term_width - len(s) - 2, 0), s)

    _bot_sep_win.noutrefresh()

    #
    # Update the help window, which shows either key bindings or help texts
    #

    _help_win.erase()

    if _show_help:
        node = _shown[_sel_node_i]
        if isinstance(node.item, (Symbol, Choice)) and node.help:
            # Wrap the help text to the window width, showing only as many
            # lines as fit
            help_lines = textwrap.wrap(node.help, _width(_help_win))
            for i in range(min(_height(_help_win), len(help_lines))):
                _safe_addstr(_help_win, i, 0, help_lines[i])
        else:
            _safe_addstr(_help_win, 0, 0, "(no help)")
    else:
        for i, line in enumerate(_MAIN_HELP_LINES):
            _safe_addstr(_help_win, i, 0, line)

    _help_win.noutrefresh()

    #
    # Update the top row with the menu path.
    #
    # Doing this last leaves the cursor on the top row, which avoids some minor
    # annoying jumpiness in gnome-terminal when reducing the height of the
    # terminal. It seems to happen whenever the row with the cursor on it
    # disappears.
    #

    _path_win.erase()

    # Draw the menu path ("(Top) -> Menu -> Submenu -> ...")

    menu_prompts = []

    # Walk up from the current menu to the top node, collecting prompts
    menu = _cur_menu
    while menu is not _kconf.top_node:
        # Promptless choices can be entered in show-all mode. Use
        # standard_sc_expr_str() for them, so they show up as
        # '<choice (name if any)>'.
        menu_prompts.append(menu.prompt[0] if menu.prompt else
                            standard_sc_expr_str(menu.item))
        menu = menu.parent
    menu_prompts.append("(Top)")
    menu_prompts.reverse()

    # Hack: We can't put ACS_RARROW directly in the string. Temporarily
    # represent it with NULL.
    menu_path_str = " \0 ".join(menu_prompts)

    # Scroll the menu path to the right if needed to make the current menu's
    # title visible
    if len(menu_path_str) > term_width:
        menu_path_str = menu_path_str[len(menu_path_str) - term_width:]

    # Print the path with the arrows reinserted
    split_path = menu_path_str.split("\0")
    _safe_addstr(_path_win, split_path[0])
    for s in split_path[1:]:
        _safe_addch(_path_win, curses.ACS_RARROW)
        _safe_addstr(_path_win, s)

    _path_win.noutrefresh()
def _parent_menu(node):
# Returns the menu node of the menu that contains 'node'. In addition to
# proper 'menu's, this might also be a 'menuconfig' symbol or a 'choice'.
# "Menu" here means a menu in the interface.
menu = node.parent
while not menu.is_menuconfig:
menu = menu.parent
return menu
def _shown_nodes(menu):
    # Returns the list of menu nodes from 'menu' (see _parent_menu()) that
    # would be shown when entering it

    def rec(node):
        # Walks the list of siblings starting at 'node', returning the nodes
        # to show (recursing into implicit/indented menus)
        res = []

        while node:
            if _visible(node) or _show_all:
                res.append(node)
                if node.list and not node.is_menuconfig:
                    # Nodes from implicit menu created from dependencies. Will
                    # be shown indented. Note that is_menuconfig is True for
                    # menus and choices as well as 'menuconfig' symbols.
                    res += rec(node.list)

            elif node.list and isinstance(node.item, Symbol):
                # Show invisible symbols if they have visible children. This
                # can happen for an m/y-valued symbol with an optional prompt
                # ('prompt "foo" is COND') that is currently disabled. Note
                # that it applies to both 'config' and 'menuconfig' symbols.
                shown_children = rec(node.list)
                if shown_children:
                    res.append(node)
                    if not node.is_menuconfig:
                        res += shown_children

            node = node.next

        return res

    if isinstance(menu.item, Choice):
        # For named choices defined in multiple locations, entering the choice
        # at a particular menu node would normally only show the choice symbols
        # defined there (because that's what the MenuNode tree looks like).
        #
        # That might look confusing, and makes extending choices by defining
        # them in multiple locations less useful. Instead, gather all the child
        # menu nodes for all the choices whenever a choice is entered. That
        # makes all choice symbols visible at all locations.
        #
        # Choices can contain non-symbol items (people do all sorts of weird
        # stuff with them), hence the generality here. We really need to
        # preserve the menu tree at each choice location.
        #
        # Note: Named choices are pretty broken in the C tools, and this is
        # super obscure, so you probably won't find much that relies on this.
        # This whole 'if' could be deleted if you don't care about defining
        # choices in multiple locations to add symbols (which will still work,
        # just with things being displayed in a way that might be unexpected).

        # Do some additional work to avoid listing choice symbols twice if all
        # or part of the choice is copied in multiple locations (e.g. by
        # including some Kconfig file multiple times). We give the prompts at
        # the current location precedence.
        seen_syms = {node.item for node in rec(menu.list)
                     if isinstance(node.item, Symbol)}
        res = []
        for choice_node in menu.item.nodes:
            for node in rec(choice_node.list):
                # 'choice_node is menu' checks if we're dealing with the
                # current location
                if node.item not in seen_syms or choice_node is menu:
                    res.append(node)
                    if isinstance(node.item, Symbol):
                        seen_syms.add(node.item)
        return res

    return rec(menu.list)
def _visible(node):
    # Returns True if the node should appear in the menu (outside show-all
    # mode)

    if not node.prompt or not expr_value(node.prompt[1]):
        # No prompt, or the prompt condition is false
        return False
    # Menus can additionally be hidden via 'visible if'
    return not (node.item == MENU and not expr_value(node.visibility))
def _change_node(node):
    # Changes the value of the menu node 'node' if it is a symbol. Bools and
    # tristates are toggled, while other symbol types pop up a text entry
    # dialog.
    #
    # Returns False if the value of 'node' can't be changed.

    if not _changeable(node):
        return False

    # sc = symbol/choice
    sc = node.item

    if sc.orig_type in (INT, HEX, STRING):
        # Prompt for a new value, re-prompting until it is valid or the
        # dialog is canceled
        s = sc.str_value

        while True:
            s = _input_dialog(
                "{} ({})".format(node.prompt[0], TYPE_TO_STR[sc.orig_type]),
                s, _range_info(sc))

            if s is None:
                # Dialog canceled with ESC
                break

            if sc.orig_type in (INT, HEX):
                s = s.strip()

                # 'make menuconfig' does this too. Hex values not starting with
                # '0x' are accepted when loading .config files though.
                if sc.orig_type == HEX and not s.startswith(("0x", "0X")):
                    s = "0x" + s

            if _check_valid(sc, s):
                _set_val(sc, s)
                break

    elif len(sc.assignable) == 1:
        # Handles choice symbols for choices in y mode, which are a special
        # case: .assignable can be (2,) while .tri_value is 0.
        _set_val(sc, sc.assignable[0])

    else:
        # Set the symbol to the value after the current value in
        # sc.assignable, with wrapping
        val_index = sc.assignable.index(sc.tri_value)
        _set_val(sc, sc.assignable[(val_index + 1) % len(sc.assignable)])

    if _is_y_mode_choice_sym(sc) and not node.list:
        # Immediately jump to the parent menu after making a choice selection,
        # like 'make menuconfig' does, except if the menu node has children
        # (which can happen if a symbol 'depends on' a choice symbol that
        # immediately precedes it).
        _leave_menu()

    return True
def _changeable(node):
    # Returns True if the value of 'node' can be changed

    sc = node.item

    if not isinstance(sc, (Symbol, Choice)):
        return False

    # This will hit for invisible symbols, which appear in show-all mode and
    # when an invisible symbol has visible children (which can happen e.g. for
    # symbols with optional prompts)
    if not node.prompt or not expr_value(node.prompt[1]):
        return False

    # String/int/hex symbols can always be edited. Bool/tristate symbols and
    # choices need more than one assignable value, except for choice symbols
    # in y-mode choices, which can be "toggled" to select them.
    if sc.orig_type in (STRING, INT, HEX):
        return True
    return len(sc.assignable) > 1 or _is_y_mode_choice_sym(sc)
def _set_sel_node_tri_val(tri_val):
    # Sets the value of the currently selected menu entry to 'tri_val', if
    # that value can be assigned

    sc = _shown[_sel_node_i].item
    if not isinstance(sc, (Symbol, Choice)):
        return
    if tri_val in sc.assignable:
        _set_val(sc, tri_val)
def _set_val(sc, val):
    # Wrapper around Symbol/Choice.set_value() for updating the menu state and
    # _conf_changed

    global _conf_changed

    # Use the string representation of tristate values. This makes the format
    # consistent for all symbol types.
    val = TRI_TO_STR.get(val, val)

    if val == sc.str_value:
        # No change -- don't mark the configuration dirty
        return

    sc.set_value(val)
    _conf_changed = True

    # Changing the value of the symbol might have changed what items in the
    # current menu are visible. Recalculate the state.
    _update_menu()
def _update_menu():
    # Updates the current menu after the value of a symbol or choice has been
    # changed. Changing a value might change which items in the menu are
    # visible.
    #
    # If possible, preserves the location of the cursor on the screen when
    # items are added/removed above the selected item.

    global _shown
    global _sel_node_i
    global _menu_scroll

    # Screen row the cursor was on
    old_row = _sel_node_i - _menu_scroll

    selected = _shown[_sel_node_i]
    # Recalculate the visible nodes and relocate the selected node among them
    _shown = _shown_nodes(_cur_menu)
    _sel_node_i = _shown.index(selected)

    # Try to make the cursor stay on the same row in the menu window. This
    # might be impossible if too many nodes have disappeared above the node.
    _menu_scroll = max(_sel_node_i - old_row, 0)
def _input_dialog(title, initial_text, info_text=None):
    # Pops up a dialog that prompts the user for a string
    #
    # title:
    #   Title to display at the top of the dialog window's border
    #
    # initial_text:
    #   Initial text to prefill the input field with
    #
    # info_text:
    #   String to show next to the input field. If None, just the input field
    #   is shown.
    #
    # Return value:
    #   The entered string on Enter, or None if the dialog was canceled with
    #   ESC

    win = _styled_win("body")
    win.keypad(True)

    info_lines = info_text.split("\n") if info_text else []

    # Give the input dialog its initial size
    _resize_input_dialog(win, title, info_lines)

    # Make the cursor visible while editing
    _safe_curs_set(2)

    # Input field text
    s = initial_text

    # Cursor position
    i = len(initial_text)

    def edit_width():
        # Editable width inside the dialog (excluding the border columns)
        return _width(win) - 4

    # Horizontal scroll offset
    hscroll = max(i - edit_width() + 1, 0)

    while True:
        # Draw the "main" display with the menu, etc., so that resizing still
        # works properly. This is like a stack of windows, only hardcoded for
        # now.
        _draw_main()
        _draw_input_dialog(win, title, info_lines, s, i, hscroll)
        curses.doupdate()

        c = _getch_compat(win)

        if c == curses.KEY_RESIZE:
            # Resize the main display too. The dialog floats above it.
            _resize_main()
            _resize_input_dialog(win, title, info_lines)
        elif c == "\n":
            _safe_curs_set(0)
            return s
        elif c == "\x1B":  # \x1B = ESC
            _safe_curs_set(0)
            return None
        elif c == "\0":  # \0 = NUL, ignore
            pass
        else:
            s, i, hscroll = _edit_text(c, s, i, hscroll, edit_width())
def _resize_input_dialog(win, title, info_lines):
    # Resizes the input dialog to a size appropriate for the terminal size

    screen_height, screen_width = _stdscr.getmaxyx()

    # Five rows for border, input field, and padding, plus the info lines
    # and a blank line below them if there are any
    height = 5 + (len(info_lines) + 1 if info_lines else 0)
    height = min(height, screen_height)

    # Wide enough for the title and the longest info line, with padding
    width = max(_INPUT_DIALOG_MIN_WIDTH,
                len(title) + 4,
                *(len(line) + 4 for line in info_lines))
    width = min(width, screen_width)

    # Center the dialog on the screen
    win.resize(height, width)
    win.mvwin((screen_height - height)//2, (screen_width - width)//2)
def _draw_input_dialog(win, title, info_lines, s, i, hscroll):
    # Draws the input dialog 'win' with edit text 's', cursor position 'i',
    # and horizontal scroll 'hscroll'

    field_width = _width(win) - 4

    win.erase()

    # Note: Perhaps having a separate window for the input field would be
    # nicer
    shown_text = s[hscroll:hscroll + field_width]
    _safe_addstr(win, 2, 2,
                 shown_text + " "*(field_width - len(shown_text)),
                 _style["edit"])

    for row, line in enumerate(info_lines):
        _safe_addstr(win, 4 + row, 2, line)

    # Draw the frame last so that it overwrites the body text for small windows
    _draw_frame(win, title)

    # Put the cursor at the current edit position
    _safe_move(win, 2, 2 + i - hscroll)

    win.noutrefresh()
def _load_dialog():
    # Dialog for loading a new configuration

    global _conf_changed
    global _conf_filename
    global _show_all

    if _conf_changed:
        # Warn about discarding unsaved changes first
        c = _key_dialog(
            "Load",
            "You have unsaved changes. Load new\n"
            "configuration anyway?\n"
            "\n"
            " (O)K (C)ancel",
            "oc")

        if c is None or c == "c":
            return

    filename = _conf_filename
    while True:
        # Re-prompt until a file loads successfully or the dialog is canceled
        filename = _input_dialog("File to load", filename, _load_save_info())
        if filename is None:
            return

        filename = os.path.expanduser(filename)

        if _try_load(filename):
            _conf_filename = filename
            _conf_changed = _needs_save()

            # Turn on show-all mode if the selected node is not visible after
            # loading the new configuration. _shown still holds the old state.
            if _shown[_sel_node_i] not in _shown_nodes(_cur_menu):
                _show_all = True

            _update_menu()

            # The message dialog indirectly updates the menu display, so _msg()
            # must be called after the new state has been initialized
            _msg("Success", "Loaded " + filename)
            return
def _try_load(filename):
    # Tries to load a configuration file. Pops up an error and returns False
    # on failure.
    #
    # filename:
    #   Configuration file to load

    try:
        _kconf.load_config(filename)
    except EnvironmentError as e:
        _error("Error loading '{}'\n\n{} (errno: {})"
               .format(filename, e.strerror, errno.errorcode[e.errno]))
        return False
    return True
def _save_dialog(save_fn, default_filename, description):
    # Dialog for saving the current configuration
    #
    # save_fn:
    #   Function to call with 'filename' to save the file
    #
    # default_filename:
    #   Prefilled filename in the input field
    #
    # description:
    #   String describing the thing being saved
    #
    # Return value:
    #   The path to the saved file, or None if no file was saved

    filename = default_filename
    while True:
        filename = _input_dialog("Filename to save {} to".format(description),
                                 filename, _load_save_info())
        if filename is None:
            # Canceled
            return None

        filename = os.path.expanduser(filename)

        msg = _try_save(save_fn, filename, description)
        if not msg:
            # Saving failed (an error was already shown) -- prompt again
            continue

        _msg("Success", msg)
        return filename
def _try_save(save_fn, filename, description):
# Tries to save a configuration file. Returns a message to print on
# success.
#
# save_fn:
# Function to call with 'filename' to save the file
#
# description:
# String describing the thing being saved
#
# Return value:
# A message to print on success, and None on failure
try:
# save_fn() returns a message to print
return save_fn(filename)
except EnvironmentError as e:
_error("Error saving {} to '{}'\n\n{} (errno: {})"
.format(description, e.filename, e.strerror,
errno.errorcode[e.errno]))
return None
def _key_dialog(title, text, keys):
    # Pops up a dialog that can be closed by pressing a key
    #
    # title:
    #   Title to display at the top of the dialog window's border
    #
    # text:
    #   Text to show in the dialog
    #
    # keys:
    #   List of keys that will close the dialog. Other keys (besides ESC) are
    #   ignored. The caller is responsible for providing a hint about which
    #   keys can be pressed in 'text'.
    #
    # Return value:
    #   The key that was pressed to close the dialog. Uppercase characters are
    #   converted to lowercase. ESC will always close the dialog, and returns
    #   None.

    win = _styled_win("body")
    win.keypad(True)

    _resize_key_dialog(win, text)

    while True:
        # See _input_dialog()
        _draw_main()
        _draw_key_dialog(win, title, text)
        curses.doupdate()

        c = _getch_compat(win)

        if c == "\x1B":  # \x1B = ESC
            return None

        if c == curses.KEY_RESIZE:
            # Resize the main display too. The dialog floats above it.
            _resize_main()
            _resize_key_dialog(win, text)
        elif isinstance(c, str):
            c = c.lower()
            if c in keys:
                return c
def _resize_key_dialog(win, text):
    # Resizes the key dialog to a size appropriate for the terminal size

    screen_height, screen_width = _stdscr.getmaxyx()

    lines = text.split("\n")

    # Two rows/columns of padding on each side, clamped to the screen size
    height = min(len(lines) + 4, screen_height)
    width = min(4 + max(len(line) for line in lines), screen_width)

    # Center the dialog on the screen
    win.resize(height, width)
    win.mvwin((screen_height - height)//2, (screen_width - width)//2)
def _draw_key_dialog(win, title, text):
    # Draws the key dialog 'win' with body 'text' and border title 'title'

    win.erase()

    for row, line in enumerate(text.split("\n")):
        _safe_addstr(win, 2 + row, 2, line)

    # Draw the frame last so that it overwrites the body text for small windows
    _draw_frame(win, title)

    win.noutrefresh()
def _draw_frame(win, title):
    # Draw a frame around the inner edges of 'win', with 'title' at the top

    height, width = win.getmaxyx()

    win.attron(_style["frame"])

    # Top and bottom edges
    _safe_hline(win, 0, 0, " ", width)
    _safe_hline(win, height - 1, 0, " ", width)

    # Left and right edges
    _safe_vline(win, 0, 0, " ", height)
    _safe_vline(win, 0, width - 1, " ", height)

    # Centered title on the top edge
    _safe_addstr(win, 0, max((width - len(title))//2, 0), title)

    win.attroff(_style["frame"])
def _jump_to_dialog():
    # Implements the jump-to dialog, where symbols can be looked up via
    # incremental search and jumped to.
    #
    # Returns True if the user jumped to a symbol, and False if the dialog was
    # canceled.
    s = ""  # Search text
    prev_s = None  # Previous search text
    s_i = 0  # Search text cursor position
    hscroll = 0  # Horizontal scroll offset
    sel_node_i = 0  # Index of selected row
    scroll = 0  # Index in 'matches' of the top row of the list
    # Edit box at the top
    edit_box = _styled_win("jump-edit")
    edit_box.keypad(True)
    # List of matches
    matches_win = _styled_win("list")
    # Bottom separator, with arrows pointing down
    bot_sep_win = _styled_win("separator")
    # Help window with instructions at the bottom
    help_win = _styled_win("help")
    # Give windows their initial size
    _resize_jump_to_dialog(edit_box, matches_win, bot_sep_win, help_win,
                           sel_node_i, scroll)
    # Make the cursor visible while typing in the edit box
    _safe_curs_set(2)
    # Logic duplication with _select_{next,prev}_menu_entry(), except we do a
    # functional variant that returns the new (sel_node_i, scroll) values to
    # avoid 'nonlocal'. TODO: Can this be factored out in some nice way?
    def select_next_match():
        # Moves the selection one row down, scrolling the list when the
        # selection would get within _SCROLL_OFFSET rows of the bottom edge
        if sel_node_i == len(matches) - 1:
            return sel_node_i, scroll
        if sel_node_i + 1 >= scroll + _height(matches_win) - _SCROLL_OFFSET \
           and scroll < _max_scroll(matches, matches_win):
            return sel_node_i + 1, scroll + 1
        return sel_node_i + 1, scroll
    def select_prev_match():
        # Moves the selection one row up, scrolling the list when the
        # selection would get within _SCROLL_OFFSET rows of the top edge
        if sel_node_i == 0:
            return sel_node_i, scroll
        if sel_node_i - 1 < scroll + _SCROLL_OFFSET:
            return sel_node_i - 1, max(scroll - 1, 0)
        return sel_node_i - 1, scroll
    while True:
        if s != prev_s:
            # The search text changed. Find new matching nodes.
            prev_s = s
            try:
                # We could use re.IGNORECASE here instead of lower(), but this
                # is noticeably less jerky while inputting regexes like
                # '.*debug$' (though the '.*' is redundant there). Those
                # probably have bad interactions with re.search(), which
                # matches anywhere in the string.
                #
                # It's not horrible either way. Just a bit smoother.
                regex_searches = [re.compile(regex).search
                                  for regex in s.lower().split()]
                # No exception thrown, so the regexes are okay
                bad_re = None
                # List of matching nodes
                matches = []
                # Avoids an attribute lookup per append below
                add_match = matches.append
                # Search symbols and choices
                for node in _sorted_sc_nodes():
                    # Symbol/choice
                    sc = node.item
                    for search in regex_searches:
                        # Both the name and the prompt might be missing, since
                        # we're searching both symbols and choices
                        # Does the regex match either the symbol name or the
                        # prompt (if any)?
                        if not (sc.name and search(sc.name.lower()) or
                                node.prompt and search(node.prompt[0].lower())):
                            # Give up on the first regex that doesn't match, to
                            # speed things up a bit when multiple regexes are
                            # entered
                            break
                    else:
                        add_match(node)
                # Search menus and comments
                for node in _sorted_menu_comment_nodes():
                    for search in regex_searches:
                        if not search(node.prompt[0].lower()):
                            break
                    else:
                        add_match(node)
            except re.error as e:
                # Bad regex. Remember the error message so we can show it.
                bad_re = "Bad regular expression"
                # re.error.msg was added in Python 3.5
                if hasattr(e, "msg"):
                    bad_re += ": " + e.msg
                matches = []
            # Reset scroll and jump to the top of the list of matches
            sel_node_i = scroll = 0
        _draw_jump_to_dialog(edit_box, matches_win, bot_sep_win, help_win,
                             s, s_i, hscroll,
                             bad_re, matches, sel_node_i, scroll)
        curses.doupdate()
        c = _getch_compat(edit_box)
        if c == "\n":  # \n = Enter: jump to the selected match, if any
            if matches:
                _jump_to(matches[sel_node_i])
                _safe_curs_set(0)
                return True
        elif c == "\x1B":  # \x1B = ESC
            _safe_curs_set(0)
            return False
        elif c == curses.KEY_RESIZE:
            # We adjust the scroll so that the selected node stays visible in
            # the list when the terminal is resized, hence the 'scroll'
            # assignment
            scroll = _resize_jump_to_dialog(
                edit_box, matches_win, bot_sep_win, help_win,
                sel_node_i, scroll)
        elif c == "\x06":  # \x06 = Ctrl-F: show info for the selected match
            if matches:
                _safe_curs_set(0)
                _info_dialog(matches[sel_node_i], True)
                _safe_curs_set(2)
                scroll = _resize_jump_to_dialog(
                    edit_box, matches_win, bot_sep_win, help_win,
                    sel_node_i, scroll)
        elif c == curses.KEY_DOWN:
            sel_node_i, scroll = select_next_match()
        elif c == curses.KEY_UP:
            sel_node_i, scroll = select_prev_match()
        elif c in (curses.KEY_NPAGE, "\x04"):  # Page Down/Ctrl-D
            # Keep it simple. This way we get sane behavior for small windows,
            # etc., for free.
            for _ in range(_PG_JUMP):
                sel_node_i, scroll = select_next_match()
        # Page Up (no Ctrl-U, as it's already used by the edit box)
        elif c == curses.KEY_PPAGE:
            for _ in range(_PG_JUMP):
                sel_node_i, scroll = select_prev_match()
        elif c == curses.KEY_END:
            sel_node_i = len(matches) - 1
            scroll = _max_scroll(matches, matches_win)
        elif c == curses.KEY_HOME:
            sel_node_i = scroll = 0
        elif c == "\0":  # \0 = NUL, ignore
            pass
        else:
            # Any other key is handed to the search text edit box
            s, s_i, hscroll = _edit_text(c, s, s_i, hscroll,
                                         _width(edit_box) - 2)
# Obscure Python: we never pass a value for cached_nodes, so it keeps pointing
# at the same list across calls. This memoizes the result without a global.
def _sorted_sc_nodes(cached_nodes=[]):
    # Returns a sorted list of symbol and choice nodes to search. Symbol nodes
    # come first, sorted by name, followed by choice nodes, sorted by prompt
    # and (secondarily) name.
    if not cached_nodes:
        # Symbol nodes, ordered by symbol name
        sym_nodes = []
        for sym in sorted(_kconf.unique_defined_syms,
                          key=lambda sym: sym.name):
            sym_nodes.extend(sym.nodes)
        cached_nodes.extend(sym_nodes)

        # Choice nodes, ordered by prompt. Sorting the choices themselves by
        # name first makes the name act as the secondary key.
        sorted_choices = sorted(_kconf.unique_choices,
                                key=lambda choice: choice.name or "")
        cached_nodes.extend(sorted(
            (node for choice in sorted_choices for node in choice.nodes),
            key=lambda node: node.prompt[0] if node.prompt else ""))

    return cached_nodes
def _sorted_menu_comment_nodes(cached_nodes=[]):
    # Returns a list of menu and comment nodes to search, sorted by prompt,
    # with the menus first. Memoized in the (deliberately) mutable default
    # argument, like _sorted_sc_nodes().
    if not cached_nodes:
        prompt_of = lambda mc: mc.prompt[0]
        cached_nodes.extend(sorted(_kconf.menus, key=prompt_of))
        cached_nodes.extend(sorted(_kconf.comments, key=prompt_of))
    return cached_nodes
def _resize_jump_to_dialog(edit_box, matches_win, bot_sep_win, help_win,
                           sel_node_i, scroll):
    # Resizes the jump-to dialog to fill the terminal.
    #
    # Returns the new scroll index. We adjust the scroll if needed so that the
    # selected node stays visible.
    screen_height, screen_width = _stdscr.getmaxyx()
    bot_sep_win.resize(1, screen_width)
    help_win_height = len(_JUMP_TO_HELP_LINES)
    # 4 = 3 rows for the framed edit box + 1 row for the bottom separator
    matches_win_height = screen_height - help_win_height - 4
    if matches_win_height >= 1:
        # Normal case: stack edit box, matches list, separator, and help
        # window top to bottom
        edit_box.resize(3, screen_width)
        matches_win.resize(matches_win_height, screen_width)
        help_win.resize(help_win_height, screen_width)
        matches_win.mvwin(3, 0)
        bot_sep_win.mvwin(3 + matches_win_height, 0)
        help_win.mvwin(3 + matches_win_height + 1, 0)
    else:
        # Degenerate case. Give up on nice rendering and just prevent errors.
        matches_win_height = 1
        edit_box.resize(screen_height, screen_width)
        matches_win.resize(1, screen_width)
        help_win.resize(1, screen_width)
        for win in matches_win, bot_sep_win, help_win:
            win.mvwin(0, 0)
    # Adjust the scroll so that the selected row is still within the window, if
    # needed
    if sel_node_i - scroll >= matches_win_height:
        return sel_node_i - matches_win_height + 1
    return scroll
def _draw_jump_to_dialog(edit_box, matches_win, bot_sep_win, help_win,
                         s, s_i, hscroll,
                         bad_re, matches, sel_node_i, scroll):
    # Redraws the jump-to dialog. 's', 's_i', and 'hscroll' hold the search
    # edit box state, 'bad_re' a regex error message (or None), 'matches' the
    # list of matching nodes, 'sel_node_i' the index of the selected match,
    # and 'scroll' the index in 'matches' of the first visible row.

    # 2 = one frame column on each side of the edit box text
    edit_width = _width(edit_box) - 2
    #
    # Update list of matches
    #
    matches_win.erase()
    if matches:
        # Draw only the visible slice of matches
        for i in range(scroll,
                       min(scroll + _height(matches_win), len(matches))):
            node = matches[i]
            if isinstance(node.item, (Symbol, Choice)):
                node_str = _name_and_val_str(node.item)
                if node.prompt:
                    node_str += ' "{}"'.format(node.prompt[0])
            elif node.item == MENU:
                node_str = 'menu "{}"'.format(node.prompt[0])
            else:  # node.item == COMMENT
                node_str = 'comment "{}"'.format(node.prompt[0])
            _safe_addstr(matches_win, i - scroll, 0, node_str,
                         _style["selection" if i == sel_node_i else "list"])
    else:
        # bad_re holds the error message from the re.error exception on errors
        _safe_addstr(matches_win, 0, 0, bad_re or "No matches")
    matches_win.noutrefresh()
    #
    # Update bottom separator line
    #
    bot_sep_win.erase()
    # Draw arrows pointing down if the symbol list is scrolled up
    if scroll < _max_scroll(matches, matches_win):
        _safe_hline(bot_sep_win, 0, 4, curses.ACS_DARROW, _N_SCROLL_ARROWS)
    bot_sep_win.noutrefresh()
    #
    # Update help window at bottom
    #
    help_win.erase()
    for i, line in enumerate(_JUMP_TO_HELP_LINES):
        _safe_addstr(help_win, i, 0, line)
    help_win.noutrefresh()
    #
    # Update edit box. We do this last since it makes it handy to position the
    # cursor.
    #
    edit_box.erase()
    _draw_frame(edit_box, "Jump to symbol/choice/menu/comment")
    # Draw arrows pointing up if the symbol list is scrolled down
    if scroll > 0:
        # TODO: Bit ugly that _style["frame"] is repeated here
        _safe_hline(edit_box, 2, 4, curses.ACS_UARROW, _N_SCROLL_ARROWS,
                    _style["frame"])
    # Only the horizontally-scrolled-into-view part of the search text is drawn
    visible_s = s[hscroll:hscroll + edit_width]
    _safe_addstr(edit_box, 1, 1, visible_s)
    # Put the terminal cursor at the cursor position within the edit box
    _safe_move(edit_box, 1, 1 + s_i - hscroll)
    edit_box.noutrefresh()
def _info_dialog(node, from_jump_to_dialog):
    # Shows a fullscreen window with information about 'node'.
    #
    # If 'from_jump_to_dialog' is True, the information dialog was opened from
    # within the jump-to-dialog. In this case, we make '/' from within the
    # information dialog just return, to avoid a confusing recursive invocation
    # of the jump-to-dialog.
    # Top row, with title and arrows point up
    top_line_win = _styled_win("separator")
    # Text display
    text_win = _styled_win("text")
    text_win.keypad(True)
    # Bottom separator, with arrows pointing down
    bot_sep_win = _styled_win("separator")
    # Help window with keys at the bottom
    help_win = _styled_win("help")
    # Give windows their initial size
    _resize_info_dialog(top_line_win, text_win, bot_sep_win, help_win)
    # Get lines of help text
    lines = _info_str(node).split("\n")
    # Index of first row in 'lines' to show
    scroll = 0
    while True:
        _draw_info_dialog(node, lines, scroll, top_line_win, text_win,
                          bot_sep_win, help_win)
        curses.doupdate()
        c = _getch_compat(text_win)
        if c == curses.KEY_RESIZE:
            _resize_info_dialog(top_line_win, text_win, bot_sep_win, help_win)
        # Vim-style j/k movement is accepted alongside the arrow keys below
        elif c in (curses.KEY_DOWN, "j", "J"):
            if scroll < _max_scroll(lines, text_win):
                scroll += 1
        elif c in (curses.KEY_NPAGE, "\x04"):  # Page Down/Ctrl-D
            scroll = min(scroll + _PG_JUMP, _max_scroll(lines, text_win))
        elif c in (curses.KEY_PPAGE, "\x15"):  # Page Up/Ctrl-U
            scroll = max(scroll - _PG_JUMP, 0)
        elif c in (curses.KEY_END, "G"):
            scroll = _max_scroll(lines, text_win)
        elif c in (curses.KEY_HOME, "g"):
            scroll = 0
        elif c in (curses.KEY_UP, "k", "K"):
            if scroll > 0:
                scroll -= 1
        elif c == "/":
            # Support starting a search from within the information dialog
            if from_jump_to_dialog:
                return  # Avoid recursion
            if _jump_to_dialog():
                return  # Jumped to a symbol. Cancel the information dialog.
            # Stay in the information dialog if the jump-to dialog was
            # canceled. Resize it in case the terminal was resized while the
            # fullscreen jump-to dialog was open.
            _resize_info_dialog(top_line_win, text_win, bot_sep_win, help_win)
        elif c in (curses.KEY_LEFT, curses.KEY_BACKSPACE, _ERASE_CHAR,
                   "\x1B",  # \x1B = ESC
                   "q", "Q", "h", "H"):
            # Any of these keys closes the dialog
            return
def _resize_info_dialog(top_line_win, text_win, bot_sep_win, help_win):
    # Resizes the info dialog to fill the terminal
    screen_height, screen_width = _stdscr.getmaxyx()
    top_line_win.resize(1, screen_width)
    bot_sep_win.resize(1, screen_width)
    help_win_height = len(_INFO_HELP_LINES)
    # 2 = top title row + bottom separator row
    text_win_height = screen_height - help_win_height - 2
    if text_win_height >= 1:
        # Normal case: title row, text, separator, and help window stacked
        # top to bottom
        text_win.resize(text_win_height, screen_width)
        help_win.resize(help_win_height, screen_width)
        text_win.mvwin(1, 0)
        bot_sep_win.mvwin(1 + text_win_height, 0)
        help_win.mvwin(1 + text_win_height + 1, 0)
    else:
        # Degenerate case. Give up on nice rendering and just prevent errors.
        text_win.resize(1, screen_width)
        help_win.resize(1, screen_width)
        for win in text_win, bot_sep_win, help_win:
            win.mvwin(0, 0)
def _draw_info_dialog(node, lines, scroll, top_line_win, text_win,
                      bot_sep_win, help_win):
    # Redraws the info dialog. 'lines' holds the lines of the info text, and
    # 'scroll' the index in 'lines' of the first visible row.
    text_win_height, text_win_width = text_win.getmaxyx()
    # Note: The top row is deliberately updated last. See _draw_main().
    #
    # Update text display
    #
    text_win.erase()
    for i, line in enumerate(lines[scroll:scroll + text_win_height]):
        _safe_addstr(text_win, i, 0, line)
    text_win.noutrefresh()
    #
    # Update bottom separator line
    #
    bot_sep_win.erase()
    # Draw arrows pointing down if the symbol window is scrolled up
    if scroll < _max_scroll(lines, text_win):
        _safe_hline(bot_sep_win, 0, 4, curses.ACS_DARROW, _N_SCROLL_ARROWS)
    bot_sep_win.noutrefresh()
    #
    # Update help window at bottom
    #
    help_win.erase()
    for i, line in enumerate(_INFO_HELP_LINES):
        _safe_addstr(help_win, i, 0, line)
    help_win.noutrefresh()
    #
    # Update top row
    #
    top_line_win.erase()
    # Draw arrows pointing up if the information window is scrolled down. Draw
    # them before drawing the title, so the title ends up on top for small
    # windows.
    if scroll > 0:
        _safe_hline(top_line_win, 0, 4, curses.ACS_UARROW, _N_SCROLL_ARROWS)
    title = ("Symbol" if isinstance(node.item, Symbol) else
             "Choice" if isinstance(node.item, Choice) else
             "Menu" if node.item == MENU else
             "Comment") + " information"
    _safe_addstr(top_line_win, 0, max((text_win_width - len(title))//2, 0),
                 title)
    top_line_win.noutrefresh()
def _info_str(node):
    # Builds the info dialog text for the menu node 'node'.
    #
    # The helper functions add their own trailing newlines, and return ""
    # when they have nothing to report.
    item = node.item

    if isinstance(item, Symbol):
        return "".join((
            _name_info(item),
            _prompt_info(item),
            "Type: {}\n".format(TYPE_TO_STR[item.type]),
            _value_info(item),
            _help_info(item),
            _direct_dep_info(item),
            _defaults_info(item),
            _select_imply_info(item),
            _kconfig_def_info(item)))

    if isinstance(item, Choice):
        return "".join((
            _name_info(item),
            _prompt_info(item),
            "Type: {}\n".format(TYPE_TO_STR[item.type]),
            'Mode: {}\n'.format(item.str_value),
            _help_info(item),
            _choice_syms_info(item),
            _direct_dep_info(item),
            _defaults_info(item),
            _kconfig_def_info(item)))

    # node.item in (MENU, COMMENT)
    return _kconfig_def_info(node)
def _name_info(sc):
# Returns a string with the name of the symbol/choice. Names are optional
# for choices.
return "Name: {}\n".format(sc.name) if sc.name else ""
def _prompt_info(sc):
# Returns a string listing the prompts of 'sc' (Symbol or Choice)
s = ""
for node in sc.nodes:
if node.prompt:
s += "Prompt: {}\n".format(node.prompt[0])
return s
def _value_info(sym):
    # Returns a "Value: ..." line for the symbol 'sym'. Only string symbol
    # values get quotes put around them.
    val = sym.str_value
    if sym.orig_type == STRING:
        val = '"{}"'.format(val)
    return "Value: {}\n".format(val)
def _choice_syms_info(choice):
# Returns a string listing the choice symbols in 'choice'. Adds
# "(selected)" next to the selected one.
s = "Choice symbols:\n"
for sym in choice.syms:
s += " - " + sym.name
if sym is choice.selection:
s += " (selected)"
s += "\n"
return s + "\n"
def _help_info(sc):
# Returns a string with the help text(s) of 'sc' (Symbol or Choice).
# Symbols and choices defined in multiple locations can have multiple help
# texts.
s = "\n"
for node in sc.nodes:
if node.help is not None:
s += "Help:\n\n{}\n\n".format(_indent(node.help, 2))
return s
def _direct_dep_info(sc):
    # Returns a string describing the direct dependencies of 'sc' (Symbol or
    # Choice). The direct dependencies are the OR of the dependencies from
    # each definition location ('depends on' plus dependencies inherited from
    # parent items). Returns "" for a trivially-y dependency.
    if sc.direct_dep is _kconf.y:
        return ""
    return 'Direct dependencies (={}):\n{}\n'.format(
        TRI_TO_STR[expr_value(sc.direct_dep)],
        _split_expr_info(sc.direct_dep, 2))
def _defaults_info(sc):
    # Returns a string describing the defaults of 'sc' (Symbol or Choice)
    if not sc.defaults:
        return ""
    # "Default:" or "Defaults:" depending on the count
    s = "Default"
    if len(sc.defaults) > 1:
        s += "s"
    s += ":\n"
    for val, cond in sc.orig_defaults:
        s += " - "
        if isinstance(sc, Symbol):
            s += _expr_str(val)
            # Skip the tristate value hint if the expression is just a single
            # symbol. _expr_str() already shows its value as a string.
            #
            # This also avoids showing the tristate value for string/int/hex
            # defaults, which wouldn't make any sense.
            if isinstance(val, tuple):
                s += ' (={})'.format(TRI_TO_STR[expr_value(val)])
        else:
            # Don't print the value next to the symbol name for choice
            # defaults, as it looks a bit confusing
            s += val.name
        s += "\n"
        if cond is not _kconf.y:
            # Show the default's condition together with its current value
            s += " Condition (={}):\n{}" \
                 .format(TRI_TO_STR[expr_value(cond)],
                         _split_expr_info(cond, 4))
    return s + "\n"
def _split_expr_info(expr, indent):
    # Returns a string with 'expr' split into its top-level && or || operands,
    # with one operand per line, together with the operand's value. This is
    # usually enough to get something readable for long expressions. A fancier
    # recursive thingy would be possible too.
    #
    # indent:
    #   Number of leading spaces to add before the split expression.

    # Split on && when there is more than one AND operand; otherwise fall
    # back to splitting on ||
    if len(split_expr(expr, AND)) > 1:
        split_op = AND
        op_str = "&&"
    else:
        split_op = OR
        op_str = "||"
    s = ""
    for i, term in enumerate(split_expr(expr, split_op)):
        # The first operand gets no operator prefix, only the indent
        s += "{}{} {}".format(indent*" ",
                              " " if i == 0 else op_str,
                              _expr_str(term))
        # Don't bother showing the value hint if the expression is just a
        # single symbol. _expr_str() already shows its value.
        if isinstance(term, tuple):
            s += " (={})".format(TRI_TO_STR[expr_value(term)])
        s += "\n"
    return s
def _select_imply_info(sym):
    # Returns a string with information about which symbols 'select' or 'imply'
    # 'sym'. The selecting/implying symbols are grouped according to which
    # value they select/imply 'sym' to (n/m/y).
    def sis(expr, val, title):
        # sis = selects/implies
        #
        # Lists, under the heading 'title', the selecting/implying symbols in
        # 'expr' whose condition currently evaluates to 'val'
        sis = [si for si in split_expr(expr, OR) if expr_value(si) == val]
        if not sis:
            return ""
        res = title
        for si in sis:
            # The first AND operand is the selecting/implying symbol itself
            res += " - {}\n".format(split_expr(si, AND)[0].name)
        return res + "\n"
    s = ""
    # sym.rev_dep: reverse dependencies from 'select' (per kconfiglib)
    if sym.rev_dep is not _kconf.n:
        s += sis(sym.rev_dep, 2,
                 "Symbols currently y-selecting this symbol:\n")
        s += sis(sym.rev_dep, 1,
                 "Symbols currently m-selecting this symbol:\n")
        s += sis(sym.rev_dep, 0,
                 "Symbols currently n-selecting this symbol (no effect):\n")
    # sym.weak_rev_dep: reverse dependencies from 'imply' (per kconfiglib)
    if sym.weak_rev_dep is not _kconf.n:
        s += sis(sym.weak_rev_dep, 2,
                 "Symbols currently y-implying this symbol:\n")
        s += sis(sym.weak_rev_dep, 1,
                 "Symbols currently m-implying this symbol:\n")
        s += sis(sym.weak_rev_dep, 0,
                 "Symbols currently n-implying this symbol (no effect):\n")
    return s
def _kconfig_def_info(item):
    # Returns a string with the definition of 'item' in Kconfig syntax,
    # together with the definition location(s) and their include and menu paths
    nodes = [item] if isinstance(item, MenuNode) else item.nodes
    s = "Kconfig definition{}, with parent deps. propagated to 'depends on'\n" \
        .format("s" if len(nodes) > 1 else "")
    # Underline the heading ('len(s) - 1' skips the trailing newline)
    s += (len(s) - 1)*"="
    for node in nodes:
        s += "\n\n" \
             "At {}:{}\n" \
             "{}" \
             "Menu path: {}\n\n" \
             "{}" \
             .format(node.filename, node.linenr,
                     _include_path_info(node),
                     _menu_path_info(node),
                     _indent(node.custom_str(_name_and_val_str), 2))
    return s
def _include_path_info(node):
if not node.include_path:
# In the top-level Kconfig file
return ""
return "Included via {}\n".format(
" -> ".join("{}:{}".format(filename, linenr)
for filename, linenr in node.include_path))
def _menu_path_info(node):
    # Returns a string with the menu path leading up to 'node', e.g.
    # "(Top) -> Foo -> Bar"
    components = []
    while node.parent is not _kconf.top_node:
        node = node.parent
        # Promptless choices might appear among the parents. Use
        # standard_sc_expr_str() for them, so that they show up as
        # '<choice (name if any)>'.
        components.append(node.prompt[0] if node.prompt
                          else standard_sc_expr_str(node.item))
    # The walk above goes bottom-up, so reverse the components
    return "(Top)" + "".join(" -> " + comp for comp in reversed(components))
def _indent(s, n):
# Returns 's' with each line indented 'n' spaces. textwrap.indent() is not
# available in Python 2 (it's 3.3+).
return "\n".join(n*" " + line for line in s.split("\n"))
def _name_and_val_str(sc):
    # Custom symbol/choice printer that appends the current value to symbol
    # names. Values are only shown for non-constant (non-quoted) symbols whose
    # names don't look like numbers: things like 123 are actually symbol
    # references, and only work as expected because undefined symbols get
    # their name as their value.
    if not isinstance(sc, Symbol) or sc.is_constant or _is_num(sc.name):
        # Not a symbol we annotate -- fall back on the standard format
        return standard_sc_expr_str(sc)
    if not sc.nodes:
        # Undefined symbol reference
        return "{}(undefined/n)".format(sc.name)
    return '{}(={})'.format(sc.name, sc.str_value)
def _expr_str(expr):
    # Custom expression printer that shows symbol values next to symbol names
    # (see _name_and_val_str())
    return expr_str(expr, _name_and_val_str)
def _styled_win(style):
    # Returns a new curses window with style 'style' (a key into the module's
    # _style dict -- see _set_style()) and space as the fill character. The
    # initial dimensions are (1, 1), so the window needs to be sized and
    # positioned separately.
    win = curses.newwin(1, 1)
    _set_style(win, style)
    return win
def _set_style(win, style):
    # Changes the background style of the existing window 'win' to 'style',
    # using space as the fill character
    win.bkgdset(" ", _style[style])
def _max_scroll(lst, win):
    # Assuming 'lst' is a list of items to be displayed in 'win', returns the
    # maximum number of steps 'win' can be scrolled down. Scrolling stops
    # once the bottom item is visible, so this is the number of hidden rows
    # (never negative).
    hidden_rows = len(lst) - _height(win)
    return hidden_rows if hidden_rows > 0 else 0
def _edit_text(c, s, i, hscroll, width):
    # Implements text editing commands for edit boxes. Takes a character (which
    # could also be e.g. curses.KEY_LEFT) and the edit box state, and returns
    # the new state after the character has been processed.
    #
    # c:
    #   Character from user
    #
    # s:
    #   Current contents of string
    #
    # i:
    #   Current cursor index in string
    #
    # hscroll:
    #   Index in s of the leftmost character in the edit box, for horizontal
    #   scrolling
    #
    # width:
    #   Width in characters of the edit box
    #
    # Return value:
    #   An (s, i, hscroll) tuple for the new state
    if c == curses.KEY_LEFT:
        if i > 0:
            i -= 1
    elif c == curses.KEY_RIGHT:
        if i < len(s):
            i += 1
    elif c in (curses.KEY_HOME, "\x01"):  # \x01 = CTRL-A
        i = 0
    elif c in (curses.KEY_END, "\x05"):  # \x05 = CTRL-E
        i = len(s)
    elif c in (curses.KEY_BACKSPACE, _ERASE_CHAR):
        # Delete the character before the cursor
        if i > 0:
            s = s[:i-1] + s[i:]
            i -= 1
    elif c == curses.KEY_DC:
        # Delete key: remove the character under the cursor
        s = s[:i] + s[i+1:]
    elif c == "\x17":  # \x17 = CTRL-W
        # Delete the word before the cursor.
        # The \W removes characters like ',' one at a time
        new_i = re.search(r"(?:\w*|\W)\s*$", s[:i]).start()
        s = s[:new_i] + s[i:]
        i = new_i
    elif c == "\x0B":  # \x0B = CTRL-K
        # Kill from the cursor to the end of the line
        s = s[:i]
    elif c == "\x15":  # \x15 = CTRL-U
        # Kill from the beginning of the line up to the cursor
        s = s[i:]
        i = 0
    elif isinstance(c, str):
        # Insert character
        s = s[:i] + c + s[i:]
        i += 1
    # Adjust the horizontal scroll so that the cursor never touches the left or
    # right edges of the edit box, except when it's at the beginning or the end
    # of the string
    if i < hscroll + _SCROLL_OFFSET:
        hscroll = max(i - _SCROLL_OFFSET, 0)
    elif i >= hscroll + width - _SCROLL_OFFSET:
        max_scroll = max(len(s) - width + 1, 0)
        hscroll = min(i - width + _SCROLL_OFFSET + 1, max_scroll)
    return s, i, hscroll
def _load_save_info():
# Returns an information string for load/save dialog boxes
return "(Relative to {})\n\nRefer to your home directory with ~" \
.format(os.path.join(os.getcwd(), ""))
def _msg(title, text):
    # Pops up a message dialog with window title 'title' and body 'text' that
    # can be dismissed with Space/Enter/ESC
    _key_dialog(title, text, " \n")
def _error(text):
    # Pops up an error dialog showing 'text', dismissed with Space/Enter/ESC
    _msg("Error", text)
def _node_str(node):
    # Returns the complete menu entry text for a menu node.
    #
    # Example return value: "[*] Support for X"
    # Calculate the indent to print the item with by checking how many levels
    # above it the closest 'menuconfig' item is (this includes menus and
    # choices as well as menuconfig symbols)
    indent = 0
    parent = node.parent
    while not parent.is_menuconfig:
        indent += _SUBMENU_INDENT
        parent = parent.parent
    # This approach gives nice alignment for empty string symbols ("() Foo")
    s = "{:{}}".format(_value_str(node), 3 + indent)
    if _should_show_name(node):
        if isinstance(node.item, Symbol):
            s += " <{}>".format(node.item.name)
        else:
            # For choices, use standard_sc_expr_str(). That way they show up as
            # '<choice (name if any)>'.
            s += " " + standard_sc_expr_str(node.item)
    if node.prompt:
        if node.item == COMMENT:
            s += " *** {} ***".format(node.prompt[0])
        else:
            s += " " + node.prompt[0]
        if isinstance(node.item, Symbol):
            sym = node.item
            # Print "(NEW)" next to symbols without a user value (from e.g. a
            # .config), but skip it for choice symbols in choices in y mode,
            # and for symbols of UNKNOWN type (which generate a warning though)
            if sym.user_value is None and sym.orig_type and \
               not (sym.choice and sym.choice.tri_value == 2):
                s += " (NEW)"
        if isinstance(node.item, Choice) and node.item.tri_value == 2:
            # Print the prompt of the selected symbol after the choice for
            # choices in y mode
            sym = node.item.selection
            if sym:
                for sym_node in sym.nodes:
                    # Use the prompt used at this choice location, in case the
                    # choice symbol is defined in multiple locations
                    if sym_node.parent is node and sym_node.prompt:
                        s += " ({})".format(sym_node.prompt[0])
                        break
                else:
                    # If the symbol isn't defined at this choice location, then
                    # just use whatever prompt we can find for it
                    for sym_node in sym.nodes:
                        if sym_node.prompt:
                            s += " ({})".format(sym_node.prompt[0])
                            break
    # Print "--->" next to nodes that have menus that can potentially be
    # entered. Print "----" if the menu is empty. We don't allow those to be
    # entered.
    if node.is_menuconfig:
        s += " --->" if _shown_nodes(node) else " ----"
    return s
def _should_show_name(node):
    # Returns True if 'node' is a symbol or choice whose name should be shown
    # (names are optional for choices)
    if not node.prompt:
        # Promptless symbols/choices only show up in show-all mode, and always
        # get their name shown there
        return True
    return _show_name and isinstance(node.item, (Symbol, Choice))
def _value_str(node):
    # Returns the value part ("[*]", "<M>", "(foo)" etc.) of a menu node
    item = node.item
    if item in (MENU, COMMENT):
        # Menus and comments have no value part
        return ""
    # Wouldn't normally happen, and generates a warning
    if not item.orig_type:
        return ""
    if item.orig_type in (STRING, INT, HEX):
        return "({})".format(item.str_value)
    # BOOL or TRISTATE
    if _is_y_mode_choice_sym(item):
        # Radio-button look for choice symbols in y-mode choices
        return "(X)" if item.choice.selection is item else "( )"
    tri_val_str = (" ", "M", "*")[item.tri_value]
    if len(item.assignable) <= 1:
        # Pinned to a single value
        return "" if isinstance(item, Choice) else "-{}-".format(tri_val_str)
    if item.type == BOOL:
        return "[{}]".format(tri_val_str)
    # item.type == TRISTATE
    if item.assignable == (1, 2):
        # Only m and y are assignable
        return "{{{}}}".format(tri_val_str)  # {M}/{*}
    return "<{}>".format(tri_val_str)
def _is_y_mode_choice_sym(item):
    # Returns True if 'item' is a symbol in a choice that is in y mode.
    #
    # The choice mode is an upper bound on the visibility of choice symbols,
    # so we can check the choice symbols' own visibility to see if the choice
    # is in y mode (visibility 2 = y).
    return isinstance(item, Symbol) and item.choice and item.visibility == 2
def _check_valid(sym, s):
    # Returns True if the string 's' is a well-formed value for the symbol
    # 'sym'. Otherwise, pops up an error dialog and returns False.
    if sym.orig_type not in (INT, HEX):
        # Anything goes for non-int/hex symbols
        return True

    num_base = 10 if sym.orig_type == INT else 16

    try:
        num = int(s, num_base)
    except ValueError:
        _error("'{}' is a malformed {} value"
               .format(s, TYPE_TO_STR[sym.orig_type]))
        return False

    # Check 's' against the first range whose condition is satisfied, if any
    for low_sym, high_sym, cond in sym.ranges:
        if expr_value(cond):
            low_s = low_sym.str_value
            high_s = high_sym.str_value
            if not int(low_s, num_base) <= num <= int(high_s, num_base):
                _error("{} is outside the range {}-{}"
                       .format(s, low_s, high_s))
                return False
            break

    return True
def _range_info(sym):
    # Returns a "Range: ..." string based on the first range of 'sym' whose
    # condition is satisfied, or None if 'sym' has no active range (or isn't
    # an int/hex symbol)
    if sym.orig_type in (INT, HEX):
        for low, high, cond in sym.ranges:
            if expr_value(cond):
                return "Range: {}-{}".format(low.str_value, high.str_value)
    return None
def _is_num(name):
# Heuristic to see if a symbol name looks like a number, for nicer output
# when printing expressions. Things like 16 are actually symbol names, only
# they get their name as their value when the symbol is undefined.
try:
int(name)
except ValueError:
if not name.startswith(("0x", "0X")):
return False
try:
int(name, 16)
except ValueError:
return False
return True
def _getch_compat(win):
    # Uses get_wch() if available (Python 3.3+) and getch() otherwise.
    #
    # Also falls back on getch() if get_wch() raises curses.error, to work
    # around an issue when resizing the terminal on at least macOS Catalina.
    # (Originally referenced an upstream issue link here -- see the project's
    # history for details.)
    #
    # Also handles a PDCurses resizing quirk.
    try:
        c = win.get_wch()
    except (AttributeError, curses.error):
        c = win.getch()
        # getch() returns an int. Convert byte-sized values to 1-character
        # strings so printable keys look the same as with get_wch().
        if 0 <= c <= 255:
            c = chr(c)
    # Decent resizing behavior on PDCurses requires calling resize_term(0, 0)
    # after receiving KEY_RESIZE, while ncurses (usually) handles terminal
    # resizing automatically in get(_w)ch() (see the end of the
    # resizeterm(3NCURSES) man page).
    #
    # resize_term(0, 0) reliably fails and does nothing on ncurses, so this
    # hack gives ncurses/PDCurses compatibility for resizing. I don't know
    # whether it would cause trouble for other implementations.
    if c == curses.KEY_RESIZE:
        try:
            curses.resize_term(0, 0)
        except curses.error:
            pass
    return c
def _warn(*args):
    # Temporarily drops out of curses mode and prints a warning to stderr,
    # since output written while in curses mode would get lost
    curses.endwin()
    print("menuconfig warning: " + " ".join(str(arg) for arg in args),
          file=sys.stderr)
    curses.doupdate()
# Ignore exceptions from some functions that might fail, e.g. for small
# windows. They usually do reasonable things anyway.
def _safe_curs_set(visibility):
    # curs_set() can fail (e.g. on terminals without cursor-visibility
    # support); ignore the error in that case
    try:
        curses.curs_set(visibility)
    except curses.error:
        pass
def _safe_addstr(win, *args):
    # Clip the line to avoid wrapping to the next line, which looks glitchy.
    # addchstr() would do it for us, but it's not available in the 'curses'
    # module.
    #
    # Accepts either (win, s[, attr]), drawing at the current cursor position,
    # or (win, y, x, s[, attr])
    attr = None
    if isinstance(args[0], str):
        # Called as _safe_addstr(win, s[, attr])
        y, x = win.getyx()
        s = args[0]
        if len(args) == 2:
            attr = args[1]
    else:
        # Called as _safe_addstr(win, y, x, s[, attr])
        y, x, s = args[:3]  # pylint: disable=unbalanced-tuple-unpacking
        if len(args) == 4:
            attr = args[3]
    # Clip to the window width from the start column
    maxlen = _width(win) - x
    # Expand tabs to spaces before clipping; presumably so the clip length
    # matches displayed columns -- TODO confirm
    s = s.expandtabs()
    try:
        # The 'curses' module uses wattr_set() internally if you pass 'attr',
        # overwriting the background style, so setting 'attr' to 0 in the first
        # case won't do the right thing
        if attr is None:
            win.addnstr(y, x, s, maxlen)
        else:
            win.addnstr(y, x, s, maxlen, attr)
    except curses.error:
        pass
def _safe_addch(win, *args):
    # addch() that ignores curses errors (e.g. when drawing at the
    # bottom-right corner of a window)
    try:
        win.addch(*args)
    except curses.error:
        pass
def _safe_hline(win, *args):
    # hline() that ignores curses errors (e.g. for lines that run off small
    # windows)
    try:
        win.hline(*args)
    except curses.error:
        pass
def _safe_vline(win, *args):
    # vline() that ignores curses errors (e.g. for lines that run off small
    # windows)
    try:
        win.vline(*args)
    except curses.error:
        pass
def _safe_move(win, *args):
    # move() that ignores curses errors (e.g. for cursor positions outside
    # small windows)
    try:
        win.move(*args)
    except curses.error:
        pass
def _change_c_lc_ctype_to_utf8():
    # See _CHANGE_C_LC_CTYPE_TO_UTF8
    #
    # If LC_CTYPE is set to the "C" locale, tries to change it to a UTF-8
    # locale instead, following the PEP 538 approach
    if _IS_WINDOWS:
        # Windows rarely has issues here, and the PEP 538 implementation avoids
        # changing the locale on it. None of the UTF-8 locales below were
        # supported from some quick testing either. Play it safe.
        return
    def try_set_locale(loc):
        # Returns True if LC_CTYPE could be changed to 'loc', and False
        # otherwise
        try:
            locale.setlocale(locale.LC_CTYPE, loc)
            return True
        except locale.Error:
            return False
    # Is LC_CTYPE set to the C locale?
    if locale.setlocale(locale.LC_CTYPE) == "C":
        # This list was taken from the PEP 538 implementation in the CPython
        # code, in Python/pylifecycle.c
        for loc in "C.UTF-8", "C.utf8", "UTF-8":
            if try_set_locale(loc):
                # LC_CTYPE successfully changed
                return
# Run the menuconfig interface when this file is executed as a script
if __name__ == "__main__":
    _main()
``` | /content/code_sandbox/scripts/kconfig/menuconfig.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 25,423 |
```python
#!/usr/bin/env python3
# Originally modified from:
# path_to_url
# Writes/updates the zephyr/.config configuration file by merging configuration
# files passed as arguments, e.g. board *_defconfig and application prj.conf
# files.
#
# When fragments haven't changed, zephyr/.config is both the input and the
# output, which just updates it. This is handled in the CMake files.
#
# Also does various checks (most via Kconfiglib warnings).
import argparse
import os
import re
import sys
import textwrap
# Zephyr doesn't use tristate symbols. They're supported here just to make the
# script a bit more generic.
from kconfiglib import Kconfig, split_expr, expr_value, expr_str, BOOL, \
TRISTATE, TRI_TO_STR, AND, OR
def main():
    """Merge the configuration fragments named on the command line, run
    consistency checks on the result, and write the merged .config file, the
    C header, and the list of parsed Kconfig files (aborting with a nonzero
    exit status on warnings-turned-errors)."""
    args = parse_args()

    if args.zephyr_base:
        os.environ['ZEPHYR_BASE'] = args.zephyr_base

    print("Parsing " + args.kconfig_file)
    kconf = Kconfig(args.kconfig_file, warn_to_stderr=False,
                    suppress_traceback=True)

    if args.handwritten_input_configs:
        # Warn for assignments to undefined symbols, but only for handwritten
        # fragments, to avoid warnings-turned-errors when using an old
        # configuration file together with updated Kconfig files
        kconf.warn_assign_undef = True

        # prj.conf may override settings from the board configuration, so
        # disable warnings about symbols being assigned more than once
        kconf.warn_assign_override = False
        kconf.warn_assign_redun = False

    if args.forced_input_configs:
        # Do not warn on a redundant config.
        # The reason is that a regular .config will be followed by the forced
        # config which under normal circumstances should be identical to the
        # configured setting.
        # Only if user has modified to a value that gets overruled by the forced
        # a warning shall be issued.
        kconf.warn_assign_redun = False

    # Load files
    print(kconf.load_config(args.configs_in[0]))
    for config in args.configs_in[1:]:
        # replace=False creates a merged configuration
        print(kconf.load_config(config, replace=False))

    if args.handwritten_input_configs:
        # Check that there are no assignments to promptless symbols, which
        # have no effect.
        #
        # This only makes sense when loading handwritten fragments and not when
        # loading zephyr/.config, because zephyr/.config is configuration
        # output and also assigns promptless symbols.
        check_no_promptless_assign(kconf)

        # Print warnings for symbols that didn't get the assigned value. Only
        # do this for handwritten input too, to avoid likely unhelpful warnings
        # when using an old configuration and updating Kconfig files.
        check_assigned_sym_values(kconf)
        check_assigned_choice_values(kconf)

    if kconf.syms.get('WARN_DEPRECATED', kconf.y).tri_value == 2:
        check_deprecated(kconf)

    if kconf.syms.get('WARN_EXPERIMENTAL', kconf.y).tri_value == 2:
        check_experimental(kconf)

    # Hack: Force all symbols to be evaluated, to catch warnings generated
    # during evaluation. Wait till the end to write the actual output files, so
    # that we don't generate any output if there are warnings-turned-errors.
    #
    # Kconfiglib caches calculated symbol values internally, so this is still
    # fast.
    kconf.write_config(os.devnull)

    # "set more than once" warnings are tolerated when a forced config is in
    # play (see the error_out logic below); everything else is fatal
    warn_only = r"warning:.*set more than once."

    if kconf.warnings:
        if args.forced_input_configs:
            error_out = False
        else:
            error_out = True
        # Put a blank line between warnings to make them easier to read
        for warning in kconf.warnings:
            print("\n" + warning, file=sys.stderr)
            if not error_out and not re.search(warn_only, warning):
                # The warning is not a warn_only, fail the Kconfig.
                error_out = True

        # Turn all warnings into errors, so that e.g. assignments to undefined
        # Kconfig symbols become errors.
        #
        # A warning is generated by this script whenever a symbol gets a
        # different value than the one it was assigned. Keep that one as just a
        # warning for now.
        if error_out:
            err("Aborting due to Kconfig warnings")

    # Write the merged configuration and the C header
    print(kconf.write_config(args.config_out))
    print(kconf.write_autoconf(args.header_out))

    # Write the list of parsed Kconfig files to a file
    write_kconfig_filenames(kconf, args.kconfig_list_out)
def check_no_promptless_assign(kconf):
    """Error out if a configuration fragment assigns a symbol that has no
    prompt anywhere — such an assignment has no effect."""
    for sym in kconf.unique_defined_syms:
        if sym.user_value is None:
            continue
        if not promptless(sym):
            continue
        err(f"""\
{sym.name_and_loc} is assigned in a configuration file, but is not directly
user-configurable (has no prompt). It gets its value indirectly from other
symbols. """ + SYM_INFO_HINT.format(sym))
def check_assigned_sym_values(kconf):
    """Warn for each non-choice symbol whose assigned value did not "take"
    (differs from the value the symbol actually got). Choice symbols are
    checked separately, in check_assigned_choice_values()."""
    for sym in kconf.unique_defined_syms:
        if sym.choice:
            continue

        user_value = sym.user_value
        if user_value is None:
            continue

        # Internally, bool/tristate values are 0/1/2. Convert to the
        # "n"/"m"/"y" form so they compare against str_value.
        if sym.type in (BOOL, TRISTATE):
            user_value = TRI_TO_STR[user_value]

        if user_value == sym.str_value:
            continue

        msg = f"{sym.name_and_loc} was assigned the value '{user_value}'" \
              f" but got the value '{sym.str_value}'. "

        # Mention any unsatisfied 'depends on' conditions in the warning
        mdeps = missing_deps(sym)
        if mdeps:
            expr_strs = []
            for expr in mdeps:
                estr = expr_str(expr)
                if isinstance(expr, tuple):
                    # Parenthesize compound dependencies for readability:
                    # '(FOO || BAR) (=n)' rather than 'FOO || BAR (=n)'
                    estr = f"({estr})"
                expr_strs.append(f"{estr} "
                                 f"(={TRI_TO_STR[expr_value(expr)]})")
            msg += "Check these unsatisfied dependencies: " + \
                   ", ".join(expr_strs) + ". "

        warn(msg + SYM_INFO_HINT.format(sym))
def missing_deps(sym):
    """check_assigned_sym_values() helper: return the unsatisfied parts of
    sym's direct dependencies.

    The direct dependencies ('depends on <expr> && ... && <expr>', possibly
    coming from a surrounding 'if') are split on &&. For bool/tristate
    symbols, a part is unsatisfied when its value is less than the value the
    symbol was assigned ("less" rather than "not equal" to stay general and
    handle tristates, even though Zephyr doesn't use them). For
    string/int/hex symbols, a part is unsatisfied when it evaluates to n.

    Each returned part can be more complicated than a bare symbol, e.g.
    'FOO || BAR' or 'FOO = "string"'.
    """
    if sym.type in (BOOL, TRISTATE):
        def unsatisfied(dep):
            return expr_value(dep) < sym.user_value
    else:
        def unsatisfied(dep):
            return expr_value(dep) == 0

    return [dep for dep in split_expr(sym.direct_dep, AND) if unsatisfied(dep)]
def check_assigned_choice_values(kconf):
    """Warn when a choice symbol that was set =y did not end up as the
    choice's selection.

    Choices get special-cased because two symbols in the same choice may both
    legitimately be set to y — e.g. a board defconfig selection overridden in
    prj.conf. The last one set wins (the rest become n), so checking choice
    symbols like plain symbols would warn spuriously about the loser.
    """
    for choice in kconf.unique_choices:
        picked = choice.user_selection
        if not picked or picked is choice.selection:
            continue
        warn(f"""\
The choice symbol {picked.name_and_loc} was selected (set =y),
but {choice.selection.name_and_loc if choice.selection else "no symbol"} ended
up as the choice selection. """ + SYM_INFO_HINT.format(picked))
# Hint on where to find symbol information, appended to the warnings/errors
# above. Used like SYM_INFO_HINT.format(sym) — '{0.name}' expands to the
# symbol's name.
SYM_INFO_HINT = """\
See path_to_url#CONFIG_{0.name} and/or
look up {0.name} in the menuconfig/guiconfig interface. The Application
Development Primer, Setting Configuration Values, and Kconfig - Tips and Best
Practices sections of the manual might be helpful too.\
"""
def check_deprecated(kconf):
    """Warn for each enabled symbol that selects DEPRECATED."""
    deprecated = kconf.syms.get('DEPRECATED')
    dep_expr = kconf.n if deprecated is None else deprecated.rev_dep
    if dep_expr is kconf.n:
        return

    for selector in split_expr(dep_expr, OR):
        # Only selectors that are fully enabled (tristate value 2, i.e. y)
        if expr_value(selector) != 2:
            continue
        selector_name = split_expr(selector, AND)[0].name
        warn(f'Deprecated symbol {selector_name} is enabled.')
def check_experimental(kconf):
    """Warn for each enabled symbol that selects EXPERIMENTAL."""
    experimental = kconf.syms.get('EXPERIMENTAL')
    dep_expr = kconf.n if experimental is None else experimental.rev_dep
    if dep_expr is kconf.n:
        return

    for selector in split_expr(dep_expr, OR):
        # Only selectors that are fully enabled (tristate value 2, i.e. y)
        if expr_value(selector) != 2:
            continue
        selector_name = split_expr(selector, AND)[0].name
        warn(f'Experimental symbol {selector_name} is enabled.')
def promptless(sym):
    """Return True if 'sym' has no prompt at any of its definition
    locations (a symbol may be defined in several places)."""
    for node in sym.nodes:
        if node.prompt:
            return False
    return True
def write_kconfig_filenames(kconf, kconfig_list_path):
    """Write the absolute paths of all parsed Kconfig files, one per line, to
    'kconfig_list_path'.

    Paths are realpath()'d and deduplicated, and the output is sorted so it
    is deterministic — CMake compares this file to detect changed Kconfig
    files.
    """
    unique_paths = {os.path.realpath(os.path.join(kconf.srctree, fname))
                    for fname in kconf.kconfig_filenames}
    with open(kconfig_list_path, 'w') as out:
        for path in sorted(unique_paths):
            print(path, file=out)
def parse_args():
    """Parse and return the command-line arguments.

    Fixes two defects in the user-visible --forced-input-configs help text:
    "an forced" -> "a forced" (plus the missing space between the joined
    string fragments after the period), and the double space produced by
    "any user " + " adjustments.".
    """
    parser = argparse.ArgumentParser(allow_abbrev=False)

    parser.add_argument("--handwritten-input-configs",
                        action="store_true",
                        help="Assume the input configuration fragments are "
                             "handwritten fragments and do additional checks "
                             "on them, like no promptless symbols being "
                             "assigned")
    parser.add_argument("--forced-input-configs",
                        action="store_true",
                        help="Indicate the input configuration files are "
                             "followed by a forced configuration file. "
                             "The forced configuration is used to forcefully "
                             "set specific configuration settings to a "
                             "pre-defined value and thereby remove any user "
                             "adjustments.")
    parser.add_argument("--zephyr-base",
                        help="Path to current Zephyr installation")
    parser.add_argument("kconfig_file",
                        help="Top-level Kconfig file")
    parser.add_argument("config_out",
                        help="Output configuration file")
    parser.add_argument("header_out",
                        help="Output header file")
    parser.add_argument("kconfig_list_out",
                        help="Output file for list of parsed Kconfig files")
    parser.add_argument("configs_in",
                        nargs="+",
                        help="Input configuration fragments. Will be merged "
                             "together.")

    return parser.parse_args()
def warn(msg):
    """Print a warning to stderr, word-wrapped at 100 columns.

    The wide fill() width tries to keep the symbol-reference link on a single
    line, and the surrounding blank lines set the message off from the
    (usually spammy) CMake output it gets printed within.
    """
    body = textwrap.fill("warning: " + msg, 100)
    print(f"\n{body}\n", file=sys.stderr)
def err(msg):
    """Exit with a nonzero status, printing the word-wrapped error message
    (sys.exit() writes a string argument to stderr)."""
    body = textwrap.fill("error: " + msg, 100)
    sys.exit(f"\n{body}\n")
if __name__ == "__main__":
    # Entry point when run as a script (the normal CMake invocation)
    main()
``` | /content/code_sandbox/scripts/kconfig/kconfig.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,842 |
```python
#!/usr/bin/env python3
import csv
import os
from kconfiglib import standard_kconfig
def hardenconfig(kconf):
    """Load the current configuration and report the options that deviate
    from the recommendations in scripts/kconfig/hardened.csv."""
    kconf.load_config()
    csv_path = os.path.join(os.environ['ZEPHYR_BASE'],
                            'scripts', 'kconfig', 'hardened.csv')
    display_results(compare_with_hardened_conf(kconf, csv_path))
class Option:
    """One hardening-check entry: a symbol name, its recommended value, and
    — when the symbol exists in the Kconfig tree — its current value and
    Symbol object.

    The check verdict is precomputed into 'result': 'NA' when the symbol is
    undefined (no current value), otherwise 'PASS'/'FAIL' depending on
    whether the current value matches the recommendation.
    """

    def __init__(self, name, recommended, current=None, symbol=None):
        self.name = name
        self.recommended = recommended
        self.current = current
        self.symbol = symbol
        if current is None:
            self.result = 'NA'
        else:
            self.result = 'PASS' if recommended == current else 'FAIL'
def compare_with_hardened_conf(kconf, hardened_kconf_filename):
    """Build the list of Option objects to report: one per row of the
    hardened CSV file, plus one per symbol that selects EXPERIMENTAL or
    DEPRECATED (those are recommended off)."""
    options = []

    with open(hardened_kconf_filename) as csvfile:
        csvreader = csv.reader(csvfile)

        # Each useful row is 'NAME,recommended[,...]'; rows with fewer than
        # two fields are skipped
        for row in csvreader:
            if len(row) > 1:
                name = row[0]
                recommended = row[1]
                try:
                    symbol = kconf.syms[name]
                    current = symbol.str_value
                except KeyError:
                    # Symbol not defined in this Kconfig tree; shows as 'NA'
                    symbol = None
                    current = None
                options.append(Option(name=name, current=current,
                                      recommended=recommended, symbol=symbol))
    for node in kconf.node_iter():
        for select in node.selects:
            # NOTE(review): 'select' is a (symbol, condition) expression, and
            # 'in' only matches top-level elements of that tuple. Also assumes
            # EXPERIMENTAL and DEPRECATED are always defined symbols —
            # kconf.syms[...] would raise KeyError otherwise; confirm.
            if kconf.syms["EXPERIMENTAL"] in select or kconf.syms["DEPRECATED"] in select:
                options.append(Option(name=node.item.name, current=node.item.str_value, recommended='n', symbol=node.item))
    return options
def display_results(options):
    """Print a table of the hardening checks that failed to stdout."""
    # header
    print('{:^50}|{:^13}|{:^20}'.format('name', 'current', 'recommended'), end='')
    print('||{:^28}\n'.format('check result'), end='')
    print('=' * 116)

    # Only visible failures are shown, to keep the output readable.
    # TODO: add command line option to show all results
    for opt in options:
        if opt.result != 'FAIL' or opt.symbol.visibility == 0:
            continue
        print('CONFIG_{:<43}|{:^13}|{:^20}'.format(
            opt.name, opt.current, opt.recommended), end='')
        print('||{:^28}\n'.format(opt.result), end='')
    print()
def main():
    # standard_kconfig() comes from kconfiglib; presumably it parses the
    # Kconfig tree named on the command line — confirm against kconfiglib docs
    hardenconfig(standard_kconfig())


if __name__ == '__main__':
    main()
``` | /content/code_sandbox/scripts/kconfig/hardenconfig.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 548 |
```python
#
import functools
import inspect
import operator
import os
import pickle
import re
import sys
from pathlib import Path
ZEPHYR_BASE = str(Path(__file__).resolve().parents[2])
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts", "dts",
"python-devicetree", "src"))
# Types we support
# 'string', 'int', 'hex', 'bool'

# When KCONFIG_DOC_MODE=1 (presumably set for documentation builds — confirm),
# no devicetree is loaded and every preprocessor function below degrades to a
# fixed default value
doc_mode = os.environ.get('KCONFIG_DOC_MODE') == "1"

if not doc_mode:
    EDT_PICKLE = os.environ.get("EDT_PICKLE")

    # The "if" handles a missing dts.
    if EDT_PICKLE is not None and os.path.isfile(EDT_PICKLE):
        with open(EDT_PICKLE, 'rb') as f:
            # NOTE(review): pickle.load() can execute arbitrary code from the
            # file; EDT_PICKLE is expected to be build-generated, not
            # untrusted input
            edt = pickle.load(f)
            edtlib = inspect.getmodule(edt)
    else:
        # No devicetree available; functions fall back to their defaults
        edt = None
        edtlib = None
def _warn(kconf, msg):
print("{}:{}: WARNING: {}".format(kconf.filename, kconf.linenr, msg))
def _dt_units_to_scale(unit):
if not unit:
return 0
if unit in {'k', 'K'}:
return 10
if unit in {'m', 'M'}:
return 20
if unit in {'g', 'G'}:
return 30
if unit in {'kb', 'Kb'}:
return 13
if unit in {'mb', 'Mb'}:
return 23
if unit in {'gb', 'Gb'}:
return 33
def dt_chosen_label(kconf, _, chosen):
    """Return the "label" property of the node pointed at by the /chosen
    property named 'chosen'; the node's devicetree name when it has no
    "label" property; or "" when there is no such node (or no devicetree)."""
    if doc_mode or edt is None:
        return ""

    node = edt.chosen_node(chosen)
    if not node:
        return ""

    if "label" in node.props:
        return node.props["label"].val
    return node.name
def dt_chosen_enabled(kconf, _, chosen):
    """Return "y" when /chosen contains a property named 'chosen' that points
    at an enabled (status "okay") node, "n" otherwise."""
    if doc_mode or edt is None:
        return "n"

    node = edt.chosen_node(chosen)
    if node and node.status == "okay":
        return "y"
    return "n"
def dt_chosen_path(kconf, _, chosen):
    """
    This function takes a /chosen node property and returns the path
    to the node in the property value, or the empty string.
    """
    if doc_mode or edt is None:
        # Fixed: previously returned "n" here, but this function produces a
        # path, not a boolean — the "no devicetree" fallback must be the
        # empty path to match the documented contract
        return ""

    node = edt.chosen_node(chosen)

    return node.path if node else ""
def dt_chosen_has_compat(kconf, _, chosen, compat):
    """Return "y" when the node pointed at by the /chosen property 'chosen'
    has the compatible string 'compat', "n" otherwise."""
    if doc_mode or edt is None:
        return "n"

    node = edt.chosen_node(chosen)
    if node is not None and compat in node.compats:
        return "y"
    return "n"
def dt_node_enabled(kconf, name, node):
    """
    This function is used to test if a node is enabled (has status
    'okay') or not.

    The 'node' argument is a string which is either a path or an
    alias, or both, depending on 'name'.

    If 'name' is 'dt_path_enabled', 'node' is an alias or a path. If
    'name' is 'dt_alias_enabled, 'node' is an alias.
    """
    if doc_mode or edt is None:
        return "n"

    if name == "dt_alias_enabled":
        if node.startswith("/"):
            # EDT.get_node() works with either aliases or paths. If we
            # are specifically being asked about an alias, reject paths.
            return "n"
    else:
        # Make sure this is being called appropriately.
        assert name == "dt_path_enabled"

    try:
        node = edt.get_node(node)
    except edtlib.EDTError:
        # Unknown alias/path resolves to "not enabled"
        return "n"

    return "y" if node and node.status == "okay" else "n"
def dt_nodelabel_enabled(kconf, _, label):
    """Like dt_node_enabled(), but 'label' is a node label — the "foo" in:

        foo: some-node { ... };
    """
    if doc_mode or edt is None:
        return "n"

    node = edt.label2node.get(label)
    if node and node.status == "okay":
        return "y"
    return "n"
def _node_reg_addr(node, index, unit):
    """Address of reg[index] of 'node', shifted right by the scale for 'unit'
    (see _dt_units_to_scale); 0 when the node, the register, or its address
    is missing."""
    if not node or not node.regs:
        return 0

    idx = int(index)
    if idx >= len(node.regs):
        return 0

    addr = node.regs[idx].addr
    if addr is None:
        return 0

    return addr >> _dt_units_to_scale(unit)
def _node_reg_size(node, index, unit):
    """Size of reg[index] of 'node', shifted right by the scale for 'unit'
    (see _dt_units_to_scale); 0 when the node, the register, or its size is
    missing."""
    if not node or not node.regs:
        return 0

    idx = int(index)
    if idx >= len(node.regs):
        return 0

    size = node.regs[idx].size
    if size is None:
        return 0

    return size >> _dt_units_to_scale(unit)
def _node_int_prop(node, prop, unit=None):
    """Value of the integer property 'prop' on 'node', shifted right by the
    scale for 'unit' (see _dt_units_to_scale: k/K, m/M, g/G, kb/Kb, mb/Mb,
    gb/Gb); 0 when the node or property is missing or 'prop' is not of type
    "int"."""
    if not node or prop not in node.props:
        return 0

    int_prop = node.props[prop]
    if int_prop.type != "int":
        return 0

    return int_prop.val >> _dt_units_to_scale(unit)
def _node_array_prop(node, prop, index=0, unit=None):
    """Element 'index' of the array property 'prop' on 'node', shifted right
    by the scale for 'unit' (see _dt_units_to_scale: k/K, m/M, g/G); 0 when
    the node or property is missing, 'prop' is not of type "array", or
    'index' is out of range."""
    if not node or prop not in node.props:
        return 0

    array_prop = node.props[prop]
    if array_prop.type != "array":
        return 0

    idx = int(index)
    if idx >= len(array_prop.val):
        return 0

    return array_prop.val[idx] >> _dt_units_to_scale(unit)
def _node_ph_array_prop(node, prop, index, cell, unit=None):
    """Value of the cell named 'cell' in entry 'index' of the phandle-array
    property 'prop' on 'node', shifted right by the scale for 'unit' (see
    _dt_units_to_scale: k/K, m/M, g/G); 0 on any miss — missing node or
    property, wrong property type, index out of range, or unknown cell."""
    if not node or prop not in node.props:
        return 0

    ph_prop = node.props[prop]
    if ph_prop.type != "phandle-array":
        return 0

    idx = int(index)
    if idx >= len(ph_prop.val):
        return 0

    entry = ph_prop.val[idx]
    if cell not in entry.data:
        return 0

    return entry.data[cell] >> _dt_units_to_scale(unit)
def _dt_chosen_reg_addr(kconf, chosen, index=0, unit=None):
    """Address of reg[index] of the node pointed at by the /chosen property
    'chosen', shifted right by the scale for 'unit' (see _dt_units_to_scale);
    0 when there is no devicetree or no such node/register."""
    if doc_mode or edt is None:
        return 0

    return _node_reg_addr(edt.chosen_node(chosen), index, unit)
def _dt_chosen_reg_size(kconf, chosen, index=0, unit=None):
    """Size of reg[index] of the node pointed at by the /chosen property
    'chosen', shifted right by the scale for 'unit' (see _dt_units_to_scale);
    0 when there is no devicetree or no such node/register."""
    if doc_mode or edt is None:
        return 0

    return _node_reg_size(edt.chosen_node(chosen), index, unit)
def dt_chosen_reg(kconf, name, chosen, index=0, unit=None):
    """Dispatch the dt_chosen_reg_{addr,size}_{int,hex} preprocessor
    functions to the matching helper, formatting the result as a decimal
    ("..._int") or hexadecimal ("..._hex") string."""
    if name in ("dt_chosen_reg_size_int", "dt_chosen_reg_size_hex"):
        val = _dt_chosen_reg_size(kconf, chosen, index, unit)
    elif name in ("dt_chosen_reg_addr_int", "dt_chosen_reg_addr_hex"):
        val = _dt_chosen_reg_addr(kconf, chosen, index, unit)
    else:
        return None

    return hex(val) if name.endswith("_hex") else str(val)
def _dt_chosen_partition_addr(kconf, chosen, index=0, unit=None):
    """
    This function takes a 'chosen' property and treats that property as a path
    to an EDT node. If it finds an EDT node, it will look to see if that
    node has a register, and if that node has a grandparent that has a register
    at the given 'index'. The addition of both addresses will be returned, if
    not, we return 0.

    The function will divide the value based on 'unit':
        None        No division
        'k' or 'K'  divide by 1024 (1 << 10)
        'm' or 'M'  divide by 1,048,576 (1 << 20)
        'g' or 'G'  divide by 1,073,741,824 (1 << 30)
        'kb' or 'Kb'  divide by 8192 (1 << 13)
        'mb' or 'Mb'  divide by 8,388,608 (1 << 23)
        'gb' or 'Gb'  divide by 8,589,934,592 (1 << 33)
    """
    if doc_mode or edt is None:
        return 0

    node = edt.chosen_node(chosen)
    if not node:
        return 0

    p_node = node.parent
    if not p_node:
        return 0

    # NOTE(review): adds the grandparent's reg[index] address to the node's
    # own reg[0] address — presumably a partition offset relative to its
    # containing device; confirm against the partition layout in dts
    return _node_reg_addr(p_node.parent, index, unit) + _node_reg_addr(node, 0, unit)
def dt_chosen_partition_addr(kconf, name, chosen, index=0, unit=None):
    """Format _dt_chosen_partition_addr() as a decimal ("..._int") or
    hexadecimal ("..._hex") string, depending on which preprocessor function
    name was invoked."""
    if name not in ("dt_chosen_partition_addr_int",
                    "dt_chosen_partition_addr_hex"):
        return None

    val = _dt_chosen_partition_addr(kconf, chosen, index, unit)
    return hex(val) if name.endswith("_hex") else str(val)
def _dt_node_reg_addr(kconf, path, index=0, unit=None):
    """Address of reg[index] of the node at 'path', shifted right by the
    scale for 'unit' (see _dt_units_to_scale); 0 when there is no devicetree
    or no such node/register."""
    if doc_mode or edt is None:
        return 0

    try:
        node = edt.get_node(path)
    except edtlib.EDTError:
        return 0

    return _node_reg_addr(node, index, unit)
def _dt_node_reg_size(kconf, path, index=0, unit=None):
    """Size of reg[index] of the node at 'path', shifted right by the scale
    for 'unit' (see _dt_units_to_scale); 0 when there is no devicetree or no
    such node/register."""
    if doc_mode or edt is None:
        return 0

    try:
        node = edt.get_node(path)
    except edtlib.EDTError:
        return 0

    return _node_reg_size(node, index, unit)
def dt_node_reg(kconf, name, path, index=0, unit=None):
    """Dispatch the dt_node_reg_{addr,size}_{int,hex} preprocessor functions
    to the matching helper, formatting the result as a decimal ("..._int")
    or hexadecimal ("..._hex") string."""
    if name in ("dt_node_reg_size_int", "dt_node_reg_size_hex"):
        val = _dt_node_reg_size(kconf, path, index, unit)
    elif name in ("dt_node_reg_addr_int", "dt_node_reg_addr_hex"):
        val = _dt_node_reg_addr(kconf, path, index, unit)
    else:
        return None

    return hex(val) if name.endswith("_hex") else str(val)
def dt_nodelabel_reg(kconf, name, label, index=0, unit=None):
    """Like dt_node_reg(), but 'label' is a node label — the "foo" in:

        foo: some-node { ... };

    Unknown labels yield "0" / "0x0" depending on the requested format.
    """
    known_names = ("dt_nodelabel_reg_size_int", "dt_nodelabel_reg_size_hex",
                   "dt_nodelabel_reg_addr_int", "dt_nodelabel_reg_addr_hex")
    if name not in known_names:
        return None

    node = None if (doc_mode or edt is None) else edt.label2node.get(label)
    if not node:
        return "0x0" if name.endswith("_hex") else "0"

    helper = _dt_node_reg_size if "size" in name else _dt_node_reg_addr
    val = helper(kconf, node.path, index, unit)
    return hex(val) if name.endswith("_hex") else str(val)
def _dt_node_bool_prop_generic(node_search_function, search_arg, prop):
"""
This function takes the 'node_search_function' and uses it to search for
a node with 'search_arg' and if node exists, checks if 'prop' exists
inside the node and is a boolean, if it is true, returns "y".
Otherwise, it returns "n".
"""
try:
node = node_search_function(search_arg)
except edtlib.EDTError:
return "n"
if node is None:
return "n"
if prop not in node.props:
return "n"
if node.props[prop].type != "boolean":
return "n"
if node.props[prop].val:
return "y"
return "n"
def dt_node_bool_prop(kconf, _, path, prop):
    """Return "y" when the node at 'path' has a true boolean property
    'prop', "n" otherwise (including when there is no devicetree)."""
    if doc_mode or edt is None:
        return "n"

    return _dt_node_bool_prop_generic(edt.get_node, path, prop)
def dt_nodelabel_bool_prop(kconf, _, label, prop):
    """Return "y" when the node with label 'label' has a true boolean
    property 'prop', "n" otherwise (including when there is no devicetree)."""
    if doc_mode or edt is None:
        return "n"

    return _dt_node_bool_prop_generic(edt.label2node.get, label, prop)
def dt_chosen_bool_prop(kconf, _, chosen, prop):
    """Return "y" when the node pointed at by the /chosen property 'chosen'
    has a true boolean property 'prop', "n" otherwise."""
    if doc_mode or edt is None:
        return "n"

    return _dt_node_bool_prop_generic(edt.chosen_node, chosen, prop)
def _dt_node_has_prop_generic(node_search_function, search_arg, prop):
"""
This function takes the 'node_search_function' and uses it to search for
a node with 'search_arg' and if node exists, then checks if 'prop'
exists inside the node and returns "y". Otherwise, it returns "n".
"""
try:
node = node_search_function(search_arg)
except edtlib.EDTError:
return "n"
if node is None:
return "n"
if prop in node.props:
return "y"
return "n"
def dt_node_has_prop(kconf, _, path, prop):
    """Return "y" when the node at 'path' has a property named 'prop',
    "n" otherwise (including when there is no devicetree)."""
    if doc_mode or edt is None:
        return "n"

    return _dt_node_has_prop_generic(edt.get_node, path, prop)
def dt_nodelabel_has_prop(kconf, _, label, prop):
    """Return "y" when the node with label 'label' has a property named
    'prop', "n" otherwise (including when there is no devicetree)."""
    if doc_mode or edt is None:
        return "n"

    return _dt_node_has_prop_generic(edt.label2node.get, label, prop)
def dt_node_int_prop(kconf, name, path, prop, unit=None):
    """Value of the integer property 'prop' of the node at 'path', as a
    decimal ("dt_node_int_prop_int") or hexadecimal ("dt_node_int_prop_hex")
    string, scaled per 'unit' (see _dt_units_to_scale: k/K, m/M, g/G, kb/Kb,
    mb/Mb, gb/Gb); "0" on any miss."""
    if doc_mode or edt is None:
        return "0"

    try:
        node = edt.get_node(path)
    except edtlib.EDTError:
        return "0"

    val = _node_int_prop(node, prop, unit)
    if name == "dt_node_int_prop_int":
        return str(val)
    if name == "dt_node_int_prop_hex":
        return hex(val)
def dt_node_array_prop(kconf, name, path, prop, index, unit=None):
    """Element 'index' of the array property 'prop' of the node at 'path',
    as a decimal ("dt_node_array_prop_int") or hexadecimal
    ("dt_node_array_prop_hex") string, scaled per 'unit' (see
    _dt_units_to_scale: k/K, m/M, g/G); "0" on any miss."""
    if doc_mode or edt is None:
        return "0"

    try:
        node = edt.get_node(path)
    except edtlib.EDTError:
        return "0"

    val = _node_array_prop(node, prop, index, unit)
    if name == "dt_node_array_prop_int":
        return str(val)
    if name == "dt_node_array_prop_hex":
        return hex(val)
def dt_node_ph_array_prop(kconf, name, path, prop, index, cell, unit=None):
    """Value of the cell named 'cell' in entry 'index' of the phandle-array
    property 'prop' of the node at 'path', as a decimal
    ("dt_node_ph_array_prop_int") or hexadecimal
    ("dt_node_ph_array_prop_hex") string, scaled per 'unit' (see
    _dt_units_to_scale: k/K, m/M, g/G); "0" on any miss."""
    if doc_mode or edt is None:
        return "0"

    try:
        node = edt.get_node(path)
    except edtlib.EDTError:
        return "0"

    val = _node_ph_array_prop(node, prop, index, cell, unit)
    if name == "dt_node_ph_array_prop_int":
        return str(val)
    if name == "dt_node_ph_array_prop_hex":
        return hex(val)
def dt_node_ph_prop_path(kconf, name, path, prop):
    """Path of the node pointed to by the phandle property 'prop' of the
    node at 'path'; "" when anything along the way is missing or 'prop' is
    not of type "phandle"."""
    if doc_mode or edt is None:
        return ""

    try:
        node = edt.get_node(path)
    except edtlib.EDTError:
        return ""

    if prop not in node.props or node.props[prop].type != "phandle":
        return ""

    target = node.props[prop].val
    return target.path if target else ""
def dt_node_str_prop_equals(kconf, _, path, prop, val):
    """Return "y" when the node at 'path' has a string property 'prop'
    whose value equals 'val', "n" otherwise."""
    if doc_mode or edt is None:
        return "n"

    try:
        node = edt.get_node(path)
    except edtlib.EDTError:
        return "n"

    if prop not in node.props or node.props[prop].type != "string":
        return "n"

    return "y" if node.props[prop].val == val else "n"
def dt_has_compat(kconf, _, compat):
    """
    Return "y" if at least one node in the EDT declares compatibility
    with 'compat' (regardless of status), and "n" otherwise or when no
    EDT is available.
    """
    if not doc_mode and edt is not None and compat in edt.compat2nodes:
        return "y"
    return "n"
def dt_compat_enabled(kconf, _, compat):
    """
    Return "y" if at least one status-"okay" node in the EDT is
    compatible with 'compat', and "n" otherwise or when no EDT is
    available.
    """
    if not doc_mode and edt is not None and compat in edt.compat2okay:
        return "y"
    return "n"
def dt_compat_on_bus(kconf, _, compat, bus):
    """
    Return "y" if some status-"okay" node compatible with 'compat' sits
    on the bus named 'bus', and "n" otherwise or when no EDT is
    available.
    """
    if doc_mode or edt is None:
        return "n"
    okay_nodes = edt.compat2okay.get(compat, [])
    found = any(n.on_buses is not None and bus in n.on_buses
                for n in okay_nodes)
    return "y" if found else "n"
def dt_compat_any_has_prop(kconf, _, compat, prop):
    """
    Return "y" if any status-"okay" node compatible with 'compat' has a
    valid property named 'prop', and "n" otherwise or when no EDT is
    available.
    """
    if doc_mode or edt is None:
        return "n"
    okay_nodes = edt.compat2okay.get(compat, [])
    return "y" if any(prop in n.props for n in okay_nodes) else "n"
def dt_nodelabel_has_compat(kconf, _, label, compat):
    """
    Return "y" if a node with node label 'label' exists in the EDT and
    lists 'compat' among its compatibles, and "n" otherwise or when no
    EDT is available.
    """
    if doc_mode or edt is None:
        return "n"
    target = edt.label2node.get(label)
    if target and compat in target.compats:
        return "y"
    return "n"
def dt_node_has_compat(kconf, _, path, compat):
    """
    Return "y" if an EDT node exists at 'path' and lists 'compat' among
    its compatibles, and "n" otherwise or when no EDT is available.
    """
    if doc_mode or edt is None:
        return "n"
    try:
        target = edt.get_node(path)
    except edtlib.EDTError:
        return "n"
    return "y" if target and compat in target.compats else "n"
def dt_nodelabel_enabled_with_compat(kconf, _, label, compat):
    """
    Return "y" if some status-"okay" node compatible with 'compat'
    carries the node label 'label', and "n" otherwise or when no EDT is
    available.
    """
    if doc_mode or edt is None:
        return "n"
    candidates = edt.compat2okay.get(compat, [])
    return "y" if any(label in n.labels for n in candidates) else "n"
def dt_nodelabel_array_prop_has_val(kconf, _, label, prop, val):
    """
    Return "y" if the node with node label 'label' has a property 'prop'
    of type "array" that contains the integer 'val' ('val' is parsed
    with base auto-detection, so "0x10" and "16" are equivalent).
    Return "n" otherwise or when no EDT is available.
    """
    if doc_mode or edt is None:
        return "n"
    target = edt.label2node.get(label)
    if not target:
        return "n"
    prop_obj = target.props.get(prop)
    if prop_obj is None or prop_obj.type != "array":
        return "n"
    return "y" if int(val, base=0) in prop_obj.val else "n"
def dt_nodelabel_path(kconf, _, label):
    """
    Return the path of the node carrying node label 'label' (not a label
    property), or the empty string if no such node exists or no EDT is
    available.
    """
    if doc_mode or edt is None:
        return ""
    match = edt.label2node.get(label)
    if not match:
        return ""
    return match.path
def dt_node_parent(kconf, _, path):
    """
    Return the path of the parent of the EDT node at 'path', or the
    empty string when there is no such node, the node has no parent, or
    no EDT is available.
    """
    if doc_mode or edt is None:
        return ""
    try:
        child = edt.get_node(path)
    except edtlib.EDTError:
        return ""
    if child is None or child.parent is None:
        return ""
    return child.parent.path
def dt_gpio_hogs_enabled(kconf, _):
    """
    Return "y" if any status-"okay" node in the EDT defines GPIO hogs,
    and "n" otherwise or when no EDT is available.
    """
    if doc_mode or edt is None:
        return "n"
    hog_active = any(n.gpio_hogs and n.status == "okay" for n in edt.nodes)
    return "y" if hog_active else "n"
def normalize_upper(kconf, _, string):
    """
    Return 'string' converted to upper case, with every character that
    is not alphanumeric or an underscore replaced by '_'. Useful for
    turning arbitrary strings into Kconfig-symbol-shaped names.
    """
    sanitized = re.sub(r"[^a-zA-Z0-9_]", "_", string)
    return sanitized.upper()
def shields_list_contains(kconf, _, shield):
    """
    Return "y" if 'shield' is present in the cmake environment variable
    'SHIELD_AS_LIST', interpreted as a ";"-separated list of shield
    names. Return "n" if the variable is unset or 'shield' is absent.
    """
    # os.environ.get() avoids the KeyError dance and, unlike the
    # previous version, does not shadow the 'list' builtin.
    shields = os.environ.get('SHIELD_AS_LIST')
    if shields is None:
        return "n"
    return "y" if shield in shields.split(";") else "n"
def substring(kconf, _, string, start, stop=None):
    """
    Return the slice of 'string' from index 'start' up to (but not
    including) 'stop'. When 'stop' is omitted, slice through to the end
    of the string. 'start' and 'stop' arrive as strings from Kconfig
    and are converted to integers here.
    """
    end = None if stop is None else int(stop)
    return string[int(start):end]
def arith(kconf, name, *args):
    """
    Fold the arithmetic operation named by 'name' ("add", "sub", "mul",
    "div", "mod", "max" or "min") across the integer arguments from
    left to right, returning the result as a decimal string.

    For interoperability with inc and dec, a single argument is split
    on commas and treated as a sequence of numbers.

    Examples in Kconfig:

    $(add, 10, 3)          # -> 13
    $(add, 10, 3, 2)       # -> 15
    $(sub, 10, 3)          # -> 7
    $(sub, 10, 3, 2)       # -> 5
    $(mul, 10, 3)          # -> 30
    $(mul, 10, 3, 2)       # -> 60
    $(div, 10, 3)          # -> 3
    $(div, 10, 3, 2)       # -> 1
    $(mod, 10, 3)          # -> 1
    $(mod, 10, 3, 2)       # -> 1
    $(add, $(inc, 1, 1))   # -> 4
    $(div, $(dec, 1, 1))   # Error (0 div 0)
    """
    # Dispatch table instead of an if/elif chain; truediv keeps the
    # original semantics (the result is truncated by int() below).
    ops = {
        "add": operator.add,
        "sub": operator.sub,
        "mul": operator.mul,
        "div": operator.truediv,
        "mod": operator.mod,
        "max": max,
        "min": min,
    }
    assert name in ops
    values = map(int, args if len(args) > 1 else args[0].split(","))
    return str(int(functools.reduce(ops[name], values)))
def inc_dec(kconf, name, *args):
    """
    Increment ("inc") or decrement ("dec") every integer in the
    argument sequence and return the results joined with commas. A
    single argument is split on commas first, mirroring arith().
    """
    assert name in ("inc", "dec")
    step = 1 if name == "inc" else -1
    values = map(int, args if len(args) > 1 else args[0].split(","))
    return ",".join(str(v + step) for v in values)
# Registry of Kconfig preprocessor functions implemented in this module.
#
# Keys in this dict are the function names as they appear
# in Kconfig files (e.g. $(dt_has_compat,...)). The values are tuples
# in this form:
#
# (python_function, minimum_number_of_args, maximum_number_of_args)
#
# Each python function is given a kconf object and its name in the
# Kconfig file, followed by arguments from the Kconfig file.
#
# Several Kconfig names may map to the same Python function, which then
# dispatches on its 'name' argument (e.g. the *_int/*_hex variants).
#
# See the kconfiglib documentation for more details.
functions = {
    "dt_has_compat": (dt_has_compat, 1, 1),
    "dt_compat_enabled": (dt_compat_enabled, 1, 1),
    "dt_compat_on_bus": (dt_compat_on_bus, 2, 2),
    "dt_compat_any_has_prop": (dt_compat_any_has_prop, 2, 2),
    "dt_chosen_label": (dt_chosen_label, 1, 1),
    "dt_chosen_enabled": (dt_chosen_enabled, 1, 1),
    "dt_chosen_path": (dt_chosen_path, 1, 1),
    "dt_chosen_has_compat": (dt_chosen_has_compat, 2, 2),
    "dt_path_enabled": (dt_node_enabled, 1, 1),
    "dt_alias_enabled": (dt_node_enabled, 1, 1),
    "dt_nodelabel_enabled": (dt_nodelabel_enabled, 1, 1),
    "dt_nodelabel_enabled_with_compat": (dt_nodelabel_enabled_with_compat, 2, 2),
    "dt_chosen_reg_addr_int": (dt_chosen_reg, 1, 3),
    "dt_chosen_reg_addr_hex": (dt_chosen_reg, 1, 3),
    "dt_chosen_reg_size_int": (dt_chosen_reg, 1, 3),
    "dt_chosen_reg_size_hex": (dt_chosen_reg, 1, 3),
    "dt_node_reg_addr_int": (dt_node_reg, 1, 3),
    "dt_node_reg_addr_hex": (dt_node_reg, 1, 3),
    "dt_node_reg_size_int": (dt_node_reg, 1, 3),
    "dt_node_reg_size_hex": (dt_node_reg, 1, 3),
    "dt_nodelabel_reg_addr_int": (dt_nodelabel_reg, 1, 3),
    "dt_nodelabel_reg_addr_hex": (dt_nodelabel_reg, 1, 3),
    "dt_nodelabel_reg_size_int": (dt_nodelabel_reg, 1, 3),
    "dt_nodelabel_reg_size_hex": (dt_nodelabel_reg, 1, 3),
    "dt_node_bool_prop": (dt_node_bool_prop, 2, 2),
    "dt_nodelabel_bool_prop": (dt_nodelabel_bool_prop, 2, 2),
    "dt_chosen_bool_prop": (dt_chosen_bool_prop, 2, 2),
    "dt_node_has_prop": (dt_node_has_prop, 2, 2),
    "dt_nodelabel_has_prop": (dt_nodelabel_has_prop, 2, 2),
    "dt_node_int_prop_int": (dt_node_int_prop, 2, 3),
    "dt_node_int_prop_hex": (dt_node_int_prop, 2, 3),
    "dt_node_array_prop_int": (dt_node_array_prop, 3, 4),
    "dt_node_array_prop_hex": (dt_node_array_prop, 3, 4),
    "dt_node_ph_array_prop_int": (dt_node_ph_array_prop, 4, 5),
    "dt_node_ph_array_prop_hex": (dt_node_ph_array_prop, 4, 5),
    "dt_node_ph_prop_path": (dt_node_ph_prop_path, 2, 2),
    "dt_node_str_prop_equals": (dt_node_str_prop_equals, 3, 3),
    "dt_nodelabel_has_compat": (dt_nodelabel_has_compat, 2, 2),
    "dt_node_has_compat": (dt_node_has_compat, 2, 2),
    "dt_nodelabel_path": (dt_nodelabel_path, 1, 1),
    "dt_node_parent": (dt_node_parent, 1, 1),
    "dt_nodelabel_array_prop_has_val": (dt_nodelabel_array_prop_has_val, 3, 3),
    "dt_gpio_hogs_enabled": (dt_gpio_hogs_enabled, 0, 0),
    "dt_chosen_partition_addr_int": (dt_chosen_partition_addr, 1, 3),
    "dt_chosen_partition_addr_hex": (dt_chosen_partition_addr, 1, 3),
    "normalize_upper": (normalize_upper, 1, 1),
    "shields_list_contains": (shields_list_contains, 1, 1),
    "substring": (substring, 2, 3),
    "add": (arith, 1, 255),
    "sub": (arith, 1, 255),
    "mul": (arith, 1, 255),
    "div": (arith, 1, 255),
    "mod": (arith, 1, 255),
    "max": (arith, 1, 255),
    "min": (arith, 1, 255),
    "inc": (inc_dec, 1, 255),
    "dec": (inc_dec, 1, 255),
}
``` | /content/code_sandbox/scripts/kconfig/kconfigfunctions.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 9,889 |
```python
"""
Overview
========
Kconfiglib is a Python 2/3 library for scripting and extracting information
from Kconfig (path_to_url
configuration systems.
See the homepage at path_to_url for a longer
overview.
Since Kconfiglib 12.0.0, the library version is available in
kconfiglib.VERSION, which is a (<major>, <minor>, <patch>) tuple, e.g.
(12, 0, 0).
Using Kconfiglib on the Linux kernel with the Makefile targets
==============================================================
For the Linux kernel, a handy interface is provided by the
scripts/kconfig/Makefile patch, which can be applied with either 'git am' or
the 'patch' utility:
$ wget -qO- path_to_url | git am
$ wget -qO- path_to_url | patch -p1
Warning: Not passing -p1 to patch will cause the wrong file to be patched.
Please tell me if the patch does not apply. It should be trivial to apply
manually, as it's just a block of text that needs to be inserted near the other
*conf: targets in scripts/kconfig/Makefile.
Look further down for a motivation for the Makefile patch and for instructions
on how you can use Kconfiglib without it.
If you do not wish to install Kconfiglib via pip, the Makefile patch is set up
so that you can also just clone Kconfiglib into the kernel root:
$ git clone git://github.com/ulfalizer/Kconfiglib.git
$ git am Kconfiglib/makefile.patch (or 'patch -p1 < Kconfiglib/makefile.patch')
Warning: The directory name Kconfiglib/ is significant in this case, because
it's added to PYTHONPATH by the new targets in makefile.patch.
The targets added by the Makefile patch are described in the following
sections.
make kmenuconfig
----------------
This target runs the curses menuconfig interface with Python 3. As of
Kconfiglib 12.2.0, both Python 2 and Python 3 are supported (previously, only
Python 3 was supported, so this was a backport).
make guiconfig
--------------
This target runs the Tkinter menuconfig interface. Both Python 2 and Python 3
are supported. To change the Python interpreter used, pass
PYTHONCMD=<executable> to 'make'. The default is 'python'.
make [ARCH=<arch>] iscriptconfig
--------------------------------
This target gives an interactive Python prompt where a Kconfig instance has
been preloaded and is available in 'kconf'. To change the Python interpreter
used, pass PYTHONCMD=<executable> to 'make'. The default is 'python'.
To get a feel for the API, try evaluating and printing the symbols in
kconf.defined_syms, and explore the MenuNode menu tree starting at
kconf.top_node by following 'next' and 'list' pointers.
The item contained in a menu node is found in MenuNode.item (note that this can
be one of the constants kconfiglib.MENU and kconfiglib.COMMENT), and all
symbols and choices have a 'nodes' attribute containing their menu nodes
(usually only one). Printing a menu node will print its item, in Kconfig
format.
If you want to look up a symbol by name, use the kconf.syms dictionary.
make scriptconfig SCRIPT=<script> [SCRIPT_ARG=<arg>]
----------------------------------------------------
This target runs the Python script given by the SCRIPT parameter on the
configuration. sys.argv[1] holds the name of the top-level Kconfig file
(currently always "Kconfig" in practice), and sys.argv[2] holds the SCRIPT_ARG
argument, if given.
See the examples/ subdirectory for example scripts.
make dumpvarsconfig
-------------------
This target prints a list of all environment variables referenced from the
Kconfig files, together with their values. See the
Kconfiglib/examples/dumpvars.py script.
Only environment variables that are referenced via the Kconfig preprocessor
$(FOO) syntax are included. The preprocessor was added in Linux 4.18.
Using Kconfiglib without the Makefile targets
=============================================
The make targets are only needed to pick up environment variables exported from
the Kbuild makefiles and referenced inside Kconfig files, via e.g.
'source "arch/$(SRCARCH)/Kconfig"' and commands run via '$(shell,...)'.
These variables are referenced as of writing (Linux 4.18), together with sample
values:
srctree (.)
ARCH (x86)
SRCARCH (x86)
KERNELVERSION (4.18.0)
CC (gcc)
HOSTCC (gcc)
HOSTCXX (g++)
CC_VERSION_TEXT (gcc (Ubuntu 7.3.0-16ubuntu3) 7.3.0)
Older kernels only reference ARCH, SRCARCH, and KERNELVERSION.
If your kernel is recent enough (4.18+), you can get a list of referenced
environment variables via 'make dumpvarsconfig' (see above). Note that this
command is added by the Makefile patch.
To run Kconfiglib without the Makefile patch, set the environment variables
manually:
$ srctree=. ARCH=x86 SRCARCH=x86 KERNELVERSION=`make kernelversion` ... python(3)
>>> import kconfiglib
>>> kconf = kconfiglib.Kconfig() # filename defaults to "Kconfig"
Search the top-level Makefile for "Additional ARCH settings" to see other
possibilities for ARCH and SRCARCH.
Intro to symbol values
======================
Kconfiglib has the same assignment semantics as the C implementation.
Any symbol can be assigned a value by the user (via Kconfig.load_config() or
Symbol.set_value()), but this user value is only respected if the symbol is
visible, which corresponds to it (currently) being visible in the menuconfig
interface.
For symbols with prompts, the visibility of the symbol is determined by the
condition on the prompt. Symbols without prompts are never visible, so setting
a user value on them is pointless. A warning will be printed by default if
Symbol.set_value() is called on a promptless symbol. Assignments to promptless
symbols are normal within a .config file, so no similar warning will be printed
by load_config().
Dependencies from parents and 'if'/'depends on' are propagated to properties,
including prompts, so these two configurations are logically equivalent:
(1)
menu "menu"
depends on A
if B
config FOO
tristate "foo" if D
default y
depends on C
endif
endmenu
(2)
menu "menu"
depends on A
config FOO
tristate "foo" if A && B && C && D
default y if A && B && C
endmenu
In this example, A && B && C && D (the prompt condition) needs to be non-n for
FOO to be visible (assignable). If its value is m, the symbol can only be
assigned the value m: The visibility sets an upper bound on the value that can
be assigned by the user, and any higher user value will be truncated down.
'default' properties are independent of the visibility, though a 'default' will
often get the same condition as the prompt due to dependency propagation.
'default' properties are used if the symbol is not visible or has no user
value.
Symbols with no user value (or that have a user value but are not visible) and
no (active) 'default' default to n for bool/tristate symbols, and to the empty
string for other symbol types.
'select' works similarly to symbol visibility, but sets a lower bound on the
value of the symbol. The lower bound is determined by the value of the
select*ing* symbol. 'select' does not respect visibility, so non-visible
symbols can be forced to a particular (minimum) value by a select as well.
For non-bool/tristate symbols, it only matters whether the visibility is n or
non-n: m visibility acts the same as y visibility.
Conditions on 'default' and 'select' work in mostly intuitive ways. If the
condition is n, the 'default' or 'select' is disabled. If it is m, the
'default' or 'select' value (the value of the selecting symbol) is truncated
down to m.
When writing a configuration with Kconfig.write_config(), only symbols that are
visible, have an (active) default, or are selected will get written out (note
that this includes all symbols that would accept user values). Kconfiglib
matches the .config format produced by the C implementations down to the
character. This eases testing.
For a visible bool/tristate symbol FOO with value n, this line is written to
.config:
# CONFIG_FOO is not set
The point is to remember the user n selection (which might differ from the
default value the symbol would get), while at the same time sticking to the rule
that undefined corresponds to n (.config uses Makefile format, making the line
above a comment). When the .config file is read back in, this line will be
treated the same as the following assignment:
CONFIG_FOO=n
In Kconfiglib, the set of (currently) assignable values for a bool/tristate
symbol appear in Symbol.assignable. For other symbol types, just check if
sym.visibility is non-0 (non-n) to see whether the user value will have an
effect.
Intro to the menu tree
======================
The menu structure, as seen in e.g. menuconfig, is represented by a tree of
MenuNode objects. The top node of the configuration corresponds to an implicit
top-level menu, the title of which is shown at the top in the standard
menuconfig interface. (The title is also available in Kconfig.mainmenu_text in
Kconfiglib.)
The top node is found in Kconfig.top_node. From there, you can visit child menu
nodes by following the 'list' pointer, and any following menu nodes by
following the 'next' pointer. Usually, a non-None 'list' pointer indicates a
menu or Choice, but menu nodes for symbols can sometimes have a non-None 'list'
pointer too due to submenus created implicitly from dependencies.
MenuNode.item is either a Symbol or a Choice object, or one of the constants
MENU and COMMENT. The prompt of the menu node can be found in MenuNode.prompt,
which also holds the title for menus and comments. For Symbol and Choice,
MenuNode.help holds the help text (if any, otherwise None).
Most symbols will only have a single menu node. A symbol defined in multiple
locations will have one menu node for each location. The list of menu nodes for
a Symbol or Choice can be found in the Symbol/Choice.nodes attribute.
Note that prompts and help texts for symbols and choices are stored in their
menu node(s) rather than in the Symbol or Choice objects themselves. This makes
it possible to define a symbol in multiple locations with a different prompt or
help text in each location. To get the help text or prompt for a symbol with a
single menu node, do sym.nodes[0].help and sym.nodes[0].prompt, respectively.
The prompt is a (text, condition) tuple, where condition determines the
visibility (see 'Intro to expressions' below).
This organization mirrors the C implementation. MenuNode is called
'struct menu' there, but I thought "menu" was a confusing name.
It is possible to give a Choice a name and define it in multiple locations,
hence why Choice.nodes is also a list.
As a convenience, the properties added at a particular definition location are
available on the MenuNode itself, in e.g. MenuNode.defaults. This is helpful
when generating documentation, so that symbols/choices defined in multiple
locations can be shown with the correct properties at each location.
Intro to expressions
====================
Expressions can be evaluated with the expr_value() function and printed with
the expr_str() function (these are used internally as well). Evaluating an
expression always yields a tristate value, where n, m, and y are represented as
0, 1, and 2, respectively.
The following table should help you figure out how expressions are represented.
A, B, C, ... are symbols (Symbol instances), NOT is the kconfiglib.NOT
constant, etc.
Expression Representation
---------- --------------
A A
"A" A (constant symbol)
!A (NOT, A)
A && B (AND, A, B)
A && B && C (AND, A, (AND, B, C))
A || B (OR, A, B)
A || (B && C && D) (OR, A, (AND, B, (AND, C, D)))
A = B (EQUAL, A, B)
A != "foo" (UNEQUAL, A, foo (constant symbol))
A && B = C && D (AND, A, (AND, (EQUAL, B, C), D))
n Kconfig.n (constant symbol)
m Kconfig.m (constant symbol)
y Kconfig.y (constant symbol)
"y" Kconfig.y (constant symbol)
Strings like "foo" in 'default "foo"' or 'depends on SYM = "foo"' are
represented as constant symbols, so the only values that appear in expressions
are symbols***. This mirrors the C implementation.
***For choice symbols, the parent Choice will appear in expressions as well,
but it's usually invisible as the value interfaces of Symbol and Choice are
identical. This mirrors the C implementation and makes different choice modes
"just work".
Manual evaluation examples:
- The value of A && B is min(A.tri_value, B.tri_value)
- The value of A || B is max(A.tri_value, B.tri_value)
- The value of !A is 2 - A.tri_value
- The value of A = B is 2 (y) if A.str_value == B.str_value, and 0 (n)
otherwise. Note that str_value is used here instead of tri_value.
For constant (as well as undefined) symbols, str_value matches the name of
the symbol. This mirrors the C implementation and explains why
'depends on SYM = "foo"' above works as expected.
n/m/y are automatically converted to the corresponding constant symbols
"n"/"m"/"y" (Kconfig.n/m/y) during parsing.
Kconfig.const_syms is a dictionary like Kconfig.syms but for constant symbols.
If a condition is missing (e.g., <cond> when the 'if <cond>' is removed from
'default A if <cond>'), it is actually Kconfig.y. The standard __str__()
functions just avoid printing 'if y' conditions to give cleaner output.
Kconfig extensions
==================
Kconfiglib includes a couple of Kconfig extensions:
'source' with relative path
---------------------------
The 'rsource' statement sources Kconfig files with a path relative to directory
of the Kconfig file containing the 'rsource' statement, instead of relative to
the project root.
Consider following directory tree:
Project
+--Kconfig
|
+--src
+--Kconfig
|
+--SubSystem1
+--Kconfig
|
+--ModuleA
+--Kconfig
In this example, assume that src/SubSystem1/Kconfig wants to source
src/SubSystem1/ModuleA/Kconfig.
With 'source', this statement would be used:
source "src/SubSystem1/ModuleA/Kconfig"
With 'rsource', this turns into
rsource "ModuleA/Kconfig"
If an absolute path is given to 'rsource', it acts the same as 'source'.
'rsource' can be used to create "position-independent" Kconfig trees that can
be moved around freely.
Globbing 'source'
-----------------
'source' and 'rsource' accept glob patterns, sourcing all matching Kconfig
files. They require at least one matching file, raising a KconfigError
otherwise.
For example, the following statement might source sub1/foofoofoo and
sub2/foobarfoo:
source "sub[12]/foo*foo"
The glob patterns accepted are the same as for the standard glob.glob()
function.
Two additional statements are provided for cases where it's acceptable for a
pattern to match no files: 'osource' and 'orsource' (the o is for "optional").
For example, the following statements will be no-ops if neither "foo" nor any
files matching "bar*" exist:
osource "foo"
osource "bar*"
'orsource' does a relative optional source.
'source' and 'osource' are analogous to 'include' and '-include' in Make.
Generalized def_* keywords
--------------------------
def_int, def_hex, and def_string are available in addition to def_bool and
def_tristate, allowing int, hex, and string symbols to be given a type and a
default at the same time.
Extra optional warnings
-----------------------
Some optional warnings can be controlled via environment variables:
- KCONFIG_WARN_UNDEF: If set to 'y', warnings will be generated for all
references to undefined symbols within Kconfig files. The only gotcha is
that all hex literals must be prefixed with "0x" or "0X", to make it
possible to distinguish them from symbol references.
Some projects (e.g. the Linux kernel) use multiple Kconfig trees with many
shared Kconfig files, leading to some safe undefined symbol references.
KCONFIG_WARN_UNDEF is useful in projects that only have a single Kconfig
tree though.
KCONFIG_STRICT is an older alias for this environment variable, supported
for backwards compatibility.
- KCONFIG_WARN_UNDEF_ASSIGN: If set to 'y', warnings will be generated for
all assignments to undefined symbols within .config files. By default, no
such warnings are generated.
This warning can also be enabled/disabled via the Kconfig.warn_assign_undef
variable.
Preprocessor user functions defined in Python
---------------------------------------------
Preprocessor functions can be defined in Python, which makes it simple to
integrate information from existing Python tools into Kconfig (e.g. to have
Kconfig symbols depend on hardware information stored in some other format).
Putting a Python module named kconfigfunctions(.py) anywhere in sys.path will
cause it to be imported by Kconfiglib (in Kconfig.__init__()). Note that
sys.path can be customized via PYTHONPATH, and includes the directory of the
module being run by default, as well as installation directories.
If the KCONFIG_FUNCTIONS environment variable is set, it gives a different
module name to use instead of 'kconfigfunctions'.
The imported module is expected to define a global dictionary named 'functions'
that maps function names to Python functions, as follows:
def my_fn(kconf, name, arg_1, arg_2, ...):
# kconf:
# Kconfig instance
#
# name:
# Name of the user-defined function ("my-fn"). Think argv[0].
#
# arg_1, arg_2, ...:
# Arguments passed to the function from Kconfig (strings)
#
# Returns a string to be substituted as the result of calling the
# function
...
def my_other_fn(kconf, name, arg_1, arg_2, ...):
...
functions = {
"my-fn": (my_fn, <min.args>, <max.args>/None),
"my-other-fn": (my_other_fn, <min.args>, <max.args>/None),
...
}
...
<min.args> and <max.args> are the minimum and maximum number of arguments
expected by the function (excluding the implicit 'name' argument). If
<max.args> is None, there is no upper limit to the number of arguments. Passing
an invalid number of arguments will generate a KconfigError exception.
Functions can access the current parsing location as kconf.filename/linenr.
Accessing other fields of the Kconfig object is not safe. See the warning
below.
Keep in mind that for a variable defined like 'foo = $(fn)', 'fn' will be
called only when 'foo' is expanded. If 'fn' uses the parsing location and the
intent is to use the location of the assignment, you want 'foo := $(fn)'
instead, which calls the function immediately.
Once defined, user functions can be called from Kconfig in the same way as
other preprocessor functions:
config FOO
...
depends on $(my-fn,arg1,arg2)
If my_fn() returns "n", this will result in
config FOO
...
depends on n
Warning
*******
User-defined preprocessor functions are called as they're encountered at parse
time, before all Kconfig files have been processed, and before the menu tree
has been finalized. There are no guarantees that accessing Kconfig symbols or
the menu tree via the 'kconf' parameter will work, and it could potentially
lead to a crash.
Preferably, user-defined functions should be stateless.
Feedback
========
Send bug reports, suggestions, and questions to ulfalizer a.t Google's email
service, or open a ticket on the GitHub page.
"""
import errno
import importlib
import os
import re
import sys
# Get rid of some attribute lookups. These are obvious in context.
from glob import iglob
from os.path import dirname, exists, expandvars, islink, join, realpath
# Library version, as a (<major>, <minor>, <patch>) tuple; also exposed
# to users as kconfiglib.VERSION (see the module docstring).
VERSION = (14, 1, 0)
# File layout:
#
# Public classes
# Public functions
# Internal functions
# Global constants
# Line length: 79 columns
#
# Public classes
#
class Kconfig(object):
"""
Represents a Kconfig configuration, e.g. for x86 or ARM. This is the set of
symbols, choices, and menu nodes appearing in the configuration. Creating
any number of Kconfig objects (including for different architectures) is
safe. Kconfiglib doesn't keep any global state.
The following attributes are available. They should be treated as
read-only, and some are implemented through @property magic.
syms:
A dictionary with all symbols in the configuration, indexed by name. Also
includes all symbols that are referenced in expressions but never
defined, except for constant (quoted) symbols.
Undefined symbols can be recognized by Symbol.nodes being empty -- see
the 'Intro to the menu tree' section in the module docstring.
const_syms:
A dictionary like 'syms' for constant (quoted) symbols
named_choices:
A dictionary like 'syms' for named choices (choice FOO)
defined_syms:
A list with all defined symbols, in the same order as they appear in the
Kconfig files. Symbols defined in multiple locations appear multiple
times.
Note: You probably want to use 'unique_defined_syms' instead. This
attribute is mostly maintained for backwards compatibility.
unique_defined_syms:
A list like 'defined_syms', but with duplicates removed. Just the first
instance is kept for symbols defined in multiple locations. Kconfig order
is preserved otherwise.
Using this attribute instead of 'defined_syms' can save work, and
automatically gives reasonable behavior when writing configuration output
(symbols defined in multiple locations only generate output once, while
still preserving Kconfig order for readability).
choices:
A list with all choices, in the same order as they appear in the Kconfig
files.
Note: You probably want to use 'unique_choices' instead. This attribute
is mostly maintained for backwards compatibility.
unique_choices:
Analogous to 'unique_defined_syms', for choices. Named choices can have
multiple definition locations.
menus:
A list with all menus, in the same order as they appear in the Kconfig
files
comments:
A list with all comments, in the same order as they appear in the Kconfig
files
kconfig_filenames:
A list with the filenames of all Kconfig files included in the
configuration, relative to $srctree (or relative to the current directory
if $srctree isn't set), except absolute paths (e.g.
'source "/foo/Kconfig"') are kept as-is.
The files are listed in the order they are source'd, starting with the
top-level Kconfig file. If a file is source'd multiple times, it will
appear multiple times. Use set() to get unique filenames.
Note that Kconfig.sync_deps() already indirectly catches any file
modifications that change configuration output.
env_vars:
A set() with the names of all environment variables referenced in the
Kconfig files.
Only environment variables referenced with the preprocessor $(FOO) syntax
will be registered. The older $FOO syntax is only supported for backwards
compatibility.
Also note that $(FOO) won't be registered unless the environment variable
$FOO is actually set. If it isn't, $(FOO) is an expansion of an unset
preprocessor variable (which gives the empty string).
Another gotcha is that environment variables referenced in the values of
recursively expanded preprocessor variables (those defined with =) will
only be registered if the variable is actually used (expanded) somewhere.
The note from the 'kconfig_filenames' documentation applies here too.
n/m/y:
The predefined constant symbols n/m/y. Also available in const_syms.
modules:
The Symbol instance for the modules symbol. Currently hardcoded to
MODULES, which is backwards compatible. Kconfiglib will warn if
'option modules' is set on some other symbol. Tell me if you need proper
'option modules' support.
'modules' is never None. If the MODULES symbol is not explicitly defined,
its tri_value will be 0 (n), as expected.
A simple way to enable modules is to do 'kconf.modules.set_value(2)'
(provided the MODULES symbol is defined and visible). Modules are
disabled by default in the kernel Kconfig files as of writing, though
nearly all defconfig files enable them (with 'CONFIG_MODULES=y').
defconfig_list:
The Symbol instance for the 'option defconfig_list' symbol, or None if no
defconfig_list symbol exists. The defconfig filename derived from this
symbol can be found in Kconfig.defconfig_filename.
defconfig_filename:
The filename given by the defconfig_list symbol. This is taken from the
first 'default' with a satisfied condition where the specified file
exists (can be opened for reading). If a defconfig file foo/defconfig is
not found and $srctree was set when the Kconfig was created,
$srctree/foo/defconfig is looked up as well.
'defconfig_filename' is None if either no defconfig_list symbol exists,
or if the defconfig_list symbol has no 'default' with a satisfied
condition that specifies a file that exists.
Gotcha: scripts/kconfig/Makefile might pass --defconfig=<defconfig> to
scripts/kconfig/conf when running e.g. 'make defconfig'. This option
overrides the defconfig_list symbol, meaning defconfig_filename might not
always match what 'make defconfig' would use.
top_node:
The menu node (see the MenuNode class) of the implicit top-level menu.
Acts as the root of the menu tree.
mainmenu_text:
The prompt (title) of the top menu (top_node). Defaults to "Main menu".
Can be changed with the 'mainmenu' statement (see kconfig-language.txt).
variables:
A dictionary with all preprocessor variables, indexed by name. See the
Variable class.
warn:
Set this variable to True/False to enable/disable warnings. See
Kconfig.__init__().
When 'warn' is False, the values of the other warning-related variables
are ignored.
This variable as well as the other warn* variables can be read to check
the current warning settings.
warn_to_stderr:
Set this variable to True/False to enable/disable warnings on stderr. See
Kconfig.__init__().
warn_assign_undef:
Set this variable to True to generate warnings for assignments to
undefined symbols in configuration files.
This variable is False by default unless the KCONFIG_WARN_UNDEF_ASSIGN
environment variable was set to 'y' when the Kconfig instance was
created.
warn_assign_override:
Set this variable to True to generate warnings for multiple assignments
to the same symbol in configuration files, where the assignments set
different values (e.g. CONFIG_FOO=m followed by CONFIG_FOO=y, where the
last value would get used).
This variable is True by default. Disabling it might be useful when
merging configurations.
warn_assign_redun:
Like warn_assign_override, but for multiple assignments setting a symbol
to the same value.
This variable is True by default. Disabling it might be useful when
merging configurations.
warnings:
A list of strings containing all warnings that have been generated, for
cases where more flexibility is needed.
See the 'warn_to_stderr' parameter to Kconfig.__init__() and the
Kconfig.warn_to_stderr variable as well. Note that warnings still get
added to Kconfig.warnings when 'warn_to_stderr' is True.
Just as for warnings printed to stderr, only warnings that are enabled
will get added to Kconfig.warnings. See the various Kconfig.warn*
variables.
missing_syms:
A list with (name, value) tuples for all assignments to undefined symbols
within the most recently loaded .config file(s). 'name' is the symbol
name without the 'CONFIG_' prefix. 'value' is a string that gives the
right-hand side of the assignment verbatim.
See Kconfig.load_config() as well.
srctree:
The value the $srctree environment variable had when the Kconfig instance
was created, or the empty string if $srctree wasn't set. This gives nice
behavior with os.path.join(), which treats "" as the current directory,
without adding "./".
Kconfig files are looked up relative to $srctree (unless absolute paths
are used), and .config files are looked up relative to $srctree if they
are not found in the current directory. This is used to support
out-of-tree builds. The C tools use this environment variable in the same
way.
Changing $srctree after creating the Kconfig instance has no effect. Only
the value when the configuration is loaded matters. This avoids surprises
if multiple configurations are loaded with different values for $srctree.
config_prefix:
The value the CONFIG_ environment variable had when the Kconfig instance
was created, or "CONFIG_" if CONFIG_ wasn't set. This is the prefix used
(and expected) on symbol names in .config files and C headers. Used in
the same way in the C tools.
config_header:
The value the KCONFIG_CONFIG_HEADER environment variable had when the
Kconfig instance was created, or the empty string if
KCONFIG_CONFIG_HEADER wasn't set. This string is inserted verbatim at the
beginning of configuration files. See write_config().
header_header:
The value the KCONFIG_AUTOHEADER_HEADER environment variable had when the
Kconfig instance was created, or the empty string if
KCONFIG_AUTOHEADER_HEADER wasn't set. This string is inserted verbatim at
the beginning of header files. See write_autoconf().
filename/linenr:
The current parsing location, for use in Python preprocessor functions.
See the module docstring.
"""
__slots__ = (
"_encoding",
"_functions",
"_set_match",
"_srctree_prefix",
"_unset_match",
"_warn_assign_no_prompt",
"choices",
"comments",
"config_header",
"config_prefix",
"const_syms",
"defconfig_list",
"defined_syms",
"env_vars",
"header_header",
"kconfig_filenames",
"m",
"menus",
"missing_syms",
"modules",
"n",
"named_choices",
"srctree",
"syms",
"top_node",
"unique_choices",
"unique_defined_syms",
"variables",
"warn",
"warn_assign_override",
"warn_assign_redun",
"warn_assign_undef",
"warn_to_stderr",
"warnings",
"y",
# Parsing-related
"_parsing_kconfigs",
"_readline",
"filename",
"linenr",
"_include_path",
"_filestack",
"_line",
"_tokens",
"_tokens_i",
"_reuse_tokens",
)
#
# Public interface
#
def __init__(self, filename="Kconfig", warn=True, warn_to_stderr=True,
             encoding="utf-8", suppress_traceback=False):
    """
    Creates a new Kconfig object by parsing Kconfig files.

    Note that Kconfig files are not the same as .config files (which store
    configuration symbol values).

    See the module docstring for some environment variables that influence
    default warning settings (KCONFIG_WARN_UNDEF and
    KCONFIG_WARN_UNDEF_ASSIGN).

    Raises KconfigError on syntax/semantic errors, and OSError or (possibly
    a subclass of) IOError on IO errors ('errno', 'strerror', and
    'filename' are available). Note that IOError is an alias for OSError on
    Python 3, so it's enough to catch OSError there. If you need Python 2/3
    compatibility, it's easiest to catch EnvironmentError, which is a
    common base class of OSError/IOError on Python 2 and an alias for
    OSError on Python 3.

    filename (default: "Kconfig"):
      The Kconfig file to load. For the Linux kernel, you'll want "Kconfig"
      from the top-level directory, as environment variables will make sure
      the right Kconfig is included from there (arch/$SRCARCH/Kconfig as of
      writing).

      If $srctree is set, 'filename' will be looked up relative to it.
      $srctree is also used to look up source'd files within Kconfig files.
      See the class documentation.

      If you are using Kconfiglib via 'make scriptconfig', the filename of
      the base Kconfig file will be in sys.argv[1]. It's currently
      always "Kconfig" in practice.

    warn (default: True):
      True if warnings related to this configuration should be generated.
      This can be changed later by setting Kconfig.warn to True/False. It
      is provided as a constructor argument since warnings might be
      generated during parsing.

      See the other Kconfig.warn_* variables as well, which enable or
      suppress certain warnings when warnings are enabled.

      All generated warnings are added to the Kconfig.warnings list. See
      the class documentation.

    warn_to_stderr (default: True):
      True if warnings should be printed to stderr in addition to being
      added to Kconfig.warnings.

      This can be changed later by setting Kconfig.warn_to_stderr to
      True/False.

    encoding (default: "utf-8"):
      The encoding to use when reading and writing files, and when decoding
      output from commands run via $(shell). If None, the encoding
      specified in the current locale will be used.

      The "utf-8" default avoids exceptions on systems that are configured
      to use the C locale, which implies an ASCII encoding.

      This parameter has no effect on Python 2, due to implementation
      issues (regular strings turning into Unicode strings, which are
      distinct in Python 2). Python 2 doesn't decode regular strings
      anyway.

      Related PEP: https://www.python.org/dev/peps/pep-0538/

    suppress_traceback (default: False):
      Helper for tools. When True, any EnvironmentError or KconfigError
      generated during parsing is caught, the exception message is printed
      to stderr together with the command name, and sys.exit(1) is called
      (which generates SystemExit).

      This hides the Python traceback for "expected" errors like syntax
      errors in Kconfig files.

      Other exceptions besides EnvironmentError and KconfigError are still
      propagated when suppress_traceback is True.
    """
    try:
        self._init(filename, warn, warn_to_stderr, encoding)
    except (EnvironmentError, KconfigError) as e:
        if suppress_traceback:
            cmd = sys.argv[0]  # Empty string if missing
            if cmd:
                cmd += ": "
            # Some long exception messages have extra newlines for better
            # formatting when reported as an unhandled exception. Strip
            # them here.
            sys.exit(cmd + str(e).strip())
        raise
def _init(self, filename, warn, warn_to_stderr, encoding):
    """
    Implementation of __init__(), split into a separate method so that
    __init__() can catch exceptions from it when 'suppress_traceback' is
    True. See __init__() for parameter documentation.
    """
    self._encoding = encoding

    self.srctree = os.getenv("srctree", "")
    # A prefix we can reliably strip from glob() results to get a filename
    # relative to $srctree. relpath() can cause issues for symlinks,
    # because it assumes symlink/../foo is the same as foo/.
    self._srctree_prefix = realpath(self.srctree) + os.sep

    # Warning-related settings; see the class documentation
    self.warn = warn
    self.warn_to_stderr = warn_to_stderr
    self.warn_assign_undef = os.getenv("KCONFIG_WARN_UNDEF_ASSIGN") == "y"
    self.warn_assign_override = True
    self.warn_assign_redun = True
    self._warn_assign_no_prompt = True

    self.warnings = []

    self.config_prefix = os.getenv("CONFIG_", "CONFIG_")
    # Regular expressions for parsing .config files
    self._set_match = _re_match(self.config_prefix + r"([^=]+)=(.*)")
    self._unset_match = _re_match(r"# {}([^ ]+) is not set".format(
        self.config_prefix))

    # Optional boilerplate prepended to written .config/header files
    self.config_header = os.getenv("KCONFIG_CONFIG_HEADER", "")
    self.header_header = os.getenv("KCONFIG_AUTOHEADER_HEADER", "")

    # Symbol/choice/menu/comment containers; see the class documentation
    self.syms = {}
    self.const_syms = {}
    self.defined_syms = []
    self.missing_syms = []
    self.named_choices = {}
    self.choices = []
    self.menus = []
    self.comments = []

    # Create the predefined constant symbols n/m/y
    for nmy in "n", "m", "y":
        sym = Symbol()
        sym.kconfig = self
        sym.name = nmy
        sym.is_constant = True
        sym.orig_type = TRISTATE
        sym._cached_tri_val = STR_TO_TRI[nmy]

        self.const_syms[nmy] = sym

    self.n = self.const_syms["n"]
    self.m = self.const_syms["m"]
    self.y = self.const_syms["y"]

    # Make n/m/y well-formed symbols
    for nmy in "n", "m", "y":
        sym = self.const_syms[nmy]
        sym.rev_dep = sym.weak_rev_dep = sym.direct_dep = self.n

    # Maps preprocessor variables names to Variable instances
    self.variables = {}

    # Predefined preprocessor functions, with min/max number of arguments
    self._functions = {
        "info": (_info_fn, 1, 1),
        "error-if": (_error_if_fn, 2, 2),
        "filename": (_filename_fn, 0, 0),
        "lineno": (_lineno_fn, 0, 0),
        "shell": (_shell_fn, 1, 1),
        "warning-if": (_warning_if_fn, 2, 2),
    }

    # Add any user-defined preprocessor functions (from the module named by
    # $KCONFIG_FUNCTIONS, "kconfigfunctions" by default). A missing module
    # is not an error.
    try:
        self._functions.update(
            importlib.import_module(
                os.getenv("KCONFIG_FUNCTIONS", "kconfigfunctions")
            ).functions)
    except ImportError:
        pass

    # This determines whether previously unseen symbols are registered.
    # They shouldn't be if we parse expressions after parsing, as part of
    # Kconfig.eval_string().
    self._parsing_kconfigs = True

    self.modules = self._lookup_sym("MODULES")
    self.defconfig_list = None

    # Set up the implicit top-level menu node; see the class documentation
    self.top_node = MenuNode()
    self.top_node.kconfig = self
    self.top_node.item = MENU
    self.top_node.is_menuconfig = True
    self.top_node.visibility = self.y
    self.top_node.prompt = ("Main menu", self.y)
    self.top_node.parent = None
    self.top_node.dep = self.y
    self.top_node.filename = filename
    self.top_node.linenr = 1
    self.top_node.include_path = ()

    # Parse the Kconfig files

    # Not used internally. Provided as a convenience.
    self.kconfig_filenames = [filename]
    self.env_vars = set()

    # Keeps track of the location in the parent Kconfig files. Kconfig
    # files usually source other Kconfig files. See _enter_file().
    self._filestack = []
    self._include_path = ()

    # The current parsing location
    self.filename = filename
    self.linenr = 0

    # Used to avoid retokenizing lines when we discover that they're not
    # part of the construct currently being parsed. This is kinda like an
    # unget operation.
    self._reuse_tokens = False

    # Open the top-level Kconfig file. Store the readline() method directly
    # as a small optimization.
    self._readline = self._open(join(self.srctree, filename), "r").readline

    try:
        # Parse the Kconfig files. Returns the last node, which we
        # terminate with '.next = None'.
        self._parse_block(None, self.top_node, self.top_node).next = None
        self.top_node.list = self.top_node.next
        self.top_node.next = None
    except UnicodeDecodeError as e:
        _decoding_error(e, self.filename)

    # Close the top-level Kconfig file. __self__ fetches the 'file' object
    # for the method.
    self._readline.__self__.close()

    self._parsing_kconfigs = False

    # Do various menu tree post-processing
    self._finalize_node(self.top_node, self.y)

    for s in self.syms.values():
        self._finalize_sym(s)

    self.unique_defined_syms = _ordered_unique(self.defined_syms)
    self.unique_choices = _ordered_unique(self.choices)

    # Do sanity checks. Some of these depend on everything being finalized.
    self._check_sym_sanity()
    self._check_choice_sanity()

    # KCONFIG_STRICT is an older alias for KCONFIG_WARN_UNDEF, supported
    # for backwards compatibility
    if os.getenv("KCONFIG_WARN_UNDEF") == "y" or \
       os.getenv("KCONFIG_STRICT") == "y":
        self._check_undef_syms()

    # Build Symbol._dependents for all symbols and choices
    self._build_dep()

    # Check for dependency loops
    check_dep_loop_sym = _check_dep_loop_sym  # Micro-optimization
    for sym in self.unique_defined_syms:
        check_dep_loop_sym(sym, False)

    # Add extra dependencies from choices to choice symbols that get
    # awkward during dependency loop detection
    self._add_choice_deps()
@property
def mainmenu_text(self):
"""
See the class documentation.
"""
return self.top_node.prompt[0]
@property
def defconfig_filename(self):
"""
See the class documentation.
"""
if self.defconfig_list:
for filename, cond in self.defconfig_list.defaults:
if expr_value(cond):
try:
with self._open_config(filename.str_value) as f:
return f.name
except EnvironmentError:
continue
return None
def load_config(self, filename=None, replace=True, verbose=None):
"""
Loads symbol values from a file in the .config format. Equivalent to
calling Symbol.set_value() to set each of the values.
"# CONFIG_FOO is not set" within a .config file sets the user value of
FOO to n. The C tools work the same way.
For each symbol, the Symbol.user_value attribute holds the value the
symbol was assigned in the .config file (if any). The user value might
differ from Symbol.str/tri_value if there are unsatisfied dependencies.
Calling this function also updates the Kconfig.missing_syms attribute
with a list of all assignments to undefined symbols within the
configuration file. Kconfig.missing_syms is cleared if 'replace' is
True, and appended to otherwise. See the documentation for
Kconfig.missing_syms as well.
See the Kconfig.__init__() docstring for raised exceptions
(OSError/IOError). KconfigError is never raised here.
filename (default: None):
Path to load configuration from (a string). Respects $srctree if set
(see the class documentation).
If 'filename' is None (the default), the configuration file to load
(if any) is calculated automatically, giving the behavior you'd
usually want:
1. If the KCONFIG_CONFIG environment variable is set, it gives the
path to the configuration file to load. Otherwise, ".config" is
used. See standard_config_filename().
2. If the path from (1.) doesn't exist, the configuration file
given by kconf.defconfig_filename is loaded instead, which is
derived from the 'option defconfig_list' symbol.
3. If (1.) and (2.) fail to find a configuration file to load, no
configuration file is loaded, and symbols retain their current
values (e.g., their default values). This is not an error.
See the return value as well.
replace (default: True):
If True, all existing user values will be cleared before loading the
.config. Pass False to merge configurations.
verbose (default: None):
Limited backwards compatibility to prevent crashes. A warning is
printed if anything but None is passed.
Prior to Kconfiglib 12.0.0, this option enabled printing of messages
to stdout when 'filename' was None. A message is (always) returned
now instead, which is more flexible.
Will probably be removed in some future version.
Returns a string with a message saying which file got loaded (or
possibly that no file got loaded, when 'filename' is None). This is
meant to reduce boilerplate in tools, which can do e.g.
print(kconf.load_config()). The returned message distinguishes between
loading (replace == True) and merging (replace == False).
"""
if verbose is not None:
_warn_verbose_deprecated("load_config")
msg = None
if filename is None:
filename = standard_config_filename()
if not exists(filename) and \
not exists(join(self.srctree, filename)):
defconfig = self.defconfig_filename
if defconfig is None:
return "Using default symbol values (no '{}')" \
.format(filename)
msg = " default configuration '{}' (no '{}')" \
.format(defconfig, filename)
filename = defconfig
if not msg:
msg = " configuration '{}'".format(filename)
# Disable the warning about assigning to symbols without prompts. This
# is normal and expected within a .config file.
self._warn_assign_no_prompt = False
# This stub only exists to make sure _warn_assign_no_prompt gets
# reenabled
try:
self._load_config(filename, replace)
except UnicodeDecodeError as e:
_decoding_error(e, filename)
finally:
self._warn_assign_no_prompt = True
return ("Loaded" if replace else "Merged") + msg
def _load_config(self, filename, replace):
    # load_config() helper. Parses 'filename' in the .config format and
    # applies each assignment with Symbol.set_value()/Choice.set_value().
    # 'replace' tells whether symbols not assigned in the file should have
    # their user values cleared afterwards.
    with self._open_config(filename) as f:
        if replace:
            self.missing_syms = []

            # If we're replacing the configuration, keep track of which
            # symbols and choices got set so that we can unset the rest
            # later. This avoids invalidating everything and is faster.
            # Another benefit is that invalidation must be rock solid for
            # it to work, making it a good test.
            for sym in self.unique_defined_syms:
                sym._was_set = False

            for choice in self.unique_choices:
                choice._was_set = False

        # Small optimizations
        set_match = self._set_match
        unset_match = self._unset_match
        get_sym = self.syms.get

        for linenr, line in enumerate(f, 1):
            # The C tools ignore trailing whitespace
            line = line.rstrip()

            match = set_match(line)
            if match:
                # "CONFIG_FOO=val" line
                name, val = match.groups()
                sym = get_sym(name)
                if not sym or not sym.nodes:
                    self._undef_assign(name, val, filename, linenr)
                    continue

                if sym.orig_type in _BOOL_TRISTATE:
                    # The C implementation only checks the first character
                    # to the right of '=', for whatever reason
                    if not (sym.orig_type is BOOL
                            and val.startswith(("y", "n")) or
                            sym.orig_type is TRISTATE
                            and val.startswith(("y", "m", "n"))):
                        self._warn("'{}' is not a valid value for the {} "
                                   "symbol {}. Assignment ignored."
                                   .format(val, TYPE_TO_STR[sym.orig_type],
                                           sym.name_and_loc),
                                   filename, linenr)
                        continue

                    val = val[0]

                    if sym.choice and val != "n":
                        # During .config loading, we infer the mode of the
                        # choice from the kind of values that are assigned
                        # to the choice symbols
                        prev_mode = sym.choice.user_value
                        if prev_mode is not None and \
                           TRI_TO_STR[prev_mode] != val:
                            self._warn("both m and y assigned to symbols "
                                       "within the same choice",
                                       filename, linenr)

                        # Set the choice's mode
                        sym.choice.set_value(val)

                elif sym.orig_type is STRING:
                    match = _conf_string_match(val)
                    if not match:
                        self._warn("malformed string literal in "
                                   "assignment to {}. Assignment ignored."
                                   .format(sym.name_and_loc),
                                   filename, linenr)
                        continue

                    val = unescape(match.group(1))

            else:
                # Not a "CONFIG_FOO=val" line; check for the
                # "# CONFIG_FOO is not set" form
                match = unset_match(line)
                if not match:
                    # Print a warning for lines that match neither
                    # set_match() nor unset_match() and that are not blank
                    # lines or comments. 'line' has already been
                    # rstrip()'d, so blank lines show up as "" here.
                    if line and not line.lstrip().startswith("#"):
                        self._warn("ignoring malformed line '{}'"
                                   .format(line),
                                   filename, linenr)

                    continue

                name = match.group(1)
                sym = get_sym(name)
                if not sym or not sym.nodes:
                    self._undef_assign(name, "n", filename, linenr)
                    continue

                if sym.orig_type not in _BOOL_TRISTATE:
                    continue

                val = "n"

            # Done parsing the assignment. Set the value.
            if sym._was_set:
                self._assigned_twice(sym, val, filename, linenr)

            sym.set_value(val)

    if replace:
        # If we're replacing the configuration, unset the symbols that
        # didn't get set
        for sym in self.unique_defined_syms:
            if not sym._was_set:
                sym.unset_value()

        for choice in self.unique_choices:
            if not choice._was_set:
                choice.unset_value()
def _undef_assign(self, name, val, filename, linenr):
# Called for assignments to undefined symbols during .config loading
self.missing_syms.append((name, val))
if self.warn_assign_undef:
self._warn(
"attempt to assign the value '{}' to the undefined symbol {}"
.format(val, name), filename, linenr)
def _assigned_twice(self, sym, new_val, filename, linenr):
    # Warns (depending on the warn_assign_* settings) when a symbol is
    # assigned more than once within a .config file

    # Bool/tristate user values are stored as 0/1/2; show them as
    # n/m/y strings in the warning
    if sym.orig_type in _BOOL_TRISTATE:
        user_val = TRI_TO_STR[sym.user_value]
    else:
        user_val = sym.user_value

    msg = '{} set more than once. Old value "{}", new value "{}".'.format(
        sym.name_and_loc, user_val, new_val)

    if user_val == new_val:
        # Redundant reassignment (same value)
        if self.warn_assign_redun:
            self._warn(msg, filename, linenr)
        return

    # Overriding reassignment (different value)
    if self.warn_assign_override:
        self._warn(msg, filename, linenr)
def load_allconfig(self, filename):
    """
    Helper for the all*config commands. Merges the configuration file
    named by the KCONFIG_ALLCONFIG environment variable, if that variable
    is set. See Documentation/kbuild/kconfig.txt in the Linux kernel.

    Warnings for duplicated assignments within configuration files
    (warn_assign_override/warn_assign_redun) are suppressed while the file
    is loaded and restored afterwards, since a KCONFIG_ALLCONFIG file is
    expected to override symbols.

    If KCONFIG_ALLCONFIG is set but the configuration file can't be
    opened, an error is printed to stderr and sys.exit() is called
    (raising SystemExit).

    filename:
      Command-specific configuration filename - "allyes.config",
      "allno.config", etc.
    """
    # Delegates to the module-level helper of the same name
    load_allconfig(self, filename)
def write_autoconf(self, filename=None, header=None):
r"""
Writes out symbol values as a C header file, matching the format used
by include/generated/zephyr/autoconf.h in the kernel.
The ordering of the #defines matches the one generated by
write_config(). The order in the C implementation depends on the hash
table implementation as of writing, and so won't match.
If 'filename' exists and its contents is identical to what would get
written out, it is left untouched. This avoids updating file metadata
like the modification time and possibly triggering redundant work in
build tools.
filename (default: None):
Path to write header to.
If None (the default), the path in the environment variable
KCONFIG_AUTOHEADER is used if set, and "include/generated/zephyr/autoconf.h"
otherwise. This is compatible with the C tools.
header (default: None):
Text inserted verbatim at the beginning of the file. You would
usually want it enclosed in '/* */' to make it a C comment, and
include a trailing newline.
If None (the default), the value of the environment variable
KCONFIG_AUTOHEADER_HEADER had when the Kconfig instance was created
will be used if it was set, and no header otherwise. See the
Kconfig.header_header attribute.
Returns a string with a message saying that the header got saved, or
that there were no changes to it. This is meant to reduce boilerplate
in tools, which can do e.g. print(kconf.write_autoconf()).
"""
if filename is None:
filename = os.getenv("KCONFIG_AUTOHEADER",
"include/generated/zephyr/autoconf.h")
if self._write_if_changed(filename, self._autoconf_contents(header)):
return "Kconfig header saved to '{}'".format(filename)
return "No change to Kconfig header in '{}'".format(filename)
def _autoconf_contents(self, header):
# write_autoconf() helper. Returns the contents to write as a string,
# with 'header' or KCONFIG_AUTOHEADER_HEADER at the beginning.
if header is None:
header = self.header_header
chunks = [header] # "".join()ed later
add = chunks.append
for sym in self.unique_defined_syms:
# _write_to_conf is determined when the value is calculated. This
# is a hidden function call due to property magic.
#
# Note: In client code, you can check if sym.config_string is empty
# instead, to avoid accessing the internal _write_to_conf variable
# (though it's likely to keep working).
val = sym.str_value
if not sym._write_to_conf:
continue
if sym.orig_type in _BOOL_TRISTATE:
if val == "y":
add("#define {}{} 1\n"
.format(self.config_prefix, sym.name))
elif val == "m":
add("#define {}{}_MODULE 1\n"
.format(self.config_prefix, sym.name))
elif sym.orig_type is STRING:
add('#define {}{} "{}"\n'
.format(self.config_prefix, sym.name, escape(val)))
else: # sym.orig_type in _INT_HEX:
if sym.orig_type is HEX and \
not val.startswith(("0x", "0X")):
val = "0x" + val
add("#define {}{} {}\n"
.format(self.config_prefix, sym.name, val))
return "".join(chunks)
def write_config(self, filename=None, header=None, save_old=True,
verbose=None):
r"""
Writes out symbol values in the .config format. The format matches the
C implementation, including ordering.
Symbols appear in the same order in generated .config files as they do
in the Kconfig files. For symbols defined in multiple locations, a
single assignment is written out corresponding to the first location
where the symbol is defined.
See the 'Intro to symbol values' section in the module docstring to
understand which symbols get written out.
If 'filename' exists and its contents is identical to what would get
written out, it is left untouched. This avoids updating file metadata
like the modification time and possibly triggering redundant work in
build tools.
See the Kconfig.__init__() docstring for raised exceptions
(OSError/IOError). KconfigError is never raised here.
filename (default: None):
Path to write configuration to (a string).
If None (the default), the path in the environment variable
KCONFIG_CONFIG is used if set, and ".config" otherwise. See
standard_config_filename().
header (default: None):
Text inserted verbatim at the beginning of the file. You would
usually want each line to start with '#' to make it a comment, and
include a trailing newline.
if None (the default), the value of the environment variable
KCONFIG_CONFIG_HEADER had when the Kconfig instance was created will
be used if it was set, and no header otherwise. See the
Kconfig.config_header attribute.
save_old (default: True):
If True and <filename> already exists, a copy of it will be saved to
<filename>.old in the same directory before the new configuration is
written.
Errors are silently ignored if <filename>.old cannot be written (e.g.
due to permissions errors).
verbose (default: None):
Limited backwards compatibility to prevent crashes. A warning is
printed if anything but None is passed.
Prior to Kconfiglib 12.0.0, this option enabled printing of messages
to stdout when 'filename' was None. A message is (always) returned
now instead, which is more flexible.
Will probably be removed in some future version.
Returns a string with a message saying which file got saved. This is
meant to reduce boilerplate in tools, which can do e.g.
print(kconf.write_config()).
"""
if verbose is not None:
_warn_verbose_deprecated("write_config")
if filename is None:
filename = standard_config_filename()
contents = self._config_contents(header)
if self._contents_eq(filename, contents):
return "No change to configuration in '{}'".format(filename)
if save_old:
_save_old(filename)
with self._open(filename, "w") as f:
f.write(contents)
return "Configuration saved to '{}'".format(filename)
    def _config_contents(self, header):
        # write_config() helper. Returns the contents to write as a string,
        # with 'header' or KCONFIG_CONFIG_HEADER at the beginning.
        #
        # More memory friendly would be to 'yield' the strings and
        # "".join(_config_contents()), but it was a bit slower on my system.
        # node_iter() was used here before commit 3aea9f7 ("Add '# end of
        # <menu>' after menus in .config"). Those comments get tricky to
        # implement with it.
        # Clear the "already printed" marker on all symbols, so that symbols
        # defined in multiple locations only produce output once (at their
        # first menu node)
        for sym in self.unique_defined_syms:
            sym._visited = False
        if header is None:
            header = self.config_header
        chunks = [header]  # "".join()ed later
        add = chunks.append
        # Did we just print an '# end of ...' comment?
        after_end_comment = False
        node = self.top_node
        while 1:
            # Jump to the next node with an iterative tree walk
            if node.list:
                node = node.list
            elif node.next:
                node = node.next
            else:
                # Leaf node: climb back up to the first ancestor that has a
                # successor, emitting '# end of <menu>' comments on the way
                while node.parent:
                    node = node.parent
                    # Add a comment when leaving visible menus
                    if node.item is MENU and expr_value(node.dep) and \
                       expr_value(node.visibility) and \
                       node is not self.top_node:
                        add("# end of {}\n".format(node.prompt[0]))
                        after_end_comment = True
                    if node.next:
                        node = node.next
                        break
                else:
                    # No more nodes
                    return "".join(chunks)
            # Generate configuration output for the node
            item = node.item
            if item.__class__ is Symbol:
                if item._visited:
                    continue
                item._visited = True
                # Skip symbols with no .config output (empty config_string)
                conf_string = item.config_string
                if not conf_string:
                    continue
                if after_end_comment:
                    # Add a blank line before the first symbol printed after an
                    # '# end of ...' comment
                    after_end_comment = False
                    add("\n")
                add(conf_string)
            elif expr_value(node.dep) and \
                 ((item is MENU and expr_value(node.visibility)) or
                  item is COMMENT):
                add("\n#\n# {}\n#\n".format(node.prompt[0]))
                after_end_comment = False
def write_min_config(self, filename, header=None):
"""
Writes out a "minimal" configuration file, omitting symbols whose value
matches their default value. The format matches the one produced by
'make savedefconfig'.
The resulting configuration file is incomplete, but a complete
configuration can be derived from it by loading it. Minimal
configuration files can serve as a more manageable configuration format
compared to a "full" .config file, especially when configurations files
are merged or edited by hand.
See the Kconfig.__init__() docstring for raised exceptions
(OSError/IOError). KconfigError is never raised here.
filename:
Path to write minimal configuration to.
header (default: None):
Text inserted verbatim at the beginning of the file. You would
usually want each line to start with '#' to make it a comment, and
include a final terminating newline.
if None (the default), the value of the environment variable
KCONFIG_CONFIG_HEADER had when the Kconfig instance was created will
be used if it was set, and no header otherwise. See the
Kconfig.config_header attribute.
Returns a string with a message saying the minimal configuration got
saved, or that there were no changes to it. This is meant to reduce
boilerplate in tools, which can do e.g.
print(kconf.write_min_config()).
"""
if self._write_if_changed(filename, self._min_config_contents(header)):
return "Minimal configuration saved to '{}'".format(filename)
return "No change to minimal configuration in '{}'".format(filename)
def _min_config_contents(self, header):
# write_min_config() helper. Returns the contents to write as a string,
# with 'header' or KCONFIG_CONFIG_HEADER at the beginning.
if header is None:
header = self.config_header
chunks = [header] # "".join()ed later
add = chunks.append
for sym in self.unique_defined_syms:
# Skip symbols that cannot be changed. Only check
# non-choice symbols, as selects don't affect choice
# symbols.
if not sym.choice and \
sym.visibility <= expr_value(sym.rev_dep):
continue
# Skip symbols whose value matches their default
if sym.str_value == sym._str_default():
continue
# Skip symbols that would be selected by default in a
# choice, unless the choice is optional or the symbol type
# isn't bool (it might be possible to set the choice mode
# to n or the symbol to m in those cases).
if sym.choice and \
not sym.choice.is_optional and \
sym.choice._selection_from_defaults() is sym and \
sym.orig_type is BOOL and \
sym.tri_value == 2:
continue
add(sym.config_string)
return "".join(chunks)
    def sync_deps(self, path):
        """
        Creates or updates a directory structure that can be used to avoid
        doing a full rebuild whenever the configuration is changed, mirroring
        include/config/ in the kernel.
        This function is intended to be called during each build, before
        compiling source files that depend on configuration symbols.
        See the Kconfig.__init__() docstring for raised exceptions
        (OSError/IOError). KconfigError is never raised here.
        path:
          Path to directory
        sync_deps(path) does the following:
          1. If the directory <path> does not exist, it is created.
          2. If <path>/auto.conf exists, old symbol values are loaded from it,
             which are then compared against the current symbol values. If a
             symbol has changed value (would generate different output in
             autoconf.h compared to before), the change is signaled by
             touch'ing a file corresponding to the symbol.
             The first time sync_deps() is run on a directory, <path>/auto.conf
             won't exist, and no old symbol values will be available. This
             logically has the same effect as updating the entire
             configuration.
             The path to a symbol's file is calculated from the symbol's name
             by replacing all '_' with '/' and appending '.h'. For example, the
             symbol FOO_BAR_BAZ gets the file <path>/foo/bar/baz.h, and FOO
             gets the file <path>/foo.h.
             This scheme matches the C tools. The point is to avoid having a
             single directory with a huge number of files, which the underlying
             filesystem might not handle well.
          3. A new auto.conf with the current symbol values is written, to keep
             track of them for the next build.
             If auto.conf exists and its contents is identical to what would
             get written out, it is left untouched. This avoids updating file
             metadata like the modification time and possibly triggering
             redundant work in build tools.
        The last piece of the puzzle is knowing what symbols each source file
        depends on. Knowing that, dependencies can be added from source files
        to the files corresponding to the symbols they depends on. The source
        file will then get recompiled (only) when the symbol value changes
        (provided sync_deps() is run first during each build).
        The tool in the kernel that extracts symbol dependencies from source
        files is scripts/basic/fixdep.c. Missing symbol files also correspond
        to "not changed", which fixdep deals with by using the $(wildcard) Make
        function when adding symbol prerequisites to source files.
        In case you need a different scheme for your project, the sync_deps()
        implementation can be used as a template.
        """
        # Make sure the output directory exists
        if not exists(path):
            os.mkdir(path, 0o755)
        # Load old values from auto.conf, if any
        self._load_old_vals(path)
        for sym in self.unique_defined_syms:
            # _write_to_conf is determined when the value is calculated. This
            # is a hidden function call due to property magic.
            #
            # Note: In client code, you can check if sym.config_string is empty
            # instead, to avoid accessing the internal _write_to_conf variable
            # (though it's likely to keep working).
            val = sym.str_value
            # n tristate values do not get written to auto.conf and autoconf.h,
            # making a missing symbol logically equivalent to n
            if sym._write_to_conf:
                if sym._old_val is None and \
                   sym.orig_type in _BOOL_TRISTATE and \
                   val == "n":
                    # No old value (the symbol was missing or n), new value n.
                    # No change.
                    continue
                if val == sym._old_val:
                    # New value matches old. No change.
                    continue
            elif sym._old_val is None:
                # The symbol wouldn't appear in autoconf.h (because
                # _write_to_conf is false), and it wouldn't have appeared in
                # autoconf.h previously either (because it didn't appear in
                # auto.conf). No change.
                continue
            # 'sym' has a new value. Flag it.
            _touch_dep_file(path, sym.name)
        # Remember the current values as the "new old" values.
        #
        # This call could go anywhere after the call to _load_old_vals(), but
        # putting it last means sync_deps() can be safely rerun if it fails
        # before this point.
        self._write_old_vals(path)
def _load_old_vals(self, path):
# Loads old symbol values from auto.conf into a dedicated
# Symbol._old_val field. Mirrors load_config().
#
# The extra field could be avoided with some trickery involving dumping
# symbol values and restoring them later, but this is simpler and
# faster. The C tools also use a dedicated field for this purpose.
for sym in self.unique_defined_syms:
sym._old_val = None
try:
auto_conf = self._open(join(path, "auto.conf"), "r")
except EnvironmentError as e:
if e.errno == errno.ENOENT:
# No old values
return
raise
with auto_conf as f:
for line in f:
match = self._set_match(line)
if not match:
# We only expect CONFIG_FOO=... (and possibly a header
# comment) in auto.conf
continue
name, val = match.groups()
if name in self.syms:
sym = self.syms[name]
if sym.orig_type is STRING:
match = _conf_string_match(val)
if not match:
continue
val = unescape(match.group(1))
self.syms[name]._old_val = val
else:
# Flag that the symbol no longer exists, in
# case something still depends on it
_touch_dep_file(path, name)
def _write_old_vals(self, path):
# Helper for writing auto.conf. Basically just a simplified
# write_config() that doesn't write any comments (including
# '# CONFIG_FOO is not set' comments). The format matches the C
# implementation, though the ordering is arbitrary there (depends on
# the hash table implementation).
#
# A separate helper function is neater than complicating write_config()
# by passing a flag to it, plus we only need to look at symbols here.
self._write_if_changed(
os.path.join(path, "auto.conf"),
self._old_vals_contents())
def _old_vals_contents(self):
# _write_old_vals() helper. Returns the contents to write as a string.
# Temporary list instead of generator makes this a bit faster
return "".join([
sym.config_string for sym in self.unique_defined_syms
if not (sym.orig_type in _BOOL_TRISTATE and not sym.tri_value)
])
    def node_iter(self, unique_syms=False):
        """
        Returns a generator for iterating through all MenuNode's in the Kconfig
        tree. The iteration is done in Kconfig definition order (each node is
        visited before its children, and the children of a node are visited
        before the next node).
        The Kconfig.top_node menu node is skipped. It contains an implicit menu
        that holds the top-level items.
        As an example, the following code will produce a list equal to
        Kconfig.defined_syms:
          defined_syms = [node.item for node in kconf.node_iter()
                          if isinstance(node.item, Symbol)]
        unique_syms (default: False):
          If True, only the first MenuNode will be included for symbols defined
          in multiple locations.
          Using kconf.node_iter(True) in the example above would give a list
          equal to unique_defined_syms.
        """
        if unique_syms:
            # Reuse the Symbol._visited flag to remember which symbols have
            # already been yielded
            for sym in self.unique_defined_syms:
                sym._visited = False
        node = self.top_node
        while 1:
            # Jump to the next node with an iterative tree walk
            if node.list:
                node = node.list
            elif node.next:
                node = node.next
            else:
                # Leaf node: climb back up to the first ancestor that has a
                # successor node
                while node.parent:
                    node = node.parent
                    if node.next:
                        node = node.next
                        break
                else:
                    # No more nodes
                    return
            if unique_syms and node.item.__class__ is Symbol:
                if node.item._visited:
                    continue
                node.item._visited = True
            yield node
    def eval_string(self, s):
        """
        Returns the tristate value of the expression 's', represented as 0, 1,
        and 2 for n, m, and y, respectively. Raises KconfigError on syntax
        errors. Warns if undefined symbols are referenced.
        As an example, if FOO and BAR are tristate symbols at least one of
        which has the value y, then eval_string("y && (FOO || BAR)") returns
        2 (y).
        To get the string value of non-bool/tristate symbols, use
        Symbol.str_value. eval_string() always returns a tristate value, and
        all non-bool/tristate symbols have the tristate value 0 (n).
        The expression parsing is consistent with how parsing works for
        conditional ('if ...') expressions in the configuration, and matches
        the C implementation. m is rewritten to 'm && MODULES', so
        eval_string("m") will return 0 (n) unless modules are enabled.
        """
        # The parser is optimized to be fast when parsing Kconfig files (where
        # an expression can never appear at the beginning of a line). We have
        # to monkey-patch things a bit here to reuse it.
        # NOTE(review): presumably cleared so diagnostics don't point at a
        # stale Kconfig file location -- confirm against _parse_error()
        self.filename = None
        # Parse 's' as the condition of an 'if': "if <expr>"
        self._tokens = self._tokenize("if " + s)
        # Strip "if " to avoid giving confusing error messages
        self._line = s
        self._tokens_i = 1  # Skip the 'if' token
        return expr_value(self._expect_expr_and_eol())
def unset_values(self):
"""
Removes any user values from all symbols, as if Kconfig.load_config()
or Symbol.set_value() had never been called.
"""
self._warn_assign_no_prompt = False
try:
# set_value() already rejects undefined symbols, and they don't
# need to be invalidated (because their value never changes), so we
# can just iterate over defined symbols
for sym in self.unique_defined_syms:
sym.unset_value()
for choice in self.unique_choices:
choice.unset_value()
finally:
self._warn_assign_no_prompt = True
    def enable_warnings(self):
        """
        Do 'Kconfig.warn = True' instead. Maintained for backwards
        compatibility.
        """
        # Deprecated alias for setting the 'warn' attribute directly
        self.warn = True
    def disable_warnings(self):
        """
        Do 'Kconfig.warn = False' instead. Maintained for backwards
        compatibility.
        """
        # Deprecated alias for setting the 'warn' attribute directly
        self.warn = False
    def enable_stderr_warnings(self):
        """
        Do 'Kconfig.warn_to_stderr = True' instead. Maintained for backwards
        compatibility.
        """
        # Deprecated alias for setting 'warn_to_stderr' directly
        self.warn_to_stderr = True
    def disable_stderr_warnings(self):
        """
        Do 'Kconfig.warn_to_stderr = False' instead. Maintained for backwards
        compatibility.
        """
        # Deprecated alias for setting 'warn_to_stderr' directly
        self.warn_to_stderr = False
    def enable_undef_warnings(self):
        """
        Do 'Kconfig.warn_assign_undef = True' instead. Maintained for backwards
        compatibility.
        """
        # Deprecated alias for setting 'warn_assign_undef' directly
        self.warn_assign_undef = True
    def disable_undef_warnings(self):
        """
        Do 'Kconfig.warn_assign_undef = False' instead. Maintained for
        backwards compatibility.
        """
        # Deprecated alias for setting 'warn_assign_undef' directly
        self.warn_assign_undef = False
    def enable_override_warnings(self):
        """
        Do 'Kconfig.warn_assign_override = True' instead. Maintained for
        backwards compatibility.
        """
        # Deprecated alias for setting 'warn_assign_override' directly
        self.warn_assign_override = True
    def disable_override_warnings(self):
        """
        Do 'Kconfig.warn_assign_override = False' instead. Maintained for
        backwards compatibility.
        """
        # Deprecated alias for setting 'warn_assign_override' directly
        self.warn_assign_override = False
    def enable_redun_warnings(self):
        """
        Do 'Kconfig.warn_assign_redun = True' instead. Maintained for backwards
        compatibility.
        """
        # Deprecated alias for setting 'warn_assign_redun' directly
        self.warn_assign_redun = True
    def disable_redun_warnings(self):
        """
        Do 'Kconfig.warn_assign_redun = False' instead. Maintained for
        backwards compatibility.
        """
        # Deprecated alias for setting 'warn_assign_redun' directly
        self.warn_assign_redun = False
def __repr__(self):
"""
Returns a string with information about the Kconfig object when it is
evaluated on e.g. the interactive Python prompt.
"""
def status(flag):
return "enabled" if flag else "disabled"
return "<{}>".format(", ".join((
"configuration with {} symbols".format(len(self.syms)),
'main menu prompt "{}"'.format(self.mainmenu_text),
"srctree is current directory" if not self.srctree else
'srctree "{}"'.format(self.srctree),
'config symbol prefix "{}"'.format(self.config_prefix),
"warnings " + status(self.warn),
"printing of warnings to stderr " + status(self.warn_to_stderr),
"undef. symbol assignment warnings " +
status(self.warn_assign_undef),
"overriding symbol assignment warnings " +
status(self.warn_assign_override),
"redundant symbol assignment warnings " +
status(self.warn_assign_redun)
)))
#
# Private methods
#
#
# File reading
#
    def _open_config(self, filename):
        # Opens a .config file. First tries to open 'filename', then
        # '$srctree/filename' if $srctree was set when the configuration was
        # loaded.
        try:
            return self._open(filename, "r")
        except EnvironmentError as e:
            # This will try opening the same file twice if $srctree is unset,
            # but it's not a big deal
            try:
                return self._open(join(self.srctree, filename), "r")
            except EnvironmentError as e2:
                # This is needed for Python 3, because e2 is deleted after
                # the try block:
                #
                # https://docs.python.org/3/reference/compound_stmts.html#the-try-statement
                e = e2
            raise _KconfigIOError(
                e, "Could not open '{}' ({}: {}). Check that the $srctree "
                   "environment variable ({}) is set correctly."
                   .format(filename, errno.errorcode[e.errno], e.strerror,
                           "set to '{}'".format(self.srctree) if self.srctree
                           else "unset or blank"))
    def _enter_file(self, filename):
        # Jumps to the beginning of a sourced Kconfig file, saving the previous
        # position and file object.
        #
        # filename:
        #   Absolute path to file
        # Path relative to $srctree, stored in e.g. self.filename (which makes
        # it indirectly show up in MenuNode.filename). Equals 'filename' for
        # absolute paths passed to 'source'.
        if filename.startswith(self._srctree_prefix):
            # Relative path (or a redundant absolute path to within $srctree,
            # but it's probably fine to reduce those too)
            rel_filename = filename[len(self._srctree_prefix):]
        else:
            # Absolute path
            rel_filename = filename
        self.kconfig_filenames.append(rel_filename)
        # The parent Kconfig files are represented as a list of
        # (<include path>, <Python 'file' object for Kconfig file>) tuples.
        #
        # <include path> is immutable and holds a *tuple* of
        # (<filename>, <linenr>) tuples, giving the locations of the 'source'
        # statements in the parent Kconfig files. The current include path is
        # also available in Kconfig._include_path.
        #
        # The point of this redundant setup is to allow Kconfig._include_path
        # to be assigned directly to MenuNode.include_path without having to
        # copy it, sharing it wherever possible.
        # Save include path and 'file' object (via its 'readline' function)
        # before entering the file
        self._filestack.append((self._include_path, self._readline))
        # _include_path is a tuple, so this rebinds the variable instead of
        # doing in-place modification
        self._include_path += ((self.filename, self.linenr),)
        # Check for recursive 'source'
        for name, _ in self._include_path:
            if name == rel_filename:
                raise KconfigError(
                    "\n{}:{}: recursive 'source' of '{}' detected. Check that "
                    "environment variables are set correctly.\n"
                    "Include path:\n{}"
                    .format(self.filename, self.linenr, rel_filename,
                            "\n".join("{}:{}".format(name, linenr)
                                      for name, linenr in self._include_path)))
        try:
            # Only the readline() function is kept around; _leave_file()
            # reaches the file object through it when closing
            self._readline = self._open(filename, "r").readline
        except EnvironmentError as e:
            # We already know that the file exists
            raise _KconfigIOError(
                e, "{}:{}: Could not open '{}' (in '{}') ({}: {})"
                   .format(self.filename, self.linenr, filename,
                           self._line.strip(),
                           errno.errorcode[e.errno], e.strerror))
        self.filename = rel_filename
        # The first _next_line() call will bump this to 1
        self.linenr = 0
    def _leave_file(self):
        # Returns from a Kconfig file to the file that sourced it. See
        # _enter_file().
        # Restore location from parent Kconfig file
        self.filename, self.linenr = self._include_path[-1]
        # Restore include path and 'file' object. Only the bound readline()
        # was saved, so the file object to close is reached through it.
        self._readline.__self__.close()  # __self__ fetches the 'file' object
        self._include_path, self._readline = self._filestack.pop()
    def _next_line(self):
        # Fetches and tokenizes the next line from the current Kconfig file.
        # Returns False at EOF and True otherwise.
        # We might already have tokens from parsing a line and discovering that
        # it's part of a different construct
        if self._reuse_tokens:
            self._reuse_tokens = False
            # self._tokens_i is known to be 1 here, because _parse_props()
            # leaves it like that when it can't recognize a line (or parses a
            # help text)
            return True
        # readline() returns '' over and over at EOF, which we rely on for help
        # texts at the end of files (see _line_after_help())
        line = self._readline()
        if not line:
            return False
        self.linenr += 1
        # Handle line joining: drop the trailing '\<newline>' and splice in
        # the next physical line
        while line.endswith("\\\n"):
            line = line[:-2] + self._readline()
            self.linenr += 1
        self._tokens = self._tokenize(line)
        # Initialize to 1 instead of 0 to factor out code from _parse_block()
        # and _parse_props(). They immediately fetch self._tokens[0].
        self._tokens_i = 1
        return True
    def _line_after_help(self, line):
        # Tokenizes a line after a help text. This case is special in that the
        # line has already been fetched (to discover that it isn't part of the
        # help text).
        #
        # An earlier version used a _saved_line variable instead that was
        # checked in _next_line(). This special-casing gets rid of it and makes
        # _reuse_tokens alone sufficient to handle unget.
        # Handle line joining
        while line.endswith("\\\n"):
            line = line[:-2] + self._readline()
            self.linenr += 1
        self._tokens = self._tokenize(line)
        # Flag the tokens for reuse, so that the next _next_line() call
        # returns them instead of reading a new line
        self._reuse_tokens = True
def _write_if_changed(self, filename, contents):
# Writes 'contents' into 'filename', but only if it differs from the
# current contents of the file.
#
# Another variant would be write a temporary file on the same
# filesystem, compare the files, and rename() the temporary file if it
# differs, but it breaks stuff like write_config("/dev/null"), which is
# used out there to force evaluation-related warnings to be generated.
# This simple version is pretty failsafe and portable.
#
# Returns True if the file has changed and is updated, and False
# otherwise.
if self._contents_eq(filename, contents):
return False
with self._open(filename, "w") as f:
f.write(contents)
return True
def _contents_eq(self, filename, contents):
# Returns True if the contents of 'filename' is 'contents' (a string),
# and False otherwise (including if 'filename' can't be opened/read)
try:
with self._open(filename, "r") as f:
# Robust re. things like encoding and line endings (mmap()
# trickery isn't)
return f.read(len(contents) + 1) == contents
except EnvironmentError:
# If the error here would prevent writing the file as well, we'll
# notice it later
return False
#
# Tokenization
#
def _lookup_sym(self, name):
# Fetches the symbol 'name' from the symbol table, creating and
# registering it if it does not exist. If '_parsing_kconfigs' is False,
# it means we're in eval_string(), and new symbols won't be registered.
if name in self.syms:
return self.syms[name]
sym = Symbol()
sym.kconfig = self
sym.name = name
sym.is_constant = False
sym.configdefaults = []
sym.rev_dep = sym.weak_rev_dep = sym.direct_dep = self.n
if self._parsing_kconfigs:
self.syms[name] = sym
else:
self._warn("no symbol {} in configuration".format(name))
return sym
def _lookup_const_sym(self, name):
# Like _lookup_sym(), for constant (quoted) symbols
if name in self.const_syms:
return self.const_syms[name]
sym = Symbol()
sym.kconfig = self
sym.name = name
sym.is_constant = True
sym.configdefaults = []
sym.rev_dep = sym.weak_rev_dep = sym.direct_dep = self.n
if self._parsing_kconfigs:
self.const_syms[name] = sym
return sym
    def _tokenize(self, s):
        # Parses 's', returning a None-terminated list of tokens. Registers any
        # new symbols encountered with _lookup(_const)_sym().
        #
        # Tries to be reasonably speedy by processing chunks of text via
        # regexes and string operations where possible. This is the biggest
        # hotspot during parsing.
        #
        # It might be possible to rewrite this to 'yield' tokens instead,
        # working across multiple lines. Lookback and compatibility with old
        # janky versions of the C tools complicate things though.
        self._line = s  # Used for error reporting
        # Initial token on the line
        match = _command_match(s)
        if not match:
            if s.isspace() or s.lstrip().startswith("#"):
                # Blank lines and comment-only lines tokenize to nothing
                return (None,)
            self._parse_error("unknown token at start of line")
        # Tricky implementation detail: While parsing a token, 'token' refers
        # to the previous token. See _STRING_LEX for why this is needed.
        token = _get_keyword(match.group(1))
        if not token:
            # Backwards compatibility with old versions of the C tools, which
            # (accidentally) accepted stuff like "--help--" and "-help---".
            # This was fixed in the C tools by commit c2264564 ("kconfig: warn
            # of unhandled characters in Kconfig commands"), committed in July
            # 2015, but it seems people still run Kconfiglib on older kernels.
            if s.strip(" \t\n-") == "help":
                return (_T_HELP, None)
            # If the first token is not a keyword (and not a weird help token),
            # we have a preprocessor variable assignment (or a bare macro on a
            # line)
            self._parse_assignment(s)
            return (None,)
        tokens = [token]
        # The current index in the string being tokenized
        i = match.end()
        # Main tokenization loop (for tokens past the first one)
        while i < len(s):
            # Test for an identifier/keyword first. This is the most common
            # case.
            match = _id_keyword_match(s, i)
            if match:
                # We have an identifier or keyword
                # Check what it is. lookup_sym() will take care of allocating
                # new symbols for us the first time we see them. Note that
                # 'token' still refers to the previous token.
                name = match.group(1)
                keyword = _get_keyword(name)
                if keyword:
                    # It's a keyword
                    token = keyword
                    # Jump past it
                    i = match.end()
                elif token not in _STRING_LEX:
                    # It's a non-const symbol, except we translate n, m, and y
                    # into the corresponding constant symbols, like the C
                    # implementation
                    if "$" in name:
                        # Macro expansion within symbol name
                        name, s, i = self._expand_name(s, i)
                    else:
                        i = match.end()
                    token = self.const_syms[name] if name in STR_TO_TRI else \
                        self._lookup_sym(name)
                else:
                    # It's a case of missing quotes. For example, the
                    # following is accepted:
                    #
                    #   menu unquoted_title
                    #
                    #   config A
                    #       tristate unquoted_prompt
                    #
                    #   endmenu
                    #
                    # Named choices ('choice FOO') also end up here.
                    if token is not _T_CHOICE:
                        self._warn("style: quotes recommended around '{}' in '{}'"
                                   .format(name, self._line.strip()),
                                   self.filename, self.linenr)
                    token = name
                    i = match.end()
            else:
                # Neither a keyword nor a non-const symbol
                # We always strip whitespace after tokens, so it is safe to
                # assume that s[i] is the start of a token here.
                c = s[i]
                if c in "\"'":
                    if "$" not in s and "\\" not in s:
                        # Fast path for lines without $ and \. Find the
                        # matching quote.
                        end_i = s.find(c, i + 1) + 1
                        if not end_i:
                            self._parse_error("unterminated string")
                        val = s[i + 1:end_i - 1]
                        i = end_i
                    else:
                        # Slow path
                        s, end_i = self._expand_str(s, i)
                        # os.path.expandvars() and the $UNAME_RELEASE replace()
                        # is a backwards compatibility hack, which should be
                        # reasonably safe as expandvars() leaves references to
                        # undefined env. vars. as is.
                        #
                        # The preprocessor functionality changed how
                        # environment variables are referenced, to $(FOO).
                        val = expandvars(s[i + 1:end_i - 1]
                                         .replace("$UNAME_RELEASE",
                                                  _UNAME_RELEASE))
                        i = end_i
                    # This is the only place where we don't survive with a
                    # single token of lookback: 'option env="FOO"' does not
                    # refer to a constant symbol named "FOO".
                    token = \
                        val if token in _STRING_LEX or tokens[0] is _T_OPTION \
                        else self._lookup_const_sym(val)
                elif s.startswith("&&", i):
                    token = _T_AND
                    i += 2
                elif s.startswith("||", i):
                    token = _T_OR
                    i += 2
                elif c == "=":
                    token = _T_EQUAL
                    i += 1
                elif s.startswith("!=", i):
                    token = _T_UNEQUAL
                    i += 2
                elif c == "!":
                    token = _T_NOT
                    i += 1
                elif c == "(":
                    token = _T_OPEN_PAREN
                    i += 1
                elif c == ")":
                    token = _T_CLOSE_PAREN
                    i += 1
                elif c == "#":
                    # Trailing comment: ignore the rest of the line
                    break
                # Very rare
                elif s.startswith("<=", i):
                    token = _T_LESS_EQUAL
                    i += 2
                elif c == "<":
                    token = _T_LESS
                    i += 1
                elif s.startswith(">=", i):
                    token = _T_GREATER_EQUAL
                    i += 2
                elif c == ">":
                    token = _T_GREATER
                    i += 1
                else:
                    self._parse_error("unknown tokens in line")
            # Skip trailing whitespace
            while i < len(s) and s[i].isspace():
                i += 1
            # Add the token
            tokens.append(token)
        # None-terminating the token list makes token fetching simpler/faster
        tokens.append(None)
        return tokens
# Helpers for syntax checking and token fetching. See the
# 'Intro to expressions' section for what a constant symbol is.
#
# More of these could be added, but the single-use cases are inlined as an
# optimization.
def _expect_sym(self):
token = self._tokens[self._tokens_i]
self._tokens_i += 1
if token.__class__ is not Symbol:
self._parse_error("expected symbol")
return token
def _expect_nonconst_sym(self):
# Used for 'select' and 'imply' only. We know the token indices.
token = self._tokens[1]
self._tokens_i = 2
if token.__class__ is not Symbol or token.is_constant:
self._parse_error("expected nonconstant symbol")
return token
def _expect_str_and_eol(self):
token = self._tokens[self._tokens_i]
self._tokens_i += 1
if token.__class__ is not str:
self._parse_error("expected string")
if self._tokens[self._tokens_i] is not None:
self._trailing_tokens_error()
return token
def _expect_expr_and_eol(self):
expr = self._parse_expr(True)
if self._tokens[self._tokens_i] is not None:
self._trailing_tokens_error()
return expr
def _check_token(self, token):
# If the next token is 'token', removes it and returns True
if self._tokens[self._tokens_i] is token:
self._tokens_i += 1
return True
return False
#
# Preprocessor logic
#
    def _parse_assignment(self, s):
        # Parses a preprocessor variable assignment, registering the variable
        # if it doesn't already exist. Also takes care of bare macros on lines
        # (which are allowed, and can be useful for their side effects).
        # Expand any macros in the left-hand side of the assignment (the
        # variable name)
        s = s.lstrip()
        i = 0
        while 1:
            i = _assignment_lhs_fragment_match(s, i).end()
            if s.startswith("$(", i):
                s, i = self._expand_macro(s, i, ())
            else:
                break
        if s.isspace():
            # We also accept a bare macro on a line (e.g.
            # $(warning-if,$(foo),ops)), provided it expands to a blank string
            return
        # Assigned variable
        name = s[:i]
        # Extract assignment operator (=, :=, or +=) and value
        rhs_match = _assignment_rhs_match(s, i)
        if not rhs_match:
            self._parse_error("syntax error")
        op, val = rhs_match.groups()
        if name in self.variables:
            # Already seen variable
            var = self.variables[name]
        else:
            # New variable
            var = Variable()
            var.kconfig = self
            var.name = name
            var._n_expansions = 0  # Used for recursive-expansion detection
            self.variables[name] = var
            # += acts like = on undefined variables (defines a recursive
            # variable)
            if op == "+=":
                op = "="
        if op == "=":
            # Recursive (lazily expanded) assignment
            var.is_recursive = True
            var.value = val
        elif op == ":=":
            # Simple (immediately expanded) assignment
            var.is_recursive = False
            var.value = self._expand_whole(val, ())
        else:  # op == "+="
            # += does immediate expansion if the variable was last set
            # with :=
            var.value += " " + (val if var.is_recursive else
                                self._expand_whole(val, ()))
def _expand_whole(self, s, args):
# Expands preprocessor macros in all of 's'. Used whenever we don't
# have to worry about delimiters. See _expand_macro() re. the 'args'
# parameter.
#
# Returns the expanded string.
i = 0
while 1:
i = s.find("$(", i)
if i == -1:
break
s, i = self._expand_macro(s, i, args)
return s
def _expand_name(self, s, i):
# Expands a symbol name starting at index 'i' in 's'.
#
# Returns the expanded name, the expanded 's' (including the part
# before the name), and the index of the first character in the next
# token after the name.
s, end_i = self._expand_name_iter(s, i)
name = s[i:end_i]
# isspace() is False for empty strings
if not name.strip():
# Avoid creating a Kconfig symbol with a blank name. It's almost
# guaranteed to be an error.
self._parse_error("macro expanded to blank string")
# Skip trailing whitespace
while end_i < len(s) and s[end_i].isspace():
end_i += 1
return name, s, end_i
def _expand_name_iter(self, s, i):
# Expands a symbol name starting at index 'i' in 's'.
#
# Returns the expanded 's' (including the part before the name) and the
# index of the first character after the expanded name in 's'.
while 1:
match = _name_special_search(s, i)
if match.group() != "$(":
return (s, match.start())
s, i = self._expand_macro(s, match.start(), ())
    def _expand_str(self, s, i):
        # Expands a quoted string starting at index 'i' in 's'. Handles both
        # backslash escapes and macro expansion.
        #
        # Returns the expanded 's' (including the part before the string) and
        # the index of the first character after the expanded string in 's'.
        quote = s[i]
        i += 1  # Skip over initial "/'
        while 1:
            match = _string_special_search(s, i)
            if not match:
                self._parse_error("unterminated string")
            if match.group() == quote:
                # Found the end of the string
                return (s, match.end())
            elif match.group() == "\\":
                # Replace '\x' with 'x' (the backslash itself is cut out of
                # 's'). 'i' ends up pointing to the character
                # after 'x', which allows macros to be canceled with '\$(foo)'.
                i = match.end()
                s = s[:match.start()] + s[i:]
            elif match.group() == "$(":
                # A macro call within the string
                s, i = self._expand_macro(s, match.start(), ())
            else:
                # A ' quote within " quotes or vice versa
                i += 1
    def _expand_macro(self, s, i, args):
        # Expands a macro starting at index 'i' in 's'. If this macro resulted
        # from the expansion of another macro, 'args' holds the arguments
        # passed to that macro.
        #
        # Returns the expanded 's' (including the part before the macro) and
        # the index of the first character after the expanded macro in 's'.
        res = s[:i]  # Accumulates the already-expanded prefix of 's'
        i += 2  # Skip over "$("
        arg_start = i  # Start of current macro argument
        new_args = []  # Arguments of this macro call
        nesting = 0  # Current parentheses nesting level
        while 1:
            match = _macro_special_search(s, i)
            if not match:
                self._parse_error("missing end parenthesis in macro expansion")
            if match.group() == "(":
                nesting += 1
                i = match.end()
            elif match.group() == ")":
                if nesting:
                    # Closes a nested parenthesis, not the macro itself
                    nesting -= 1
                    i = match.end()
                    continue
                # Found the end of the macro
                new_args.append(s[arg_start:match.start()])
                # $(1) is replaced by the first argument to the function, etc.,
                # provided at least that many arguments were passed
                try:
                    # Does the macro look like an integer, with a corresponding
                    # argument? If so, expand it to the value of the argument.
                    res += args[int(new_args[0])]
                except (ValueError, IndexError):
                    # Regular variables are just functions without arguments,
                    # and also go through the function value path
                    res += self._fn_val(new_args)
                return (res + s[match.end():], len(res))
            elif match.group() == ",":
                i = match.end()
                if nesting:
                    # Comma inside nested parentheses is not a separator
                    continue
                # Found the end of a macro argument
                new_args.append(s[arg_start:match.start()])
                arg_start = i
            else:  # match.group() == "$("
                # A nested macro call within the macro
                s, i = self._expand_macro(s, match.start(), args)
    def _fn_val(self, args):
        # Returns the result of calling the function args[0] with the arguments
        # args[1..len(args)-1]. Plain variables are treated as functions
        # without arguments.
        fn = args[0]
        if fn in self.variables:
            var = self.variables[fn]
            if len(args) == 1:
                # Plain variable
                if var._n_expansions:
                    # The variable is already being expanded further up the
                    # call stack, so this is direct or indirect self-reference
                    self._parse_error("Preprocessor variable {} recursively "
                                      "references itself".format(var.name))
            elif var._n_expansions > 100:
                # Allow functions to call themselves, but guess that functions
                # that are overly recursive are stuck
                self._parse_error("Preprocessor function {} seems stuck "
                                  "in infinite recursion".format(var.name))
            # Bump the expansion counter around the expansion so the recursion
            # checks above see in-progress expansions
            var._n_expansions += 1
            res = self._expand_whole(self.variables[fn].value, args)
            var._n_expansions -= 1
            return res
        if fn in self._functions:
            # Built-in or user-defined function
            py_fn, min_arg, max_arg = self._functions[fn]
            # max_arg being None means "no upper bound on argument count"
            if len(args) - 1 < min_arg or \
               (max_arg is not None and len(args) - 1 > max_arg):
                if min_arg == max_arg:
                    expected_args = min_arg
                elif max_arg is None:
                    expected_args = "{} or more".format(min_arg)
                else:
                    expected_args = "{}-{}".format(min_arg, max_arg)
                raise KconfigError("{}:{}: bad number of arguments in call "
                                   "to {}, expected {}, got {}"
                                   .format(self.filename, self.linenr, fn,
                                           expected_args, len(args) - 1))
            return py_fn(self, *args)
        # Environment variables are tried last
        if fn in os.environ:
            # Remember the reference for dependency tracking
            self.env_vars.add(fn)
            return os.environ[fn]
        return ""
#
# Parsing
#
def _make_and(self, e1, e2):
# Constructs an AND (&&) expression. Performs trivial simplification.
if e1 is self.y:
return e2
if e2 is self.y:
return e1
if e1 is self.n or e2 is self.n:
return self.n
return (AND, e1, e2)
def _make_or(self, e1, e2):
# Constructs an OR (||) expression. Performs trivial simplification.
if e1 is self.n:
return e2
if e2 is self.n:
return e1
if e1 is self.y or e2 is self.y:
return self.y
return (OR, e1, e2)
    def _parse_block(self, end_token, parent, prev):
        # Parses a block, which is the contents of either a file or an if,
        # menu, or choice statement.
        #
        # end_token:
        #   The token that ends the block, e.g. _T_ENDIF ("endif") for ifs.
        #   None for files.
        #
        # parent:
        #   The parent menu node, corresponding to a menu, Choice, or 'if'.
        #   'if's are flattened after parsing.
        #
        # prev:
        #   The previous menu node. New nodes will be added after this one (by
        #   modifying 'next' pointers).
        #
        #   'prev' is reused to parse a list of child menu nodes (for a menu or
        #   Choice): After parsing the children, the 'next' pointer is assigned
        #   to the 'list' pointer to "tilt up" the children above the node.
        #
        # Returns the final menu node in the block (or 'prev' if the block is
        # empty). This allows chaining.
        while self._next_line():
            t0 = self._tokens[0]
            if t0 in [_T_CONFIG, _T_MENUCONFIG, _T_CONFIGDEFAULT]:
                # The tokenizer allocates Symbol objects for us
                sym = self._tokens[1]
                if sym.__class__ is not Symbol or sym.is_constant:
                    self._parse_error("missing or bad symbol name")
                if self._tokens[2] is not None:
                    self._trailing_tokens_error()
                self.defined_syms.append(sym)
                node = MenuNode()
                node.kconfig = self
                node.item = sym
                node.is_menuconfig = t0 is _T_MENUCONFIG
                node.is_configdefault = t0 is _T_CONFIGDEFAULT
                node.prompt = node.help = node.list = None
                node.parent = parent
                node.filename = self.filename
                node.linenr = self.linenr
                node.include_path = self._include_path
                sym.nodes.append(node)
                self._parse_props(node)
                if node.is_configdefault:
                    # 'configdefault' entries may not carry any property other
                    # than 'default'
                    if (node.prompt or
                            node.dep != self.y or
                            len(node.ranges) > 0 or
                            len(node.selects) > 0 or
                            len(node.implies) > 0):
                        self._parse_error("configdefault can only contain `default`")
                if node.is_menuconfig and not node.prompt:
                    self._warn("the menuconfig symbol {} has no prompt"
                               .format(sym.name_and_loc))
                # Equivalent to
                #
                #   prev.next = node
                #   prev = node
                #
                # due to tricky Python semantics. The order matters.
                prev.next = prev = node
            elif t0 is None:
                # Blank line
                continue
            elif t0 in _SOURCE_TOKENS:
                pattern = self._expect_str_and_eol()
                if t0 in _REL_SOURCE_TOKENS:
                    # Relative source
                    pattern = join(dirname(self.filename), pattern)
                # - glob() doesn't support globbing relative to a directory, so
                #   we need to prepend $srctree to 'pattern'. Use join()
                #   instead of '+' so that an absolute path in 'pattern' is
                #   preserved.
                #
                # - Sort the glob results to ensure a consistent ordering of
                #   Kconfig symbols, which indirectly ensures a consistent
                #   ordering in e.g. .config files
                filenames = sorted(iglob(join(self._srctree_prefix, pattern)))
                if not filenames and t0 in _OBL_SOURCE_TOKENS:
                    # Non-optional 'source' with no matching files is an error
                    raise KconfigError(
                        "{}:{}: '{}' not found (in '{}'). Check that "
                        "environment variables are set correctly (e.g. "
                        "$srctree, which is {}). Also note that unset "
                        "environment variables expand to the empty string."
                        .format(self.filename, self.linenr, pattern,
                                self._line.strip(),
                                "set to '{}'".format(self.srctree)
                                if self.srctree else "unset or blank"))
                for filename in filenames:
                    self._enter_file(filename)
                    prev = self._parse_block(None, parent, prev)
                    self._leave_file()
            elif t0 is end_token:
                # Reached the end of the block. Terminate the final node and
                # return it.
                if self._tokens[1] is not None:
                    self._trailing_tokens_error()
                prev.next = None
                return prev
            elif t0 is _T_IF:
                node = MenuNode()
                node.item = node.prompt = None
                node.parent = parent
                node.dep = self._expect_expr_and_eol()
                self._parse_block(_T_ENDIF, node, node)
                node.list = node.next
                prev.next = prev = node
            elif t0 is _T_MENU:
                node = MenuNode()
                node.kconfig = self
                node.item = t0  # _T_MENU == MENU
                node.is_menuconfig = True
                node.prompt = (self._expect_str_and_eol(), self.y)
                node.visibility = self.y
                node.parent = parent
                node.filename = self.filename
                node.linenr = self.linenr
                node.include_path = self._include_path
                self.menus.append(node)
                self._parse_props(node)
                self._parse_block(_T_ENDMENU, node, node)
                node.list = node.next
                prev.next = prev = node
            elif t0 is _T_COMMENT:
                node = MenuNode()
                node.kconfig = self
                node.item = t0  # _T_COMMENT == COMMENT
                node.is_menuconfig = False
                node.prompt = (self._expect_str_and_eol(), self.y)
                node.list = None
                node.parent = parent
                node.filename = self.filename
                node.linenr = self.linenr
                node.include_path = self._include_path
                self.comments.append(node)
                self._parse_props(node)
                prev.next = prev = node
            elif t0 is _T_CHOICE:
                if self._tokens[1] is None:
                    choice = Choice()
                    choice.direct_dep = self.n
                else:
                    # Named choice. A named choice defined in multiple
                    # locations reuses the same Choice object.
                    name = self._expect_str_and_eol()
                    choice = self.named_choices.get(name)
                    if not choice:
                        choice = Choice()
                        choice.name = name
                        choice.direct_dep = self.n
                        self.named_choices[name] = choice
                self.choices.append(choice)
                node = MenuNode()
                node.kconfig = choice.kconfig = self
                node.item = choice
                node.is_menuconfig = True
                node.prompt = node.help = None
                node.parent = parent
                node.filename = self.filename
                node.linenr = self.linenr
                node.include_path = self._include_path
                choice.nodes.append(node)
                self._parse_props(node)
                self._parse_block(_T_ENDCHOICE, node, node)
                node.list = node.next
                prev.next = prev = node
            elif t0 is _T_MAINMENU:
                self.top_node.prompt = (self._expect_str_and_eol(), self.y)
            else:
                # A valid endchoice/endif/endmenu is caught by the 'end_token'
                # check above
                self._parse_error(
                    "no corresponding 'choice'" if t0 is _T_ENDCHOICE else
                    "no corresponding 'if'" if t0 is _T_ENDIF else
                    "no corresponding 'menu'" if t0 is _T_ENDMENU else
                    "unrecognized construct")
        # End of file reached. Return the last node.
        if end_token:
            # We were parsing an if/menu/choice and ran out of lines before
            # seeing its terminator
            raise KconfigError(
                "error: expected '{}' at end of '{}'"
                .format("endchoice" if end_token is _T_ENDCHOICE else
                        "endif" if end_token is _T_ENDIF else
                        "endmenu",
                        self.filename))
        return prev
def _parse_cond(self):
# Parses an optional 'if <expr>' construct and returns the parsed
# <expr>, or self.y if the next token is not _T_IF
expr = self._parse_expr(True) if self._check_token(_T_IF) else self.y
if self._tokens[self._tokens_i] is not None:
self._trailing_tokens_error()
return expr
    def _parse_props(self, node):
        # Parses and adds properties to the MenuNode 'node' (type, 'prompt',
        # 'default's, etc.) Properties are later copied up to symbols and
        # choices in a separate pass after parsing, in e.g.
        # _add_props_to_sym().
        #
        # An older version of this code added properties directly to symbols
        # and choices instead of to their menu nodes (and handled dependency
        # propagation simultaneously), but that loses information on where a
        # property is added when a symbol or choice is defined in multiple
        # locations. Some Kconfig configuration systems rely heavily on such
        # symbols, and better docs can be generated by keeping track of where
        # properties are added.
        #
        # node:
        #   The menu node we're parsing properties on
        # Dependencies from 'depends on'. Will get propagated to the properties
        # below.
        node.dep = self.y
        # Consume property lines until a non-property line is hit
        while self._next_line():
            t0 = self._tokens[0]
            if t0 in _TYPE_TOKENS:
                # Relies on '_T_BOOL is BOOL', etc., to save a conversion
                self._set_type(node.item, t0)
                if self._tokens[1] is not None:
                    # Shorthand like 'bool "prompt"'
                    self._parse_prompt(node)
            elif t0 is _T_DEPENDS:
                if not self._check_token(_T_ON):
                    self._parse_error("expected 'on' after 'depends'")
                # Multiple 'depends on' lines are ANDed together
                node.dep = self._make_and(node.dep,
                                          self._expect_expr_and_eol())
            elif t0 is _T_HELP:
                self._parse_help(node)
            elif t0 is _T_SELECT:
                if node.item.__class__ is not Symbol:
                    self._parse_error("only symbols can select")
                node.selects.append((self._expect_nonconst_sym(),
                                     self._parse_cond()))
            elif t0 is None:
                # Blank line
                continue
            elif t0 is _T_DEFAULT:
                node.defaults.append((self._parse_expr(False),
                                      self._parse_cond()))
            elif t0 in _DEF_TOKEN_TO_TYPE:
                # 'def_bool', 'def_tristate', etc.: type plus default in one
                self._set_type(node.item, _DEF_TOKEN_TO_TYPE[t0])
                node.defaults.append((self._parse_expr(False),
                                      self._parse_cond()))
            elif t0 is _T_PROMPT:
                self._parse_prompt(node)
            elif t0 is _T_RANGE:
                node.ranges.append((self._expect_sym(), self._expect_sym(),
                                    self._parse_cond()))
            elif t0 is _T_IMPLY:
                if node.item.__class__ is not Symbol:
                    self._parse_error("only symbols can imply")
                node.implies.append((self._expect_nonconst_sym(),
                                     self._parse_cond()))
            elif t0 is _T_VISIBLE:
                if not self._check_token(_T_IF):
                    self._parse_error("expected 'if' after 'visible'")
                node.visibility = self._make_and(node.visibility,
                                                 self._expect_expr_and_eol())
            elif t0 is _T_OPTION:
                if self._check_token(_T_ENV):
                    if not self._check_token(_T_EQUAL):
                        self._parse_error("expected '=' after 'env'")
                    env_var = self._expect_str_and_eol()
                    node.item.env_var = env_var
                    if env_var in os.environ:
                        node.defaults.append(
                            (self._lookup_const_sym(os.environ[env_var]),
                             self.y))
                    else:
                        self._warn("{1} has 'option env=\"{0}\"', "
                                   "but the environment variable {0} is not "
                                   "set".format(node.item.name, env_var),
                                   self.filename, self.linenr)
                    if env_var != node.item.name:
                        self._warn("Kconfiglib expands environment variables "
                                   "in strings directly, meaning you do not "
                                   "need 'option env=...' \"bounce\" symbols. "
                                   "For compatibility with the C tools, "
                                   "rename {} to {} (so that the symbol name "
                                   "matches the environment variable name)."
                                   .format(node.item.name, env_var),
                                   self.filename, self.linenr)
                elif self._check_token(_T_DEFCONFIG_LIST):
                    if not self.defconfig_list:
                        self.defconfig_list = node.item
                    else:
                        self._warn("'option defconfig_list' set on multiple "
                                   "symbols ({0} and {1}). Only {0} will be "
                                   "used.".format(self.defconfig_list.name,
                                                  node.item.name),
                                   self.filename, self.linenr)
                elif self._check_token(_T_MODULES):
                    # To reduce warning spam, only warn if 'option modules' is
                    # set on some symbol that isn't MODULES, which should be
                    # safe. I haven't run into any projects that make use
                    # modules besides the kernel yet, and there it's likely to
                    # keep being called "MODULES".
                    if node.item is not self.modules:
                        self._warn("the 'modules' option is not supported. "
                                   "Let me know if this is a problem for you, "
                                   "as it wouldn't be that hard to implement. "
                                   "Note that modules are supported -- "
                                   "Kconfiglib just assumes the symbol name "
                                   "MODULES, like older versions of the C "
                                   "implementation did when 'option modules' "
                                   "wasn't used.",
                                   self.filename, self.linenr)
                elif self._check_token(_T_ALLNOCONFIG_Y):
                    if node.item.__class__ is not Symbol:
                        self._parse_error("the 'allnoconfig_y' option is only "
                                          "valid for symbols")
                    node.item.is_allnoconfig_y = True
                else:
                    self._parse_error("unrecognized option")
            elif t0 is _T_OPTIONAL:
                if node.item.__class__ is not Choice:
                    self._parse_error('"optional" is only valid for choices')
                node.item.is_optional = True
            else:
                # Reuse the tokens for the non-property line later
                self._reuse_tokens = True
                return
def _set_type(self, sc, new_type):
# Sets the type of 'sc' (symbol or choice) to 'new_type'
# UNKNOWN is falsy
if sc.orig_type and sc.orig_type is not new_type:
self._warn("{} defined with multiple types, {} will be used"
.format(sc.name_and_loc, TYPE_TO_STR[new_type]))
sc.orig_type = new_type
def _parse_prompt(self, node):
# 'prompt' properties override each other within a single definition of
# a symbol, but additional prompts can be added by defining the symbol
# multiple times
if node.prompt:
self._warn(node.item.name_and_loc +
" defined with multiple prompts in single location")
prompt = self._tokens[1]
self._tokens_i = 2
if prompt.__class__ is not str:
self._parse_error("expected prompt string")
if prompt != prompt.strip():
self._warn(node.item.name_and_loc +
" has leading or trailing whitespace in its prompt")
# This avoid issues for e.g. reStructuredText documentation, where
# '*prompt *' is invalid
prompt = prompt.strip()
node.prompt = (prompt, self._parse_cond())
    def _parse_help(self, node):
        # Parses the help text after a 'help' keyword and stores it (dedented
        # and with trailing whitespace stripped) in 'node.help'
        if node.help is not None:
            self._warn(node.item.name_and_loc + " defined with more than "
                       "one help text -- only the last one will be used")
        # Micro-optimization. This code is pretty hot.
        readline = self._readline
        # Find first non-blank (not all-space) line and get its
        # indentation
        while 1:
            line = readline()
            self.linenr += 1
            if not line:
                # End of file before any help text
                self._empty_help(node, line)
                return
            if not line.isspace():
                break
        len_ = len  # Micro-optimization
        # Use a separate 'expline' variable here and below to avoid stomping on
        # any tabs people might've put deliberately into the first line after
        # the help text
        expline = line.expandtabs()
        indent = len_(expline) - len_(expline.lstrip())
        if not indent:
            # The "help text" starts at column zero, i.e. it is really the
            # next construct rather than help text
            self._empty_help(node, line)
            return
        # The help text goes on till the first non-blank line with less indent
        # than the first line
        # Add the first line
        lines = [expline[indent:]]
        add_line = lines.append  # Micro-optimization
        while 1:
            line = readline()
            if line.isspace():
                # No need to preserve the exact whitespace in these
                add_line("\n")
            elif not line:
                # End of file
                break
            else:
                expline = line.expandtabs()
                if len_(expline) - len_(expline.lstrip()) < indent:
                    # Dedented: the help text has ended
                    break
                add_line(expline[indent:])
        self.linenr += len_(lines)
        node.help = "".join(lines).rstrip()
        if line:
            # Hand back the line that terminated the help text so that it is
            # parsed as the next construct
            self._line_after_help(line)
    def _empty_help(self, node, line):
        # Called when a 'help' keyword has no actual help text after it.
        # Warns, records an empty help string, and hands 'line' back for
        # normal parsing if it holds the next construct.
        self._warn(node.item.name_and_loc +
                   " has 'help' but empty help text")
        node.help = ""
        if line:
            self._line_after_help(line)
def _parse_expr(self, transform_m):
# Parses an expression from the tokens in Kconfig._tokens using a
# simple top-down approach. See the module docstring for the expression
# format.
#
# transform_m:
# True if m should be rewritten to m && MODULES. See the
# Kconfig.eval_string() documentation.
# Grammar:
#
# expr: and_expr ['||' expr]
# and_expr: factor ['&&' and_expr]
# factor: <symbol> ['='/'!='/'<'/... <symbol>]
# '!' factor
# '(' expr ')'
#
# It helps to think of the 'expr: and_expr' case as a single-operand OR
# (no ||), and of the 'and_expr: factor' case as a single-operand AND
# (no &&). Parsing code is always a bit tricky.
# Mind dump: parse_factor() and two nested loops for OR and AND would
# work as well. The straightforward implementation there gives a
# (op, (op, (op, A, B), C), D) parse for A op B op C op D. Representing
# expressions as (op, [list of operands]) instead goes nicely with that
# version, but is wasteful for short expressions and complicates
# expression evaluation and other code that works on expressions (more
# complicated code likely offsets any performance gain from less
# recursion too). If we also try to optimize the list representation by
# merging lists when possible (e.g. when ANDing two AND expressions),
# we end up allocating a ton of lists instead of reusing expressions,
# which is bad.
and_expr = self._parse_and_expr(transform_m)
# Return 'and_expr' directly if we have a "single-operand" OR.
# Otherwise, parse the expression on the right and make an OR node.
# This turns A || B || C || D into (OR, A, (OR, B, (OR, C, D))).
return and_expr if not self._check_token(_T_OR) else \
(OR, and_expr, self._parse_expr(transform_m))
def _parse_and_expr(self, transform_m):
factor = self._parse_factor(transform_m)
# Return 'factor' directly if we have a "single-operand" AND.
# Otherwise, parse the right operand and make an AND node. This turns
# A && B && C && D into (AND, A, (AND, B, (AND, C, D))).
return factor if not self._check_token(_T_AND) else \
(AND, factor, self._parse_and_expr(transform_m))
    def _parse_factor(self, transform_m):
        # Parses a factor per the grammar in _parse_expr(): a plain symbol, a
        # relation (<sym> = <sym>, etc.), a negation, or a parenthesized
        # expression
        token = self._tokens[self._tokens_i]
        self._tokens_i += 1
        if token.__class__ is Symbol:
            # Plain symbol or relation
            if self._tokens[self._tokens_i] not in _RELATIONS:
                # Plain symbol
                # For conditional expressions ('depends on <expr>',
                # '... if <expr>', etc.), m is rewritten to m && MODULES.
                if transform_m and token is self.m:
                    return (AND, self.m, self.modules)
                return token
            # Relation
            #
            # _T_EQUAL, _T_UNEQUAL, etc., deliberately have the same values as
            # EQUAL, UNEQUAL, etc., so we can just use the token directly
            self._tokens_i += 1
            return (self._tokens[self._tokens_i - 1], token,
                    self._expect_sym())
        if token is _T_NOT:
            # token == _T_NOT == NOT
            return (token, self._parse_factor(transform_m))
        if token is _T_OPEN_PAREN:
            expr_parse = self._parse_expr(transform_m)
            if self._check_token(_T_CLOSE_PAREN):
                return expr_parse
        # Fell through every case: not a valid factor
        self._parse_error("malformed expression")
#
# Caching and invalidation
#
    def _build_dep(self):
        # Populates the Symbol/Choice._dependents sets, which contain all other
        # items (symbols and choices) that immediately depend on the item in
        # the sense that changing the value of the item might affect the value
        # of the dependent items. This is used for caching/invalidation.
        #
        # The calculated sets might be larger than necessary as we don't do any
        # complex analysis of the expressions.
        depend_on = _depend_on  # Micro-optimization
        # Only calculate _dependents for defined symbols. Constant and
        # undefined symbols could theoretically be selected/implied, but it
        # wouldn't change their value, so it's not a true dependency.
        for sym in self.unique_defined_syms:
            # Symbols depend on the following:
            # The prompt conditions
            for node in sym.nodes:
                if node.prompt:
                    depend_on(sym, node.prompt[1])
            # The default values and their conditions
            for value, cond in sym.defaults:
                depend_on(sym, value)
                depend_on(sym, cond)
            # The reverse and weak reverse dependencies
            depend_on(sym, sym.rev_dep)
            depend_on(sym, sym.weak_rev_dep)
            # The ranges along with their conditions
            for low, high, cond in sym.ranges:
                depend_on(sym, low)
                depend_on(sym, high)
                depend_on(sym, cond)
            # The direct dependencies. This is usually redundant, as the direct
            # dependencies get propagated to properties, but it's needed to get
            # invalidation solid for 'imply', which only checks the direct
            # dependencies (even if there are no properties to propagate it
            # to).
            depend_on(sym, sym.direct_dep)
            # In addition to the above, choice symbols depend on the choice
            # they're in, but that's handled automatically since the Choice is
            # propagated to the conditions of the properties before
            # _build_dep() runs.
        for choice in self.unique_choices:
            # Choices depend on the following:
            # The prompt conditions
            for node in choice.nodes:
                if node.prompt:
                    depend_on(choice, node.prompt[1])
            # The default symbol conditions
            for _, cond in choice.defaults:
                depend_on(choice, cond)
def _add_choice_deps(self):
# Choices also depend on the choice symbols themselves, because the
# y-mode selection of the choice might change if a choice symbol's
# visibility changes.
#
# We add these dependencies separately after dependency loop detection.
# The invalidation algorithm can handle the resulting
# <choice symbol> <-> <choice> dependency loops, but they make loop
# detection awkward.
for choice in self.unique_choices:
for sym in choice.syms:
sym._dependents.add(choice)
def _invalidate_all(self):
# Undefined symbols never change value and don't need to be
# invalidated, so we can just iterate over defined symbols.
# Invalidating constant symbols would break things horribly.
for sym in self.unique_defined_syms:
sym._invalidate()
for choice in self.unique_choices:
choice._invalidate()
#
# Post-parsing menu tree processing, including dependency propagation and
# implicit submenu creation
#
def _finalize_sym(self, sym):
# Finalizes symbol definitions
#
# - Applies configdefault node defaults to final symbols
#
# sym:
# The symbol to finalize.
inserted = 0
for (idx, defaults) in sym.configdefaults:
for d in defaults:
# Add the defaults to the node, with the requirement that
# direct dependencies are respected. The original order
# of the default statements between nodes is preserved.
default = (d[0], self._make_and(sym.direct_dep, d[1]))
sym.defaults.insert(inserted + idx, default)
inserted += 1
    def _finalize_node(self, node, visible_if):
        # Finalizes a menu node and its children:
        #
        #  - Copies properties from menu nodes up to their contained
        #    symbols/choices
        #
        #  - Propagates dependencies from parent to child nodes
        #
        #  - Creates implicit menus (see kconfig-language.txt)
        #
        #  - Removes 'if' nodes
        #
        #  - Sets 'choice' types and registers choice symbols
        #
        # menu_finalize() in the C implementation is similar.
        #
        # node:
        #   The menu node to finalize. This node and its children will have
        #   been finalized when the function returns, and any implicit menus
        #   will have been created.
        #
        # visible_if:
        #   Dependencies from 'visible if' on parent menus. These are added to
        #   the prompts of symbols and choices.
        if node.item.__class__ is Symbol:
            # Copy defaults, ranges, selects, and implies to the Symbol
            self._add_props_to_sym(node)
            # Find any items that should go in an implicit menu rooted at the
            # symbol
            cur = node
            while cur.next and _auto_menu_dep(node, cur.next):
                # This makes implicit submenu creation work recursively, with
                # implicit menus inside implicit menus
                self._finalize_node(cur.next, visible_if)
                cur = cur.next
                cur.parent = node
            if cur is not node:
                # Found symbols that should go in an implicit submenu. Tilt
                # them up above us.
                node.list = node.next
                node.next = cur.next
                cur.next = None
        elif node.list:
            # The menu node is a choice, menu, or if. Finalize each child node.
            if node.item is MENU:
                visible_if = self._make_and(visible_if, node.visibility)
            # Propagate the menu node's dependencies to each child menu node.
            #
            # This needs to go before the recursive _finalize_node() call so
            # that implicit submenu creation can look ahead at dependencies.
            self._propagate_deps(node, visible_if)
            # Finalize the children
            cur = node.list
            while cur:
                self._finalize_node(cur, visible_if)
                cur = cur.next
        if node.list:
            # node's children have been individually finalized. Do final steps
            # to finalize this "level" in the menu tree.
            _flatten(node.list)
            _remove_ifs(node)
        # Empty choices (node.list None) are possible, so this needs to go
        # outside
        if node.item.__class__ is Choice:
            # Add the node's non-node-specific properties to the choice, like
            # _add_props_to_sym() does
            choice = node.item
            choice.direct_dep = self._make_or(choice.direct_dep, node.dep)
            choice.defaults += node.defaults
            _finalize_choice(node)
    def _propagate_deps(self, node, visible_if):
        # Propagates 'node's dependencies to its child menu nodes
        # If the parent node holds a Choice, we use the Choice itself as the
        # parent dependency. This makes sense as the value (mode) of the choice
        # limits the visibility of the contained choice symbols. The C
        # implementation works the same way.
        #
        # Due to the similar interface, Choice works as a drop-in replacement
        # for Symbol here.
        basedep = node.item if node.item.__class__ is Choice else node.dep
        cur = node.list
        while cur:
            # AND the child's own dependencies with the parent's
            dep = cur.dep = self._make_and(cur.dep, basedep)
            if cur.item.__class__ in _SYMBOL_CHOICE:
                # Propagate 'visible if' and dependencies to the prompt
                if cur.prompt:
                    cur.prompt = (cur.prompt[0],
                                  self._make_and(
                                      cur.prompt[1],
                                      self._make_and(visible_if, dep)))
                # Propagate dependencies to defaults
                if cur.defaults:
                    cur.defaults = [(default, self._make_and(cond, dep))
                                    for default, cond in cur.defaults]
                # Propagate dependencies to ranges
                if cur.ranges:
                    cur.ranges = [(low, high, self._make_and(cond, dep))
                                  for low, high, cond in cur.ranges]
                # Propagate dependencies to selects
                if cur.selects:
                    cur.selects = [(target, self._make_and(cond, dep))
                                   for target, cond in cur.selects]
                # Propagate dependencies to implies
                if cur.implies:
                    cur.implies = [(target, self._make_and(cond, dep))
                                   for target, cond in cur.implies]
            elif cur.prompt:  # Not a symbol/choice
                # Propagate dependencies to the prompt. 'visible if' is only
                # propagated to symbols/choices.
                cur.prompt = (cur.prompt[0],
                              self._make_and(cur.prompt[1], dep))
            cur = cur.next
    def _add_props_to_sym(self, node):
        # Copies properties from the menu node 'node' up to its contained
        # symbol, and adds (weak) reverse dependencies to selected/implied
        # symbols.
        #
        # This can't be rolled into _propagate_deps(), because that function
        # traverses the menu tree roughly breadth-first, meaning properties on
        # symbols defined in multiple locations could end up in the wrong
        # order.
        sym = node.item
        if node.is_configdefault:
            # Store any defaults for later application after the complete tree
            # is known. The current length of the default array is stored so
            # the configdefaults can be inserted in the order they originally
            # appeared.
            sym.configdefaults.append((len(sym.defaults), node.defaults))
            return
        # See the Symbol class docstring
        sym.direct_dep = self._make_or(sym.direct_dep, node.dep)
        sym.defaults += node.defaults
        sym.ranges += node.ranges
        sym.selects += node.selects
        sym.implies += node.implies
        # Modify the reverse dependencies of the selected symbol: selecting
        # this symbol pulls in the target
        for target, cond in node.selects:
            target.rev_dep = self._make_or(
                target.rev_dep,
                self._make_and(sym, cond))
        # Modify the weak reverse dependencies of the implied
        # symbol
        for target, cond in node.implies:
            target.weak_rev_dep = self._make_or(
                target.weak_rev_dep,
                self._make_and(sym, cond))
#
# Misc.
#
    def _check_sym_sanity(self):
        # Checks various symbol properties that are handiest to check after
        # parsing. Only generates errors and warnings.
        def num_ok(sym, type_):
            # Returns True if the (possibly constant) symbol 'sym' is valid as a value
            # for a symbol of type type_ (INT or HEX)
            # 'not sym.nodes' implies a constant or undefined symbol, e.g. a plain
            # "123"
            if not sym.nodes:
                return _is_base_n(sym.name, _TYPE_TO_BASE[type_])
            return sym.orig_type is type_
        for sym in self.unique_defined_syms:
            if sym.orig_type in _BOOL_TRISTATE:
                # A helper function could be factored out here, but keep it
                # speedy/straightforward
                for target_sym, _ in sym.selects:
                    if target_sym.orig_type not in _BOOL_TRISTATE_UNKNOWN:
                        self._warn("{} selects the {} symbol {}, which is not "
                                   "bool or tristate"
                                   .format(sym.name_and_loc,
                                           TYPE_TO_STR[target_sym.orig_type],
                                           target_sym.name_and_loc))
                for target_sym, _ in sym.implies:
                    if target_sym.orig_type not in _BOOL_TRISTATE_UNKNOWN:
                        self._warn("{} implies the {} symbol {}, which is not "
                                   "bool or tristate"
                                   .format(sym.name_and_loc,
                                           TYPE_TO_STR[target_sym.orig_type],
                                           target_sym.name_and_loc))
            elif sym.orig_type:  # STRING/INT/HEX
                for default, _ in sym.defaults:
                    if default.__class__ is not Symbol:
                        raise KconfigError(
                            "the {} symbol {} has a malformed default {} -- "
                            "expected a single symbol"
                            .format(TYPE_TO_STR[sym.orig_type],
                                    sym.name_and_loc, expr_str(default)))
                    if sym.orig_type is STRING:
                        if not default.is_constant and not default.nodes and \
                           not default.name.isupper():
                            # 'default foo' on a string symbol could be either a symbol
                            # reference or someone leaving out the quotes. Guess that
                            # the quotes were left out if 'foo' isn't all-uppercase
                            # (and no symbol named 'foo' exists).
                            self._warn("style: quotes recommended around "
                                       "default value for string symbol "
                                       + sym.name_and_loc)
                    elif not num_ok(default, sym.orig_type):  # INT/HEX
                        self._warn("the {0} symbol {1} has a non-{0} default {2}"
                                   .format(TYPE_TO_STR[sym.orig_type],
                                           sym.name_and_loc,
                                           default.name_and_loc))
                # select/imply only make sense for bool/tristate symbols
                if sym.selects or sym.implies:
                    self._warn("the {} symbol {} has selects or implies"
                               .format(TYPE_TO_STR[sym.orig_type],
                                       sym.name_and_loc))
            else:  # UNKNOWN
                self._warn("{} defined without a type"
                           .format(sym.name_and_loc))
            if sym.ranges:
                if sym.orig_type not in _INT_HEX:
                    self._warn(
                        "the {} symbol {} has ranges, but is not int or hex"
                        .format(TYPE_TO_STR[sym.orig_type],
                                sym.name_and_loc))
                else:
                    # Check that both range bounds are valid for the type
                    for low, high, _ in sym.ranges:
                        if not num_ok(low, sym.orig_type) or \
                           not num_ok(high, sym.orig_type):
                            self._warn("the {0} symbol {1} has a non-{0} "
                                       "range [{2}, {3}]"
                                       .format(TYPE_TO_STR[sym.orig_type],
                                               sym.name_and_loc,
                                               low.name_and_loc,
                                               high.name_and_loc))
    def _check_choice_sanity(self):
        # Checks various choice properties that are handiest to check after
        # parsing. Only generates errors and warnings.
        def warn_select_imply(sym, expr, expr_type):
            msg = "the choice symbol {} is {} by the following symbols, but " \
                  "select/imply has no effect on choice symbols" \
                  .format(sym.name_and_loc, expr_type)
            # si = select/imply
            for si in split_expr(expr, OR):
                msg += "\n - " + split_expr(si, AND)[0].name_and_loc
            self._warn(msg)
        for choice in self.unique_choices:
            if choice.orig_type not in _BOOL_TRISTATE:
                self._warn("{} defined with type {}"
                           .format(choice.name_and_loc,
                                   TYPE_TO_STR[choice.orig_type]))
            # Warn if no definition location of the choice has a prompt
            for node in choice.nodes:
                if node.prompt:
                    break
            else:
                self._warn(choice.name_and_loc + " defined without a prompt")
            for default, _ in choice.defaults:
                if default.__class__ is not Symbol:
                    raise KconfigError(
                        "{} has a malformed default {}"
                        .format(choice.name_and_loc, expr_str(default)))
                if default.choice is not choice:
                    self._warn("the default selection {} of {} is not "
                               "contained in the choice"
                               .format(default.name_and_loc,
                                       choice.name_and_loc))
            for sym in choice.syms:
                if sym.defaults:
                    self._warn("default on the choice symbol {} will have "
                               "no effect, as defaults do not affect choice "
                               "symbols".format(sym.name_and_loc))
                if sym.rev_dep is not sym.kconfig.n:
                    warn_select_imply(sym, sym.rev_dep, "selected")
                if sym.weak_rev_dep is not sym.kconfig.n:
                    warn_select_imply(sym, sym.weak_rev_dep, "implied")
                for node in sym.nodes:
                    if node.parent.item is choice:
                        if not node.prompt:
                            self._warn("the choice symbol {} has no prompt"
                                       .format(sym.name_and_loc))
                    elif node.prompt:
                        self._warn("the choice symbol {} is defined with a "
                                   "prompt outside the choice"
                                   .format(sym.name_and_loc))
def _parse_error(self, msg):
raise KconfigError("{}error: couldn't parse '{}': {}".format(
"" if self.filename is None else
"{}:{}: ".format(self.filename, self.linenr),
self._line.strip(), msg))
    def _trailing_tokens_error(self):
        # Raises a parse error for a line that has unexpected extra tokens
        # after the parsed construct. Thin wrapper so the message is defined
        # in one place.
        self._parse_error("extra tokens at end of line")
def _open(self, filename, mode):
# open() wrapper:
#
# - Enable universal newlines mode on Python 2 to ease
# interoperability between Linux and Windows. It's already the
# default on Python 3.
#
# The "U" flag would currently work for both Python 2 and 3, but it's
# deprecated on Python 3, so play it future-safe.
#
# io.open() defaults to universal newlines on Python 2 (and is an
# alias for open() on Python 3), but it returns 'unicode' strings and
# slows things down:
#
# Parsing x86 Kconfigs on Python 2
#
# with open(..., "rU"):
#
# real 0m0.930s
# user 0m0.905s
# sys 0m0.025s
#
# with io.open():
#
# real 0m1.069s
# user 0m1.040s
# sys 0m0.029s
#
# There's no appreciable performance difference between "r" and
# "rU" for parsing performance on Python 2.
#
# - For Python 3, force the encoding. Forcing the encoding on Python 2
# turns strings into Unicode strings, which gets messy. Python 2
# doesn't decode regular strings anyway.
return open(filename, "rU" if mode == "r" else mode) if _IS_PY2 else \
open(filename, mode, encoding=self._encoding)
def _check_undef_syms(self):
# Prints warnings for all references to undefined symbols within the
# Kconfig files
def is_num(s):
# Returns True if the string 's' looks like a number.
#
# Internally, all operands in Kconfig are symbols, only undefined symbols
# (which numbers usually are) get their name as their value.
#
# Only hex numbers that start with 0x/0X are classified as numbers.
# Otherwise, symbols whose names happen to contain only the letters A-F
# would trigger false positives.
try:
int(s)
except ValueError:
if not s.startswith(("0x", "0X")):
return False
try:
int(s, 16)
except ValueError:
return False
return True
for sym in (self.syms.viewvalues if _IS_PY2 else self.syms.values)():
# - sym.nodes empty means the symbol is undefined (has no
# definition locations)
#
# - Due to Kconfig internals, numbers show up as undefined Kconfig
# symbols, but shouldn't be flagged
#
# - The MODULES symbol always exists
if not sym.nodes and not is_num(sym.name) and \
sym.name != "MODULES":
msg = "undefined symbol {}:".format(sym.name)
for node in self.node_iter():
if sym in node.referenced:
msg += "\n\n- Referenced at {}:{}:\n\n{}" \
.format(node.filename, node.linenr, node)
self._warn(msg)
def _warn(self, msg, filename=None, linenr=None):
# For printing general warnings
if not self.warn:
return
msg = "warning: " + msg
if filename is not None:
msg = "{}:{}: {}".format(filename, linenr, msg)
self.warnings.append(msg)
if self.warn_to_stderr:
sys.stderr.write(msg + "\n")
class Symbol(object):
    """
    Represents a configuration symbol:
      (menu)config FOO
          ...
    The following attributes are available. They should be viewed as read-only,
    and some are implemented through @property magic (but are still efficient
    to access due to internal caching).
    Note: Prompts, help texts, and locations are stored in the Symbol's
    MenuNode(s) rather than in the Symbol itself. Check the MenuNode class and
    the Symbol.nodes attribute. This organization matches the C tools.
    name:
      The name of the symbol, e.g. "FOO" for 'config FOO'.
    type:
      The type of the symbol. One of BOOL, TRISTATE, STRING, INT, HEX, UNKNOWN.
      UNKNOWN is for undefined symbols, (non-special) constant symbols, and
      symbols defined without a type.
      When running without modules (MODULES having the value n), TRISTATE
      symbols magically change type to BOOL. This also happens for symbols
      within choices in "y" mode. This matches the C tools, and makes sense for
      menuconfig-like functionality.
    orig_type:
      The type as given in the Kconfig file, without any magic applied. Used
      when printing the symbol.
    tri_value:
      The tristate value of the symbol as an integer. One of 0, 1, 2,
      representing n, m, y. Always 0 (n) for non-bool/tristate symbols.
      This is the symbol value that's used outside of relation expressions
      (A, !A, A && B, A || B).
    str_value:
      The value of the symbol as a string. Gives the value for string/int/hex
      symbols. For bool/tristate symbols, gives "n", "m", or "y".
      This is the symbol value that's used in relational expressions
      (A = B, A != B, etc.)
      Gotcha: For int/hex symbols, the exact format of the value is often
      preserved (e.g. when writing a .config file), hence why you can't get it
      directly as an int. Do int(int_sym.str_value) or
      int(hex_sym.str_value, 16) to get the integer value.
    user_value:
      The user value of the symbol. None if no user value has been assigned
      (via Kconfig.load_config() or Symbol.set_value()).
      Holds 0, 1, or 2 for bool/tristate symbols, and a string for the other
      symbol types.
      WARNING: Do not assign directly to this. It will break things. Use
      Symbol.set_value().
    assignable:
      A tuple containing the tristate user values that can currently be
      assigned to the symbol (that would be respected), ordered from lowest (0,
      representing n) to highest (2, representing y). This corresponds to the
      selections available in the menuconfig interface. The set of assignable
      values is calculated from the symbol's visibility and selects/implies.
      Returns the empty set for non-bool/tristate symbols and for symbols with
      visibility n. The other possible values are (0, 2), (0, 1, 2), (1, 2),
      (1,), and (2,). A (1,) or (2,) result means the symbol is visible but
      "locked" to m or y through a select, perhaps in combination with the
      visibility. menuconfig represents this as -M- and -*-, respectively.
      For string/hex/int symbols, check if Symbol.visibility is non-0 (non-n)
      instead to determine if the value can be changed.
      Some handy 'assignable' idioms:
        # Is 'sym' an assignable (visible) bool/tristate symbol?
        if sym.assignable:
            # What's the highest value it can be assigned? [-1] in Python
            # gives the last element.
            sym_high = sym.assignable[-1]
            # The lowest?
            sym_low = sym.assignable[0]
            # Can the symbol be set to at least m?
            if sym.assignable[-1] >= 1:
                ...
        # Can the symbol be set to m?
        if 1 in sym.assignable:
            ...
    visibility:
      The visibility of the symbol. One of 0, 1, 2, representing n, m, y. See
      the module documentation for an overview of symbol values and visibility.
    config_string:
      The .config assignment string that would get written out for the symbol
      by Kconfig.write_config(). Returns the empty string if no .config
      assignment would get written out.
      In general, visible symbols, symbols with (active) defaults, and selected
      symbols get written out. This includes all non-n-valued bool/tristate
      symbols, and all visible string/int/hex symbols.
      Symbols with the (no longer needed) 'option env=...' option generate no
      configuration output, and neither does the special
      'option defconfig_list' symbol.
      Tip: This field is useful when generating custom configuration output,
      even for non-.config-like formats. To write just the symbols that would
      get written out to .config files, do this:
        if sym.config_string:
            *Write symbol, e.g. by looking sym.str_value*
      This is a superset of the symbols written out by write_autoconf().
      That function skips all n-valued symbols.
      There usually won't be any great harm in just writing all symbols either,
      though you might get some special symbols and possibly some "redundant"
      n-valued symbol entries in there.
    name_and_loc:
      Holds a string like
        "MY_SYMBOL (defined at foo/Kconfig:12, bar/Kconfig:14)"
      , giving the name of the symbol and its definition location(s).
      If the symbol is undefined, the location is given as "(undefined)".
    nodes:
      A list of MenuNodes for this symbol. Will contain a single MenuNode for
      most symbols. Undefined and constant symbols have an empty nodes list.
      Symbols defined in multiple locations get one node for each location.
    choice:
      Holds the parent Choice for choice symbols, and None for non-choice
      symbols. Doubles as a flag for whether a symbol is a choice symbol.
    defaults:
      List of (default, cond) tuples for the symbol's 'default' properties. For
      example, 'default A && B if C || D' is represented as
      ((AND, A, B), (OR, C, D)). If no condition was given, 'cond' is
      self.kconfig.y.
      Note that 'depends on' and parent dependencies are propagated to
      'default' conditions.
    selects:
      List of (symbol, cond) tuples for the symbol's 'select' properties. For
      example, 'select A if B && C' is represented as (A, (AND, B, C)). If no
      condition was given, 'cond' is self.kconfig.y.
      Note that 'depends on' and parent dependencies are propagated to 'select'
      conditions.
    implies:
      Like 'selects', for imply.
    ranges:
      List of (low, high, cond) tuples for the symbol's 'range' properties. For
      example, 'range 1 2 if A' is represented as (1, 2, A). If there is no
      condition, 'cond' is self.kconfig.y.
      Note that 'depends on' and parent dependencies are propagated to 'range'
      conditions.
      Gotcha: 1 and 2 above will be represented as (undefined) Symbols rather
      than plain integers. Undefined symbols get their name as their string
      value, so this works out. The C tools work the same way.
    orig_defaults:
    orig_selects:
    orig_implies:
    orig_ranges:
      See the corresponding attributes on the MenuNode class.
    rev_dep:
      Reverse dependency expression from other symbols selecting this symbol.
      Multiple selections get ORed together. A condition on a select is ANDed
      with the selecting symbol.
      For example, if A has 'select FOO' and B has 'select FOO if C', then
      FOO's rev_dep will be (OR, A, (AND, B, C)).
    weak_rev_dep:
      Like rev_dep, for imply.
    direct_dep:
      The direct ('depends on') dependencies for the symbol, or self.kconfig.y
      if there are no direct dependencies.
      This attribute includes any dependencies from surrounding menus and ifs.
      Those get propagated to the direct dependencies, and the resulting direct
      dependencies in turn get propagated to the conditions of all properties.
      If the symbol is defined in multiple locations, the dependencies from the
      different locations get ORed together.
    referenced:
      A set() with all symbols and choices referenced in the properties and
      property conditions of the symbol.
      Also includes dependencies from surrounding menus and ifs, because those
      get propagated to the symbol (see the 'Intro to symbol values' section in
      the module docstring).
      Choices appear in the dependencies of choice symbols.
      For the following definitions, only B and not C appears in A's
      'referenced'. To get transitive references, you'll have to recursively
      expand 'references' until no new items appear.
        config A
                bool
                depends on B
        config B
                bool
                depends on C
        config C
                bool
      See the Symbol.direct_dep attribute if you're only interested in the
      direct dependencies of the symbol (its 'depends on'). You can extract the
      symbols in it with the global expr_items() function.
    env_var:
      If the Symbol has an 'option env="FOO"' option, this contains the name
      ("FOO") of the environment variable. None for symbols without an
      'option env'.
      'option env="FOO"' acts like a 'default' property whose value is the
      value of $FOO.
      Symbols with 'option env' are never written out to .config files, even if
      they are visible. env_var corresponds to a flag called SYMBOL_AUTO in the
      C implementation.
    is_allnoconfig_y:
      True if the symbol has 'option allnoconfig_y' set on it. This has no
      effect internally (except when printing symbols), but can be checked by
      scripts.
    is_constant:
      True if the symbol is a constant (quoted) symbol.
    kconfig:
      The Kconfig instance this symbol is from.
    """
    # __slots__ keeps per-instance memory down -- a configuration can contain
    # tens of thousands of Symbol instances.
    #
    # NOTE(review): 'configdefaults' and '_old_val' are declared here but not
    # initialized in __init__, so they must be assigned elsewhere before
    # first use -- TODO confirm where (not visible in this chunk).
    __slots__ = (
        "_cached_assignable",
        "_cached_str_val",
        "_cached_tri_val",
        "_cached_vis",
        "_dependents",
        "_old_val",
        "_visited",
        "_was_set",
        "_write_to_conf",
        "choice",
        "defaults",
        "configdefaults",
        "direct_dep",
        "env_var",
        "implies",
        "is_allnoconfig_y",
        "is_constant",
        "kconfig",
        "name",
        "nodes",
        "orig_type",
        "ranges",
        "rev_dep",
        "selects",
        "user_value",
        "weak_rev_dep",
    )
    #
    # Public interface
    #
    @property
    def type(self):
        """
        See the class documentation.
        """
        # TRISTATE promotes to BOOL when the symbol is in a y-mode choice or
        # when modules are disabled (MODULES is n)
        if self.orig_type is TRISTATE and \
           (self.choice and self.choice.tri_value == 2 or
            not self.kconfig.modules.tri_value):
            return BOOL
        return self.orig_type
    @property
    def str_value(self):
        """
        See the class documentation.
        """
        if self._cached_str_val is not None:
            return self._cached_str_val
        if self.orig_type in _BOOL_TRISTATE:
            # Also calculates the visibility, so invalidation safe
            self._cached_str_val = TRI_TO_STR[self.tri_value]
            return self._cached_str_val
        # As a quirk of Kconfig, undefined symbols get their name as their
        # string value. This is why things like "FOO = bar" work for seeing if
        # FOO has the value "bar".
        if not self.orig_type:  # UNKNOWN
            self._cached_str_val = self.name
            return self.name
        val = ""
        # Warning: See Symbol._rec_invalidate(), and note that this is a hidden
        # function call (property magic)
        vis = self.visibility
        self._write_to_conf = vis != 0
        if self.orig_type in _INT_HEX:
            # The C implementation checks the user value against the range in a
            # separate code path (post-processing after loading a .config).
            # Checking all values here instead makes more sense for us. It
            # requires that we check for a range first.
            base = _TYPE_TO_BASE[self.orig_type]
            # Check if a range is in effect
            for low_expr, high_expr, cond in self.ranges:
                if expr_value(cond):
                    has_active_range = True
                    # The zeros are from the C implementation running strtoll()
                    # on empty strings
                    low = int(low_expr.str_value, base) if \
                        _is_base_n(low_expr.str_value, base) else 0
                    high = int(high_expr.str_value, base) if \
                        _is_base_n(high_expr.str_value, base) else 0
                    break
            else:
                has_active_range = False
            # Defaults are used if the symbol is invisible, lacks a user value,
            # or has an out-of-range user value
            use_defaults = True
            if vis and self.user_value:
                user_val = int(self.user_value, base)
                if has_active_range and not low <= user_val <= high:
                    num2str = str if base == 10 else hex
                    self.kconfig._warn(
                        "user value {} on the {} symbol {} ignored due to "
                        "being outside the active range ([{}, {}]) -- falling "
                        "back on defaults"
                        .format(num2str(user_val), TYPE_TO_STR[self.orig_type],
                                self.name_and_loc,
                                num2str(low), num2str(high)))
                else:
                    # If the user value is well-formed and satisfies range
                    # constraints, it is stored in exactly the same form as
                    # specified in the assignment (with or without "0x", etc.)
                    val = self.user_value
                    use_defaults = False
            if use_defaults:
                # No user value or invalid user value. Look at defaults.
                # Used to implement the warning below
                has_default = False
                for sym, cond in self.defaults:
                    if expr_value(cond):
                        has_default = self._write_to_conf = True
                        val = sym.str_value
                        if _is_base_n(val, base):
                            val_num = int(val, base)
                        else:
                            val_num = 0  # strtoll() on empty string
                        break
                else:
                    val_num = 0  # strtoll() on empty string
                # This clamping procedure runs even if there's no default
                if has_active_range:
                    clamp = None
                    if val_num < low:
                        clamp = low
                    elif val_num > high:
                        clamp = high
                    if clamp is not None:
                        # The value is rewritten to a standard form if it is
                        # clamped
                        val = str(clamp) \
                              if self.orig_type is INT else \
                              hex(clamp)
                        if has_default:
                            num2str = str if base == 10 else hex
                            self.kconfig._warn(
                                "default value {} on {} clamped to {} due to "
                                "being outside the active range ([{}, {}])"
                                .format(val_num, self.name_and_loc,
                                        num2str(clamp), num2str(low),
                                        num2str(high)))
        elif self.orig_type is STRING:
            if vis and self.user_value is not None:
                # If the symbol is visible and has a user value, use that
                val = self.user_value
            else:
                # Otherwise, look at defaults
                for sym, cond in self.defaults:
                    if expr_value(cond):
                        val = sym.str_value
                        self._write_to_conf = True
                        break
        # env_var corresponds to SYMBOL_AUTO in the C implementation, and is
        # also set on the defconfig_list symbol there. Test for the
        # defconfig_list symbol explicitly instead here, to avoid a nonsensical
        # env_var setting and the defconfig_list symbol being printed
        # incorrectly. This code is pretty cold anyway.
        if self.env_var is not None or self is self.kconfig.defconfig_list:
            self._write_to_conf = False
        self._cached_str_val = val
        return val
    @property
    def tri_value(self):
        """
        See the class documentation.
        """
        if self._cached_tri_val is not None:
            return self._cached_tri_val
        if self.orig_type not in _BOOL_TRISTATE:
            if self.orig_type:  # != UNKNOWN
                # Would take some work to give the location here
                self.kconfig._warn(
                    "The {} symbol {} is being evaluated in a logical context "
                    "somewhere. It will always evaluate to n."
                    .format(TYPE_TO_STR[self.orig_type], self.name_and_loc))
            self._cached_tri_val = 0
            return 0
        # Warning: See Symbol._rec_invalidate(), and note that this is a hidden
        # function call (property magic)
        vis = self.visibility
        self._write_to_conf = vis != 0
        val = 0
        if not self.choice:
            # Non-choice symbol
            if vis and self.user_value is not None:
                # If the symbol is visible and has a user value, use that
                val = min(self.user_value, vis)
            else:
                # Otherwise, look at defaults and weak reverse dependencies
                # (implies)
                for default, cond in self.defaults:
                    dep_val = expr_value(cond)
                    if dep_val:
                        val = min(expr_value(default), dep_val)
                        if val:
                            self._write_to_conf = True
                        break
                # Weak reverse dependencies are only considered if our
                # direct dependencies are met
                dep_val = expr_value(self.weak_rev_dep)
                if dep_val and expr_value(self.direct_dep):
                    val = max(dep_val, val)
                    self._write_to_conf = True
            # Reverse (select-related) dependencies take precedence
            dep_val = expr_value(self.rev_dep)
            if dep_val:
                if expr_value(self.direct_dep) < dep_val:
                    self._warn_select_unsatisfied_deps()
                val = max(dep_val, val)
                self._write_to_conf = True
            # m is promoted to y for (1) bool symbols and (2) symbols with a
            # weak_rev_dep (from imply) of y
            if val == 1 and \
               (self.type is BOOL or expr_value(self.weak_rev_dep) == 2):
                val = 2
        elif vis == 2:
            # Visible choice symbol in y-mode choice. The choice mode limits
            # the visibility of choice symbols, so it's sufficient to just
            # check the visibility of the choice symbols themselves.
            val = 2 if self.choice.selection is self else 0
        elif vis and self.user_value:
            # Visible choice symbol in m-mode choice, with set non-0 user value
            val = 1
        self._cached_tri_val = val
        return val
    @property
    def assignable(self):
        """
        See the class documentation.
        """
        if self._cached_assignable is None:
            self._cached_assignable = self._assignable()
        return self._cached_assignable
    @property
    def visibility(self):
        """
        See the class documentation.
        """
        if self._cached_vis is None:
            self._cached_vis = _visibility(self)
        return self._cached_vis
    @property
    def config_string(self):
        """
        See the class documentation.
        """
        # _write_to_conf is determined when the value is calculated. This is a
        # hidden function call due to property magic.
        val = self.str_value
        if not self._write_to_conf:
            return ""
        if self.orig_type in _BOOL_TRISTATE:
            return "{}{}={}\n" \
                   .format(self.kconfig.config_prefix, self.name, val) \
                   if val != "n" else \
                   "# {}{} is not set\n" \
                   .format(self.kconfig.config_prefix, self.name)
        if self.orig_type in _INT_HEX:
            return "{}{}={}\n" \
                   .format(self.kconfig.config_prefix, self.name, val)
        # sym.orig_type is STRING
        return '{}{}="{}"\n' \
               .format(self.kconfig.config_prefix, self.name, escape(val))
    @property
    def name_and_loc(self):
        """
        See the class documentation.
        """
        return self.name + " " + _locs(self)
    def set_value(self, value):
        """
        Sets the user value of the symbol.
        Equal in effect to assigning the value to the symbol within a .config
        file. For bool and tristate symbols, use the 'assignable' attribute to
        check which values can currently be assigned. Setting values outside
        'assignable' will cause Symbol.user_value to differ from
        Symbol.str/tri_value (be truncated down or up).
        Setting a choice symbol to 2 (y) sets Choice.user_selection to the
        choice symbol in addition to setting Symbol.user_value.
        Choice.user_selection is considered when the choice is in y mode (the
        "normal" mode).
        Other symbols that depend (possibly indirectly) on this symbol are
        automatically recalculated to reflect the assigned value.
        value:
          The user value to give to the symbol. For bool and tristate symbols,
          n/m/y can be specified either as 0/1/2 (the usual format for tristate
          values in Kconfiglib) or as one of the strings "n", "m", or "y". For
          other symbol types, pass a string.
          Note that the value for an int/hex symbol is passed as a string, e.g.
          "123" or "0x0123". The format of this string is preserved in the
          output.
          Values that are invalid for the type (such as "foo" or 1 (m) for a
          BOOL or "0x123" for an INT) are ignored and won't be stored in
          Symbol.user_value. Kconfiglib will print a warning by default for
          invalid assignments, and set_value() will return False.
        Returns True if the value is valid for the type of the symbol, and
        False otherwise. This only looks at the form of the value. For BOOL and
        TRISTATE symbols, check the Symbol.assignable attribute to see what
        values are currently in range and would actually be reflected in the
        value of the symbol. For other symbol types, check whether the
        visibility is non-n.
        """
        # Accept "n"/"m"/"y" as aliases for 0/1/2 on bool/tristate symbols
        if self.orig_type in _BOOL_TRISTATE and value in STR_TO_TRI:
            value = STR_TO_TRI[value]
        # If the new user value matches the old, nothing changes, and we can
        # avoid invalidating cached values.
        #
        # This optimization is skipped for choice symbols: Setting a choice
        # symbol's user value to y might change the state of the choice, so it
        # wouldn't be safe (symbol user values always match the values set in a
        # .config file or via set_value(), and are never implicitly updated).
        if value == self.user_value and not self.choice:
            self._was_set = True
            return True
        # Check if the value is valid for our type
        if not (self.orig_type is BOOL and value in (2, 0) or
                self.orig_type is TRISTATE and value in TRI_TO_STR or
                value.__class__ is str and
                (self.orig_type is STRING or
                 self.orig_type is INT and _is_base_n(value, 10) or
                 self.orig_type is HEX and _is_base_n(value, 16)
                                       and int(value, 16) >= 0)):
            # Display tristate values as n, m, y in the warning
            self.kconfig._warn(
                "the value {} is invalid for {}, which has type {} -- "
                "assignment ignored"
                .format(TRI_TO_STR[value] if value in TRI_TO_STR else
                        "'{}'".format(value),
                        self.name_and_loc, TYPE_TO_STR[self.orig_type]))
            return False
        self.user_value = value
        self._was_set = True
        if self.choice and value == 2:
            # Setting a choice symbol to y makes it the user selection of the
            # choice. Like for symbol user values, the user selection is not
            # guaranteed to match the actual selection of the choice, as
            # dependencies come into play.
            self.choice.user_selection = self
            self.choice._was_set = True
            self.choice._rec_invalidate()
        else:
            self._rec_invalidate_if_has_prompt()
        return True
    def unset_value(self):
        """
        Removes any user value from the symbol, as if the symbol had never
        gotten a user value via Kconfig.load_config() or Symbol.set_value().
        """
        if self.user_value is not None:
            self.user_value = None
            self._rec_invalidate_if_has_prompt()
    @property
    def referenced(self):
        """
        See the class documentation.
        """
        # Note: builds a fresh set on every access (no caching)
        return {item for node in self.nodes for item in node.referenced}
    @property
    def orig_defaults(self):
        """
        See the class documentation.
        """
        return [d for node in self.nodes for d in node.orig_defaults]
    @property
    def orig_selects(self):
        """
        See the class documentation.
        """
        return [s for node in self.nodes for s in node.orig_selects]
    @property
    def orig_implies(self):
        """
        See the class documentation.
        """
        return [i for node in self.nodes for i in node.orig_implies]
    @property
    def orig_ranges(self):
        """
        See the class documentation.
        """
        return [r for node in self.nodes for r in node.orig_ranges]
    def __repr__(self):
        """
        Returns a string with information about the symbol (including its name,
        value, visibility, and location(s)) when it is evaluated on e.g. the
        interactive Python prompt.
        """
        fields = ["symbol " + self.name, TYPE_TO_STR[self.type]]
        add = fields.append
        for node in self.nodes:
            if node.prompt:
                add('"{}"'.format(node.prompt[0]))
        # Only add quotes for non-bool/tristate symbols
        add("value " + (self.str_value if self.orig_type in _BOOL_TRISTATE
                        else '"{}"'.format(self.str_value)))
        if not self.is_constant:
            # These aren't helpful to show for constant symbols
            if self.user_value is not None:
                # Only add quotes for non-bool/tristate symbols
                add("user value " + (TRI_TO_STR[self.user_value]
                                     if self.orig_type in _BOOL_TRISTATE
                                     else '"{}"'.format(self.user_value)))
            add("visibility " + TRI_TO_STR[self.visibility])
            if self.choice:
                add("choice symbol")
            if self.is_allnoconfig_y:
                add("allnoconfig_y")
            if self is self.kconfig.defconfig_list:
                add("is the defconfig_list symbol")
            if self.env_var is not None:
                add("from environment variable " + self.env_var)
            if self is self.kconfig.modules:
                add("is the modules symbol")
            add("direct deps " + TRI_TO_STR[expr_value(self.direct_dep)])
        if self.nodes:
            for node in self.nodes:
                add("{}:{}".format(node.filename, node.linenr))
        else:
            add("constant" if self.is_constant else "undefined")
        return "<{}>".format(", ".join(fields))
    def __str__(self):
        """
        Returns a string representation of the symbol when it is printed.
        Matches the Kconfig format, with any parent dependencies propagated to
        the 'depends on' condition.
        The string is constructed by joining the strings returned by
        MenuNode.__str__() for each of the symbol's menu nodes, so symbols
        defined in multiple locations will return a string with all
        definitions.
        The returned string does not end in a newline. An empty string is
        returned for undefined and constant symbols.
        """
        return self.custom_str(standard_sc_expr_str)
    def custom_str(self, sc_expr_str_fn):
        """
        Works like Symbol.__str__(), but allows a custom format to be used for
        all symbol/choice references. See expr_str().
        """
        return "\n\n".join(node.custom_str(sc_expr_str_fn)
                           for node in self.nodes)
    #
    # Private methods
    #
    def __init__(self):
        """
        Symbol constructor -- not intended to be called directly by Kconfiglib
        clients.
        """
        # These attributes are always set on the instance from outside and
        # don't need defaults:
        #   kconfig
        #   direct_dep
        #   is_constant
        #   name
        #   rev_dep
        #   weak_rev_dep
        # - UNKNOWN == 0
        # - _visited is used during tree iteration and dep. loop detection
        self.orig_type = self._visited = 0
        self.nodes = []
        self.defaults = []
        self.selects = []
        self.implies = []
        self.ranges = []
        self.user_value = \
        self.choice = \
        self.env_var = \
        self._cached_str_val = self._cached_tri_val = self._cached_vis = \
        self._cached_assignable = None
        # _write_to_conf is calculated along with the value. If True, the
        # Symbol gets a .config entry.
        self.is_allnoconfig_y = \
        self._was_set = \
        self._write_to_conf = False
        # See Kconfig._build_dep()
        self._dependents = set()
    def _assignable(self):
        # Worker function for the 'assignable' attribute
        if self.orig_type not in _BOOL_TRISTATE:
            return ()
        # Warning: See Symbol._rec_invalidate(), and note that this is a hidden
        # function call (property magic)
        vis = self.visibility
        if not vis:
            return ()
        rev_dep_val = expr_value(self.rev_dep)
        if vis == 2:
            if self.choice:
                return (2,)
            if not rev_dep_val:
                if self.type is BOOL or expr_value(self.weak_rev_dep) == 2:
                    return (0, 2)
                return (0, 1, 2)
            if rev_dep_val == 2:
                return (2,)
            # rev_dep_val == 1
            if self.type is BOOL or expr_value(self.weak_rev_dep) == 2:
                return (2,)
            return (1, 2)
        # vis == 1
        # Must be a tristate here, because bool m visibility gets promoted to y
        if not rev_dep_val:
            return (0, 1) if expr_value(self.weak_rev_dep) != 2 else (0, 2)
        if rev_dep_val == 2:
            return (2,)
        # vis == rev_dep_val == 1
        return (1,)
    def _invalidate(self):
        # Marks the symbol as needing to be recalculated
        self._cached_str_val = self._cached_tri_val = self._cached_vis = \
            self._cached_assignable = None
    def _rec_invalidate(self):
        # Invalidates the symbol and all items that (possibly) depend on it
        if self is self.kconfig.modules:
            # Invalidating MODULES has wide-ranging effects
            self.kconfig._invalidate_all()
        else:
            self._invalidate()
            for item in self._dependents:
                # _cached_vis doubles as a flag that tells us whether 'item'
                # has cached values, because it's calculated as a side effect
                # of calculating all other (non-constant) cached values.
                #
                # If item._cached_vis is None, it means there can't be cached
                # values on other items that depend on 'item', because if there
                # were, some value on 'item' would have been calculated and
                # item._cached_vis set as a side effect. It's therefore safe to
                # stop the invalidation at symbols with _cached_vis None.
                #
                # This approach massively speeds up scripts that set a lot of
                # values, vs simply invalidating all possibly dependent symbols
                # (even when you already have a list of all the dependent
                # symbols, because some symbols get huge dependency trees).
                #
                # This gracefully handles dependency loops too, which is nice
                # for choices, where the choice depends on the choice symbols
                # and vice versa.
                if item._cached_vis is not None:
                    item._rec_invalidate()
    def _rec_invalidate_if_has_prompt(self):
        # Invalidates the symbol and its dependent symbols, but only if the
        # symbol has a prompt. User values never have an effect on promptless
        # symbols, so we skip invalidation for them as an optimization.
        #
        # This also prevents constant (quoted) symbols from being invalidated
        # if set_value() is called on them, which would make them lose their
        # value and break things.
        #
        # Prints a warning if the symbol has no prompt. In some contexts (e.g.
        # when loading a .config file) assignments to promptless symbols are
        # normal and expected, so the warning can be disabled.
        for node in self.nodes:
            if node.prompt:
                self._rec_invalidate()
                return
        if self.kconfig._warn_assign_no_prompt:
            self.kconfig._warn(self.name_and_loc + " has no prompt, meaning "
                               "user values have no effect on it")
    def _str_default(self):
        # write_min_config() helper function. Returns the value the symbol
        # would get from defaults if it didn't have a user value. Uses exactly
        # the same algorithm as the C implementation (though a bit cleaned up),
        # for compatibility.
        if self.orig_type in _BOOL_TRISTATE:
            val = 0
            # Defaults, selects, and implies do not affect choice symbols
            if not self.choice:
                for default, cond in self.defaults:
                    cond_val = expr_value(cond)
                    if cond_val:
                        val = min(expr_value(default), cond_val)
                        break
                val = max(expr_value(self.rev_dep),
                          expr_value(self.weak_rev_dep),
                          val)
                # Transpose mod to yes if type is bool (possibly due to modules
                # being disabled)
                if val == 1 and self.type is BOOL:
                    val = 2
            return TRI_TO_STR[val]
        if self.orig_type:  # STRING/INT/HEX
            for default, cond in self.defaults:
                if expr_value(cond):
                    return default.str_value
        return ""
    def _warn_select_unsatisfied_deps(self):
        # Helper for printing an informative warning when a symbol with
        # unsatisfied direct dependencies (dependencies from 'depends on', ifs,
        # and menus) is selected by some other symbol. Also warn if a symbol
        # whose direct dependencies evaluate to m is selected to y.
        msg = "{} has direct dependencies {} with value {}, but is " \
              "currently being {}-selected by the following symbols:" \
              .format(self.name_and_loc, expr_str(self.direct_dep),
                      TRI_TO_STR[expr_value(self.direct_dep)],
                      TRI_TO_STR[expr_value(self.rev_dep)])
        # The reverse dependencies from each select are ORed together
        for select in split_expr(self.rev_dep, OR):
            if expr_value(select) <= expr_value(self.direct_dep):
                # Only include selects that exceed the direct dependencies
                continue
            # - 'select A if B' turns into A && B
            # - 'select A' just turns into A
            #
            # In both cases, we can split on AND and pick the first operand
            selecting_sym = split_expr(select, AND)[0]
            msg += "\n - {}, with value {}, direct dependencies {} " \
                   "(value: {})" \
                   .format(selecting_sym.name_and_loc,
                           selecting_sym.str_value,
                           expr_str(selecting_sym.direct_dep),
                           TRI_TO_STR[expr_value(selecting_sym.direct_dep)])
            if select.__class__ is tuple:
                msg += ", and select condition {} (value: {})" \
                       .format(expr_str(select[2]),
                               TRI_TO_STR[expr_value(select[2])])
        self.kconfig._warn(msg)
class Choice(object):
    """
    Represents a choice statement:

      choice
          ...
      endchoice

    The following attributes are available on Choice instances. They should be
    treated as read-only, and some are implemented through @property magic (but
    are still efficient to access due to internal caching).

    Note: Prompts, help texts, and locations are stored in the Choice's
    MenuNode(s) rather than in the Choice itself. Check the MenuNode class and
    the Choice.nodes attribute. This organization matches the C tools.

    name:
      The name of the choice, e.g. "FOO" for 'choice FOO', or None if the
      Choice has no name.

    type:
      The type of the choice. One of BOOL, TRISTATE, UNKNOWN. UNKNOWN is for
      choices defined without a type where none of the contained symbols have a
      type either (otherwise the choice inherits the type of the first symbol
      defined with a type).

      When running without modules (CONFIG_MODULES=n), TRISTATE choices
      magically change type to BOOL. This matches the C tools, and makes sense
      for menuconfig-like functionality.

    orig_type:
      The type as given in the Kconfig file, without any magic applied. Used
      when printing the choice.

    tri_value:
      The tristate value (mode) of the choice. A choice can be in one of three
      modes:

        0 (n) - The choice is disabled and no symbols can be selected. For
                visible choices, this mode is only possible for choices with
                the 'optional' flag set (see kconfig-language.txt).

        1 (m) - Any number of choice symbols can be set to m, the rest will
                be n.

        2 (y) - One symbol will be y, the rest n.

      Only tristate choices can be in m mode. The visibility of the choice is
      an upper bound on the mode, and the mode in turn is an upper bound on the
      visibility of the choice symbols.

      To change the mode, use Choice.set_value().

      Implementation note:
        The C tools internally represent choices as a type of symbol, with
        special-casing in many code paths. This is why there is a lot of
        similarity to Symbol. The value (mode) of a choice is really just a
        normal symbol value, and an implicit reverse dependency forces its
        lower bound to m for visible non-optional choices (the reverse
        dependency is 'm && <visibility>').

        Symbols within choices get the choice propagated as a dependency to
        their properties. This turns the mode of the choice into an upper bound
        on e.g. the visibility of choice symbols, and explains the gotcha
        related to printing choice symbols mentioned in the module docstring.

        Kconfiglib uses a separate Choice class only because it makes the code
        and interface less confusing (especially in a user-facing interface).
        Corresponding attributes have the same name in the Symbol and Choice
        classes, for consistency and compatibility.

    str_value:
      Like choice.tri_value, but gives the value as one of the strings
      "n", "m", or "y"

    user_value:
      The value (mode) selected by the user through Choice.set_value(). Either
      0, 1, or 2, or None if the user hasn't selected a mode. See
      Symbol.user_value.

      WARNING: Do not assign directly to this. It will break things. Use
      Choice.set_value() instead.

    assignable:
      See the symbol class documentation. Gives the assignable values (modes).

    selection:
      The Symbol instance of the currently selected symbol. None if the Choice
      is not in y mode or has no selected symbol (due to unsatisfied
      dependencies on choice symbols).

      WARNING: Do not assign directly to this. It will break things. Call
      sym.set_value(2) on the choice symbol you want to select instead.

    user_selection:
      The symbol selected by the user (by setting it to y). Ignored if the
      choice is not in y mode, but still remembered so that the choice "snaps
      back" to the user selection if the mode is changed back to y. This might
      differ from 'selection' due to unsatisfied dependencies.

      WARNING: Do not assign directly to this. It will break things. Call
      sym.set_value(2) on the choice symbol to be selected instead.

    visibility:
      See the Symbol class documentation. Acts on the value (mode).

    name_and_loc:
      Holds a string like

        "<choice MY_CHOICE> (defined at foo/Kconfig:12)"

      , giving the name of the choice and its definition location(s). If the
      choice has no name (isn't defined with 'choice MY_CHOICE'), then it will
      be shown as "<choice>" before the list of locations (always a single one
      in that case).

    syms:
      List of symbols contained in the choice.

      Obscure gotcha: If a symbol depends on the previous symbol within a
      choice so that an implicit menu is created, it won't be a choice symbol,
      and won't be included in 'syms'.

    nodes:
      A list of MenuNodes for this choice. In practice, the list will probably
      always contain a single MenuNode, but it is possible to give a choice a
      name and define it in multiple locations.

    defaults:
      List of (symbol, cond) tuples for the choice's 'defaults' properties. For
      example, 'default A if B && C' is represented as (A, (AND, B, C)). If
      there is no condition, 'cond' is self.kconfig.y.

      Note that 'depends on' and parent dependencies are propagated to
      'default' conditions.

    orig_defaults:
      See the corresponding attribute on the MenuNode class.

    direct_dep:
      See Symbol.direct_dep.

    referenced:
      A set() with all symbols referenced in the properties and property
      conditions of the choice.

      Also includes dependencies from surrounding menus and ifs, because those
      get propagated to the choice (see the 'Intro to symbol values' section in
      the module docstring).

    is_optional:
      True if the choice has the 'optional' flag set on it and can be in
      n mode.

    kconfig:
      The Kconfig instance this choice is from.
    """
    __slots__ = (
        "_cached_assignable",
        "_cached_selection",
        "_cached_vis",
        "_dependents",
        "_visited",
        "_was_set",
        "defaults",
        "direct_dep",
        "is_constant",
        "is_optional",
        "kconfig",
        "name",
        "nodes",
        "orig_type",
        "syms",
        "user_selection",
        "user_value",
    )

    #
    # Public interface
    #

    @property
    def type(self):
        """
        Returns the type of the choice. See Symbol.type.
        """
        # TRISTATE choices pose as BOOL when modules are disabled, matching
        # the C tools (see the class docstring)
        if self.orig_type is TRISTATE and not self.kconfig.modules.tri_value:
            return BOOL
        return self.orig_type

    @property
    def str_value(self):
        """
        See the class documentation.
        """
        return TRI_TO_STR[self.tri_value]

    @property
    def tri_value(self):
        """
        See the class documentation.
        """
        # This emulates a reverse dependency of 'm && visibility' for
        # non-optional choices, which is how the C implementation does it
        val = 0 if self.is_optional else 1

        if self.user_value is not None:
            val = max(val, self.user_value)

        # Warning: See Symbol._rec_invalidate(), and note that this is a hidden
        # function call (property magic)
        val = min(val, self.visibility)

        # Promote m to y for boolean choices
        return 2 if val == 1 and self.type is BOOL else val

    @property
    def assignable(self):
        """
        See the class documentation.
        """
        # Cached; cleared by _invalidate()
        if self._cached_assignable is None:
            self._cached_assignable = self._assignable()
        return self._cached_assignable

    @property
    def visibility(self):
        """
        See the class documentation.
        """
        # Cached; cleared by _invalidate()
        if self._cached_vis is None:
            self._cached_vis = _visibility(self)
        return self._cached_vis

    @property
    def name_and_loc(self):
        """
        See the class documentation.
        """
        # Reuse the expression format, which is '<choice (name, if any)>'.
        return standard_sc_expr_str(self) + " " + _locs(self)

    @property
    def selection(self):
        """
        See the class documentation.
        """
        # Cached; cleared by _invalidate(). A separate sentinel is needed
        # because None is a valid cached result here.
        if self._cached_selection is _NO_CACHED_SELECTION:
            self._cached_selection = self._selection()
        return self._cached_selection

    def set_value(self, value):
        """
        Sets the user value (mode) of the choice. Like for Symbol.set_value(),
        the visibility might truncate the value. Choices without the 'optional'
        attribute (is_optional) can never be in n mode, but 0/"n" is still
        accepted since it's not a malformed value (though it will have no
        effect).

        Returns True if the value is valid for the type of the choice, and
        False otherwise. This only looks at the form of the value. Check the
        Choice.assignable attribute to see what values are currently in range
        and would actually be reflected in the mode of the choice.
        """
        # Accept the strings "n", "m", "y" as aliases for 0, 1, 2
        if value in STR_TO_TRI:
            value = STR_TO_TRI[value]

        if value == self.user_value:
            # We know the value must be valid if it was successfully set
            # previously
            self._was_set = True
            return True

        if not (self.orig_type is BOOL and value in (2, 0) or
                self.orig_type is TRISTATE and value in TRI_TO_STR):
            # Display tristate values as n, m, y in the warning
            self.kconfig._warn(
                "the value {} is invalid for {}, which has type {} -- "
                "assignment ignored"
                .format(TRI_TO_STR[value] if value in TRI_TO_STR else
                            "'{}'".format(value),
                        self.name_and_loc, TYPE_TO_STR[self.orig_type]))

            return False

        self.user_value = value
        self._was_set = True
        self._rec_invalidate()

        return True

    def unset_value(self):
        """
        Resets the user value (mode) and user selection of the Choice, as if
        the user had never touched the mode or any of the choice symbols.
        """
        if self.user_value is not None or self.user_selection:
            self.user_value = self.user_selection = None
            self._rec_invalidate()

    @property
    def referenced(self):
        """
        See the class documentation.
        """
        return {item for node in self.nodes for item in node.referenced}

    @property
    def orig_defaults(self):
        """
        See the class documentation.
        """
        return [d for node in self.nodes for d in node.orig_defaults]

    def __repr__(self):
        """
        Returns a string with information about the choice when it is evaluated
        on e.g. the interactive Python prompt.
        """
        fields = ["choice " + self.name if self.name else "choice",
                  TYPE_TO_STR[self.type]]
        add = fields.append

        for node in self.nodes:
            if node.prompt:
                add('"{}"'.format(node.prompt[0]))

        add("mode " + self.str_value)

        if self.user_value is not None:
            add('user mode {}'.format(TRI_TO_STR[self.user_value]))

        if self.selection:
            add("{} selected".format(self.selection.name))

        if self.user_selection:
            user_sel_str = "{} selected by user" \
                           .format(self.user_selection.name)

            if self.selection is not self.user_selection:
                user_sel_str += " (overridden)"

            add(user_sel_str)

        add("visibility " + TRI_TO_STR[self.visibility])

        if self.is_optional:
            add("optional")

        for node in self.nodes:
            add("{}:{}".format(node.filename, node.linenr))

        return "<{}>".format(", ".join(fields))

    def __str__(self):
        """
        Returns a string representation of the choice when it is printed.
        Matches the Kconfig format (though without the contained choice
        symbols), with any parent dependencies propagated to the 'depends on'
        condition.

        The returned string does not end in a newline.

        See Symbol.__str__() as well.
        """
        return self.custom_str(standard_sc_expr_str)

    def custom_str(self, sc_expr_str_fn):
        """
        Works like Choice.__str__(), but allows a custom format to be used for
        all symbol/choice references. See expr_str().
        """
        return "\n\n".join(node.custom_str(sc_expr_str_fn)
                           for node in self.nodes)

    #
    # Private methods
    #

    def __init__(self):
        """
        Choice constructor -- not intended to be called directly by Kconfiglib
        clients.
        """
        # These attributes are always set on the instance from outside and
        # don't need defaults:
        #   direct_dep
        #   kconfig

        # - UNKNOWN == 0
        # - _visited is used during dep. loop detection
        self.orig_type = self._visited = 0

        self.nodes = []
        self.syms = []
        self.defaults = []

        self.name = \
            self.user_value = self.user_selection = \
            self._cached_vis = self._cached_assignable = None

        self._cached_selection = _NO_CACHED_SELECTION

        # is_constant is checked by _depend_on(). Just set it to avoid having
        # to special-case choices.
        self.is_constant = self.is_optional = False

        # See Kconfig._build_dep()
        self._dependents = set()

    def _assignable(self):
        # Worker function for the 'assignable' attribute

        # Warning: See Symbol._rec_invalidate(), and note that this is a hidden
        # function call (property magic)
        vis = self.visibility

        if not vis:
            return ()

        if vis == 2:
            if not self.is_optional:
                return (2,) if self.type is BOOL else (1, 2)
            return (0, 2) if self.type is BOOL else (0, 1, 2)

        # vis == 1
        return (0, 1) if self.is_optional else (1,)

    def _selection(self):
        # Worker function for the 'selection' attribute

        # Warning: See Symbol._rec_invalidate(), and note that this is a hidden
        # function call (property magic)
        if self.tri_value != 2:
            # Not in y mode, so no selection
            return None

        # Use the user selection if it's visible
        if self.user_selection and self.user_selection.visibility:
            return self.user_selection

        # Otherwise, check if we have a default
        return self._selection_from_defaults()

    def _selection_from_defaults(self):
        # Check if we have a default
        for sym, cond in self.defaults:
            # The default symbol must be visible too
            if expr_value(cond) and sym.visibility:
                return sym

        # Otherwise, pick the first visible symbol, if any
        for sym in self.syms:
            if sym.visibility:
                return sym

        # Couldn't find a selection
        return None

    def _invalidate(self):
        # Drops all cached values so they get recalculated on next access
        self._cached_vis = self._cached_assignable = None
        self._cached_selection = _NO_CACHED_SELECTION

    def _rec_invalidate(self):
        # See Symbol._rec_invalidate()

        self._invalidate()

        for item in self._dependents:
            if item._cached_vis is not None:
                item._rec_invalidate()
class MenuNode(object):
    """
    Represents a menu node in the configuration. This corresponds to an entry
    in e.g. the 'make menuconfig' interface, though non-visible choices, menus,
    and comments also get menu nodes. If a symbol or choice is defined in
    multiple locations, it gets one menu node for each location.

    The top-level menu node, corresponding to the implicit top-level menu, is
    available in Kconfig.top_node.

    The menu nodes for a Symbol or Choice can be found in the
    Symbol/Choice.nodes attribute. Menus and comments are represented as plain
    menu nodes, with their text stored in the prompt attribute (prompt[0]).
    This mirrors the C implementation.

    The following attributes are available on MenuNode instances. They should
    be viewed as read-only.

    item:
      Either a Symbol, a Choice, or one of the constants MENU and COMMENT.
      Menus and comments are represented as plain menu nodes. Ifs are collapsed
      (matching the C implementation) and do not appear in the final menu tree.

    next:
      The following menu node. None if there is no following node.

    list:
      The first child menu node. None if there are no children.

      Choices and menus naturally have children, but Symbols can also have
      children because of menus created automatically from dependencies (see
      kconfig-language.txt).

    parent:
      The parent menu node. None if there is no parent.

    prompt:
      A (string, cond) tuple with the prompt for the menu node and its
      conditional expression (which is self.kconfig.y if there is no
      condition). None if there is no prompt.

      For symbols and choices, the prompt is stored in the MenuNode rather than
      the Symbol or Choice instance. For menus and comments, the prompt holds
      the text.

    defaults:
      The 'default' properties for this particular menu node. See
      symbol.defaults.

      When evaluating defaults, you should use Symbol/Choice.defaults instead,
      as it include properties from all menu nodes (a symbol/choice can have
      multiple definition locations/menu nodes). MenuNode.defaults is meant for
      documentation generation.

    selects:
      Like MenuNode.defaults, for selects.

    implies:
      Like MenuNode.defaults, for implies.

    ranges:
      Like MenuNode.defaults, for ranges.

    orig_prompt:
    orig_defaults:
    orig_selects:
    orig_implies:
    orig_ranges:
      These work the like the corresponding attributes without orig_*, but omit
      any dependencies propagated from 'depends on' and surrounding 'if's (the
      direct dependencies, stored in MenuNode.dep).

      One use for this is generating less cluttered documentation, by only
      showing the direct dependencies in one place.

    help:
      The help text for the menu node for Symbols and Choices. None if there is
      no help text. Always stored in the node rather than the Symbol or Choice.
      It is possible to have a separate help text at each location if a symbol
      is defined in multiple locations.

      Trailing whitespace (including a final newline) is stripped from the help
      text. This was not the case before Kconfiglib 10.21.0, where the format
      was undocumented.

    dep:
      The direct ('depends on') dependencies for the menu node, or
      self.kconfig.y if there are no direct dependencies.

      This attribute includes any dependencies from surrounding menus and ifs.
      Those get propagated to the direct dependencies, and the resulting direct
      dependencies in turn get propagated to the conditions of all properties.

      If a symbol or choice is defined in multiple locations, only the
      properties defined at a particular location get the corresponding
      MenuNode.dep dependencies propagated to them.

    visibility:
      The 'visible if' dependencies for the menu node (which must represent a
      menu), or self.kconfig.y if there are no 'visible if' dependencies.
      'visible if' dependencies are recursively propagated to the prompts of
      symbols and choices within the menu.

    referenced:
      A set() with all symbols and choices referenced in the properties and
      property conditions of the menu node.

      Also includes dependencies inherited from surrounding menus and ifs.
      Choices appear in the dependencies of choice symbols.

    is_menuconfig:
      Set to True if the children of the menu node should be displayed in a
      separate menu. This is the case for the following items:

        - Menus (node.item == MENU)

        - Choices

        - Symbols defined with the 'menuconfig' keyword. The children come from
          implicitly created submenus, and should be displayed in a separate
          menu rather than being indented.

      'is_menuconfig' is just a hint on how to display the menu node. It's
      ignored internally by Kconfiglib, except when printing symbols.

    filename/linenr:
      The location where the menu node appears. The filename is relative to
      $srctree (or to the current directory if $srctree isn't set), except
      absolute paths are used for paths outside $srctree.

    include_path:
      A tuple of (filename, linenr) tuples, giving the locations of the
      'source' statements via which the Kconfig file containing this menu node
      was included. The first element is the location of the 'source' statement
      in the top-level Kconfig file passed to Kconfig.__init__(), etc.

      Note that the Kconfig file of the menu node itself isn't included. Check
      'filename' and 'linenr' for that.

    kconfig:
      The Kconfig instance the menu node is from.
    """
    __slots__ = (
        "dep",
        "filename",
        "help",
        "include_path",
        "is_menuconfig",
        "is_configdefault",
        "item",
        "kconfig",
        "linenr",
        "list",
        "next",
        "parent",
        "prompt",
        "visibility",

        # Properties
        "defaults",
        "selects",
        "implies",
        "ranges",
    )

    def __init__(self):
        # Properties defined on this particular menu node. A local 'depends on'
        # only applies to these, in case a symbol is defined in multiple
        # locations.
        self.defaults = []
        self.selects = []
        self.implies = []
        self.ranges = []

    @property
    def orig_prompt(self):
        """
        See the class documentation.
        """
        if not self.prompt:
            return None
        return (self.prompt[0], self._strip_dep(self.prompt[1]))

    @property
    def orig_defaults(self):
        """
        See the class documentation.
        """
        return [(default, self._strip_dep(cond))
                for default, cond in self.defaults]

    @property
    def orig_selects(self):
        """
        See the class documentation.
        """
        return [(select, self._strip_dep(cond))
                for select, cond in self.selects]

    @property
    def orig_implies(self):
        """
        See the class documentation.
        """
        return [(imply, self._strip_dep(cond))
                for imply, cond in self.implies]

    @property
    def orig_ranges(self):
        """
        See the class documentation.
        """
        return [(low, high, self._strip_dep(cond))
                for low, high, cond in self.ranges]

    @property
    def referenced(self):
        """
        See the class documentation.
        """
        # self.dep is included to catch dependencies from a lone 'depends on'
        # when there are no properties to propagate it to
        res = expr_items(self.dep)

        if self.prompt:
            res |= expr_items(self.prompt[1])

        if self.item is MENU:
            res |= expr_items(self.visibility)

        for value, cond in self.defaults:
            res |= expr_items(value)
            res |= expr_items(cond)

        for value, cond in self.selects:
            res.add(value)
            res |= expr_items(cond)

        for value, cond in self.implies:
            res.add(value)
            res |= expr_items(cond)

        for low, high, cond in self.ranges:
            res.add(low)
            res.add(high)
            res |= expr_items(cond)

        return res

    def __repr__(self):
        """
        Returns a string with information about the menu node when it is
        evaluated on e.g. the interactive Python prompt.
        """
        fields = []
        add = fields.append

        if self.item.__class__ is Symbol:
            add("menu node for symbol " + self.item.name)

        elif self.item.__class__ is Choice:
            s = "menu node for choice"
            if self.item.name is not None:
                s += " " + self.item.name
            add(s)

        elif self.item is MENU:
            add("menu node for menu")

        else:  # self.item is COMMENT
            add("menu node for comment")

        if self.prompt:
            add('prompt "{}" (visibility {})'.format(
                self.prompt[0], TRI_TO_STR[expr_value(self.prompt[1])]))

        if self.item.__class__ is Symbol and self.is_menuconfig:
            add("is menuconfig")

        add("deps " + TRI_TO_STR[expr_value(self.dep)])

        if self.item is MENU:
            add("'visible if' deps " + TRI_TO_STR[expr_value(self.visibility)])

        if self.item.__class__ in _SYMBOL_CHOICE and self.help is not None:
            add("has help")

        if self.list:
            add("has child")

        if self.next:
            add("has next")

        add("{}:{}".format(self.filename, self.linenr))

        return "<{}>".format(", ".join(fields))

    def __str__(self):
        """
        Returns a string representation of the menu node. Matches the Kconfig
        format, with any parent dependencies propagated to the 'depends on'
        condition.

        The output could (almost) be fed back into a Kconfig parser to redefine
        the object associated with the menu node. See the module documentation
        for a gotcha related to choice symbols.

        For symbols and choices with multiple menu nodes (multiple definition
        locations), properties that aren't associated with a particular menu
        node are shown on all menu nodes ('option env=...', 'optional' for
        choices, etc.).

        The returned string does not end in a newline.
        """
        return self.custom_str(standard_sc_expr_str)

    def custom_str(self, sc_expr_str_fn):
        """
        Works like MenuNode.__str__(), but allows a custom format to be used
        for all symbol/choice references. See expr_str().
        """
        return self._menu_comment_node_str(sc_expr_str_fn) \
               if self.item in _MENU_COMMENT else \
               self._sym_choice_node_str(sc_expr_str_fn)

    def _menu_comment_node_str(self, sc_expr_str_fn):
        # Printing helper for MENU and COMMENT nodes
        s = '{} "{}"'.format("menu" if self.item is MENU else "comment",
                             self.prompt[0])

        if self.dep is not self.kconfig.y:
            s += "\n\tdepends on {}".format(expr_str(self.dep, sc_expr_str_fn))

        if self.item is MENU and self.visibility is not self.kconfig.y:
            s += "\n\tvisible if {}".format(expr_str(self.visibility,
                                                     sc_expr_str_fn))

        return s

    def _sym_choice_node_str(self, sc_expr_str_fn):
        # Printing helper for Symbol and Choice nodes

        # Adds a single tab-indented line to the output
        def indent_add(s):
            lines.append("\t" + s)

        # Like indent_add(), but appends an ' if <cond>' suffix unless the
        # condition is the constant y
        def indent_add_cond(s, cond):
            if cond is not self.kconfig.y:
                s += " if " + expr_str(cond, sc_expr_str_fn)
            indent_add(s)

        sc = self.item

        if sc.__class__ is Symbol:
            if self.is_menuconfig:
                t = "menuconfig "
            elif self.is_configdefault:
                t = "configdefault "
            else:
                t = "config "
            lines = [t + sc.name]
        else:
            lines = ["choice " + sc.name if sc.name else "choice"]

        if sc.orig_type and not self.prompt:  # sc.orig_type != UNKNOWN
            # If there's a prompt, we'll use the '<type> "prompt"' shorthand
            # instead
            indent_add(TYPE_TO_STR[sc.orig_type])

        if self.prompt:
            if sc.orig_type:
                prefix = TYPE_TO_STR[sc.orig_type]
            else:
                # Symbol defined without a type (which generates a warning)
                prefix = "prompt"

            indent_add_cond(prefix + ' "{}"'.format(escape(self.prompt[0])),
                            self.orig_prompt[1])

        if sc.__class__ is Symbol:
            if sc.is_allnoconfig_y:
                indent_add("option allnoconfig_y")

            if sc is sc.kconfig.defconfig_list:
                indent_add("option defconfig_list")

            if sc.env_var is not None:
                indent_add('option env="{}"'.format(sc.env_var))

            if sc is sc.kconfig.modules:
                indent_add("option modules")

            for low, high, cond in self.orig_ranges:
                indent_add_cond(
                    "range {} {}".format(sc_expr_str_fn(low),
                                         sc_expr_str_fn(high)),
                    cond)

        for default, cond in self.orig_defaults:
            indent_add_cond("default " + expr_str(default, sc_expr_str_fn),
                            cond)

        if sc.__class__ is Choice and sc.is_optional:
            indent_add("optional")

        if sc.__class__ is Symbol:
            for select, cond in self.orig_selects:
                indent_add_cond("select " + sc_expr_str_fn(select), cond)

            for imply, cond in self.orig_implies:
                indent_add_cond("imply " + sc_expr_str_fn(imply), cond)

        if self.dep is not sc.kconfig.y:
            indent_add("depends on " + expr_str(self.dep, sc_expr_str_fn))

        if self.help is not None:
            indent_add("help")
            for line in self.help.splitlines():
                indent_add("  " + line)

        return "\n".join(lines)

    def _strip_dep(self, expr):
        # Helper function for removing MenuNode.dep from 'expr'. Uses two
        # pieces of internal knowledge: (1) Expressions are reused rather than
        # copied, and (2) the direct dependencies always appear at the end.

        # ... if dep -> ... if y
        if self.dep is expr:
            return self.kconfig.y

        # (AND, X, dep) -> X
        if expr.__class__ is tuple and expr[0] is AND and expr[2] is self.dep:
            return expr[1]

        return expr
class Variable(object):
    """
    A preprocessor variable/function, created by '=' or ':=' assignments in
    Kconfig files.

    Attributes:

    name:
      The name of the variable.

    value:
      The value of the variable as written, before any expansion.

    expanded_value:
      The variable's value with all expansions performed. For simple
      (':=') variables this matches 'value'. Accessing the property raises
      KconfigError if the expansion appears to loop forever.

      Equivalent to expanded_value_w_args() with no arguments; it predates
      function-argument support and is kept for backwards compatibility.

    is_recursive:
      True if the variable is recursive (defined with =).
    """
    __slots__ = (
        "_n_expansions",
        "is_recursive",
        "kconfig",
        "name",
        "value",
    )

    @property
    def expanded_value(self):
        """
        See the class documentation.
        """
        # Simply an argument-less expansion
        return self.expanded_value_w_args()

    def expanded_value_w_args(self, *args):
        """
        Returns the expanded value of the variable/function. Any arguments
        passed will be substituted for $(1), $(2), etc.

        Raises a KconfigError if the expansion seems to be stuck in a loop.
        """
        # _fn_val() expects the function name followed by its arguments
        return self.kconfig._fn_val((self.name,) + args)

    def __repr__(self):
        flavor = "recursive" if self.is_recursive else "immediate"
        return "<variable {}, {}, value '{}'>" \
               .format(self.name, flavor, self.value)
class KconfigError(Exception):
    """
    Raised for Kconfig-related errors.

    Note that KconfigError and KconfigSyntaxError are one and the same class;
    the KconfigSyntaxError name is only a deprecated alias kept for backwards
    compatibility.
    """


# Deprecated alias, retained so old 'except KconfigSyntaxError' code works
KconfigSyntaxError = KconfigError
# Kept only so old code referencing InternalError still imports; the library
# itself never raises it.
class InternalError(Exception):
    "Never raised. Kept around for backwards compatibility."
# Workaround:
#
# If 'errno' and 'strerror' are set on IOError, then __str__() always returns
# "[Errno <errno>] <strerror>", ignoring any custom message passed to the
# constructor. By defining our own subclass, we can use a custom message while
# also providing 'errno', 'strerror', and 'filename' to scripts.
class _KconfigIOError(IOError):
def __init__(self, ioerror, msg):
self.msg = msg
super(_KconfigIOError, self).__init__(
ioerror.errno, ioerror.strerror, ioerror.filename)
def __str__(self):
return self.msg
#
# Public functions
#
def expr_value(expr):
    """
    Evaluates the expression 'expr' to a tristate value. Returns 0 (n), 1 (m),
    or 2 (y).

    'expr' must be an already-parsed expression from a Symbol, Choice, or
    MenuNode property. To evaluate an expression represented as a string, use
    Kconfig.eval_string().

    Passing subexpressions of expressions to this function works as expected.
    """
    # Bare symbols/choices evaluate to their own tristate value
    if expr.__class__ is not tuple:
        return expr.tri_value

    op = expr[0]

    if op is AND:
        left = expr_value(expr[1])
        # n short-circuits AND (~5% faster allnoconfig.py/allyesconfig.py,
        # as of writing)
        if not left:
            return 0
        return min(left, expr_value(expr[2]))

    if op is OR:
        left = expr_value(expr[1])
        # y short-circuits OR
        if left == 2:
            return 2
        return max(left, expr_value(expr[2]))

    if op is NOT:
        return 2 - expr_value(expr[1])

    # Relation. Implements <, <=, >, >= comparisons as well; these were added
    # to kconfig in 31847b67 (kconfig: allow use of relations other than
    # (in)equality).
    lhs = expr[1]
    rhs = expr[2]

    if lhs.orig_type is STRING and rhs.orig_type is STRING:
        # Two string operands compare lexicographically
        comp = _strcmp(lhs.str_value, rhs.str_value)
    else:
        try:
            # Otherwise, try a numeric comparison first...
            comp = _sym_to_num(lhs) - _sym_to_num(rhs)
        except ValueError:
            # ...and fall back on lexicographic comparison if the operands
            # don't parse as numbers
            comp = _strcmp(lhs.str_value, rhs.str_value)

    if op is EQUAL:
        res = comp == 0
    elif op is UNEQUAL:
        res = comp != 0
    elif op is LESS:
        res = comp < 0
    elif op is LESS_EQUAL:
        res = comp <= 0
    elif op is GREATER:
        res = comp > 0
    else:  # GREATER_EQUAL
        res = comp >= 0

    # Relations are always n or y (never m)
    return 2*res
def standard_sc_expr_str(sc):
    """
    Standard symbol/choice printing function. Uses plain Kconfig syntax, and
    displays choices as <choice> (or <choice NAME>, for named choices).

    See expr_str().
    """
    if sc.__class__ is not Symbol:
        # A Choice: show as <choice> or <choice NAME>
        return "<choice {}>".format(sc.name) if sc.name else "<choice>"

    # Constant symbols that aren't the n/m/y tristate constants are shown
    # quoted and escaped
    if sc.is_constant and sc.name not in STR_TO_TRI:
        return '"{}"'.format(escape(sc.name))

    return sc.name
def expr_str(expr, sc_expr_str_fn=standard_sc_expr_str):
    """
    Returns the string representation of the expression 'expr', as in a Kconfig
    file.

    Passing subexpressions of expressions to this function works as expected.

    sc_expr_str_fn (default: standard_sc_expr_str):
      This function is called for every symbol/choice (hence "sc") appearing in
      the expression, with the symbol/choice as the argument. It is expected to
      return a string to be used for the symbol/choice.

      This can be used e.g. to turn symbols/choices into links when generating
      documentation, or for printing the value of each symbol/choice after it.

      Note that quoted values are represented as constants symbols
      (Symbol.is_constant == True).
    """
    # Bare symbol/choice
    if expr.__class__ is not tuple:
        return sc_expr_str_fn(expr)

    op = expr[0]

    if op is AND:
        return "{} && {}".format(_parenthesize(expr[1], OR, sc_expr_str_fn),
                                 _parenthesize(expr[2], OR, sc_expr_str_fn))

    if op is OR:
        # This turns A && B || C && D into "(A && B) || (C && D)", which is
        # redundant, but more readable
        return "{} || {}".format(_parenthesize(expr[1], AND, sc_expr_str_fn),
                                 _parenthesize(expr[2], AND, sc_expr_str_fn))

    if op is NOT:
        operand = expr[1]
        if operand.__class__ is tuple:
            return "!({})".format(expr_str(operand, sc_expr_str_fn))
        return "!" + sc_expr_str_fn(operand)  # Symbol

    # Relation. Relation operands are always symbols (quoted strings are
    # constant symbols).
    return "{} {} {}".format(sc_expr_str_fn(expr[1]), REL_TO_STR[op],
                             sc_expr_str_fn(expr[2]))
def expr_items(expr):
    """
    Returns a set() of all items (symbols and choices) that appear in the
    expression 'expr'.

    Passing subexpressions of expressions to this function works as expected.
    """
    items = set()

    def collect(node):
        if node.__class__ is not tuple:
            # Symbol or choice leaf
            items.add(node)
            return

        # AND, OR, NOT, or relation
        collect(node[1])

        # NOTs only have a single operand
        if node[0] is not NOT:
            collect(node[2])

    collect(expr)
    return items
def split_expr(expr, op):
    """
    Returns a list containing the top-level AND or OR operands in the
    expression 'expr', in the same (left-to-right) order as they appear in
    the expression.

    This can be handy e.g. for splitting (weak) reverse dependencies
    from 'select' and 'imply' into individual selects/implies.

    op:
      Either AND to get AND operands, or OR to get OR operands.

      (Having this as an operand might be more future-safe than having two
      hardcoded functions.)

    Pseudo-code examples:

      split_expr( A                    , OR  )  ->  [A]
      split_expr( A && B               , OR  )  ->  [A && B]
      split_expr( A || B               , OR  )  ->  [A, B]
      split_expr( A || B               , AND )  ->  [A || B]
      split_expr( A || B || (C && D)   , OR  )  ->  [A, B, C && D]

      # Second || is not at the top level
      split_expr( A || (B && (C || D)) , OR )  ->  [A, B && (C || D)]

      # Parentheses don't matter as long as we stay at the top level (don't
      # encounter any non-'op' nodes)
      split_expr( (A || B) || C        , OR )  ->  [A, B, C]
      split_expr( A || (B || C)        , OR )  ->  [A, B, C]
    """
    operands = []

    def flatten(node):
        # Recurse into subtrees whose operator matches 'op'; anything else is
        # a top-level operand
        if node.__class__ is tuple and node[0] is op:
            flatten(node[1])
            flatten(node[2])
        else:
            operands.append(node)

    flatten(expr)
    return operands
def escape(s):
    r"""
    Escapes the string 's' in the same fashion as is done for display in
    Kconfig format and when writing strings to a .config file. " and \ are
    replaced by \" and \\, respectively.
    """
    # Backslashes are escaped first so the backslashes inserted for quotes
    # don't get escaped a second time
    s = s.replace("\\", r"\\")
    return s.replace('"', r'\"')
# unescape() helper: bound sub() method of the pattern, compiled once
_unescape_sub = re.compile(r"\\(.)").sub


def unescape(s):
    r"""
    Unescapes the string 's'. \ followed by any character is replaced with just
    that character. Used internally when reading .config files.
    """
    # Collapse every backslash-character pair to the bare character
    return _unescape_sub(r"\1", s)
def standard_kconfig(description=None):
    """
    Argument parsing helper for tools that take a single optional Kconfig file
    argument (default: Kconfig). Returns the Kconfig instance for the parsed
    configuration. Uses argparse internally.

    Exits with sys.exit() (which raises SystemExit) on errors.

    description (default: None):
      The 'description' passed to argparse.ArgumentParser(allow_abbrev=False).
      argparse.RawDescriptionHelpFormatter is used, so formatting is preserved.
    """
    import argparse

    argparser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=description, allow_abbrev=False)

    argparser.add_argument(
        "kconfig",
        metavar="KCONFIG",
        default="Kconfig",
        nargs="?",
        help="Top-level Kconfig file (default: Kconfig)")

    args = argparser.parse_args()

    # suppress_traceback turns expected errors into clean error messages
    return Kconfig(args.kconfig, suppress_traceback=True)
def standard_config_filename():
    """
    Helper for tools. Returns the value of KCONFIG_CONFIG (which specifies the
    .config file to load/save) if it is set, and ".config" otherwise.

    Calling load_config() with filename=None might give the behavior you want,
    without having to use this function.
    """
    return os.environ.get("KCONFIG_CONFIG", ".config")
def load_allconfig(kconf, filename):
    """
    Use Kconfig.load_allconfig() instead, which was added in Kconfiglib 13.4.0.
    Supported for backwards compatibility. Might be removed at some point after
    a long period of deprecation warnings.
    """
    allconfig = os.environ.get("KCONFIG_ALLCONFIG")
    if allconfig is None:
        # Nothing requested; keep current configuration untouched
        return

    def plain_ioerror(exc):
        # "Upcasts" a _KconfigIOError to an IOError, removing the custom
        # __str__() message. The standard message is better here.
        #
        # This might also convert an OSError to an IOError in obscure cases,
        # but it's probably not a big deal. The distinction is shaky (see
        # PEP-3151).
        return IOError(exc.errno, exc.strerror, exc.filename)

    # Silence override/redundant-assignment warnings while merging configs
    saved_flags = (kconf.warn_assign_override, kconf.warn_assign_redun)
    kconf.warn_assign_override = kconf.warn_assign_redun = False

    if allconfig in ("", "1"):
        # Empty or "1" means "use the default file, falling back to
        # all.config"
        try:
            print(kconf.load_config(filename, False))
        except EnvironmentError as first_err:
            try:
                print(kconf.load_config("all.config", False))
            except EnvironmentError as second_err:
                sys.exit("error: KCONFIG_ALLCONFIG is set, but neither {} "
                         "nor all.config could be opened: {}, {}"
                         .format(filename, plain_ioerror(first_err),
                                 plain_ioerror(second_err)))
    else:
        # KCONFIG_ALLCONFIG names an explicit file
        try:
            print(kconf.load_config(allconfig, False))
        except EnvironmentError as err:
            sys.exit("error: KCONFIG_ALLCONFIG is set to '{}', which "
                     "could not be opened: {}"
                     .format(allconfig, plain_ioerror(err)))

    kconf.warn_assign_override, kconf.warn_assign_redun = saved_flags
#
# Internal functions
#
def _visibility(sc):
    # Symbols and Choices have a "visibility" that acts as an upper bound on
    # the values a user can set for them, corresponding to the visibility in
    # e.g. 'make menuconfig'. This function calculates the visibility for the
    # Symbol or Choice 'sc' -- the logic is nearly identical.

    # Visibility is the maximum over all prompt conditions
    result = 0
    for node in sc.nodes:
        if node.prompt:
            result = max(result, expr_value(node.prompt[1]))

    if sc.__class__ is Symbol and sc.choice:
        choice = sc.choice

        # Non-tristate choice symbols are only visible in y mode
        if choice.orig_type is TRISTATE and \
           sc.orig_type is not TRISTATE and choice.tri_value != 2:
            return 0

        # Choice symbols with m visibility are not visible in y mode
        if sc.orig_type is TRISTATE and result == 1 and \
           choice.tri_value == 2:
            return 0

    # Promote m to y if we're dealing with a non-tristate (possibly due to
    # modules being disabled)
    if result == 1 and sc.type is not TRISTATE:
        return 2

    return result
def _depend_on(sc, expr):
    # Registers 'sc' (symbol or choice) as a "dependee" of every symbol in
    # 'expr'. Constant symbols in 'expr' are skipped as they can never change
    # value anyway.
    if expr.__class__ is not tuple:
        # Leaf: a non-constant symbol or a choice
        if not expr.is_constant:
            expr._dependents.add(sc)
        return

    # AND, OR, NOT, or relation node -- recurse into the operands.
    # NOTs only have a single operand.
    _depend_on(sc, expr[1])
    if expr[0] is not NOT:
        _depend_on(sc, expr[2])
def _parenthesize(expr, type_, sc_expr_str_fn):
    # expr_str() helper. Wraps the string form of 'expr' in parentheses when
    # the expression's top node is of type 'type_'.
    rendered = expr_str(expr, sc_expr_str_fn)
    if expr.__class__ is tuple and expr[0] is type_:
        return "({})".format(rendered)
    return rendered
def _ordered_unique(lst):
# Returns 'lst' with any duplicates removed, preserving order. This hacky
# version seems to be a common idiom. It relies on short-circuit evaluation
# and set.add() returning None, which is falsy.
seen = set()
seen_add = seen.add
return [x for x in lst if x not in seen and not seen_add(x)]
def _is_base_n(s, n):
try:
int(s, n)
return True
except ValueError:
return False
def _strcmp(s1, s2):
# strcmp()-alike that returns -1, 0, or 1
return (s1 > s2) - (s1 < s2)
def _sym_to_num(sym):
    # expr_value() helper for converting a symbol to a number. Raises
    # ValueError for symbols that can't be converted.

    # For BOOL and TRISTATE, n/m/y count as 0/1/2. This mirrors 9059a3493ef
    # ("kconfig: fix relational operators for bool and tristate symbols") in
    # the C implementation.
    if sym.orig_type in _BOOL_TRISTATE:
        return sym.tri_value
    return int(sym.str_value, _TYPE_TO_BASE[sym.orig_type])
def _touch_dep_file(path, sym_name):
# If sym_name is MY_SYM_NAME, touches my/sym/name.h. See the sync_deps()
# docstring.
sym_path = path + os.sep + sym_name.lower().replace("_", os.sep) + ".h"
sym_path_dir = dirname(sym_path)
if not exists(sym_path_dir):
os.makedirs(sym_path_dir, 0o755)
# A kind of truncating touch, mirroring the C tools
os.close(os.open(
sym_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o644))
def _save_old(path):
# See write_config()
if not os.path.isfile(path):
# Backup only files (and symlinks to files). Simplest alternative
# to avoid e.g. (potentially successful attempt to) rename /dev/null
# (and to keep fifos).
return
def copy(src, dst):
# Import as needed, to save some startup time
import shutil
shutil.copyfile(src, dst)
if islink(path):
# Preserve symlinks
copy_fn = copy
elif hasattr(os, "replace"):
# Python 3 (3.3+) only. Best choice when available, because it
# removes <filename>.old on both *nix and Windows.
copy_fn = os.replace
elif os.name == "posix":
# Removes <filename>.old on POSIX systems
copy_fn = os.rename
else:
# Fall back on copying
copy_fn = copy
try:
copy_fn(path, path + ".old")
except Exception:
# Ignore errors from 'path' missing as well as other errors.
# <filename>.old file is usually more of a nice-to-have, and not worth
# erroring out over e.g. if <filename>.old happens to be a directory.
pass
def _locs(sc):
# Symbol/Choice.name_and_loc helper. Returns the "(defined at ...)" part of
# the string. 'sc' is a Symbol or Choice.
if sc.nodes:
return "(defined at {})".format(
", ".join("{0.filename}:{0.linenr}".format(node)
for node in sc.nodes))
return "(undefined)"
# Menu manipulation
def _expr_depends_on(expr, sym):
    # Reimplementation of expr_depends_symbol() from mconf.c. Used to determine
    # if a submenu should be implicitly created. This also influences which
    # items inside choice statements are considered choice items.
    #
    # Returns True if 'expr' is a plain reference to 'sym', an equality test
    # of 'sym' against m/y (or != n), or an AND with such a test on either
    # side.
    if expr.__class__ is not tuple:
        # Leaf node: depends directly iff it's the symbol itself
        return expr is sym
    if expr[0] in _EQUAL_UNEQUAL:
        # Check for one of the following:
        # sym = m/y, m/y = sym, sym != n, n != sym
        left, right = expr[1:]
        if right is sym:
            left, right = right, left
        elif left is not sym:
            return False
        # NOTE(review): due to and/or precedence this parses as
        # ((EQUAL and right is m) or (right is y)) or (UNEQUAL and right is n),
        # so 'right is y' matches regardless of the operator. This mirrors the
        # upstream code -- confirm intent before "fixing".
        return (expr[0] is EQUAL and right is sym.kconfig.m or
                right is sym.kconfig.y) or \
               (expr[0] is UNEQUAL and right is sym.kconfig.n)
    # Only AND propagates the dependency; either operand may provide it
    return expr[0] is AND and \
           (_expr_depends_on(expr[1], sym) or
            _expr_depends_on(expr[2], sym))
def _auto_menu_dep(node1, node2):
    # Returns True if node2 has an "automatic menu dependency" on node1. If
    # node2 has a prompt, we check its condition. Otherwise, we look directly
    # at node2.dep.
    if node2.prompt:
        cond = node2.prompt[1]
    else:
        cond = node2.dep
    return _expr_depends_on(cond, node1.item)
def _flatten(node):
    # "Flattens" menu nodes without prompts (e.g. 'if' nodes and non-visible
    # symbols with children from automatic menu creation) so that their
    # children appear after them instead. This gives a clean menu structure
    # with no unexpected "jumps" in the indentation.
    #
    # Do not flatten promptless choices (which can appear "legitimately" if a
    # named choice is defined in multiple locations to add on symbols). It
    # looks confusing, and the menuconfig already shows all choice symbols if
    # you enter the choice at some location with a prompt.
    #
    # node:
    #   First node in a list of sibling menu nodes; the list is rewritten in
    #   place.
    while node:
        if node.list and not node.prompt and \
           node.item.__class__ is not Choice:

            # Splice node's child list into the sibling list right after
            # node: walk to the last child, reparenting each one on the way
            last_node = node.list
            while 1:
                last_node.parent = node.parent
                if not last_node.next:
                    break
                last_node = last_node.next

            # Link the children between node and node's old next sibling
            last_node.next = node.next
            node.next = node.list
            node.list = None

        node = node.next
def _remove_ifs(node):
    # Removes 'if' nodes (which can be recognized by MenuNode.item being None),
    # which are assumed to already have been flattened. The C implementation
    # doesn't bother to do this, but we expose the menu tree directly, and it
    # makes it nicer to work with.
    #
    # node:
    #   Parent node whose child list is filtered in place.

    # Skip leading 'if' nodes to find the new first child
    cur = node.list
    while cur and not cur.item:
        cur = cur.next

    node.list = cur

    while cur:
        # Find the next non-'if' sibling (or None)
        next = cur.next
        while next and not next.item:
            next = next.next

        # Equivalent to
        #
        #   cur.next = next
        #   cur = next
        #
        # due to tricky Python semantics. The order matters.
        cur.next = cur = next
def _finalize_choice(node):
    # Finalizes a choice, marking each symbol whose menu node has the choice
    # as the parent as a choice symbol, and automatically determining types
    # if not specified.
    choice = node.item

    # Collect the choice's symbols from its child menu nodes
    cur = node.list
    while cur:
        member = cur.item
        if member.__class__ is Symbol:
            member.choice = choice
            choice.syms.append(member)
        cur = cur.next

    # An untyped choice gets its type from the first choice item that has a
    # specified type
    if not choice.orig_type:
        for sym in choice.syms:
            if sym.orig_type:
                choice.orig_type = sym.orig_type
                break

    # Each choice item of UNKNOWN type inherits the choice's type
    for sym in choice.syms:
        if not sym.orig_type:
            sym.orig_type = choice.orig_type
def _check_dep_loop_sym(sym, ignore_choice):
    # Detects dependency loops using depth-first search on the dependency graph
    # (which is calculated earlier in Kconfig._build_dep()).
    #
    # Algorithm:
    #
    #  1. Symbols/choices start out with _visited = 0, meaning unvisited.
    #
    #  2. When a symbol/choice is first visited, _visited is set to 1, meaning
    #     "visited, potentially part of a dependency loop". The recursive
    #     search then continues from the symbol/choice.
    #
    #  3. If we run into a symbol/choice X with _visited already set to 1,
    #     there's a dependency loop. The loop is found on the call stack by
    #     recording symbols while returning ("on the way back") until X is seen
    #     again.
    #
    #  4. Once a symbol/choice and all its dependencies (or dependents in this
    #     case) have been checked recursively without detecting any loops, its
    #     _visited is set to 2, meaning "visited, not part of a dependency
    #     loop".
    #
    #     This saves work if we run into the symbol/choice again in later calls
    #     to _check_dep_loop_sym(). We just return immediately.
    #
    # Choices complicate things, as every choice symbol depends on every other
    # choice symbol in a sense. When a choice is "entered" via a choice symbol
    # X, we visit all choice symbols from the choice except X, and prevent
    # immediately revisiting the choice with a flag (ignore_choice).
    #
    # Maybe there's a better way to handle this (different flags or the
    # like...)
    #
    # sym:
    #   Symbol to start/continue the search from.
    #
    # ignore_choice:
    #   True when 'sym' was reached via its choice, to avoid bouncing straight
    #   back into the choice.
    #
    # Returns None if no loop was found, and a tuple of the loop's members
    # otherwise (built up on the way back out of the recursion).
    if not sym._visited:
        # sym._visited == 0, unvisited

        sym._visited = 1

        for dep in sym._dependents:
            # Choices show up in Symbol._dependents when the choice has the
            # symbol in a 'prompt' or 'default' condition (e.g.
            # 'default ... if SYM').
            #
            # Since we aren't entering the choice via a choice symbol, all
            # choice symbols need to be checked, hence the None.
            loop = _check_dep_loop_choice(dep, None) \
                   if dep.__class__ is Choice \
                   else _check_dep_loop_sym(dep, False)

            if loop:
                # Dependency loop found
                return _found_dep_loop(loop, sym)

        if sym.choice and not ignore_choice:
            loop = _check_dep_loop_choice(sym.choice, sym)
            if loop:
                # Dependency loop found
                return _found_dep_loop(loop, sym)

        # The symbol is not part of a dependency loop
        sym._visited = 2

        # No dependency loop found
        return None

    if sym._visited == 2:
        # The symbol was checked earlier and is already known to not be part of
        # a dependency loop
        return None

    # sym._visited == 1, found a dependency loop. Return the symbol as the
    # first element in it.
    return (sym,)
def _check_dep_loop_choice(choice, skip):
    # Companion to _check_dep_loop_sym() for Choices; see its comment for the
    # overall algorithm. 'skip' is the choice symbol the choice was entered
    # through (or None), which is excluded from the search to avoid a false
    # positive loop.
    if not choice._visited:
        # choice._visited == 0, unvisited

        choice._visited = 1

        # Check for loops involving choice symbols. If we came here via a
        # choice symbol, skip that one, as we'd get a false positive
        # '<sym FOO> -> <choice> -> <sym FOO>' loop otherwise.
        for sym in choice.syms:
            if sym is not skip:
                # Prevent the choice from being immediately re-entered via the
                # "is a choice symbol" path by passing True
                loop = _check_dep_loop_sym(sym, True)
                if loop:
                    # Dependency loop found
                    return _found_dep_loop(loop, choice)

        # The choice is not part of a dependency loop
        choice._visited = 2

        # No dependency loop found
        return None

    if choice._visited == 2:
        # The choice was checked earlier and is already known to not be part of
        # a dependency loop
        return None

    # choice._visited == 1, found a dependency loop. Return the choice as the
    # first element in it.
    return (choice,)
def _found_dep_loop(loop, cur):
# Called "on the way back" when we know we have a loop
# Is the symbol/choice 'cur' where the loop started?
if cur is not loop[0]:
# Nope, it's just a part of the loop
return loop + (cur,)
# Yep, we have the entire loop. Throw an exception that shows it.
msg = "\nDependency loop\n" \
"===============\n\n"
for item in loop:
if item is not loop[0]:
msg += "...depends on "
if item.__class__ is Symbol and item.choice:
msg += "the choice symbol "
msg += "{}, with definition...\n\n{}\n\n" \
.format(item.name_and_loc, item)
# Small wart: Since we reuse the already calculated
# Symbol/Choice._dependents sets for recursive dependency detection, we
# lose information on whether a dependency came from a 'select'/'imply'
# condition or e.g. a 'depends on'.
#
# This might cause selecting symbols to "disappear". For example,
# a symbol B having 'select A if C' gives a direct dependency from A to
# C, since it corresponds to a reverse dependency of B && C.
#
# Always print reverse dependencies for symbols that have them to make
# sure information isn't lost. I wonder if there's some neat way to
# improve this.
if item.__class__ is Symbol:
if item.rev_dep is not item.kconfig.n:
msg += "(select-related dependencies: {})\n\n" \
.format(expr_str(item.rev_dep))
if item.weak_rev_dep is not item.kconfig.n:
msg += "(imply-related dependencies: {})\n\n" \
.format(expr_str(item.rev_dep))
msg += "...depends again on " + loop[0].name_and_loc
raise KconfigError(msg)
def _decoding_error(e, filename, macro_linenr=None):
    # Gives the filename and context for UnicodeDecodeError's, which are a pain
    # to debug otherwise. 'e' is the UnicodeDecodeError object.
    #
    # If the decoding error is for the output of a $(shell,...) command,
    # macro_linenr holds the line number where it was run (the exact line
    # number isn't available for decoding errors in files).
    if macro_linenr is None:
        location = "'{}'".format(filename)
    else:
        location = "output from macro at {}:{}".format(filename, macro_linenr)

    raise KconfigError(
        "\n"
        "Malformed {} in {}\n"
        "Context: {}\n"
        "Problematic data: {}\n"
        "Reason: {}".format(
            e.encoding,
            location,
            e.object[max(e.start - 40, 0):e.end + 40],
            e.object[e.start:e.end],
            e.reason))
def _warn_verbose_deprecated(fn_name):
sys.stderr.write(
"Deprecation warning: {0}()'s 'verbose' argument has no effect. Since "
"Kconfiglib 12.0.0, the message is returned from {0}() instead, "
"and is always generated. Do e.g. print(kconf.{0}()) if you want to "
"want to show a message like \"Loaded configuration '.config'\" on "
"stdout. The old API required ugly hacks to reuse messages in "
"configuration interfaces.\n".format(fn_name))
# Predefined preprocessor functions
def _filename_fn(kconf, _):
return kconf.filename
def _lineno_fn(kconf, _):
return str(kconf.linenr)
def _info_fn(kconf, _, msg):
print("{}:{}: {}".format(kconf.filename, kconf.linenr, msg))
return ""
def _warning_if_fn(kconf, _, cond, msg):
if cond == "y":
kconf._warn(msg, kconf.filename, kconf.linenr)
return ""
def _error_if_fn(kconf, _, cond, msg):
if cond == "y":
raise KconfigError("{}:{}: {}".format(
kconf.filename, kconf.linenr, msg))
return ""
def _shell_fn(kconf, _, command):
    # $(shell,<command>): runs <command> in a shell and expands to its
    # (whitespace-normalized) stdout. Warnings are generated for any stderr
    # output.
    import subprocess  # Only import as needed, to save some startup time

    stdout, stderr = subprocess.Popen(
        command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    ).communicate()

    if not _IS_PY2:
        # Popen() gives bytes on Python 3; decode with the configured
        # encoding, reporting decoding failures with file/line context
        try:
            stdout = stdout.decode(kconf._encoding)
            stderr = stderr.decode(kconf._encoding)
        except UnicodeDecodeError as e:
            _decoding_error(e, kconf.filename, kconf.linenr)

    if stderr:
        kconf._warn("'{}' wrote to stderr: {}".format(
                        command, "\n".join(stderr.splitlines())),
                    kconf.filename, kconf.linenr)

    # Universal newlines with splitlines() (to prevent e.g. stray \r's in
    # command output on Windows), trailing newline removal, and
    # newline-to-space conversion.
    #
    # On Python 3 versions before 3.6, it's not possible to specify the
    # encoding when passing universal_newlines=True to Popen() (the 'encoding'
    # parameter was added in 3.6), so we do this manual version instead.
    return "\n".join(stdout.splitlines()).rstrip("\n").replace("\n", " ")
#
# Global constants
#

# Tristate value (0/1/2) -> Kconfig string representation
TRI_TO_STR = {
    0: "n",
    1: "m",
    2: "y",
}

# Inverse of TRI_TO_STR
STR_TO_TRI = {
    "n": 0,
    "m": 1,
    "y": 2,
}

# Constant representing that there's no cached choice selection. This is
# distinct from a cached None (no selection). Any object that's not None or a
# Symbol will do. We test this with 'is'.
_NO_CACHED_SELECTION = 0

# Are we running on Python 2?
_IS_PY2 = sys.version_info[0] < 3

try:
    # os.uname() only exists on Unix-likes
    _UNAME_RELEASE = os.uname()[2]
except AttributeError:
    # Only import as needed, to save some startup time
    import platform
    _UNAME_RELEASE = platform.uname()[2]
# The token and type constants below are safe to test with 'is', which is a bit
# faster (~30% faster on my machine, and a few % faster for total parsing
# time), even without assuming Python's small integer optimization (which
# caches small integer objects). The constants end up pointing to unique
# integer objects, and since we consistently refer to them via the names below,
# we always get the same object.
#
# Client code should use == though.

# Tokens, with values 1, 2, ... . Avoiding 0 simplifies some checks by making
# all tokens except empty strings truthy.
#
# NOTE: 51 names below, so the unpacked range must stay range(1, 52)
(
    _T_ALLNOCONFIG_Y,
    _T_AND,
    _T_BOOL,
    _T_CHOICE,
    _T_CLOSE_PAREN,
    _T_COMMENT,
    _T_CONFIG,
    _T_CONFIGDEFAULT,
    _T_DEFAULT,
    _T_DEFCONFIG_LIST,
    _T_DEF_BOOL,
    _T_DEF_HEX,
    _T_DEF_INT,
    _T_DEF_STRING,
    _T_DEF_TRISTATE,
    _T_DEPENDS,
    _T_ENDCHOICE,
    _T_ENDIF,
    _T_ENDMENU,
    _T_ENV,
    _T_EQUAL,
    _T_GREATER,
    _T_GREATER_EQUAL,
    _T_HELP,
    _T_HEX,
    _T_IF,
    _T_IMPLY,
    _T_INT,
    _T_LESS,
    _T_LESS_EQUAL,
    _T_MAINMENU,
    _T_MENU,
    _T_MENUCONFIG,
    _T_MODULES,
    _T_NOT,
    _T_ON,
    _T_OPEN_PAREN,
    _T_OPTION,
    _T_OPTIONAL,
    _T_OR,
    _T_ORSOURCE,
    _T_OSOURCE,
    _T_PROMPT,
    _T_RANGE,
    _T_RSOURCE,
    _T_SELECT,
    _T_SOURCE,
    _T_STRING,
    _T_TRISTATE,
    _T_UNEQUAL,
    _T_VISIBLE,
) = range(1, 52)
# Keyword to token map, with the get() method assigned directly as a small
# optimization
#
# Some keywords have multiple spellings mapping to the same token (kept for
# backwards compatibility with older Kconfig files).
_get_keyword = {
    "---help---": _T_HELP,
    "allnoconfig_y": _T_ALLNOCONFIG_Y,
    "bool": _T_BOOL,
    "boolean": _T_BOOL,
    "choice": _T_CHOICE,
    "comment": _T_COMMENT,
    "config": _T_CONFIG,
    "configdefault": _T_CONFIGDEFAULT,
    "def_bool": _T_DEF_BOOL,
    "def_hex": _T_DEF_HEX,
    "def_int": _T_DEF_INT,
    "def_string": _T_DEF_STRING,
    "def_tristate": _T_DEF_TRISTATE,
    "default": _T_DEFAULT,
    "defconfig_list": _T_DEFCONFIG_LIST,
    "depends": _T_DEPENDS,
    "endchoice": _T_ENDCHOICE,
    "endif": _T_ENDIF,
    "endmenu": _T_ENDMENU,
    "env": _T_ENV,
    "grsource": _T_ORSOURCE,  # Backwards compatibility
    "gsource": _T_OSOURCE,  # Backwards compatibility
    "help": _T_HELP,
    "hex": _T_HEX,
    "if": _T_IF,
    "imply": _T_IMPLY,
    "int": _T_INT,
    "mainmenu": _T_MAINMENU,
    "menu": _T_MENU,
    "menuconfig": _T_MENUCONFIG,
    "modules": _T_MODULES,
    "on": _T_ON,
    "option": _T_OPTION,
    "optional": _T_OPTIONAL,
    "orsource": _T_ORSOURCE,
    "osource": _T_OSOURCE,
    "prompt": _T_PROMPT,
    "range": _T_RANGE,
    "rsource": _T_RSOURCE,
    "select": _T_SELECT,
    "source": _T_SOURCE,
    "string": _T_STRING,
    "tristate": _T_TRISTATE,
    "visible": _T_VISIBLE,
}.get
# The constants below match the value of the corresponding tokens to remove the
# need for conversion

# Node types
MENU = _T_MENU
COMMENT = _T_COMMENT

# Expression types
AND = _T_AND
OR = _T_OR
NOT = _T_NOT
EQUAL = _T_EQUAL
UNEQUAL = _T_UNEQUAL
LESS = _T_LESS
LESS_EQUAL = _T_LESS_EQUAL
GREATER = _T_GREATER
GREATER_EQUAL = _T_GREATER_EQUAL

# Relation token -> its Kconfig source representation
REL_TO_STR = {
    EQUAL: "=",
    UNEQUAL: "!=",
    LESS: "<",
    LESS_EQUAL: "<=",
    GREATER: ">",
    GREATER_EQUAL: ">=",
}

# Symbol/choice types. UNKNOWN is 0 (falsy) to simplify some checks.
# Client code shouldn't rely on it though, as it was non-zero in
# older versions.
UNKNOWN = 0
BOOL = _T_BOOL
TRISTATE = _T_TRISTATE
STRING = _T_STRING
INT = _T_INT
HEX = _T_HEX

# Type constant -> human-readable type name
TYPE_TO_STR = {
    UNKNOWN: "unknown",
    BOOL: "bool",
    TRISTATE: "tristate",
    STRING: "string",
    INT: "int",
    HEX: "hex",
}

# Used in comparisons. 0 means the base is inferred from the format of the
# string.
_TYPE_TO_BASE = {
    HEX: 16,
    INT: 10,
    STRING: 0,
    UNKNOWN: 0,
}

# def_bool -> BOOL, etc.
_DEF_TOKEN_TO_TYPE = {
    _T_DEF_BOOL: BOOL,
    _T_DEF_HEX: HEX,
    _T_DEF_INT: INT,
    _T_DEF_STRING: STRING,
    _T_DEF_TRISTATE: TRISTATE,
}
# Tokens after which strings are expected. This is used to tell strings from
# constant symbol references during tokenization, both of which are enclosed in
# quotes.
#
# Identifier-like lexemes ("missing quotes") are also treated as strings after
# these tokens. _T_CHOICE is included to avoid symbols being registered for
# named choices.
_STRING_LEX = frozenset({
    _T_BOOL,
    _T_CHOICE,
    _T_COMMENT,
    _T_HEX,
    _T_INT,
    _T_MAINMENU,
    _T_MENU,
    _T_ORSOURCE,
    _T_OSOURCE,
    _T_PROMPT,
    _T_RSOURCE,
    _T_SOURCE,
    _T_STRING,
    _T_TRISTATE,
})

# Various sets for quick membership tests. Gives a single global lookup and
# avoids creating temporary dicts/tuples.

# Tokens that declare a symbol/choice type
_TYPE_TOKENS = frozenset({
    _T_BOOL,
    _T_TRISTATE,
    _T_INT,
    _T_HEX,
    _T_STRING,
})

# All 'source' statement variants
_SOURCE_TOKENS = frozenset({
    _T_SOURCE,
    _T_RSOURCE,
    _T_OSOURCE,
    _T_ORSOURCE,
})

# 'source' variants with paths relative to the current Kconfig file
_REL_SOURCE_TOKENS = frozenset({
    _T_RSOURCE,
    _T_ORSOURCE,
})

# Obligatory (non-optional) sources
_OBL_SOURCE_TOKENS = frozenset({
    _T_SOURCE,
    _T_RSOURCE,
})

_BOOL_TRISTATE = frozenset({
    BOOL,
    TRISTATE,
})

_BOOL_TRISTATE_UNKNOWN = frozenset({
    BOOL,
    TRISTATE,
    UNKNOWN,
})

_INT_HEX = frozenset({
    INT,
    HEX,
})

# Holds the classes themselves, for quick type tests with 'in'
_SYMBOL_CHOICE = frozenset({
    Symbol,
    Choice,
})

_MENU_COMMENT = frozenset({
    MENU,
    COMMENT,
})

_EQUAL_UNEQUAL = frozenset({
    EQUAL,
    UNEQUAL,
})

_RELATIONS = frozenset({
    EQUAL,
    UNEQUAL,
    LESS,
    LESS_EQUAL,
    GREATER,
    GREATER_EQUAL,
})
# Helper factories for compiled regular expressions. They return the bound
# match()/search() method directly, as a small optimization.
#
# Python 3 gets explicit ASCII matching; that's already the default on
# Python 2.

def _re_match(regex):
    flags = 0 if _IS_PY2 else re.ASCII
    return re.compile(regex, flags).match

def _re_search(regex):
    flags = 0 if _IS_PY2 else re.ASCII
    return re.compile(regex, flags).search
# Various regular expressions used during parsing

# The initial token on a line. Also eats leading and trailing whitespace, so
# that we can jump straight to the next token (or to the end of the line if
# there is only one token).
#
# This regex will also fail to match for empty lines and comment lines.
#
# '$' is included to detect preprocessor variable assignments with macro
# expansions in the left-hand side.
_command_match = _re_match(r"\s*([A-Za-z0-9_$-]+)\s*")

# An identifier/keyword after the first token. Also eats trailing whitespace.
# '$' is included to detect identifiers containing macro expansions.
_id_keyword_match = _re_match(r"([A-Za-z0-9_$/.-]+)\s*")

# A fragment in the left-hand side of a preprocessor variable assignment. These
# are the portions between macro expansions ($(foo)). Macros are supported in
# the LHS (variable name).
_assignment_lhs_fragment_match = _re_match("[A-Za-z0-9_-]*")

# The assignment operator and value (right-hand side) in a preprocessor
# variable assignment
_assignment_rhs_match = _re_match(r"\s*(=|:=|\+=)\s*(.*)")

# Special characters/strings while expanding a macro ('(', ')', ',', and '$(')
_macro_special_search = _re_search(r"\(|\)|,|\$\(")

# Special characters/strings while expanding a string (quotes, '\', and '$(')
_string_special_search = _re_search(r'"|\'|\\|\$\(')

# Special characters/strings while expanding a symbol name. Also includes
# end-of-line, in case the macro is the last thing on the line.
_name_special_search = _re_search(r'[^A-Za-z0-9_$/.-]|\$\(|$')

# A valid right-hand side for an assignment to a string symbol in a .config
# file, including escaped characters. Extracts the contents into group 1
# (still with the backslash escapes; see unescape()).
_conf_string_match = _re_match(r'"((?:[^\\"]|\\.)*)"')
``` | /content/code_sandbox/scripts/kconfig/kconfiglib.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 56,926 |
```python
#!/usr/bin/env python3
# Copied from 27e322f of path_to_url
# pylint: skip-file
import sys
import struct
import subprocess
import re
import os
import os.path
import argparse
import json
from time import sleep
UF2_MAGIC_START0 = 0x0A324655  # "UF2\n"
UF2_MAGIC_START1 = 0x9E5D5157  # Randomly selected
UF2_MAGIC_END = 0x0AB16F30  # Ditto

INFO_FILE = "/INFO_UF2.TXT"

appstartaddr = 0x2000
familyid = 0x0


def is_uf2(buf):
    # A UF2 image starts with the two little-endian 32-bit magic words
    start0, start1 = struct.unpack("<II", buf[0:8])
    return start0 == UF2_MAGIC_START0 and start1 == UF2_MAGIC_START1
def is_hex(buf):
    """Return True if 'buf' (bytes) looks like an Intel HEX file.

    The first bytes must decode as UTF-8, the file must start with ':',
    and the whole buffer may contain only HEX-record characters.
    """
    try:
        w = buf[0:30].decode("utf-8")
    except UnicodeDecodeError:
        return False
    # Bug fix: guard against empty input -- w[0] raised IndexError for b""
    if w and w[0] == ':' and re.match(rb"^[:0-9a-fA-F\r\n]+$", buf):
        return True
    return False
def convert_from_uf2(buf):
global appstartaddr
global familyid
numblocks = len(buf) // 512
curraddr = None
currfamilyid = None
families_found = {}
prev_flag = None
all_flags_same = True
outp = []
for blockno in range(numblocks):
ptr = blockno * 512
block = buf[ptr:ptr + 512]
hd = struct.unpack(b"<IIIIIIII", block[0:32])
if hd[0] != UF2_MAGIC_START0 or hd[1] != UF2_MAGIC_START1:
print("Skipping block at " + ptr + "; bad magic")
continue
if hd[2] & 1:
# NO-flash flag set; skip block
continue
datalen = hd[4]
if datalen > 476:
assert False, "Invalid UF2 data size at " + ptr
newaddr = hd[3]
if (hd[2] & 0x2000) and (currfamilyid == None):
currfamilyid = hd[7]
if curraddr == None or ((hd[2] & 0x2000) and hd[7] != currfamilyid):
currfamilyid = hd[7]
curraddr = newaddr
if familyid == 0x0 or familyid == hd[7]:
appstartaddr = newaddr
padding = newaddr - curraddr
if padding < 0:
assert False, "Block out of order at " + ptr
if padding > 10*1024*1024:
assert False, "More than 10M of padding needed at " + ptr
if padding % 4 != 0:
assert False, "Non-word padding size at " + ptr
while padding > 0:
padding -= 4
outp.append(b"\x00\x00\x00\x00")
if familyid == 0x0 or ((hd[2] & 0x2000) and familyid == hd[7]):
outp.append(block[32 : 32 + datalen])
curraddr = newaddr + datalen
if hd[2] & 0x2000:
if hd[7] in families_found.keys():
if families_found[hd[7]] > newaddr:
families_found[hd[7]] = newaddr
else:
families_found[hd[7]] = newaddr
if prev_flag == None:
prev_flag = hd[2]
if prev_flag != hd[2]:
all_flags_same = False
if blockno == (numblocks - 1):
print("--- UF2 File Header Info ---")
families = load_families()
for family_hex in families_found.keys():
family_short_name = ""
for name, value in families.items():
if value == family_hex:
family_short_name = name
print("Family ID is {:s}, hex value is 0x{:08x}".format(family_short_name,family_hex))
print("Target Address is 0x{:08x}".format(families_found[family_hex]))
if all_flags_same:
print("All block flag values consistent, 0x{:04x}".format(hd[2]))
else:
print("Flags were not all the same")
print("----------------------------")
if len(families_found) > 1 and familyid == 0x0:
outp = []
appstartaddr = 0x0
return b"".join(outp)
def convert_to_carray(file_content):
    """Render binary 'file_content' as a C array definition (UTF-8 bytes)."""
    text = "const unsigned long bindata_len = %d;\n" % len(file_content)
    text += "const unsigned char bindata[] __attribute__((aligned(16))) = {"
    for index, value in enumerate(file_content):
        # Start a fresh line every 16 bytes
        if index % 16 == 0:
            text += "\n"
        text += "0x%02x, " % value
    text += "\n};\n"
    return bytes(text, "utf-8")
def convert_to_uf2(file_content):
    """Pack raw binary 'file_content' into a UF2 image, 256 data bytes per
    512-byte block, based at the module-level 'appstartaddr'."""
    global familyid
    # Each block: 32-byte header + 256 data bytes + zero padding + end magic
    datapadding = b"\x00\x00\x00\x00" * ((512 - 256 - 32 - 4) // 4)
    numblocks = (len(file_content) + 255) // 256
    blocks = []
    for blockno in range(numblocks):
        offset = 256 * blockno
        chunk = file_content[offset:offset + 256].ljust(256, b"\x00")
        flags = 0x2000 if familyid else 0x0  # 0x2000: familyID present
        header = struct.pack(b"<IIIIIIII",
                             UF2_MAGIC_START0, UF2_MAGIC_START1,
                             flags, offset + appstartaddr, 256, blockno,
                             numblocks, familyid)
        block = header + chunk + datapadding + struct.pack(b"<I",
                                                           UF2_MAGIC_END)
        assert len(block) == 512
        blocks.append(block)
    return b"".join(blocks)
class Block:
    """One 256-byte flash page at a fixed address, encodable as a UF2 block."""

    def __init__(self, addr):
        self.addr = addr             # page-aligned target address
        self.bytes = bytearray(256)  # page payload, zero-filled by default

    def encode(self, blockno, numblocks):
        """Serialize this page as one 512-byte UF2 block."""
        global familyid
        flags = 0x2000 if familyid else 0x0  # 0x2000: familyID present
        encoded = struct.pack("<IIIIIIII",
                              UF2_MAGIC_START0, UF2_MAGIC_START1,
                              flags, self.addr, 256, blockno, numblocks,
                              familyid)
        encoded += self.bytes[0:256]
        # Zero-pad up to the trailing end-magic word
        encoded = encoded.ljust(512 - 4, b"\x00")
        return encoded + struct.pack("<I", UF2_MAGIC_END)
def convert_from_hex_to_uf2(buf):
    """Convert Intel HEX text 'buf' into a UF2 image.

    Handles extended linear (type 4) and extended segment (type 2) address
    records, data records (type 0), and stops at EOF (type 1). Sets the
    module-level 'appstartaddr' to the address of the first data byte.
    """
    global appstartaddr
    appstartaddr = None
    upper = 0
    currblock = None
    blocks = []
    for line in buf.split('\n'):
        # Bug fix: skip blank lines -- a trailing newline used to raise
        # IndexError on line[0]
        if not line or line[0] != ":":
            continue
        # Parse the record's hex byte pairs (skip the leading ':')
        i = 1
        rec = []
        while i < len(line) - 1:
            rec.append(int(line[i:i+2], 16))
            i += 2
        tp = rec[3]
        if tp == 4:
            # Extended linear address: upper 16 bits
            upper = ((rec[4] << 8) | rec[5]) << 16
        elif tp == 2:
            # Extended segment address: segment base * 16
            upper = ((rec[4] << 8) | rec[5]) << 4
        elif tp == 1:
            # EOF record
            break
        elif tp == 0:
            addr = upper + ((rec[1] << 8) | rec[2])
            if appstartaddr is None:
                appstartaddr = addr
            i = 4
            while i < len(rec) - 1:
                # Start a new 256-byte page when crossing a page boundary
                if not currblock or currblock.addr & ~0xff != addr & ~0xff:
                    currblock = Block(addr & ~0xff)
                    blocks.append(currblock)
                currblock.bytes[addr & 0xff] = rec[i]
                addr += 1
                i += 1
    numblocks = len(blocks)
    resfile = b""
    for i in range(0, numblocks):
        resfile += blocks[i].encode(i, numblocks)
    return resfile
def to_str(b):
    # Decode raw bytes as UTF-8 text
    return str(b, "utf-8")
def get_drives():
    """Return mounted drives that contain an INFO_UF2.TXT file (i.e. look
    like UF2 bootloader volumes), in a platform-specific way."""
    drives = []
    if sys.platform == "win32":
        # DriveType 2 = removable, filesystem FAT
        r = subprocess.check_output(["wmic", "PATH", "Win32_LogicalDisk",
                                     "get", "DeviceID,", "VolumeName,",
                                     "FileSystem,", "DriveType"])
        for line in to_str(r).split('\n'):
            words = re.split(r'\s+', line)
            if len(words) >= 3 and words[1] == "2" and words[2] == "FAT":
                drives.append(words[0])
    else:
        searchpaths = ["/media"]
        if sys.platform == "darwin":
            searchpaths = ["/Volumes"]
        elif sys.platform == "linux":
            searchpaths += ["/media/" + os.environ["USER"],
                            '/run/media/' + os.environ["USER"]]
        for rootpath in searchpaths:
            if os.path.isdir(rootpath):
                for d in os.listdir(rootpath):
                    # Bug fix: the original re-checked isdir(rootpath)
                    # (always true here) instead of the entry itself, so
                    # plain files under the search path were offered as
                    # flash targets
                    mount = os.path.join(rootpath, d)
                    if os.path.isdir(mount):
                        drives.append(mount)

    def has_info(d):
        # Best-effort probe; any error (permissions, vanished mount, odd
        # path) just means "not a UF2 drive"
        try:
            return os.path.isfile(d + INFO_FILE)
        except:
            return False

    return list(filter(has_info, drives))
def board_id(path):
    # Read the Board-ID field from the drive's INFO_UF2.TXT
    with open(path + INFO_FILE, mode='r') as fh:
        info = fh.read()
    return re.search(r"Board-ID: ([^\r\n]*)", info).group(1)
def list_drives():
    # Print every candidate drive along with its board identifier
    for drive in get_drives():
        print(drive, board_id(drive))
def write_file(name, buf):
    # Dump 'buf' to disk and report how much was written
    with open(name, "wb") as fh:
        fh.write(buf)
    print("Wrote %d bytes to %s" % (len(buf), name))
def load_families():
    # The expectation is that the `uf2families.json` file is in the same
    # directory as this script. Make a path that works using `__file__`
    # which contains the full path to this script.
    here = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(here, "uf2families.json")) as fh:
        raw_families = json.load(fh)
    # Map family short names to numeric IDs (the JSON stores IDs as strings,
    # base auto-detected by int(..., 0))
    return {fam["short_name"]: int(fam["id"], 0) for fam in raw_families}
def main():
    """Command-line entry point: convert between UF2/HEX/BIN, and/or flash.

    Sets the module-level 'appstartaddr' and 'familyid' globals from the
    -b/--base and -f/--family options before any conversion takes place.
    """
    global appstartaddr, familyid

    def error(msg):
        # Print to stderr and exit with a failure status
        print(msg, file=sys.stderr)
        sys.exit(1)

    parser = argparse.ArgumentParser(description='Convert to UF2 or flash directly.')
    parser.add_argument('input', metavar='INPUT', type=str, nargs='?',
                        help='input file (HEX, BIN or UF2)')
    parser.add_argument('-b', '--base', dest='base', type=str,
                        default="0x2000",
                        help='set base address of application for BIN format (default: 0x2000)')
    parser.add_argument('-f', '--family', dest='family', type=str,
                        default="0x0",
                        help='specify familyID - number or name (default: 0x0)')
    parser.add_argument('-o', '--output', metavar="FILE", dest='output', type=str,
                        help='write output to named file; defaults to "flash.uf2" or "flash.bin" where sensible')
    parser.add_argument('-d', '--device', dest="device_path",
                        help='select a device path to flash')
    parser.add_argument('-l', '--list', action='store_true',
                        help='list connected devices')
    parser.add_argument('-c', '--convert', action='store_true',
                        help='do not flash, just convert')
    parser.add_argument('-D', '--deploy', action='store_true',
                        help='just flash, do not convert')
    parser.add_argument('-w', '--wait', action='store_true',
                        help='wait for device to flash')
    parser.add_argument('-C', '--carray', action='store_true',
                        help='convert binary file to a C array, not UF2')
    parser.add_argument('-i', '--info', action='store_true',
                        help='display header information from UF2, do not convert')
    args = parser.parse_args()
    appstartaddr = int(args.base, 0)

    # The family may be given by name (from uf2families.json) or as a number
    families = load_families()
    if args.family.upper() in families:
        familyid = families[args.family.upper()]
    else:
        try:
            familyid = int(args.family, 0)
        except ValueError:
            error("Family ID needs to be a number or one of: " + ", ".join(families.keys()))

    if args.list:
        list_drives()
    else:
        if not args.input:
            error("Need input file")
        with open(args.input, mode='rb') as f:
            inpbuf = f.read()
        from_uf2 = is_uf2(inpbuf)
        ext = "uf2"
        if args.deploy:
            # Flash as-is, no conversion
            outbuf = inpbuf
        elif from_uf2 and not args.info:
            outbuf = convert_from_uf2(inpbuf)
            ext = "bin"
        elif from_uf2 and args.info:
            # Header dump only; nothing gets written out
            outbuf = ""
            convert_from_uf2(inpbuf)
        elif is_hex(inpbuf):
            outbuf = convert_from_hex_to_uf2(inpbuf.decode("utf-8"))
        elif args.carray:
            outbuf = convert_to_carray(inpbuf)
            ext = "h"
        else:
            outbuf = convert_to_uf2(inpbuf)
        if not args.deploy and not args.info:
            print("Converted to %s, output size: %d, start address: 0x%x" %
                  (ext, len(outbuf), appstartaddr))
        if args.convert or ext != "uf2":
            # Use 'is None', not '== None': only a missing -o should trigger
            # the default name
            if args.output is None:
                args.output = "flash." + ext
        if args.output:
            write_file(args.output, outbuf)
        if ext == "uf2" and not args.convert and not args.info:
            drives = get_drives()
            if len(drives) == 0:
                if args.wait:
                    print("Waiting for drive to deploy...")
                    while len(drives) == 0:
                        sleep(0.1)
                        drives = get_drives()
                elif not args.output:
                    error("No drive to deploy.")
            for d in drives:
                print("Flashing %s (%s)" % (d, board_id(d)))
                write_file(d + "/NEW.UF2", outbuf)
# Allow use both as a library (import) and as a command-line tool
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/uf2conv.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,301 |
```python
#!/usr/bin/env python3
# _load_images() builds names dynamically to avoid having to give them twice
# (once for the variable and once for the filename). This forces consistency
# too.
#
# pylint: disable=undefined-variable
"""
Overview
========
A Tkinter-based menuconfig implementation, based around a treeview control and
a help display. The interface should feel familiar to people used to qconf
('make xconfig'). Compatible with both Python 2 and Python 3.
The display can be toggled between showing the full tree and showing just a
single menu (like menuconfig.py). Only single-menu mode distinguishes between
symbols defined with 'config' and symbols defined with 'menuconfig'.
A show-all mode is available that shows invisible items in red.
Supports both mouse and keyboard controls. The following keyboard shortcuts are
available:
Ctrl-S : Save configuration
Ctrl-O : Open configuration
Ctrl-A : Toggle show-all mode
Ctrl-N : Toggle show-name mode
Ctrl-M : Toggle single-menu mode
Ctrl-F, /: Open jump-to dialog
ESC : Close
Running
=======
guiconfig.py can be run either as a standalone executable or by calling the
menuconfig() function with an existing Kconfig instance. The second option is a
bit inflexible in that it will still load and save .config, etc.
When run in standalone mode, the top-level Kconfig file to load can be passed
as a command-line argument. With no argument, it defaults to "Kconfig".
The KCONFIG_CONFIG environment variable specifies the .config file to load (if
it exists) and save. If KCONFIG_CONFIG is unset, ".config" is used.
When overwriting a configuration file, the old version is saved to
<filename>.old (e.g. .config.old).
$srctree is supported through Kconfiglib.
"""
# Note: There's some code duplication with menuconfig.py below, especially for
# the help text. Maybe some of it could be moved into kconfiglib.py or a shared
# helper script, but OTOH it's pretty nice to have things standalone and
# customizable.
import errno
import os
import re
import sys
_PY2 = sys.version_info[0] < 3
if _PY2:
# Python 2
from Tkinter import *
import ttk
import tkFont as font
import tkFileDialog as filedialog
import tkMessageBox as messagebox
else:
# Python 3
from tkinter import *
import tkinter.ttk as ttk
import tkinter.font as font
from tkinter import filedialog, messagebox
from kconfiglib import Symbol, Choice, MENU, COMMENT, MenuNode, \
BOOL, TRISTATE, STRING, INT, HEX, \
AND, OR, \
expr_str, expr_value, split_expr, \
standard_sc_expr_str, \
TRI_TO_STR, TYPE_TO_STR, \
standard_kconfig, standard_config_filename
# If True, use GIF image data embedded in this file instead of separate GIF
# files. See _load_images().
_USE_EMBEDDED_IMAGES = True
# Help text for the jump-to dialog
_JUMP_TO_HELP = """\
Type one or more strings/regexes and press Enter to list items that match all
of them. Python's regex flavor is used (see the 're' module). Double-clicking
an item will jump to it. Item values can be toggled directly within the dialog.\
"""
def _main():
    # Standalone entry point: parse the standard Kconfig command line
    # (top-level Kconfig file argument, KCONFIG_CONFIG, etc.) and run the GUI
    menuconfig(standard_kconfig(__doc__))
# Global variables used below:
#
# _root:
# The Toplevel instance for the main window
#
# _tree:
# The Treeview in the main window
#
# _jump_to_tree:
# The Treeview in the jump-to dialog. None if the jump-to dialog isn't
# open. Doubles as a flag.
#
# _jump_to_matches:
# List of Nodes shown in the jump-to dialog
#
# _menupath:
# The Label that shows the menu path of the selected item
#
# _backbutton:
# The button shown in single-menu mode for jumping to the parent menu
#
# _status_label:
# Label with status text shown at the bottom of the main window
# ("Modified", "Saved to ...", etc.)
#
# _id_to_node:
# We can't use Node objects directly as Treeview item IDs, so we use their
# id()s instead. This dictionary maps Node id()s back to Nodes. (The keys
# are actually str(id(node)), just to simplify lookups.)
#
# _cur_menu:
# The current menu. Ignored outside single-menu mode.
#
# _show_all_var/_show_name_var/_single_menu_var:
# Tkinter Variable instances bound to the corresponding checkboxes
#
# _show_all/_single_menu:
# Plain Python bools that track _show_all_var and _single_menu_var, to
# speed up and simplify things a bit
#
# _conf_filename:
# File to save the configuration to
#
# _minconf_filename:
# File to save minimal configurations to
#
# _conf_changed:
# True if the configuration has been changed. If False, we don't bother
# showing the save-and-quit dialog.
#
# We reset this to False whenever the configuration is saved.
#
# _*_img:
# PhotoImage instances for images
def menuconfig(kconf):
    """
    Launches the configuration interface, returning after the user exits.

    kconf:
      Kconfig instance to be configured
    """
    global _kconf
    global _conf_filename
    global _minconf_filename
    global _jump_to_tree
    global _cur_menu
    _kconf = kconf
    # None doubles as a "jump-to dialog not open" flag
    _jump_to_tree = None
    _create_id_to_node()
    _create_ui()
    # Filename to save configuration to
    _conf_filename = standard_config_filename()
    # Load existing configuration and check if it's outdated
    _set_conf_changed(_load_config())
    # Filename to save minimal configuration to
    _minconf_filename = "defconfig"
    # Current menu in single-menu mode
    _cur_menu = _kconf.top_node
    # Any visible items in the top menu?
    if not _shown_menu_nodes(kconf.top_node):
        # Nothing visible. Start in show-all mode and try again.
        _show_all_var.set(True)
        if not _shown_menu_nodes(kconf.top_node):
            # Give up and show an error. It's nice to be able to assume that
            # the tree is non-empty in the rest of the code.
            _root.wait_visibility()
            messagebox.showerror(
                "Error",
                "Empty configuration -- nothing to configure.\n\n"
                "Check that environment variables are set properly.")
            _root.destroy()
            return
    # Build the initial tree
    _update_tree()
    # Select the first item and focus the Treeview, so that keyboard controls
    # work immediately
    _select(_tree, _tree.get_children()[0])
    _tree.focus_set()
    # Make geometry information available for centering the window. This
    # indirectly creates the window, so hide it so that it's never shown at the
    # old location.
    _root.withdraw()
    _root.update_idletasks()
    # Center the window
    _root.geometry("+{}+{}".format(
        (_root.winfo_screenwidth() - _root.winfo_reqwidth())//2,
        (_root.winfo_screenheight() - _root.winfo_reqheight())//2))
    # Show it
    _root.deiconify()
    # Prevent the window from being automatically resized. Otherwise, it
    # changes size when scrollbars appear/disappear before the user has
    # manually resized it.
    _root.geometry(_root.geometry())
    # Blocks until the window is closed
    _root.mainloop()
def _load_config():
    # Loads any existing .config file. See the Kconfig.load_config() docstring.
    #
    # Returns True if .config is missing or outdated. We always prompt for
    # saving the configuration in that case.
    #
    # load_config() returns a message describing what was loaded; echo it to
    # the terminal
    print(_kconf.load_config())
    if not os.path.exists(_conf_filename):
        # No .config
        return True
    return _needs_save()
def _needs_save():
    """Return True if the just-loaded .config would change when re-saved."""
    if _kconf.missing_syms:
        # The .config assigned symbols that aren't defined anywhere
        return True

    def differs(sym):
        # True if saving would write a different value for 'sym' than the
        # one read from the .config file
        if sym.user_value is None:
            # Symbol absent from the .config; saving would add it if it
            # produces any configuration output
            return bool(sym.config_string)
        if sym.orig_type in (BOOL, TRISTATE):
            return sym.tri_value != sym.user_value
        # string/int/hex
        return sym.str_value != sym.user_value

    return any(differs(sym) for sym in _kconf.unique_defined_syms)
def _create_id_to_node():
    """Build the str(id(node)) -> MenuNode map used for Treeview item IDs."""
    global _id_to_node
    mapping = {}
    for node in _kconf.node_iter():
        # Treeview item IDs are strings, hence str()
        mapping[str(id(node))] = node
    _id_to_node = mapping
def _create_ui():
    # Creates the main window UI
    global _root
    global _tree
    # Create the root window. This initializes Tkinter and makes e.g.
    # PhotoImage available, so do it early.
    _root = Tk()
    _load_images()
    _init_misc_ui()
    _fix_treeview_issues()
    _create_top_widgets()
    # Create the pane with the Kconfig tree and description text
    panedwindow, _tree = _create_kconfig_tree_and_desc(_root)
    panedwindow.grid(column=0, row=1, sticky="nsew")
    _create_status_bar()
    _root.columnconfigure(0, weight=1)
    # Only the pane with the Kconfig tree and description grows vertically
    _root.rowconfigure(1, weight=1)
    # Start with show-name disabled
    _do_showname()
    # Tree-local navigation keys
    _tree.bind("<Left>", _tree_left_key)
    _tree.bind("<Right>", _tree_right_key)
    # Note: Binding this for the jump-to tree as well would cause issues due to
    # the Tk bug mentioned in _tree_open()
    _tree.bind("<<TreeviewOpen>>", _tree_open)
    # add=True to avoid overriding the description text update
    _tree.bind("<<TreeviewSelect>>", _update_menu_path, add=True)
    # Global keyboard shortcuts (see the module docstring)
    _root.bind("<Control-s>", _save)
    _root.bind("<Control-o>", _open)
    _root.bind("<Control-a>", _toggle_showall)
    _root.bind("<Control-n>", _toggle_showname)
    _root.bind("<Control-m>", _toggle_tree_mode)
    _root.bind("<Control-f>", _jump_to_dialog)
    _root.bind("/", _jump_to_dialog)
    _root.bind("<Escape>", _on_quit)
def _load_images():
    # Loads GIF images, creating the global _*_img PhotoImage variables.
    # Base64-encoded images embedded in this script are used if
    # _USE_EMBEDDED_IMAGES is True, and separate image files in the same
    # directory as the script otherwise.
    #
    # Using a global variable indirectly prevents the image from being
    # garbage-collected. Passing an image to a Tkinter function isn't enough to
    # keep it alive.
    def load_image(name, data):
        # Builds the global name dynamically ("_<name>_img") so each image
        # only has to be named once; see the pylint note at the top of the
        # file
        var_name = "_{}_img".format(name)
        if _USE_EMBEDDED_IMAGES:
            globals()[var_name] = PhotoImage(data=data, format="gif")
        else:
            globals()[var_name] = PhotoImage(
                file=os.path.join(os.path.dirname(__file__), name + ".gif"),
                format="gif")
    # Note: Base64 data can be put on the clipboard with
    # $ base64 -w0 foo.gif | xclip
    # NOTE(review): several of the strings below contain "your_sha256_hash"/
    # "your_sha512_hash" placeholders, so the embedded image data appears to
    # have been corrupted by a sanitizer -- restore from upstream guiconfig.py
    load_image("icon", "R0lGODlhMAAwAPEDAAAAAADQAO7u7v///yH5BAUKAAMALAAAAAAwADAAAAL/nI+gy+your_sha256_hashYxYv6A0ubuqYhWk66tVTE4enHer7jcKvt0LLUw6P45lvEprT6c0+your_sha256_hashhkCsmY+nhK6uJ6t1mrOhuJqfu6+WYiCiwl7HtLjNSZZZis/MeM7NY3TaRKS40ooDeoiVqIultsrav92bi9c3a5KkkOsOJZpSS99m4k/0zPng4Gks9JSbB+8DIcoQfnjwpZCHv5W+ip4aQrKrB0uOikYhiMCBw1/uPoQUMBADs=")
    load_image("n_bool", "R0lGODdhEAAQAPAAAAgICP///ywAAAAAEAAQAAACIISPacHtvp5kcb5qG85hZ2+BkyiRF8BBaEqtrKkqslEAADs=")
    load_image("y_bool", "R0lGODdhEAAQAPEAAAgICADQAP///your_sha256_hashKuCnhiupIWjSSjAFuWhSCIKADs=")
    load_image("n_tri", "R0lGODlhEAAQAPD/AAEBAf///yH5BAUKAAIALAAAAAAQABAAAAInlI+pBrAKQnCPSUlXvFhznlkfeGwjKZhnJ65h6nrfi6h0st2QXikFADs=")
    load_image("m_tri", "R0lGODlhEAAQAPEDAAEBAeQMuv///wAAACH5BAUKAAMALAAAAAAQABAAAAI5nI+your_sha256_hashI24PhfEoLADs=")
    load_image("y_tri", "R0lGODlhEAAQAPEDAAICAgDQAP///wAAACH5BAUKAAMALAAAAAAQABAAAAI0nI+pBrAYBhDCRRUypfmergmgZ4xjMpmaw2zmxk7cCB+pWiVqp4MzDwn9FhGZ5WFjIZeGAgA7")
    load_image("m_my", "R0lGODlhEAAQAPEDAAAAAOQMuv///wAAACH5BAUKAAMALAAAAAAQABAAAAI5nIGpxiAPI2ghxFinq/ZygQhc94zgZopmOLYf67anGr+oZdp02emfV5n9MEHN5QhqICETxkABbQ4KADs=")
    load_image("y_my", "R0lGODlhEAAQAPH/AAAAAADQAAPRA////yH5BAUKAAQALAAAAAAQABAAAAM+your_sha512_hash==")
    load_image("n_locked", "R0lGODlhEAAQAPABAAAAAP///your_sha256_hash3pxUAAAOw==")
    load_image("m_locked", "R0lGODlhEAAQAPD/your_sha256_hashInZoyiore05walolV39ftxsYHgL9QBBMBGFEFAAAOw==")
    load_image("y_locked", "R0lGODlhEAAQAPD/your_sha256_hashmR3KWq4uK74IOnp0HQPmnD3cOVlUIAgKsShkFAAAOw==")
    load_image("not_selected", "R0lGODlhEAAQAPD/AAAAAP///your_sha256_hashYrqigCxZoMelU6No9gdCgA7")
    load_image("selected", "R0lGODlhEAAQAPD/AAAAAP///yH5BAUKAAIALAAAAAAQABAAAAIzlA2px6IBw2IpWglOvTah/kTZhimASJomiqonlLov1qptHTsgKSEzh9H8QI0QzNPwmRoFADs=")
    load_image("edit", your_sha256_hashAAAQABAAAANCWLqw/your_sha512_hash=")
def _fix_treeview_issues():
    # Fixes some Treeview issues
    global _treeview_rowheight
    style = ttk.Style()
    # The treeview rowheight isn't adjusted automatically on high-DPI displays,
    # so do it ourselves. The font will probably always be TkDefaultFont, but
    # play it safe and look it up.
    _treeview_rowheight = font.Font(font=style.lookup("Treeview", "font")) \
        .metrics("linespace") + 2
    style.configure("Treeview", rowheight=_treeview_rowheight)
    # Work around a Tk regression (the original comment's URL was lost in
    # extraction -- presumably the Tk 8.6.9 style.map() change; confirm
    # against upstream guiconfig.py) which breaks tag background colors
    for option in "foreground", "background":
        # Filter out any styles starting with ("!disabled", "!selected", ...).
        # style.map() returns an empty list for missing options, so this should
        # be future-safe.
        style.map(
            "Treeview",
            **{option: [elm for elm in style.map("Treeview", query_opt=option)
                        if elm[:2] != ("!disabled", "!selected")]})
def _init_misc_ui():
    # Does misc. UI initialization, like setting the title, icon, and theme
    _root.title(_kconf.mainmenu_text)
    # iconphoto() isn't available in Python 2's Tkinter, so go through the
    # lower-level Tcl interface instead
    _root.tk.call("wm", "iconphoto", _root._w, "-default", _icon_img)
    # Reducing the width of the window to 1 pixel makes it move around, at
    # least on GNOME. Prevent weird stuff like that.
    _root.minsize(128, 128)
    _root.protocol("WM_DELETE_WINDOW", _on_quit)
    # Use the 'clam' theme on *nix if it's available. It looks nicer than the
    # 'default' theme.
    if _root.tk.call("tk", "windowingsystem") == "x11":
        style = ttk.Style()
        if "clam" in style.theme_names():
            style.theme_use("clam")
def _create_top_widgets():
    # Creates the controls above the Kconfig tree in the main window:
    # the button row, the mode checkboxes, the back button (single-menu
    # mode only), and the menu-path label
    global _show_all_var
    global _show_name_var
    global _single_menu_var
    global _menupath
    global _backbutton
    topframe = ttk.Frame(_root)
    topframe.grid(column=0, row=0, sticky="ew")
    ttk.Button(topframe, text="Save", command=_save) \
        .grid(column=0, row=0, sticky="ew", padx=".05c", pady=".05c")
    ttk.Button(topframe, text="Save as...", command=_save_as) \
        .grid(column=1, row=0, sticky="ew")
    ttk.Button(topframe, text="Save minimal (advanced)...",
               command=_save_minimal) \
        .grid(column=2, row=0, sticky="ew", padx=".05c")
    ttk.Button(topframe, text="Open...", command=_open) \
        .grid(column=3, row=0)
    ttk.Button(topframe, text="Jump to...", command=_jump_to_dialog) \
        .grid(column=4, row=0, padx=".05c")
    _show_name_var = BooleanVar()
    ttk.Checkbutton(topframe, text="Show name", command=_do_showname,
                    variable=_show_name_var) \
        .grid(column=0, row=1, sticky="nsew", padx=".05c", pady="0 .05c",
              ipady=".2c")
    _show_all_var = BooleanVar()
    ttk.Checkbutton(topframe, text="Show all", command=_do_showall,
                    variable=_show_all_var) \
        .grid(column=1, row=1, sticky="nsew", pady="0 .05c")
    # Allow the show-all and single-menu status to be queried via plain global
    # Python variables, which is faster and simpler
    def show_all_updated(*_):
        # Mirrors _show_all_var into the plain bool _show_all
        global _show_all
        _show_all = _show_all_var.get()
    _trace_write(_show_all_var, show_all_updated)
    _show_all_var.set(False)
    _single_menu_var = BooleanVar()
    ttk.Checkbutton(topframe, text="Single-menu mode", command=_do_tree_mode,
                    variable=_single_menu_var) \
        .grid(column=2, row=1, sticky="nsew", padx=".05c", pady="0 .05c")
    _backbutton = ttk.Button(topframe, text="<--", command=_leave_menu,
                             state="disabled")
    _backbutton.grid(column=0, row=4, sticky="nsew", padx=".05c", pady="0 .05c")
    def tree_mode_updated(*_):
        # Mirrors _single_menu_var into _single_menu and shows/hides the
        # back button accordingly
        global _single_menu
        _single_menu = _single_menu_var.get()
        if _single_menu:
            _backbutton.grid()
        else:
            _backbutton.grid_remove()
    _trace_write(_single_menu_var, tree_mode_updated)
    _single_menu_var.set(False)
    # Column to the right of the buttons that the menu path extends into, so
    # that it can grow wider than the buttons
    topframe.columnconfigure(5, weight=1)
    _menupath = ttk.Label(topframe)
    _menupath.grid(column=0, row=3, columnspan=6, sticky="w", padx="0.05c",
                   pady="0 .05c")
def _create_kconfig_tree_and_desc(parent):
    # Creates a Panedwindow with a Treeview that shows Kconfig nodes and a Text
    # that shows a description of the selected node. Returns a tuple with the
    # Panedwindow and the Treeview. This code is shared between the main window
    # and the jump-to dialog.
    panedwindow = ttk.Panedwindow(parent, orient=VERTICAL)
    tree_frame, tree = _create_kconfig_tree(panedwindow)
    desc_frame, desc = _create_kconfig_desc(panedwindow)
    panedwindow.add(tree_frame, weight=1)
    panedwindow.add(desc_frame)
    def tree_select(_):
        # Updates the description text to match the tree selection.
        # The Text widget does not allow editing the text in its disabled
        # state. We need to temporarily enable it.
        desc["state"] = "normal"
        sel = tree.selection()
        if not sel:
            desc.delete("1.0", "end")
            desc["state"] = "disabled"
            return
        # Text.replace() is not available in Python 2's Tkinter
        desc.delete("1.0", "end")
        desc.insert("end", _info_str(_id_to_node[sel[0]]))
        desc["state"] = "disabled"
    tree.bind("<<TreeviewSelect>>", tree_select)
    # Mouse and keyboard value controls (shared with the jump-to dialog)
    tree.bind("<1>", _tree_click)
    tree.bind("<Double-1>", _tree_double_click)
    tree.bind("<Return>", _tree_enter)
    tree.bind("<KP_Enter>", _tree_enter)
    tree.bind("<space>", _tree_toggle)
    tree.bind("n", _tree_set_val(0))
    tree.bind("m", _tree_set_val(1))
    tree.bind("y", _tree_set_val(2))
    return panedwindow, tree
def _create_kconfig_tree(parent):
    # Creates a Treeview for showing Kconfig nodes, plus its containing
    # frame. Returns (frame, treeview).
    frame = ttk.Frame(parent)
    tree = ttk.Treeview(frame, selectmode="browse", height=20,
                        columns=("name",))
    tree.heading("#0", text="Option", anchor="w")
    tree.heading("name", text="Name", anchor="w")
    # One tag per value image; _img_tag() picks the tag for a node.
    # (The original configured "m-tri" twice; the duplicate is dropped.)
    tree.tag_configure("n-bool", image=_n_bool_img)
    tree.tag_configure("y-bool", image=_y_bool_img)
    tree.tag_configure("n-tri", image=_n_tri_img)
    tree.tag_configure("m-tri", image=_m_tri_img)
    tree.tag_configure("y-tri", image=_y_tri_img)
    tree.tag_configure("m-my", image=_m_my_img)
    tree.tag_configure("y-my", image=_y_my_img)
    tree.tag_configure("n-locked", image=_n_locked_img)
    tree.tag_configure("m-locked", image=_m_locked_img)
    tree.tag_configure("y-locked", image=_y_locked_img)
    tree.tag_configure("not-selected", image=_not_selected_img)
    tree.tag_configure("selected", image=_selected_img)
    tree.tag_configure("edit", image=_edit_img)
    # Invisible items (show-all mode) are drawn in red
    tree.tag_configure("invisible", foreground="red")
    tree.grid(column=0, row=0, sticky="nsew")
    _add_vscrollbar(frame, tree)
    frame.columnconfigure(0, weight=1)
    frame.rowconfigure(0, weight=1)
    # Create items for all menu nodes. These can be detached/moved later.
    # Micro-optimize this a bit.
    insert = tree.insert
    id_ = id
    Symbol_ = Symbol
    for node in _kconf.node_iter():
        item = node.item
        insert("", "end", iid=id_(node),
               values=item.name if item.__class__ is Symbol_ else "")
    return frame, tree
def _create_kconfig_desc(parent):
    # Creates a Text for showing the description of the selected Kconfig node,
    # plus its containing frame. Returns (frame, text).
    frame = ttk.Frame(parent)
    desc = Text(frame, height=12, wrap="word", borderwidth=0,
                state="disabled")
    desc.grid(column=0, row=0, sticky="nsew")
    # Work around not being able to Ctrl-C/V text from a disabled Text widget:
    # give the widget focus on click so selection/copy shortcuts work (the
    # original comment's reference URL was lost in extraction)
    desc.bind("<1>", lambda _: desc.focus_set())
    _add_vscrollbar(frame, desc)
    frame.columnconfigure(0, weight=1)
    frame.rowconfigure(0, weight=1)
    return frame, desc
def _add_vscrollbar(parent, widget):
    # Adds a vertical scrollbar to 'widget' that's only shown as needed
    vscrollbar = ttk.Scrollbar(parent, orient="vertical",
                               command=widget.yview)
    vscrollbar.grid(column=1, row=0, sticky="ns")
    def yscrollcommand(first, last):
        # Only show the scrollbar when needed. 'first' and 'last' are
        # strings (fractions of the widget's content currently visible).
        if float(first) <= 0.0 and float(last) >= 1.0:
            # Everything is visible; hide the scrollbar
            vscrollbar.grid_remove()
        else:
            vscrollbar.grid()
        vscrollbar.set(first, last)
    widget["yscrollcommand"] = yscrollcommand
def _create_status_bar():
    # Creates the status bar at the bottom of the main window, setting the
    # _status_label global used by _set_status()
    global _status_label
    _status_label = ttk.Label(_root, anchor="e", padding="0 0 0.4c 0")
    _status_label.grid(column=0, row=3, sticky="ew")
def _set_status(s):
    # Sets the text in the status bar to 's'
    _status_label["text"] = s
def _set_conf_changed(changed):
    # Updates the status re. whether there are unsaved changes
    global _conf_changed
    _conf_changed = changed
    if changed:
        _set_status("Modified")
    # NOTE(review): the status text is deliberately left alone when 'changed'
    # is False -- presumably the save path sets its own "Saved to ..." text;
    # confirm against the rest of the file
def _update_tree():
    # Updates the Kconfig tree in the main window by first detaching all nodes
    # and then updating and reattaching them. The tree structure might have
    # changed.
    # If a selected/focused item is detached and later reattached, it stays
    # selected/focused. That can give multiple selections even though
    # selectmode=browse. Save and later restore the selection and focus as a
    # workaround.
    old_selection = _tree.selection()
    old_focus = _tree.focus()
    # Detach all tree items before re-stringing them. This is relatively fast,
    # luckily.
    _tree.detach(*_id_to_node.keys())
    if _single_menu:
        _build_menu_tree()
    else:
        _build_full_tree(_kconf.top_node)
    # Restore the pre-detach selection/focus
    _tree.selection_set(old_selection)
    _tree.focus(old_focus)
def _build_full_tree(menu):
    # Updates the tree starting from menu.list, in full-tree mode. To speed
    # things up, only open menus are updated. The menu-at-a-time logic here is
    # to deal with invisible items that can show up outside show-all mode (see
    # _shown_full_nodes()).
    for node in _shown_full_nodes(menu):
        _add_to_tree(node, _kconf.top_node)
        # _shown_full_nodes() includes nodes from menus rooted at symbols, so
        # we only need to check "real" menus/choices here
        if node.list and not isinstance(node.item, Symbol):
            if _tree.item(id(node), "open"):
                # Open menu: recurse to fill in its children too
                _build_full_tree(node)
            else:
                # We're just probing here, so _shown_menu_nodes() will work
                # fine, and might be a bit faster
                shown = _shown_menu_nodes(node)
                if shown:
                    # Dummy element to make the open/closed toggle appear
                    _tree.move(id(shown[0]), id(shown[0].parent), "end")
def _shown_full_nodes(menu):
    # Returns the list of menu nodes shown in 'menu' (a menu node for a menu)
    # for full-tree mode. A tricky detail is that invisible items need to be
    # shown if they have visible children.
    def rec(node):
        # Walks the 'next' chain starting at 'node', recursing into
        # symbol-rooted implicit menus
        res = []
        while node:
            if _visible(node) or _show_all:
                res.append(node)
                if node.list and isinstance(node.item, Symbol):
                    # Nodes from menu created from dependencies
                    res += rec(node.list)
            elif node.list and isinstance(node.item, Symbol):
                # Show invisible symbols (defined with either 'config' and
                # 'menuconfig') if they have visible children. This can happen
                # for an m/y-valued symbol with an optional prompt
                # ('prompt "foo" is COND') that is currently disabled.
                shown_children = rec(node.list)
                if shown_children:
                    res.append(node)
                    res += shown_children
            node = node.next
        return res
    return rec(menu.list)
def _build_menu_tree():
    # Updates the tree in single-menu mode, attaching only the nodes of the
    # current menu (_cur_menu). See _build_full_tree() as well.
    for node in _shown_menu_nodes(_cur_menu):
        _add_to_tree(node, _cur_menu)
def _shown_menu_nodes(menu):
    # Used for single-menu mode. Similar to _shown_full_nodes(), but doesn't
    # include children of symbols defined with 'menuconfig'.
    def rec(node):
        res = []
        while node:
            if _visible(node) or _show_all:
                res.append(node)
                # Dive into implicit menus, but not into 'menuconfig' menus
                # (those are entered explicitly in single-menu mode)
                if node.list and not node.is_menuconfig:
                    res += rec(node.list)
            elif node.list and isinstance(node.item, Symbol):
                shown_children = rec(node.list)
                if shown_children:
                    # Invisible item with visible children
                    res.append(node)
                    if not node.is_menuconfig:
                        res += shown_children
            node = node.next
        return res
    return rec(menu.list)
def _visible(node):
    """Whether 'node' is shown in the menu outside show-all mode."""
    if not node.prompt:
        # Promptless nodes are never shown directly
        return node.prompt
    # Menus carry an extra 'visible if' condition on top of the prompt
    hidden_menu = node.item == MENU and not expr_value(node.visibility)
    return expr_value(node.prompt[1]) and not hidden_menu
def _add_to_tree(node, top):
    # Adds 'node' to the tree, at the end of its menu. We rely on going through
    # the nodes linearly to get the correct order. 'top' holds the menu that
    # corresponds to the top-level menu, and can vary in single-menu mode.
    parent = node.parent
    # Children of 'top' go at the root of the Treeview ("")
    _tree.move(id(node), "" if parent is top else id(parent), "end")
    _tree.item(
        id(node),
        text=_node_str(node),
        # The _show_all test avoids showing invisible items in red outside
        # show-all mode, which could look confusing/broken. Invisible symbols
        # are shown outside show-all mode if an invisible symbol has visible
        # children in an implicit menu.
        tags=_img_tag(node) if _visible(node) or not _show_all else
            _img_tag(node) + " invisible")
def _node_str(node):
    # Returns the string shown to the right of the image (if any) for the node
    if node.prompt:
        if node.item == COMMENT:
            s = "*** {} ***".format(node.prompt[0])
        else:
            s = node.prompt[0]
        if isinstance(node.item, Symbol):
            sym = node.item
            # Print "(NEW)" next to symbols without a user value (from e.g. a
            # .config), but skip it for choice symbols in choices in y mode,
            # and for symbols of UNKNOWN type (which generate a warning though)
            if sym.user_value is None and sym.type and not \
               (sym.choice and sym.choice.tri_value == 2):
                s += " (NEW)"
    elif isinstance(node.item, Symbol):
        # Symbol without prompt (can show up in show-all)
        s = "<{}>".format(node.item.name)
    else:
        # Choice without prompt. Use standard_sc_expr_str() so that it shows up
        # as '<choice (name if any)>'.
        s = standard_sc_expr_str(node.item)
    if isinstance(node.item, Symbol):
        sym = node.item
        # Show the current value inline for value-carrying symbols
        if sym.orig_type == STRING:
            s += ": " + sym.str_value
        elif sym.orig_type in (INT, HEX):
            s = "({}) {}".format(sym.str_value, s)
    elif isinstance(node.item, Choice) and node.item.tri_value == 2:
        # Print the prompt of the selected symbol after the choice for
        # choices in y mode
        sym = node.item.selection
        if sym:
            for sym_node in sym.nodes:
                # Use the prompt used at this choice location, in case the
                # choice symbol is defined in multiple locations
                if sym_node.parent is node and sym_node.prompt:
                    s += " ({})".format(sym_node.prompt[0])
                    break
            else:
                # If the symbol isn't defined at this choice location, then
                # just use whatever prompt we can find for it
                for sym_node in sym.nodes:
                    if sym_node.prompt:
                        s += " ({})".format(sym_node.prompt[0])
                        break
    # In single-menu mode, print "--->" next to nodes that have menus that can
    # potentially be entered. Print "----" if the menu is empty. We don't allow
    # those to be entered.
    if _single_menu and node.is_menuconfig:
        s += " --->" if _shown_menu_nodes(node) else " ----"
    return s
def _img_tag(node):
    """Return the Treeview tag naming the value image for 'node'.

    An empty string means the node gets no image.
    """
    item = node.item
    if item in (MENU, COMMENT) or not item.orig_type:
        # Plain menus/comments and typeless items carry no value image
        return ""
    if item.orig_type in (STRING, INT, HEX):
        return "edit"
    # Remaining types are BOOL and TRISTATE
    if _is_y_mode_choice_sym(item):
        # Radio-button look for symbols in y-mode choices
        if item.choice.selection is item:
            return "selected"
        return "not-selected"
    if len(item.assignable) <= 1:
        # Pinned to a single value
        return "" if isinstance(item, Choice) else item.str_value + "-locked"
    if item.type == BOOL:
        suffix = "-bool"
    elif item.assignable == (1, 2):
        # Both m and y can be assigned
        suffix = "-my"
    else:
        suffix = "-tri"
    return item.str_value + suffix
def _is_y_mode_choice_sym(item):
    """Truthy for choice symbols whose containing choice is in y mode.

    The choice mode is an upper bound on the visibility of choice symbols,
    so checking the symbol's own visibility suffices.
    """
    if not isinstance(item, Symbol):
        return False
    return item.choice and item.visibility == 2
def _tree_click(event):
    # Click on the Kconfig Treeview. Clicking the value image toggles/edits
    # the item; clicks elsewhere get the default Treeview behavior.
    tree = event.widget
    if tree.identify_element(event.x, event.y) == "image":
        item = tree.identify_row(event.y)
        # Select the item before possibly popping up a dialog for
        # string/int/hex items, so that its help is visible
        _select(tree, item)
        _change_node(_id_to_node[item], tree.winfo_toplevel())
        # Tell Tk not to run the default click handling too
        return "break"
def _tree_double_click(event):
    """Double-click handler for the Kconfig treeview."""
    # Ignore double-clicks in the tree heading area, which would otherwise
    # cause weirdness
    if _in_heading(event):
        return
    return _tree_enter(event)
def _in_heading(event):
# Returns True if 'event' took place in the tree heading
tree = event.widget
return hasattr(tree, "identify_region") and \
tree.identify_region(event.x, event.y) in ("heading", "separator")
def _tree_enter(event):
    # Enter press or double-click within the Kconfig treeview. Prefer to
    # open/close/enter menus, but toggle the value if that's not possible.
    tree = event.widget
    sel = tree.focus()
    if sel:
        node = _id_to_node[sel]
        if tree.get_children(sel):
            # Has children in the tree: toggle open/closed
            _tree_open_close := None if False else None  # noqa -- see below
            _tree_toggle_open(sel)
        elif _single_menu_mode_menu(node, tree):
            _enter_menu_and_select_first(node)
        else:
            _change_node(node, tree.winfo_toplevel())
        # Suppress the default Treeview handling
        return "break"
def _tree_toggle(event):
    # Space press within the Kconfig treeview. Prefer to toggle the value, but
    # open/close/enter the menu if that's not possible.
    tree = event.widget
    sel = tree.focus()
    if sel:
        node = _id_to_node[sel]
        if _changeable(node):
            _change_node(node, tree.winfo_toplevel())
        elif _single_menu_mode_menu(node, tree):
            _enter_menu_and_select_first(node)
        elif tree.get_children(sel):
            _tree_toggle_open(sel)
        # Suppress the default Treeview handling
        return "break"
def _tree_left_key(_):
    """Left-arrow handler: leave the current menu in single-menu mode."""
    if not _single_menu:
        # Full-tree mode: fall through to the default Treeview action
        return None
    _leave_menu()
    return "break"
def _tree_right_key(_):
    """Right-arrow handler: enter the focused menu in single-menu mode."""
    focused = _tree.focus()
    if not focused:
        # Nothing focused: default Treeview action
        return None
    node = _id_to_node[focused]
    if not _single_menu_mode_menu(node, _tree):
        # Not an enterable menu: default Treeview action
        return None
    _enter_menu_and_select_first(node)
    return "break"
def _single_menu_mode_menu(node, tree):
    """Truthy if single-menu mode is on and 'node' is an enterable menu.

    Only applies to the main-window tree ('tree is _tree'), never the
    jump-to dialog's tree.
    """
    if not _single_menu or tree is not _tree:
        return False
    return node.is_menuconfig and _shown_menu_nodes(node)
def _changeable(node):
    """Truthy if 'node' holds a Symbol/Choice whose value can be changed."""
    sc = node.item
    if not isinstance(sc, (Symbol, Choice)):
        return False
    # This will hit for invisible symbols, which appear in show-all mode and
    # when an invisible symbol has visible children (which can happen e.g. for
    # symbols with optional prompts)
    if not (node.prompt and expr_value(node.prompt[1])):
        return False
    if sc.orig_type in (STRING, INT, HEX):
        # Always editable via a dialog
        return True
    return len(sc.assignable) > 1 or _is_y_mode_choice_sym(sc)
def _tree_toggle_open(item):
    # Opens/closes the Treeview item 'item'
    if _tree.item(item, "open"):
        _tree.item(item, open=False)
    else:
        node = _id_to_node[item]
        if not isinstance(node.item, Symbol):
            # Can only get here in full-tree mode.
            # Lazily populate the menu's children before showing them.
            _build_full_tree(node)
        _tree.item(item, open=True)
def _tree_set_val(tri_val):
    # Factory returning an event handler bound to the tristate value
    # 'tri_val' (0/1/2 for n/m/y). Used to create the n/m/y key handlers.
    def tree_set_val(event):
        # n/m/y press within the Kconfig treeview
        # Sets the value of the currently selected item to 'tri_val', if that
        # value can be assigned
        sel = event.widget.focus()
        if sel:
            sc = _id_to_node[sel].item
            # Silently ignore the keypress if the value can't be assigned
            if isinstance(sc, (Symbol, Choice)) and tri_val in sc.assignable:
                _set_val(sc, tri_val)
    return tree_set_val
def _tree_open(_):
    # Lazily populates the Kconfig tree when menus are opened in full-tree mode
    # (bound to the Treeview open event)
    if _single_menu:
        # Work around path_to_url
        # ("ttk::treeview open/closed indicators can be toggled while hidden").
        # Clicking on the hidden indicator will call _build_full_tree() in
        # single-menu mode otherwise.
        return
    node = _id_to_node[_tree.focus()]
    # _shown_full_nodes() includes nodes from menus rooted at symbols, so we
    # only need to check "real" menus and choices here
    if not isinstance(node.item, Symbol):
        _build_full_tree(node)
def _update_menu_path(_):
    # Updates the displayed menu path when nodes are selected in the Kconfig
    # treeview (bound to the <<TreeviewSelect>> event)
    sel = _tree.selection()
    # Clear the path label when nothing is selected
    _menupath["text"] = _menu_path_info(_id_to_node[sel[0]]) if sel else ""
def _item_row(item):
    # Returns the row number 'item' appears on within the Kconfig treeview,
    # starting from the top of the tree. Used to preserve scrolling.
    #
    # ttkTreeview.c in the Tk sources defines a RowNumber() function that does
    # the same thing, but it's not exposed.
    row = 0
    # Walk backwards/upwards towards the tree root, adding up the rows
    # contributed by everything that precedes 'item'
    while True:
        prev = _tree.prev(item)
        if prev:
            # Previous sibling: account for it and its open children
            item = prev
            row += _n_rows(item)
        else:
            # No previous sibling: move to the parent, which occupies one row
            item = _tree.parent(item)
            if not item:
                return row
            row += 1
def _n_rows(item):
    # _item_row() helper. Returns the number of rows occupied by 'item' and
    # its children.
    row = 1
    if _tree.item(item, "open"):
        # Children only take up rows while the item is expanded
        for child in _tree.get_children(item):
            row += _n_rows(child)
    return row
def _attached(item):
    # Heuristic for checking if a Treeview item is attached. There don't seem
    # to be good APIs for this. Might fail for super-obscure cases with tiny
    # trees, but you'd just get a small scroll mess-up.
    #
    # A detached/deleted item has no siblings and no parent, which is also
    # true for a lone root item -- hence "heuristic".
    return bool(_tree.next(item) or _tree.prev(item) or _tree.parent(item))
def _change_node(node, parent):
    # Toggles/changes the value of 'node'. 'parent' is the parent window
    # (either the main window or the jump-to dialog), in case we need to pop up
    # a dialog.
    if not _changeable(node):
        return
    # sc = symbol/choice
    sc = node.item
    if sc.type in (INT, HEX, STRING):
        # Typed value: prompt the user with a dialog
        s = _set_val_dialog(node, parent)
        # Tkinter can return 'unicode' strings on Python 2, which Kconfiglib
        # can't deal with. UTF-8-encode the string to work around it.
        if _PY2 and isinstance(s, unicode):
            s = s.encode("utf-8", "ignore")
        # None means the dialog was canceled
        if s is not None:
            _set_val(sc, s)
    elif len(sc.assignable) == 1:
        # Handles choice symbols for choices in y mode, which are a special
        # case: .assignable can be (2,) while .tri_value is 0.
        _set_val(sc, sc.assignable[0])
    else:
        # Set the symbol to the value after the current value in
        # sc.assignable, with wrapping
        val_index = sc.assignable.index(sc.tri_value)
        _set_val(sc, sc.assignable[(val_index + 1) % len(sc.assignable)])
def _set_val(sc, val):
    # Wrapper around Symbol/Choice.set_value() for updating the menu state and
    # _conf_changed
    #
    # sc:
    #   Symbol or Choice to assign to
    #
    # val:
    #   New value; either a tristate (0/1/2) or a string
    # Use the string representation of tristate values. This makes the format
    # consistent for all symbol types.
    if val in TRI_TO_STR:
        val = TRI_TO_STR[val]
    # Skip the (relatively expensive) tree update when nothing changes
    if val != sc.str_value:
        sc.set_value(val)
        _set_conf_changed(True)
        # Update the tree and try to preserve the scroll. Do a cheaper variant
        # than in the show-all case, that might mess up the scroll slightly in
        # rare cases, but is fast and flicker-free.
        stayput = _loc_ref_item()  # Item to preserve scroll for
        old_row = _item_row(stayput)
        _update_tree()
        # If the reference item disappeared (can happen if the change was done
        # from the jump-to dialog), then avoid messing with the scroll and hope
        # for the best
        if _attached(stayput):
            _tree.yview_scroll(_item_row(stayput) - old_row, "units")
        if _jump_to_tree:
            _update_jump_to_display()
def _set_val_dialog(node, parent):
    # Pops up a dialog for setting the value of the string/int/hex
    # symbol at node 'node'. 'parent' is the parent window.
    #
    # Returns the entered value (stored in the module-level _entry_res by the
    # ok()/cancel() handlers), or None if the dialog was canceled.
    def ok(_=None):
        # OK button/Enter handler. Validates and records the entered value.
        # No 'nonlocal' in Python 2
        global _entry_res
        s = entry.get()
        # Be lenient and add a missing "0x" prefix for hex symbols
        if sym.type == HEX and not s.startswith(("0x", "0X")):
            s = "0x" + s
        if _check_valid(dialog, entry, sym, s):
            _entry_res = s
            dialog.destroy()
    def cancel(_=None):
        # Cancel button/Escape/window-close handler
        global _entry_res
        _entry_res = None
        dialog.destroy()
    sym = node.item
    dialog = Toplevel(parent)
    dialog.title("Enter {} value".format(TYPE_TO_STR[sym.type]))
    dialog.resizable(False, False)
    dialog.transient(parent)
    dialog.protocol("WM_DELETE_WINDOW", cancel)
    ttk.Label(dialog, text=node.prompt[0] + ":") \
        .grid(column=0, row=0, columnspan=2, sticky="w", padx=".3c",
              pady=".2c .05c")
    entry = ttk.Entry(dialog, width=30)
    # Start with the previous value in the editbox, selected
    entry.insert(0, sym.str_value)
    entry.selection_range(0, "end")
    entry.grid(column=0, row=1, columnspan=2, sticky="ew", padx=".3c")
    entry.focus_set()
    range_info = _range_info(sym)
    if range_info:
        ttk.Label(dialog, text=range_info) \
            .grid(column=0, row=2, columnspan=2, sticky="w", padx=".3c",
                  pady=".2c 0")
    ttk.Button(dialog, text="OK", command=ok) \
        .grid(column=0, row=4 if range_info else 3, sticky="e", padx=".3c",
              pady=".4c")
    ttk.Button(dialog, text="Cancel", command=cancel) \
        .grid(column=1, row=4 if range_info else 3, padx="0 .3c")
    # Give all horizontal space to the grid cell with the OK button, so that
    # Cancel moves to the right
    dialog.columnconfigure(0, weight=1)
    _center_on_root(dialog)
    # Hack to scroll the entry so that the end of the text is shown, from
    # path_to_url
    # Related Tk ticket: path_to_url
    def scroll_entry(_):
        _root.update_idletasks()
        entry.unbind("<Expose>")
        entry.xview_moveto(1)
    entry.bind("<Expose>", scroll_entry)
    # The dialog must be visible before we can grab the input
    dialog.wait_visibility()
    dialog.grab_set()
    dialog.bind("<Return>", ok)
    dialog.bind("<KP_Enter>", ok)
    dialog.bind("<Escape>", cancel)
    # Wait for the user to be done with the dialog
    parent.wait_window(dialog)
    # Regrab the input in the parent
    parent.grab_set()
    return _entry_res
def _center_on_root(dialog):
    # Positions 'dialog' centered over the root window. It often ends up at
    # some bad place like the top-left corner of the screen otherwise. See the
    # menuconfig() function, which has similar logic.
    def clamp(pos, size, screen_size):
        # Keeps the dialog within the screen along one axis. If it would
        # stick out past the far edge, pull it back; otherwise just make
        # sure it doesn't start before the near edge.
        if pos + size > screen_size:
            return screen_size - size
        return max(pos, 0)
    # Hide the dialog while moving it around, to avoid flicker
    dialog.withdraw()
    _root.update_idletasks()
    width = dialog.winfo_reqwidth()
    height = dialog.winfo_reqheight()
    x = clamp(_root.winfo_rootx() + (_root.winfo_width() - width)//2,
              width, _root.winfo_screenwidth())
    y = clamp(_root.winfo_rooty() + (_root.winfo_height() - height)//2,
              height, _root.winfo_screenheight())
    dialog.geometry("+{}+{}".format(x, y))
    dialog.deiconify()
def _check_valid(dialog, entry, sym, s):
    # Returns True if the string 's' is a well-formed value for 'sym'.
    # Otherwise, pops up an error and returns False.
    #
    # dialog:
    #   Window to parent the error popup to
    #
    # entry:
    #   Entry widget to refocus after showing an error
    if sym.type not in (INT, HEX):
        # Anything goes for non-int/hex symbols
        return True
    base = 10 if sym.type == INT else 16
    try:
        int(s, base)
    except ValueError:
        messagebox.showerror(
            "Bad value",
            "'{}' is a malformed {} value".format(
                s, TYPE_TO_STR[sym.type]),
            parent=dialog)
        entry.focus_set()
        return False
    # Only the first range whose condition is satisfied applies
    for low_sym, high_sym, cond in sym.ranges:
        if expr_value(cond):
            low_s = low_sym.str_value
            high_s = high_sym.str_value
            if not int(low_s, base) <= int(s, base) <= int(high_s, base):
                messagebox.showerror(
                    "Value out of range",
                    "{} is outside the range {}-{}".format(s, low_s, high_s),
                    parent=dialog)
                entry.focus_set()
                return False
            break
    return True
def _range_info(sym):
    # Returns a "Range: <low>-<high>" string for the int/hex symbol 'sym',
    # based on the first range whose condition is satisfied. Returns None if
    # 'sym' has no active range (or isn't an int/hex symbol).
    if sym.type not in (INT, HEX):
        return None
    for low, high, cond in sym.ranges:
        if expr_value(cond):
            return "Range: {}-{}".format(low.str_value, high.str_value)
    return None
def _save(_=None):
    # Tries to save the configuration (menu command and key binding handler)
    if _try_save(_kconf.write_config, _conf_filename, "configuration"):
        # Only mark the configuration clean if the save succeeded
        _set_conf_changed(False)
    # Return keyboard focus to the tree after the operation
    _tree.focus_set()
def _save_as():
    # Pops up a dialog for saving the configuration to a specific location
    global _conf_filename
    filename = _conf_filename
    # Loop so that a failed save reopens the file dialog
    while True:
        filename = filedialog.asksaveasfilename(
            title="Save configuration as",
            initialdir=os.path.dirname(filename),
            initialfile=os.path.basename(filename),
            parent=_root)
        if not filename:
            # Dialog canceled
            break
        if _try_save(_kconf.write_config, filename, "configuration"):
            # Remember the new location for later saves
            _conf_filename = filename
            break
    _tree.focus_set()
def _save_minimal():
    # Pops up a dialog for saving a minimal configuration (defconfig) to a
    # specific location
    global _minconf_filename
    filename = _minconf_filename
    # Loop so that a failed save reopens the file dialog
    while True:
        filename = filedialog.asksaveasfilename(
            title="Save minimal configuration as",
            initialdir=os.path.dirname(filename),
            initialfile=os.path.basename(filename),
            parent=_root)
        if not filename:
            # Dialog canceled
            break
        if _try_save(_kconf.write_min_config, filename,
                     "minimal configuration"):
            # Remember the new location for later saves
            _minconf_filename = filename
            break
    _tree.focus_set()
def _open(_=None):
    # Pops up a dialog for loading a configuration
    global _conf_filename
    # Warn about losing unsaved changes first
    if _conf_changed and \
       not messagebox.askokcancel(
           "Unsaved changes",
           "You have unsaved changes. Load new configuration anyway?"):
        return
    filename = _conf_filename
    # Loop so that a failed load reopens the file dialog
    while True:
        filename = filedialog.askopenfilename(
            title="Open configuration",
            initialdir=os.path.dirname(filename),
            initialfile=os.path.basename(filename),
            parent=_root)
        if not filename:
            # Dialog canceled
            break
        if _try_load(filename):
            # Maybe something fancier could be done here later to try to
            # preserve the scroll
            _conf_filename = filename
            _set_conf_changed(_needs_save())
            if _single_menu and not _shown_menu_nodes(_cur_menu):
                # Turn on show-all if we're in single-menu mode and would end
                # up with an empty menu
                _show_all_var.set(True)
            _update_tree()
            break
    _tree.focus_set()
def _toggle_showname(_):
    # Toggles show-name mode on/off (key binding handler)
    _show_name_var.set(not _show_name_var.get())
    _do_showname()
def _do_showname():
    # Updates the UI for the current show-name setting
    # Columns do not automatically shrink/expand, so we have to update
    # column widths ourselves
    tree_width = _tree.winfo_width()
    if _show_name_var.get():
        # Show the "name" column next to the prompts, giving it a third of
        # the total width
        _tree["displaycolumns"] = ("name",)
        _tree["show"] = "tree headings"
        name_width = tree_width//3
        _tree.column("#0", width=max(tree_width - name_width, 1))
        _tree.column("name", width=name_width)
    else:
        # Hide the "name" column and give all width back to the tree column
        _tree["displaycolumns"] = ()
        _tree["show"] = "tree"
        _tree.column("#0", width=tree_width)
    _tree.focus_set()
def _toggle_showall(_):
    # Toggles show-all mode on/off (key binding handler)
    _show_all_var.set(not _show_all)
    _do_showall()
def _do_showall():
    # Updates the UI for the current show-all setting
    # Don't allow turning off show-all if we'd end up with no visible nodes
    if _nothing_shown():
        _show_all_var.set(True)
        return
    # Save scroll information. old_scroll can end up negative here, if the
    # reference item isn't shown (only invisible items on the screen, and
    # show-all being turned off).
    stayput = _vis_loc_ref_item()
    # Probe the middle of the first row, to play it safe. identify_row(0) seems
    # to return the row before the top row.
    old_scroll = _item_row(stayput) - \
        _item_row(_tree.identify_row(_treeview_rowheight//2))
    _update_tree()
    if _show_all:
        # Deep magic: Unless we call update_idletasks(), the scroll adjustment
        # below is restricted to the height of the old tree, instead of the
        # height of the new tree. Since the tree with show-all on is guaranteed
        # to be taller, and we want the maximum range, we only call it when
        # turning show-all on.
        #
        # Strictly speaking, something similar ought to be done when changing
        # symbol values, but it causes annoying flicker, and in 99% of cases
        # things work anyway there (with usually minor scroll mess-ups in the
        # 1% case).
        _root.update_idletasks()
    # Restore scroll so that the reference item stays on the same screen row
    _tree.yview(_item_row(stayput) - old_scroll)
    _tree.focus_set()
def _nothing_shown():
    # _do_showall() helper. Returns True if no nodes would get
    # shown with the current show-all setting. Also handles the
    # (obscure) case when there are no visible nodes in the entire
    # tree, meaning guiconfig was automatically started in
    # show-all mode, which mustn't be turned off.
    #
    # In single-menu mode only the current menu matters; otherwise the
    # entire tree rooted at the top node is checked
    return not _shown_menu_nodes(
        _cur_menu if _single_menu else _kconf.top_node)
def _toggle_tree_mode(_):
    # Toggles single-menu mode on/off (key binding handler)
    _single_menu_var.set(not _single_menu)
    _do_tree_mode()
def _do_tree_mode():
    # Updates the UI for the current tree mode (full-tree or single-menu)
    # Remember which node we're at, so we can reselect it in the new mode
    loc_ref_node = _id_to_node[_loc_ref_item()]
    if not _single_menu:
        # _jump_to() -> _enter_menu() already updates the tree, but
        # _jump_to() -> load_parents() doesn't, because it isn't always needed.
        # We always need to update the tree here, e.g. to add/remove "--->".
        _update_tree()
    _jump_to(loc_ref_node)
    _tree.focus_set()
def _enter_menu_and_select_first(menu):
    # Enters the menu 'menu' and selects the first item. Used in single-menu
    # mode.
    _enter_menu(menu)
    # _update_tree() (called by _enter_menu()) repopulated the tree, so the
    # first child is the first row of the new menu
    _select(_tree, _tree.get_children()[0])
def _enter_menu(menu):
    # Enters the menu 'menu'. Used in single-menu mode.
    global _cur_menu
    _cur_menu = menu
    _update_tree()
    # The back button only makes sense when we're not at the top-level menu
    _backbutton["state"] = "disabled" if menu is _kconf.top_node else "normal"
def _leave_menu():
    # Leaves the current menu. Used in single-menu mode.
    global _cur_menu
    # Already at the top-level menu: nothing to leave
    if _cur_menu is not _kconf.top_node:
        old_menu = _cur_menu
        _cur_menu = _parent_menu(_cur_menu)
        _update_tree()
        # Select the menu we just left, for orientation
        _select(_tree, id(old_menu))
        if _cur_menu is _kconf.top_node:
            _backbutton["state"] = "disabled"
    _tree.focus_set()
def _select(tree, item):
# Selects, focuses, and see()s 'item' in 'tree'
tree.selection_set(item)
tree.focus(item)
tree.see(item)
def _loc_ref_item():
    # Returns a Treeview item that can serve as a reference for the current
    # scroll location. We try to make this item stay on the same row on the
    # screen when updating the tree.
    # If the selected item is visible, use that
    # (bbox() returns a falsy value for items scrolled out of view)
    sel = _tree.selection()
    if sel and _tree.bbox(sel[0]):
        return sel[0]
    # Otherwise, use the middle item on the screen. If it doesn't exist, the
    # tree is probably really small, so use the first item in the entire tree.
    return _tree.identify_row(_tree.winfo_height()//2) or \
        _tree.get_children()[0]
def _vis_loc_ref_item():
    # Like _loc_ref_item(), but finds a visible item around the reference item.
    # Used when changing show-all mode, where non-visible (red) items will
    # disappear.
    item = _loc_ref_item()
    # Prefer a visible item that's also on-screen (bbox() is falsy for
    # off-screen items), searching backwards first
    vis_before = _vis_before(item)
    if vis_before and _tree.bbox(vis_before):
        return vis_before
    vis_after = _vis_after(item)
    if vis_after and _tree.bbox(vis_after):
        return vis_after
    # Fall back on any visible item, even off-screen
    return vis_before or vis_after
def _vis_before(item):
    # _vis_loc_ref_item() helper. Returns the first visible (not red) item,
    # searching backwards from 'item'. Returns None if there is none.
    while item:
        if not _tree.tag_has("invisible", item):
            return item
        # Move to the previous sibling, or the parent when there is none
        prev = _tree.prev(item)
        item = prev if prev else _tree.parent(item)
    return None
def _vis_after(item):
    # _vis_loc_ref_item() helper. Returns the first visible (not red) item,
    # searching forwards from 'item'. Returns None if there is none.
    while item:
        if not _tree.tag_has("invisible", item):
            return item
        # 'nxt' rather than 'next' to avoid shadowing the built-in
        nxt = _tree.next(item)
        if nxt:
            item = nxt
        else:
            # No next sibling: continue from the parent's next sibling
            item = _tree.parent(item)
            if not item:
                break
            item = _tree.next(item)
    return None
def _on_quit(_=None):
    # Called when the user wants to exit
    if not _conf_changed:
        # Nothing to save: quit right away
        _quit("No changes to save (for '{}')".format(_conf_filename))
        return
    # Loop so that a failed save re-asks the question
    while True:
        ync = messagebox.askyesnocancel("Quit", "Save changes?")
        if ync is None:
            # Cancel: stay in the application
            return
        if not ync:
            # No: quit without saving
            _quit("Configuration ({}) was not saved".format(_conf_filename))
            return
        if _try_save(_kconf.write_config, _conf_filename, "configuration"):
            # _try_save() already prints the "Configuration saved to ..."
            # message
            _quit()
            return
def _quit(msg=None):
    # Quits the application
    #
    # msg:
    #   Optional message to print after the UI has been torn down
    # Do not call sys.exit() here, in case we're being run from a script
    _root.destroy()
    if msg:
        print(msg)
def _try_save(save_fn, filename, description):
    # Tries to save a configuration file. Pops up an error and returns False on
    # failure, returns True on success.
    #
    # save_fn:
    #   Function to call with 'filename' to save the file
    #
    # description:
    #   String describing the thing being saved
    try:
        # save_fn() returns a message to print
        msg = save_fn(filename)
        _set_status(msg)
        print(msg)
        return True
    except EnvironmentError as e:
        # EnvironmentError covers IOError/OSError (Python 2 compatibility)
        messagebox.showerror(
            "Error saving " + description,
            "Error saving {} to '{}': {} (errno: {})"
            .format(description, e.filename, e.strerror,
                    errno.errorcode[e.errno]))
        return False
def _try_load(filename):
    # Tries to load a configuration file. Pops up an error and returns False on
    # failure, returns True on success.
    #
    # filename:
    #   Configuration file to load
    try:
        # load_config() returns a message to print
        msg = _kconf.load_config(filename)
        _set_status(msg)
        print(msg)
        return True
    except EnvironmentError as e:
        # EnvironmentError covers IOError/OSError (Python 2 compatibility)
        messagebox.showerror(
            "Error loading configuration",
            "Error loading '{}': {} (errno: {})"
            .format(filename, e.strerror, errno.errorcode[e.errno]))
        return False
def _jump_to_dialog(_=None):
    # Pops up a dialog for jumping directly to a particular node. Symbol values
    # can also be changed within the dialog.
    #
    # Note: There's nothing preventing this from doing an incremental search
    # like menuconfig.py does, but currently it's a bit jerky for large Kconfig
    # trees, at least when inputting the beginning of the search string. We'd
    # need to somehow only update the tree items that are shown in the Treeview
    # to fix it.
    global _jump_to_tree
    def search(_=None):
        # Search button/Enter handler for the search field
        _update_jump_to_matches(msglabel, entry.get())
    def jump_to_selected(event=None):
        # Jumps to the selected node and closes the dialog
        # Ignore double clicks on the image and in the heading area
        if event and (tree.identify_element(event.x, event.y) == "image" or
                      _in_heading(event)):
            return
        sel = tree.selection()
        if not sel:
            return
        node = _id_to_node[sel[0]]
        # Turn on show-all if the node wouldn't be visible otherwise
        if node not in _shown_menu_nodes(_parent_menu(node)):
            _show_all_var.set(True)
            if not _single_menu:
                # See comment in _do_tree_mode()
                _update_tree()
        _jump_to(node)
        dialog.destroy()
    def tree_select(_):
        # Enables the jump-to button only when something is selected
        jumpto_button["state"] = "normal" if tree.selection() else "disabled"
    dialog = Toplevel(_root)
    dialog.geometry("+{}+{}".format(
        _root.winfo_rootx() + 50, _root.winfo_rooty() + 50))
    dialog.title("Jump to symbol/choice/menu/comment")
    dialog.minsize(128, 128)  # See _create_ui()
    dialog.transient(_root)
    ttk.Label(dialog, text=_JUMP_TO_HELP) \
        .grid(column=0, row=0, columnspan=2, sticky="w", padx=".1c",
              pady=".1c")
    entry = ttk.Entry(dialog)
    entry.grid(column=0, row=1, sticky="ew", padx=".1c", pady=".1c")
    entry.focus_set()
    entry.bind("<Return>", search)
    entry.bind("<KP_Enter>", search)
    ttk.Button(dialog, text="Search", command=search) \
        .grid(column=1, row=1, padx="0 .1c", pady="0 .1c")
    msglabel = ttk.Label(dialog)
    msglabel.grid(column=0, row=2, sticky="w", pady="0 .1c")
    panedwindow, tree = _create_kconfig_tree_and_desc(dialog)
    panedwindow.grid(column=0, row=3, columnspan=2, sticky="nsew")
    # Clear tree
    tree.set_children("")
    _jump_to_tree = tree
    jumpto_button = ttk.Button(dialog, text="Jump to selected item",
                               state="disabled", command=jump_to_selected)
    jumpto_button.grid(column=0, row=4, columnspan=2, sticky="ns", pady=".1c")
    dialog.columnconfigure(0, weight=1)
    # Only the pane with the Kconfig tree and description grows vertically
    dialog.rowconfigure(3, weight=1)
    # See the menuconfig() function
    _root.update_idletasks()
    dialog.geometry(dialog.geometry())
    # The dialog must be visible before we can grab the input
    dialog.wait_visibility()
    dialog.grab_set()
    tree.bind("<Double-1>", jump_to_selected)
    tree.bind("<Return>", jump_to_selected)
    tree.bind("<KP_Enter>", jump_to_selected)
    # add=True to avoid overriding the description text update
    tree.bind("<<TreeviewSelect>>", tree_select, add=True)
    dialog.bind("<Escape>", lambda _: dialog.destroy())
    # Wait for the user to be done with the dialog
    _root.wait_window(dialog)
    # The dialog's tree is gone; stop mirroring value changes into it
    _jump_to_tree = None
    _tree.focus_set()
def _update_jump_to_matches(msglabel, search_string):
    # Searches for nodes matching the search string and updates
    # _jump_to_matches. Puts a message in 'msglabel' if there are no matches,
    # or regex errors.
    #
    # search_string:
    #   Whitespace-separated list of regexes; a node must match all of them
    global _jump_to_matches
    _jump_to_tree.selection_set(())
    try:
        # We could use re.IGNORECASE here instead of lower(), but this is
        # faster for regexes like '.*debug$' (though the '.*' is redundant
        # there). Those probably have bad interactions with re.search(), which
        # matches anywhere in the string.
        regex_searches = [re.compile(regex).search
                          for regex in search_string.lower().split()]
    except re.error as e:
        msg = "Bad regular expression"
        # re.error.msg was added in Python 3.5
        if hasattr(e, "msg"):
            msg += ": " + e.msg
        msglabel["text"] = msg
        # Clear tree
        _jump_to_tree.set_children("")
        return
    _jump_to_matches = []
    add_match = _jump_to_matches.append
    for node in _sorted_sc_nodes():
        # Symbol/choice
        sc = node.item
        for search in regex_searches:
            # Both the name and the prompt might be missing, since
            # we're searching both symbols and choices
            # Does the regex match either the symbol name or the
            # prompt (if any)?
            if not (sc.name and search(sc.name.lower()) or
                    node.prompt and search(node.prompt[0].lower())):
                # Give up on the first regex that doesn't match, to
                # speed things up a bit when multiple regexes are
                # entered
                break
        else:
            add_match(node)
    # Search menus and comments
    for node in _sorted_menu_comment_nodes():
        for search in regex_searches:
            if not search(node.prompt[0].lower()):
                break
        else:
            add_match(node)
    msglabel["text"] = "" if _jump_to_matches else "No matches"
    _update_jump_to_display()
    if _jump_to_matches:
        # Preselect the first match for keyboard navigation
        item = id(_jump_to_matches[0])
        _jump_to_tree.selection_set(item)
        _jump_to_tree.focus(item)
def _update_jump_to_display():
    # Updates the images and text for the items in _jump_to_matches, and sets
    # them as the items of _jump_to_tree
    # Micro-optimize a bit
    # (bind attribute/global lookups to locals outside the loop)
    item = _jump_to_tree.item
    id_ = id
    node_str = _node_str
    img_tag = _img_tag
    visible = _visible
    for node in _jump_to_matches:
        # The "invisible" tag styles unsatisfiable items (shown in red)
        item(id_(node),
             text=node_str(node),
             tags=img_tag(node) if visible(node) else
                 img_tag(node) + " invisible")
    _jump_to_tree.set_children("", *map(id, _jump_to_matches))
def _jump_to(node):
    # Jumps directly to 'node' and selects it
    if _single_menu:
        # Single-menu mode: switch to the menu that contains 'node'
        _enter_menu(_parent_menu(node))
    else:
        # Full-tree mode: make sure the parent menus are populated
        _load_parents(node)
    _select(_tree, id(node))
# Obscure Python: We never pass a value for cached_nodes, and it keeps pointing
# to the same list. This avoids a global.
def _sorted_sc_nodes(cached_nodes=[]):
    # Returns a sorted list of symbol and choice nodes to search. The symbol
    # nodes appear first, sorted by name, and then the choice nodes, sorted by
    # prompt and (secondarily) name.
    #
    # The result is computed once and memoized in the (deliberately mutable)
    # default argument -- see the comment above the function.
    if not cached_nodes:
        # Add symbol nodes
        for sym in sorted(_kconf.unique_defined_syms,
                          key=lambda sym: sym.name):
            # += is in-place for lists
            cached_nodes += sym.nodes
        # Add choice nodes
        # The name sort runs first, so it acts as the secondary sort key
        choices = sorted(_kconf.unique_choices,
                         key=lambda choice: choice.name or "")
        cached_nodes += sorted(
            [node for choice in choices for node in choice.nodes],
            key=lambda node: node.prompt[0] if node.prompt else "")
    return cached_nodes
def _sorted_menu_comment_nodes(cached_nodes=[]):
    # Returns a list of menu and comment nodes to search, sorted by prompt,
    # with the menus first
    #
    # Like _sorted_sc_nodes(), memoizes its result in the mutable default
    # argument
    if not cached_nodes:
        def prompt_text(mc):
            # Sort key: the prompt string of the menu/comment node
            return mc.prompt[0]
        cached_nodes += sorted(_kconf.menus, key=prompt_text)
        cached_nodes += sorted(_kconf.comments, key=prompt_text)
    return cached_nodes
def _load_parents(node):
    # Menus are lazily populated as they're opened in full-tree mode, but
    # jumping to an item needs its parent menus to be populated. This function
    # populates 'node's parents.
    # Get all parents leading up to 'node', sorted with the root first
    parents = []
    cur = node.parent
    while cur is not _kconf.top_node:
        parents.append(cur)
        cur = cur.parent
    parents.reverse()
    for i, parent in enumerate(parents):
        if not _tree.item(id(parent), "open"):
            # Found a closed menu. Populate it and all the remaining menus
            # leading up to 'node'.
            for parent in parents[i:]:
                # We only need to populate "real" menus/choices. Implicit menus
                # are populated when their parents menus are entered.
                if not isinstance(parent.item, Symbol):
                    _build_full_tree(parent)
            return
def _parent_menu(node):
# Returns the menu node of the menu that contains 'node'. In addition to
# proper 'menu's, this might also be a 'menuconfig' symbol or a 'choice'.
# "Menu" here means a menu in the interface.
menu = node.parent
while not menu.is_menuconfig:
menu = menu.parent
return menu
def _trace_write(var, fn):
# Makes fn() be called whenever the Tkinter Variable 'var' changes value
# trace_variable() is deprecated according to the docstring,
# which recommends trace_add()
if hasattr(var, "trace_add"):
var.trace_add("write", fn)
else:
var.trace_variable("w", fn)
def _info_str(node):
    # Returns information about the menu node 'node' as a string.
    #
    # The helper functions are responsible for adding newlines. This allows
    # them to return "" if they don't want to add any output.
    if isinstance(node.item, Symbol):
        sym = node.item
        return (
            _name_info(sym) +
            _help_info(sym) +
            _direct_dep_info(sym) +
            _defaults_info(sym) +
            _select_imply_info(sym) +
            _kconfig_def_info(sym)
        )
    if isinstance(node.item, Choice):
        choice = node.item
        return (
            _name_info(choice) +
            _help_info(choice) +
            'Mode: {}\n\n'.format(choice.str_value) +
            _choice_syms_info(choice) +
            _direct_dep_info(choice) +
            _defaults_info(choice) +
            _kconfig_def_info(choice)
        )
    # node.item in (MENU, COMMENT)
    # Menus/comments only have their Kconfig definition to show
    return _kconfig_def_info(node)
def _name_info(sc):
# Returns a string with the name of the symbol/choice. Choices are shown as
# <choice (name if any)>.
return (sc.name if sc.name else standard_sc_expr_str(sc)) + "\n\n"
def _value_info(sym):
    # Returns a "Value: ..." line showing 'sym's current value
    val = sym.str_value
    # Only put quotes around the value for string symbols
    if sym.orig_type == STRING:
        val = '"{}"'.format(val)
    return "Value: {}\n".format(val)
def _choice_syms_info(choice):
# Returns a string listing the choice symbols in 'choice'. Adds
# "(selected)" next to the selected one.
s = "Choice symbols:\n"
for sym in choice.syms:
s += " - " + sym.name
if sym is choice.selection:
s += " (selected)"
s += "\n"
return s + "\n"
def _help_info(sc):
# Returns a string with the help text(s) of 'sc' (Symbol or Choice).
# Symbols and choices defined in multiple locations can have multiple help
# texts.
s = ""
for node in sc.nodes:
if node.help is not None:
s += node.help + "\n\n"
return s
def _direct_dep_info(sc):
    # Returns a string describing the direct dependencies of 'sc' (Symbol or
    # Choice). The direct dependencies are the OR of the dependencies from each
    # definition location. The dependencies at each definition location come
    # from 'depends on' and dependencies inherited from parent items.
    #
    # Returns "" when the direct dependency is just 'y' (i.e. no dependencies)
    return "" if sc.direct_dep is _kconf.y else \
        'Direct dependencies (={}):\n{}\n' \
        .format(TRI_TO_STR[expr_value(sc.direct_dep)],
                _split_expr_info(sc.direct_dep, 2))
def _defaults_info(sc):
    # Returns a string describing the defaults of 'sc' (Symbol or Choice).
    # Returns "" if there are no defaults.
    if not sc.defaults:
        return ""
    s = "Default"
    if len(sc.defaults) > 1:
        s += "s"
    s += ":\n"
    for val, cond in sc.orig_defaults:
        s += " - "
        if isinstance(sc, Symbol):
            s += _expr_str(val)
            # Skip the tristate value hint if the expression is just a single
            # symbol. _expr_str() already shows its value as a string.
            #
            # This also avoids showing the tristate value for string/int/hex
            # defaults, which wouldn't make any sense.
            if isinstance(val, tuple):
                s += ' (={})'.format(TRI_TO_STR[expr_value(val)])
        else:
            # Don't print the value next to the symbol name for choice
            # defaults, as it looks a bit confusing
            s += val.name
        s += "\n"
        # Only show the condition when there is one (i.e. it isn't just 'y')
        if cond is not _kconf.y:
            s += "     Condition (={}):\n{}" \
                 .format(TRI_TO_STR[expr_value(cond)],
                         _split_expr_info(cond, 4))
    return s + "\n"
def _split_expr_info(expr, indent):
    # Returns a string with 'expr' split into its top-level && or || operands,
    # with one operand per line, together with the operand's value. This is
    # usually enough to get something readable for long expressions. A fancier
    # recursive thingy would be possible too.
    #
    # indent:
    #   Number of leading spaces to add before the split expression.
    # Split on && if the expression has top-level && operands, else on ||
    if len(split_expr(expr, AND)) > 1:
        split_op = AND
        op_str = "&&"
    else:
        split_op = OR
        op_str = "||"
    s = ""
    for i, term in enumerate(split_expr(expr, split_op)):
        # The first operand gets blank padding where later ones get the
        # operator, so the operands line up
        s += "{}{} {}".format(indent*" ",
                              "  " if i == 0 else op_str,
                              _expr_str(term))
        # Don't bother showing the value hint if the expression is just a
        # single symbol. _expr_str() already shows its value.
        if isinstance(term, tuple):
            s += "  (={})".format(TRI_TO_STR[expr_value(term)])
        s += "\n"
    return s
def _select_imply_info(sym):
    # Returns a string with information about which symbols 'select' or 'imply'
    # 'sym'. The selecting/implying symbols are grouped according to which
    # value they select/imply 'sym' to (n/m/y).
    def sis(expr, val, title):
        # sis = selects/implies
        #
        # Lists the selecting/implying symbols in 'expr' (a reverse
        # dependency) whose select/imply currently evaluates to 'val'
        sis = [si for si in split_expr(expr, OR) if expr_value(si) == val]
        if not sis:
            return ""
        res = title
        for si in sis:
            # The first && operand of a reverse dependency term is the
            # selecting/implying symbol itself
            res += " - {}\n".format(split_expr(si, AND)[0].name)
        return res + "\n"
    s = ""
    # rev_dep is 'n' when nothing selects the symbol
    if sym.rev_dep is not _kconf.n:
        s += sis(sym.rev_dep, 2,
                 "Symbols currently y-selecting this symbol:\n")
        s += sis(sym.rev_dep, 1,
                 "Symbols currently m-selecting this symbol:\n")
        s += sis(sym.rev_dep, 0,
                 "Symbols currently n-selecting this symbol (no effect):\n")
    # weak_rev_dep is the 'imply' counterpart of rev_dep
    if sym.weak_rev_dep is not _kconf.n:
        s += sis(sym.weak_rev_dep, 2,
                 "Symbols currently y-implying this symbol:\n")
        s += sis(sym.weak_rev_dep, 1,
                 "Symbols currently m-implying this symbol:\n")
        s += sis(sym.weak_rev_dep, 0,
                 "Symbols currently n-implying this symbol (no effect):\n")
    return s
def _kconfig_def_info(item):
    # Returns a string with the definition of 'item' in Kconfig syntax,
    # together with the definition location(s) and their include and menu paths
    #
    # 'item' is either a MenuNode (for menus/comments) or a Symbol/Choice,
    # which can have several definition nodes
    nodes = [item] if isinstance(item, MenuNode) else item.nodes
    s = "Kconfig definition{}, with parent deps. propagated to 'depends on'\n" \
        .format("s" if len(nodes) > 1 else "")
    # Underline the header with '=' characters of matching length
    s += (len(s) - 1)*"="
    for node in nodes:
        s += "\n\n" \
             "At {}:{}\n" \
             "{}" \
             "Menu path: {}\n\n" \
             "{}" \
             .format(node.filename, node.linenr,
                     _include_path_info(node),
                     _menu_path_info(node),
                     node.custom_str(_name_and_val_str))
    return s
def _include_path_info(node):
if not node.include_path:
# In the top-level Kconfig file
return ""
return "Included via {}\n".format(
" -> ".join("{}:{}".format(filename, linenr)
for filename, linenr in node.include_path))
def _menu_path_info(node):
    # Returns a string describing the menu path leading up to 'node',
    # e.g. "(Top) -> Some menu -> Some submenu"
    path = ""
    # Walk up towards the top node, prepending each menu's prompt
    while node.parent is not _kconf.top_node:
        node = node.parent
        # Promptless choices might appear among the parents. Use
        # standard_sc_expr_str() for them, so that they show up as
        # '<choice (name if any)>'.
        path = " -> " + (node.prompt[0] if node.prompt else
                         standard_sc_expr_str(node.item)) + path
    return "(Top)" + path
def _name_and_val_str(sc):
    # Custom symbol/choice printer that shows symbol values after symbols
    # (passed to expr_str()/custom_str() as the formatting callback)
    # Show the values of non-constant (non-quoted) symbols that don't look like
    # numbers. Things like 123 are actually symbol references, and only work as
    # expected due to undefined symbols getting their name as their value.
    # Showing the symbol value for those isn't helpful though.
    if isinstance(sc, Symbol) and not sc.is_constant and not _is_num(sc.name):
        if not sc.nodes:
            # Undefined symbol reference
            return "{}(undefined/n)".format(sc.name)
        return '{}(={})'.format(sc.name, sc.str_value)
    # For other items, use the standard format
    return standard_sc_expr_str(sc)
def _expr_str(expr):
    # Custom expression printer that shows symbol values
    # (thin wrapper that plugs _name_and_val_str() into expr_str())
    return expr_str(expr, _name_and_val_str)
def _is_num(name):
# Heuristic to see if a symbol name looks like a number, for nicer output
# when printing expressions. Things like 16 are actually symbol names, only
# they get their name as their value when the symbol is undefined.
try:
int(name)
except ValueError:
if not name.startswith(("0x", "0X")):
return False
try:
int(name, 16)
except ValueError:
return False
return True
# Invoke the script's main routine only when executed directly.
if __name__ == "__main__":
    _main()
``` | /content/code_sandbox/scripts/kconfig/guiconfig.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 18,170 |
```python
#!/usr/bin/env python3
#
"""
Script to prepare the LLEXT exports table of a Zephyr ELF
This script performs compile-time processing of the LLEXT exports
table for usage at runtime by the LLEXT subsystem code. The table
is a special section filled with 'llext_const_symbol' structures
generated by the EXPORT_SYMBOL macro.
Currently, the preparatory work consists mostly of sorting the
exports table to allow usage of binary search algorithms at runtime.
If CONFIG_LLEXT_EXPORT_BUILTINS_BY_SLID option is enabled, SLIDs
of all exported functions are also injected in the export table by
this script. (In this case, the preparation process is destructive)
"""
import llext_slidlib
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import Section
import argparse
import logging
import pathlib
import struct
import sys
#!!!!! WARNING !!!!!
#
#These constants MUST be kept in sync with the linker scripts
#and the EXPORT_SYMBOL macro located in 'subsys/llext/llext.h'.
#Otherwise, the LLEXT subsystem will be broken!
#
#!!!!! WARNING !!!!!
# Section holding the 'llext_const_symbol' entries generated by EXPORT_SYMBOL
LLEXT_EXPORT_TABLE_SECTION_NAME = "llext_const_symbol_area"
# Section holding the NUL-terminated names of the exported symbols
LLEXT_EXPORT_NAMES_SECTION_NAME = "llext_exports_strtab"
def _llext_const_symbol_struct(ptr_size: int, endianness: str):
"""
ptr_size -- Platform pointer size in bytes
endianness -- Platform endianness ('little'/'big')
"""
endspec = "<" if endianness == 'little' else ">"
if ptr_size == 4:
ptrspec = "I"
elif ptr_size == 8:
ptrspec = "Q"
# struct llext_const_symbol
# contains just two pointers.
lcs_spec = endspec + 2 * ptrspec
return struct.Struct(lcs_spec)
#ELF Shdr flag applied to the export table section, to indicate
#the section has already been prepared by this script. This is
#mostly a security measure to prevent the script from running
#twice on the same ELF file, which can result in catastrophic
#failures if SLID-based linking is enabled (in this case, the
#preparation process is destructive).
#
#This flag is part of the SHF_MASKOS mask, of which all bits
#are "reserved for operating system-specific semantics".
#See the System V gABI / ELF specification ("Section Attribute Flags").
SHF_LLEXT_PREPARATION_DONE = 0x08000000
class SectionDescriptor():
    """ELF Section descriptor

    Thin wrapper around pyelftools' "Section" object that also records the
    section's header index, header file offset, size, flags and data offset.
    """
    def __init__(self, elffile, section_name):
        self.name = section_name
        section = elffile.get_section_by_name(section_name)
        if not isinstance(section, Section):
            raise KeyError(f"section {section_name} not found")
        self.section = section
        self.shdr_index = elffile.get_section_index(section_name)
        # File offset of this section's header inside the section header table
        self.shdr_offset = (elffile['e_shoff']
                            + self.shdr_index * elffile['e_shentsize'])
        self.size = section['sh_size']
        self.flags = section['sh_flags']
        self.offset = section['sh_offset']
class LLEXTExptabManipulator():
    """Class used to wrap the LLEXT export table manipulation.

    Exposes the export table stored in the ELF file as a random-access
    sequence of (addr_or_slid, sym_addr) tuples. Iteration works through
    the legacy sequence protocol (__getitem__ from 0 until IndexError).
    """
    def __init__(self, elf_fd, exptab_file_offset, lcs_struct, exports_count):
        self.fd = elf_fd
        self.exports_count = exports_count
        self.base_offset = exptab_file_offset
        self.lcs_struct = lcs_struct

    def _check_index(self, index):
        # Shared validation for __getitem__/__setitem__. Negative indices
        # would silently seek before the table, so reject them explicitly.
        if not isinstance(index, int):
            raise TypeError(f"invalid type {type(index)} for index")
        if index < 0 or index >= self.exports_count:
            raise IndexError(f"index {index} is out of bounds (max {self.exports_count})")

    def _seek_to_sym(self, index):
        # Position the file at the start of entry 'index'
        self.fd.seek(self.base_offset + index * self.lcs_struct.size)

    def __getitem__(self, index):
        """Return the (addr_or_slid, sym_addr) tuple stored at 'index'."""
        self._check_index(index)
        self._seek_to_sym(index)
        return self.lcs_struct.unpack(self.fd.read(self.lcs_struct.size))

    def __setitem__(self, index, item):
        """Overwrite entry 'index' with the (addr_or_slid, sym_addr) pair."""
        self._check_index(index)
        (addr_or_slid, sym_addr) = item
        self._seek_to_sym(index)
        self.fd.write(self.lcs_struct.pack(addr_or_slid, sym_addr))
class ZephyrElfExptabPreparator():
    """Prepares the LLEXT export table of a Zephyr ELF.

    Attributes:
    elf_path: path to the Zephyr ELF to prepare
    log: a logging.Logger object
    slid_listing_path: path to the file where SLID listing should be saved
    """
    def __init__(self, elf_path: str, log: logging.Logger, slid_listing_path: str | None):
        self.elf_path = elf_path
        self.elf_fd = open(self.elf_path, 'rb+')
        self.elf = ELFFile(self.elf_fd)
        self.log = log
        # Lazy-open the SLID listing file to ensure it is only created when necessary
        self.slid_listing_path = slid_listing_path
        self.slid_listing_fd = None

    def _prepare_exptab_for_slid_linking(self):
        """Replace export names with SLIDs and sort the table by SLID.

        Returns 0 on success, 1 if at least one SLID collision is found.

        IMPLEMENTATION NOTES:
        In the linker script, we declare the export names table
        as starting at address 0. Thanks to this, all "pointers"
        to that section are equal to the offset inside the section.
        Also note that symbol names are always NUL-terminated.
        The export table is sorted by SLID in ASCENDING order.
        """
        def read_symbol_name(name_ptr):
            # Read one NUL-terminated name from the export names section
            raw_name = b''
            self.elf_fd.seek(self.expstrtab_section.offset + name_ptr)
            c = self.elf_fd.read(1)
            while c != b'\0':
                raw_name += c
                c = self.elf_fd.read(1)
            return raw_name.decode("utf-8")

        #1) Load the export table
        exports_list = []
        for (name_ptr, export_address) in self.exptab_manipulator:
            export_name = read_symbol_name(name_ptr)
            exports_list.append((export_name, export_address))

        #2) Generate the SLID for all exports
        collided = False
        sorted_exptab = dict()
        for export_name, export_addr in exports_list:
            slid = llext_slidlib.generate_slid(export_name, self.ptrsize)
            collision = sorted_exptab.get(slid)
            if collision:
                #Don't abort immediately on collision: if there are others, we want to log them all.
                self.log.error(f"SLID collision: {export_name} and {collision[0]} have the same SLID 0x{slid:X}")
                collided = True
            else:
                sorted_exptab[slid] = (export_name, export_addr)
        if collided:
            return 1

        #3) Sort the export table (order specified above)
        sorted_exptab = dict(sorted(sorted_exptab.items()))

        #4) Write the updated export table to ELF, and dump
        #to SLID listing if requested by caller
        if self.slid_listing_path:
            self.slid_listing_fd = open(self.slid_listing_path, "w")

        def slidlist_write(msg):
            # No-op when no SLID listing file was requested
            if self.slid_listing_fd:
                self.slid_listing_fd.write(msg + "\n")

        slidlist_write(f"/* SLID listing generated by {__file__} */")
        slidlist_write("//")
        slidlist_write("// This file contains the 'SLID -> name' mapping for all")
        slidlist_write("// symbols exported to LLEXT by this Zephyr executable.")
        slidlist_write("")

        self.log.info("SLID -> export name mapping:")
        for i, (slid, name_and_symaddr) in enumerate(sorted_exptab.items()):
            slid_as_str = llext_slidlib.format_slid(slid, self.ptrsize)
            msg = f"{slid_as_str} -> {name_and_symaddr[0]}"
            self.log.info(msg)
            slidlist_write(msg)
            # Destructive: the name pointer is overwritten with the SLID
            self.exptab_manipulator[i] = (slid, name_and_symaddr[1])

        if self.slid_listing_fd:
            self.slid_listing_fd.close()
        return 0

    def _prepare_exptab_for_str_linking(self):
        """Prepare the export table for name-based linking. Currently a no-op."""
        #TODO: sort the export table by symbol
        # name to allow binary search too
        #
        # Plan of action:
        # 1) Locate in which section the names are located
        # 2) Load the export table and resolve names
        # 3) Sort the exports by name
        #     WARN: THIS MUST USE THE SAME SORTING RULES
        #     AS LLEXT CODE OR DICHOTOMIC SEARCH WILL BREAK
        #     Using a custom sorting function might be required.
        # 4) Write back the updated export table
        #
        # N.B.: reusing part of the code in _prepare_elf_for_slid_linking
        # might be possible and desireable.
        #
        # As of writing, this function will never be called as this script
        # is only called if CONFIG_LLEXT_EXPORT_BUILTINS_BY_SLID is enabled,
        # which makes _prepare_exptab_for_slid_linking be called instead.
        #
        # Use the non-deprecated 'warning' (not 'warn'); message is static.
        self.log.warning("_prepare_exptab_for_str_linking: do nothing")
        return 0

    def _set_prep_done_shdr_flag(self):
        """Mark the export table section as prepared in its section header."""
        #Offset and size of the 'sh_flags' member of
        #the Elf_Shdr structure. The offset does not
        #change between ELF32 and ELF64. Size in both
        #is equal to pointer size (4 bytes for ELF32,
        #8 bytes for ELF64).
        SHF_OFFSET = 8
        SHF_SIZE = self.ptrsize
        off = self.exptab_section.shdr_offset + SHF_OFFSET
        #Read existing sh_flags, set the PREPARATION_DONE flag
        #and write back the new value.
        self.elf_fd.seek(off)
        sh_flags = int.from_bytes(self.elf_fd.read(SHF_SIZE), self.endianness)
        sh_flags |= SHF_LLEXT_PREPARATION_DONE
        self.elf_fd.seek(off)
        self.elf_fd.write(int.to_bytes(sh_flags, self.ptrsize, self.endianness))

    def _prepare_inner(self):
        """Locate and prepare the export table. Returns 0 on success."""
        # Locate the export table section
        try:
            self.exptab_section = SectionDescriptor(
                self.elf, LLEXT_EXPORT_TABLE_SECTION_NAME)
        except KeyError as e:
            self.log.error(e.args[0])
            return 1

        # Abort if the ELF has already been processed
        if (self.exptab_section.flags & SHF_LLEXT_PREPARATION_DONE) != 0:
            self.log.warning("exptab section flagged with LLEXT_PREPARATION_DONE "
                             "- not preparing again")
            return 0

        # Get the struct.Struct for export table entry
        self.ptrsize = self.elf.elfclass // 8
        self.endianness = 'little' if self.elf.little_endian else 'big'
        self.lcs_struct = _llext_const_symbol_struct(self.ptrsize, self.endianness)

        # Verify that the export table size is coherent
        if (self.exptab_section.size % self.lcs_struct.size) != 0:
            self.log.error(f"export table size (0x{self.exptab_section.size:X}) "
                           f"not aligned to 'llext_const_symbol' size (0x{self.lcs_struct.size:X})")
            return 1

        # Create the export table manipulator
        num_exports = self.exptab_section.size // self.lcs_struct.size
        self.exptab_manipulator = LLEXTExptabManipulator(
            self.elf_fd, self.exptab_section.offset, self.lcs_struct, num_exports)

        # Attempt to locate the export names section
        try:
            self.expstrtab_section = SectionDescriptor(
                self.elf, LLEXT_EXPORT_NAMES_SECTION_NAME)
        except KeyError:
            self.expstrtab_section = None

        self.log.debug(f"exports table section at file offset 0x{self.exptab_section.offset:X}")
        if self.expstrtab_section:
            self.log.debug(f"exports strtab section at file offset 0x{self.expstrtab_section.offset:X}")
        else:
            self.log.debug("no exports strtab section in ELF")
        self.log.info(f"{num_exports} symbols are exported to LLEXTs by this ELF")

        # Perform the export table preparation
        if self.expstrtab_section:
            res = self._prepare_exptab_for_slid_linking()
        else:
            res = self._prepare_exptab_for_str_linking()

        if res == 0: # Add the "prepared" flag to export table section
            self._set_prep_done_shdr_flag()
        # Propagate the result so failures (e.g. SLID collisions) are not
        # silently turned into a None/0 exit code by falling off the end.
        return res

    def prepare_elf(self):
        """Run the preparation; always close the ELF file descriptor.

        Returns 0 on success, nonzero on failure.
        """
        try:
            return self._prepare_inner()
        finally:
            self.elf_fd.close()
# pylint: disable=duplicate-code
def _parse_args(argv):
"""Parse the command line arguments."""
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
allow_abbrev=False)
parser.add_argument("-f", "--elf-file", default=pathlib.Path("build", "zephyr", "zephyr.elf"),
help="ELF file to process")
parser.add_argument("-sl", "--slid-listing",
help=("write the SLID listing to a file (only useful"
"when CONFIG_LLEXT_EXPORT_BUILTINS_BY_SLID is enabled) "))
parser.add_argument("-v", "--verbose", action="count",
help=("enable verbose output, can be used multiple times "
"to increase verbosity level"))
parser.add_argument("--always-succeed", action="store_true",
help="always exit with a return code of 0, used for testing")
return parser.parse_args(argv)
def _init_log(verbose):
"""Initialize a logger object."""
log = logging.getLogger(__file__)
console = logging.StreamHandler()
console.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
log.addHandler(console)
if verbose and verbose > 1:
log.setLevel(logging.DEBUG)
elif verbose and verbose > 0:
log.setLevel(logging.INFO)
else:
log.setLevel(logging.WARNING)
return log
def main(argv=None):
    """Command-line entry point; returns a process exit code."""
    args = _parse_args(argv)
    log = _init_log(args.verbose)
    log.info(f"prepare_llext_exptab: {args.elf_file}")

    preparator = ZephyrElfExptabPreparator(args.elf_file, log, args.slid_listing)
    res = preparator.prepare_elf()

    # Test mode: report success regardless of the actual result
    return 0 if args.always_succeed else res
# Script entry point: exit with main()'s return code.
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
``` | /content/code_sandbox/scripts/build/llext_prepare_exptab.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,314 |
```python
#!/usr/bin/env python3
#
#
#
import argparse
import sys
import os
import importlib
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
class gen_isr_log:
    """Tiny logger: optional debug lines on stdout, fatal errors via sys.exit."""
    def __init__(self, debug=False):
        self.__debug = debug

    def debug(self, text):
        """Print debug message if debugging is enabled.
        Note - this function requires config global variable to be initialized.
        """
        if not self.__debug:
            return
        prog = os.path.basename(sys.argv[0])
        sys.stdout.write(f"{prog}: {text}\n")

    @staticmethod
    def error(text):
        """Print an error message prefixed with the program name and exit."""
        sys.exit(os.path.basename(sys.argv[0]) + ": error: " + text + "\n")

    def set_debug(self, state):
        """Enable or disable debug output."""
        self.__debug = state
# Module-wide logger instance; verbosity is enabled later via log.set_debug()
log = gen_isr_log()
class gen_isr_config:
    """All the constants and configuration gathered in single class for readability.
    """
    # Constants
    __ISR_FLAG_DIRECT = 1 << 0
    __swt_spurious_handler = "z_irq_spurious"
    __swt_shared_handler = "z_shared_isr"
    __vt_spurious_handler = "z_irq_spurious"
    __vt_irq_handler = "_isr_wrapper"
    __shared_array_name = "z_shared_sw_isr_table"
    __sw_isr_array_name = "_sw_isr_table"
    __irq_vector_array_name = "_irq_vector_table"
    @staticmethod
    def __bm(bits):
        # Bitmask with the lowest 'bits' bits set
        return (1 << bits) - 1
    def __init__(self, args, syms, log):
        """Initialize the configuration object.
        The configuration object initialization takes only arguments as a parameter.
        This is done to allow debug function work as soon as possible.

        Parameters:
        - args: parsed command-line arguments (argparse.Namespace)
        - syms: {name: value} map of kernel ELF symbols (incl. CONFIG_* values)
        - log:  logger with debug/error methods
        """
        # Store the arguments required for work
        self.__args = args
        self.__syms = syms
        self.__log = log
        # Select the default interrupt vector handler
        if self.args.sw_isr_table:
            self.__vt_default_handler = self.__vt_irq_handler
        else:
            self.__vt_default_handler = self.__vt_spurious_handler
        # Calculate interrupt bits
        self.__int_bits = [8, 8, 8]
        # The below few hardware independent magic numbers represent various
        # levels of interrupts in a multi-level interrupt system.
        # 0x000000FF - represents the 1st level (i.e. the interrupts
        # that directly go to the processor).
        # 0x0000FF00 - represents the 2nd level (i.e. the interrupts funnel
        # into 1 line which then goes into the 1st level)
        # 0x00FF0000 - represents the 3rd level (i.e. the interrupts funnel
        # into 1 line which then goes into the 2nd level)
        self.__int_lvl_masks = [0x000000FF, 0x0000FF00, 0x00FF0000]
        self.__irq2_baseoffset = None
        self.__irq3_baseoffset = None
        self.__irq2_offsets = None
        self.__irq3_offsets = None
        if self.check_multi_level_interrupts():
            self.__max_irq_per = self.get_sym("CONFIG_MAX_IRQ_PER_AGGREGATOR")
            self.__int_bits[0] = self.get_sym("CONFIG_1ST_LEVEL_INTERRUPT_BITS")
            self.__int_bits[1] = self.get_sym("CONFIG_2ND_LEVEL_INTERRUPT_BITS")
            self.__int_bits[2] = self.get_sym("CONFIG_3RD_LEVEL_INTERRUPT_BITS")
            # The encoded IRQ number must fit in a 32-bit value
            if sum(self.int_bits) > 32:
                raise ValueError("Too many interrupt bits")
            # Recompute the per-level masks from the configured bit widths
            self.__int_lvl_masks[0] = self.__bm(self.int_bits[0])
            self.__int_lvl_masks[1] = self.__bm(self.int_bits[1]) << self.int_bits[0]
            self.__int_lvl_masks[2] = self.__bm(self.int_bits[2]) << (self.int_bits[0] + self.int_bits[1])
            self.__log.debug("Level Bits Bitmask")
            self.__log.debug("----------------------------")
            for i in range(3):
                bitmask_str = "0x" + format(self.__int_lvl_masks[i], '08X')
                self.__log.debug(f"{i + 1:>5} {self.__int_bits[i]:>7} {bitmask_str:>14}")
            if self.check_sym("CONFIG_2ND_LEVEL_INTERRUPTS"):
                num_aggregators = self.get_sym("CONFIG_NUM_2ND_LEVEL_AGGREGATORS")
                self.__irq2_baseoffset = self.get_sym("CONFIG_2ND_LVL_ISR_TBL_OFFSET")
                self.__irq2_offsets = [self.get_sym('CONFIG_2ND_LVL_INTR_{}_OFFSET'.
                                                    format(str(i).zfill(2))) for i in
                                       range(num_aggregators)]
                self.__log.debug('2nd level offsets: {}'.format(self.__irq2_offsets))
            if self.check_sym("CONFIG_3RD_LEVEL_INTERRUPTS"):
                num_aggregators = self.get_sym("CONFIG_NUM_3RD_LEVEL_AGGREGATORS")
                self.__irq3_baseoffset = self.get_sym("CONFIG_3RD_LVL_ISR_TBL_OFFSET")
                self.__irq3_offsets = [self.get_sym('CONFIG_3RD_LVL_INTR_{}_OFFSET'.
                                                    format(str(i).zfill(2))) for i in
                                       range(num_aggregators)]
                self.__log.debug('3rd level offsets: {}'.format(self.__irq3_offsets))
    @property
    def args(self):
        # Parsed command-line arguments
        return self.__args
    @property
    def swt_spurious_handler(self):
        # Default handler name for unused SW ISR table entries
        return self.__swt_spurious_handler
    @property
    def swt_shared_handler(self):
        # Handler name used for shared-interrupt SW ISR table entries
        return self.__swt_shared_handler
    @property
    def vt_default_handler(self):
        # Default handler name for the HW vector table
        return self.__vt_default_handler
    @property
    def shared_array_name(self):
        return self.__shared_array_name
    @property
    def sw_isr_array_name(self):
        return self.__sw_isr_array_name
    @property
    def irq_vector_array_name(self):
        return self.__irq_vector_array_name
    @property
    def int_bits(self):
        # Per-level bit widths of the encoded IRQ number
        return self.__int_bits
    @property
    def int_lvl_masks(self):
        # Per-level bitmasks of the encoded IRQ number
        return self.__int_lvl_masks
    def endian_prefix(self):
        """Return the struct format prefix matching the target endianness."""
        if self.args.big_endian:
            return ">"
        else:
            return "<"
    def get_irq_baseoffset(self, lvl):
        """Return the SW ISR table base offset for interrupt level 2 or 3."""
        if lvl == 2:
            return self.__irq2_baseoffset
        if lvl == 3:
            return self.__irq3_baseoffset
        # log.error() terminates the script via sys.exit()
        self.__log.error("Unsupported irq level: {}".format(lvl))
    def get_irq_index(self, irq, lvl):
        """Return the aggregator index of parent 'irq' at level 'lvl' (2 or 3)."""
        if lvl == 2:
            offsets = self.__irq2_offsets
        elif lvl == 3:
            offsets = self.__irq3_offsets
        else:
            # log.error() terminates the script, so 'offsets' is always bound below
            self.__log.error("Unsupported irq level: {}".format(lvl))
        try:
            return offsets.index(irq)
        except ValueError:
            self.__log.error("IRQ {} not present in parent offsets ({}). ".
                             format(irq, offsets) +
                             " Recheck interrupt configuration.")
    def get_swt_table_index(self, offset, irq):
        """Translate an (encoded, possibly multi-level) 'irq' into an index
        into the SW ISR table, relative to 'offset'."""
        if not self.check_multi_level_interrupts():
            return irq - offset
        # Calculate index for multi level interrupts
        self.__log.debug('IRQ = ' + hex(irq))
        # Decompose the encoded IRQ into its per-level components
        irq3 = (irq & self.int_lvl_masks[2]) >> (self.int_bits[0] + self.int_bits[1])
        irq2 = (irq & self.int_lvl_masks[1]) >> (self.int_bits[0])
        irq1 = irq & self.int_lvl_masks[0]
        # Figure out third level interrupt position
        if irq3:
            list_index = self.get_irq_index(irq2 - 1, 3)
            irq3_pos = self.get_irq_baseoffset(3) + self.__max_irq_per * list_index + irq3 - 1
            self.__log.debug('IRQ_level = 3')
            self.__log.debug('IRQ_Indx = ' + str(irq3))
            self.__log.debug('IRQ_Pos = ' + str(irq3_pos))
            return irq3_pos - offset
        # Figure out second level interrupt position
        if irq2:
            list_index = self.get_irq_index(irq1, 2)
            irq2_pos = self.get_irq_baseoffset(2) + self.__max_irq_per * list_index + irq2 - 1
            self.__log.debug('IRQ_level = 2')
            self.__log.debug('IRQ_Indx = ' + str(irq2))
            self.__log.debug('IRQ_Pos = ' + str(irq2_pos))
            return irq2_pos - offset
        # Figure out first level interrupt position
        self.__log.debug('IRQ_level = 1')
        self.__log.debug('IRQ_Indx = ' + str(irq1))
        self.__log.debug('IRQ_Pos = ' + str(irq1))
        return irq1 - offset
    def get_intlist_snames(self):
        """Return the candidate section names holding the intlist data."""
        return self.args.intlist_section
    def test_isr_direct(self, flags):
        """Return truthy when 'flags' mark a direct (vector table) ISR."""
        return flags & self.__ISR_FLAG_DIRECT
    def get_sym_from_addr(self, addr):
        """Reverse-lookup a symbol name by its value, or None if not found."""
        for key, value in self.__syms.items():
            if addr == value:
                return key
        return None
    def get_sym(self, name):
        """Return the value of symbol 'name', or None if undefined."""
        return self.__syms.get(name)
    def check_sym(self, name):
        """Return True when symbol 'name' is defined in the kernel ELF."""
        return name in self.__syms
    def check_multi_level_interrupts(self):
        return self.check_sym("CONFIG_MULTI_LEVEL_INTERRUPTS")
    def check_shared_interrupts(self):
        return self.check_sym("CONFIG_SHARED_INTERRUPTS")
    def check_64b(self):
        return self.check_sym("CONFIG_64BIT")
def get_symbols(obj):
    """Return a {name: st_value} mapping from the ELF's symbol table.

    Terminates the script via log.error() when no symbol table is present.
    """
    for section in obj.iter_sections():
        if not isinstance(section, SymbolTableSection):
            continue
        syms = {}
        for sym in section.iter_symbols():
            syms[sym.name] = sym.entry.st_value
        return syms
    log.error("Could not find symbol table")
def read_intList_sect(elfobj, snames):
    """
    Load the raw intList section data in a form of byte array.

    The first section name from 'snames' found in the ELF wins; a missing
    section terminates the script via log.error().
    """
    for sname in snames:
        sect = elfobj.get_section_by_name(sname)
        if sect is not None:
            log.debug("Found intlist section: \"{}\"".format(sname))
            return sect.data()
    log.error("Cannot find the intlist section!")
def parse_args():
    """Build the argument parser and return the parsed command line."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        allow_abbrev=False)

    p.add_argument("-e", "--big-endian", action="store_true",
                   help="Target encodes data in big-endian format (little endian is "
                        "the default)")
    p.add_argument("-d", "--debug", action="store_true",
                   help="Print additional debugging information")
    p.add_argument("-o", "--output-source", required=True,
                   help="Output source file")
    p.add_argument("-l", "--linker-output-files",
                   nargs=2,
                   metavar=("vector_table_link", "software_interrupt_link"),
                   help="Output linker files. "
                        "Used only if CONFIG_ISR_TABLES_LOCAL_DECLARATION is enabled. "
                        "In other case empty file would be generated.")
    p.add_argument("-k", "--kernel", required=True,
                   help="Zephyr kernel image")
    p.add_argument("-s", "--sw-isr-table", action="store_true",
                   help="Generate SW ISR table")
    p.add_argument("-V", "--vector-table", action="store_true",
                   help="Generate vector table")
    p.add_argument("-i", "--intlist-section", action="append", required=True,
                   help="The name of the section to search for the interrupt data. "
                        "This is accumulative argument. The first section found would be used.")

    return p.parse_args()
def main():
    """Entry point: read the kernel ELF, parse the intlist data and emit the
    generated ISR table source (plus optional linker fragment files)."""
    args = parse_args()
    # Configure logging as soon as possible
    log.set_debug(args.debug)

    with open(args.kernel, "rb") as fp:
        kernel = ELFFile(fp)
        config = gen_isr_config(args, get_symbols(kernel), log)
        intlist_data = read_intList_sect(kernel, config.get_intlist_snames())
        # Pick the parser implementation matching the configured table layout
        if config.check_sym("CONFIG_ISR_TABLES_LOCAL_DECLARATION"):
            parser_name = 'gen_isr_tables_parser_local'
        else:
            parser_name = 'gen_isr_tables_parser_carrays'
        parser_module = importlib.import_module(parser_name)
        parser = parser_module.gen_isr_parser(intlist_data, config, log)

    with open(args.output_source, "w") as fp:
        parser.write_source(fp)

    if args.linker_output_files is not None:
        vt_path, swi_path = args.linker_output_files
        with open(vt_path, "w") as fp_vt, open(swi_path, "w") as fp_swi:
            if hasattr(parser, 'write_linker_vt'):
                parser.write_linker_vt(fp_vt)
            else:
                log.debug("Chosen parser does not support vector table linker file")
                fp_vt.write('/* Empty */\n')
            if hasattr(parser, 'write_linker_swi'):
                parser.write_linker_swi(fp_swi)
            else:
                log.debug("Chosen parser does not support software interrupt linker file")
                fp_swi.write('/* Empty */\n')
# Run the generator only when executed as a script.
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/gen_isr_tables.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,954 |
```python
#!/usr/bin/env python3
#
#
#
import struct
class gen_isr_parser:
    """Parses intlist data and writes the ISR tables as C arrays."""
    # C preamble emitted at the top of the generated source file
    source_header = """
/* AUTO-GENERATED by gen_isr_tables.py, do not edit! */
#include <zephyr/toolchain.h>
#include <zephyr/linker/sections.h>
#include <zephyr/sw_isr_table.h>
#include <zephyr/arch/cpu.h>
typedef void (* ISR)(const void *);
"""
    # Extra preamble for the assembly-jump variant of the vector table
    source_assembly_header = """
#ifndef ARCH_IRQ_VECTOR_JUMP_CODE
#error "ARCH_IRQ_VECTOR_JUMP_CODE not defined"
#endif
"""
    def __init__(self, intlist_data, config, log):
        """Initialize the parser.
        The function prepares parser to work.
        Parameters:
        - intlist_data: The binary data from intlist section
        - config: The configuration object
        - log: The logging object, has to have error and debug methods
        """
        self.__config = config
        self.__log = log
        intlist = self.__read_intlist(intlist_data)
        self.__vt, self.__swt, self.__nv = self.__parse_intlist(intlist)
    def __read_intlist(self, intlist_data):
        """read a binary file containing the contents of the kernel's .intList
        section. This is an instance of a header created by
        include/zephyr/linker/intlist.ld:
        struct {
            uint32_t num_vectors; <- typically CONFIG_NUM_IRQS
            struct _isr_list isrs[]; <- Usually of smaller size than num_vectors
        }
        Followed by instances of struct _isr_list created by IRQ_CONNECT()
        calls:
        struct _isr_list {
            /** IRQ line number */
            int32_t irq;
            /** Flags for this IRQ, see ISR_FLAG_* definitions */
            int32_t flags;
            /** ISR to call */
            void *func;
            /** Parameter for non-direct IRQs */
            const void *param;
        };
        """
        intlist = {}
        prefix = self.__config.endian_prefix()
        # Extract header and the rest of the data
        intlist_header_fmt = prefix + "II"
        header_sz = struct.calcsize(intlist_header_fmt)
        header_raw = struct.unpack_from(intlist_header_fmt, intlist_data, 0)
        self.__log.debug(str(header_raw))
        intlist["num_vectors"] = header_raw[0]
        intlist["offset"] = header_raw[1]
        intdata = intlist_data[header_sz:]
        # Extract information about interrupts
        # Entry layout depends on target pointer width (see _isr_list above)
        if self.__config.check_64b():
            intlist_entry_fmt = prefix + "iiQQ"
        else:
            intlist_entry_fmt = prefix + "iiII"
        intlist["interrupts"] = [i for i in
                                 struct.iter_unpack(intlist_entry_fmt, intdata)]
        self.__log.debug("Configured interrupt routing")
        self.__log.debug("handler irq flags param")
        self.__log.debug("--------------------------")
        for irq in intlist["interrupts"]:
            self.__log.debug("{0:<10} {1:<3} {2:<3} {3}".format(
                hex(irq[2]), irq[0], irq[1], hex(irq[3])))
        return intlist
    def __parse_intlist(self, intlist):
        """All the intlist data are parsed into swt and vt arrays.
        The vt array is prepared for hardware interrupt table.
        Every entry in the selected position would contain None or the name of the function pointer
        (address or string).
        The swt is a little more complex. At every position it would contain an array of parameter and
        function pointer pairs. If CONFIG_SHARED_INTERRUPTS is enabled there may be more than 1 entry.
        If empty array is placed on selected position - it means that the application does not implement
        this interrupt.
        Parameters:
        - intlist: The preprocessed list of intlist section content (see read_intlist)
        Return:
        vt, swt - parsed vt and swt arrays (see function description above)
        """
        nvec = intlist["num_vectors"]
        offset = intlist["offset"]
        # An absurdly large vector count usually means the endianness
        # setting does not match the target
        if nvec > pow(2, 15):
            raise ValueError('nvec is too large, check endianness.')
        self.__log.debug('offset is ' + str(offset))
        self.__log.debug('num_vectors is ' + str(nvec))
        # Set default entries in both tables
        if not(self.__config.args.sw_isr_table or self.__config.args.vector_table):
            self.__log.error("one or both of -s or -V needs to be specified on command line")
        if self.__config.args.vector_table:
            vt = [None for i in range(nvec)]
        else:
            vt = None
        if self.__config.args.sw_isr_table:
            swt = [[] for i in range(nvec)]
        else:
            swt = None
        # Process intlist and write to the tables created
        for irq, flags, func, param in intlist["interrupts"]:
            if self.__config.test_isr_direct(flags):
                # Direct interrupts go straight into the HW vector table
                if not vt:
                    self.__log.error("Direct Interrupt %d declared with parameter 0x%x "
                                     "but no vector table in use"
                                     % (irq, param))
                if param != 0:
                    self.__log.error("Direct irq %d declared, but has non-NULL parameter"
                                     % irq)
                if not 0 <= irq - offset < len(vt):
                    self.__log.error("IRQ %d (offset=%d) exceeds the maximum of %d"
                                     % (irq - offset, offset, len(vt) - 1))
                vt[irq - offset] = func
            else:
                # Regular interrupt
                if not swt:
                    self.__log.error("Regular Interrupt %d declared with parameter 0x%x "
                                     "but no SW ISR_TABLE in use"
                                     % (irq, param))
                table_index = self.__config.get_swt_table_index(offset, irq)
                if not 0 <= table_index < len(swt):
                    self.__log.error("IRQ %d (offset=%d) exceeds the maximum of %d" %
                                     (table_index, offset, len(swt) - 1))
                if self.__config.check_shared_interrupts():
                    lst = swt[table_index]
                    if (param, func) in lst:
                        self.__log.error("Attempting to register the same ISR/arg pair twice.")
                    if len(lst) >= self.__config.get_sym("CONFIG_SHARED_IRQ_MAX_NUM_CLIENTS"):
                        self.__log.error(f"Reached shared interrupt client limit. Maybe increase"
                                         + f" CONFIG_SHARED_IRQ_MAX_NUM_CLIENTS?")
                else:
                    # Without shared interrupts each slot may hold one entry only
                    if len(swt[table_index]) > 0:
                        self.__log.error(f"multiple registrations at table_index {table_index} for irq {irq} (0x{irq:x})"
                                         + f"\nExisting handler 0x{swt[table_index][0][1]:x}, new handler 0x{func:x}"
                                         + "\nHas IRQ_CONNECT or IRQ_DIRECT_CONNECT accidentally been invoked on the same irq multiple times?"
                                         )
                swt[table_index].append((param, func))
        return vt, swt, nvec
    def __write_code_irq_vector_table(self, fp):
        # Emit the vector table as a naked function whose body is a sequence
        # of arch-provided jump instructions (one per vector).
        fp.write(self.source_assembly_header)
        fp.write("void __irq_vector_table __attribute__((naked)) _irq_vector_table(void) {\n")
        for i in range(self.__nv):
            func = self.__vt[i]
            if func is None:
                func = self.__config.vt_default_handler
            # Numeric entries are addresses; map them back to symbol names
            if isinstance(func, int):
                func_as_string = self.__config.get_sym_from_addr(func)
            else:
                func_as_string = func
            fp.write("\t__asm(ARCH_IRQ_VECTOR_JUMP_CODE({}));\n".format(func_as_string))
        fp.write("}\n")
    def __write_address_irq_vector_table(self, fp):
        # Emit the vector table as an array of handler addresses
        fp.write("uintptr_t __irq_vector_table _irq_vector_table[%d] = {\n" % self.__nv)
        for i in range(self.__nv):
            func = self.__vt[i]
            if func is None:
                func = self.__config.vt_default_handler
            if isinstance(func, int):
                fp.write("\t{},\n".format(func))
            else:
                fp.write("\t((uintptr_t)&{}),\n".format(func))
        fp.write("};\n")
    def __write_shared_table(self, fp):
        # Emit the shared-interrupt client table; slots with at most one
        # client stay empty because write_source() handles those directly
        # in _sw_isr_table.
        fp.write("struct z_shared_isr_table_entry __shared_sw_isr_table"
                 " z_shared_sw_isr_table[%d] = {\n" % self.__nv)
        for i in range(self.__nv):
            if self.__swt[i] is None:
                client_num = 0
                client_list = None
            else:
                client_num = len(self.__swt[i])
                client_list = self.__swt[i]
            if client_num <= 1:
                fp.write("\t{ },\n")
            else:
                fp.write(f"\t{{ .client_num = {client_num}, .clients = {{ ")
                for j in range(0, client_num):
                    routine = client_list[j][1]
                    arg = client_list[j][0]
                    fp.write(f"{{ .isr = (ISR){ hex(routine) if isinstance(routine, int) else routine }, "
                             f".arg = (const void *){hex(arg)} }},")
                fp.write(" },\n},\n")
        fp.write("};\n")
    def write_source(self, fp):
        """Write the generated C source (all requested tables) to 'fp'."""
        fp.write(self.source_header)
        if self.__config.check_shared_interrupts():
            self.__write_shared_table(fp)
        if self.__vt:
            if self.__config.check_sym("CONFIG_IRQ_VECTOR_TABLE_JUMP_BY_ADDRESS"):
                self.__write_address_irq_vector_table(fp)
            elif self.__config.check_sym("CONFIG_IRQ_VECTOR_TABLE_JUMP_BY_CODE"):
                self.__write_code_irq_vector_table(fp)
            else:
                self.__log.error("CONFIG_IRQ_VECTOR_TABLE_JUMP_BY_{ADDRESS,CODE} not set")
        if not self.__swt:
            return
        fp.write("struct _isr_table_entry __sw_isr_table _sw_isr_table[%d] = {\n"
                 % self.__nv)
        level2_offset = self.__config.get_irq_baseoffset(2)
        level3_offset = self.__config.get_irq_baseoffset(3)
        for i in range(self.__nv):
            if len(self.__swt[i]) == 0:
                # Not used interrupt
                param = "0x0"
                func = self.__config.swt_spurious_handler
            elif len(self.__swt[i]) == 1:
                # Single interrupt
                param = "{0:#x}".format(self.__swt[i][0][0])
                func = self.__swt[i][0][1]
            else:
                # Shared interrupt
                param = "&z_shared_sw_isr_table[{0}]".format(i)
                func = self.__config.swt_shared_handler
            if isinstance(func, int):
                func_as_string = "{0:#x}".format(func)
            else:
                func_as_string = func
            if level2_offset is not None and i == level2_offset:
                fp.write("\t/* Level 2 interrupts start here (offset: {}) */\n".
                         format(level2_offset))
            if level3_offset is not None and i == level3_offset:
                fp.write("\t/* Level 3 interrupts start here (offset: {}) */\n".
                         format(level3_offset))
            fp.write("\t{{(const void *){0}, (ISR){1}}}, /* {2} */\n".format(param, func_as_string, i))
        fp.write("};\n")
``` | /content/code_sandbox/scripts/build/gen_isr_tables_parser_carrays.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,517 |
```python
#!/usr/bin/env python3
#
#
"""Convert a file to a list of hex characters
The list of hex characters can then be included to a source file. Optionally,
the output can be compressed.
"""
import argparse
import codecs
import gzip
import io
def parse_args():
    """Parse command-line arguments.

    Sets the module-level 'args' global (used by main()) and additionally
    returns the parsed namespace, which is backward-compatible (the old
    return value was None) and makes the function testable.
    """
    global args

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    parser.add_argument("-f", "--file", required=True, help="Input file")
    parser.add_argument("-o", "--offset", type=lambda x: int(x, 0), default=0,
                        help="Byte offset in the input file")
    parser.add_argument("-l", "--length", type=lambda x: int(x, 0), default=-1,
                        help="""Length in bytes to read from the input file.
                        Defaults to reading till the end of the input file.""")
    parser.add_argument("-g", "--gzip", action="store_true",
                        help="Compress the file using gzip before output")
    parser.add_argument("-t", "--gzip-mtime", type=int, default=0,
                        nargs='?', const=None,
                        help="""mtime seconds in the gzip header.
                        Defaults to zero to keep builds deterministic. For
                        current date and time (= "now") use this option
                        without any value.""")

    args = parser.parse_args()
    return args
def get_nice_string(list_or_iterator):
    """Join two-digit hex strings into a comma-separated "0x.." list."""
    prefixed = ["0x" + str(item) for item in list_or_iterator]
    return ", ".join(prefixed)
def make_hex(chunk):
    """Print one bytes chunk as comma-separated hex values plus a trailing comma."""
    digits = chunk.hex()
    # Walk the hex string two characters (one byte) at a time.
    pairs = (digits[i:i + 2] for i in range(0, len(digits), 2))
    print(get_nice_string(pairs) + ',')
def main():
    """Read the requested byte range from the input file and print it as a
    comma-separated list of hex bytes (optionally gzip-compressed first).
    """
    parse_args()

    if args.gzip:
        # Compress the selected range in memory first, then hex-dump the
        # resulting gzip stream 8 bytes at a time.
        with io.BytesIO() as content:
            with open(args.file, 'rb') as fg:
                fg.seek(args.offset)
                with gzip.GzipFile(fileobj=content, mode='w',
                                   mtime=args.gzip_mtime,
                                   compresslevel=9) as gz_obj:
                    # read(-1) (the default length) reads to end of file
                    gz_obj.write(fg.read(args.length))

            content.seek(0)
            for chunk in iter(lambda: content.read(8), b''):
                make_hex(chunk)
    else:
        with open(args.file, "rb") as fp:
            fp.seek(args.offset)
            if args.length < 0:
                # Negative length: dump until end of file.
                for chunk in iter(lambda: fp.read(8), b''):
                    make_hex(chunk)
            else:
                # Cap each read so exactly args.length bytes are consumed.
                remainder = args.length
                for chunk in iter(lambda: fp.read(min(8, remainder)), b''):
                    make_hex(chunk)
                    remainder = remainder - len(chunk)
# Run the converter only when executed as a script, not on import.
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/file2hex.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 586 |
```python
#!/usr/bin/env python3
#
#
"""
gperf C file post-processor
We use gperf to build up a perfect hashtable of pointer values. The way gperf
does this is to create a table 'wordlist' indexed by a string representation
of a pointer address, and then doing memcmp() on a string passed in for
comparison
We are exclusively working with 4-byte pointer values. This script adjusts
the generated code so that we work with pointers directly and not strings.
This saves a considerable amount of space.
"""
import sys
import argparse
import os
import re
from packaging import version
# --- debug stuff ---
def debug(text):
    """Print *text* prefixed with the script name, but only in verbose mode."""
    if args.verbose:
        sys.stdout.write(os.path.basename(sys.argv[0]) + ": " + text + "\n")
def error(text):
    """Terminate the script with an error message prefixed by its name."""
    prefix = os.path.basename(sys.argv[0])
    sys.exit(prefix + " ERROR: " + text)
def warn(text):
    """Print a warning, prefixed with the script name, to stdout."""
    prefix = os.path.basename(sys.argv[0])
    sys.stdout.write(prefix + " WARNING: " + text + "\n")
def reformat_str(match_obj):
    """Turn a gperf-generated C string literal that encodes a pointer's raw
    bytes into a plain C pointer cast, e.g. '(char *)0x...'.

    The literal may contain octal escapes (\\NNN), escaped characters, and
    plain characters; bytes are stored most-significant-last in the string.
    """
    literal = match_obj.group(0)[1:-1]  # strip surrounding quotes

    byte_vals = [0, 0, 0, 0, 0, 0, 0, 0]
    slot = 7  # fill from the high-order slot downwards
    pos = 0
    while pos < len(literal):
        ch = literal[pos]
        if ch == "\\":
            nxt = literal[pos + 1]
            if nxt.isdigit():
                # Octal escape sequence: exactly three digits from gperf
                byte_vals[slot] = int(literal[pos + 1:pos + 4], 8)
                pos += 4
            else:
                # Char value that had to be escaped by C string rules
                byte_vals[slot] = ord(nxt)
                pos += 2
        else:
            byte_vals[slot] = ord(ch)
            pos += 1
        slot -= 1

    return "(char *)0x%02x%02x%02x%02x%02x%02x%02x%02x" % tuple(byte_vals)
def process_line(line, fp):
    """Transform one line of gperf output and write the result to *fp*.

    Rewrites the string-keyed perfect hash table into one that operates
    directly on pointer values (see module docstring). Preprocessor lines
    pass through untouched; all other lines go through a fixed sequence of
    regex rewrites whose order matters (lengthtable uses are replaced
    before the lengthtable declaration itself is annotated).
    """
    if line.startswith("#"):
        fp.write(line)
        return

    # Set the lookup function to static inline so it gets rolled into
    # k_object_find(), nothing else will use it
    if re.search(args.pattern + " [*]$", line):
        fp.write("static inline " + line)
        return

    m = re.search("gperf version (.*) [*][/]$", line)
    if m:
        # Warn (but continue) when the generating gperf version is outside
        # the tested 3.0..3.1 range.
        v = version.parse(m.groups()[0])
        v_lo = version.parse("3.0")
        v_hi = version.parse("3.1")
        if (v < v_lo or v > v_hi):
            warn("gperf %s is not tested, versions %s through %s supported" %
                 (v, v_lo, v_hi))

    # Replace length lookups with constant len since we're always
    # looking at pointers
    line = re.sub(r'lengthtable\[key\]', r'sizeof(void *)', line)

    # Empty wordlist entries to have NULLs instead of ""
    line = re.sub(r'[{]["]["][}]', r'{}', line)

    # Suppress a compiler warning since this table is no longer necessary
    line = re.sub(r'static unsigned char lengthtable',
                  r'static unsigned char __unused lengthtable', line)

    # drop all use of register keyword, let compiler figure that out,
    # we have to do this since we change stuff to take the address of some
    # parameters
    line = re.sub(r'register', r'', line)

    # Hashing the address of the string
    line = re.sub(r"hash [(]str, len[)]",
                  r"hash((const char *)&str, len)", line)

    # Just compare pointers directly instead of using memcmp
    if re.search("if [(][*]str", line):
        fp.write(" if (str == s)\n")
        return

    # Take the strings with the binary information for the pointer values,
    # and just turn them into pointers
    line = re.sub(r'["].*["]', reformat_str, line)

    fp.write(line)
def parse_args():
    """Parse the command line into the module-level ``args`` namespace.

    The VERBOSE environment variable forces verbose mode on.
    """
    global args

    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        allow_abbrev=False)

    ap.add_argument("-i", "--input", required=True,
                    help="Input C file from gperf")
    ap.add_argument("-o", "--output", required=True,
                    help="Output C file with processing done")
    ap.add_argument("-p", "--pattern", required=True,
                    help="Search pattern for objects")
    ap.add_argument("-v", "--verbose", action="store_true",
                    help="Print extra debugging information")

    args = ap.parse_args()
    if "VERBOSE" in os.environ:
        args.verbose = 1
def main():
    """Post-process the gperf-generated C file line by line."""
    parse_args()

    with open(args.input, "r") as in_fp, open(args.output, "w") as out_fp:
        for line in in_fp:
            process_line(line, out_fp)
# Run the post-processor only when executed as a script, not on import.
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/process_gperf.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,150 |
```python
#!/usr/bin/env python3
#
#
"""
Script to generate a linker script organizing application memory partitions
Applications may declare build-time memory domain partitions with
K_APPMEM_PARTITION_DEFINE, and assign globals to them using K_APP_DMEM
or K_APP_BMEM macros. For each of these partitions, we need to
route all their data into appropriately-sized memory areas which meet the
size/alignment constraints of the memory protection hardware.
This linker script is created very early in the build process, before
the build attempts to link the kernel binary, as the linker script this
tool generates is a necessary pre-condition for kernel linking. We extract
the set of memory partitions to generate by looking for variables which
have been assigned to input sections that follow a defined naming convention.
We also allow entire libraries to be pulled in to assign their globals
to a particular memory partition via command line directives.
This script takes as inputs:
- The base directory to look for compiled objects
- key/value pairs mapping static library files to what partitions their globals
should end up in.
The output is a linker script fragment containing the definition of the
app shared memory section, which is further divided, for each partition
found, into data and BSS for each partition.
"""
import sys
import argparse
import json
import os
import re
from collections import OrderedDict
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
import elftools.common.exceptions
SZ = 'size'
SRC = 'sources'
LIB = 'libraries'
# This script will create sections and linker variables to place the
# application shared memory partitions.
# these are later read by the macros defined in app_memdomain.h for
# initialization purpose when USERSPACE is enabled.
data_template = """
/* Auto generated code do not modify */
SMEM_PARTITION_ALIGN(z_data_smem_{0}_bss_end - z_data_smem_{0}_part_start);
z_data_smem_{0}_part_start = .;
KEEP(*(data_smem_{0}_data*))
"""
library_data_template = """
*{0}:*(.data .data.* .sdata .sdata.*)
"""
bss_template = """
z_data_smem_{0}_bss_start = .;
KEEP(*(data_smem_{0}_bss*))
"""
library_bss_template = """
*{0}:*(.bss .bss.* .sbss .sbss.* COMMON COMMON.*)
"""
footer_template = """
z_data_smem_{0}_bss_end = .;
SMEM_PARTITION_ALIGN(z_data_smem_{0}_bss_end - z_data_smem_{0}_part_start);
z_data_smem_{0}_part_end = .;
"""
linker_start_seq = """
SECTION_PROLOGUE(_APP_SMEM{1}_SECTION_NAME,,)
{{
APP_SHARED_ALIGN;
_app_smem{0}_start = .;
"""
linker_end_seq = """
APP_SHARED_ALIGN;
_app_smem{0}_end = .;
}} GROUP_DATA_LINK_IN(RAMABLE_REGION, ROMABLE_REGION)
"""
empty_app_smem = """
SECTION_PROLOGUE(_APP_SMEM{1}_SECTION_NAME,,)
{{
#ifdef EMPTY_APP_SHARED_ALIGN
EMPTY_APP_SHARED_ALIGN;
#endif
_app_smem{0}_start = .;
_app_smem{0}_end = .;
}} GROUP_DATA_LINK_IN(RAMABLE_REGION, ROMABLE_REGION)
"""
size_cal_string = """
z_data_smem_{0}_part_size = z_data_smem_{0}_part_end - z_data_smem_{0}_part_start;
z_data_smem_{0}_bss_size = z_data_smem_{0}_bss_end - z_data_smem_{0}_bss_start;
"""
section_regex = re.compile(r'data_smem_([A-Za-z0-9_]*)_(data|bss)*')
elf_part_size_regex = re.compile(r'z_data_smem_(.*)_part_size')
def find_obj_file_partitions(filename, partitions):
    """Scan one ELF object file for data_smem_* sections and accumulate
    their sizes into *partitions*.

    Parameters:
    - filename: path of the ELF object file to scan
    - partitions: dict updated in place; each value holds the summed
      section size under SZ (plus, in verbose mode, the first source
      filename under SRC)

    Returns the updated *partitions* dict.
    """
    with open(filename, 'rb') as f:
        try:
            full_lib = ELFFile(f)
        except elftools.common.exceptions.ELFError as e:
            # Bug fix: report the actual offending file instead of the
            # literal "(unknown)" (matches parse_elf_file's error style).
            exit(f"Error: {filename}: {e}")

        if not full_lib:
            sys.exit("Error parsing file: " + filename)

        sections = [x for x in full_lib.iter_sections()]
        for section in sections:
            m = section_regex.match(section.name)
            if not m:
                continue

            partition_name = m.groups()[0]
            if partition_name not in partitions:
                partitions[partition_name] = {SZ: section.header.sh_size}
                if args.verbose:
                    partitions[partition_name][SRC] = filename
            else:
                partitions[partition_name][SZ] += section.header.sh_size

    return partitions
def parse_obj_files(partitions):
    """Walk the build tree rooted at args.directory and collect partition
    data from every non-empty *.obj file found.
    """
    for dirpath, _, files in os.walk(args.directory):
        for fname in files:
            if not re.match(r".*\.obj$", fname):
                continue
            path = os.path.join(dirpath, fname)
            # Zero-length files cannot be valid ELF objects; skip them.
            if os.path.getsize(path) != 0:
                find_obj_file_partitions(path, partitions)
def parse_compile_command_file(partitions):
    """Extract object-file paths from a CMake compile_commands.json file
    and scan each existing object for partition sections.
    """
    obj_re = re.compile(r'-o\s+(\S*)')

    with open(args.compile_commands_file, 'rb') as f:
        commands = json.load(f)

    for entry in commands:
        found = obj_re.search(entry.get('command'))
        if found is None:
            continue
        obj_file = found.group(1)
        if not obj_file:
            continue

        fullname = os.path.join(entry.get('directory'), obj_file)
        # Because of issue #40635, not every object referenced by
        # compile_commands.json is guaranteed to exist on disk, so
        # only include files that are actually present.
        if os.path.exists(fullname):
            find_obj_file_partitions(fullname, partitions)
def parse_elf_file(partitions):
    """Read partition sizes from the absolute z_data_smem_*_part_size
    symbols of the linked ELF named by args.elf.
    """
    with open(args.elf, 'rb') as f:
        try:
            elffile = ELFFile(f)
        except elftools.common.exceptions.ELFError as e:
            exit(f"Error: {args.elf}: {e}")

        for section in elffile.iter_sections():
            if not isinstance(section, SymbolTableSection):
                continue
            for symbol in section.iter_symbols():
                # Only absolute symbols carry the computed partition sizes.
                if symbol['st_shndx'] != "SHN_ABS":
                    continue

                found = elf_part_size_regex.match(symbol.name)
                if found is None:
                    continue

                name = found.groups()[0]
                size = symbol['st_value']
                if name in partitions:
                    partitions[name][SZ] += size
                else:
                    partitions[name] = {SZ: size}
                    if args.verbose:
                        partitions[name][SRC] = args.elf
def generate_final_linker(linker_file, partitions, lnkr_sect=""):
    """Write the linker script fragment describing *partitions* to
    *linker_file*; emit an empty (symbol-only) section when none exist.
    """
    if partitions:
        out = linker_start_seq.format(lnkr_sect, lnkr_sect.upper())
        sizes = ''
        for name, item in partitions.items():
            out += data_template.format(name)
            for lib in item.get(LIB, []):
                out += library_data_template.format(lib)
            out += bss_template.format(name, lnkr_sect)
            for lib in item.get(LIB, []):
                out += library_bss_template.format(lib)
            out += footer_template.format(name)
            sizes += size_cal_string.format(name)
        out += linker_end_seq.format(lnkr_sect)
        out += sizes
    else:
        out = empty_app_smem.format(lnkr_sect, lnkr_sect.upper())

    with open(linker_file, "w") as fw:
        fw.write(out)
def parse_args():
    """Parse the command line into the module-level ``args`` namespace."""
    global args

    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)

    ap.add_argument("-d", "--directory", required=False, default=None,
                    help="Root build directory")
    ap.add_argument("-e", "--elf", required=False, default=None,
                    help="ELF file")
    ap.add_argument("-f", "--compile-commands-file", required=False,
                    default=None, help="CMake compile commands file")
    ap.add_argument("-o", "--output", required=False,
                    help="Output ld file")
    ap.add_argument("-v", "--verbose", action="count", default=0,
                    help="Verbose Output")
    ap.add_argument("-l", "--library", nargs=2, action="append", default=[],
                    metavar=("LIBRARY", "PARTITION"),
                    help="Include globals for a particular library or object filename into a designated partition")
    ap.add_argument("--pinoutput", required=False,
                    help="Output ld file for pinned sections")
    ap.add_argument("--pinpartitions", action="store", required=False, default="",
                    help="Comma separated names of partitions to be pinned in physical memory")

    args = ap.parse_args()
def main():
    """Collect application memory partitions from build artifacts and emit
    the linker script fragment(s) describing them.

    Partition sizes come from object files (-d), compile_commands.json (-f),
    or a linked ELF (-e); -l adds whole libraries to named partitions.
    """
    parse_args()
    partitions = {}

    if args.directory is not None:
        parse_obj_files(partitions)
    if args.compile_commands_file is not None:
        parse_compile_command_file(partitions)
    elif args.elf is not None:
        parse_elf_file(partitions)
    else:
        # No size source given: nothing to generate.
        return

    # Attach user-requested libraries to their partitions.
    for lib, ptn in args.library:
        if ptn not in partitions:
            partitions[ptn] = {}
        if LIB not in partitions[ptn]:
            partitions[ptn][LIB] = [lib]
        else:
            partitions[ptn][LIB].append(lib)

    # NOTE(review): a partition introduced only via --library has no SZ key;
    # the size-based sort below would then raise KeyError — confirm -l is
    # always paired with object/ELF data for that partition.
    if args.pinoutput:
        # Split partitions into pinned (kept in physical memory) and generic.
        pin_part_names = args.pinpartitions.split(',')
        generic_partitions = {key: value for key, value in partitions.items()
                              if key not in pin_part_names}
        pinned_partitions = {key: value for key, value in partitions.items()
                             if key in pin_part_names}
    else:
        generic_partitions = partitions

    # Sample partitions.items() list before sorting:
    # [ ('part1', {'size': 64}), ('part3', {'size': 64}, ...
    # ('part0', {'size': 334}) ]
    # Sort largest-first (name as tie breaker) to minimize alignment padding.
    decreasing_tuples = sorted(generic_partitions.items(),
                               key=lambda x: (x[1][SZ], x[0]), reverse=True)

    partsorted = OrderedDict(decreasing_tuples)

    generate_final_linker(args.output, partsorted)
    if args.verbose:
        print("Partitions retrieved:")
        for key in partsorted:
            print(" {0}: size {1}: {2}".format(key,
                                               partsorted[key][SZ],
                                               partsorted[key][SRC]))

    if args.pinoutput:
        # Same generation pass again, for the pinned section only.
        decreasing_tuples = sorted(pinned_partitions.items(),
                                   key=lambda x: (x[1][SZ], x[0]), reverse=True)
        partsorted = OrderedDict(decreasing_tuples)

        generate_final_linker(args.pinoutput, partsorted, lnkr_sect="_pinned")
        if args.verbose:
            print("Pinned partitions retrieved:")
            for key in partsorted:
                print(" {0}: size {1}: {2}".format(key,
                                                   partsorted[key][SZ],
                                                   partsorted[key][SRC]))
# Run the generator only when executed as a script, not on import.
if __name__ == '__main__':
    main()
``` | /content/code_sandbox/scripts/build/gen_app_partitions.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,478 |
```python
#!/usr/bin/env python3
#
#
import argparse
import os
import re
def front_matter():
    """Return the C banner and include directives for the generated file."""
    return f'''
/*
 * This file is generated by {__file__}
 */
#include <zephyr/posix/signal.h>
'''
def gen_strsignal_table(input, output):
    """Parse signal definitions from *input* (a signal.h style header) and
    write a C lookup table mapping signal numbers to descriptions into
    *output*, creating the output directory if necessary.

    Only lines of the form below are picked up (SIGNO numeric):
        #define SYMBOL SIGNO /**< MSG */
    """
    # Select items of the form below (note: SIGNO is numeric)
    # #define SYMBOL SIGNO /**< MSG */
    # Compiled once here instead of on every line of the input.
    pattern = re.compile(
        r'^#define[\s]+(SIG[A-Z_]*)[\s]+([1-9][0-9]*)[\s]+/\*\*<[\s]+(.*)[\s]+\*/[\s]*$')

    with open(input, 'r') as inf:
        highest_signo = 0
        symbols = []
        msgs = {}

        for line in inf.readlines():
            match = pattern.match(line)
            if not match:
                continue

            symbol = match[1]
            signo = int(match[2])
            msg = match[3]

            symbols.append(symbol)
            msgs[symbol] = msg
            # signo is already an int; no second conversion needed.
            highest_signo = max(signo, highest_signo)

    # Previously a bare "except BaseException: pass" swallowed every
    # makedirs() failure; exist_ok=True tolerates only the intended
    # "directory already present" case and lets real errors surface.
    dirname = os.path.dirname(output)
    if dirname:
        os.makedirs(dirname, exist_ok=True)

    with open(output, 'w') as outf:
        print(front_matter(), file=outf)

        # Generate string table; size is highest signal number + 1 so the
        # designated initializers below always fit.
        print(
            f'static const char *const strsignal_list[{highest_signo + 1}] = {{', file=outf)
        for symbol in symbols:
            print(f'\t[{symbol}] = "{msgs[symbol]}",', file=outf)
        print('};', file=outf)
def parse_args():
    """Parse and return the command line arguments."""
    ap = argparse.ArgumentParser(allow_abbrev=False)

    ap.add_argument(
        '-i',
        '--input',
        dest='input',
        required=True,
        help='input file (e.g. include/zephyr/posix/signal.h)')
    ap.add_argument(
        '-o',
        '--output',
        dest='output',
        required=True,
        help='output file (e.g. build/zephyr/misc/generated/lib/posix/strsignal_table.h)')

    return ap.parse_args()
def main():
    """Script entry point: parse arguments and generate the table."""
    args = parse_args()
    gen_strsignal_table(args.input, args.output)
# Run the generator only when executed as a script, not on import.
if __name__ == '__main__':
    main()
``` | /content/code_sandbox/scripts/build/gen_strsignal_table.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 501 |
```python
#!/usr/bin/env python3
#
#
"""
Script to generate system call invocation macros
This script parses the system call metadata JSON file emitted by
parse_syscalls.py to create several files:
- A file containing weak aliases of any potentially unimplemented system calls,
as well as the system call dispatch table, which maps system call type IDs
to their handler functions.
- A header file defining the system call type IDs, as well as function
prototypes for all system call handler functions.
- A directory containing header files. Each header corresponds to a header
that was identified as containing system call declarations. These
generated headers contain the inline invocation functions for each system
call in that header.
"""
import sys
import re
import argparse
import os
import json
# Some kernel headers cannot include automated tracing without causing unintended recursion or
# other serious issues.
# These headers typically already have very specific tracing hooks for all relevant things
# written by hand so are excluded.
# Headers excluded from automatic tracing wrappers (see comment above).
notracing = ["kernel.h", "zephyr/kernel.h", "errno_private.h",
             "zephyr/errno_private.h"]

# 64-bit C types whose values must be split across two 32-bit registers
# when marshalled; extended at runtime via the --split-type option.
types64 = ["int64_t", "uint64_t"]

# The kernel linkage is complicated. These functions from
# userspace_handlers.c are present in the kernel .a library after
# userspace.c, which contains the weak fallbacks defined here. So the
# linker finds the weak one first and stops searching, and thus won't
# see the real implementation which should override. Yet changing the
# order runs afoul of a comment in CMakeLists.txt that the order is
# critical. These are core syscalls that won't ever be unconfigured,
# just disable the fallback mechanism as a simple workaround.
noweak = ["z_mrsh_k_object_release",
          "z_mrsh_k_object_access_grant",
          "z_mrsh_k_object_alloc"]
table_template = """/* auto-generated by gen_syscalls.py, don't edit */
#include <zephyr/llext/symbol.h>
/* Weak handler functions that get replaced by the real ones unless a system
* call is not implemented due to kernel configuration.
*/
%s
const _k_syscall_handler_t _k_syscall_table[K_SYSCALL_LIMIT] = {
\t%s
};
"""
list_template = """/* auto-generated by gen_syscalls.py, don't edit */
#ifndef ZEPHYR_SYSCALL_LIST_H
#define ZEPHYR_SYSCALL_LIST_H
%s
#ifndef _ASMLANGUAGE
#include <stdarg.h>
#include <stdint.h>
#endif /* _ASMLANGUAGE */
#endif /* ZEPHYR_SYSCALL_LIST_H */
"""
syscall_template = """/* auto-generated by gen_syscalls.py, don't edit */
{include_guard}
{tracing_include}
#ifndef _ASMLANGUAGE
#include <stdarg.h>
#include <zephyr/syscall_list.h>
#include <zephyr/syscall.h>
#include <zephyr/linker/sections.h>
#ifdef __cplusplus
extern "C" {{
#endif
{invocations}
#ifdef __cplusplus
}}
#endif
#endif
#endif /* include guard */
"""
handler_template = """
extern uintptr_t z_hdlr_%s(uintptr_t arg1, uintptr_t arg2, uintptr_t arg3,
uintptr_t arg4, uintptr_t arg5, uintptr_t arg6, void *ssf);
"""
weak_template = """
__weak ALIAS_OF(handler_no_syscall)
uintptr_t %s(uintptr_t arg1, uintptr_t arg2, uintptr_t arg3,
uintptr_t arg4, uintptr_t arg5, uintptr_t arg6, void *ssf);
"""
# defines a macro wrapper which supersedes the syscall when used
# and provides tracing enter/exit hooks while allowing per compilation unit
# enable/disable of syscall tracing. Used for returning functions
# Note that the last argument to the exit macro is the return value.
syscall_tracer_with_return_template = """
#if defined(CONFIG_TRACING_SYSCALL)
#ifndef DISABLE_SYSCALL_TRACING
{trace_diagnostic}
#define {func_name}({argnames}) ({{ \
{func_type} syscall__retval; \
sys_port_trace_syscall_enter({syscall_id}, {func_name}{trace_argnames}); \
syscall__retval = {func_name}({argnames}); \
sys_port_trace_syscall_exit({syscall_id}, {func_name}{trace_argnames}, syscall__retval); \
syscall__retval; \
}})
#endif
#endif
"""
# defines a macro wrapper which supersedes the syscall when used
# and provides tracing enter/exit hooks while allowing per compilation unit
# enable/disable of syscall tracing. Used for non-returning (void) functions
syscall_tracer_void_template = """
#if defined(CONFIG_TRACING_SYSCALL)
#ifndef DISABLE_SYSCALL_TRACING
{trace_diagnostic}
#define {func_name}({argnames}) do {{ \
sys_port_trace_syscall_enter({syscall_id}, {func_name}{trace_argnames}); \
{func_name}({argnames}); \
sys_port_trace_syscall_exit({syscall_id}, {func_name}{trace_argnames}); \
}} while(false)
#endif
#endif
"""
exported_template = """
/* Export syscalls for extensions */
static void * const no_handler = NULL;
/* Weak references, if something is not found by the linker, it will be NULL
* and simply fail during extension load
*/
%s
/* Exported symbols */
%s
"""
typename_regex = re.compile(r'(.*?)([A-Za-z0-9_]+)$')
class SyscallParseException(Exception):
    """Raised when a system call declaration cannot be parsed."""
    pass
def typename_split(item):
    """Split a C declaration such as "struct foo *bar" into (type, name).

    Raises SyscallParseException for array declarations and inline
    function pointers, which syscall signatures must not use.
    """
    if "[" in item:
        raise SyscallParseException(
            "Please pass arrays to syscalls as pointers, unable to process '%s'" %
            item)
    if "(" in item:
        raise SyscallParseException(
            "Please use typedefs for function pointers")

    found = typename_regex.match(item)
    if found is None:
        raise SyscallParseException("Malformed system call invocation")

    typ, name = found.groups()
    return (typ.strip(), name)
def need_split(argtype):
    """True when *argtype* must be marshalled as two 32-bit registers."""
    if args.long_registers:
        return False
    return argtype in types64
# Note: "lo" and "hi" are named in little endian conventions,
# but it doesn't matter as long as they are consistently
# generated.
def union_decl(type, split):
    """Return a C union declaration overlaying *type* with either a single
    uintptr_t ("x") or, when *split*, a lo/hi register pair ("split")."""
    if split:
        middle = "struct { uintptr_t lo, hi; } split"
    else:
        middle = "uintptr_t x"
    return "union { %s; %s val; }" % (middle, type)
def wrapper_defs(func_name, func_type, args, fn, userspace_only):
    """Generate the C source for one syscall's inline invocation wrapper.

    Returns a string containing the extern z_impl_ declaration, the
    __pinned_func static inline wrapper (which marshals arguments into
    registers and traps to the kernel when called from user mode, unless
    userspace_only), and - for headers not in `notracing` - a tracing
    macro that supersedes the function name.

    Parameters:
    - func_name / func_type: name and return type of the syscall
    - args: list of (argtype, argname) tuples
    - fn: originating header filename (controls tracing emission)
    - userspace_only: emit only the user-mode trap path
    """
    ret64 = need_split(func_type)
    mrsh_args = []  # List of rvalue expressions for the marshalled invocation
    decl_arglist = ", ".join([" ".join(argrec) for argrec in args]) or "void"
    syscall_id = "K_SYSCALL_" + func_name.upper()

    wrap = ''
    if not userspace_only:
        wrap += "extern %s z_impl_%s(%s);\n" % (func_type, func_name, decl_arglist)
        wrap += "\n"

    wrap += "__pinned_func\n"
    wrap += "static inline %s %s(%s)\n" % (func_type, func_name, decl_arglist)
    wrap += "{\n"
    if not userspace_only:
        wrap += "#ifdef CONFIG_USERSPACE\n"

    # 64-bit return values travel through an extra out-pointer argument.
    wrap += ("\t" + "uint64_t ret64;\n") if ret64 else ""
    if not userspace_only:
        wrap += "\t" + "if (z_syscall_trap()) {\n"

    valist_args = []
    for argnum, (argtype, argname) in enumerate(args):
        split = need_split(argtype)
        wrap += "\t\t%s parm%d" % (union_decl(argtype, split), argnum)
        if argtype != "va_list":
            wrap += " = { .val = %s };\n" % argname
        else:
            # va_list objects are ... peculiar.
            wrap += ";\n" + "\t\t" + "va_copy(parm%d.val, %s);\n" % (argnum, argname)
            valist_args.append("parm%d.val" % argnum)

        if split:
            mrsh_args.append("parm%d.split.lo" % argnum)
            mrsh_args.append("parm%d.split.hi" % argnum)
        else:
            mrsh_args.append("parm%d.x" % argnum)

    if ret64:
        mrsh_args.append("(uintptr_t)&ret64")

    # Only six register slots exist; overflow arguments go into a stack
    # array passed as the sixth slot.
    if len(mrsh_args) > 6:
        wrap += "\t\t" + "uintptr_t more[] = {\n"
        wrap += "\t\t\t" + (",\n\t\t\t".join(mrsh_args[5:])) + "\n"
        wrap += "\t\t" + "};\n"
        mrsh_args[5:] = ["(uintptr_t) &more"]

    invoke = ("arch_syscall_invoke%d(%s)"
              % (len(mrsh_args),
                 ", ".join(mrsh_args + [syscall_id])))

    if ret64:
        invoke = "\t\t" + "(void) %s;\n" % invoke
        retcode = "\t\t" + "return (%s) ret64;\n" % func_type
    elif func_type == "void":
        invoke = "\t\t" + "(void) %s;\n" % invoke
        retcode = "\t\t" + "return;\n"
    elif valist_args:
        # Delay the return so the va_end() calls below still run.
        invoke = "\t\t" + "%s invoke__retval = %s;\n" % (func_type, invoke)
        retcode = "\t\t" + "return invoke__retval;\n"
    else:
        invoke = "\t\t" + "return (%s) %s;\n" % (func_type, invoke)
        retcode = ""

    wrap += invoke
    for argname in valist_args:
        wrap += "\t\t" + "va_end(%s);\n" % argname
    wrap += retcode

    if not userspace_only:
        wrap += "\t" + "}\n"
        wrap += "#endif\n"

    # Otherwise fall through to direct invocation of the impl func.
    # Note the compiler barrier: that is required to prevent code from
    # the impl call from being hoisted above the check for user
    # context.
    impl_arglist = ", ".join([argrec[1] for argrec in args])
    impl_call = "z_impl_%s(%s)" % (func_name, impl_arglist)
    wrap += "\t" + "compiler_barrier();\n"
    wrap += "\t" + "%s%s;\n" % ("return " if func_type != "void" else "",
                                impl_call)
    wrap += "}\n"

    if fn not in notracing:
        argnames = ", ".join([f"{argname}" for _, argname in args])
        trace_argnames = ""
        if len(args) > 0:
            trace_argnames = ", " + argnames

        trace_diagnostic = ""
        if os.getenv('TRACE_DIAGNOSTICS'):
            trace_diagnostic = f"#warning Tracing {func_name}"

        if func_type != "void":
            wrap += syscall_tracer_with_return_template.format(func_type=func_type, func_name=func_name,
                                                               argnames=argnames, trace_argnames=trace_argnames,
                                                               syscall_id=syscall_id, trace_diagnostic=trace_diagnostic)
        else:
            wrap += syscall_tracer_void_template.format(func_type=func_type, func_name=func_name,
                                                        argnames=argnames, trace_argnames=trace_argnames,
                                                        syscall_id=syscall_id, trace_diagnostic=trace_diagnostic)

    return wrap
# Returns an expression for the specified (zero-indexed!) marshalled
# parameter to a syscall, with handling for a final "more" parameter.
def mrsh_rval(mrsh_num, total):
    """Return the C expression for marshalled parameter *mrsh_num* (zero
    indexed); with more than six parameters, those past the fifth are
    fetched from the trailing "more" array."""
    fits_in_registers = mrsh_num < 5 or total <= 6
    if fits_in_registers:
        return "arg%d" % mrsh_num
    return "(((uintptr_t *)more)[%d])" % (mrsh_num - 5)
def marshall_defs(func_name, func_type, args):
    """Generate the C marshalling ("z_mrsh_") function for one syscall.

    Returns (source_string, mrsh_function_name). The generated function
    unpacks the uintptr_t register arguments back into typed values and
    invokes the z_vrfy_ verification function, handling split 64-bit
    arguments and 64-bit return values written through an out-pointer.
    """
    mrsh_name = "z_mrsh_" + func_name

    nmrsh = 0  # number of marshalled uintptr_t parameter
    vrfy_parms = []  # list of (argtype, bool_is_split)
    for (argtype, _) in args:
        split = need_split(argtype)
        vrfy_parms.append((argtype, split))
        nmrsh += 2 if split else 1

    # Final argument for a 64 bit return value?
    if need_split(func_type):
        nmrsh += 1

    decl_arglist = ", ".join([" ".join(argrec) for argrec in args])
    mrsh = "extern %s z_vrfy_%s(%s);\n" % (func_type, func_name, decl_arglist)

    mrsh += "uintptr_t %s(uintptr_t arg0, uintptr_t arg1, uintptr_t arg2,\n" % mrsh_name
    if nmrsh <= 6:
        mrsh += "\t\t" + "uintptr_t arg3, uintptr_t arg4, uintptr_t arg5, void *ssf)\n"
    else:
        # Seventh-and-later parameters arrive via the "more" array pointer.
        mrsh += "\t\t" + "uintptr_t arg3, uintptr_t arg4, void *more, void *ssf)\n"
    mrsh += "{\n"
    mrsh += "\t" + "_current->syscall_frame = ssf;\n"

    for unused_arg in range(nmrsh, 6):
        mrsh += "\t(void) arg%d;\t/* unused */\n" % unused_arg

    if nmrsh > 6:
        # Validate that the user-supplied "more" array is readable.
        mrsh += ("\tK_OOPS(K_SYSCALL_MEMORY_READ(more, "
                 + str(nmrsh - 5) + " * sizeof(uintptr_t)));\n")

    argnum = 0
    for i, (argtype, split) in enumerate(vrfy_parms):
        mrsh += "\t%s parm%d;\n" % (union_decl(argtype, split), i)
        if split:
            mrsh += "\t" + "parm%d.split.lo = %s;\n" % (i, mrsh_rval(argnum, nmrsh))
            argnum += 1
            mrsh += "\t" + "parm%d.split.hi = %s;\n" % (i, mrsh_rval(argnum, nmrsh))
        else:
            mrsh += "\t" + "parm%d.x = %s;\n" % (i, mrsh_rval(argnum, nmrsh))
        argnum += 1

    # Finally, invoke the verify function
    out_args = ", ".join(["parm%d.val" % i for i in range(len(args))])
    vrfy_call = "z_vrfy_%s(%s)" % (func_name, out_args)

    if func_type == "void":
        mrsh += "\t" + "%s;\n" % vrfy_call
        mrsh += "\t" + "_current->syscall_frame = NULL;\n"
        mrsh += "\t" + "return 0;\n"
    else:
        mrsh += "\t" + "%s ret = %s;\n" % (func_type, vrfy_call)

        if need_split(func_type):
            # 64-bit result: store through the validated out-pointer.
            ptr = "((uint64_t *)%s)" % mrsh_rval(nmrsh - 1, nmrsh)
            mrsh += "\t" + "K_OOPS(K_SYSCALL_MEMORY_WRITE(%s, 8));\n" % ptr
            mrsh += "\t" + "*%s = ret;\n" % ptr
            mrsh += "\t" + "_current->syscall_frame = NULL;\n"
            mrsh += "\t" + "return 0;\n"
        else:
            mrsh += "\t" + "_current->syscall_frame = NULL;\n"
            mrsh += "\t" + "return (uintptr_t) ret;\n"

    mrsh += "}\n"

    return mrsh, mrsh_name
def analyze_fn(match_group, fn, userspace_only):
    """Analyze one parsed syscall declaration and generate its artifacts.

    Parameters:
    - match_group: (declaration, raw_arg_string) pair from the JSON input
    - fn: originating header filename
    - userspace_only: forwarded to wrapper_defs()

    Returns (handler_name, invocation_source, marshaller_source,
    syscall_id, dispatch_table_entry). Re-raises SyscallParseException
    after logging the offending declaration to stderr.
    """
    func, args = match_group

    try:
        if args == "void":
            args = []
        else:
            args = [typename_split(a.strip()) for a in args.split(",")]

        func_type, func_name = typename_split(func)
    except SyscallParseException:
        sys.stderr.write("In declaration of %s\n" % func)
        raise

    sys_id = "K_SYSCALL_" + func_name.upper()

    # (Removed a dead "marshaller = None" store that was immediately
    # overwritten by the assignment below.)
    marshaller, handler = marshall_defs(func_name, func_type, args)
    invocation = wrapper_defs(func_name, func_type, args, fn, userspace_only)

    # Entry in _k_syscall_table
    table_entry = "[%s] = %s" % (sys_id, handler)

    return (handler, invocation, marshaller, sys_id, table_entry)
def parse_args():
    """Parse the command line into the module-level ``args`` namespace."""
    global args

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)

    parser.add_argument("-i", "--json-file", required=True,
                        help="Read syscall information from json file")
    parser.add_argument("-d", "--syscall-dispatch", required=True,
                        help="output C system call dispatch table file")
    parser.add_argument("-l", "--syscall-list", required=True,
                        help="output C system call list header")
    parser.add_argument("-o", "--base-output", required=True,
                        help="Base output directory for syscall macro headers")
    parser.add_argument("-s", "--split-type", action="append",
                        help="A long type that must be split/marshalled on 32-bit systems")
    parser.add_argument("-x", "--long-registers", action="store_true",
                        help="Indicates we are on system with 64-bit registers")
    parser.add_argument("--gen-mrsh-files", action="store_true",
                        help="Generate marshalling files (*_mrsh.c)")
    parser.add_argument("-e", "--syscall-export-llext",
                        help="output C system call export for extensions")
    # Help-text typo fix: "userpace" -> "userspace".
    parser.add_argument("-u", "--userspace-only", action="store_true",
                        help="Only generate the userspace path of wrappers")

    args = parser.parse_args()
def main():
    """Drive generation of all syscall artifacts from the parsed JSON:
    dispatch table, optional llext exports, ID list header, per-header
    invocation headers, and optional *_mrsh.c marshalling files.
    """
    parse_args()

    if args.split_type is not None:
        for t in args.split_type:
            types64.append(t)

    with open(args.json_file, 'r') as fd:
        syscalls = json.load(fd)

    invocations = {}
    mrsh_defs = {}
    mrsh_includes = {}
    ids_emit = []
    ids_not_emit = []
    table_entries = []
    handlers = []
    emit_list = []
    exported = []

    # First pass: generate all per-syscall artifacts and bucket them by
    # originating header / emission status.
    for match_group, fn, to_emit in syscalls:
        handler, inv, mrsh, sys_id, entry = analyze_fn(match_group, fn, args.userspace_only)

        if fn not in invocations:
            invocations[fn] = []

        invocations[fn].append(inv)
        handlers.append(handler)

        if to_emit:
            ids_emit.append(sys_id)
            table_entries.append(entry)
            emit_list.append(handler)
            exported.append(handler.replace("z_mrsh_", "z_impl_"))
        else:
            # Unused syscall: keep its ID (outside the table range) only.
            ids_not_emit.append(sys_id)

        if mrsh and to_emit:
            syscall = typename_split(match_group[0])[1]
            mrsh_defs[syscall] = mrsh
            mrsh_includes[syscall] = "#include <zephyr/syscalls/%s>" % fn

    # Emit the dispatch table with weak fallbacks for unimplemented calls.
    with open(args.syscall_dispatch, "w") as fp:
        table_entries.append("[K_SYSCALL_BAD] = handler_bad_syscall")

        weak_defines = "".join([weak_template % name
                                for name in handlers
                                if not name in noweak and name in emit_list])

        # The "noweak" ones just get a regular declaration
        weak_defines += "\n".join(["extern uintptr_t %s(uintptr_t arg1, uintptr_t arg2, uintptr_t arg3, uintptr_t arg4, uintptr_t arg5, uintptr_t arg6, void *ssf);"
                                   % s for s in noweak])

        fp.write(table_template % (weak_defines,
                                   ",\n\t".join(table_entries)))

    if args.syscall_export_llext:
        with open(args.syscall_export_llext, "w") as fp:
            # Export symbols for emitted syscalls
            weak_refs = "\n".join("extern __weak ALIAS_OF(no_handler) void * const %s;"
                                  % e for e in exported)
            exported_symbols = "\n".join("EXPORT_SYMBOL(%s);"
                                         % e for e in exported)
            fp.write(exported_template % (weak_refs, exported_symbols))

    # Listing header emitted to stdout
    ids_emit.sort()
    ids_emit.extend(["K_SYSCALL_BAD", "K_SYSCALL_LIMIT"])
    ids_as_defines = ""
    for i, item in enumerate(ids_emit):
        ids_as_defines += "#define {} {}\n".format(item, i)

    if ids_not_emit:
        # There are syscalls that are not used in the image but
        # their IDs are used in the generated stubs. So need to
        # make them usable but outside the syscall ID range.
        ids_as_defines += "\n\n/* Following syscalls are not used in image */\n"
        ids_not_emit.sort()
        num_emitted_ids = len(ids_emit)
        for i, item in enumerate(ids_not_emit):
            ids_as_defines += "#define {} {}\n".format(item, i + num_emitted_ids)

    with open(args.syscall_list, "w") as fp:
        fp.write(list_template % ids_as_defines)

    # One invocation header per original header that declared syscalls.
    os.makedirs(args.base_output, exist_ok=True)
    for fn, invo_list in invocations.items():
        out_fn = os.path.join(args.base_output, fn)

        ig = re.sub("[^a-zA-Z0-9]", "_", "Z_INCLUDE_SYSCALLS_" + fn).upper()
        include_guard = "#ifndef %s\n#define %s\n" % (ig, ig)
        tracing_include = ""
        if fn not in notracing:
            tracing_include = "#include <zephyr/tracing/tracing_syscall.h>"

        header = syscall_template.format(include_guard=include_guard, tracing_include=tracing_include, invocations="\n\n".join(invo_list))

        with open(out_fn, "w") as fp:
            fp.write(header)

    # Likewise emit _mrsh.c files for syscall inclusion
    if args.gen_mrsh_files:
        for fn in mrsh_defs:
            mrsh_fn = os.path.join(args.base_output, fn + "_mrsh.c")

            with open(mrsh_fn, "w") as fp:
                fp.write("/* auto-generated by gen_syscalls.py, don't edit */\n\n")
                fp.write(mrsh_includes[fn] + "\n")
                fp.write("\n")
                fp.write(mrsh_defs[fn] + "\n")
# Run the generator only when executed as a script, not on import.
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/gen_syscalls.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 4,996 |
```python
#!/usr/bin/env python3
#
#
#
import struct
class gen_isr_parser:
    """Parser for the intList section produced by the first-pass link.

    It decodes the interrupt records placed in the binary by IRQ_CONNECT /
    IRQ_DIRECT_CONNECT macros and can emit both the generated C source for
    the ISR tables and the matching linker-script fragments.
    """

    # Header prepended to every generated C source file.
    source_header = """
/* AUTO-GENERATED by gen_isr_tables.py, do not edit! */
#include <zephyr/toolchain.h>
#include <zephyr/linker/sections.h>
#include <zephyr/sw_isr_table.h>
#include <zephyr/arch/cpu.h>
"""

    # Compile-time layout checks emitted when shared interrupts are enabled.
    shared_isr_table_header = """
/* For this parser to work, we have to be sure that shared interrupts table entry
 * and the normal isr table entry have exactly the same layout
 */
BUILD_ASSERT(sizeof(struct _isr_table_entry)
             ==
             sizeof(struct z_shared_isr_table_entry),
             "Shared ISR and ISR table entries layout do not match");
BUILD_ASSERT(offsetof(struct _isr_table_entry, arg)
             ==
             offsetof(struct z_shared_isr_table_entry, arg),
             "Shared ISR and ISR table entries layout do not match");
BUILD_ASSERT(offsetof(struct _isr_table_entry, isr)
             ==
             offsetof(struct z_shared_isr_table_entry, isr),
             "Shared ISR and ISR table entries layout do not match");
"""

    def __init__(self, intlist_data, config, log):
        """Initialize the parser.

        The function prepares parser to work.
        Parameters:
        - intlist_data: The binary data from intlist section
        - config: The configuration object
        - log: The logging object, has to have error and debug methods
        """
        self.__config = config
        self.__log = log
        intlist = self.__read_intlist(intlist_data)
        # __vt: hardware vector table (direct ISRs), __swt: SW ISR table,
        # __nv: number of vectors.
        self.__vt, self.__swt, self.__nv, header = self.__parse_intlist(intlist)
        self.__swi_table_entry_size = header["swi_table_entry_size"]
        self.__shared_isr_table_entry_size = header["shared_isr_table_entry_size"]
        self.__shared_isr_client_num_offset = header["shared_isr_client_num_offset"]

    def __read_intlist(self, intlist_data):
        """read an intList section from the elf file.

        This is version 2 of a header created by include/zephyr/linker/intlist.ld:

        struct {
            uint32_t num_vectors; <- typically CONFIG_NUM_IRQS
            uint8_t stream[]; <- the stream with the interrupt data
        };

        The stream consists of variable length records in a form:

        struct _isr_list_sname {
            /** IRQ line number */
            int32_t irq;
            /** Flags for this IRQ, see ISR_FLAG_* definitions */
            int32_t flags;
            /** The section name */
            const char sname[];
        };

        The flexible array member here (sname) contains the name of the section where the structure
        with interrupt data is located.
        It is always Null-terminated string thus we have to search through the input data for the
        structure end.
        """
        intlist = {}
        prefix = self.__config.endian_prefix()
        # Extract header and the rest of the data.
        # Five little/big-endian uint32 fields (see intlist.ld for the layout).
        intlist_header_fmt = prefix + "IIIII"
        header_sz = struct.calcsize(intlist_header_fmt)
        header_raw = struct.unpack_from(intlist_header_fmt, intlist_data, 0)
        self.__log.debug(str(header_raw))
        intlist["num_vectors"] = header_raw[0]
        intlist["offset"] = header_raw[1]
        intlist["swi_table_entry_size"] = header_raw[2]
        intlist["shared_isr_table_entry_size"] = header_raw[3]
        intlist["shared_isr_client_num_offset"] = header_raw[4]
        intdata = intlist_data[header_sz:]
        # Extract information about interrupts
        intlist_entry_fmt = prefix + "ii"
        entry_sz = struct.calcsize(intlist_entry_fmt)
        intlist["interrupts"] = []
        # A valid record is strictly longer than the fixed part: it carries at
        # least the NUL byte of the section name, hence ">" not ">=".
        while len(intdata) > entry_sz:
            entry_raw = struct.unpack_from(intlist_entry_fmt, intdata, 0)
            intdata = intdata[entry_sz:]
            # The section name is a NUL-terminated string following the fixed part.
            null_idx = intdata.find(0)
            if null_idx < 0:
                self.__log.error("Cannot find sname null termination at IRQ{}".format(entry_raw[0]))
            bname = intdata[:null_idx]
            # Next structure starts with 4B alignment
            next_idx = null_idx + 1
            next_idx = (next_idx + 3) & ~3
            intdata = intdata[next_idx:]
            sname = bname.decode()
            intlist["interrupts"].append([entry_raw[0], entry_raw[1], sname])
            self.__log.debug("Unpacked IRQ{}, flags: {}, sname: \"{}\"\n".format(
                entry_raw[0], entry_raw[1], sname))
        # If any data left at the end - it has to be all the way 0 - this is just a check
        if (len(intdata) and not all([d == 0 for d in intdata])):
            self.__log.error("Non-zero data found at the end of the intList data.\n")
        self.__log.debug("Configured interrupt routing with linker")
        self.__log.debug("irq flags sname")
        self.__log.debug("--------------------------")
        for irq in intlist["interrupts"]:
            self.__log.debug("{0:<3} {1:<5} {2}".format(
                hex(irq[0]), irq[1], irq[2]))
        return intlist

    def __parse_intlist(self, intlist):
        """All the intlist data are parsed into swt and vt arrays.

        The vt array is prepared for hardware interrupt table.
        Every entry in the selected position would contain None or the name of the function pointer
        (address or string).

        The swt is a little more complex. At every position it would contain an array of parameter and
        function pointer pairs. If CONFIG_SHARED_INTERRUPTS is enabled there may be more than 1 entry.
        If empty array is placed on selected position - it means that the application does not implement
        this interrupt.

        Parameters:
        - intlist: The preprocessed list of intlist section content (see read_intlist)

        Return:
        vt, swt - parsed vt and swt arrays (see function description above)
        """
        nvec = intlist["num_vectors"]
        offset = intlist["offset"]
        header = {
            "swi_table_entry_size": intlist["swi_table_entry_size"],
            "shared_isr_table_entry_size": intlist["shared_isr_table_entry_size"],
            "shared_isr_client_num_offset": intlist["shared_isr_client_num_offset"]
        }
        # A wildly large vector count is the usual symptom of decoding the
        # header with the wrong endianness.
        if nvec > pow(2, 15):
            raise ValueError('nvec is too large, check endianness.')
        self.__log.debug('offset is ' + str(offset))
        self.__log.debug('num_vectors is ' + str(nvec))
        # Set default entries in both tables
        if not(self.__config.args.sw_isr_table or self.__config.args.vector_table):
            self.__log.error("one or both of -s or -V needs to be specified on command line")
        if self.__config.args.vector_table:
            vt = [None for i in range(nvec)]
        else:
            vt = None
        if self.__config.args.sw_isr_table:
            swt = [[] for i in range(nvec)]
        else:
            swt = None
        # Process intlist and write to the tables created
        for irq, flags, sname in intlist["interrupts"]:
            if self.__config.test_isr_direct(flags):
                # Direct ISR: the handler goes straight into the HW vector table.
                if not 0 <= irq - offset < len(vt):
                    self.__log.error("IRQ %d (offset=%d) exceeds the maximum of %d" %
                                     (irq - offset, offset, len(vt) - 1))
                vt[irq - offset] = sname
            else:
                # Regular interrupt
                if not swt:
                    self.__log.error("Regular Interrupt %d declared with section name %s "
                                     "but no SW ISR_TABLE in use"
                                     % (irq, sname))
                table_index = self.__config.get_swt_table_index(offset, irq)
                if not 0 <= table_index < len(swt):
                    self.__log.error("IRQ %d (offset=%d) exceeds the maximum of %d" %
                                     (table_index, offset, len(swt) - 1))
                # Check if the given section name does not repeat outside of current interrupt
                for i in range(nvec):
                    if i == irq:
                        continue
                    if sname in swt[i]:
                        self.__log.error(("Attempting to register the same section name \"{}\"for" +
                                          "different interrupts: {} and {}").format(sname, i, irq))
                if self.__config.check_shared_interrupts():
                    lst = swt[table_index]
                    if len(lst) >= self.__config.get_sym("CONFIG_SHARED_IRQ_MAX_NUM_CLIENTS"):
                        self.__log.error(f"Reached shared interrupt client limit. Maybe increase"
                                         + f" CONFIG_SHARED_IRQ_MAX_NUM_CLIENTS?")
                else:
                    # Without shared interrupts, more than one registration on
                    # the same slot is always a configuration error.
                    if len(swt[table_index]) > 0:
                        self.__log.error(f"multiple registrations at table_index {table_index} for irq {irq} (0x{irq:x})"
                                         + f"\nExisting section {swt[table_index]}, new section {sname}"
                                         + "\nHas IRQ_CONNECT or IRQ_DIRECT_CONNECT accidentally been invoked on the same irq multiple times?"
                        )
                swt[table_index].append(sname)
        return vt, swt, nvec, header

    @staticmethod
    def __irq_spurious_section(irq):
        # Section that holds the spurious-handler entry of a direct IRQ.
        return '.irq_spurious.0x{:x}'.format(irq)

    @staticmethod
    def __isr_generated_section(irq):
        # Section that holds a generated (spurious or shared) SW ISR entry.
        return '.isr_generated.0x{:x}'.format(irq)

    @staticmethod
    def __shared_entry_section(irq, ent):
        # Section of the `ent`-th client entry of shared interrupt `irq`.
        return '.isr_shared.0x{:x}_0x{:x}'.format(irq, ent)

    @staticmethod
    def __shared_client_num_section(irq):
        # Section that holds the client counter of shared interrupt `irq`.
        return '.isr_shared.0x{:x}_client_num'.format(irq)

    def __isr_spurious_entry(self, irq):
        """Return the C line declaring a spurious SW ISR table entry."""
        return '_Z_ISR_TABLE_ENTRY({irq}, {func}, NULL, "{sect}");'.format(
            irq = irq,
            func = self.__config.swt_spurious_handler,
            sect = self.__isr_generated_section(irq)
        )

    def __isr_shared_entry(self, irq):
        """Return the C line routing a SW ISR entry to the shared dispatcher."""
        return '_Z_ISR_TABLE_ENTRY({irq}, {func}, {arg}, "{sect}");'.format(
            irq = irq,
            arg = '&{}[{}]'.format(self.__config.shared_array_name, irq),
            func = self.__config.swt_shared_handler,
            sect = self.__isr_generated_section(irq)
        )

    def __irq_spurious_entry(self, irq):
        """Return the C line declaring a spurious direct (HW) vector entry."""
        return '_Z_ISR_DIRECT_TABLE_ENTRY({irq}, {func}, "{sect}");'.format(
            irq = irq,
            func = self.__config.vt_default_handler,
            sect = self.__irq_spurious_section(irq)
        )

    def __write_isr_handlers(self, fp):
        """Emit SW ISR table entries for every vector to *fp*."""
        for i in range(self.__nv):
            if len(self.__swt[i]) <= 0:
                fp.write(self.__isr_spurious_entry(i) + '\n')
            elif len(self.__swt[i]) > 1:
                # Connect to shared handlers
                fp.write(self.__isr_shared_entry(i) + '\n')
            else:
                fp.write('/* ISR: {} implemented in app in "{}" section. */\n'.format(
                    i, self.__swt[i][0]))

    def __write_irq_handlers(self, fp):
        """Emit HW vector table entries for every vector to *fp*."""
        for i in range(self.__nv):
            if self.__vt[i] is None:
                fp.write(self.__irq_spurious_entry(i) + '\n')
            else:
                fp.write('/* ISR: {} implemented in app. */\n'.format(i))

    def __write_shared_handlers(self, fp):
        """Emit the shared ISR client entries and per-IRQ client counters."""
        fp.write("extern struct z_shared_isr_table_entry "
                 "{}[{}];\n".format(self.__config.shared_array_name, self.__nv))
        shared_cnt = self.__config.get_sym('CONFIG_SHARED_IRQ_MAX_NUM_CLIENTS')
        for i in range(self.__nv):
            swt_len = len(self.__swt[i])
            for j in range(shared_cnt):
                if (swt_len <= 1) or (swt_len <= j):
                    # Add all unused entry
                    fp.write('static Z_DECL_ALIGN(struct _isr_table_entry)\n' +
                             '\tZ_GENERIC_SECTION({})\n'.format(self.__shared_entry_section(i, j)) +
                             '\t__used isr_shared_empty_entry_0x{:x}_0x{:x} = {{\n'.format(i, j) +
                             '\t\t.arg = (const void *)NULL,\n' +
                             '\t\t.isr = (void (*)(const void *))(void *)0\n' +
                             '};\n'
                    )
                else:
                    # Add information about entry implemented by application
                    fp.write('/* Shared isr {} entry {} implemented in "{}" section*/\n'.format(
                        i, j, self.__swt[i][j]))
            # Add information about clients count
            fp.write(('static size_t Z_GENERIC_SECTION({}) __used\n' +
                      'isr_shared_client_num_0x{:x} = {};\n\n').format(
                          self.__shared_client_num_section(i),
                          i,
                          0 if swt_len < 2 else swt_len)
            )

    def write_source(self, fp):
        """Write the generated C source (vector/ISR tables) to *fp*."""
        fp.write(self.source_header)
        if self.__vt:
            self.__write_irq_handlers(fp)
        if not self.__swt:
            return
        if self.__config.check_shared_interrupts():
            self.__write_shared_handlers(fp)
        self.__write_isr_handlers(fp)

    def __write_linker_irq(self, fp):
        """Emit linker KEEP() lines placing the HW vector table sections."""
        fp.write('{} = .;\n'.format(self.__config.irq_vector_array_name))
        for i in range(self.__nv):
            if self.__vt[i] is None:
                sname = self.__irq_spurious_section(i)
            else:
                sname = self.__vt[i]
            fp.write('KEEP(*("{}"))\n'.format(sname))

    def __write_linker_shared(self, fp):
        """Emit linker lines placing shared ISR client arrays and counters."""
        fp.write(". = ALIGN({});\n".format(self.__shared_isr_table_entry_size))
        fp.write('{} = .;\n'.format(self.__config.shared_array_name))
        shared_cnt = self.__config.get_sym('CONFIG_SHARED_IRQ_MAX_NUM_CLIENTS')
        # Padding between the last client entry and the client counter field.
        client_num_pads = self.__shared_isr_client_num_offset - \
            shared_cnt * self.__swi_table_entry_size
        if client_num_pads < 0:
            self.__log.error("Invalid __shared_isr_client_num_offset header value")
        for i in range(self.__nv):
            swt_len = len(self.__swt[i])
            # Add all entries
            for j in range(shared_cnt):
                if (swt_len <= 1) or (swt_len <= j):
                    fp.write('KEEP(*("{}"))\n'.format(self.__shared_entry_section(i, j)))
                else:
                    sname = self.__swt[i][j]
                    # A section already placed for this IRQ must not be
                    # KEEP()ed twice; just note the repetition.
                    if (j != 0) and (sname in self.__swt[i][0:j]):
                        fp.write('/* Repetition of "{}" section */\n'.format(sname))
                    else:
                        fp.write('KEEP(*("{}"))\n'.format(sname))
            fp.write('. = . + {};\n'.format(client_num_pads))
            fp.write('KEEP(*("{}"))\n'.format(self.__shared_client_num_section(i)))
            fp.write(". = ALIGN({});\n".format(self.__shared_isr_table_entry_size))

    def __write_linker_isr(self, fp):
        """Emit linker KEEP() lines placing the SW ISR table sections."""
        fp.write(". = ALIGN({});\n".format(self.__swi_table_entry_size))
        fp.write('{} = .;\n'.format(self.__config.sw_isr_array_name))
        for i in range(self.__nv):
            if (len(self.__swt[i])) == 1:
                sname = self.__swt[i][0]
            else:
                sname = self.__isr_generated_section(i)
            fp.write('KEEP(*("{}"))\n'.format(sname))

    def write_linker_vt(self, fp):
        """Write the linker fragment for the HW vector table, if enabled."""
        if self.__vt:
            self.__write_linker_irq(fp)

    def write_linker_swi(self, fp):
        """Write the linker fragment for the SW ISR table, if enabled."""
        if self.__swt:
            self.__write_linker_isr(fp)
            if self.__config.check_shared_interrupts():
                self.__write_linker_shared(fp)
``` | /content/code_sandbox/scripts/build/gen_isr_tables_parser_local.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 3,582 |
```python
#!/usr/bin/env python3
#
#
#
"""
This script scans a specified object file and generates a header file
that defined macros for the offsets of various found structure members
(particularly symbols ending with ``_OFFSET`` or ``_SIZEOF``), primarily
intended for use in assembly code.
"""
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
import argparse
import sys
def get_symbol_table(obj):
    """Return the first symbol table section of the given ELF file.

    Raises LookupError when the file has no symbol table at all.
    """
    symtabs = (s for s in obj.iter_sections()
               if isinstance(s, SymbolTableSection))
    table = next(symtabs, None)
    if table is None:
        raise LookupError("Could not find symbol table")
    return table
def gen_offset_header(input_name, input_file, output_file):
    """Emit a C header with one #define per offset/size symbol.

    Scans the ELF object in *input_file* and writes a guarded header to
    *output_file* containing ``#define NAME 0xVALUE`` for every global
    absolute symbol whose name ends in ``_OFFSET`` or ``_SIZEOF``.
    Returns 0 (used as the process exit status by the caller).
    NOTE(review): *input_name* is currently unused; kept for the caller's
    signature compatibility.
    """
    include_guard = "__GEN_OFFSETS_H__"
    output_file.write("""/* THIS FILE IS AUTO GENERATED. PLEASE DO NOT EDIT.
 *
 * This header file provides macros for the offsets of various structure
 * members. These offset macros are primarily intended to be used in
 * assembly code.
 */

#ifndef %s
#define %s\n\n""" % (include_guard, include_guard))
    obj = ELFFile(input_file)
    for sym in get_symbol_table(obj).iter_symbols():
        # Older pyelftools versions return names as bytes; normalize to str.
        if isinstance(sym.name, bytes):
            sym.name = str(sym.name, 'ascii')
        # Only offset/size marker symbols are of interest.
        if not sym.name.endswith(('_OFFSET', '_SIZEOF')):
            continue
        # The offsets are emitted as absolute values, not section-relative.
        if sym.entry['st_shndx'] != 'SHN_ABS':
            continue
        # Skip local/weak symbols; only globals are part of the contract.
        if sym.entry['st_info']['bind'] != 'STB_GLOBAL':
            continue
        output_file.write(
            "#define %s 0x%x\n" %
            (sym.name, sym.entry['st_value']))
    output_file.write("\n#endif /* %s */\n" % include_guard)
    return 0
# Script entry point: parse arguments and generate the offsets header.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    parser.add_argument(
        "-i",
        "--input",
        required=True,
        help="Input object file")
    parser.add_argument(
        "-o",
        "--output",
        required=True,
        help="Output header file")
    args = parser.parse_args()
    # Open both files via context managers so they are closed even if
    # generation raises (the previous code leaked both descriptors).
    with open(args.input, 'rb') as input_file, \
         open(args.output, 'w') as output_file:
        ret = gen_offset_header(args.input, input_file, output_file)
    sys.exit(ret)
``` | /content/code_sandbox/scripts/build/gen_offset_header.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 501 |
```python
#!/usr/bin/env python3
"""
Tests for check_init_priorities
"""
import mock
import pathlib
import unittest
from elftools.elf.relocation import Section
from elftools.elf.sections import SymbolTableSection
import check_init_priorities
class TestPriority(unittest.TestCase):
    """Tests for the Priority class."""

    def test_priority_parsing(self):
        """Known level names parse into (level, priority) tuples; bad names raise."""
        Priority = check_init_priorities.Priority
        self.assertEqual(Priority("POST_KERNEL", 12)._level_priority, (3, 12))
        self.assertEqual(Priority("APPLICATION", 9999)._level_priority, (4, 9999))
        with self.assertRaises(ValueError):
            Priority("i-am-not-a-priority", 0)
            Priority("_DOESNOTEXIST0_", 0)

    def test_priority_levels(self):
        """Priorities compare first by init level, then by priority number."""
        level_names = ["EARLY", "PRE_KERNEL_1", "PRE_KERNEL_2",
                       "POST_KERNEL", "APPLICATION", "SMP"]
        prios = [check_init_priorities.Priority(name, num)
                 for name in level_names for num in (0, 1)]
        self.assertListEqual(prios, sorted(prios))

    def test_priority_strings(self):
        """str() and repr() include the level name and the priority number."""
        prio = check_init_priorities.Priority("POST_KERNEL", 12)
        self.assertEqual(str(prio), "POST_KERNEL+12")
        self.assertEqual(repr(prio), "<Priority POST_KERNEL 12>")
class testZephyrInitLevels(unittest.TestCase):
    """Tests for the ZephyrInitLevels class."""

    @mock.patch("check_init_priorities.ZephyrInitLevels.__init__", return_value=None)
    def test_load_objects(self, mock_zilinit):
        """_load_objects() indexes named OBJECT/FUNC symbols by address."""
        mock_elf = mock.Mock()
        sts = mock.Mock(spec=SymbolTableSection)
        rel = mock.Mock(spec=Section)
        mock_elf.iter_sections.return_value = [sts, rel]
        # s0: a named data object, s1: anonymous (must be skipped), s2: a function.
        s0 = mock.Mock()
        s0.name = "a"
        s0.entry.st_info.type = "STT_OBJECT"
        s0.entry.st_size = 4
        s0.entry.st_value = 0xaa
        s0.entry.st_shndx = 1
        s1 = mock.Mock()
        s1.name = None
        s2 = mock.Mock()
        s2.name = "b"
        s2.entry.st_info.type = "STT_FUNC"
        s2.entry.st_size = 8
        s2.entry.st_value = 0xbb
        s2.entry.st_shndx = 2
        sts.iter_symbols.return_value = [s0, s1, s2]
        obj = check_init_priorities.ZephyrInitLevels("")
        obj._elf = mock_elf
        obj._load_objects()
        self.assertDictEqual(obj._objects, {0xaa: ("a", 4, 1), 0xbb: ("b", 8, 2)})

    @mock.patch("check_init_priorities.ZephyrInitLevels.__init__", return_value=None)
    def test_load_level_addr(self, mock_zilinit):
        """_load_level_addr() maps each __init_<LEVEL>_start symbol to its address."""
        mock_elf = mock.Mock()
        sts = mock.Mock(spec=SymbolTableSection)
        rel = mock.Mock(spec=Section)
        mock_elf.iter_sections.return_value = [sts, rel]
        s0 = mock.Mock()
        s0.name = "__init_EARLY_start"
        s0.entry.st_value = 0x00
        s1 = mock.Mock()
        s1.name = "__init_PRE_KERNEL_1_start"
        s1.entry.st_value = 0x11
        s2 = mock.Mock()
        s2.name = "__init_PRE_KERNEL_2_start"
        s2.entry.st_value = 0x22
        s3 = mock.Mock()
        s3.name = "__init_POST_KERNEL_start"
        s3.entry.st_value = 0x33
        s4 = mock.Mock()
        s4.name = "__init_APPLICATION_start"
        s4.entry.st_value = 0x44
        s5 = mock.Mock()
        s5.name = "__init_SMP_start"
        s5.entry.st_value = 0x55
        s6 = mock.Mock()
        s6.name = "__init_end"
        s6.entry.st_value = 0x66
        sts.iter_symbols.return_value = [s0, s1, s2, s3, s4, s5, s6]
        obj = check_init_priorities.ZephyrInitLevels("")
        obj._elf = mock_elf
        obj._load_level_addr()
        self.assertDictEqual(obj._init_level_addr, {
            "EARLY": 0x00,
            "PRE_KERNEL_1": 0x11,
            "PRE_KERNEL_2": 0x22,
            "POST_KERNEL": 0x33,
            "APPLICATION": 0x44,
            "SMP": 0x55,
        })
        self.assertEqual(obj._init_level_end, 0x66)

    @mock.patch("check_init_priorities.ZephyrInitLevels.__init__", return_value=None)
    def test_device_ord_from_name(self, mock_zilinit):
        """Only __device_dts_ord_<N> names yield a devicetree ordinal."""
        obj = check_init_priorities.ZephyrInitLevels("")
        self.assertEqual(obj._device_ord_from_name(None), None)
        self.assertEqual(obj._device_ord_from_name("hey, hi!"), None)
        self.assertEqual(obj._device_ord_from_name("__device_dts_ord_123"), 123)

    @mock.patch("check_init_priorities.ZephyrInitLevels.__init__", return_value=None)
    def test_object_name(self, mock_zilinit):
        """_object_name() resolves 0 to NULL, unknown addresses to 'unknown'."""
        obj = check_init_priorities.ZephyrInitLevels("")
        obj._objects = {0x123: ("name", 4)}
        self.assertEqual(obj._object_name(0), "NULL")
        self.assertEqual(obj._object_name(73), "unknown")
        self.assertEqual(obj._object_name(0x123), "name")

    @mock.patch("check_init_priorities.ZephyrInitLevels.__init__", return_value=None)
    def test_initlevel_pointer_32(self, mock_zilinit):
        """Pointers are read as 4-byte words on 32-bit ELF files."""
        obj = check_init_priorities.ZephyrInitLevels("")
        obj._elf = mock.Mock()
        obj._elf.elfclass = 32
        mock_section = mock.Mock()
        obj._elf.get_section.return_value = mock_section
        mock_section.header.sh_addr = 0x100
        mock_section.data.return_value = (b"\x01\x00\x00\x00"
                                          b"\x02\x00\x00\x00"
                                          b"\x03\x00\x00\x00")
        self.assertEqual(obj._initlevel_pointer(0x100, 0, 0), 1)
        self.assertEqual(obj._initlevel_pointer(0x100, 1, 0), 2)
        self.assertEqual(obj._initlevel_pointer(0x104, 0, 0), 2)
        self.assertEqual(obj._initlevel_pointer(0x104, 1, 0), 3)

    @mock.patch("check_init_priorities.ZephyrInitLevels.__init__", return_value=None)
    def test_initlevel_pointer_64(self, mock_zilinit):
        """Pointers are read as 8-byte words on 64-bit ELF files."""
        obj = check_init_priorities.ZephyrInitLevels("")
        obj._elf = mock.Mock()
        obj._elf.elfclass = 64
        mock_section = mock.Mock()
        obj._elf.get_section.return_value = mock_section
        mock_section.header.sh_addr = 0x100
        mock_section.data.return_value = (b"\x01\x00\x00\x00\x00\x00\x00\x00"
                                          b"\x02\x00\x00\x00\x00\x00\x00\x00"
                                          b"\x03\x00\x00\x00\x00\x00\x00\x00")
        self.assertEqual(obj._initlevel_pointer(0x100, 0, 0), 1)
        self.assertEqual(obj._initlevel_pointer(0x100, 1, 0), 2)
        self.assertEqual(obj._initlevel_pointer(0x108, 0, 0), 2)
        self.assertEqual(obj._initlevel_pointer(0x108, 1, 0), 3)

    @mock.patch("check_init_priorities.ZephyrInitLevels._object_name")
    @mock.patch("check_init_priorities.ZephyrInitLevels._initlevel_pointer")
    @mock.patch("check_init_priorities.ZephyrInitLevels.__init__", return_value=None)
    def test_process_initlevels(self, mock_zilinit, mock_ip, mock_on):
        """_process_initlevels() builds per-level listings and the device map."""
        obj = check_init_priorities.ZephyrInitLevels("")
        # Three init entries: two in PRE_KERNEL_2 ([0x00, 0x08)) and one in
        # POST_KERNEL ([0x08, 0x0c)).
        obj._init_level_addr = {
            "EARLY": 0x00,
            "PRE_KERNEL_1": 0x00,
            "PRE_KERNEL_2": 0x00,
            "POST_KERNEL": 0x08,
            "APPLICATION": 0x0c,
            "SMP": 0x0c,
        }
        obj._init_level_end = 0x0c
        obj._objects = {
            0x00: ("a", 4, 0),
            0x04: ("b", 4, 0),
            0x08: ("c", 4, 0),
        }
        # The pointer reader just echoes its arguments so the name mock below
        # can dispatch on (address, index, shndx).
        mock_ip.side_effect = lambda *args: args
        def mock_obj_name(*args):
            if args[0] == (0, 0, 0):
                return "i0"
            elif args[0] == (0, 1, 0):
                return "__device_dts_ord_11"
            elif args[0] == (4, 0, 0):
                return "i1"
            elif args[0] == (4, 1, 0):
                return "__device_dts_ord_22"
            return f"name_{args[0][0]}_{args[0][1]}"
        mock_on.side_effect = mock_obj_name
        obj._process_initlevels()
        self.assertDictEqual(obj.initlevels, {
            "EARLY": [],
            "PRE_KERNEL_1": [],
            "PRE_KERNEL_2": ["a: i0(__device_dts_ord_11)", "b: i1(__device_dts_ord_22)"],
            "POST_KERNEL": ["c: name_8_0(name_8_1)"],
            "APPLICATION": [],
            "SMP": [],
        })
        self.assertDictEqual(obj.devices, {
            11: (check_init_priorities.Priority("PRE_KERNEL_2", 0), "i0"),
            22: (check_init_priorities.Priority("PRE_KERNEL_2", 1), "i1"),
        })
class testValidator(unittest.TestCase):
    """Tests for the Validator class."""

    @mock.patch("check_init_priorities.ZephyrInitLevels")
    @mock.patch("pickle.load")
    def test_initialize(self, mock_pl, mock_zil):
        """The constructor loads the ELF data and the pickled EDT."""
        mock_log = mock.Mock()
        mock_prio = mock.Mock()
        mock_obj = mock.Mock()
        mock_obj.defined_devices = {123: mock_prio}
        mock_zil.return_value = mock_obj
        with mock.patch("builtins.open", mock.mock_open()) as mock_open:
            validator = check_init_priorities.Validator("path", "pickle", mock_log)
        self.assertEqual(validator._obj, mock_obj)
        mock_zil.assert_called_once_with("path")
        mock_open.assert_called_once_with(pathlib.Path("pickle"), "rb")

    @mock.patch("check_init_priorities.Validator.__init__", return_value=None)
    def test_check_dep_same_node(self, mock_vinit):
        """A node depending on itself is silently ignored."""
        validator = check_init_priorities.Validator("", "", None)
        validator.log = mock.Mock()
        validator._check_dep(123, 123)
        self.assertFalse(validator.log.info.called)
        self.assertFalse(validator.log.warning.called)
        self.assertFalse(validator.log.error.called)

    @mock.patch("check_init_priorities.Validator.__init__", return_value=None)
    def test_check_dep_no_prio(self, mock_vinit):
        """Nothing is reported when either side has no known priority."""
        validator = check_init_priorities.Validator("", "", None)
        validator.log = mock.Mock()
        validator._obj = mock.Mock()
        validator._ord2node = {1: mock.Mock(), 2: mock.Mock()}
        validator._ord2node[1]._binding = None
        validator._ord2node[2]._binding = None
        # First only ordinal 1 has a priority, then only ordinal 2.
        validator._obj.devices = {1: (10, "i1")}
        validator._check_dep(1, 2)
        validator._obj.devices = {2: (20, "i2")}
        validator._check_dep(1, 2)
        self.assertFalse(validator.log.info.called)
        self.assertFalse(validator.log.warning.called)
        self.assertFalse(validator.log.error.called)

    @mock.patch("check_init_priorities.Validator.__init__", return_value=None)
    def test_check(self, mock_vinit):
        """A correct ordering logs info; an inverted one logs an error."""
        validator = check_init_priorities.Validator("", "", None)
        validator.log = mock.Mock()
        validator._obj = mock.Mock()
        validator.errors = 0
        validator._ord2node = {1: mock.Mock(), 2: mock.Mock()}
        validator._ord2node[1]._binding = None
        validator._ord2node[1].path = "/1"
        validator._ord2node[2]._binding = None
        validator._ord2node[2].path = "/2"
        validator._obj.devices = {1: (10, "i1"), 2: (20, "i2")}
        validator._check_dep(2, 1)
        validator._check_dep(1, 2)
        validator.log.info.assert_called_once_with("/2 <i2> 20 > /1 <i1> 10")
        validator.log.error.assert_has_calls([
            mock.call("/1 <i1> is initialized before its dependency /2 <i2> (10 < 20)")
        ])
        self.assertEqual(validator.errors, 1)

    @mock.patch("check_init_priorities.Validator.__init__", return_value=None)
    def test_check_same_prio_assert(self, mock_vinit):
        """Two devices with identical priorities raise a ValueError."""
        validator = check_init_priorities.Validator("", "", None)
        validator.log = mock.Mock()
        validator._obj = mock.Mock()
        validator.errors = 0
        validator._ord2node = {1: mock.Mock(), 2: mock.Mock()}
        validator._ord2node[1]._binding = None
        validator._ord2node[1].path = "/1"
        validator._ord2node[2]._binding = None
        validator._ord2node[2].path = "/2"
        validator._obj.devices = {1: (10, "i1"), 2: (10, "i2")}
        with self.assertRaises(ValueError):
            validator._check_dep(1, 2)

    @mock.patch("check_init_priorities.Validator.__init__", return_value=None)
    def test_check_swapped(self, mock_vinit):
        """Compatible pairs listed as intentionally swapped are not errors."""
        validator = check_init_priorities.Validator("", "", None)
        validator.log = mock.Mock()
        validator._obj = mock.Mock()
        validator.errors = 0
        # Patch the module-level allowlist and restore it afterwards.
        save_inverted_priorities = check_init_priorities._INVERTED_PRIORITY_COMPATIBLES
        check_init_priorities._INVERTED_PRIORITY_COMPATIBLES = set([("compat-3", "compat-1")])
        validator._ord2node = {1: mock.Mock(), 3: mock.Mock()}
        validator._ord2node[1]._binding.compatible = "compat-1"
        validator._ord2node[1].path = "/1"
        validator._ord2node[3]._binding.compatible = "compat-3"
        validator._ord2node[3].path = "/3"
        validator._obj.devices = {1: (20, "i1"), 3: (10, "i3")}
        validator._check_dep(3, 1)
        self.assertListEqual(validator.log.info.call_args_list, [
            mock.call("Swapped priority: compat-3, compat-1"),
            mock.call("/3 <i1> 20 > /1 <i3> 10"),
        ])
        self.assertEqual(validator.errors, 0)
        check_init_priorities._INVERTED_PRIORITY_COMPATIBLES = save_inverted_priorities

    @mock.patch("check_init_priorities.Validator.__init__", return_value=None)
    def test_check_ignored(self, mock_vinit):
        """Compatibles on the ignore list are skipped entirely."""
        validator = check_init_priorities.Validator("", "", None)
        validator.log = mock.Mock()
        validator._obj = mock.Mock()
        validator.errors = 0
        # Patch the module-level ignore list and restore it afterwards.
        save_ignore_compatibles = check_init_priorities._IGNORE_COMPATIBLES
        check_init_priorities._IGNORE_COMPATIBLES = set(["compat-3"])
        validator._ord2node = {1: mock.Mock(), 3: mock.Mock()}
        validator._ord2node[1]._binding.compatible = "compat-1"
        validator._ord2node[1].path = "/1"
        validator._ord2node[3]._binding.compatible = "compat-3"
        validator._ord2node[3].path = "/3"
        validator._obj.devices = {1: 20, 3: 10}
        validator._check_dep(3, 1)
        self.assertListEqual(validator.log.info.call_args_list, [
            mock.call("Ignoring priority: compat-3"),
        ])
        self.assertEqual(validator.errors, 0)
        check_init_priorities._IGNORE_COMPATIBLES = save_ignore_compatibles

    @mock.patch("check_init_priorities.Validator._check_dep")
    @mock.patch("check_init_priorities.Validator.__init__", return_value=None)
    def test_check_edt(self, mock_vinit, mock_cd):
        """check_edt() runs _check_dep for every device dependency edge."""
        d0 = mock.Mock()
        d0.dep_ordinal = 1
        d1 = mock.Mock()
        d1.dep_ordinal = 2
        d2 = mock.Mock()
        d2.dep_ordinal = 3
        dev0 = mock.Mock()
        dev0.depends_on = [d0]
        dev1 = mock.Mock()
        dev1.depends_on = [d1]
        dev2 = mock.Mock()
        dev2.depends_on = [d2]
        validator = check_init_priorities.Validator("", "", None)
        validator._ord2node = {1: dev0, 2: dev1, 3: dev2}
        validator._obj = mock.Mock()
        validator._obj.devices = {1: 10, 2: 10, 3: 20}
        validator.check_edt()
        self.assertListEqual(mock_cd.call_args_list, [
            mock.call(1, 1),
            mock.call(2, 2),
            mock.call(3, 3),
        ])
if __name__ == "__main__":
unittest.main()
``` | /content/code_sandbox/scripts/build/check_init_priorities_test.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 4,160 |
```python
#!/usr/bin/env python3
#
#
"""Translate generic handles into ones optimized for the application.
Immutable device data includes information about dependencies,
e.g. that a particular sensor is controlled through a specific I2C bus
and that it signals event on a pin on a specific GPIO controller.
This information is encoded in the first-pass binary using identifiers
derived from the devicetree. This script extracts those identifiers
and replaces them with ones optimized for use with the devices
actually present.
For example the sensor might have a first-pass handle defined by its
devicetree ordinal 52, with the I2C driver having ordinal 24 and the
GPIO controller ordinal 14. The runtime ordinal is the index of the
corresponding device in the static devicetree array, which might be 6,
5, and 3, respectively.
The output is a C source file that provides alternative definitions
for the array contents referenced from the immutable device objects.
In the final link these definitions supersede the ones in the
driver-specific object file.
"""
import sys
import argparse
import os
import pickle
from elf_parser import ZephyrElf
# This is needed to load edt.pickle files.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..',
'dts', 'python-devicetree', 'src'))
def parse_args():
    """Parse command line options into the module-level ``args`` global.

    Side effects: sets the global ``args``, exits the process when no
    Zephyr base can be determined, and prepends the scripts/dts directory
    to ``sys.path`` so the devicetree helpers can be imported later.
    """
    global args
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    parser.add_argument("-k", "--kernel", required=True,
                        help="Input zephyr ELF binary")
    parser.add_argument("--dynamic-deps", action="store_true",
                        help="Indicates if device dependencies are dynamic")
    parser.add_argument("-d", "--num-dynamic-devices", required=False, default=0,
                        type=int, help="Input number of dynamic devices allowed")
    parser.add_argument("-o", "--output-source", required=True,
                        help="Output source file")
    parser.add_argument("-g", "--output-graphviz",
                        help="Output file for graphviz dependency graph")
    parser.add_argument("-z", "--zephyr-base",
                        help="Path to current Zephyr base. If this argument \
                        is not provided the environment will be checked for \
                        the ZEPHYR_BASE environment variable.")
    parser.add_argument("-s", "--start-symbol", required=True,
                        help="Symbol name of the section which contains the \
                        devices. The symbol name must point to the first \
                        device in that section.")
    args = parser.parse_args()
    # The command line flag takes precedence over the environment variable.
    ZEPHYR_BASE = args.zephyr_base or os.getenv("ZEPHYR_BASE")
    if ZEPHYR_BASE is None:
        sys.exit("-z / --zephyr-base not provided. Please provide "
                 "--zephyr-base or set ZEPHYR_BASE in environment")
    sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/dts"))
def c_handle_comment(dev, handles):
    """Build the C comment block describing a device's handle relationships.

    Returns a list of comment lines covering the device's direct, injected
    and supported dependencies, labelled by devicetree path when available.
    """
    def node_label(node):
        # Prefer the devicetree path; fall back to the linker symbol name.
        return node.edt_node and node.edt_node.path or node.sym.name

    out = [
        '',
        '/* {:d} : {:s}:'.format(dev.handle, node_label(dev)),
    ]
    for title, key in (('Direct Dependencies', 'depends'),
                       ('Injected Dependencies', 'injected'),
                       ('Supported', 'supports')):
        entries = handles[key]
        if entries:
            out.append(' * {}:'.format(title))
            out.extend(' * - {:s}'.format(node_label(e)) for e in entries)
    out.append(' */')
    return out
def c_handle_array(dev, handles, dynamic_deps, extra_support_handles=0):
    """Render the C definition of a device's dependency handle array.

    The array layout is: direct dependency handles, separator, injected
    handles, separator, supported handles (padded with NULL slots for
    dynamically attached devices), terminator.
    """
    elems = [str(d.handle) for d in handles["depends"]]
    elems.append('Z_DEVICE_DEPS_SEP')
    elems += [str(d.handle) for d in handles["injected"]]
    elems.append('Z_DEVICE_DEPS_SEP')
    elems += [str(d.handle) for d in handles["supports"]]
    elems += ['DEVICE_HANDLE_NULL'] * extra_support_handles
    elems.append('Z_DEVICE_DEPS_ENDS')
    # Dynamic dependencies must stay writable; otherwise the array is const.
    qualifier = '' if dynamic_deps else 'const '
    ctype = (qualifier + 'Z_DECL_ALIGN(device_handle_t) '
             '__attribute__((__section__(".__device_deps_pass2")))')
    sym_name = dev.ordinals.sym.name
    return [
        # The `extern` line pretends this was first declared in some .h
        # file to silence "should it be static?" warnings in some
        # compilers and static analyzers.
        'extern {:s} {:s}[{:d}];'.format(ctype, sym_name, len(elems)),
        ctype,
        '{:s}[] = {{ {:s} }};'.format(sym_name, ', '.join(elems)),
    ]
def main():
    """Load the pickled devicetree and the kernel ELF, then emit the
    device dependency handle arrays as generated C source."""
    parse_args()

    # The build system serializes the devicetree (edt) next to the kernel
    # image, so derive its path from --kernel.
    edtser = os.path.join(os.path.split(args.kernel)[0], "edt.pickle")
    with open(edtser, 'rb') as f:
        edt = pickle.load(f)

    parsed_elf = ZephyrElf(args.kernel, edt, args.start_symbol)
    if parsed_elf.relocatable:
        # While relocatable elf files will load cleanly, the pointers pulled from
        # the symbol table are invalid (as expected, because the structures have not
        # yet been allocated addresses). Fixing this will require iterating over
        # the relocation sections to find the symbols those pointers will end up
        # referring to.
        sys.exit('Relocatable elf files are not yet supported')

    if args.output_graphviz:
        # Try and output the dependency tree
        try:
            dot = parsed_elf.device_dependency_graph('Device dependency graph', args.kernel)
            with open(args.output_graphviz, 'w') as f:
                f.write(dot.source)
        except ImportError:
            # graphviz is an optional dependency; skip the graph if absent.
            pass

    with open(args.output_source, "w") as fp:
        fp.write('#include <zephyr/device.h>\n')
        fp.write('#include <zephyr/toolchain.h>\n')
        for dev in parsed_elf.devices:
            # The device handle are collected up in a set, which has no
            # specified order. Sort each sub-category of device handle types
            # separately, so that the generated C array is reproducible across
            # builds.
            sorted_handles = {
                "depends": sorted(dev.devs_depends_on, key=lambda d: d.handle),
                "injected": sorted(dev.devs_depends_on_injected, key=lambda d: d.handle),
                "supports": sorted(dev.devs_supports, key=lambda d: d.handle),
            }
            # Power domains may gain dependents at runtime; reserve extra
            # empty support slots for them.
            extra_sups = args.num_dynamic_devices if dev.pm and dev.pm.is_power_domain else 0
            lines = c_handle_comment(dev, sorted_handles)
            lines.extend(
                c_handle_array(dev, sorted_handles, args.dynamic_deps, extra_sups)
            )
            lines.extend([''])
            fp.write('\n'.join(lines))

if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/gen_device_deps.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,548 |
```python
#!/usr/bin/env python3
#
#
import argparse
import sys
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
# ASCII range of printable characters; used both as the default glyph range
# and to decide when a human-readable " (c)" suffix can be emitted.
PRINTABLE_MIN = 32
PRINTABLE_MAX = 126
def generate_element(image, charcode):
    """Generate CFB font element for a given character code from an image.

    Converts *image* to 1-bit, packs it into bytes (row-major when
    args.hpack, column-major otherwise) and writes the C initializer for
    one glyph to args.output.
    """
    blackwhite = image.convert("1", dither=Image.NONE)
    pixels = blackwhite.load()
    width, height = image.size
    if args.dump:
        # Preview aid: save each converted glyph as <name>_<code>.png.
        blackwhite.save("{}_{}.png".format(args.name, charcode))
    if PRINTABLE_MIN <= charcode <= PRINTABLE_MAX:
        char = " ({:c})".format(charcode)
    else:
        char = ""
    args.output.write("""\t/* {:d}{} */\n\t{{\n""".format(charcode, char))
    glyph = []
    # Bit convention: in mode "1" a white pixel is truthy and maps to "0";
    # a black (inked) pixel maps to "1". Strings are built msb-first.
    if args.hpack:
        # Horizontal packing: one byte covers 8 columns of a single row.
        for row in range(0, height):
            packed = []
            for octet in range(0, int(width / 8)):
                value = ""
                for bit in range(0, 8):
                    col = octet * 8 + bit
                    if pixels[col, row]:
                        value = value + "0"
                    else:
                        value = value + "1"
                packed.append(value)
            glyph.append(packed)
    else:
        # Vertical packing: one byte covers 8 rows of a single column.
        for col in range(0, width):
            packed = []
            for octet in range(0, int(height / 8)):
                value = ""
                for bit in range(0, 8):
                    row = octet * 8 + bit
                    if pixels[col, row]:
                        value = value + "0"
                    else:
                        value = value + "1"
                packed.append(value)
            glyph.append(packed)
    for packed in glyph:
        args.output.write("\t\t")
        bits = []
        for value in packed:
            # `bits` keeps the msb-first string for the ASCII-art comment,
            # even when the emitted byte is bit-reversed for lsb-first.
            bits.append(value)
            if not args.msb_first:
                value = value[::-1]
            args.output.write("0x{:02x},".format(int(value, 2)))
        args.output.write(" /* {} */\n".format(''.join(bits).replace('0', ' ').replace('1', '#')))
    args.output.write("\t},\n")
def extract_font_glyphs():
    """Extract font glyphs from a TrueType/OpenType font file.

    Renders each character in [args.first, args.last] into a 1-bit image
    of the byte-rounded bounding box and feeds it to generate_element().
    Raises if the computed box disagrees with -x/-y.
    """
    font = ImageFont.truetype(args.input, args.size)
    # Figure out the bounding box for the desired glyphs
    fw_max = 0
    fh_max = 0
    for i in range(args.first, args.last + 1):
        # returns (left, top, right, bottom) bounding box
        size = font.getbbox(chr(i))
        # calculate width + height
        fw = size[2] - size[0]  # right - left
        fh = size[3] - size[1]  # bottom - top
        if fw > fw_max:
            fw_max = fw
        if fh > fh_max:
            fh_max = fh
    # Round the packed length up to pack into bytes.
    if args.hpack:
        width = 8 * int((fw_max + 7) / 8)
        height = fh_max + args.y_offset
    else:
        width = fw_max
        height = 8 * int((fh_max + args.y_offset + 7) / 8)
    # Diagnose inconsistencies with arguments
    if width != args.width:
        raise Exception('text width {} mismatch with -x {}'.format(width, args.width))
    if height != args.height:
        raise Exception('text height {} mismatch with -y {}'.format(height, args.height))
    for i in range(args.first, args.last + 1):
        image = Image.new('1', (width, height), 'white')
        draw = ImageDraw.Draw(image)
        # returns (left, top, right, bottom) bounding box
        size = draw.textbbox((0, 0), chr(i), font=font)
        # calculate width + height
        fw = size[2] - size[0]  # right - left
        fh = size[3] - size[1]  # bottom - top
        xpos = 0
        if args.center_x:
            # NOTE(review): float x position; PIL accepts it — verify the
            # intended +1 horizontal bias.
            xpos = (width - fw) / 2 + 1
        ypos = args.y_offset
        draw.text((xpos, ypos), chr(i), font=font)
        generate_element(image, i)
def extract_image_glyphs():
    """Extract font glyphs from an image file"""
    sheet = Image.open(args.input)
    # Glyphs are laid out left-to-right in one horizontal strip; slice an
    # args.width x args.height tile for each character code in order.
    for index, charcode in enumerate(range(args.first, args.last + 1)):
        left = index * args.width
        tile = sheet.crop((left, 0, left + args.width, args.height))
        generate_element(tile, charcode)
def generate_header():
    """Generate CFB font header file.

    Writes the header comment (with a path-scrubbed command line), the
    glyph data array, and the FONT_ENTRY_DEFINE() trailer to args.output.
    """
    caps = []
    if args.hpack:
        caps.append('MONO_HPACKED')
    else:
        caps.append('MONO_VPACKED')
    if args.msb_first:
        caps.append('MSB_FIRST')
    caps = ' | '.join(['CFB_FONT_' + f for f in caps])

    # Scrub build-machine paths from the recorded command line so the
    # generated header is reproducible across checkouts.
    clean_cmd = []
    for arg in sys.argv:
        if arg.startswith("--bindir"):
            # Drop. Assumes --bindir= was passed with '=' sign.
            continue
        if args.bindir and arg.startswith(args.bindir):
            # +1 to also strip '/' or '\' separator
            striplen = min(len(args.bindir)+1, len(arg))
            clean_cmd.append(arg[striplen:])
            continue
        if args.zephyr_base is not None:
            clean_cmd.append(arg.replace(args.zephyr_base, '"${ZEPHYR_BASE}"'))
        else:
            clean_cmd.append(arg)

    args.output.write("""/*
 * This file was automatically generated using the following command:
 * {cmd}
 *
 */
#include <zephyr/kernel.h>
#include <zephyr/display/cfb.h>
static const uint8_t cfb_font_{name:s}_{width:d}{height:d}[{elem:d}][{b:.0f}] = {{\n"""
                      .format(cmd=" ".join(clean_cmd),
                              name=args.name,
                              width=args.width,
                              height=args.height,
                              elem=args.last - args.first + 1,
                              b=args.width / 8 * args.height))

    # Explicit -t wins; otherwise guess from the input file extension.
    if args.type == "font":
        extract_font_glyphs()
    elif args.type == "image":
        extract_image_glyphs()
    elif args.input.name.lower().endswith((".otf", ".otc", ".ttf", ".ttc")):
        extract_font_glyphs()
    else:
        extract_image_glyphs()

    args.output.write("""
}};
FONT_ENTRY_DEFINE({name}_{width}{height},
{width},
{height},
{caps},
cfb_font_{name}_{width}{height},
{first},
{last}
);
""" .format(name=args.name, width=args.width, height=args.height,
            caps=caps, first=args.first, last=args.last))
def parse_args():
    """Parse command-line options into the module-level ``args``."""
    global args
    parser = argparse.ArgumentParser(
        description="Character Frame Buffer (CFB) font header file generator",
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    parser.add_argument(
        "-z", "--zephyr-base",
        help="Zephyr base directory")
    parser.add_argument(
        "-d", "--dump", action="store_true",
        help="dump generated CFB font elements as images for preview")

    # Where the glyphs come from.
    input_group = parser.add_argument_group("input arguments")
    input_group.add_argument(
        "-i", "--input", required=True, type=argparse.FileType('rb'), metavar="FILE",
        help="TrueType/OpenType file or image input file")
    input_group.add_argument(
        "-t", "--type", default="auto", choices=["auto", "font", "image"],
        help="Input file type (default: %(default)s)")

    # Options that only apply to TrueType/OpenType inputs.
    font_group = parser.add_argument_group("font arguments")
    font_group.add_argument(
        "-s", "--size", type=int, default=10, metavar="POINTS",
        help="TrueType/OpenType font size in points (default: %(default)s)")

    # Shape and destination of the generated header.
    output_group = parser.add_argument_group("output arguments")
    output_group.add_argument(
        "-o", "--output", type=argparse.FileType('w'), default="-", metavar="FILE",
        help="CFB font header file (default: stdout)")
    output_group.add_argument(
        "--bindir", type=str,
        help="CMAKE_BINARY_DIR for pure logging purposes. No trailing slash.")
    output_group.add_argument(
        "-x", "--width", required=True, type=int,
        help="width of the CFB font elements in pixels")
    output_group.add_argument(
        "-y", "--height", required=True, type=int,
        help="height of the CFB font elements in pixels")
    output_group.add_argument(
        "-n", "--name", default="custom",
        help="name of the CFB font entry (default: %(default)s)")
    output_group.add_argument(
        "--first", type=int, default=PRINTABLE_MIN, metavar="CHARCODE",
        help="character code mapped to the first CFB font element (default: %(default)s)")
    output_group.add_argument(
        "--last", type=int, default=PRINTABLE_MAX, metavar="CHARCODE",
        help="character code mapped to the last CFB font element (default: %(default)s)")
    output_group.add_argument(
        "--center-x", action='store_true',
        help="center character glyphs horizontally")
    output_group.add_argument(
        "--y-offset", type=int, default=0,
        help="vertical offset for character glyphs (default: %(default)s)")
    output_group.add_argument(
        "--hpack", dest='hpack', default=False, action='store_true',
        help="generate bytes encoding row data rather than column data (default: %(default)s)")
    output_group.add_argument(
        "--msb-first", action='store_true',
        help="packed content starts at high bit of each byte (default: lsb-first)")
    args = parser.parse_args()
def main():
    """Parse arguments and generate CFB font header file"""
    # parse_args() populates the module-level `args`; generate_header()
    # then writes the complete header to args.output.
    parse_args()
    generate_header()

if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/gen_cfb_font_header.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,198 |
```python
#!/usr/bin/env python3
#
#
"""
Script to scan Zephyr include directories and emit system call and subsystem metadata
System calls require a great deal of boilerplate code in order to implement
completely. This script is the first step in the build system's process of
auto-generating this code by doing a text scan of directories containing
C or header files, and building up a database of system calls and their
function call prototypes. This information is emitted to a generated
JSON file for further processing.
This script also scans for struct definitions such as __subsystem and
__net_socket, emitting a JSON dictionary mapping tags to all the struct
declarations found that were tagged with them.
If the output JSON file already exists, its contents are checked against
what information this script would have outputted; if the result is that the
file would be unchanged, it is not modified to prevent unnecessary
incremental builds.
"""
import sys
import re
import argparse
import os
import json
from pathlib import PurePath
# Flags shared by every pattern below: scan whole multi-line files and allow
# the commented, multi-line regex style.
regex_flags = re.MULTILINE | re.VERBOSE

# Matches a __syscall (or __syscall_always_inline) prototype, capturing
# "<return type> <name>" and the raw argument list.
syscall_regex = re.compile(r'''
(?:__syscall|__syscall_always_inline)\s+ # __syscall attribute, must be first
([^(]+)                                  # type and name of system call (split later)
[(]                                      # Function opening parenthesis
([^)]*)                                  # Arg list (split later)
[)]                                      # Closing parenthesis
''', regex_flags)

# Struct tags we scan for, e.g. "__subsystem struct foo_driver_api { ... };".
struct_tags = ["__subsystem", "__net_socket"]

# Template instantiated per tag; captures the struct name that follows it.
tagged_struct_decl_template = r'''
%s\s+          # tag, must be first
struct\s+      # struct keyword is next
([^{]+)        # name of subsystem
[{]            # Open curly bracket
'''

def tagged_struct_update(target_list, tag, contents):
    """Append to *target_list* the names of all structs declared with
    *tag* inside the source text *contents*."""
    pattern = re.compile(tagged_struct_decl_template % tag, regex_flags)
    for match in pattern.finditer(contents):
        target_list.append(match.groups()[0].strip())
def analyze_headers(include_dir, scan_dir, file_list):
    """Scan headers/sources for syscall prototypes and tagged structs.

    include_dir/scan_dir are lists of directory trees to walk; file_list
    optionally names a semicolon-separated file restricting which headers
    have their syscalls emitted. Returns (syscall_info, tagged_structs)
    where syscall_info is a list of ((proto, args), filename, emit)
    tuples and tagged_structs maps each tag to the struct names found.
    """
    syscall_ret = []
    tagged_ret = {}
    for tag in struct_tags:
        tagged_ret[tag] = []

    syscall_files = dict()

    # Get the list of header files which contains syscalls to be emitted.
    # If file_list does not exist, we emit all syscalls.
    if file_list:
        with open(file_list, "r", encoding="utf-8") as fp:
            contents = fp.read()
        for one_file in contents.split(";"):
            if os.path.isfile(one_file):
                syscall_files[one_file] = {"emit": True}
            else:
                sys.stderr.write(f"{one_file} does not exist!\n")
                sys.exit(1)

    multiple_directories = set()
    if include_dir:
        multiple_directories |= set(include_dir)
    if scan_dir:
        multiple_directories |= set(scan_dir)
    # Convert to a list to keep the output deterministic
    multiple_directories = sorted(multiple_directories)

    # Look for source files under various directories.
    # Due to "syscalls/*.h" being included unconditionally in various
    # other header files. We must generate the associated syscall
    # header files (e.g. for function stubs).
    for base_path in multiple_directories:
        for root, dirs, files in os.walk(base_path, topdown=True):
            dirs.sort()
            files.sort()
            for fn in files:
                # toolchain/common.h has the definitions of these tags which we
                # don't want to trip over
                path = os.path.join(root, fn)
                if (not (path.endswith(".h") or path.endswith(".c")) or
                        path.endswith(os.path.join(os.sep, 'toolchain',
                                                   'common.h'))):
                    continue
                path = PurePath(os.path.normpath(path)).as_posix()
                if path not in syscall_files:
                    # Only include-dir hits are emitted; scan-dir hits just
                    # get stubs generated.
                    if include_dir and base_path in include_dir:
                        syscall_files[path] = {"emit": True}
                    else:
                        syscall_files[path] = {"emit": False}

    # Parse files to extract syscall functions
    for one_file in syscall_files:
        with open(one_file, "r", encoding="utf-8") as fp:
            try:
                contents = fp.read()
            except Exception:
                # Fixed: previously reported the stale `path` from the walk
                # loop above instead of the file actually being read.
                sys.stderr.write("Error decoding %s\n" % one_file)
                raise
        fn = os.path.basename(one_file)
        try:
            to_emit = syscall_files[one_file]["emit"] or args.emit_all_syscalls
            syscall_result = [(mo.groups(), fn, to_emit)
                              for mo in syscall_regex.finditer(contents)]
            for tag in struct_tags:
                tagged_struct_update(tagged_ret[tag], tag, contents)
        except Exception:
            sys.stderr.write("While parsing %s\n" % fn)
            raise
        syscall_ret.extend(syscall_result)

    return syscall_ret, tagged_ret
def update_file_if_changed(path, new):
    """Write *new* to *path*, but leave the file untouched when its
    contents already match — this avoids spurious incremental rebuilds
    triggered by a fresh mtime."""
    if os.path.exists(path):
        with open(path, 'r') as fp:
            if fp.read() == new:
                return
    with open(path, 'w') as fp:
        fp.write(new)
def parse_args():
    """Parse command-line options into the module-level ``args``."""
    global args
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    # Declarative option table: (flags, keyword arguments).
    option_specs = [
        (("-i", "--include"),
         dict(required=False, action="append",
              help="Include directories recursively scanned for .h files "
                   "containing syscalls that must be present in final binary. "
                   "Can be specified multiple times: -i topdir1 -i topdir2 ...")),
        (("--scan",),
         dict(required=False, action="append",
              help="Scan directories recursively for .h files containing "
                   "syscalls that need stubs generated but may not need to "
                   "be present in final binary. Can be specified multiple "
                   "times.")),
        (("-j", "--json-file"),
         dict(required=True,
              help="Write system call prototype information as json to file")),
        (("-t", "--tag-struct-file"),
         dict(required=True,
              help="Write tagged struct name information as json to file")),
        (("--file-list",),
         dict(required=False,
              help="Text file containing semi-colon separated list of "
                   "header file where only syscalls in these files "
                   "are emitted.")),
        (("--emit-all-syscalls",),
         dict(required=False, action="store_true",
              help="Emit all potential syscalls in the tree")),
    ]
    for flags, options in option_specs:
        parser.add_argument(*flags, **options)
    args = parser.parse_args()
def main():
    """Scan the configured trees and write both JSON outputs."""
    parse_args()

    syscalls, tagged = analyze_headers(args.include, args.scan,
                                       args.file_list)

    # Only write json files if they don't exist or have changes since
    # they will force an incremental rebuild.
    for out_path, payload in ((args.json_file, syscalls),
                              (args.tag_struct_file, tagged)):
        serialized = json.dumps(payload, indent=4, sort_keys=True)
        update_file_if_changed(out_path, serialized)

if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/parse_syscalls.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,550 |
```python
#!/usr/bin/env python3
#
#
# This merges a set of input hex files into a single output hex file.
# Any conflicts will result in an error being reported.
from intelhex import IntelHex
from intelhex import AddressOverlapError
import argparse
def merge_hex_files(output, input_hex_files, overlap):
    """Merge every file in *input_hex_files* into a single hex image and
    write it to *output*. *overlap* is passed through to IntelHex.merge();
    conflicts are re-raised with the offending file named."""
    merged = IntelHex()
    for hex_path in input_hex_files:
        part = IntelHex(hex_path)
        # Since 'arm-none-eabi-objcopy' incorrectly inserts record
        # type '03 - Start Segment Address', we need to remove the
        # start_addr to avoid conflicts when merging.
        part.start_addr = None
        try:
            merged.merge(part, overlap=overlap)
        except AddressOverlapError:
            raise AddressOverlapError("{} has merge issues".format(hex_path))
    merged.write_hex_file(output)
def parse_args():
    """Build the argument parser and return the parsed namespace."""
    parser = argparse.ArgumentParser(
        description="Merge hex files.",
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    # Note: FileType('w') opens the output at parse time.
    parser.add_argument("-o", "--output", required=False, default="merged.hex",
                        type=argparse.FileType('w', encoding='UTF-8'),
                        help="Output file name.")
    parser.add_argument("--overlap", default="error",
                        help="What to do when files overlap (error, ignore, replace). "
                             "See IntelHex.merge() for more info.")
    parser.add_argument("input_files", nargs='*')
    return parser.parse_args()
def main():
    # Parse options, then merge all positional input files into --output.
    args = parse_args()
    merge_hex_files(args.output, args.input_files, args.overlap)

if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/mergehex.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 350 |
```python
import os
import sys
def main():
    """Exit 0 when the directory named in argv[1] is writable, 1 otherwise."""
    writable = os.access(sys.argv[1], os.W_OK)
    # Process exit code is the inverse of the writability test.
    sys.exit(0 if writable else 1)

if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/dir_is_writeable.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 53 |
```python
#!/usr/bin/env python3
#
#
import argparse
import sys
import os
import re
from elftools.elf.elffile import ELFFile
from elftools.elf.descriptions import (
describe_symbol_type,
)
class gen_symtab_log:
    """Minimal logger: optional debug prints plus a fatal error helper."""

    def __init__(self, debug=False):
        self.__enabled = debug

    def debug(self, text):
        """Print debug message if debugging is enabled.

        Note - this function requires config global variable to be initialized.
        """
        if not self.__enabled:
            return
        prog = os.path.basename(sys.argv[0])
        sys.stdout.write(prog + ": " + text + "\n")

    @staticmethod
    def error(text):
        # Terminates the process; the message becomes the exit status.
        sys.exit(os.path.basename(sys.argv[0]) + ": error: " + text + "\n")

    def set_debug(self, state):
        # Toggle debug output after construction.
        self.__enabled = state
# Module-wide logger instance; verbosity is switched on from --debug in main().
log = gen_symtab_log()
def parse_args():
    """Build the command-line parser and return the parsed namespace."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    # Declarative option table keeps flags and help text together.
    for flags, options in (
            (("-k", "--kernel"),
             dict(required=True, help="Zephyr kernel image")),
            (("-o", "--output"),
             dict(required=True, help="Output source file")),
            (("-d", "--debug"),
             dict(action="store_true",
                  help="Print additional debugging information"))):
        parser.add_argument(*flags, **options)
    return parser.parse_args()
class symtab_entry:
    """One function-symbol record: absolute address, size, name, and the
    offset of the address relative to the lowest symbol address."""
    def __init__(self, addr, size, offset, name):
        self.addr = addr      # absolute address (ELF st_value)
        self.size = size      # symbol size in bytes (ELF st_size)
        self.offset = offset  # addr - first_addr; recomputed after sorting
        self.name = name      # sanitized function name

    def __eq__(self, other):
        # Entries are deduplicated by address only. Defining __eq__ also
        # makes instances unhashable (__hash__ becomes None implicitly).
        return self.addr == other.addr
# Address of the lowest FUNC symbol; table offsets are relative to it.
first_addr = 0
# Collected symtab_entry records; sorted by address before emission.
symtab_list = []
def sanitize_func_name(name):
    """Return the leading C-identifier portion of *name*, dropping
    compiler-appended suffixes such as ``.constprop.0`` or ``$x``."""
    match = re.match(r'(^[a-zA-Z_][a-zA-Z0-9_]*)', name)
    if not match:
        # No valid identifier prefix at all — treat as fatal (log.error
        # terminates the process, so the return below is unreachable).
        log.error(f"Failed to sanitize function name: {name}")
        return name
    return match.group(0)
def main():
    """Read FUNC symbols from the kernel ELF and emit the C symbol table
    (z_symtab_entries / z_symtab) used by the runtime symtab lookup."""
    args = parse_args()
    log.set_debug(args.debug)

    with open(args.kernel, "rb") as rf:
        elf = ELFFile(rf)
        # Find the symbol table.
        symtab = elf.get_section_by_name('.symtab')
        i = 1
        for nsym, symbol in enumerate(symtab.iter_symbols()):  # pylint: disable=unused-variable
            symbol_type = describe_symbol_type(symbol['st_info']['type'])
            symbol_addr = symbol['st_value']
            symbol_size = symbol['st_size']
            # Only defined functions (address != 0) are recorded.
            if symbol_type == 'FUNC' and symbol_addr != 0:
                symbol_name = sanitize_func_name(symbol.name)
                dummy_offset = 0  # offsets will be calculated later after we know the first address
                entry = symtab_entry(
                    symbol_addr, symbol_size, dummy_offset, symbol_name)
                # Prevent entries with duplicated addresses
                if entry not in symtab_list:
                    symtab_list.append(entry)

    # Sort the address in ascending order
    symtab_list.sort(key=lambda x: x.addr, reverse=False)
    # Get the address of the first symbol
    # (local; shadows the module-level first_addr default).
    first_addr = symtab_list[0].addr
    for i, entry in enumerate(symtab_list):
        # Offset is calculated here
        entry.offset = entry.addr - first_addr
        # Debug print
        log.debug('%6d: %s %s %.25s' % (
            i,
            hex(entry.addr),
            hex(entry.size),
            entry.name))

    with open(args.output, 'w') as wf:
        print("/* AUTO-GENERATED by gen_symtab.py, do not edit! */", file=wf)
        print("", file=wf)
        print("#include <zephyr/linker/sections.h>", file=wf)
        print("#include <zephyr/debug/symtab.h>", file=wf)
        print("", file=wf)
        print(
            f"const struct z_symtab_entry __symtab_entry z_symtab_entries[{len(symtab_list) + 1}] = {{", file=wf)
        for i, entry in enumerate(symtab_list):
            print(
                f"\t/* ADDR: {hex(entry.addr)} SIZE: {hex(entry.size)} */", file=wf)
            print(
                f"\t[{i}] = {{.offset = {hex(entry.offset)}, .name = \"{entry.name}\"}},", file=wf)
        # Append a dummy entry at the end to facilitate the binary search
        if symtab_list[-1].size == 0:
            # Zero-sized last symbol: pad by one pointer so the sentinel
            # offset is strictly greater than the last real one.
            dummy_offset = f"{hex(symtab_list[-1].offset)} + sizeof(uintptr_t)"
        else:
            dummy_offset = f"{hex(symtab_list[-1].offset + symtab_list[-1].size)}"
        print("\t/* dummy entry */", file=wf)
        print(
            f"\t[{len(symtab_list)}] = {{.offset = {dummy_offset}, .name = \"?\"}},", file=wf)
        print(f"}};\n", file=wf)
        print(f"const struct symtab_info __symtab_info z_symtab = {{", file=wf)
        print(f"\t.first_addr = {hex(first_addr)},", file=wf)
        print(f"\t.length = {len(symtab_list)},", file=wf)
        print(f"\t.entries = z_symtab_entries,", file=wf)
        print(f"}};\n", file=wf)

if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/gen_symtab.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,169 |
```python
#
import struct
import sys
def main():
    """Print this interpreter's pointer (word) size in bits and exit 0."""
    word_bits = struct.calcsize("P") * 8
    print(word_bits)
    sys.exit(0)

if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/user_wordsize.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 40 |
```python
#!/usr/bin/env python3
#
#
#
"""
This script will relocate .text, .rodata, .data and .bss sections from required files
and places it in the required memory region. This memory region and file
are given to this python script in the form of a string.
Example of such a string would be::
SRAM2:COPY:/home/xyz/zephyr/samples/hello_world/src/main.c,\
SRAM1:COPY:/home/xyz/zephyr/samples/hello_world/src/main2.c, \
FLASH2:NOCOPY:/home/xyz/zephyr/samples/hello_world/src/main3.c
One can also specify the program header for a given memory region:
SRAM2\\ :phdr0:COPY:/home/xyz/zephyr/samples/hello_world/src/main.c
To invoke this script::
python3 gen_relocate_app.py -i input_string -o generated_linker -c generated_code
Configuration that needs to be sent to the python script.
- If the memory is like SRAM1/SRAM2/CCD/AON then place full object in
the sections
- If the memory type is appended with _DATA / _TEXT/ _RODATA/ _BSS only the
selected memory is placed in the required memory region. Others are
ignored.
- COPY/NOCOPY defines whether the script should generate the relocation code in
code_relocation.c or not
- NOKEEP will suppress the default behavior of marking every relocated symbol
with KEEP() in the generated linker script.
Multiple regions can be appended together like SRAM2_DATA_BSS
this will place data and bss inside SRAM2.
"""
import sys
import argparse
import os
import glob
import warnings
from collections import defaultdict
from enum import Enum
from pathlib import Path
from typing import NamedTuple
from typing import NewType
from typing import Tuple
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
# Type alias marking strings that name a linker memory region (e.g. "SRAM2").
MemoryRegion = NewType('MemoryRegion', str)
class SectionKind(Enum):
    """Categories of relocatable input sections handled by this script."""

    TEXT = "text"
    RODATA = "rodata"
    DATA = "data"
    BSS = "bss"
    LITERAL = "literal"

    def __str__(self):
        return self.name

    @classmethod
    def for_section_named(cls, name: str):
        """Return the kind containing a section called *name*, or None
        when the section is not one this script relocates.

        >>> SectionKind.for_section_named(".rodata.str1.4")
        <SectionKind.RODATA: 'rodata'>
        """
        markers = (
            (".text.", cls.TEXT),
            (".rodata.", cls.RODATA),
            (".data.", cls.DATA),
            (".bss.", cls.BSS),
            (".literal.", cls.LITERAL),
        )
        for marker, kind in markers:
            if marker in name:
                return kind
        return None
class OutputSection(NamedTuple):
    """One input section to be placed into a relocation output region."""
    # Basename of the object file the section comes from.
    obj_file_name: str
    # Full input-section name, e.g. ".text.foo".
    section_name: str
    # When True the generated linker rule wraps the section in KEEP().
    keep: bool = True
# Per-input-section linker rule; KEEP() protects it from --gc-sections.
PRINT_TEMPLATE = """
KEEP(*{obj_file_name}({section_name}))
"""
# Variant emitted when the region was tagged with NOKEEP.
PRINT_TEMPLATE_NOKEEP = """
*{obj_file_name}({section_name})
"""
# Defines the ROM (load) address symbol of a relocated section.
SECTION_LOAD_MEMORY_SEQ = """
__{0}_{1}_rom_start = LOADADDR(.{0}_{1}_reloc);
"""
# Load-address placement for COPY sections: in ROM under XIP, else in RAM.
LOAD_ADDRESS_LOCATION_FLASH = """
#ifdef CONFIG_XIP
GROUP_DATA_LINK_IN({0}, ROMABLE_REGION)
#else
GROUP_DATA_LINK_IN({0}, {0})
#endif
"""
# NOCOPY sections reside/execute in place; no separate load region.
LOAD_ADDRESS_LOCATION_FLASH_NOCOPY = """
GROUP_LINK_IN({0})
"""
# bss-style sections only need a VMA inside the target region.
LOAD_ADDRESS_LOCATION_BSS = "GROUP_LINK_IN({0})"
# Symbols bounding the read-only relocated area (for MPU configuration).
MPU_RO_REGION_START = """
_{0}_mpu_ro_region_start = ORIGIN({1});
"""
MPU_RO_REGION_END = """
_{0}_mpu_ro_region_end = .;
"""
# generic section creation format
LINKER_SECTION_SEQ = """
/* Linker section for memory region {2} for {3} section */
SECTION_PROLOGUE(.{0}_{1}_reloc,,)
{{
. = ALIGN(4);
{4}
. = ALIGN(4);
}} {5}
__{0}_{1}_reloc_end = .;
__{0}_{1}_reloc_start = ADDR(.{0}_{1}_reloc);
__{0}_{1}_reloc_size = __{0}_{1}_reloc_end - __{0}_{1}_reloc_start;
"""
# As above, but padded for MPU use: {6} is an explicit alignment, or 0 to
# fall back to MPU_ALIGN() on the section size.
LINKER_SECTION_SEQ_MPU = """
/* Linker section for memory region {2} for {3} section */
SECTION_PROLOGUE(.{0}_{1}_reloc,,)
{{
__{0}_{1}_reloc_start = .;
{4}
#if {6}
. = ALIGN({6});
#else
MPU_ALIGN(__{0}_{1}_reloc_size);
#endif
__{0}_{1}_reloc_end = .;
}} {5}
__{0}_{1}_reloc_size = __{0}_{1}_reloc_end - __{0}_{1}_reloc_start;
"""
# Boilerplate for the generated code_relocation.c file.
SOURCE_CODE_INCLUDES = """
/* Auto generated code. Do not modify.*/
#include <zephyr/kernel.h>
#include <zephyr/linker/linker-defs.h>
#include <zephyr/kernel_structs.h>
#include <kernel_internal.h>
"""
# Declarations of the per-section linker symbols consumed by the copy code.
EXTERN_LINKER_VAR_DECLARATION = """
extern char __{0}_{1}_reloc_start[];
extern char __{0}_{1}_rom_start[];
extern char __{0}_{1}_reloc_size[];
"""
# Generated function copying XIP sections from ROM to their VMA.
DATA_COPY_FUNCTION = """
void data_copy_xip_relocation(void)
{{
{0}
}}
"""
# Generated function zeroing relocated bss sections.
BSS_ZEROING_FUNCTION = """
void bss_zeroing_relocation(void)
{{
{0}
}}
"""
# One memcpy per relocated section, driven by the linker symbols above.
MEMCPY_TEMPLATE = """
z_early_memcpy(&__{0}_{1}_reloc_start, &__{0}_{1}_rom_start,
(size_t) &__{0}_{1}_reloc_size);
"""
# One memset per relocated bss section.
MEMSET_TEMPLATE = """
z_early_memset(&__{0}_bss_reloc_start, 0,
(size_t) &__{0}_bss_reloc_size);
"""
def region_is_default_ram(region_name: str) -> bool:
    """
    Test whether a memory region with the given name is the system's default
    RAM region or not.

    This is used to determine whether some items need to be omitted from
    custom regions and instead be placed in the default. In particular, mutable
    data placed in the default RAM section is ignored and is allowed to be
    handled normally by the linker because it is placed in that region anyway.
    """
    return args.default_ram_region == region_name
def find_sections(filename: str) -> 'dict[SectionKind, list[OutputSection]]':
    """
    Locate relocatable sections in the given object file.

    The output value maps categories of sections to the list of actual sections
    located in the object file that fit in that category.
    """
    obj_file_path = Path(filename)
    with open(obj_file_path, 'rb') as obj_file_desc:
        full_lib = ELFFile(obj_file_desc)
        if not full_lib:
            sys.exit("Error parsing file: " + filename)
        sections = [x for x in full_lib.iter_sections()]
        out = defaultdict(list)
        for section in sections:
            section_kind = SectionKind.for_section_named(section.name)
            if section_kind is None:
                continue
            out[section_kind].append(
                OutputSection(obj_file_path.name, section.name)
            )
            # Common variables will be placed in the .bss section
            # only after linking in the final executable. This "if" finds
            # common symbols and warns the user of the problem.
            # The solution to which is simply assigning a 0 to
            # bss variable and it will go to the required place.
            # NOTE(review): as reconstructed, a symbol table section has
            # kind None and is skipped by the `continue` above, so this
            # check would never fire — verify the original indentation
            # (upstream iterates `sections` separately for this warning).
            if isinstance(section, SymbolTableSection):
                def is_common_symbol(s):
                    return s.entry["st_shndx"] == "SHN_COMMON"
                for symbol in filter(is_common_symbol, section.iter_symbols()):
                    warnings.warn("Common variable found. Move "+
                                  symbol.name + " to bss by assigning it to 0/NULL")
    return out
def assign_to_correct_mem_region(
    memory_region: str,
    full_list_of_sections: 'dict[SectionKind, list[OutputSection]]'
) -> 'dict[MemoryRegion, dict[SectionKind, list[OutputSection]]]':
    """
    Generate a mapping of memory region to collection of output sections to be
    placed in each region.
    """
    # Split off any _TEXT/_RODATA/_DATA/_BSS/_LITERAL kind suffixes first.
    use_section_kinds, memory_region = section_kinds_from_memory_region(memory_region)
    # A trailing "_<number>" (e.g. "SRAM2_128") requests an MPU alignment.
    memory_region, _, align_size = memory_region.partition('_')
    if align_size:
        mpu_align[memory_region] = int(align_size)
    # "|NOKEEP" disables the KEEP() wrapper in the generated linker rules.
    keep_sections = '|NOKEEP' not in memory_region
    memory_region = memory_region.replace('|NOKEEP', '')
    output_sections = {}
    for used_kind in use_section_kinds:
        # Pass through section kinds that go into this memory region
        output_sections[used_kind] = [
            section._replace(keep=keep_sections)
            for section in full_list_of_sections[used_kind]
        ]
    return {MemoryRegion(memory_region): output_sections}
def section_kinds_from_memory_region(memory_region: str) -> 'Tuple[set[SectionKind], str]':
    """Split a region name into the requested section kinds and the bare
    region name.

    Region names can be like RAM_RODATA_TEXT or just RAM; a kind suffix
    may follow the region name, and when none is present every kind is
    implied.

    >>> section_kinds_from_memory_region('SRAM2_TEXT')
    ({<SectionKind.TEXT: 'text'>}, 'SRAM2')
    """
    requested = set()
    for kind in SectionKind:
        token = f"_{kind}"
        if token in memory_region:
            requested.add(kind)
            memory_region = memory_region.replace(token, "")
    if not requested:
        # No explicit kind suffix selects every kind.
        requested = set(SectionKind)
    return (requested, memory_region)
def print_linker_sections(list_sections: 'list[OutputSection]'):
    """Render one linker input-section rule per entry, sorted for
    reproducible output; KEEP() is applied unless the entry opted out."""
    rendered = []
    for section in sorted(list_sections):
        template = PRINT_TEMPLATE if section.keep else PRINT_TEMPLATE_NOKEEP
        rendered.append(template.format(obj_file_name=section.obj_file_name,
                                        section_name=section.section_name))
    return ''.join(rendered)
def add_phdr(memory_type, phdrs):
    """Return the region name followed by its program-header annotation,
    or by an empty string when no phdr was configured for it."""
    return '{} {}'.format(memory_type, phdrs.get(memory_type, ""))
def string_create_helper(
    kind: SectionKind,
    memory_type,
    full_list_of_sections: 'dict[SectionKind, list[OutputSection]]',
    load_address_in_flash,
    is_copy,
    phdrs
):
    """Emit the linker-script text that places all sections of *kind*
    destined for *memory_type*. Returns '' when there are none."""
    linker_string = ''
    # Pick where the section's load (ROM) address lives: flash for content
    # that must exist at boot, the region itself for bss-style zero-fill.
    if load_address_in_flash:
        if is_copy:
            load_address_string = LOAD_ADDRESS_LOCATION_FLASH.format(add_phdr(memory_type, phdrs))
        else:
            load_address_string = LOAD_ADDRESS_LOCATION_FLASH_NOCOPY.format(add_phdr(memory_type, phdrs))
    else:
        load_address_string = LOAD_ADDRESS_LOCATION_BSS.format(add_phdr(memory_type, phdrs))
    if full_list_of_sections[kind]:
        # Create a complete list of funcs/ variables that goes in for this
        # memory type
        tmp = print_linker_sections(full_list_of_sections[kind])
        if region_is_default_ram(memory_type) and kind in (SectionKind.DATA, SectionKind.BSS):
            # Mutable data headed for the default RAM region is already
            # placed there by the stock script; only list the inputs.
            linker_string += tmp
        else:
            if not region_is_default_ram(memory_type) and kind is SectionKind.RODATA:
                align_size = 0
                if memory_type in mpu_align:
                    align_size = mpu_align[memory_type]
                linker_string += LINKER_SECTION_SEQ_MPU.format(memory_type.lower(), kind.value, memory_type.upper(),
                                                               kind, tmp, load_address_string, align_size)
            else:
                if region_is_default_ram(memory_type) and kind in (SectionKind.TEXT, SectionKind.LITERAL):
                    align_size = 0
                    linker_string += LINKER_SECTION_SEQ_MPU.format(memory_type.lower(), kind.value, memory_type.upper(),
                                                                   kind, tmp, load_address_string, align_size)
                else:
                    linker_string += LINKER_SECTION_SEQ.format(memory_type.lower(), kind.value, memory_type.upper(),
                                                               kind, tmp, load_address_string)
            # NOTE(review): nesting reconstructed from flattened source —
            # the ROM-start symbol is emitted for every non-pass-through
            # section; verify indentation against upstream.
            if load_address_in_flash:
                linker_string += SECTION_LOAD_MEMORY_SEQ.format(memory_type.lower(), kind.value, memory_type.upper(),
                                                                kind)
    return linker_string
def generate_linker_script(linker_file, sram_data_linker_file, sram_bss_linker_file,
                           complete_list_of_sections, phdrs):
    """Write the three linker script fragments.

    Text/literal/rodata placements go into *linker_file*; data and bss that
    land in the default RAM region are split out into the dedicated sram
    data/bss fragment files instead.
    """
    gen_string = ''
    gen_string_sram_data = ''
    gen_string_sram_bss = ''

    for memory_type, full_list_of_sections in \
            sorted(complete_list_of_sections.items()):
        # The dict key carries flags after '|' (e.g. "SRAM2|COPY"); strip
        # them off to get the plain region name.
        is_copy = bool("|COPY" in memory_type)
        memory_type = memory_type.split("|", 1)[0]

        if region_is_default_ram(memory_type) and is_copy:
            gen_string += MPU_RO_REGION_START.format(memory_type.lower(), memory_type.upper())

        gen_string += string_create_helper(SectionKind.LITERAL, memory_type, full_list_of_sections, 1, is_copy, phdrs)
        gen_string += string_create_helper(SectionKind.TEXT, memory_type, full_list_of_sections, 1, is_copy, phdrs)
        gen_string += string_create_helper(SectionKind.RODATA, memory_type, full_list_of_sections, 1, is_copy, phdrs)

        if region_is_default_ram(memory_type) and is_copy:
            gen_string += MPU_RO_REGION_END.format(memory_type.lower())

        if region_is_default_ram(memory_type):
            # Default-RAM data/bss goes into the separate fragment files.
            gen_string_sram_data += string_create_helper(SectionKind.DATA, memory_type, full_list_of_sections, 1, 1, phdrs)
            gen_string_sram_bss += string_create_helper(SectionKind.BSS, memory_type, full_list_of_sections, 0, 1, phdrs)
        else:
            gen_string += string_create_helper(SectionKind.DATA, memory_type, full_list_of_sections, 1, 1, phdrs)
            gen_string += string_create_helper(SectionKind.BSS, memory_type, full_list_of_sections, 0, 1, phdrs)

    # finally writing to the linker file
    with open(linker_file, "w") as file_desc:
        file_desc.write(gen_string)

    with open(sram_data_linker_file, "w") as file_desc:
        file_desc.write(gen_string_sram_data)

    with open(sram_bss_linker_file, "w") as file_desc:
        file_desc.write(gen_string_sram_bss)
def generate_memcpy_code(memory_type, full_list_of_sections, code_generation):
    """Accumulate C snippets that copy/zero relocated sections at boot.

    Returns the updated *code_generation* dict with "copy_code", "zero_code"
    and "extern" fragments appended for this memory region.
    """
    generate_sections, memory_type = section_kinds_from_memory_region(memory_type)

    # Non-BSS sections get copied to the destination memory, except data in
    # main memory which gets copied automatically.
    for kind in (SectionKind.TEXT, SectionKind.RODATA, SectionKind.DATA):
        if region_is_default_ram(memory_type) and kind is SectionKind.DATA:
            continue

        if kind in generate_sections and full_list_of_sections[kind]:
            code_generation["copy_code"] += MEMCPY_TEMPLATE.format(memory_type.lower(), kind.value)
            code_generation["extern"] += EXTERN_LINKER_VAR_DECLARATION.format(
                memory_type.lower(), kind.value)

    # BSS sections in main memory are automatically zeroed; others need to have
    # zeroing code generated.
    if (SectionKind.BSS in generate_sections
        and full_list_of_sections[SectionKind.BSS]
        and not region_is_default_ram(memory_type)
    ):
        code_generation["zero_code"] += MEMSET_TEMPLATE.format(memory_type.lower())
        code_generation["extern"] += EXTERN_LINKER_VAR_DECLARATION.format(
            memory_type.lower(), SectionKind.BSS.value)

    return code_generation
def dump_header_file(header_file, code_generation):
    """Write the generated relocation header: extern declarations plus the
    data-copy and bss-zeroing functions."""
    # Fall back to a no-op body so both functions are always defined even
    # when there is nothing to copy or zero.
    copy_body = code_generation["copy_code"] if code_generation["copy_code"] else "return;"
    zero_body = code_generation["zero_code"] if code_generation["zero_code"] else "return;"

    code_string = (code_generation["extern"]
                   + DATA_COPY_FUNCTION.format(copy_body)
                   + BSS_ZEROING_FUNCTION.format(zero_body))

    with open(header_file, "w") as header_file_desc:
        header_file_desc.write(SOURCE_CODE_INCLUDES)
        header_file_desc.write(code_string)
def parse_args():
    """Parse command line options into the module-level ``args`` namespace."""
    global args

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    add = parser.add_argument
    add("-d", "--directory", required=True,
        help="obj file's directory")
    add("-i", "--input_rel_dict", required=True, type=argparse.FileType('r'),
        help="input file with dict src:memory type(sram2 or ccm or aon etc)")
    add("-o", "--output", required=False,
        help="Output ld file")
    add("-s", "--output_sram_data", required=False,
        help="Output sram data ld file")
    add("-b", "--output_sram_bss", required=False,
        help="Output sram bss ld file")
    add("-c", "--output_code", required=False,
        help="Output relocation code header file")
    add("-R", "--default_ram_region", default='SRAM',
        help="Name of default RAM memory region for system")
    add("-v", "--verbose", action="count", default=0,
        help="Verbose Output")
    args = parser.parse_args()
# return the absolute path for the object file.
def get_obj_filename(searchpath, filename):
    """Return the absolute path of the object file built from *filename*,
    or None when it cannot be found under *searchpath*.

    Object files are named after the source file with ".obj" appended, and
    the match is only accepted when the source's parent directory name is
    part of the object file's directory name.
    """
    wanted = filename.split("/")[-1] + ".obj"
    for dirpath, _, files in os.walk(searchpath):
        if wanted in files:
            # Disambiguate identically-named sources by their parent dir.
            if filename.split("/")[-2] in dirpath.split("/")[-1]:
                return os.path.join(dirpath, wanted)
    return None
# Extracts all possible components for the input string:
# <mem_region>[\ :program_header]:<flag_1>[;<flag_2>...]:<file_1>[;<file_2>...]
# Returns a 4-tuple with them: (mem_region, program_header, flags, files)
# If no `program_header` is defined, returns an empty string
def parse_input_string(line):
    """Split one relocation directive into its components.

    Input format:
    <mem_region>[ :program_header]:<flag_1>[;<flag_2>...]:<file_1>[;<file_2>...]

    Returns a 4-tuple (mem_region, program_header, flags, files); the
    program header is '' when not present.
    """
    # Only split on the first ':' so Windows drive letters in absolute
    # paths are not broken apart.
    mem_region, rest = line.split(':', 1)

    phdr = ''
    # A trailing space on the region name marks an explicit program header.
    if mem_region.endswith(' '):
        mem_region = mem_region.rstrip()
        phdr, rest = rest.split(':', 1)

    flags_part, files_part = rest.split(':', 1)
    # Lists use ';' separators, in part to support generator expressions.
    return mem_region, phdr, flags_part.split(';'), files_part.split(';')
# Create a dict with key as memory type and files as a list of values.
# Also, return another dict with program headers for memory regions
def create_dict_wrt_mem():
    """Build {"<region>|<flags>": [files]} plus {region: program header}.

    Reads the '|'-separated directives from args.input_rel_dict, expands
    each file glob, and groups the matched files per region+flags key.
    """
    # need to support wild card *
    rel_dict = {}
    phdrs = {}
    input_rel_dict = args.input_rel_dict.read()
    if input_rel_dict == '':
        sys.exit("Disable CONFIG_CODE_DATA_RELOCATION if no file needs relocation")

    for line in input_rel_dict.split('|'):
        if ':' not in line:
            continue
        mem_region, phdr, flag_list, file_list = parse_input_string(line)

        # Handle any program header
        if phdr != '':
            phdrs[mem_region] = f':{phdr}'

        # Expand every glob in the list; warn about misses and about
        # deprecated multi-match patterns.
        matched_files = []
        for file_glob in file_list:
            glob_results = glob.glob(file_glob)
            if not glob_results:
                warnings.warn("File: "+file_glob+" Not found")
                continue
            if len(glob_results) > 1:
                warnings.warn("Regex in file lists is deprecated, please use file(GLOB) instead")
            matched_files.extend(glob_results)

        if len(matched_files) == 0:
            continue
        if mem_region == '':
            continue

        if args.verbose:
            print("Memory region ", mem_region, " Selected for files:", matched_files)

        # Encode the flags into the key so different flag sets stay separate.
        key = "|".join((mem_region, *flag_list))
        rel_dict.setdefault(key, []).extend(matched_files)

    return rel_dict, phdrs
def main():
    """Entry point: collect sections from the obj files and emit outputs."""
    global mpu_align
    mpu_align = {}
    parse_args()
    searchpath = args.directory
    linker_file = args.output
    sram_data_linker_file = args.output_sram_data
    sram_bss_linker_file = args.output_sram_bss
    rel_dict, phdrs = create_dict_wrt_mem()
    complete_list_of_sections: 'dict[MemoryRegion, dict[SectionKind, list[OutputSection]]]' \
        = defaultdict(lambda: defaultdict(list))

    # Create/or truncate file contents if it already exists
    # raw = open(linker_file, "w")

    # for each memory_type, create text/rodata/data/bss sections for all obj files
    for memory_type, files in rel_dict.items():
        full_list_of_sections: 'dict[SectionKind, list[OutputSection]]' = defaultdict(list)

        for filename in files:
            obj_filename = get_obj_filename(searchpath, filename)
            # the obj file wasn't found. Probably not compiled.
            if not obj_filename:
                continue

            file_sections = find_sections(obj_filename)
            # Merge sections from file into collection of sections for all files
            for category, sections in file_sections.items():
                full_list_of_sections[category].extend(sections)

        # cleanup and attach the sections to the memory type after cleanup.
        sections_by_category = assign_to_correct_mem_region(memory_type, full_list_of_sections)
        for (region, section_category_map) in sections_by_category.items():
            for (category, sections) in section_category_map.items():
                complete_list_of_sections[region][category].extend(sections)

    generate_linker_script(linker_file, sram_data_linker_file,
                           sram_bss_linker_file, complete_list_of_sections, phdrs)

    # Only regions marked with the COPY flag need boot-time memcpy/memset
    # code generated for them.
    code_generation = {"copy_code": '', "zero_code": '', "extern": ''}
    for mem_type, list_of_sections in sorted(complete_list_of_sections.items()):
        if "|COPY" in mem_type:
            mem_type = mem_type.split("|", 1)[0]
            code_generation = generate_memcpy_code(mem_type,
                                                   list_of_sections, code_generation)

    dump_header_file(args.output_code, code_generation)


if __name__ == '__main__':
    main()
``` | /content/code_sandbox/scripts/build/gen_relocate_app.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 5,027 |
```python
#!/usr/bin/env python3
#
"""
This file implements the Symbol Link Identifer (SLID)
generation code, for use by the LLEXT subsystem.
SLID-based linking is enabled by the Kconfig
option 'CONFIG_LLEXT_EXPORT_BUILTINS_BY_SLID'.
When executed as a script, this file can be used as
an interactive prompt to calculate the SLID of arbitrary
symbols, which can be useful for debugging purposes.
IMPLEMENTATION NOTES:
Currently, SLIDs are generated by taking the first
[pointer size] bytes of the symbol name's SHA-256
hash, taken in big-endian order.
This ordering provides one advantage: the 32-bit
SLID for an export is present in the top 32 bits of
the 64-bit SLID for the same export.
"""
from hashlib import sha256
def generate_slid(symbol_name: str, slid_size: int) -> int:
    """
    Generates the Symbol Link Identifier (SLID) for a symbol.

    The SLID is the first *slid_size* bytes of the symbol name's SHA-256
    hash, interpreted as a big-endian integer; this makes the 32-bit SLID
    equal to the top 32 bits of the 64-bit SLID for the same symbol.

    symbol_name: Name of the symbol for which to generate a SLID
    slid_size: Size of the SLID in bytes (4/8)

    Raises AssertionError if slid_size is not 4 or 8.
    """
    if slid_size not in (4, 8):
        raise AssertionError(f"Invalid SLID size {slid_size}")

    m = sha256()
    m.update(symbol_name.encode("utf-8"))
    # Renamed from 'hash' to avoid shadowing the builtin; docstring fixed
    # ("slid_side" -> "slid_size").
    digest = m.digest()

    return int.from_bytes(digest[0:slid_size], byteorder='big', signed=False)
def format_slid(slid: int, slid_size: int) -> str:
    """Format a SLID as a zero-padded hexadecimal string.

    Raises AssertionError if slid_size is not 4 or 8 (previously an
    invalid size fell through and raised UnboundLocalError on 'fmt').
    """
    if slid_size == 4:
        return f"0x{slid:08X}"
    elif slid_size == 8:
        return f"0x{slid:016X}"
    else:
        # Same exception type and message style as generate_slid().
        raise AssertionError(f"Invalid SLID size {slid_size}")
def repl():
    """Interactively prompt for symbol names and print their 32/64-bit SLIDs.

    Loops forever; exit with ^C (handled by the caller).
    """
    while True:
        name = input("Symbol name? ")
        for size in (4, 8):
            slid = generate_slid(name, size)
            print(f" {size * 8}-bit SLID for '{name}': {format_slid(slid, size)}")
        print()
if __name__ == "__main__":
    # Run the interactive SLID calculator when invoked as a script.
    print("LLEXT SLID calculation REPL")
    print("Press ^C to exit.")
    try:
        repl()
    except KeyboardInterrupt:
        # ^C is the normal way to leave the REPL; print a final newline.
        print()
``` | /content/code_sandbox/scripts/build/llext_slidlib.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 526 |
```python
#!/usr/bin/env python3
"""
Checks the initialization priorities
This script parses a Zephyr executable file, creates a list of known devices
and their effective initialization priorities and compares that with the device
dependencies inferred from the devicetree hierarchy.
This can be used to detect devices that are initialized in the incorrect order,
but also devices that are initialized at the same priority but depends on each
other, which can potentially break if the linking order is changed.
Optionally, it can also produce a human readable list of the initialization
calls for the various init levels.
"""
import argparse
import logging
import os
import pathlib
import pickle
import sys
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
# This is needed to load edt.pickle files.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..",
"dts", "python-devicetree", "src"))
from devicetree import edtlib # pylint: disable=unused-import
# Prefix used for "struct device" reference initialized based on devicetree
# entries with a known ordinal.
_DEVICE_ORD_PREFIX = "__device_dts_ord_"
# Defined init level in order of priority.
_DEVICE_INIT_LEVELS = ["EARLY", "PRE_KERNEL_1", "PRE_KERNEL_2", "POST_KERNEL",
"APPLICATION", "SMP"]
# List of compatibles for node where the initialization priority should be the
# opposite of the device tree inferred dependency.
_INVERTED_PRIORITY_COMPATIBLES = frozenset()
# List of compatibles for nodes where we don't check the priority.
_IGNORE_COMPATIBLES = frozenset([
# There is no direct dependency between the CDC ACM UART and the USB
# device controller, the logical connection is established after USB
# device support is enabled.
"zephyr,cdc-acm-uart",
])
class Priority:
    """Parses and holds a device initialization priority.

    The object can be used for comparing levels with one another.

    Attributes:
        name: the section name
    """
    def __init__(self, level, priority):
        """level: one of _DEVICE_INIT_LEVELS; priority: index within level."""
        for idx, level_name in enumerate(_DEVICE_INIT_LEVELS):
            if level_name == level:
                self._level = idx
                self._priority = priority
                # Tuples compare elementwise in order
                self._level_priority = (self._level, self._priority)
                return

        raise ValueError("Unknown level in %s" % level)

    def __repr__(self):
        return "<%s %s %d>" % (self.__class__.__name__,
                               _DEVICE_INIT_LEVELS[self._level], self._priority)

    def __str__(self):
        return "%s+%d" % (_DEVICE_INIT_LEVELS[self._level], self._priority)

    def __lt__(self, other):
        return self._level_priority < other._level_priority

    def __eq__(self, other):
        return self._level_priority == other._level_priority

    def __hash__(self):
        # __hash__ must return an int; returning the tuple itself made
        # hash(Priority(...)) raise TypeError.
        return hash(self._level_priority)
class ZephyrInitLevels:
    """Load an executable file and find the initialization calls and devices.

    Load a Zephyr executable file and scan for the list of initialization calls
    and defined devices.

    The list of devices is available in the "devices" class variable in the
    {ordinal: Priority} format, the list of initilevels is in the "initlevels"
    class variables in the {"level name": ["call", ...]} format.

    Attributes:
        file_path: path of the file to be loaded.
    """
    def __init__(self, file_path):
        self.file_path = file_path
        # NOTE(review): the file object stays open for the lifetime of the
        # ELFFile; it is never explicitly closed.
        self._elf = ELFFile(open(file_path, "rb"))
        self._load_objects()
        self._load_level_addr()
        self._process_initlevels()

    def _load_objects(self):
        """Initialize the object table."""
        # Maps symbol address -> (name, size, section index) for every
        # non-empty object or function symbol.
        self._objects = {}

        for section in self._elf.iter_sections():
            if not isinstance(section, SymbolTableSection):
                continue

            for sym in section.iter_symbols():
                if (sym.name and
                    sym.entry.st_size > 0 and
                    sym.entry.st_info.type in ["STT_OBJECT", "STT_FUNC"]):
                    self._objects[sym.entry.st_value] = (
                            sym.name, sym.entry.st_size, sym.entry.st_shndx)

    def _load_level_addr(self):
        """Find the address associated with known init levels."""
        self._init_level_addr = {}

        for section in self._elf.iter_sections():
            if not isinstance(section, SymbolTableSection):
                continue

            for sym in section.iter_symbols():
                for level in _DEVICE_INIT_LEVELS:
                    name = f"__init_{level}_start"
                    if sym.name == name:
                        self._init_level_addr[level] = sym.entry.st_value
                    elif sym.name == "__init_end":
                        # End of the whole init area (after the last level).
                        self._init_level_end = sym.entry.st_value

        if len(self._init_level_addr) != len(_DEVICE_INIT_LEVELS):
            raise ValueError(f"Missing init symbols, found: {self._init_level_addr}")

        # NOTE(review): if "__init_end" was never seen, the attribute is
        # unset and this raises AttributeError instead of the intended
        # ValueError — confirm whether that matters.
        if not self._init_level_end:
            raise ValueError(f"Missing init section end symbol")

    def _device_ord_from_name(self, sym_name):
        """Find a device ordinal from a symbol name."""
        if not sym_name:
            return None

        if not sym_name.startswith(_DEVICE_ORD_PREFIX):
            return None

        _, device_ord = sym_name.split(_DEVICE_ORD_PREFIX)
        return int(device_ord)

    def _object_name(self, addr):
        # Best-effort lookup: "NULL" for address 0, "unknown" when the
        # address does not correspond to a known symbol.
        if not addr:
            return "NULL"
        elif addr in self._objects:
            return self._objects[addr][0]
        else:
            return "unknown"

    def _initlevel_pointer(self, addr, idx, shidx):
        """Read the idx-th pointer of the init entry at addr (section shidx)."""
        elfclass = self._elf.elfclass
        if elfclass == 32:
            ptrsize = 4
        elif elfclass == 64:
            ptrsize = 8
        else:
            # NOTE(review): stray "f" in the message ("class f{elfclass}").
            raise ValueError(f"Unknown pointer size for ELF class f{elfclass}")

        section = self._elf.get_section(shidx)
        start = section.header.sh_addr
        data = section.data()

        offset = addr - start

        start = offset + ptrsize * idx
        stop = offset + ptrsize * (idx + 1)

        # assumes a little-endian target — TODO confirm for BE platforms
        return int.from_bytes(data[start:stop], byteorder="little")

    def _process_initlevels(self):
        """Process the init level and find the init functions and devices."""
        self.devices = {}
        self.initlevels = {}

        for i, level in enumerate(_DEVICE_INIT_LEVELS):
            start = self._init_level_addr[level]
            # The last level runs up to __init_end; every other level runs
            # up to the start of the next level.
            if i + 1 == len(_DEVICE_INIT_LEVELS):
                stop = self._init_level_end
            else:
                stop = self._init_level_addr[_DEVICE_INIT_LEVELS[i + 1]]

            self.initlevels[level] = []

            priority = 0
            addr = start
            while addr < stop:
                if addr not in self._objects:
                    raise ValueError(f"no symbol at addr {addr:08x}")
                obj, size, shidx = self._objects[addr]

                arg0_name = self._object_name(self._initlevel_pointer(addr, 0, shidx))
                arg1_name = self._object_name(self._initlevel_pointer(addr, 1, shidx))

                self.initlevels[level].append(f"{obj}: {arg0_name}({arg1_name})")

                ordinal = self._device_ord_from_name(arg1_name)
                if ordinal:
                    # NOTE(review): a device with ordinal 0 is skipped by
                    # this truthiness test — confirm 0 never names a real
                    # device here.
                    prio = Priority(level, priority)
                    self.devices[ordinal] = (prio, arg0_name)

                addr += size
                priority += 1
class Validator():
    """Validates the initialization priorities.

    Scans through a build folder for object files and list all the device
    initialization priorities. Then compares that against the EDT derived
    dependency list and log any found priority issue.

    Attributes:
        elf_file_path: path of the ELF file
        edt_pickle: name of the EDT pickle file
        log: a logging.Logger object
    """
    def __init__(self, elf_file_path, edt_pickle, log):
        self.log = log

        # The pickled EDT is expected to live next to the ELF file.
        edt_pickle_path = pathlib.Path(
                pathlib.Path(elf_file_path).parent,
                edt_pickle)
        with open(edt_pickle_path, "rb") as f:
            edt = pickle.load(f)

        self._ord2node = edt.dep_ord2node

        self._obj = ZephyrInitLevels(elf_file_path)

        self.errors = 0

    def _check_dep(self, dev_ord, dep_ord):
        """Validate the priority between two devices."""
        if dev_ord == dep_ord:
            return

        dev_node = self._ord2node[dev_ord]
        dep_node = self._ord2node[dep_ord]

        # Some compatibles are exempt from priority checking altogether.
        if dev_node._binding:
            dev_compat = dev_node._binding.compatible
            if dev_compat in _IGNORE_COMPATIBLES:
                self.log.info(f"Ignoring priority: {dev_node._binding.compatible}")
                return

        # For a few known compatible pairs the devicetree dependency is the
        # reverse of the required init order; swap before comparing.
        if dev_node._binding and dep_node._binding:
            dev_compat = dev_node._binding.compatible
            dep_compat = dep_node._binding.compatible
            if (dev_compat, dep_compat) in _INVERTED_PRIORITY_COMPATIBLES:
                self.log.info(f"Swapped priority: {dev_compat}, {dep_compat}")
                dev_ord, dep_ord = dep_ord, dev_ord

        dev_prio, dev_init = self._obj.devices.get(dev_ord, (None, None))
        dep_prio, dep_init = self._obj.devices.get(dep_ord, (None, None))

        # Skip pairs where either device is not present in the image.
        if not dev_prio or not dep_prio:
            return

        if dev_prio == dep_prio:
            raise ValueError(f"{dev_node.path} and {dep_node.path} have the "
                             f"same priority: {dev_prio}")
        elif dev_prio < dep_prio:
            # Dependency ordering violation: log once with a summary, then
            # per-pair details; count errors for the exit status.
            if not self.errors:
                self.log.error("Device initialization priority validation failed, "
                               "the sequence of initialization calls does not match "
                               "the devicetree dependencies.")
            self.errors += 1
            self.log.error(
                    f"{dev_node.path} <{dev_init}> is initialized before its dependency "
                    f"{dep_node.path} <{dep_init}> ({dev_prio} < {dep_prio})")
        else:
            self.log.info(
                    f"{dev_node.path} <{dev_init}> {dev_prio} > "
                    f"{dep_node.path} <{dep_init}> {dep_prio}")

    def check_edt(self):
        """Scan through all known devices and validate the init priorities."""
        for dev_ord in self._obj.devices:
            dev = self._ord2node[dev_ord]
            for dep in dev.depends_on:
                self._check_dep(dev_ord, dep.dep_ordinal)

    def print_initlevels(self):
        """Print the ordered initialization calls, grouped per init level."""
        for level, calls in self._obj.initlevels.items():
            print(level)
            for call in calls:
                print(f"  {call}")
def _parse_args(argv):
"""Parse the command line arguments."""
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
allow_abbrev=False)
parser.add_argument("-f", "--elf-file", default=pathlib.Path("build", "zephyr", "zephyr.elf"),
help="ELF file to use")
parser.add_argument("-v", "--verbose", action="count",
help=("enable verbose output, can be used multiple times "
"to increase verbosity level"))
parser.add_argument("--always-succeed", action="store_true",
help="always exit with a return code of 0, used for testing")
parser.add_argument("-o", "--output",
help="write the output to a file in addition to stdout")
parser.add_argument("-i", "--initlevels", action="store_true",
help="print the initlevel functions instead of checking the device dependencies")
parser.add_argument("--edt-pickle", default=pathlib.Path("edt.pickle"),
help="name of the pickled edtlib.EDT file",
type=pathlib.Path)
return parser.parse_args(argv)
def _init_log(verbose, output):
"""Initialize a logger object."""
log = logging.getLogger(__file__)
console = logging.StreamHandler()
console.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
log.addHandler(console)
if output:
file = logging.FileHandler(output, mode="w")
file.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
log.addHandler(file)
if verbose and verbose > 1:
log.setLevel(logging.DEBUG)
elif verbose and verbose > 0:
log.setLevel(logging.INFO)
else:
log.setLevel(logging.WARNING)
return log
def main(argv=None):
    """Entry point: parse args, run the validator, return an exit status.

    Returns 0 on success (or when --always-succeed is given), 1 when any
    priority violations were found.
    """
    args = _parse_args(argv)

    log = _init_log(args.verbose, args.output)

    log.info(f"check_init_priorities: {args.elf_file}")

    validator = Validator(args.elf_file, args.edt_pickle, log)
    if args.initlevels:
        validator.print_initlevels()
    else:
        validator.check_edt()

    if args.always_succeed:
        return 0

    if validator.errors:
        return 1

    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
``` | /content/code_sandbox/scripts/build/check_init_priorities.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,899 |
```python
#!/usr/bin/env python3
#
#
import struct
import sys
from packaging import version
import elftools
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
# Bail out early: the symbol/section APIs used below need pyelftools >= 0.24.
if version.parse(elftools.__version__) < version.parse('0.24'):
    sys.exit("pyelftools is out of date, need version 0.24 or later")
class _Symbol:
    """
    Parent class for objects derived from an elf symbol.
    """
    def __init__(self, elf, sym):
        self.elf = elf
        self.sym = sym
        # Cache the raw bytes backing this symbol.
        self.data = self.elf.symbol_data(sym)

    def __lt__(self, other):
        # Symbols sort by their address in the image.
        return self.sym.entry.st_value < other.sym.entry.st_value

    def _data_native_read(self, offset):
        """Read one native-word-sized unsigned integer at *offset*."""
        (fmt, word_size) = self.elf.native_struct_format
        return struct.unpack(fmt, self.data[offset:offset + word_size])[0]
class DevicePM(_Symbol):
    """
    Represents information about device PM capabilities.
    """
    # Linker-provided constants this class needs from ZephyrElf.ld_consts.
    required_ld_consts = [
        "_PM_DEVICE_STRUCT_FLAGS_OFFSET",
        "_PM_DEVICE_FLAG_PD"
    ]

    def __init__(self, elf, sym):
        super().__init__(elf, sym)
        # Raw flags word read out of the pm_device struct.
        self.flags = self._data_native_read(self.elf.ld_consts['_PM_DEVICE_STRUCT_FLAGS_OFFSET'])

    @property
    def is_power_domain(self):
        # Non-zero (truthy) when the power-domain flag bit is set.
        return self.flags & (1 << self.elf.ld_consts["_PM_DEVICE_FLAG_PD"])
class DeviceOrdinals(_Symbol):
    """
    Represents information about device dependencies.
    """
    # Sentinel handle values used within the devicedeps arrays.
    DEVICE_HANDLE_SEP = -32768
    DEVICE_HANDLE_ENDS = 32767
    DEVICE_HANDLE_NULL = 0

    def __init__(self, elf, sym):
        super().__init__(elf, sym)
        # Decode the raw bytes as an array of 16-bit signed handles in the
        # ELF's endianness.
        format = "<" if self.elf.little_endian else ">"
        format += "{:d}h".format(len(self.data) // 2)
        self._ordinals = struct.unpack(format, self.data)
        self._ordinals_split = []

        # Split ordinals on DEVICE_HANDLE_SEP
        prev = 1
        for idx, val in enumerate(self._ordinals, 1):
            if val == self.DEVICE_HANDLE_SEP:
                self._ordinals_split.append(self._ordinals[prev:idx-1])
                prev = idx
        self._ordinals_split.append(self._ordinals[prev:])

    @property
    def self_ordinal(self):
        # The first element of the array is the device's own ordinal.
        return self._ordinals[0]

    @property
    def ordinals(self):
        # Groups of ordinals, split on the separator sentinel.
        return self._ordinals_split
class Device(_Symbol):
    """
    Represents information about a device object and its references to other objects.
    """
    # Linker-provided constants this class needs from ZephyrElf.ld_consts.
    required_ld_consts = [
        "_DEVICE_STRUCT_HANDLES_OFFSET",
        "_DEVICE_STRUCT_PM_OFFSET"
    ]

    def __init__(self, elf, sym):
        super().__init__(elf, sym)
        # Filled in later by ZephyrElf._device_parse_and_link().
        self.edt_node = None
        self.handle = None
        self.ordinals = None
        self.pm = None

        # Devicetree dependencies, injected dependencies, supported devices
        self.devs_depends_on = set()
        self.devs_depends_on_injected = set()
        self.devs_supports = set()

        # Point to the handles instance associated with the device;
        # assigned by correlating the device struct handles pointer
        # value with the addr of a Handles instance.
        self.obj_ordinals = None
        if '_DEVICE_STRUCT_HANDLES_OFFSET' in self.elf.ld_consts:
            ordinal_offset = self.elf.ld_consts['_DEVICE_STRUCT_HANDLES_OFFSET']
            self.obj_ordinals = self._data_native_read(ordinal_offset)

        # Address of the pm_device struct referenced by this device, if any.
        self.obj_pm = None
        if '_DEVICE_STRUCT_PM_OFFSET' in self.elf.ld_consts:
            pm_offset = self.elf.ld_consts['_DEVICE_STRUCT_PM_OFFSET']
            self.obj_pm = self._data_native_read(pm_offset)

    @property
    def ordinal(self):
        # Delegates to the linked DeviceOrdinals instance; only valid after
        # ZephyrElf has assigned self.ordinals.
        return self.ordinals.self_ordinal
class ZephyrElf:
    """
    Represents information about devices in an elf file.
    """
    def __init__(self, kernel, edt, device_start_symbol):
        self.elf = ELFFile(open(kernel, "rb"))
        # ET_REL (relocatable) objects need different address handling in
        # symbol_data().
        self.relocatable = self.elf['e_type'] == 'ET_REL'
        self.edt = edt
        self.devices = []
        # Values of the linker-provided constants needed to decode structs.
        self.ld_consts = self._symbols_find_value(set([device_start_symbol, *Device.required_ld_consts, *DevicePM.required_ld_consts]))
        self._device_parse_and_link()

    @property
    def little_endian(self):
        """
        True if the elf file is for a little-endian architecture.
        """
        return self.elf.little_endian

    @property
    def native_struct_format(self):
        """
        Get the struct format specifier and byte size of the native machine type.
        """
        format = "<" if self.little_endian else ">"
        if self.elf.elfclass == 32:
            format += "I"
            size = 4
        else:
            format += "Q"
            size = 8
        return (format, size)

    def symbol_data(self, sym):
        """
        Retrieve the raw bytes associated with a symbol from the elf file.
        """
        # Symbol data parameters
        addr = sym.entry.st_value
        length = sym.entry.st_size
        # Section associated with the symbol
        section = self.elf.get_section(sym.entry['st_shndx'])
        data = section.data()
        # Relocatable data does not appear to be shifted
        offset = addr - (0 if self.relocatable else section['sh_addr'])
        # Validate data extraction
        assert offset + length <= len(data)
        # Extract symbol bytes from section
        return bytes(data[offset:offset + length])

    def _symbols_find_value(self, names):
        """Return a {name: st_value} map for the requested symbol names."""
        symbols = {}
        for section in self.elf.iter_sections():
            if isinstance(section, SymbolTableSection):
                for sym in section.iter_symbols():
                    if sym.name in names:
                        symbols[sym.name] = sym.entry.st_value
        return symbols

    def _object_find_named(self, prefix, cb):
        """Invoke cb(sym) for every object symbol whose name has *prefix*."""
        for section in self.elf.iter_sections():
            if isinstance(section, SymbolTableSection):
                for sym in section.iter_symbols():
                    if sym.entry.st_info.type != 'STT_OBJECT':
                        continue
                    if sym.name.startswith(prefix):
                        cb(sym)

    def _link_devices(self, devices):
        """Populate devs_depends_on/devs_supports from the EDT graph."""
        # Compute the dependency graph induced from the full graph restricted to the
        # the nodes that exist in the application. Note that the edges in the
        # induced graph correspond to paths in the full graph.
        root = self.edt.dep_ord2node[0]
        for ord, dev in devices.items():
            n = self.edt.dep_ord2node[ord]

            deps = set(n.depends_on)
            while len(deps) > 0:
                dn = deps.pop()
                if dn.dep_ordinal in devices:
                    # this is used
                    dev.devs_depends_on.add(devices[dn.dep_ordinal])
                elif dn != root:
                    # forward the dependency up one level
                    for ddn in dn.depends_on:
                        deps.add(ddn)

            sups = set(n.required_by)
            while len(sups) > 0:
                sn = sups.pop()
                if sn.dep_ordinal in devices:
                    dev.devs_supports.add(devices[sn.dep_ordinal])
                else:
                    # forward the support down one level
                    for ssn in sn.required_by:
                        sups.add(ssn)

    def _link_injected(self, devices):
        """Link devices via their injected-dependency ordinal group."""
        for dev in devices.values():
            # Group 1 of the split ordinals holds injected dependencies.
            injected = dev.ordinals.ordinals[1]
            for inj in injected:
                if inj in devices:
                    dev.devs_depends_on_injected.add(devices[inj])
                    devices[inj].devs_supports.add(dev)

    def _device_parse_and_link(self):
        """Find all device-related structs in the image and wire them up."""
        # Find all PM structs
        pm_structs = {}
        def _on_pm(sym):
            pm_structs[sym.entry.st_value] = DevicePM(self, sym)
        self._object_find_named('__pm_device_', _on_pm)

        # Find all ordinal arrays
        ordinal_arrays = {}
        def _on_ordinal(sym):
            ordinal_arrays[sym.entry.st_value] = DeviceOrdinals(self, sym)
        self._object_find_named('__devicedeps_', _on_ordinal)

        # Find all device structs
        def _on_device(sym):
            self.devices.append(Device(self, sym))
        self._object_find_named('__device_', _on_device)

        # Sort the device array by address (st_value) for handle calculation
        self.devices = sorted(self.devices)

        # Assign handles to the devices
        for idx, dev in enumerate(self.devices):
            dev.handle = 1 + idx

        # Link devices structs with PM and ordinals
        for dev in self.devices:
            if dev.obj_pm in pm_structs:
                dev.pm = pm_structs[dev.obj_pm]
            if dev.obj_ordinals in ordinal_arrays:
                dev.ordinals = ordinal_arrays[dev.obj_ordinals]
                if dev.ordinal != DeviceOrdinals.DEVICE_HANDLE_NULL:
                    dev.edt_node = self.edt.dep_ord2node[dev.ordinal]

        # Create mapping of ordinals to devices
        devices_by_ord = {d.ordinal: d for d in self.devices if d.edt_node}

        # Link devices to each other based on the EDT tree
        self._link_devices(devices_by_ord)

        # Link injected devices to each other
        self._link_injected(devices_by_ord)

    def device_dependency_graph(self, title, comment):
        """
        Construct a graphviz Digraph of the relationships between devices.
        """
        # Imported lazily so graphviz is only required when graphing.
        import graphviz
        dot = graphviz.Digraph(title, comment=comment)
        # Split iteration so nodes and edges are grouped in source
        for dev in self.devices:
            if dev.ordinal == DeviceOrdinals.DEVICE_HANDLE_NULL:
                text = '{:s}\\nHandle: {:d}'.format(dev.sym.name, dev.handle)
            else:
                n = self.edt.dep_ord2node[dev.ordinal]
                text = '{:s}\\nOrdinal: {:d} | Handle: {:d}\\n{:s}'.format(
                    n.name, dev.ordinal, dev.handle, n.path
                )
            dot.node(str(dev.ordinal), text)
        for dev in self.devices:
            for sup in sorted(dev.devs_supports):
                dot.edge(str(dev.ordinal), str(sup.ordinal))
        return dot
``` | /content/code_sandbox/scripts/build/elf_parser.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,253 |
```python
#!/usr/bin/env python3
#
#
"""
Process ELF file to generate placeholders for kobject
hash table and lookup functions produced by gperf,
since their sizes depend on how many kobjects have
been declared. The output header files will be used
during linking for intermediate output binaries so
that the addresses of these kobjects would remain
the same during later stages of linking.
"""
import sys
import argparse
import os
from packaging import version
import elftools
from elftools.elf.elffile import ELFFile
# Abort early: the ELF section APIs used below need pyelftools >= 0.24.
if version.parse(elftools.__version__) < version.parse('0.24'):
    sys.exit("pyelftools is out of date, need version 0.24 or later")
def write_define(out_fp, prefix, name, value):
    """Write one include-guarded #define to the output file."""
    macro = f"KOBJECT_{prefix}_{name}"
    out_fp.writelines([
        f"#ifndef {macro}\n",
        f"#define {macro} {value}\n",
        "#endif\n\n",
    ])
def output_simple_header(one_sect):
    """Write the linker header file for one kobject section.

    The file is always created (possibly empty) so the linker script can
    include it unconditionally; the ALIGN/SZ defines are only emitted when
    the section exists in the object file.
    """
    out_fn = os.path.join(args.outdir,
                          f"linker-kobject-prebuilt-{one_sect['name']}.h")

    # Use a context manager so the file is closed even if a write fails
    # (the original opened/closed manually and could leak on error).
    with open(out_fn, "w") as out_fp:
        if one_sect['exists']:
            align = one_sect['align']
            size = one_sect['size']
            prefix = one_sect['define_prefix']

            write_define(out_fp, prefix, 'ALIGN', align)
            write_define(out_fp, prefix, 'SZ', size)
def generate_linker_headers(obj):
    """Generate linker header files to be included by the linker script"""
    # Sections we are interested in
    sections = {
        ".data": {
            "name": "data",
            "define_prefix": "DATA",
            "exists": False,
            # Reserve datapct% extra space on top of the measured size.
            "multiplier": int(args.datapct) + 100,
        },
        ".rodata": {
            "name": "rodata",
            "define_prefix": "RODATA",
            "exists": False,
            # Reserve a fixed number of extra bytes on top of the size.
            "extra_bytes": args.rodata,
        },
        ".priv_stacks.noinit": {
            "name": "priv-stacks",
            "define_prefix": "PRIV_STACKS",
            "exists": False,
        },
    }

    for one_sect in obj.iter_sections():
        # REALLY NEED to match exact type as all other sections
        # (symbol, debug, etc.) are descendants where
        # isinstance() would match.
        if type(one_sect) is not elftools.elf.sections.Section: # pylint: disable=unidiomatic-typecheck
            continue

        name = one_sect.name
        if name in sections:
            # Need section alignment and size
            sections[name]['align'] = one_sect['sh_addralign']
            sections[name]['size'] = one_sect['sh_size']
            sections[name]['exists'] = True

            if "multiplier" in sections[name]:
                sections[name]['size'] *= sections[name]['multiplier'] / 100
                sections[name]['size'] = int(sections[name]['size'])

            if "extra_bytes" in sections[name]:
                sections[name]['size'] += int(sections[name]['extra_bytes'])

    # Emit one header per section of interest (even if it did not exist).
    for one_sect in sections:
        output_simple_header(sections[one_sect])
def parse_args():
    """Parse command line arguments into the module-level ``args``."""
    global args
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    # (flag, help text) for all required options.
    required_opts = [
        ("--object", "Points to kobject_prebuilt_hash.c.obj"),
        ("--outdir", "Output directory (<build_dir>/include/generated)"),
        ("--datapct", "Multiplier to the size of reserved space for DATA region"),
        ("--rodata", "Extra bytes to reserve for RODATA region"),
    ]
    for flag, help_text in required_opts:
        parser.add_argument(flag, required=True, help=help_text)
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Verbose messages")
    args = parser.parse_args()
    # The VERBOSE environment variable forces verbose output on.
    if "VERBOSE" in os.environ:
        args.verbose = 1
def main():
    """Entry point: parse arguments and emit the linker headers."""
    parse_args()
    with open(args.object, "rb") as obj_fp:
        generate_linker_headers(ELFFile(obj_fp))
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/gen_kobject_placeholders.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 953 |
```python
#!/usr/bin/env python3
#
"""Injects SLIDs in LLEXT ELFs' symbol tables.
When Kconfig option CONFIG_LLEXT_EXPORT_BUILTINS_BY_SLID is enabled,
all imports from the Zephyr kernel & application are resolved using
SLIDs instead of symbol names. This script stores the SLID of all
imported symbols in their associated entry in the ELF symbol table
to allow the LLEXT subsystem to link it properly at runtime.
Note that this script is idempotent in theory. However, to prevent
any catastrophic problem, the script will abort if the 'st_value'
field of the `ElfX_Sym` structure is found to be non-zero, which is
the case after one invocation. For this reason, in practice, the script
cannot actually be executed twice on the same ELF file.
"""
import argparse
import logging
import shutil
import sys
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
import llext_slidlib
class LLEXTSymtabPreparator():
    """Patches an LLEXT ELF in place so imports carry SLIDs.

    For every imported symbol found in the symbol table, the SLID
    computed from the symbol name is written into the symbol's
    'st_value' field.
    """
    def __init__(self, elf_path, log):
        self.log = log
        self.elf_path = elf_path
        # "rb+" because the symbol table is patched in place.
        self.elf_fd = open(elf_path, "rb+")
        self.elf = ELFFile(self.elf_fd)
    def _find_symtab(self):
        """Return the first supported symbol table section, or None."""
        supported_symtab_sections = [
            ".symtab",
            ".dynsym",
        ]
        symtab = None
        for section_name in supported_symtab_sections:
            symtab = self.elf.get_section_by_name(section_name)
            if not isinstance(symtab, SymbolTableSection):
                self.log.debug(f"section {section_name} not found.")
            else:
                self.log.info(f"processing '{section_name}' symbol table...")
                self.log.debug(f"(symbol table is at file offset 0x{symtab['sh_offset']:X})")
                break
        # NOTE(review): if the last candidate exists but is not a symbol
        # table section, it is returned as-is; callers only test for None —
        # confirm this is intended.
        return symtab
    def _find_imports_in_symtab(self, symtab):
        """Return (table_index, symbol) pairs for every imported symbol."""
        i = 0
        imports = []
        for sym in symtab.iter_symbols():
            # Check if symbol is an import: untyped, global and undefined.
            if sym.entry['st_info']['type'] == 'STT_NOTYPE' and \
                sym.entry['st_info']['bind'] == 'STB_GLOBAL' and \
                sym.entry['st_shndx'] == 'SHN_UNDEF':
                self.log.debug(f"found imported symbol '{sym.name}' at index {i}")
                imports.append((i, sym))
            i += 1
        return imports
    def _prepare_inner(self):
        """Patch all imports in place; return 0 on success, 1 on error."""
        #1) Locate the symbol table
        symtab = self._find_symtab()
        if symtab is None:
            self.log.error("no symbol table found in file")
            return 1
        #2) Find imported symbols in symbol table
        imports = self._find_imports_in_symtab(symtab)
        self.log.info(f"LLEXT has {len(imports)} import(s)")
        #3) Write SLIDs in each symbol's 'st_value' field
        def make_stvalue_reader_writer():
            """Build raw read/write accessors for ElfX_Sym.st_value."""
            byteorder = "little" if self.elf.little_endian else "big"
            if self.elf.elfclass == 32:
                sizeof_Elf_Sym = 0x10 #sizeof(Elf32_Sym)
                offsetof_st_value = 0x4 #offsetof(Elf32_Sym, st_value)
                sizeof_st_value = 0x4 #sizeof(Elf32_Sym.st_value)
            else:
                # 64-bit layout: sizeof(Elf64_Sym) / offsetof/sizeof st_value
                sizeof_Elf_Sym = 0x18
                offsetof_st_value = 0x8
                sizeof_st_value = 0x8
            def seek(symidx):
                # Position the fd on st_value of symbol #symidx.
                self.elf_fd.seek(
                    symtab['sh_offset'] +
                    symidx * sizeof_Elf_Sym +
                    offsetof_st_value)
            def reader(symbol_index):
                seek(symbol_index)
                return int.from_bytes(self.elf_fd.read(sizeof_st_value), byteorder)
            def writer(symbol_index, st_value):
                seek(symbol_index)
                self.elf_fd.write(int.to_bytes(st_value, sizeof_st_value, byteorder))
            return reader, writer
        rd_st_val, wr_st_val = make_stvalue_reader_writer()
        # SLID width matches the ELF word size (4 or 8 bytes).
        slid_size = self.elf.elfclass // 8
        for (index, symbol) in imports:
            slid = llext_slidlib.generate_slid(symbol.name, slid_size)
            slid_as_str = llext_slidlib.format_slid(slid, slid_size)
            msg = f"{symbol.name} -> {slid_as_str}"
            self.log.info(msg)
            # Make sure we're not overwriting something actually important:
            # a non-zero st_value means the script already ran (or the ELF
            # is unexpected), so abort instead of corrupting it.
            original_st_value = rd_st_val(index)
            if original_st_value != 0:
                self.log.error(f"unexpected non-zero st_value for symbol {symbol.name}")
                return 1
            wr_st_val(index, slid)
        return 0
    def prepare_llext(self):
        """Run the patcher, close the file and return its status (0/1)."""
        res = self._prepare_inner()
        self.elf_fd.close()
        return res
# Disable duplicate code warning for the code that follows,
# as it is expected for these functions to be similar.
# pylint: disable=duplicate-code
def _parse_args(argv):
"""Parse the command line arguments."""
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
allow_abbrev=False)
parser.add_argument("-f", "--elf-file", required=True,
help="LLEXT ELF file to process")
parser.add_argument("-o", "--output-file",
help=("Additional output file where processed ELF "
"will be copied"))
parser.add_argument("-sl", "--slid-listing",
help="write the SLID listing to a file")
parser.add_argument("-v", "--verbose", action="count",
help=("enable verbose output, can be used multiple times "
"to increase verbosity level"))
parser.add_argument("--always-succeed", action="store_true",
help="always exit with a return code of 0, used for testing")
return parser.parse_args(argv)
def _init_log(verbose):
"""Initialize a logger object."""
log = logging.getLogger(__file__)
console = logging.StreamHandler()
console.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
log.addHandler(console)
if verbose and verbose > 1:
log.setLevel(logging.DEBUG)
elif verbose and verbose > 0:
log.setLevel(logging.INFO)
else:
log.setLevel(logging.WARNING)
return log
def main(argv=None):
    """Patch SLIDs into an LLEXT ELF and return the result code."""
    opts = _parse_args(argv)
    logger = _init_log(opts.verbose)
    logger.info(f"inject_slids_in_llext: {opts.elf_file}")
    status = LLEXTSymtabPreparator(opts.elf_file, logger).prepare_llext()
    if opts.always_succeed:
        # Test mode: report success no matter what happened.
        return 0
    if status == 0 and opts.output_file:
        # Only propagate a successfully patched ELF.
        shutil.copy(opts.elf_file, opts.output_file)
    return status
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
``` | /content/code_sandbox/scripts/build/llext_inject_slids.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,518 |
```python
#!/usr/bin/env python3
"""Write subfolder list to a file
This script will walk the specified directory and write the file specified with
the list of all sub-directories found. If the output file already exists, the
file will only be updated in case sub-directories have been added or removed
since the previous invocation.
"""
import os
import argparse
def parse_args():
    """Build the CLI parser and return the parsed options namespace."""
    cli = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        allow_abbrev=False)
    cli.add_argument('-d', '--directory', required=True,
                        help='Directory to walk for sub-directory discovery')
    cli.add_argument('-c', '--create-links', required=False,
                        help='Create links for each directory found in \
                             directory given')
    cli.add_argument('-o', '--out-file', required=True,
                        help='File to write containing a list of all \
                             directories found')
    cli.add_argument('-t', '--trigger-file', required=False,
                        help='Trigger file to be touched to re-run CMake')
    return cli.parse_args()
def get_subfolder_list(directory, create_links=None):
    """Return a list of ``directory`` and all its subdirectories.

    ``directory`` itself is the first entry; children follow in sorted
    os.walk order.  When ``create_links`` is given, a symlink to each
    directory is created inside that folder and the returned list
    contains the symlink paths instead of the real ones.
    """
    dirlist = []
    if create_links is not None:
        # exist_ok avoids a crash if a parallel build created it first
        # (the original exists()/makedirs() pair was racy).
        os.makedirs(create_links, exist_ok=True)
        symbase = os.path.basename(directory)
        # os.path.join instead of manual separator concatenation.
        symlink = os.path.join(create_links, symbase)
        if not os.path.exists(symlink):
            os.symlink(directory, symlink)
        dirlist.append(symlink)
    else:
        dirlist.append(directory)
    for root, dirs, _ in os.walk(directory, topdown=True):
        # Sort in place so os.walk descends in deterministic order.
        dirs.sort()
        for subdir in dirs:
            if create_links is not None:
                targetdirectory = os.path.join(root, subdir)
                reldir = os.path.relpath(targetdirectory, directory)
                # Flatten the relative path into a single link name.
                linkname = symbase + '_' + reldir.replace(os.path.sep, '_')
                symlink = os.path.join(create_links, linkname)
                if not os.path.exists(symlink):
                    os.symlink(targetdirectory, symlink)
                dirlist.append(symlink)
            else:
                dirlist.append(os.path.join(root, subdir))
    return dirlist
def gen_out_file(out_file, dirs):
    """Write ``dirs`` (one path per line) to ``out_file``.

    The file is left untouched when it already holds exactly the same
    content, so timestamp-based tools do not re-run needlessly.
    """
    dirs_nl = "\n".join(dirs) + "\n"
    if os.path.exists(out_file):
        with open(out_file, 'r', encoding="utf-8") as out_file_fo:
            if out_file_fo.read() == dirs_nl:
                return
    with open(out_file, 'w', encoding="utf-8") as out_file_fo:
        # write() emits the whole string at once; the original
        # writelines() call iterated the string one character at a time.
        out_file_fo.write(dirs_nl)
def touch(trigger):
    """Update ``trigger``'s mtime, creating the file if needed.

    A ``None`` trigger means no trigger file was requested: do nothing.
    """
    if trigger is None:
        return
    if not os.path.exists(trigger):
        # Create an empty trigger file.
        with open(trigger, 'w') as trigger_fo:
            trigger_fo.write("")
    else:
        # Bump the modification time to "now".
        os.utime(trigger, None)
def main():
    """Collect the subfolder list, emit the file and poke the trigger."""
    opts = parse_args()
    subdirs = get_subfolder_list(opts.directory, opts.create_links)
    gen_out_file(opts.out_file, subdirs)
    # Always touch trigger file to ensure json files are updated
    touch(opts.trigger_file)
if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/subfolder_list.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 765 |
```python
#!/usr/bin/env python3
#
#
"""
Script to generate gperf tables of kernel object metadata
User mode threads making system calls reference kernel objects by memory
address, as the kernel/driver APIs in Zephyr are the same for both user
and supervisor contexts. It is necessary for the kernel to be able to
validate accesses to kernel objects to make the following assertions:
- That the memory address points to a kernel object
- The kernel object is of the expected type for the API being invoked
- The kernel object is of the expected initialization state
- The calling thread has sufficient permissions on the object
For more details see the :ref:`kernelobjects` section in the documentation.
The zephyr build generates an intermediate ELF binary, zephyr_prebuilt.elf,
which this script scans looking for kernel objects by examining the DWARF
debug information to look for instances of data structures that are considered
kernel objects. For device drivers, the API struct pointer populated at build
time is also examined to disambiguate between various device driver instances
since they are all 'struct device'.
This script can generate five different output files:
- A gperf script to generate the hash table mapping kernel object memory
addresses to kernel object metadata, used to track permissions,
object type, initialization state, and any object-specific data.
- A header file containing generated macros for validating driver instances
inside the system call handlers for the driver subsystem APIs.
- A code fragment included by kernel.h with one enum constant for
each kernel object type and each driver instance.
- The inner cases of a switch/case C statement, included by
kernel/userspace.c, mapping the kernel object types and driver
instances to their human-readable representation in the
otype_to_str() function.
- The inner cases of a switch/case C statement, included by
kernel/userspace.c, mapping kernel object types to their sizes.
This is used for allocating instances of them at runtime
(CONFIG_DYNAMIC_OBJECTS) in the obj_size_get() function.
"""
import sys
import argparse
import math
import os
import struct
import json
from packaging import version
import elftools
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
# Abort early: the DWARF parsing below relies on pyelftools >= 0.24.
if version.parse(elftools.__version__) < version.parse('0.24'):
    sys.exit("pyelftools is out of date, need version 0.24 or later")
from collections import OrderedDict
# Keys in this dictionary are structs which should be recognized as kernel
# objects. Values are a tuple:
#
# - The first item is None, or the name of a Kconfig that
# indicates the presence of this object's definition in case it is not
# available in all configurations.
#
# - The second item is a boolean indicating whether it is permissible for
# the object to be located in user-accessible memory.
#
# - The third items is a boolean indicating whether this item can be
# dynamically allocated with k_object_alloc(). Keep this in sync with
# the switch statement in z_impl_k_object_alloc().
#
# Key names in all caps do not correspond to a specific data type but instead
# indicate that objects of its type are of a family of compatible data
# structures
# Regular dictionaries are ordered only with Python 3.6 and
# above. Good summary and pointers to official documents at:
# path_to_url
# Maps struct name -> (Kconfig dependency or None,
#                      may live in user-accessible memory,
#                      dynamically allocatable via k_object_alloc()).
kobjects = OrderedDict([
    ("k_mem_slab", (None, False, True)),
    ("k_msgq", (None, False, True)),
    ("k_mutex", (None, False, True)),
    ("k_pipe", (None, False, True)),
    ("k_queue", (None, False, True)),
    ("k_poll_signal", (None, False, True)),
    ("k_sem", (None, False, True)),
    ("k_stack", (None, False, True)),
    ("k_thread", (None, False, True)), # but see the upstream issue referenced here
    ("k_timer", (None, False, True)),
    ("z_thread_stack_element", (None, False, False)),
    ("device", (None, False, False)),
    ("NET_SOCKET", (None, False, False)),
    ("net_if", (None, False, False)),
    ("sys_mutex", (None, True, False)),
    ("k_futex", (None, True, False)),
    ("k_condvar", (None, False, True)),
    ("k_event", ("CONFIG_EVENTS", False, True)),
    ("ztest_suite_node", ("CONFIG_ZTEST", True, False)),
    ("ztest_suite_stats", ("CONFIG_ZTEST", True, False)),
    ("ztest_unit_test", ("CONFIG_ZTEST", True, False)),
    ("ztest_test_rule", ("CONFIG_ZTEST", True, False)),
    ("rtio", ("CONFIG_RTIO", False, False)),
    ("rtio_iodev", ("CONFIG_RTIO", False, False)),
    ("sensor_decoder_api", ("CONFIG_SENSOR_ASYNC_API", True, False))
])
def kobject_to_enum(kobj):
    """Map a kobject struct name to its K_OBJ_* enum constant name."""
    # The k_/z_ namespace prefixes are dropped before upper-casing.
    stripped = kobj[2:] if kobj[:2] in ("k_", "z_") else kobj
    return "K_OBJ_%s" % stripped.upper()
# Driver API struct names.
# NOTE(review): appears to be populated later from the subsystems list
# file (see parse_subsystems_list_file) — confirm against the full file.
subsystems = [
    # Editing the list is deprecated, add the __subsystem sentinel to your driver
    # api declaration instead. e.g.
    #
    # __subsystem struct my_driver_api {
    # ....
    #};
]
# Names of all structs tagged with __net_socket, found by parse_syscalls.py
net_sockets = [ ]
def subsystem_to_enum(subsys):
    """Map a driver API struct name to its K_OBJ_DRIVER_* enum name."""
    suffix = "_driver_api"
    if not subsys.endswith(suffix):
        raise Exception("__subsystem is missing _driver_api suffix: (%s)" % subsys)
    # Strip the suffix and upper-case the remaining driver name.
    return "K_OBJ_DRIVER_" + subsys[:-len(suffix)].upper()
# --- debug stuff ---
# Short script name used as a prefix on all diagnostics.
scr = os.path.basename(sys.argv[0])
def debug(text):
    """Print a diagnostic line, but only when --verbose was given."""
    if args.verbose:
        sys.stdout.write(scr + ": " + text + "\n")
def error(text):
    """Print an error message and terminate with a failure status."""
    sys.exit("%s ERROR: %s" % (scr, text))
def debug_die(die, text):
    """Print *text* together with the source file/line the DIE declares."""
    # Resolve declaration coordinates from the CU's line program header.
    lp_header = die.dwarfinfo.line_program_for_CU(die.cu).header
    files = lp_header["file_entry"]
    includes = lp_header["include_directory"]
    # DWARF (pre-v5) file/dir table indices are 1-based, hence the -1.
    fileinfo = files[die.attributes["DW_AT_decl_file"].value - 1]
    filename = fileinfo.name.decode("utf-8")
    filedir = includes[fileinfo.dir_index - 1].decode("utf-8")
    path = os.path.join(filedir, filename)
    lineno = die.attributes["DW_AT_decl_line"].value
    debug(str(die))
    debug("File '%s', line %d:" % (path, lineno))
    debug(" %s" % text)
# -- ELF processing
# DWARF expression opcodes recognized in DW_AT_location blocks.
DW_OP_addr = 0x3
DW_OP_plus_uconst = 0x23
DW_OP_fbreg = 0x91
# Struct name that marks a thread stack element array.
STACK_TYPE = "z_thread_stack_element"
# Running counters used while classifying objects and emitting the table.
thread_counter = 0
sys_mutex_counter = 0
futex_counter = 0
stack_counter = 0
# Global type environment. Populated by pass 1.
type_env = {}
# Extern declarations seen during the variable scan, keyed by DIE offset.
extern_env = {}
class KobjectInstance:
    """One concrete kernel object located at a specific memory address."""
    def __init__(self, type_obj, addr):
        self.type_obj = type_obj
        self.addr = addr
        # Type name determined later: device instances need their API
        # struct address examined before the final name is known.
        self.type_name = None
        # Object-specific payload (thread index, stack size, ...).
        self.data = 0
class KobjectType:
    """Type-graph node for a struct that is itself a kernel object."""
    def __init__(self, offset, name, size, api=False):
        self.offset = offset
        self.name = name
        self.size = size
        self.api = api
    def __repr__(self):
        return "<kobject %s>" % self.name
    @staticmethod
    def has_kobject():
        # A kernel object trivially contains a kernel object.
        return True
    def get_kobjects(self, addr):
        # Exactly one instance lives at addr.
        return {addr: KobjectInstance(self, addr)}
class ArrayType:
    """Type-graph node for an array whose members may be kernel objects."""
    def __init__(self, offset, elements, member_type):
        # elements: list of per-dimension element counts.
        self.elements = elements
        # member_type: type_env key (DIE offset) of the element type.
        self.member_type = member_type
        self.offset = offset
    def __repr__(self):
        return "<array of %d>" % self.member_type
    def has_kobject(self):
        """True if the element type (if known) contains a kernel object."""
        if self.member_type not in type_env:
            return False
        return type_env[self.member_type].has_kobject()
    def get_kobjects(self, addr):
        """Return {address: KobjectInstance} for every element."""
        mt = type_env[self.member_type]
        # Stacks are arrays of _k_stack_element_t but we want to treat
        # the whole array as one kernel object (a thread stack)
        # Data value gets set to size of entire region
        if isinstance(mt, KobjectType) and mt.name == STACK_TYPE:
            # An array of stacks appears as a multi-dimensional array.
            # The last size is the size of each stack. We need to track
            # each stack within the array, not as one huge stack object.
            *dimensions, stacksize = self.elements
            num_members = 1
            for e in dimensions:
                num_members = num_members * e
            ret = {}
            for i in range(num_members):
                a = addr + (i * stacksize)
                o = mt.get_kobjects(a)
                # Record the per-stack size in the instance's data field.
                o[a].data = stacksize
                ret.update(o)
            return ret
        objs = {}
        # Multidimensional array flattened out
        num_members = 1
        for e in self.elements:
            num_members = num_members * e
        for i in range(num_members):
            objs.update(mt.get_kobjects(addr + (i * mt.size)))
        return objs
class AggregateTypeMember:
    """One member of an AggregateType, with its offset inside the struct."""
    def __init__(self, offset, member_name, member_type, member_offset):
        self.member_name = member_name
        # member_type: type_env key (DIE offset) of the member's type.
        self.member_type = member_type
        if isinstance(member_offset, list):
            # DWARF v2, location encoded as set of operations
            # only "DW_OP_plus_uconst" with ULEB128 argument supported
            if member_offset[0] == 0x23:
                self.member_offset = member_offset[1] & 0x7f
                # Accumulate the remaining ULEB128 continuation bytes.
                for i in range(1, len(member_offset)-1):
                    if member_offset[i] & 0x80:
                        self.member_offset += (
                            member_offset[i+1] & 0x7f) << i*7
            else:
                raise Exception("not yet supported location operation (%s:%d:%d)" %
                                (self.member_name, self.member_type, member_offset[0]))
        else:
            self.member_offset = member_offset
    def __repr__(self):
        return "<member %s, type %d, offset %d>" % (
            self.member_name, self.member_type, self.member_offset)
    def has_kobject(self):
        """True if the member's type (if known) contains a kernel object."""
        if self.member_type not in type_env:
            return False
        return type_env[self.member_type].has_kobject()
    def get_kobjects(self, addr):
        """Return the member's kernel objects relative to struct base addr."""
        mt = type_env[self.member_type]
        return mt.get_kobjects(addr + self.member_offset)
class ConstType:
    """Transparent wrapper for a DWARF 'const'-qualified type."""
    def __init__(self, child_type):
        # child_type: type_env key (DIE offset) of the wrapped type.
        self.child_type = child_type
    def __repr__(self):
        return "<const %d>" % self.child_type
    def has_kobject(self):
        # Delegate to the wrapped type when it survived filtering.
        child = type_env.get(self.child_type)
        return child is not None and child.has_kobject()
    def get_kobjects(self, addr):
        return type_env[self.child_type].get_kobjects(addr)
class AggregateType:
    """A plain struct that may (transitively) contain kernel objects."""
    def __init__(self, offset, name, size):
        self.name = name
        self.size = size
        self.offset = offset
        self.members = []
    def add_member(self, member):
        self.members.append(member)
    def __repr__(self):
        return "<struct %s, with %s>" % (self.name, self.members)
    def has_kobject(self):
        """Return True if any member contains a kernel object.

        Side effect: members that do NOT contain kernel objects are
        pruned from self.members so later passes skip them.
        """
        result = False
        bad_members = []
        for member in self.members:
            if member.has_kobject():
                result = True
            else:
                bad_members.append(member)
        # Don't need to consider this again, just remove it
        for bad_member in bad_members:
            self.members.remove(bad_member)
        return result
    def get_kobjects(self, addr):
        """Union of every (remaining) member's kernel objects."""
        objs = {}
        for member in self.members:
            objs.update(member.get_kobjects(addr))
        return objs
# --- helper functions for getting data from DIEs ---
def die_get_spec(die):
    """Follow DW_AT_specification back to the recorded extern DIE.

    Returns None when the DIE has no specification attribute or the
    referenced declaration was never stored in extern_env.
    """
    if 'DW_AT_specification' not in die.attributes:
        return None
    spec_val = die.attributes["DW_AT_specification"].value
    # offset of the DW_TAG_variable for the extern declaration
    offset = spec_val + die.cu.cu_offset
    return extern_env.get(offset)
def die_get_name(die):
    """Return the DIE's name as a string, or None when unavailable."""
    target = die
    if 'DW_AT_name' not in target.attributes:
        # Fall back to the extern declaration this DIE specifies.
        target = die_get_spec(target)
        if not target:
            return None
    return target.attributes["DW_AT_name"].value.decode("utf-8")
def die_get_type_offset(die):
    """Return the DIE's type as an absolute offset, or None if absent."""
    target = die
    if 'DW_AT_type' not in target.attributes:
        # Fall back to the extern declaration this DIE specifies.
        target = die_get_spec(target)
        if not target:
            return None
    # Type references are CU-relative; convert to a global offset.
    return target.attributes["DW_AT_type"].value + target.cu.cu_offset
def die_get_byte_size(die):
    """Return DW_AT_byte_size, or 0 for incomplete types."""
    attr = die.attributes.get('DW_AT_byte_size')
    return attr.value if attr is not None else 0
def analyze_die_struct(die):
    """Classify a structure DIE into the global type environment.

    Known kernel object structs become KobjectType entries; everything
    else becomes an AggregateType whose members are recorded for the
    later containment analysis.
    """
    name = die_get_name(die) or "<anon>"
    offset = die.offset
    size = die_get_byte_size(die)
    # Incomplete type
    if not size:
        return
    if name in kobjects:
        type_env[offset] = KobjectType(offset, name, size)
    elif name in subsystems:
        # Driver API structs are kobjects too, flagged for later lookup.
        type_env[offset] = KobjectType(offset, name, size, api=True)
    elif name in net_sockets:
        # All __net_socket structs collapse to the NET_SOCKET family.
        type_env[offset] = KobjectType(offset, "NET_SOCKET", size)
    else:
        at = AggregateType(offset, name, size)
        type_env[offset] = at
        for child in die.iter_children():
            if child.tag != "DW_TAG_member":
                continue
            data_member_location = child.attributes.get("DW_AT_data_member_location")
            if not data_member_location:
                continue
            child_type = die_get_type_offset(child)
            member_offset = data_member_location.value
            cname = die_get_name(child) or "<anon>"
            m = AggregateTypeMember(child.offset, cname, child_type,
                                    member_offset)
            at.add_member(m)
        return
def analyze_die_const(die):
    """Record a 'const' qualifier DIE as a ConstType wrapper."""
    child = die_get_type_offset(die)
    if child:
        type_env[die.offset] = ConstType(child)
def analyze_die_array(die):
    """Record an array DIE, capturing its dimensions from subrange DIEs."""
    type_offset = die_get_type_offset(die)
    elements = []
    for child in die.iter_children():
        if child.tag != "DW_TAG_subrange_type":
            continue
        if "DW_AT_upper_bound" in child.attributes:
            ub = child.attributes["DW_AT_upper_bound"]
            # Skip non-constant bounds (e.g. variable-length arrays).
            if not ub.form.startswith("DW_FORM_data"):
                continue
            elements.append(ub.value + 1)
        # in DWARF 4, e.g. ARC Metaware toolchain, DW_AT_count is used
        # not DW_AT_upper_bound
        elif "DW_AT_count" in child.attributes:
            ub = child.attributes["DW_AT_count"]
            if not ub.form.startswith("DW_FORM_data"):
                continue
            elements.append(ub.value)
        else:
            continue
    if not elements:
        if type_offset in type_env:
            mt = type_env[type_offset]
            if mt.has_kobject():
                if isinstance(mt, KobjectType) and mt.name == STACK_TYPE:
                    # Dimension-less stack declarations still count as a
                    # one-element array of stack elements.
                    elements.append(1)
                    type_env[die.offset] = ArrayType(die.offset, elements, type_offset)
    else:
        type_env[die.offset] = ArrayType(die.offset, elements, type_offset)
def analyze_typedef(die):
    """Make a typedef DIE an alias of its target's type object."""
    target = die_get_type_offset(die)
    if target in type_env:
        type_env[die.offset] = type_env[target]
def unpack_pointer(elf, data, offset):
    """Decode one pointer-sized integer from *data* at *offset*."""
    prefix = "<" if elf.little_endian else ">"
    if elf.elfclass == 32:
        fmt, nbytes = "I", 4
    else:
        fmt, nbytes = "Q", 8
    (value,) = struct.unpack(prefix + fmt, data[offset:offset + nbytes])
    return value
def addr_deref(elf, addr):
    """Dereference *addr* inside the ELF image; 0 when unmapped."""
    for section in elf.iter_sections():
        base = section['sh_addr']
        if base <= addr < base + section['sh_size']:
            # Found the containing section; read the pointer it holds.
            return unpack_pointer(elf, section.data(), addr - base)
    return 0
def device_get_api_addr(elf, addr):
    """Return the api pointer stored in a struct device instance."""
    # See include/device.h for a description of struct device
    api_offset = 8 if elf.elfclass == 32 else 16
    return addr_deref(elf, addr + api_offset)
def find_kobjects(elf, syms):
    """Scan the ELF's DWARF info and return all kernel object instances.

    *syms* is the name -> address mapping from get_symbols().  Returns
    an OrderedDict mapping object address -> KobjectInstance, sorted by
    address.  Updates the module-level counters as a side effect.
    """
    global thread_counter
    global sys_mutex_counter
    global futex_counter
    global stack_counter
    if not elf.has_dwarf_info():
        sys.exit("ELF file has no DWARF information")
    app_smem_start = syms["_app_smem_start"]
    app_smem_end = syms["_app_smem_end"]
    if "CONFIG_LINKER_USE_PINNED_SECTION" in syms and "_app_smem_pinned_start" in syms:
        app_smem_pinned_start = syms["_app_smem_pinned_start"]
        app_smem_pinned_end = syms["_app_smem_pinned_end"]
    else:
        # No pinned region: collapse it onto the regular app_smem range.
        app_smem_pinned_start = app_smem_start
        app_smem_pinned_end = app_smem_end
    user_stack_start = syms["z_user_stacks_start"]
    user_stack_end = syms["z_user_stacks_end"]
    di = elf.get_dwarf_info()
    variables = []
    # Step 1: collect all type information.
    for CU in di.iter_CUs():
        for die in CU.iter_DIEs():
            # Unions are disregarded, kernel objects should never be union
            # members since the memory is not dedicated to that object and
            # could be something else
            if die.tag == "DW_TAG_structure_type":
                analyze_die_struct(die)
            elif die.tag == "DW_TAG_const_type":
                analyze_die_const(die)
            elif die.tag == "DW_TAG_array_type":
                analyze_die_array(die)
            elif die.tag == "DW_TAG_typedef":
                analyze_typedef(die)
            elif die.tag == "DW_TAG_variable":
                variables.append(die)
    # Step 2: filter type_env to only contain kernel objects, or structs
    # and arrays of kernel objects
    bad_offsets = []
    for offset, type_object in type_env.items():
        if not type_object.has_kobject():
            bad_offsets.append(offset)
    for offset in bad_offsets:
        del type_env[offset]
    # Step 3: Now that we know all the types we are looking for, examine
    # all variables
    all_objs = {}
    for die in variables:
        name = die_get_name(die)
        if not name:
            continue
        if name.startswith("__init_sys_init"):
            # Boot-time initialization function; not an actual device
            continue
        type_offset = die_get_type_offset(die)
        # Is this a kernel object, or a structure containing kernel
        # objects?
        if type_offset not in type_env:
            continue
        if "DW_AT_declaration" in die.attributes:
            # Extern declaration, only used indirectly
            extern_env[die.offset] = die
            continue
        if "DW_AT_location" not in die.attributes:
            debug_die(die,
                      "No location information for object '%s'; possibly stack allocated"
                      % name)
            continue
        loc = die.attributes["DW_AT_location"]
        if loc.form not in ("DW_FORM_exprloc", "DW_FORM_block1"):
            debug_die(die, "kernel object '%s' unexpected location format" %
                      name)
            continue
        opcode = loc.value[0]
        if opcode != DW_OP_addr:
            # Check if frame pointer offset DW_OP_fbreg
            if opcode == DW_OP_fbreg:
                debug_die(die, "kernel object '%s' found on stack" % name)
            else:
                debug_die(die,
                          "kernel object '%s' unexpected exprloc opcode %s" %
                          (name, hex(opcode)))
            continue
        # Decode the little-endian address bytes following DW_OP_addr.
        if "CONFIG_64BIT" in syms:
            addr = ((loc.value[1] << 0 ) | (loc.value[2] << 8) |
                    (loc.value[3] << 16) | (loc.value[4] << 24) |
                    (loc.value[5] << 32) | (loc.value[6] << 40) |
                    (loc.value[7] << 48) | (loc.value[8] << 56))
        else:
            addr = ((loc.value[1] << 0 ) | (loc.value[2] << 8) |
                    (loc.value[3] << 16) | (loc.value[4] << 24))
        # Handle a DW_FORM_exprloc that contains a DW_OP_addr, followed immediately by
        # a DW_OP_plus_uconst.
        if len(loc.value) >= 7 and loc.value[5] == DW_OP_plus_uconst:
            addr += (loc.value[6])
        if addr == 0:
            # Never linked; gc-sections deleted it
            continue
        type_obj = type_env[type_offset]
        objs = type_obj.get_kobjects(addr)
        all_objs.update(objs)
        debug("symbol '%s' at %s contains %d object(s)"
              % (name, hex(addr), len(objs)))
    # Step 4: objs is a dictionary mapping variable memory addresses to
    # their associated type objects. Now that we have seen all variables
    # and can properly look up API structs, convert this into a dictionary
    # mapping variables to the C enumeration of what kernel object type it
    # is.
    ret = {}
    for addr, ko in all_objs.items():
        # API structs don't get into the gperf table
        if ko.type_obj.api:
            continue
        _, user_ram_allowed, _ = kobjects[ko.type_obj.name]
        if (not user_ram_allowed and
            ((app_smem_start <= addr < app_smem_end)
             or (app_smem_pinned_start <= addr < app_smem_pinned_end))):
            debug("object '%s' found in invalid location %s"
                  % (ko.type_obj.name, hex(addr)))
            continue
        if (ko.type_obj.name == STACK_TYPE and
                (addr < user_stack_start or addr >= user_stack_end)):
            debug("skip kernel-only stack at %s" % hex(addr))
            continue
        # At this point we know the object will be included in the gperf table
        if ko.type_obj.name == "k_thread":
            # Assign an ID for this thread object, used to track its
            # permissions to other kernel objects
            ko.data = thread_counter
            thread_counter = thread_counter + 1
        elif ko.type_obj.name == "sys_mutex":
            ko.data = "&kernel_mutexes[%d]" % sys_mutex_counter
            sys_mutex_counter += 1
        elif ko.type_obj.name == "k_futex":
            ko.data = "&futex_data[%d]" % futex_counter
            futex_counter += 1
        elif ko.type_obj.name == STACK_TYPE:
            stack_counter += 1
        if ko.type_obj.name != "device":
            # Not a device struct so we immediately know its type
            ko.type_name = kobject_to_enum(ko.type_obj.name)
            ret[addr] = ko
            continue
        # Device struct. Need to get the address of its API struct,
        # if it has one.
        apiaddr = device_get_api_addr(elf, addr)
        if apiaddr not in all_objs:
            if apiaddr == 0:
                debug("device instance at 0x%x has no associated subsystem"
                      % addr)
            else:
                debug("device instance at 0x%x has unknown API 0x%x"
                      % (addr, apiaddr))
            # API struct does not correspond to a known subsystem, skip it
            continue
        apiobj = all_objs[apiaddr]
        ko.type_name = subsystem_to_enum(apiobj.type_obj.name)
        ret[addr] = ko
    debug("found %d kernel object instances total" % len(ret))
    # 1. Before python 3.7 dict order is not guaranteed. With Python
    #    3.5 it doesn't seem random with *integer* keys but can't
    #    rely on that.
    # 2. OrderedDict means _insertion_ order, so not enough because
    #    built from other (random!) dicts: need to _sort_ first.
    # 3. Sorting memory address looks good.
    return OrderedDict(sorted(ret.items()))
def get_symbols(elf):
    """Build a name -> address mapping from the ELF's symbol table."""
    for section in elf.iter_sections():
        if not isinstance(section, SymbolTableSection):
            continue
        return {sym.name: sym.entry.st_value
                for sym in section.iter_symbols()}
    raise LookupError("Could not find symbol table")
# -- GPERF generation logic
header = """%compare-lengths
%define lookup-function-name z_object_lookup
%language=ANSI-C
%global-table
%struct-type
%{
#include <zephyr/kernel.h>
#include <zephyr/toolchain.h>
#include <zephyr/internal/syscall_handler.h>
#include <string.h>
%}
struct k_object;
"""
# Different versions of gperf have different prototypes for the lookup
# function, best to implement the wrapper here. The pointer value itself is
# turned into a string, we told gperf to expect binary strings that are not
# NULL-terminated.
footer = """%%
struct k_object *z_object_gperf_find(const void *obj)
{
return z_object_lookup((const char *)obj, sizeof(void *));
}
void z_object_gperf_wordlist_foreach(_wordlist_cb_func_t func, void *context)
{
int i;
for (i = MIN_HASH_VALUE; i <= MAX_HASH_VALUE; i++) {
if (wordlist[i].name != NULL) {
func(&wordlist[i], context);
}
}
}
#ifndef CONFIG_DYNAMIC_OBJECTS
struct k_object *k_object_find(const void *obj)
ALIAS_OF(z_object_gperf_find);
void k_object_wordlist_foreach(_wordlist_cb_func_t func, void *context)
ALIAS_OF(z_object_gperf_wordlist_foreach);
#endif
"""
def write_gperf_table(fp, syms, objs, little_endian, static_begin, static_end):
    """Write the gperf input file describing every kernel object.

    *objs* maps object address -> KobjectInstance (sorted); the
    [static_begin, static_end) range marks statically initialized
    objects.  Reads the module-level counters set by find_kobjects().
    """
    fp.write(header)
    if sys_mutex_counter != 0:
        # Backing storage for the kernel-side state of user sys_mutexes.
        fp.write("static struct k_mutex kernel_mutexes[%d] = {\n"
                 % sys_mutex_counter)
        for i in range(sys_mutex_counter):
            fp.write("Z_MUTEX_INITIALIZER(kernel_mutexes[%d])" % i)
            if i != sys_mutex_counter - 1:
                fp.write(", ")
        fp.write("};\n")
    if futex_counter != 0:
        # Backing storage for the kernel-side state of user futexes.
        fp.write("static struct z_futex_data futex_data[%d] = {\n"
                 % futex_counter)
        for i in range(futex_counter):
            fp.write("Z_FUTEX_DATA_INITIALIZER(futex_data[%d])" % i)
            if i != futex_counter - 1:
                fp.write(", ")
        fp.write("};\n")
    # Which union member of the metadata field each object type uses.
    metadata_names = {
        "K_OBJ_THREAD" : "thread_id",
        "K_OBJ_SYS_MUTEX" : "mutex",
        "K_OBJ_FUTEX" : "futex_data"
    }
    if "CONFIG_GEN_PRIV_STACKS" in syms:
        metadata_names["K_OBJ_THREAD_STACK_ELEMENT"] = "stack_data"
        if stack_counter != 0:
            # Same as K_KERNEL_STACK_ARRAY_DEFINE, but routed to a different
            # memory section.
            fp.write("static uint8_t Z_GENERIC_SECTION(.priv_stacks.noinit) "
                     " __aligned(Z_KERNEL_STACK_OBJ_ALIGN)"
                     " priv_stacks[%d][K_KERNEL_STACK_LEN(CONFIG_PRIVILEGED_STACK_SIZE)];\n"
                     % stack_counter)
            fp.write("static const struct z_stack_data stack_data[%d] = {\n"
                     % stack_counter)
            counter = 0
            for _, ko in objs.items():
                if ko.type_name != "K_OBJ_THREAD_STACK_ELEMENT":
                    continue
                # ko.data currently has the stack size. fetch the value to
                # populate the appropriate entry in stack_data, and put
                # a reference to the entry in stack_data into the data value
                # instead
                size = ko.data
                ko.data = "&stack_data[%d]" % counter
                fp.write("\t{ %d, (uint8_t *)(&priv_stacks[%d]) }"
                         % (size, counter))
                if counter != (stack_counter - 1):
                    fp.write(",")
                fp.write("\n")
                counter += 1
            fp.write("};\n")
    else:
        metadata_names["K_OBJ_THREAD_STACK_ELEMENT"] = "stack_size"
    # "%%" separates gperf declarations from the keyword table.
    fp.write("%%\n")
    # Setup variables for mapping thread indexes
    thread_max_bytes = syms["CONFIG_MAX_THREAD_BYTES"]
    thread_idx_map = {}
    for i in range(0, thread_max_bytes):
        thread_idx_map[i] = 0xFF
    for obj_addr, ko in objs.items():
        obj_type = ko.type_name
        # pre-initialized objects fall within this memory range, they are
        # either completely initialized at build time, or done automatically
        # at boot during some PRE_KERNEL_* phase
        initialized = static_begin <= obj_addr < static_end
        is_driver = obj_type.startswith("K_OBJ_DRIVER_")
        if "CONFIG_64BIT" in syms:
            format_code = "Q"
        else:
            format_code = "I"
        if little_endian:
            endian = "<"
        else:
            endian = ">"
        byte_str = struct.pack(endian + format_code, obj_addr)
        # The gperf "key" is the raw object address, escaped byte by byte.
        fp.write("\"")
        for byte in byte_str:
            val = "\\x%02x" % byte
            fp.write(val)
        flags = "0"
        if initialized:
            flags += " | K_OBJ_FLAG_INITIALIZED"
        if is_driver:
            flags += " | K_OBJ_FLAG_DRIVER"
        if ko.type_name in metadata_names:
            tname = metadata_names[ko.type_name]
        else:
            tname = "unused"
        fp.write("\", {0}, %s, %s, { .%s = %s }\n" % (obj_type, flags,
                 tname, str(ko.data)))
        if obj_type == "K_OBJ_THREAD":
            # Clear the bitmap bit for this thread's index: 0 = in use.
            idx = math.floor(ko.data / 8)
            bit = ko.data % 8
            thread_idx_map[idx] = thread_idx_map[idx] & ~(2**bit)
    fp.write(footer)
    # Generate the array of already mapped thread indexes
    fp.write('\n')
    fp.write('Z_GENERIC_DOT_SECTION(data)\n')
    fp.write('uint8_t _thread_idx_map[%d] = {' % (thread_max_bytes))
    for i in range(0, thread_max_bytes):
        fp.write(' 0x%x, ' % (thread_idx_map[i]))
    fp.write('};\n')
driver_macro_tpl = """
#define K_SYSCALL_DRIVER_%(driver_upper)s(ptr, op) K_SYSCALL_DRIVER_GEN(ptr, op, %(driver_lower)s, %(driver_upper)s)
"""
def write_validation_output(fp):
    """Write the driver validation header (DRIVER_VALIDATION_GEN_H).

    Emits the generic K_SYSCALL_DRIVER_GEN macro followed by one
    per-subsystem K_SYSCALL_DRIVER_<NAME> dispatch macro.
    """
    preamble = [
        "#ifndef DRIVER_VALIDATION_GEN_H\n",
        "#define DRIVER_VALIDATION_GEN_H\n",
        """#define K_SYSCALL_DRIVER_GEN(ptr, op, driver_lower_case, driver_upper_case) \\
(K_SYSCALL_OBJ(ptr, K_OBJ_DRIVER_##driver_upper_case) || \\
K_SYSCALL_DRIVER_OP(ptr, driver_lower_case##_driver_api, op))
""",
    ]
    for chunk in preamble:
        fp.write(chunk)

    # One dispatch macro per driver subsystem; strip the "_driver_api"
    # suffix to get the bare subsystem name.
    for subsystem in subsystems:
        name = subsystem.replace("_driver_api", "")
        fp.write(driver_macro_tpl % {
            "driver_lower": name.lower(),
            "driver_upper": name.upper(),
        })

    fp.write("#endif /* DRIVER_VALIDATION_GEN_H */\n")
def write_kobj_types_output(fp):
    """Write k_object enum constants for core objects and driver subsystems."""
    fp.write("/* Core kernel objects */\n")
    for kobj, (dep, _, _) in kobjects.items():
        # "device" objects are represented via the driver subsystem list.
        if kobj == "device":
            continue
        if dep:
            fp.write("#ifdef %s\n" % dep)
        fp.write("%s,\n" % kobject_to_enum(kobj))
        if dep:
            fp.write("#endif\n")

    fp.write("/* Driver subsystems */\n")
    for subsystem in subsystems:
        name = subsystem.replace("_driver_api", "").upper()
        fp.write("K_OBJ_DRIVER_%s,\n" % name)
def write_kobj_otype_output(fp):
    """Write case statements mapping each k_object enum value to a name string."""
    fp.write("/* Core kernel objects */\n")
    for kobj, (dep, _, _) in kobjects.items():
        # "device" objects are represented via the driver subsystem list.
        if kobj == "device":
            continue
        if dep:
            fp.write("#ifdef %s\n" % dep)
        fp.write('case %s: ret = "%s"; break;\n'
                 % (kobject_to_enum(kobj), kobj))
        if dep:
            fp.write("#endif\n")

    fp.write("/* Driver subsystems */\n")
    for subsystem in subsystems:
        name = subsystem.replace("_driver_api", "")
        fp.write('case K_OBJ_DRIVER_%s: ret = "%s driver"; break;\n'
                 % (name.upper(), name))
def write_kobj_size_output(fp):
    """Write case statements returning sizeof() for allocatable kernel objects."""
    fp.write("/* Non device/stack objects */\n")
    for kobj, (dep, _, alloc) in kobjects.items():
        # Only objects flagged as dynamically allocatable need a size entry.
        if not alloc:
            continue
        if dep:
            fp.write("#ifdef %s\n" % dep)
        fp.write('case %s: ret = sizeof(struct %s); break;\n'
                 % (kobject_to_enum(kobj), kobj))
        if dep:
            fp.write("#endif\n")
def parse_subsystems_list_file(path):
    """Merge a JSON file's subsystem / net-socket name lists into the globals.

    The file must contain a JSON object with "__subsystem" and
    "__net_socket" list entries.
    """
    with open(path, "r") as fp:
        data = json.load(fp)
    subsystems.extend(data["__subsystem"])
    net_sockets.extend(data["__net_socket"])
def parse_args():
    """Parse the command line into the module-global ``args``.

    Every output option is independent; any subset may be requested.
    The VERBOSE environment variable forces verbose mode on.
    """
    global args

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)

    # (flags, keyword arguments) for every supported option.
    options = [
        (("-k", "--kernel"),
         dict(required=False, help="Input zephyr ELF binary")),
        (("-g", "--gperf-output"),
         dict(required=False,
              help="Output list of kernel object addresses for gperf use")),
        (("-V", "--validation-output"),
         dict(required=False, help="Output driver validation macros")),
        (("-K", "--kobj-types-output"),
         dict(required=False, help="Output k_object enum constants")),
        (("-S", "--kobj-otype-output"),
         dict(required=False, help="Output case statements for otype_to_str()")),
        (("-Z", "--kobj-size-output"),
         dict(required=False, help="Output case statements for obj_size_get()")),
        (("-i", "--include-subsystem-list"),
         dict(required=False, action='append',
              help='''Specifies a file with a JSON encoded list of subsystem names to append to
the driver subsystems list. Can be specified multiple times:
-i file1 -i file2 ...''')),
        (("-v", "--verbose"),
         dict(action="store_true", help="Print extra debugging information")),
    ]
    for flags, kwargs in options:
        parser.add_argument(*flags, **kwargs)

    args = parser.parse_args()

    # Environment variable overrides the command-line flag.
    if "VERBOSE" in os.environ:
        args.verbose = 1
def main():
    """Entry point: parse arguments and emit whichever outputs were requested."""
    parse_args()

    # Optional JSON files extend the built-in driver subsystem and
    # net-socket name lists before any output is generated.
    if args.include_subsystem_list is not None:
        for list_file in args.include_subsystem_list:
            parse_subsystems_list_file(list_file)

    if args.gperf_output:
        # The gperf table is derived from the kernel ELF image, so the
        # ELF is mandatory for this output only.
        assert args.kernel, "--kernel ELF required for --gperf-output"
        elf = ELFFile(open(args.kernel, "rb"))
        syms = get_symbols(elf)
        # CONFIG_MAX_THREAD_BYTES is a bitfield size in bytes; each
        # thread object consumes one bit of it.
        max_threads = syms["CONFIG_MAX_THREAD_BYTES"] * 8
        objs = find_kobjects(elf, syms)
        if not objs:
            sys.stderr.write("WARNING: zero kobject found in %s\n"
                             % args.kernel)

        # NOTE(review): thread_counter is presumably a module-level count
        # updated while scanning for kobjects -- confirm upstream.
        if thread_counter > max_threads:
            # -(-x // 8) is ceiling division: bytes needed for x bits.
            sys.exit("Too many thread objects ({})\n"
                     "Increase CONFIG_MAX_THREAD_BYTES to {}"
                     .format(thread_counter, -(-thread_counter // 8)))

        with open(args.gperf_output, "w") as fp:
            write_gperf_table(fp, syms, objs, elf.little_endian,
                              syms["_static_kernel_objects_begin"],
                              syms["_static_kernel_objects_end"])

    # The remaining outputs are independent of the ELF image.
    if args.validation_output:
        with open(args.validation_output, "w") as fp:
            write_validation_output(fp)

    if args.kobj_types_output:
        with open(args.kobj_types_output, "w") as fp:
            write_kobj_types_output(fp)

    if args.kobj_otype_output:
        with open(args.kobj_otype_output, "w") as fp:
            write_kobj_otype_output(fp)

    if args.kobj_size_output:
        with open(args.kobj_size_output, "w") as fp:
            write_kobj_size_output(fp)


if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/gen_kobject_list.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 8,218 |
```python
#!/usr/bin/env python3
#
#
'''
Script to generate image information files.
This script creates a image information header which can be included by a
second build system.
This allows a second stage build system to use image information from a Zephyr
build by including the generated header.
Information included in the image information header:
- Number of segments in the image
- LMA address of each segment
- VMA address of each segment
- LMA adjusted of each segment if the LMA addresses has been adjusted after linking
- Size of each segment
'''
import argparse
import re
from elftools.elf.elffile import ELFFile
def write_header(filename, segments, adjusted_lma):
    """Write a C header describing the image's ELF segments.

    Emits SEGMENT_NUM, ADJUSTED_LMA, and per-segment LMA address, VMA
    address and size macros, all wrapped in an include guard derived
    from the file name.

    :param filename: path of the header file to create (overwritten).
    :param segments: list of dicts, each holding a 'segment' whose
                     ``header`` exposes p_paddr / p_vaddr / p_filesz.
    :param adjusted_lma: value emitted for the ADJUSTED_LMA macro.
    """
    # Build the include-guard symbol: every non-word character in the
    # file name becomes '_' so the result is a valid C identifier.
    guard = re.sub(r'[\W]', '_', filename).upper()

    content = [
        f'#ifndef {guard}_H',
        f'#define {guard}_H',
        '',
        f'#define SEGMENT_NUM {len(segments)}',
        f'#define ADJUSTED_LMA {adjusted_lma}',
    ]

    for idx, segment in enumerate(segments):
        header = segment['segment'].header
        content.extend([
            '',
            f'#define SEGMENT_LMA_ADDRESS_{idx} {hex(header.p_paddr)}',
            f'#define SEGMENT_VMA_ADDRESS_{idx} {hex(header.p_vaddr)}',
            f'#define SEGMENT_SIZE_{idx} {hex(header.p_filesz)}',
        ])

    content.extend(['', f'#endif /* {guard}_H */'])

    with open(filename, 'w') as out_file:
        out_file.write('\n'.join(content))
def read_segments(filename):
    """Return a list of ``{'segment': <Segment>}`` dicts for *filename*.

    :param filename: path to the ELF file to inspect.

    The original code leaked the file handle by passing an anonymous
    ``open()`` to ELFFile; a context manager closes it deterministically.
    get_segment() parses each program header from the stream inside the
    ``with`` block, so the headers consumed by write_header() are fully
    materialized before the file is closed.  NOTE(review): calling
    ``segment.data()`` after this function returns would need the stream
    open -- confirm no caller does that (main() only reads headers).
    """
    with open(filename, 'rb') as f:
        elffile = ELFFile(f)
        return [{'segment': elffile.get_segment(i)}
                for i in range(elffile.num_segments())]
def main():
    """Parse the command line, read the ELF, and write the info header."""
    parser = argparse.ArgumentParser(description='''
Process ELF file and extract image information.
Create header file with extracted image information which can be included
in other build systems.''', allow_abbrev=False)

    parser.add_argument('--header-file', required=True,
                        help="""Header file to write with image data.""")
    parser.add_argument('--elf-file', required=True,
                        help="""ELF File to process.""")
    parser.add_argument('--adjusted-lma', required=False, default=0,
                        help="""Adjusted LMA address value.""")

    args = parser.parse_args()

    # Extract the segment table and emit one header describing it.
    write_header(args.header_file, read_segments(args.elf_file),
                 args.adjusted_lma)


if __name__ == "__main__":
    main()
``` | /content/code_sandbox/scripts/build/gen_image_info.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 609 |
```unknown
// Check violations for rule 14.4
// path_to_url
//
// Confidence: Moderate
//
virtual report
@initialize:python@
@@
@rule1_base@
identifier function, v;
type T1, T2;
parameter list[n] P1;
parameter list[n1] P2;
@@
(
T1 function(P1, T2 v, P2) {...}
|
T1 function(P1, T2 *v, P2) {...}
)
@ script:python @
t << rule1_base.T2;
v << rule1_base.v;
@@
if t == "bool":
cocci.include_match(False)
@rule1@
identifier rule1_base.v;
position p;
@@
(
while (v@p) {...}
|
if (v@p) {...}
)
@ script:python @
p << rule1.p;
@@
msg = "WARNING: Violation to rule 14.4 (Controlling expression shall have essentially Boolean type)"
coccilib.report.print_report(p[0], msg)
@rule2_base@
identifier v;
type T;
@@
T v;
...
@ script:python @
t << rule2_base.T;
v << rule2_base.v;
@@
if t == "bool":
cocci.include_match(False)
@rule2@
position p;
identifier rule2_base.v;
@@
while (v@p) {...}
@ script:python @
p << rule2.p;
@@
msg = "WARNING: Violation to rule 14.4 (Controlling expression shall have essentially Boolean type)"
coccilib.report.print_report(p[0], msg)
@rule3@
position p;
constant c;
@@
(
while (c@p) {...}
|
while (!c@p) {...}
|
if (c@p) {...}
|
if (!c@p) {...}
)
@ script:python @
p << rule3.p;
@@
msg = "WARNING: Violation to rule 14.4 (Controlling expression shall have essentially Boolean type)"
coccilib.report.print_report(p[0], msg)
``` | /content/code_sandbox/scripts/coccinelle/boolean.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 442 |
```python
#!/usr/bin/env python3
#
#
import argparse
import os
import re
def front_matter(sys_nerr):
    """Return the C boilerplate emitted at the top of the generated header.

    :param sys_nerr: number of table entries (highest errno + 1);
                     emitted as the ``sys_nerr`` macro that sizes both
                     generated tables.
    """
    return f'''
/*
 * This file generated by {__file__}
 */
#include <errno.h>
#include <stdint.h>
#include <string.h>
#include <zephyr/sys/util.h>
#define sys_nerr {sys_nerr}'''
def gen_strerror_table(input, output):
    """Generate a strerror lookup-table header from an errno header.

    Scans *input* for lines of the form
    ``#define SYMBOL ERRNO /**< MSG */`` (ERRNO numeric) and writes a C
    header to *output* containing ``sys_errlist`` (message strings) and
    ``sys_errlen`` (message lengths including the trailing NUL), both
    indexed by errno value.

    :param input: path of the errno header to scan.
    :param output: path of the generated header; parent directories are
                   created as needed.
    :raises OSError: if the output directory or file cannot be created.
    """
    # Select items of the form below (note: ERRNO is numeric)
    # #define SYMBOL ERRNO /**< MSG */
    pattern = re.compile(
        r'^#define[\s]+(E[A-Z_]*)[\s]+([1-9][0-9]*)[\s]+/\*\*<[\s]+(.*)[\s]+\*/[\s]*$')

    highest_errno = 0
    symbols = []
    msgs = {}

    with open(input, 'r') as inf:
        for line in inf:
            match = pattern.match(line)
            if not match:
                continue
            symbol = match[1]
            errno = int(match[2])
            symbols.append(symbol)
            msgs[symbol] = match[3]
            highest_errno = max(errno, highest_errno)

    # Create the output directory if needed.  The original code swallowed
    # BaseException here, which also hid genuine failures (e.g. EACCES);
    # exist_ok covers the only expected benign case.
    out_dir = os.path.dirname(output)
    if out_dir:
        os.makedirs(out_dir, exist_ok=True)

    with open(output, 'w') as outf:
        print(front_matter(highest_errno + 1), file=outf)

        # Generate string table, indexed by errno value.
        print('static const char *const sys_errlist[sys_nerr] = {', file=outf)
        print('[0] = "Success",', file=outf)
        for symbol in symbols:
            print(f'[{symbol}] = "{msgs[symbol]}",', file=outf)
        print('};', file=outf)

        # Generate string lengths (includes trailing '\0'); "Success" is 8.
        print('static const uint8_t sys_errlen[sys_nerr] = {', file=outf)
        print('[0] = 8,', file=outf)
        for symbol in symbols:
            print(f'[{symbol}] = {len(msgs[symbol]) + 1},', file=outf)
        print('};', file=outf)
def parse_args():
    """Return the parsed command line (input and output header paths)."""
    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument(
        '-i', '--input', dest='input', required=True,
        help='input file (e.g. lib/libc/minimal/include/errno.h)')
    parser.add_argument(
        '-o', '--output', dest='output', required=True,
        help='output file (e.g. build/zephyr/misc/generated/libc/minimal/strerror_table.h)')
    return parser.parse_args()
def main():
    """Script entry point: parse options and generate the table header."""
    opts = parse_args()
    gen_strerror_table(opts.input, opts.output)


if __name__ == '__main__':
    main()
``` | /content/code_sandbox/scripts/build/gen_strerror_table.py | python | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 625 |
```objective-c
/*
*
*/
/* File to be specified by -macro-file in invocations of coccinelle
* to avoid parse errors that prevent application of rules.
*
* This is not exhaustive: only defines that have been proven to
* inhibit context recognition are listed. The structure of the file
* is expected to follow that of the Coccinelle standard.h macro file.
*/
/* Zephyr macros */
#define ZTEST(suite, fn) static void _##suite##_##fn##_wrapper(void)
/* Attributes */
/* Confirmed problematic */
#define __noinit
#define __syscall
``` | /content/code_sandbox/scripts/coccinelle/macros.h | objective-c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 123 |
```unknown
// Flag identifiers longer than 31 characters.  MISRA-C rules 5.1/5.2
// require identifiers to be distinct; this check reports any name
// whose length exceeds 31 characters.
virtual report

// Match constructs that introduce an identifier I: function
// prototypes, function definitions, and initialized or plain
// variable declarations.
@r_idlen@
type T;
identifier I;
constant C;
position p;
@@

(
T I@p (...);
|
I@p (...)
|
T I@p = C;
|
T I@p;
)

// Report every matched identifier longer than 31 characters.
@script:python depends on report@
id << r_idlen.I;
pos << r_idlen.p;
@@

if (len(id) > 31):
    msg="WARNING: Violation to rule 5.1 or 5.2 (Identifiers shall be distinct) %s length %d > 31" % (id, len(id))
    coccilib.report.print_report(pos[0], msg)
``` | /content/code_sandbox/scripts/coccinelle/identifier_length.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 143 |
```unknown
/// Cast void to memset to ignore its return value
///
//# The return of memset and memcpy is never checked and therefore
//# cast it to void to explicitly ignore while adhering to MISRA-C.
//
// Confidence: High
//
virtual patch

// Add an explicit (void) cast in front of every memset()/memcpy()
// statement, except in files under "ext".
@depends on patch && !(file in "ext")@
expression e1,e2,e3;
@@

(
+ (void)
memset(e1,e2,e3);
|
+ (void)
memcpy(e1,e2,e3);
)
``` | /content/code_sandbox/scripts/coccinelle/ignore_return.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 101 |
```unknown
/// Find cases where ztest string comparisons macros can be used
// Confidence: LOW
// Options: --no-includes --include-headers
virtual patch
virtual context
virtual org
virtual report
// Comparing result of strcmp with 0
@@ expression E1,E2; @@
- zassert_equal(strcmp(E1, E2), 0);
+ zassert_str_equal(E1, E2);
@@ expression E1,E2,E3; @@
- zassert_equal(strcmp(E1, E2), 0, E3);
+ zassert_str_equal(E1, E2, E3);
@@ expression E1,E2; @@
- zassert_equal(0, strcmp(E1, E2));
+ zassert_str_equal(E1, E2);
@@ expression E1,E2,E3; @@
- zassert_equal(0, !strcmp(E1, E2), E3);
+ zassert_str_equal(E1, E2, E3);
// Using assert_true with !strcmp
@@ expression E1,E2; @@
- zassert_true(!strcmp(E1, E2));
+ zassert_str_equal(E1, E2);
@@ expression E1,E2,E3; @@
- zassert_true(!strcmp(E1, E2), E3);
+ zassert_str_equal(E1, E2, E3);
// using zassert_true with strcmp(E1, E2) == 0
@@expression E1,E2; @@
- zassert_true(strcmp(E1, E2) == 0);
+ zassert_str_equal(E1, E2);
@@expression E1,E2; @@
- zassert_true((strcmp(E1, E2) == 0));
+ zassert_str_equal(E1, E2);
@@expression E1,E2,E3; @@
- zassert_true(strcmp(E1, E2) == 0, E3);
+ zassert_str_equal(E1, E2, E3);
@@expression E1,E2,E3; @@
- zassert_true((strcmp(E1, E2) == 0), E3);
+ zassert_str_equal(E1, E2, E3);
// using zassert_true with 0 == strcmp(E1, E2)
@@expression E1,E2; @@
- zassert_true(0 == strcmp(E1, E2));
+ zassert_str_equal(E1, E2);
@@expression E1,E2; @@
- zassert_true((0 == strcmp(E1, E2)));
+ zassert_str_equal(E1, E2);
@@expression E1,E2,E3; @@
- zassert_true(0 == strcmp(E1, E2), E3);
+ zassert_str_equal(E1, E2, E3);
@@expression E1,E2,E3; @@
- zassert_true((0 == strcmp(E1, E2)), E3);
+ zassert_str_equal(E1, E2, E3);
``` | /content/code_sandbox/scripts/coccinelle/ztest_strcmp.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 615 |
```unknown
///
/// A variable is dereferenced under a NULL test.
/// Even though it is known to be NULL.
///
// Confidence: Moderate
// URL: path_to_url
// Comments: -I ... -all_includes can give more complete results
// Options:
virtual context
virtual org
virtual report
// The following two rules are separate, because both can match a single
// expression in different ways
@pr1 depends on !(file in "ext") expression@
expression E;
identifier f;
position p1;
@@
(E != NULL && ...) ? <+...E->f@p1...+> : ...
@pr2 depends on !(file in "ext")@
expression E;
identifier f;
position p2;
@@
(
(E != NULL) && ... && <+...E->f@p2...+>
|
(E == NULL) || ... || <+...E->f@p2...+>
|
sizeof(<+...E->f@p2...+>)
)
@ifm depends on !(file in "ext")@
expression *E;
statement S1,S2;
position p1;
@@
if@p1 ((E == NULL && ...) || ...) S1 else S2
// For org and report modes
@r depends on !context && (org || report) && !(file in "ext") exists@
expression subE <= ifm.E;
expression *ifm.E;
expression E1,E2;
identifier f;
statement S1,S2,S3,S4;
iterator iter;
position p!={pr1.p1,pr2.p2};
position ifm.p1;
@@
if@p1 ((E == NULL && ...) || ...)
{
... when != if (...) S1 else S2
(
iter(subE,...) S4 // no use
|
list_remove_head(E2,subE,...)
|
subE = E1
|
for(subE = E1;...;...) S4
|
subE++
|
++subE
|
--subE
|
subE--
|
&subE
|
E->f@p // bad use
)
... when any
return ...;
}
else S3
@script:python depends on !context && !org && report@
p << r.p;
p1 << ifm.p1;
x << ifm.E;
@@
msg="ERROR: %s is NULL but dereferenced." % (x)
coccilib.report.print_report(p[0], msg)
cocci.include_match(False)
@script:python depends on !context && org && !report@
p << r.p;
p1 << ifm.p1;
x << ifm.E;
@@
msg="ERROR: %s is NULL but dereferenced." % (x)
msg_safe=msg.replace("[","@(").replace("]",")")
cocci.print_main(msg_safe,p)
cocci.include_match(False)
@s depends on !context && (org || report) exists@
expression subE <= ifm.E;
expression *ifm.E;
expression E1,E2;
identifier f;
statement S1,S2,S3,S4;
iterator iter;
position p!={pr1.p1,pr2.p2};
position ifm.p1;
@@
if@p1 ((E == NULL && ...) || ...)
{
... when != if (...) S1 else S2
(
iter(subE,...) S4 // no use
|
list_remove_head(E2,subE,...)
|
subE = E1
|
for(subE = E1;...;...) S4
|
subE++
|
++subE
|
--subE
|
subE--
|
&subE
|
E->f@p // bad use
)
... when any
}
else S3
@script:python depends on !context && !org && report@
p << s.p;
p1 << ifm.p1;
x << ifm.E;
@@
msg="ERROR: %s is NULL but dereferenced." % (x)
coccilib.report.print_report(p[0], msg)
@script:python depends on !context && org && !report@
p << s.p;
p1 << ifm.p1;
x << ifm.E;
@@
msg="ERROR: %s is NULL but dereferenced." % (x)
msg_safe=msg.replace("[","@(").replace("]",")")
cocci.print_main(msg_safe,p)
// For context mode
@depends on context && !org && !report && !(file in "ext") exists@
expression subE <= ifm.E;
expression *ifm.E;
expression E1,E2;
identifier f;
statement S1,S2,S3,S4;
iterator iter;
position p!={pr1.p1,pr2.p2};
position ifm.p1;
@@
if@p1 ((E == NULL && ...) || ...)
{
... when != if (...) S1 else S2
(
iter(subE,...) S4 // no use
|
list_remove_head(E2,subE,...)
|
subE = E1
|
for(subE = E1;...;...) S4
|
subE++
|
++subE
|
--subE
|
subE--
|
&subE
|
* E->f@p // bad use
)
... when any
return ...;
}
else S3
// The following three rules are duplicates of ifm, pr1 and pr2 respectively.
// It is need because the previous rule as already made a "change".
@pr11 depends on context && !org && !report && !(file in "ext") && pr1 expression@
expression E;
identifier f;
position p1;
@@
(E != NULL && ...) ? <+...E->f@p1...+> : ...
@pr12 depends on context && !org && !report && pr2@
expression E;
identifier f;
position p2;
@@
(
(E != NULL) && ... && <+...E->f@p2...+>
|
(E == NULL) || ... || <+...E->f@p2...+>
|
sizeof(<+...E->f@p2...+>)
)
@ifm1 depends on context && !org && !report && !(file in "ext") && ifm@
expression *E;
statement S1,S2;
position p1;
@@
if@p1 ((E == NULL && ...) || ...) S1 else S2
@depends on context && !org && !report exists@
expression subE <= ifm1.E;
expression *ifm1.E;
expression E1,E2;
identifier f;
statement S1,S2,S3,S4;
iterator iter;
position p!={pr11.p1,pr12.p2};
position ifm1.p1;
@@
if@p1 ((E == NULL && ...) || ...)
{
... when != if (...) S1 else S2
(
iter(subE,...) S4 // no use
|
list_remove_head(E2,subE,...)
|
subE = E1
|
for(subE = E1;...;...) S4
|
subE++
|
++subE
|
--subE
|
subE--
|
&subE
|
* E->f@p // bad use
)
... when any
}
else S3
``` | /content/code_sandbox/scripts/coccinelle/deref_null.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,550 |
```unknown
// Enforce preservation of const qualifier on config_info casts
//
// Drivers cast the device config_info pointer to a driver-specific
// structure. The object is const-qualified; make sure the cast
// doesn't inadvertently remove that qualifier.
//
// Also add the qualifier to pointer definitions where it's missing.
//
// Note that this patch may produce incorrect results if config_info
// appears as a tag in non-device aggregate types.
//
// Options: --include-headers
virtual patch
virtual report
// bare: (struct T*)E
@r_cci_bare_patch
depends on patch
disable optional_qualifier
@
identifier T;
expression E;
@@
(
+const
struct T*)E->config_info
// bare const: (struct T* const)E
@r_cci_bare_lc_patch
depends on patch
disable optional_qualifier
@
identifier T;
expression E;
@@
(
+const
struct T * const)E->config_info
// asg: struct T *D = (const struct T*)
@r_cci_asg_patch
depends on patch
disable optional_qualifier
@
identifier T;
identifier D;
expression E;
@@
+const
struct T * D = (const struct T*)E->config_info;
// asg to const local: struct T * const D = (const struct T*)
@r_cci_lc_asg_patch
depends on patch
disable optional_qualifier
@
identifier T;
identifier D;
expression E;
@@
+const
struct T * const D = (const struct T*)E->config_info;
// asg via macro: struct T * D = DEV_CFG()
@r_cci_asg_macro_patch
depends on patch
disable optional_qualifier
@
identifier T;
identifier D;
expression E;
@@
+const
struct T * D = DEV_CFG(E);
// asg via macro to const local: struct T * const D = DEV_CFG()
@r_cci_lc_asg_macro_patch
depends on patch
disable optional_qualifier
@
identifier T;
identifier D;
expression E;
@@
+const
struct T * const D = DEV_CFG(E);
// asg via macro: struct T * D; ... ; D = (const struct T*)CI;
@r_cci_delayed_asg_patch
depends on patch
disable optional_qualifier
@
identifier T;
identifier D;
expression E;
@@
+const
struct T * D;
...
D = (const struct T*)E->config_info;
// delayed asg via macro: struct T * D; ... ; D = DEV_CFG();
@r_cci_delayed_asg_macro_patch
depends on patch
disable optional_qualifier
@
identifier T;
identifier D;
expression E;
@@
+const
struct T * D;
...
D = DEV_CFG(E);
@r_cci_report
depends on report
disable optional_qualifier
@
identifier T;
expression E;
position p;
@@
(struct T*)E->config_info@p
@script:python
depends on report
@
t << r_cci_report.T;
p << r_cci_report.p;
@@
msg = "WARNING: cast of config_info to struct {} requires 'const'".format(t)
coccilib.report.print_report(p[0], msg)
``` | /content/code_sandbox/scripts/coccinelle/const_config_info.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 690 |
```unknown
///
/// Remove unneeded variable used to store return value.
///
// Confidence: Moderate
// URL: path_to_url
// Comments: Comments on code can be deleted if near code that is removed.
// "when strict" can be removed to get more hits, but adds false
// positives
virtual patch
virtual report
virtual context
virtual org
@depends on patch && !(file in "ext")@
type T;
constant C;
identifier ret;
@@
- T ret = C;
... when != ret
when strict
return
- ret
+ C
;
@depends on context && !(file in "ext")@
type T;
constant C;
identifier ret;
@@
* T ret = C;
... when != ret
when strict
* return ret;
@r1 depends on (report || org) && !(file in "ext")@
type T;
constant C;
identifier ret;
position p1, p2;
@@
T ret@p1 = C;
... when != ret
when strict
return ret@p2;
@script:python depends on report@
p1 << r1.p1;
p2 << r1.p2;
C << r1.C;
ret << r1.ret;
@@
coccilib.report.print_report(p1[0], "Unneeded variable: \"" + ret +
"\". Return \"" + C + "\" on line "
+ p2[0].line)
@script:python depends on org@
p1 << r1.p1;
p2 << r1.p2;
C << r1.C;
ret << r1.ret;
@@
cocci.print_main("unneeded \"" + ret + "\" variable", p1)
cocci.print_sec("return " + C + " here", p2)
``` | /content/code_sandbox/scripts/coccinelle/returnvar.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 374 |
```unknown
/// Use BIT() helper macro instead of hardcoding using bitshifting
///
// Confidence: High
//
virtual patch

// Replace the literal shift (1 << A) with the BIT(A) helper in all
// files except those under "ext".  NOTE(review): assumes BIT()
// performs the shift on an unsigned operand -- confirm against the
// util.h definition.
@depends on patch && !(file in "ext")@
expression A;
@@

- (1 << A)
+ BIT(A)
``` | /content/code_sandbox/scripts/coccinelle/unsigned_shift.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 53 |
```unknown
// Check violations for rule 5.7
// path_to_url
//
// Confidence: Moderate
//
virtual report
@initialize:python@
@@
@common_case@
position p;
identifier t, v;
@@
(
struct t *v@p;
|
struct t v@p;
|
union t v@p;
)
@ script:python @
t << common_case.t;
v << common_case.v;
p << common_case.p;
@@
msg = "WARNING: Violation to rule 5.7 (Tag name should be unique) tag: {}".format(v)
if t == v:
coccilib.report.print_report(p[0], msg)
@per_type@
type T;
identifier v;
position p;
@@
(
T v@p;
|
T *v@p;
)
@ script:python @
t << per_type.T;
v << per_type.v;
p << per_type.p;
@@
msg = "WARNING: Violation to rule 5.7 (Tag name should be unique) tag: {}".format(v)
if t == v:
coccilib.report.print_report(p[0], msg)
@function_match@
type T1, T2;
identifier function, v;
position p;
parameter list[n] P1;
parameter list[n1] P2;
@@
T1 function(P1, T2 *v@p, P2) {
...
}
@ script:python @
v << function_match.v;
t << function_match.T2;
p << function_match.p;
@@
msg = "WARNING: Violation to rule 5.7 (Tag name should be unique) tag: {}".format(v)
if v == t.split(" ")[-1]:
coccilib.report.print_report(p[0], msg)
``` | /content/code_sandbox/scripts/coccinelle/same_identifier.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 368 |
```unknown
// Replace use of K_NO_WAIT and K_FOREVER in API that requires
// timeouts be specified as integral milliseconds.
//
// These constants used to have the values 0 and -1 respectively; they
// are now timeout values which are opaque non-integral values that
// can't be converted to integers automatically. For K_NO_WAIT replace
// with 0; for K_FOREVER replace with SYS_FOREVER_MS, the value of
// which is -1.
//
// Options: --include-headers
virtual patch
virtual report
// ** Handle millisecond timeout as the last parameter
// Match identifier passed as timeout
@match_fn_l1@
identifier fn =~ "(?x)^
(dmic_read
|bt_buf_get_(rx|cmd_complete|evt)
|bt_mesh_cfg_cli_timeout_set
|isotp_(bind|recv(_net)?|send(_net)?(_ctx)?_buf)
|tty_set_(tx|rx)_timeout
|can_(write|recover)
|uart_(tx|rx_enable)
|dns_(resolve_name|get_addr_info)
|net_config_init
|net_ppp_ping
|websocket_(send|recv)_msg
)$";
identifier T;
@@
fn(..., T);
@report_fn_l1
extends match_fn_l1
depends on report
@
identifier K_NO_WAIT =~ "^K_NO_WAIT$";
identifier K_FOREVER =~ "^K_FOREVER$";
position p;
@@
fn@p(...,
(
K_NO_WAIT
|
K_FOREVER
)
)
@script:python
depends on report
@
fn << match_fn_l1.fn;
T << match_fn_l1.T;
p << report_fn_l1.p;
@@
msg = "WARNING: [msl1] replace constant {} with ms duration in {}".format(T, fn)
coccilib.report.print_report(p[0], msg);
@fix_fn_l1
extends match_fn_l1
depends on patch
@
identifier K_NO_WAIT =~ "^K_NO_WAIT$";
identifier K_FOREVER =~ "^K_FOREVER$";
@@
fn(...,
(
- K_NO_WAIT
+ 0
|
- K_FOREVER
+ SYS_FOREVER_MS
))
// ** Handle millisecond timeout as second from last parameter
// Match identifier passed as timeout
@match_fn_l2@
identifier fn =~ "(?x)^
(http_client_req
|websocket_connect
)$";
expression L1;
identifier T;
@@
fn(..., T, L1)
@report_fn_l2
extends match_fn_l2
depends on report
@
identifier K_NO_WAIT =~ "^K_NO_WAIT$";
identifier K_FOREVER =~ "^K_FOREVER$";
expression X1;
position p;
@@
fn@p(...,
(
K_NO_WAIT
|
K_FOREVER
)
, X1)
@script:python
depends on report
@
fn << match_fn_l2.fn;
T << match_fn_l2.T;
p << report_fn_l2.p;
@@
msg = "WARNING: [msl2] replace constant {} with ms duration in {}".format(T, fn)
coccilib.report.print_report(p[0], msg);
@fix_fn_l2
extends match_fn_l2
depends on patch
@
identifier K_NO_WAIT =~ "^K_NO_WAIT$";
identifier K_FOREVER =~ "^K_FOREVER$";
expression X1;
@@
fn(...,
(
- K_NO_WAIT
+ 0
|
- K_FOREVER
+ SYS_FOREVER_MS
)
, X1)
// ** Handle millisecond timeout as third from last parameter
// Match identifier passed as timeout
@match_fn_l3@
identifier fn =~ "(?x)^
(can_send
|lora_recv
)$";
expression L1;
expression L2;
identifier T;
@@
fn(..., T, L2, L1)
@report_fn_l3
extends match_fn_l3
depends on report
@
identifier K_NO_WAIT =~ "^K_NO_WAIT$";
identifier K_FOREVER =~ "^K_FOREVER$";
position p;
@@
fn@p(...,
(
K_NO_WAIT
|
K_FOREVER
)
, L2, L1)
@script:python
depends on report
@
fn << match_fn_l3.fn;
T << match_fn_l3.T;
p << report_fn_l3.p;
@@
msg = "WARNING: [msl3] replace constant {} with ms duration in {}".format(T, fn)
coccilib.report.print_report(p[0], msg);
@fix_fn_l3
extends match_fn_l3
depends on patch
@
identifier K_NO_WAIT =~ "^K_NO_WAIT$";
identifier K_FOREVER =~ "^K_FOREVER$";
@@
fn(...,
(
- K_NO_WAIT
+ 0
|
- K_FOREVER
+ SYS_FOREVER_MS
)
, L2, L1)
``` | /content/code_sandbox/scripts/coccinelle/ms_timeout.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,010 |
```unknown
///
/// Remove unneeded semicolon.
///
// Confidence: Moderate
// URL: path_to_url
// Comments: Some false positives on empty default cases in switch statements.
// Options: --no-includes --include-headers
virtual patch
virtual report
virtual context
virtual org
@r_default depends on !(file in "ext")@
position p;
@@
switch (...)
{
default: ...;@p
}
@r_case depends on !(file in "ext")@
position p;
@@
(
switch (...)
{
case ...:;@p
}
|
switch (...)
{
case ...:...
case ...:;@p
}
|
switch (...)
{
case ...:...
case ...:
case ...:;@p
}
)
@r1 depends on !(file in "ext")@
statement S;
position p1;
position p != {r_default.p, r_case.p};
identifier label;
@@
(
label:;
|
S@p1;@p
)
@script:python@
p << r1.p;
p1 << r1.p1;
@@
if p[0].line != p1[0].line_end:
cocci.include_match(False)
@depends on patch@
position r1.p;
@@
-;@p
@script:python depends on report@
p << r1.p;
@@
coccilib.report.print_report(p[0],"Unneeded semicolon")
@depends on context@
position r1.p;
@@
*;@p
@script:python depends on org@
p << r1.p;
@@
cocci.print_main("Unneeded semicolon",p)
``` | /content/code_sandbox/scripts/coccinelle/semicolon.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 342 |
```unknown
/// Use ARRAY_SIZE instead of dividing sizeof array with sizeof an element
///
//# This makes an effort to find cases where ARRAY_SIZE can be used such as
//# where there is a division of sizeof the array by the sizeof its first
//# element or by any indexed element or the element type. It replaces the
//# division of the two sizeofs by ARRAY_SIZE.
//
// Confidence: High
// Comments:
// Options: --no-includes --include-headers
virtual patch
virtual context
virtual org
virtual report
@i@
@@
#include <include/misc/util.h>
//----------------------------------------------------------
// For context mode
//----------------------------------------------------------
@depends on i&&context && !(file in "ext")@
type T;
T[] E;
@@
(
* (sizeof(E)/sizeof(*E))
|
* (sizeof(E)/sizeof(E[...]))
|
* (sizeof(E)/sizeof(T))
)
//----------------------------------------------------------
// For patch mode
//----------------------------------------------------------
@depends on i&&patch && !(file in "ext")@
type T;
T[] E;
@@
(
- (sizeof(E)/sizeof(*E))
+ ARRAY_SIZE(E)
|
- (sizeof(E)/sizeof(E[...]))
+ ARRAY_SIZE(E)
|
- (sizeof(E)/sizeof(T))
+ ARRAY_SIZE(E)
)
//----------------------------------------------------------
// For org and report mode
//----------------------------------------------------------
@r depends on (org || report) && !(file in "ext")@
type T;
T[] E;
position p;
@@
(
(sizeof(E)@p /sizeof(*E))
|
(sizeof(E)@p /sizeof(E[...]))
|
(sizeof(E)@p /sizeof(T))
)
@script:python depends on org@
p << r.p;
@@
coccilib.org.print_todo(p[0], "WARNING should use ARRAY_SIZE")
@script:python depends on report@
p << r.p;
@@
msg="WARNING: Use ARRAY_SIZE"
coccilib.report.print_report(p[0], msg)
``` | /content/code_sandbox/scripts/coccinelle/array_size.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 409 |
```unknown
/// Find missing unlocks. This semantic match considers the specific case
/// where the unlock is missing from an if branch, and there is a lock
/// before the if and an unlock after the if. False positives are due to
/// cases where the if branch represents a case where the function is
/// supposed to exit with the lock held, or where there is some preceding
/// function call that releases the lock.
///
// Confidence: Moderate
// URL: path_to_url
virtual context
virtual org
virtual report
@prelocked depends on !(file in "ext")@
position p1,p;
expression E1;
@@
(
irq_lock@p1
|
k_mutex_lock@p1
|
k_sem_take@p1
|
k_spin_lock@p1
) (E1@p,...);
@looped depends on !(file in "ext")@
position r;
@@
for(...;...;...) { <+... return@r ...; ...+> }
@balanced exists@
position p1 != prelocked.p1;
position prelocked.p;
position pif;
identifier lock,unlock;
expression x <= prelocked.E1;
expression E,prelocked.E1;
expression E2;
@@
if (E) {
... when != E1
lock(E1@p,...)
... when any
}
... when != E1
when != \(x = E2\|&x\)
if@pif (E) {
... when != E1
unlock@p1(E1,...)
... when any
}
@err depends on !(file in "ext") exists@
expression E1;
position prelocked.p,balanced.pif;
position up != prelocked.p1;
position r!=looped.r;
identifier lock,unlock;
statement S1,S2;
@@
*lock(E1@p,...);
... when != E1
when any
when != if@pif (...) S1
if (...) {
... when != E1
when != if@pif (...) S2
* return@r ...;
}
... when != E1
when any
*unlock@up(E1,...);
@script:python depends on org@
p << prelocked.p1;
lock << err.lock;
unlock << err.unlock;
p2 << err.r;
@@
cocci.print_main(lock,p)
cocci.print_secs(unlock,p2)
@script:python depends on report@
p << prelocked.p1;
lock << err.lock;
unlock << err.unlock;
p2 << err.r;
@@
msg = "preceding lock on line %s" % (p[0].line)
coccilib.report.print_report(p2[0],msg)
``` | /content/code_sandbox/scripts/coccinelle/mini_lock.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 570 |
```unknown
// Uses a python database (a dict) to find where const struct device
// variable are being used in zephyr functions and, if it's being in place
// of a void*, it will print an ERROR for losing the const qualifier.
// If it's being used on an unknown functions from an external module such
// as a HAL, it will print a WARNING in order to check if the const qualifier
// is not lost.
virtual report
////////////////////
// Initialization //
////////////////////
@initialize:python
depends on report
@
@@
import pickle
def check_and_report(F, f, D, nb_args, p):
if f in f_void and int(nb_args) in f_void[f]:
msg = "ERROR: in {} calling {} param with {}, \
loosing const qualifier, please wrap".format(F, f, D)
coccilib.report.print_report(p[0], msg)
elif f not in f_void and f not in f_other and not f.isupper():
msg = "WARNING: in {} calling {} param with {}, \
check if const qualifier is not lost".format(F, f, D)
coccilib.report.print_report(p[0], msg)
// Loading function data base
with open("function_names.pickle", "rb") as f:
data = pickle.load(f)
f_void = data["f_void"]
f_other = data["f_other"]
///////////
// Rules //
///////////
// Find usage of a device instance
@r_find_dev_usage
depends on report
@
local idexpression struct device *D;
expression list[nb_args] args;
identifier f;
position p;
@@
f(args, D@p, ...)
@script:python
depends on r_find_dev_usage
@
f << r_find_dev_usage.f;
D << r_find_dev_usage.D;
nb_args << r_find_dev_usage.nb_args;
p << r_find_dev_usage.p;
@@
check_and_report(p[0].current_element, f, D, nb_args, p)
``` | /content/code_sandbox/scripts/coccinelle/find_dev_usage.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 428 |
```unknown
/// Find assignments to unsigned variables and add an 'U' to the value
// Confidence: High
virtual patch
virtual report
@r_unsigned@
typedef uint8_t, uint16_t, uint32_t, uint64_t, u8_t, u16_t, u32_t, u64_t;
{unsigned char, unsigned short, unsigned int, uint8_t, uint16_t, uint32_t, uint64_t, u8_t, u16_t, u32_t, u64_t} v;
constant C;
position p;
@@
(
v = C@p
|
v == C@p
|
v != C@p
|
v <= C@p
|
v >= C@p
|
v += C@p
|
v -= C@p
|
v * C@p
|
v / C@p
|
v *= C@p
|
v /= C@p
)
@script:python r_rewrite@
C << r_unsigned.C;
z;
@@
if C.isdigit() != True:
cocci.include_match(False)
coccinelle.z = C + "U"
@r_subst depends on patch@
{unsigned char, unsigned short, unsigned int, uint8_t, uint16_t, uint32_t, uint64_t, u8_t, u16_t, u32_t, u64_t} r_unsigned.v;
constant r_unsigned.C;
identifier r_rewrite.z;
@@
(
v =
- C
+ z
|
v ==
- C
+ z
|
v !=
- C
+ z
|
v <=
- C
+ z
|
v >=
- C
+ z
|
v +=
- C
+ z
|
v -=
- C
+ z
|
v +
- C
+ z
|
v -
- C
+ z
|
v +=
- C
+ z
|
v -=
- C
+ z
|
- v * C
+ v * z
|
v /
- C
+ z
|
v *=
- C
+ z
|
v /=
- C
+ z
)
@script: python depends on report@
p << r_unsigned.p;
@@
msg="WARNING: Unsigned 'U' suffix missing"
coccilib.report.print_report(p[0], msg)
``` | /content/code_sandbox/scripts/coccinelle/unsigned_suffix.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 510 |
```unknown
/// Use unsigned int as the return value for irq_lock()
///
// Confidence: High
//
virtual patch
@find depends on !(file in "ext")@
type T;
identifier i;
typedef uint32_t,uint32_t;
@@
(
uint32_t i = irq_lock();
|
unsigned int i = irq_lock();
|
uint32_t i = irq_lock();
|
- T
+ unsigned int
i = irq_lock();
)
@find2 depends on !(file in "ext") exists@
type T;
identifier i;
@@
(
uint32_t i;
|
unsigned int i;
|
uint32_t i;
|
- T
+ unsigned int
i;
...
i = irq_lock();
)
``` | /content/code_sandbox/scripts/coccinelle/irq_lock.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 149 |
```unknown
// Convert legacy integer timeouts to timeout API
//
// Some existing code assumes that timeout parameters are provided as
// integer milliseconds, when they were intended to be timeout values
// produced by specific constants and macros. Convert integer
// literals and parameters to the desired equivalent
//
// A few expressions that are clearly integer values are also
// converted.
//
// Options: --include-headers
virtual patch
virtual report
// ** Handle timeouts at the last position of kernel API arguments
// Base rule provides the complex identifier regular expression
@r_last_timeout@
identifier last_timeout =~ "(?x)^k_
( delayed_work_submit(|_to_queue)
| futex_wait
| mbox_data_block_get
| (mbox|msgq)_get
| mem_(pool|slab)_alloc
| mutex_lock
| pipe_(get|put)
| poll
| queue_get
| sem_take
| sleep
| stack_pop
| thread_create
| timer_start
| work_poll_submit(|_to_queue)
)$";
@@
last_timeout(...)
// Identify call sites where an identifier is used for the timeout
@r_last_timeout_id
extends r_last_timeout
@
identifier D;
position p;
@@
last_timeout@p(..., D)
// Select call sites where a constant literal (not identifier) is used
// for the timeout and replace the constant with the appropriate macro
@r_last_timeout_const_patch
extends r_last_timeout
depends on patch
@
constant C;
position p != r_last_timeout_id.p;
@@
last_timeout@p(...,
(
- 0
+ K_NO_WAIT
|
- -1
+ K_FOREVER
|
- C
+ K_MSEC(C)
)
)
@r_last_timeout_const_report
extends r_last_timeout
depends on report
@
constant C;
position p != r_last_timeout_id.p;
@@
last_timeout@p(..., C)
@script:python
depends on report
@
fn << r_last_timeout.last_timeout;
p << r_last_timeout_const_report.p;
C << r_last_timeout_const_report.C;
@@
msg = "WARNING: replace constant {} with timeout in {}".format(C, fn)
coccilib.report.print_report(p[0], msg);
// ** Handle call sites where a timeout is specified by an expression
// ** scaled by MSEC_PER_SEC and replace with the corresponding
// ** K_SECONDS() expression.
@r_last_timeout_scaled_patch
extends r_last_timeout
depends on patch
@
// identifier K_MSEC =~ "^K_MSEC$";
symbol K_MSEC;
identifier MSEC_PER_SEC =~ "^MSEC_PER_SEC$";
expression V;
position p;
@@
last_timeout@p(...,
(
- MSEC_PER_SEC
+ K_SECONDS(1)
|
- V * MSEC_PER_SEC
+ K_SECONDS(V)
|
- K_MSEC(MSEC_PER_SEC)
+ K_SECONDS(1)
|
- K_MSEC(V * MSEC_PER_SEC)
+ K_SECONDS(V)
)
)
@r_last_timeout_scaled_report_req
extends r_last_timeout
depends on report
@
identifier MSEC_PER_SEC =~ "^MSEC_PER_SEC$";
expression V;
position p;
@@
last_timeout@p(...,
(
MSEC_PER_SEC
| V * MSEC_PER_SEC
)
)
@r_last_timeout_scaled_report_opt
extends r_last_timeout
depends on report
@
identifier MSEC_PER_SEC =~ "^MSEC_PER_SEC$";
expression V;
position p;
@@
last_timeout@p(...,
(
K_MSEC(MSEC_PER_SEC)
| K_MSEC(V * MSEC_PER_SEC)
)
)
@script:python
depends on report
@
fn << r_last_timeout.last_timeout;
p << r_last_timeout_scaled_report_req.p;
@@
msg = "WARNING: use K_SECONDS() for timeout in {}".format(fn)
coccilib.report.print_report(p[0], msg);
@script:python
depends on report
@
fn << r_last_timeout.last_timeout;
p << r_last_timeout_scaled_report_opt.p;
@@
msg = "NOTE: use K_SECONDS() for timeout in {}".format(fn)
coccilib.report.print_report(p[0], msg);
// ** Handle call sites where an integer parameter is used in a
// ** position that requires a timeout value.
@r_last_timeout_int_param_patch
extends r_last_timeout
depends on patch
@
identifier FN;
identifier P;
typedef int32_t, uint32_t;
@@
FN(...,
(int
|int32_t
|uint32_t
)
P, ...) {
...
last_timeout(...,
-P
+K_MSEC(P)
)
...
}
@r_last_timeout_int_param_report
extends r_last_timeout
depends on report
@
identifier FN;
identifier P;
position p;
typedef int32_t, uint32_t;
@@
FN(...,
(int
|int32_t
|uint32_t
)
P, ...) {
...
last_timeout@p(..., P)
...
}
@script:python
depends on report
@
param << r_last_timeout_int_param_report.P;
fn << r_last_timeout.last_timeout;
p << r_last_timeout_int_param_report.p;
@@
msg = "WARNING: replace integer parameter {} with timeout in {}".format(param, fn)
coccilib.report.print_report(p[0], msg);
// ** Convert timeout-valued delays in K_THREAD_DEFINE with durations
// ** in milliseconds
// Select declarers where the startup delay is a timeout expression
// and replace with the corresponding millisecond duration.
@r_thread_decl_patch
depends on patch@
declarer name K_THREAD_DEFINE;
identifier K_NO_WAIT =~ "^K_NO_WAIT$";
identifier K_FOREVER =~ "^K_FOREVER$";
expression E;
position p;
@@
K_THREAD_DEFINE@p(...,
(
- K_NO_WAIT
+ 0
|
- K_FOREVER
+ -1
|
- K_MSEC(E)
+ E
)
);
//
@r_thread_decl_report
depends on report@
declarer name K_THREAD_DEFINE;
identifier K_NO_WAIT =~ "^K_NO_WAIT$";
identifier K_FOREVER =~ "^K_FOREVER$";
expression V;
position p;
@@
K_THREAD_DEFINE@p(...,
(
K_NO_WAIT
|
K_FOREVER
|
K_MSEC(V)
)
);
@script:python
depends on report
@
p << r_thread_decl_report.p;
@@
msg = "WARNING: replace timeout-valued delay with millisecond duration".format()
coccilib.report.print_report(p[0], msg);
// ** Handle k_timer_start where the second (not last) argument is a
// ** constant literal.
// Select call sites where an identifier is used for the duration timeout
@r_timer_duration@
expression T;
identifier D;
expression I;
position p;
@@
k_timer_start@p(T, D, I)
// Select call sites where a constant literal (not identifier) is used
// for the timeout and replace the constant with the appropriate macro
@depends on patch@
expression T;
constant C;
expression I;
position p != r_timer_duration.p;
@@
k_timer_start@p(T,
(
- 0
+ K_NO_WAIT
|
- -1
+ K_FOREVER
|
- C
+ K_MSEC(C)
)
, I)
@r_timer_duration_report
depends on report
@
expression T;
constant C;
expression I;
position p != r_timer_duration.p;
@@
k_timer_start@p(T, C, I)
@script:python
depends on report
@
p << r_timer_duration_report.p;
C << r_timer_duration_report.C;
@@
msg = "WARNING: replace constant {} with duration timeout in k_timer_start".format(C)
coccilib.report.print_report(p[0], msg);
``` | /content/code_sandbox/scripts/coccinelle/int_ms_to_timeout.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,611 |
```unknown
/// sizeof when applied to a pointer typed expression gives the size of
/// the pointer
///
// Confidence: High
// URL: path_to_url
// Comments:
// Options: --no-includes --include-headers
virtual org
virtual report
virtual context
virtual patch
@depends on patch && !(file in "ext")@
expression *x;
expression f;
expression i;
type T;
@@
(
x = <+... sizeof(
- x
+ *x
) ...+>
|
f(...,(T)(x),...,sizeof(
- x
+ *x
),...)
|
f(...,sizeof(
- x
+ *x
),...,(T)(x),...)
|
f(...,(T)(x),...,i*sizeof(
- x
+ *x
),...)
|
f(...,i*sizeof(
- x
+ *x
),...,(T)(x),...)
)
@r depends on !patch && !(file in "ext")@
expression *x;
expression f;
expression i;
position p;
type T;
@@
(
*x = <+... sizeof@p(x) ...+>
|
*f(...,(T)(x),...,sizeof@p(x),...)
|
*f(...,sizeof@p(x),...,(T)(x),...)
|
*f(...,(T)(x),...,i*sizeof@p(x),...)
|
*f(...,i*sizeof@p(x),...,(T)(x),...)
)
@script:python depends on org@
p << r.p;
@@
cocci.print_main("application of sizeof to pointer",p)
@script:python depends on report@
p << r.p;
@@
msg = "ERROR: application of sizeof to pointer"
coccilib.report.print_report(p[0],msg)
``` | /content/code_sandbox/scripts/coccinelle/noderef.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 370 |
```unknown
// Check violations for rule 21.2 (reserved identifiers shall not be declared)
// path_to_url
//
// Confidence: Moderate
//
virtual report
@initialize:python@
@@
@common_case@
position p;
identifier t, v;
expression E;
type T;
@@
(
struct t *v@p;
|
struct t v@p;
|
union t v@p;
|
T v@p;
|
T *v@p;
|
struct t *v@p = E;
|
struct t v@p = E;
|
union t v@p = E;
|
T v@p = E;
|
T *v@p = E;
)
@ script:python @
v << common_case.v;
p << common_case.p;
@@
msg = "WARNING: Violation to rule 21.2 (Should not used a reserved identifier) - {}".format(v)
with open("scripts/coccinelle/symbols.txt", "r") as fp:
symbols = fp.read().splitlines()
if v in symbols:
coccilib.report.print_report(p[0], msg)
@function_match@
type T;
identifier f;
position p;
@@
T f@p(...) {
...
}
@ script:python @
v << function_match.f;
@@
msg = "WARNING: Violation to rule 21.2 (Should not used a reserved identifier) - {}".format(v)
with open("scripts/coccinelle/symbols.txt", "r") as fp:
symbols = fp.read().splitlines()
if v in symbols:
coccilib.report.print_report(p[0], msg)
@function_parameter@
type T1, T2;
identifier function, v;
position p;
parameter list[n] P1;
parameter list[n1] P2;
@@
T1 function(P1, T2 *v@p, P2) {
...
}
@ script:python @
v << function_parameter.v;
p << function_parameter.p;
@@
msg = "WARNING: Violation to rule 21.2 (Should not used a reserved identifier) - {}".format(v)
with open("scripts/coccinelle/symbols.txt", "r") as fp:
symbols = fp.read().splitlines()
if v in symbols:
coccilib.report.print_report(p[0], msg)
``` | /content/code_sandbox/scripts/coccinelle/reserved_names.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 482 |
```unknown
// In patch mode, patch all device instance to const (if not already).
// In report mode:
// Generate a q&d python database (a dict actually) of all the
// declared zephyr functions. It will store each function name in 2
// separate dicts: one storing all function having 1+ void* parameter
// and one for all the other functions. It will store the positions
// of the void* parameter in the first dict, and the actual number of
// parameters in the second.
// Then find_dev_usage.cocci can be used to verify if device instance
// are not losing their const qualifier.
virtual patch
virtual report
////////////////////
// Initialization //
////////////////////
@initialize:python
depends on report
@
@@
import pickle
f_void = {}
f_other = {}
// Insert a function into right dict depending on parameters
def insert_function(f, params):
void_pos = []
i = 0
for prm in params:
if prm.startswith("void *"):
void_pos.append(i)
i += 1
if len(void_pos) != 0:
f_void[f] = void_pos
else:
f_other[f] = i + 1
///////////
// Rules //
///////////
// Switch device instance to constant.
@r_const_dev
depends on patch
disable optional_qualifier
@
@@
-struct device *
+const struct device *
// Find function declarations
@r_find_func_declare
depends on report
@
identifier f;
type ret_type;
parameter list[nb_params] params;
@@
ret_type f(params);
// Insert function declaration
@script:python
depends on report
@
f << r_find_func_declare.f;
params << r_find_func_declare.params;
@@
insert_function(f, params)
// Find function implementations and inlines
// (maybe it should focus on static only?
// but then first rule should not match statics.)
@r_find_func_impl_inlines
depends on report
@
identifier f;
type ret_type;
parameter list[nb_params] params;
@@
(
ret_type f(params)
{
...
}
|
static inline ret_type f(params)
{
...
}
)
// Insert function implementations and inlines
@script:python
depends on report
@
f << r_find_func_impl_inlines.f;
params << r_find_func_impl_inlines.params;
@@
insert_function(f, params)
//////////////////
// Finalization //
//////////////////
@finalize:python
depends on report
@
@@
with open("function_names.pickle", "wb") as f:
data = {}
data['f_void'] = f_void
data['f_other'] = f_other
pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
``` | /content/code_sandbox/scripts/coccinelle/find_functions.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 595 |
```unknown
/// Unsigned expressions cannot be less than zero. Presence of
/// comparisons 'unsigned (<|<=) 0' often indicates a bug,
/// usually wrong type of variable.
///
// Confidence: High
// URL: path_to_url
virtual org
virtual report
@r_cmp depends on !(file in "ext")@
position p;
typedef uint8_t, uint16_t, uint32_t, uint64_t;
{unsigned char, unsigned short, unsigned int, unsigned long, unsigned long long,
size_t, uint8_t, uint16_t, uint32_t, uint64_t} v;
@@
(\( v@p < 0 \| v@p <= 0 \))
@script:python depends on org@
p << r_cmp.p;
@@
msg = "WARNING: Unsigned expression compared with zero."
coccilib.org.print_todo(p[0], msg)
@script:python depends on report@
p << r_cmp.p;
@@
msg = "WARNING: Unsigned expression compared with zero."
coccilib.report.print_report(p[0], msg)
``` | /content/code_sandbox/scripts/coccinelle/unsigned_lesser_than_zero.cocci | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 223 |
```linker script
/*
*
*/
/**
* @file
* @brief Linker command/script file for the native simulator runner
*/
#define NSI_INIT_LEVEL(level) \
__nsi_##level##_tasks_start = .; \
KEEP(*(SORT(.nsi_##level[0-9]_task))); \
KEEP(*(SORT(.nsi_##level[1-9][0-9]_task))); \
KEEP(*(SORT(.nsi_##level[1-9][0-9][0-9]_task))); \
SECTIONS
{
nsi_tasks :
{
__nsi_tasks_start = .;
NSI_INIT_LEVEL(PRE_BOOT_1)
NSI_INIT_LEVEL(PRE_BOOT_2)
NSI_INIT_LEVEL(HW_INIT)
NSI_INIT_LEVEL(PRE_BOOT_3)
NSI_INIT_LEVEL(FIRST_SLEEP)
NSI_INIT_LEVEL(ON_EXIT_PRE)
NSI_INIT_LEVEL(ON_EXIT_POST)
__nsi_tasks_end = .;
}
nsi_hw_events :
{
__nsi_hw_events_start = .;
KEEP(*(SORT(.nsi_hw_event_[0-9]))); \
KEEP(*(SORT(.nsi_hw_event_[1-9][0-9]))); \
KEEP(*(SORT(.nsi_hw_event_[1-9][0-9][0-9])));
__nsi_hw_events_end = .;
}
} INSERT AFTER .data;
/*
* Note this script augments the default host linker script
*/
``` | /content/code_sandbox/scripts/native_simulator/common/other/linker_script.pre.ld | linker script | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 343 |
```objective-c
/*
*
*/
#ifndef NSI_COMMON_SRC_NSI_CONFIG_H
#define NSI_COMMON_SRC_NSI_CONFIG_H
#ifndef NSI_N_CPUS
#define NSI_N_CPUS 1
#endif
#endif /* NSI_COMMON_SRC_NSI_CONFIG_H */
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_config.h | objective-c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 53 |
```c
/*
*
*/
#include "nsi_cpu_if.h"
/*
* These trampolines forward a call from the runner into the corresponding embedded CPU hook
* for ex., nsif_cpun_boot(4) -> nsif_cpu4_boot()
*/
TRAMPOLINES(nsif_cpu, _pre_cmdline_hooks)
TRAMPOLINES(nsif_cpu, _pre_hw_init_hooks)
TRAMPOLINES(nsif_cpu, _boot)
TRAMPOLINES_i_(nsif_cpu, _cleanup)
TRAMPOLINES(nsif_cpu, _irq_raised)
TRAMPOLINES(nsif_cpu, _irq_raised_from_sw)
TRAMPOLINES_i_vp(nsif_cpu, _test_hook)
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_cpun_if.c | c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 146 |
```unknown
# Native Simulator (NSI) Makefile.
# It builds the simulator runner itself, and produces the final
# Linux executable by linking it to the embedded cpu library
# By default all the build output is placed under the _build folder, but the user can override it
# setting the NSI_BUILD_PATH
#
# The caller can provide an optional configuration file and point to it with NSI_CONFIG_FILE
# See "Configurable/user overridible variables" below.
#
# By default only the core of the runner will be built, but the caller can set NSI_NATIVE
# to also build the components in native/src/
NSI_CONFIG_FILE?=nsi_config
-include ${NSI_CONFIG_FILE}
#If the file does not exist, we don't use it as a build dependency
NSI_CONFIG_FILE:=$(wildcard ${NSI_CONFIG_FILE})
# Configurable/user overridible variables:
# Path to the native_simulator (this folder):
NSI_PATH?=./
# Folder where the build output will be placed
NSI_BUILD_PATH?=$(abspath _build/)
EXE_NAME?=native_simulator.exe
# Final executable path/file_name which will be produced
NSI_EXE?=${NSI_BUILD_PATH}/${EXE_NAME}
# Number of embedded CPUs/MCUs
NSI_N_CPUS?=1
# Path to all CPUs embedded SW which will be linked with the final executable
NSI_EMBEDDED_CPU_SW?=
# Host architecture configuration switch
NSI_ARCH?=-m32
# Coverage switch (GCOV coverage is enabled by default)
NSI_COVERAGE?=--coverage
NSI_LOCALIZE_OPTIONS?=
NSI_BUILD_OPTIONS?=${NSI_ARCH} ${NSI_COVERAGE}
NSI_LINK_OPTIONS?=${NSI_ARCH} ${NSI_COVERAGE}
# Extra source files to be built in the runner context
NSI_EXTRA_SRCS?=
# Extra include directories to be used while building in the runner context
NSI_EXTRA_INCLUDES?=
# Extra libraries to be linked to the final executable
NSI_EXTRA_LIBS?=
SHELL?=bash
# Compiler
NSI_CC?=gcc
# Archive program (it is unlikely you'll need to change this)
NSI_AR?=ar
# Objcopy program (it is unlikely you'll need to change this)
NSI_OBJCOPY?=objcopy
# Build debug switch (by default enabled)
NSI_DEBUG?=-g
# Build optimization level (by default disabled to ease debugging)
NSI_OPT?=-O0
# Warnings switches (for the runner itself)
NSI_WARNINGS?=-Wall -Wpedantic
# Preprocessor flags
NSI_CPPFLAGS?=-D_POSIX_C_SOURCE=200809L -D_XOPEN_SOURCE=600 -D_XOPEN_SOURCE_EXTENDED
NO_PIE_CO:=-fno-pie -fno-pic
DEPENDFLAGS:=-MMD -MP
CFLAGS:=${NSI_DEBUG} ${NSI_WARNINGS} ${NSI_OPT} ${NO_PIE_CO} -DNSI_N_CPUS=${NSI_N_CPUS} \
-ffunction-sections -fdata-sections ${DEPENDFLAGS} -std=c11 ${NSI_BUILD_OPTIONS}
FINALLINK_FLAGS:=${NO_PIE_CO} -no-pie ${NSI_WARNINGS} \
-Wl,--gc-sections -ldl -pthread \
${NSI_LINK_OPTIONS} -lm
no_default:
@echo "There is no default rule, please specify what you want to build,\
or run make help for more info"
RUNNER_LIB:=runner.a
SRCS:=$(shell ls ${NSI_PATH}common/src/*.c)
ifdef NSI_NATIVE
SRCS+=$(shell ls ${NSI_PATH}native/src/*.c)
endif
INCLUDES:=-I${NSI_PATH}common/src/include/ \
-I${NSI_PATH}common/src \
${NSI_EXTRA_INCLUDES}
ifdef NSI_NATIVE
INCLUDES+=-I${NSI_PATH}native/src/include/
endif
EXTRA_OBJS:=$(abspath $(addprefix $(NSI_BUILD_PATH)/,$(sort ${NSI_EXTRA_SRCS:%.c=%.o})))
OBJS:=$(abspath $(addprefix $(NSI_BUILD_PATH)/,${SRCS:${NSI_PATH}%.c=%.o})) ${EXTRA_OBJS}
DEPENDFILES:=$(addsuffix .d,$(basename ${OBJS}))
-include ${DEPENDFILES}
LOCALIZED_EMBSW:=$(abspath $(addprefix $(NSI_BUILD_PATH)/,$(addsuffix .loc_cpusw.o,${NSI_EMBEDDED_CPU_SW})))
${NSI_BUILD_PATH}:
@if [ ! -d ${NSI_BUILD_PATH} ]; then mkdir -p ${NSI_BUILD_PATH}; fi
#Extra sources build:
${NSI_BUILD_PATH}/%.o: /%.c ${NSI_PATH}Makefile ${NSI_CONFIG_FILE}
@if [ ! -d $(dir $@) ]; then mkdir -p $(dir $@); fi
${NSI_CC} ${NSI_CPPFLAGS} ${INCLUDES} ${CFLAGS} -c $< -o $@
${NSI_BUILD_PATH}/%.o: ${NSI_PATH}/%.c ${NSI_PATH}Makefile ${NSI_CONFIG_FILE}
@if [ ! -d $(dir $@) ]; then mkdir -p $(dir $@); fi
${NSI_CC} ${NSI_CPPFLAGS} ${INCLUDES} ${CFLAGS} -c $< -o $@
${NSI_BUILD_PATH}/linker_script.ld : ${NSI_PATH}/common/other/linker_script.pre.ld | ${NSI_BUILD_PATH}
${NSI_CC} -x c -E -P $< -o $@ ${DEPENDFLAGS}
${NSI_BUILD_PATH}/${RUNNER_LIB}: ${OBJS}
if [ -f $@ ]; then rm $@ ; fi
${NSI_AR} -cr $@ ${OBJS}
${NSI_BUILD_PATH}/%.loc_cpusw.o: /% ${NSI_CONFIG_FILE}
@if [ -z $< ] || [ ! -f $< ]; then \
echo "Error: Input embedded CPU SW ($<) not found \
(NSI_EMBEDDED_CPU_SW=${NSI_EMBEDDED_CPU_SW} )"; \
false; \
fi
@if [ ! -d $(dir $@) ]; then mkdir -p $(dir $@); fi
${NSI_OBJCOPY} --localize-hidden $< $@ -w --localize-symbol=_* ${NSI_LOCALIZE_OPTIONS}
${NSI_EXE}: ${NSI_BUILD_PATH}/${RUNNER_LIB} ${LOCALIZED_EMBSW} ${NSI_EXTRA_LIBS} \
${NSI_BUILD_PATH}/linker_script.ld
${NSI_CC} -Wl,--whole-archive ${LOCALIZED_EMBSW} ${NSI_BUILD_PATH}/${RUNNER_LIB} \
${NSI_EXTRA_LIBS} -Wl,--no-whole-archive \
-o $@ ${FINALLINK_FLAGS} -T ${NSI_BUILD_PATH}/linker_script.ld
Makefile: ;
link_with_esw: ${NSI_EXE};
runner_lib: ${NSI_BUILD_PATH}/${RUNNER_LIB}
all: link_with_esw
clean:
@echo "Deleting intermediate compilation results + libraries + executables (*.d .o .a .exe)"
find $(NSI_BUILD_PATH) -name "*.o" -or -name "*.exe" -or -name "*.a" -or -name "*.d" | xargs rm -f
clean_coverage:
find $(NSI_BUILD_PATH) -name "*.gcda" -or -name "*.gcno" | xargs rm -f ; true
clean_all: clean clean_coverage ;
.PHONY: clean clean_coverage clean_all link_with_esw runner_lib no_default all ${DEPENDFILES}
ifndef NSI_BUILD_VERBOSE
.SILENT:
endif
help:
@echo "*******************************"
@echo "* Native Simulator makefile *"
@echo "*******************************"
@echo "Provided rules:"
@echo " clean : clean all build output"
@echo " clean_coverage : clean all coverage files"
@echo " clean_all : clean + clean_coverage"
@echo " link_with_esw : Link the runner with the CPU embedded sw"
@echo " runner_lib : Build the runner itself (pending the embedded SW)"
@echo " all : link_with_esw"
@echo "Note that you can use TAB to autocomplete rules in the command line in modern OSs"
``` | /content/code_sandbox/scripts/native_simulator/Makefile | unknown | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,846 |
```objective-c
/*
*
*/
#ifndef NSI_COMMON_SRC_NSI_TASKS_H
#define NSI_COMMON_SRC_NSI_TASKS_H
#include "nsi_utils.h"
#ifdef __cplusplus
extern "C" {
#endif
#define NSITASK_PRE_BOOT_1_LEVEL 0
#define NSITASK_PRE_BOOT_2_LEVEL 1
#define NSITASK_HW_INIT_LEVEL 2
#define NSITASK_PRE_BOOT_3_LEVEL 3
#define NSITASK_FIRST_SLEEP_LEVEL 4
#define NSITASK_ON_EXIT_PRE_LEVEL 5
#define NSITASK_ON_EXIT_POST_LEVEL 6
/**
* NSI_TASK
*
* Register a function to be called at particular moments
* during the Native Simulator execution.
*
* There are seven choices for when the function will be called (level):
* * PRE_BOOT_1: Will be called before the command line parameters are parsed,
* or the HW models are initialized
*
* * PRE_BOOT_2: Will be called after the command line parameters are parsed,
* but before the HW models are initialized
*
* * HW_INIT: Will be called during HW models initialization
*
* * PRE_BOOT_3: Will be called after the HW models initialization, right before
* the "CPUs are booted" and embedded SW in them is started.
*
* * FIRST_SLEEP: Will be called after the 1st time all CPUs are sent to sleep
*
* * ON_EXIT_PRE: Will be called during termination of the runner
* execution, as a first set.
*
* * ON_EXIT_POST: Will be called during termination of the runner
* execution, as the very last set before the program returns.
*
* The function must take no parameters and return nothing.
*/
#define NSI_TASK(fn, level, prio) \
static void (* const NSI_CONCAT(__nsi_task_, fn))(void) \
__attribute__((__used__)) NSI_NOASAN \
__attribute__((__section__(".nsi_" #level NSI_STRINGIFY(prio) "_task")))\
= fn; \
/* Let's cross-check the macro level is a valid one, so we don't silently drop it */ \
_Static_assert(NSITASK_##level##_LEVEL >= 0, \
"Using a non pre-defined level, it will be dropped")
/**
* @brief Run the set of special native tasks corresponding to the given level
*
* @param level One of NSITASK_*_LEVEL as defined above in this header
*/
void nsi_run_tasks(int level);
#ifdef __cplusplus
}
#endif
#endif /* NSI_COMMON_SRC_NSI_TASKS_H */
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_tasks.h | objective-c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 550 |
```objective-c
/*
*
*/
#ifndef NSI_COMMON_SRC_NSI_CPUN_IF_H
#define NSI_COMMON_SRC_NSI_CPUN_IF_H
#ifdef __cplusplus
extern "C" {
#endif
/*
* Equivalent interfaces to nsi_cpu<n>_* but for the native simulator internal use
*/
void nsif_cpun_pre_cmdline_hooks(int n);
void nsif_cpun_pre_hw_init_hooks(int n);
void nsif_cpun_boot(int n);
int nsif_cpun_cleanup(int n);
void nsif_cpun_irq_raised(int n);
void nsif_cpun_irq_raised_from_sw(int n);
void nsif_cpun_test_hook(int n, void *p);
#ifdef __cplusplus
}
#endif
#endif /* NSI_COMMON_SRC_NSI_CPUN_IF_H */
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_cpun_if.h | objective-c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 159 |
```c
/*
*
*/
#include <stdarg.h>
#include "nsi_tracing.h"
/*
 * Report a fatal error and terminate the runner.
 *
 * Varargs front-end: packs the variadic arguments into a va_list and
 * forwards everything to nsi_vprint_error_and_exit(), which does the
 * actual formatting/printing and exits.
 */
void nsi_print_error_and_exit(const char *format, ...)
{
    va_list args;

    va_start(args, format);
    nsi_vprint_error_and_exit(format, args);
    va_end(args);
}
/*
 * Print a warning message.
 *
 * Varargs front-end: packs the variadic arguments into a va_list and
 * forwards them to nsi_vprint_warning(), which does the actual output.
 */
void nsi_print_warning(const char *format, ...)
{
    va_list args;

    va_start(args, format);
    nsi_vprint_warning(format, args);
    va_end(args);
}
/*
 * Print a trace message.
 *
 * Varargs front-end: packs the variadic arguments into a va_list and
 * forwards them to nsi_vprint_trace(), which does the actual output.
 */
void nsi_print_trace(const char *format, ...)
{
    va_list args;

    va_start(args, format);
    nsi_vprint_trace(format, args);
    va_end(args);
}
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_trace_varg.c | c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 141 |
```c
/*
*
*
* See description in header
*/
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include <string.h>
/* Allocate zero-initialized memory for nmemb elements of size bytes (host calloc()). */
void *nsi_host_calloc(unsigned long nmemb, unsigned long size)
{
    return calloc(nmemb, size);
}
/* Close a host file descriptor (host close()). Returns 0 on success, -1 on error. */
int nsi_host_close(int fd)
{
    return close(fd);
}
/* Free memory previously obtained from one of the nsi_host_*alloc() trampolines. */
void nsi_host_free(void *ptr)
{
    free(ptr);
}
/* Copy the current working directory into buf (host getcwd()). */
char *nsi_host_getcwd(char *buf, unsigned long size)
{
    return getcwd(buf, size);
}
/* Return nonzero if fd refers to a terminal (host isatty()). */
int nsi_host_isatty(int fd)
{
    return isatty(fd);
}
/* Allocate size bytes of uninitialized memory (host malloc()). */
void *nsi_host_malloc(unsigned long size)
{
    return malloc(size);
}
/* Open a file (host open()).
 * NOTE(review): no mode argument is forwarded, so callers presumably must not
 * pass O_CREAT in flags — confirm against users of this trampoline. */
int nsi_host_open(const char *pathname, int flags)
{
    return open(pathname, flags);
}
/* Return a pseudo-random number from the host's random() generator. */
long nsi_host_random(void)
{
    return random();
}
/* Read up to size bytes from fd into buffer (host read()). Returns bytes read or -1. */
long nsi_host_read(int fd, void *buffer, unsigned long size)
{
    return read(fd, buffer, size);
}
/* Resize a previous allocation (host realloc()). */
void *nsi_host_realloc(void *ptr, unsigned long size)
{
    return realloc(ptr, size);
}
/* Seed the host's random() generator (host srandom()). */
void nsi_host_srandom(unsigned int seed)
{
    srandom(seed);
}
/* Duplicate a string into freshly malloc'ed memory (host strdup()). */
char *nsi_host_strdup(const char *s)
{
    return strdup(s);
}
/* Write up to size bytes from buffer to fd (host write()). Returns bytes written or -1. */
long nsi_host_write(int fd, const void *buffer, unsigned long size)
{
    return write(fd, buffer, size);
}
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_host_trampolines.c | c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 284 |
```objective-c
/*
*
*/
#ifndef NSI_COMMON_SRC_NSI_INTERNAL_H
#define NSI_COMMON_SRC_NSI_INTERNAL_H
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
/**
 * @brief find least significant bit set in a 32-bit word
 *
 * Returns the 1-based index of the lowest set bit of @a op
 * (bit 0 -> 1, bit 31 -> 32). A return value of zero indicates
 * that the value passed is zero.
 *
 * @return least significant bit set, 0 if @a op is 0
 */
static inline unsigned int nsi_find_lsb_set(uint32_t op)
{
    if (op == 0) {
        return 0;
    }
    /* ffs(x) == ctz(x) + 1 for any non-zero x */
    return (unsigned int)__builtin_ctz(op) + 1;
}
/**
 * @brief find least significant bit set in a 64-bit word
 *
 * Returns the 1-based index of the lowest set bit of @a op
 * (bit 0 -> 1, bit 63 -> 64). A return value of zero indicates
 * that the value passed is zero.
 *
 * @return least significant bit set, 0 if @a op is 0
 */
static inline unsigned int nsi_find_lsb_set64(uint64_t op)
{
    /* ffsll(x) == ctzll(x) + 1 for non-zero x; 0 maps to 0 */
    return (op == 0) ? 0 : (unsigned int)__builtin_ctzll(op) + 1;
}
#ifdef __cplusplus
}
#endif
#endif /* NSI_COMMON_SRC_NSI_INTERNAL_H */
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_internal.h | objective-c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 302 |
```c
/*
*
*/
#include <stdbool.h>
#include "nsi_config.h"
#include "nsi_cpun_if.h"
#include "nsi_tracing.h"
/* Which CPUs boot automatically at startup.
 * Only the first core starts on its own; the rest must be booted explicitly */
static bool cpu_auto_start[NSI_N_CPUS] = {true};
/* Which CPUs have already been booted */
static bool cpu_booted[NSI_N_CPUS];

/*
 * Error out if <cpu_n> is out of range.
 * Wrapped in do { } while (0) so the macro expands to a single statement
 * and is safe to use in unbraced if/else bodies.
 */
#define CPU_N_RANGE_CHECK(cpu_n) \
	do { \
		if (cpu_n >= NSI_N_CPUS) { \
			nsi_print_error_and_exit("%s called with cpu_n(%i) >= NSI_N_CPUS (%i)\n", \
						 __func__, cpu_n, NSI_N_CPUS); \
		} \
	} while (0)
/* Configure whether CPU <cpu_n> will be booted automatically by
 * nsi_cpu_auto_boot(). Errors out if cpu_n is out of range. */
void nsi_cpu_set_auto_start(int cpu_n, bool auto_start)
{
	CPU_N_RANGE_CHECK(cpu_n);

	cpu_auto_start[cpu_n] = auto_start;
}
/* Return whether CPU <cpu_n> will be booted automatically by
 * nsi_cpu_auto_boot(). Errors out if cpu_n is out of range. */
bool nsi_cpu_get_auto_start(int cpu_n)
{
	/* Range check added for consistency with nsi_cpu_set_auto_start()
	 * (otherwise an out-of-range cpu_n would read out of bounds) */
	CPU_N_RANGE_CHECK(cpu_n);
	return cpu_auto_start[cpu_n];
}
/* Boot every CPU whose auto-start flag is set, marking each as booted */
void nsi_cpu_auto_boot(void)
{
	for (int cpu = 0; cpu < NSI_N_CPUS; cpu++) {
		if (!cpu_auto_start[cpu]) {
			continue;
		}
		cpu_booted[cpu] = true;
		nsif_cpun_boot(cpu);
	}
}
/* Boot CPU <cpu_n> now. Errors out if cpu_n is out of range;
 * warns (but proceeds) if that CPU was already booted. */
void nsi_cpu_boot(int cpu_n)
{
	CPU_N_RANGE_CHECK(cpu_n);

	if (cpu_booted[cpu_n]) {
		nsi_print_warning("%s called with cpu_n(%i) which was already booted\n",
				  __func__, cpu_n);
	}

	cpu_booted[cpu_n] = true;
	nsif_cpun_boot(cpu_n);
}
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_cpu_ctrl.c | c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 334 |
```c
/*
*
*/
/*
* Native simulator CPU emulator,
* an *optional* module provided by the native simulator
* the hosted embedded OS / SW can use to emulate the CPU
* being started and stopped.
*
* Its mode of operation is that it step-locks the HW
* and SW operation, so that only one of them executes at
* a time. Check the docs for more info.
*/
#include <pthread.h>
#include <stdbool.h>
#include <unistd.h>
#include <stdlib.h>
#include "nce_if.h"
#include "nsi_safe_call.h"
/* Overall state of one instance of the CPU start/stop emulator */
struct nce_status_t {
	/* Conditional variable to know if the CPU is running or halted/idling */
	pthread_cond_t cond_cpu;
	/* Mutex for the conditional variable cond_cpu */
	pthread_mutex_t mtx_cpu;
	/* Variable which tells if the CPU is halted (1) or not (0) */
	bool cpu_halted;
	bool terminate; /* Are we terminating the program == cleaning up */
	/* Hosted SW entry point executed by the pthread spawned in nce_boot_cpu() */
	void (*start_routine)(void);
};
/* Set to 1 to enable verbose tracing from this module */
#define NCE_DEBUG_PRINTS 0

/* Prefix for all messages printed by this module */
#define PREFIX "NCE: "
#define ERPREFIX PREFIX"error on "
#define NO_MEM_ERR PREFIX"Can't allocate memory\n"

#if NCE_DEBUG_PRINTS
#define NCE_DEBUG(fmt, ...) nsi_print_trace(PREFIX fmt, __VA_ARGS__)
#else
#define NCE_DEBUG(...)
#endif

/* Provided by the native simulator runner */
extern void nsi_exit(int exit_code);
/*
* Initialize an instance of the native simulator CPU emulator
* and return a pointer to it.
* That pointer should be passed to all subsequent calls to this module.
*/
void *nce_init(void)
{
	struct nce_status_t *st = calloc(1, sizeof(struct nce_status_t));

	/* Without state memory nothing sensible can be done => fatal */
	if (st == NULL) { /* LCOV_EXCL_BR_LINE */
		nsi_print_error_and_exit(NO_MEM_ERR); /* LCOV_EXCL_LINE */
	}

	/* A freshly created emulated CPU starts halted, with no shutdown pending */
	st->cpu_halted = true;
	st->terminate = false;

	NSI_SAFE_CALL(pthread_cond_init(&st->cond_cpu, NULL));
	NSI_SAFE_CALL(pthread_mutex_init(&st->mtx_cpu, NULL));

	return (void *)st;
}
/*
* This function will:
*
* If called from a SW thread, release the HW thread which is blocked in
* a nce_wake_cpu() and never return.
*
* If called from a HW thread, do the necessary clean up of this nce instance
* and return right away.
*/
void nce_terminate(void *this_arg)
{
	struct nce_status_t *this = (struct nce_status_t *)this_arg;

	/* LCOV_EXCL_START */ /* See Note1 */
	/*
	 * If we are being called from a HW thread we can cleanup
	 *
	 * Otherwise (!cpu_halted) we give back control to the HW thread and
	 * tell it to terminate ASAP
	 */
	if (this == NULL || this->cpu_halted) {
		/*
		 * Note: The nce_status structure cannot be safely free'd up
		 * as the user is allowed to call nce_clean_up()
		 * repeatedly on the same structure.
		 * Instead we rely of on the host OS process cleanup.
		 * If you got here due to valgrind's leak report, please use the
		 * provided valgrind suppression file valgrind.supp
		 */
		return;
	} else if (this->terminate == false) {
		this->terminate = true;

		/* Halt the "CPU" and wake whoever is blocked on cond_cpu
		 * (nce_boot_cpu()/nce_wake_cpu()): they check terminate and
		 * will call nsi_exit() */
		NSI_SAFE_CALL(pthread_mutex_lock(&this->mtx_cpu));
		this->cpu_halted = true;
		NSI_SAFE_CALL(pthread_cond_broadcast(&this->cond_cpu));
		NSI_SAFE_CALL(pthread_mutex_unlock(&this->mtx_cpu));

		while (1) {
			sleep(1);
			/* This SW thread will wait until being cancelled from
			 * the HW thread. sleep() is a cancellation point, so it
			 * won't really wait 1 second
			 */
		}
	}
	/* LCOV_EXCL_STOP */
}
/**
* Helper function which changes the status of the CPU (halted or running)
* and waits until somebody else changes it to the opposite
*
* Both HW and SW threads will use this function to transfer control to the
* other side.
*
* This is how the idle thread halts the CPU and gets halted until the HW models
* raise a new interrupt; and how the HW models awake the CPU, and wait for it
* to complete and go to idle.
*/
static void change_cpu_state_and_wait(struct nce_status_t *this, bool halted)
{
	NSI_SAFE_CALL(pthread_mutex_lock(&this->mtx_cpu));

	NCE_DEBUG("Going to halted = %d\n", halted);

	this->cpu_halted = halted;

	/* We let the other side know the CPU has changed state */
	NSI_SAFE_CALL(pthread_cond_broadcast(&this->cond_cpu));

	/* We wait until the CPU state has been changed. Either:
	 * we just awoke it, and therefore wait until the CPU has run until
	 * completion before continuing (before letting the HW models do
	 * anything else)
	 *  or
	 * we are just hanging it, and therefore wait until the HW models awake
	 * it again
	 */
	while (this->cpu_halted == halted) {
		/* Here we unlock the mutex while waiting */
		/* NOTE(review): unlike the other pthread calls in this module,
		 * the return value of pthread_cond_wait() is not checked here */
		pthread_cond_wait(&this->cond_cpu, &this->mtx_cpu);
	}

	NCE_DEBUG("Awaken after halted = %d\n", halted);

	NSI_SAFE_CALL(pthread_mutex_unlock(&this->mtx_cpu));
}
/*
* Helper function that wraps the SW start_routine
*/
/* pthread entry point which runs the hosted SW start routine */
static void *sw_wrapper(void *this_arg)
{
	struct nce_status_t *st = (struct nce_status_t *)this_arg;

	/* Synchronize with nce_boot_cpu(): once we can take (and release) the
	 * mutex, the booting thread has reached its condition wait loop */
	NSI_SAFE_CALL(pthread_mutex_lock(&st->mtx_cpu));
	NSI_SAFE_CALL(pthread_mutex_unlock(&st->mtx_cpu));

#if (NCE_DEBUG_PRINTS)
	pthread_t sw_thread = pthread_self();

	NCE_DEBUG("SW init started (%lu)\n",
		sw_thread);
#endif

	st->start_routine();
	return NULL;
}
/*
* Boot the emulated CPU, that is:
* * Spawn a new pthread which will run the first embedded SW thread <start_routine>
* * Hold the caller until that embedded SW thread (or a child it spawns)
* calls nce_halt_cpu()
*
* Note that during this, an embedded SW thread may call nsi_exit(), which would result
* in this function never returning.
*/
void nce_boot_cpu(void *this_arg, void (*start_routine)(void))
{
	struct nce_status_t *this = (struct nce_status_t *)this_arg;

	/* Holding the mutex from here ensures the new pthread (sw_wrapper)
	 * cannot run the SW before we are blocked in the wait loop below */
	NSI_SAFE_CALL(pthread_mutex_lock(&this->mtx_cpu));

	this->cpu_halted = false;
	this->start_routine = start_routine;

	/* Create a thread for the embedded SW init: */
	pthread_t sw_thread;

	NSI_SAFE_CALL(pthread_create(&sw_thread, NULL, sw_wrapper, this_arg));

	/* And we wait until the embedded OS has send the CPU to sleep for the first time */
	while (this->cpu_halted == false) {
		pthread_cond_wait(&this->cond_cpu, &this->mtx_cpu);
	}
	NSI_SAFE_CALL(pthread_mutex_unlock(&this->mtx_cpu));

	/* The SW may have requested termination while it ran (see nce_terminate()) */
	if (this->terminate) {
		nsi_exit(0);
	}
}
/*
* Halt the CPU, that is:
* * Hold this embedded SW thread until the CPU is awaken again,
* and release the HW thread which had been held on
* nce_boot_cpu() or nce_wake_cpu().
*
* Note: Can only be called from embedded SW threads
* Calling it from a HW thread is a programming error.
*/
void nce_halt_cpu(void *this_arg)
{
	struct nce_status_t *st = (struct nce_status_t *)this_arg;

	/* Only a running CPU may halt itself; halting twice is a hosted-OS bug */
	if (st->cpu_halted) {
		nsi_print_error_and_exit("Programming error on: %s ",
					"This CPU was already halted\n");
	}
	change_cpu_state_and_wait(st, true);
}
/*
* Awake the CPU, that is:
* * Hold this HW thread until the CPU is set to idle again
* * Release the SW thread which had been held on nce_halt_cpu()
*
* Note: Can only be called from HW threads
* Calling it from a SW thread is a programming error.
*/
void nce_wake_cpu(void *this_arg)
{
	struct nce_status_t *st = (struct nce_status_t *)this_arg;

	/* Only a halted CPU may be woken; waking twice is a hosted-OS bug */
	if (!st->cpu_halted) {
		nsi_print_error_and_exit("Programming error on: %s ",
					"This CPU was already awake\n");
	}
	change_cpu_state_and_wait(st, false);

	/*
	 * If while the SW was running it was decided to terminate the execution
	 * we stop immediately.
	 */
	if (st->terminate) {
		nsi_exit(0);
	}
}
/*
* Return 0 if the CPU is sleeping (or terminated)
* and !=0 if the CPU is running
*/
int nce_is_cpu_running(void *this_arg)
{
struct nce_status_t *this = (struct nce_status_t *)this_arg;
if (this != NULL) {
return !this->cpu_halted;
} else {
return false;
}
}
/*
* Notes about coverage:
*
* Note1: When the application is closed due to a SIGTERM, the path in this
* function will depend on when that signal was received. Typically during a
* regression run, both paths will be covered. But in some cases they won't.
* Therefore and to avoid confusing developers with spurious coverage changes
* we exclude this function from the coverage check
*/
``` | /content/code_sandbox/scripts/native_simulator/common/src/nce.c | c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 2,067 |
```c
/*
*
*/
/*
* Native simulator entry point (main)
*
* Documentation can be found starting in docs/README.md
*/
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include "nsi_cpun_if.h"
#include "nsi_tasks.h"
#include "nsi_cmdline_main_if.h"
#include "nsi_utils.h"
#include "nsi_hw_scheduler.h"
#include "nsi_config.h"
#include "nsi_cpu_ctrl.h"
/* Run all exit/cleanup steps and return the highest exit code seen */
int nsi_exit_inner(int exit_code)
{
	/* static: this function can effectively be re-entered (see comment
	 * below), and the highest exit code must survive across those calls */
	static int max_exit_code;
	int cpu_ret;

	max_exit_code = NSI_MAX(exit_code, max_exit_code);
	/*
	 * nsif_cpun_cleanup may not return if this is called from a SW thread,
	 * but instead it would get nsi_exit() recalled again
	 * ASAP from the HW thread
	 */
	for (int i = 0; i < NSI_N_CPUS; i++) {
		cpu_ret = nsif_cpun_cleanup(i);
		max_exit_code = NSI_MAX(cpu_ret, max_exit_code);
	}

	nsi_run_tasks(NSITASK_ON_EXIT_PRE_LEVEL);
	nsi_hws_cleanup();
	nsi_run_tasks(NSITASK_ON_EXIT_POST_LEVEL);
	return max_exit_code;
}
/* Clean everything up and terminate the process with the highest exit
 * code seen so far */
NSI_FUNC_NORETURN void nsi_exit(int exit_code)
{
	int final_code = nsi_exit_inner(exit_code);

	exit(final_code);
}
/**
* Run all early native simulator initialization steps, including command
* line parsing and CPU start, until we are ready to let the HW models
* run via nsi_hws_one_event()
*
* Note: This API should normally only be called by the native simulator main()
*/
void nsi_init(int argc, char *argv[])
{
	/*
	 * Let's ensure that even if we are redirecting to a file, we get stdout
	 * and stderr line buffered (default for console)
	 * Note that glibc ignores size. But just in case we set a reasonable
	 * number in case somebody tries to compile against a different library
	 */
	setvbuf(stdout, NULL, _IOLBF, 512);
	setvbuf(stderr, NULL, _IOLBF, 512);

	/* Boot sequence: registered tasks and per-CPU hooks are interleaved
	 * in a fixed order; do not reorder these calls */
	nsi_run_tasks(NSITASK_PRE_BOOT_1_LEVEL);
	for (int i = 0; i < NSI_N_CPUS; i++) {
		nsif_cpun_pre_cmdline_hooks(i);
	}

	nsi_handle_cmd_line(argc, argv);

	nsi_run_tasks(NSITASK_PRE_BOOT_2_LEVEL);
	for (int i = 0; i < NSI_N_CPUS; i++) {
		nsif_cpun_pre_hw_init_hooks(i);
	}

	nsi_run_tasks(NSITASK_HW_INIT_LEVEL);
	nsi_hws_init();

	nsi_run_tasks(NSITASK_PRE_BOOT_3_LEVEL);

	/* Boot only the CPUs configured to auto-start (see nsi_cpu_ctrl) */
	nsi_cpu_auto_boot();

	nsi_run_tasks(NSITASK_FIRST_SLEEP_LEVEL);
}
/**
* Execute the simulator for at least the specified timeout, then
* return. Note that this does not affect event timing, so the "next
* event" may be significantly after the request if the hardware has
* not been configured to e.g. send an interrupt when expected.
*
* Note: This API should normally only be called by the native simulator main()
*/
void nsi_exec_for(uint64_t us)
{
	/* End time is fixed up front; the do-while guarantees at least one
	 * HW event is processed even for us == 0 */
	const uint64_t end_time = nsi_hws_get_time() + us;

	do {
		nsi_hws_one_event();
	} while (nsi_hws_get_time() < end_time);
}
#ifndef NSI_NO_MAIN
/**
*
* Note that this main() is not used when building fuzz cases,
* as libfuzzer has its own main(),
* and calls the "OS" through a per-case fuzz test entry point.
*/
int main(int argc, char *argv[])
{
	nsi_init(argc, argv);

	/* Run HW events forever; termination happens via nsi_exit() */
	for (;;) {
		nsi_hws_one_event();
	}

	/* This line should be unreachable */
	return 1; /* LCOV_EXCL_LINE */
}
#endif /* NSI_NO_MAIN */
``` | /content/code_sandbox/scripts/native_simulator/common/src/main.c | c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 836 |
```c
/*
*
*/
/**
* @brief Run the set of special NSI tasks corresponding to the given level
*
* @param level One of NSITASK_*_LEVEL as defined in nsi_tasks.h
*/
void nsi_run_tasks(int level)
{
	/* Section start markers provided by the linker script.
	 * The task sections are laid out consecutively in memory, so the end
	 * of one level's array is the start of the next level's array,
	 * and __nsi_tasks_end closes the last one. */
	extern void (*__nsi_PRE_BOOT_1_tasks_start[])(void);
	extern void (*__nsi_PRE_BOOT_2_tasks_start[])(void);
	extern void (*__nsi_HW_INIT_tasks_start[])(void);
	extern void (*__nsi_PRE_BOOT_3_tasks_start[])(void);
	extern void (*__nsi_FIRST_SLEEP_tasks_start[])(void);
	extern void (*__nsi_ON_EXIT_PRE_tasks_start[])(void);
	extern void (*__nsi_ON_EXIT_POST_tasks_start[])(void);
	extern void (*__nsi_tasks_end[])(void);

	/* Indexed by level; must stay in the same order as the NSITASK_*_LEVEL
	 * constants, with the end marker as the final entry */
	static void (**nsi_pre_tasks[])(void) = {
		__nsi_PRE_BOOT_1_tasks_start,
		__nsi_PRE_BOOT_2_tasks_start,
		__nsi_HW_INIT_tasks_start,
		__nsi_PRE_BOOT_3_tasks_start,
		__nsi_FIRST_SLEEP_tasks_start,
		__nsi_ON_EXIT_PRE_tasks_start,
		__nsi_ON_EXIT_POST_tasks_start,
		__nsi_tasks_end
	};

	void (**fptr)(void);

	/* Call every registered task for this level in link order,
	 * skipping NULL entries */
	for (fptr = nsi_pre_tasks[level]; fptr < nsi_pre_tasks[level+1];
		fptr++) {
		if (*fptr) { /* LCOV_EXCL_BR_LINE */
			(*fptr)();
		}
	}
}
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_tasks.c | c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 334 |
```c
/*
*
*/
#ifndef NSI_COMMON_SRC_NSI_SAFE_CALLL_H
#define NSI_COMMON_SRC_NSI_SAFE_CALLL_H
#include <stdbool.h>
#include "nsi_tracing.h"
#ifdef __cplusplus
extern "C" {
#endif
/* Branch-prediction hint: the error path is not expected to be taken */
#ifndef nsi_unlikely
#define nsi_unlikely(x) (__builtin_expect((bool)!!(x), false) != 0L)
#endif

/* Evaluate <a>; if it returns non-zero (error), print the failed expression
 * and abort the process */
#define NSI_SAFE_CALL(a) nsi_safe_call(a, #a)

/*
 * Helper for NSI_SAFE_CALL: error out if <test> is non-zero.
 * test_str is the stringified expression, used in the error message.
 */
static inline void nsi_safe_call(int test, const char *test_str)
{
	/* LCOV_EXCL_START */ /* See Note1 */
	if (nsi_unlikely(test)) {
		nsi_print_error_and_exit("Error on: %s\n",
					test_str);
	}
	/* LCOV_EXCL_STOP */
}
#ifdef __cplusplus
}
#endif
#endif /* NSI_COMMON_SRC_NSI_SAFE_CALLL_H */
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_safe_call.h | objective-c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 191 |
```c
/*
*
*/
/*
* Native simulator, CPU Thread emulation (nct)
*/
/**
* Native simulator single CPU threading emulation,
* an *optional* module provided by the Native simulator
* the hosted embedded OS / SW can use to emulate the threading
* context switching which would be handled by a OS CPU AL
*
* Principle of operation:
*
* The embedded OS threads are run as a set of native Linux pthreads.
* The embedded OS only sees one of this thread executing at a time.
*
* The hosted OS shall call nct_init() to initialize the state of an
* instance of this module, and nct_clean_up() once it desires to destroy it.
*
* For SOCs with several micro-controllers (AMP) one instance of this module
* would be instantiated per simulated uC and embedded OS.
*
* To create a new embedded thread, the hosted OS shall call nct_new_thread().
* To swap to a thread nct_swap_threads(), and to terminate a thread
* nct_abort_thread().
* The hosted OS can optionally use nct_first_thread_start() to swap
* to the "first thread".
*
* Whenever a thread calls nct_swap_threads(next_thread_idx) it will be blocked,
* and the thread identified by next_thread_idx will continue executing.
*
*
* Internal design:
*
* Which thread is running is controlled using {cond|mtx}_threads and
* currently_allowed_thread.
*
* The main part of the execution of each thread will occur in a fully
* synchronous and deterministic manner, and only when commanded by
* the embedded operating system kernel.
*
* The creation of a thread will spawn a new pthread whose start
* is asynchronous to the rest, until synchronized in nct_wait_until_allowed()
* below.
* Similarly aborting and canceling threads execute a tail in a quite an
* asynchronous manner.
*
* This implementation is meant to be portable in between fully compatible
* POSIX systems.
* A table (threads_table) is used to abstract the native pthreads.
* An index in this table is used to identify threads in the IF to the
* embedded OS.
*/
#define NCT_DEBUG_PRINTS 0
#include <pthread.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include "nct_if.h"
#include "nsi_internal.h"
#include "nsi_safe_call.h"
#if NCT_DEBUG_PRINTS
#define NCT_DEBUG(fmt, ...) nsi_print_trace(PREFIX fmt, __VA_ARGS__)
#else
#define NCT_DEBUG(...)
#endif

/* Prefix for all messages printed by this module
 * (typo fix: was "Tread Simulator") */
#define PREFIX "Thread Simulator: "
#define ERPREFIX PREFIX"error on "
#define NO_MEM_ERR PREFIX"Can't allocate memory\n"

#define NCT_ENABLE_CANCEL 0 /* See Note.c1 */
#define NCT_ALLOC_CHUNK_SIZE 64 /* In how big chunks we grow the thread table */
#define NCT_REUSE_ABORTED_ENTRIES 0
/* For the Zephyr OS, tests/kernel/threads/scheduling/schedule_api fails when setting
 * NCT_REUSE_ABORTED_ENTRIES => don't set it by now
 */
struct te_status_t;

/* One entry of the threads table: bookkeeping for one emulated thread */
struct threads_table_el {
	/* Pointer to the overall status of the threading emulator instance */
	struct te_status_t *ts_status;
	struct threads_table_el *next; /* Pointer to the next element of the table */
	int thread_idx; /* Index of this element in the threads_table*/

	/* Lifecycle state of this entry */
	enum {NOTUSED = 0, USED, ABORTING, ABORTED, FAILED} state;
	bool running;     /* Is this the currently running thread */
	pthread_t thread; /* Actual pthread_t as returned by the native kernel */
	/* For debugging: Unique, consecutive, thread number
	 * (sic: "thead_cnt" — the misspelled name is used throughout this file) */
	int thead_cnt;

	/*
	 * Pointer to data from the hosted OS architecture
	 * What that is, if anything, is up to that the hosted OS
	 */
	void *payload;
};
/* Overall status of one instance of the threading emulator */
struct te_status_t {
	struct threads_table_el *threads_table; /* Pointer to the threads table */
	int thread_create_count; /* (For debugging) Thread creation counter */
	int threads_table_size; /* Size of threads_table */
	/* Pointer to the hosted OS function to be called when a thread is started */
	void (*fptr)(void *payload);

	/*
	 * Conditional variable to block/awake all threads during swaps.
	 * (we only need 1 mutex and 1 cond variable for all threads)
	 */
	pthread_cond_t cond_threads;
	/* Mutex for the conditional variable cond_threads */
	pthread_mutex_t mtx_threads;
	/* Token which tells which thread is allowed to run now */
	int currently_allowed_thread;

	bool terminate; /* Are we terminating the program == cleaning up */
};
static void nct_exit_and_cleanup(struct te_status_t *this);
static struct threads_table_el *ttable_get_element(struct te_status_t *this, int index);
/**
* Helper function, run by a thread which is being aborted
*/
static void abort_tail(struct te_status_t *this, int this_th_nbr)
{
	struct threads_table_el *tt_el = ttable_get_element(this, this_th_nbr);

	NCT_DEBUG("Thread [%i] %i: %s: Aborting (exiting) (rel mut)\n",
		tt_el->thead_cnt,
		this_th_nbr,
		__func__);

	tt_el->running = false;
	tt_el->state = ABORTED;
	/* Releases the threads mutex and terminates this pthread => never returns */
	nct_exit_and_cleanup(this);
}
/**
* Helper function to block this thread until it is allowed again
*
* Note that we go out of this function (the while loop below)
* with the mutex locked by this particular thread.
* In normal circumstances, the mutex is only unlocked internally in
* pthread_cond_wait() while waiting for cond_threads to be signaled
*/
/* Note: the caller holds mtx_threads on entry, and this function returns
 * with it held again (pthread_cond_wait releases it only while waiting) */
static void nct_wait_until_allowed(struct te_status_t *this, int this_th_nbr)
{
	struct threads_table_el *tt_el = ttable_get_element(this, this_th_nbr);

	tt_el->running = false;

	NCT_DEBUG("Thread [%i] %i: %s: Waiting to be allowed to run (rel mut)\n",
		tt_el->thead_cnt,
		this_th_nbr,
		__func__);

	while (this_th_nbr != this->currently_allowed_thread) {
		pthread_cond_wait(&this->cond_threads, &this->mtx_threads);

		/* The wake-up may be because we were marked for abortion:
		 * abort_tail() then terminates this pthread and never returns */
		if (tt_el->state == ABORTING) {
			abort_tail(this, this_th_nbr);
		}
	}

	tt_el->running = true;

	NCT_DEBUG("Thread [%i] %i: %s(): I'm allowed to run! (hav mut)\n",
		tt_el->thead_cnt,
		this_th_nbr,
		__func__);
}
/**
* Helper function to let the thread <next_allowed_th> run
*
* Note: nct_let_run() can only be called with the mutex locked
*/
static void nct_let_run(struct te_status_t *this, int next_allowed_th)
{
#if NCT_DEBUG_PRINTS
	struct threads_table_el *tt_el = ttable_get_element(this, next_allowed_th);

	NCT_DEBUG("%s: We let thread [%i] %i run\n",
		__func__,
		tt_el->thead_cnt,
		next_allowed_th);
#endif

	/* Hand the run token to the chosen thread */
	this->currently_allowed_thread = next_allowed_th;

	/*
	 * We let all threads know one is able to run now (it may even be us
	 * again if fancied)
	 * Note that as we hold the mutex, they are going to be blocked until
	 * we reach our own nct_wait_until_allowed() while loop or abort_tail()
	 * mutex release
	 */
	NSI_SAFE_CALL(pthread_cond_broadcast(&this->cond_threads));
}
/**
* Helper function, run by a thread which is being ended
*/
/* Tail executed by a thread which is ending: release the threads mutex,
 * detach, and terminate this pthread. Never returns. */
static void nct_exit_and_cleanup(struct te_status_t *this)
{
	/* Hand the mutex back so the next allowed thread can proceed */
	NSI_SAFE_CALL(pthread_mutex_unlock(&this->mtx_threads));

	/* Nobody will join us: detach before exiting this pthread */
	pthread_detach(pthread_self());
	pthread_exit(NULL);
}
/**
* Let the ready thread run and block this managed thread until it is allowed again
*
* The hosted OS shall call this when it has decided to swap in/out two of its threads,
* from the thread that is being swapped out.
*
* Note: If called without having ever let another managed thread run / from a thread not
* managed by this nct instance, it will behave like nct_first_thread_start(),
* and terminate the calling thread while letting the managed thread
* <next_allowed_thread_nbr> continue.
*
* inputs:
* this_arg: Pointer to this thread emulator instance as returned by nct_init()
* next_allowed_thread_nbr: Identifier of the thread the hosted OS wants to swap in
*/
void nct_swap_threads(void *this_arg, int next_allowed_thread_nbr)
{
	struct te_status_t *this = (struct te_status_t *)this_arg;
	int this_th_nbr = this->currently_allowed_thread;

	nct_let_run(this, next_allowed_thread_nbr);

	/* -1 is the initial value of currently_allowed_thread (see nct_init):
	 * the caller is not a managed thread, so it is terminated here */
	if (this_th_nbr == -1) { /* This is the first time a thread was swapped in */
		NCT_DEBUG("%s: called from an unmanaged thread, terminating it\n",
			__func__);
		nct_exit_and_cleanup(this);
	}

	struct threads_table_el *tt_el = ttable_get_element(this, this_th_nbr);

	if (tt_el->state == ABORTING) {
		NCT_DEBUG("Thread [%i] %i: %s: Aborting curr.\n",
			tt_el->thead_cnt,
			this_th_nbr,
			__func__);
		/* Never returns: terminates this pthread */
		abort_tail(this, this_th_nbr);
	} else {
		/* Block until the hosted OS swaps this thread back in */
		nct_wait_until_allowed(this, this_th_nbr);
	}
}
/**
* Let the very first hosted thread run, and exit this thread.
*
* The hosted OS shall call this when it has decided to swap in into another
* thread, and wants to terminate the currently executing thread, which is not
* a thread managed by the thread emulator.
*
* This function allows to emulate a hosted OS doing its first swapping into one
* of its hosted threads from the init thread, abandoning/terminating the init
* thread.
*/
void nct_first_thread_start(void *this_arg, int next_allowed_thread_nbr)
{
	struct te_status_t *st = (struct te_status_t *)this_arg;

	/* Unblock the chosen hosted thread, then retire this (init) thread */
	nct_let_run(st, next_allowed_thread_nbr);
	NCT_DEBUG("%s: Init thread dying now (rel mut)\n",
		__func__);
	nct_exit_and_cleanup(st);
}
/**
* Handler called when any thread is cancelled or exits
*/
static void nct_cleanup_handler(void *arg)
{
	struct threads_table_el *element = (struct threads_table_el *)arg;
	struct te_status_t *this = element->ts_status;

	/*
	 * If we are not terminating, this is just an aborted thread,
	 * and the mutex was already released
	 * Otherwise, release the mutex so other threads which may be
	 * caught waiting for it could terminate
	 */
	if (!this->terminate) {
		return;
	}

	NCT_DEBUG("Thread %i: %s: Canceling (rel mut)\n",
		element->thread_idx,
		__func__);

	NSI_SAFE_CALL(pthread_mutex_unlock(&this->mtx_threads));

	/* We detach ourselves so nobody needs to join to us */
	pthread_detach(pthread_self());
}
/**
* Helper function to start a hosted thread as a POSIX thread:
* It will block the pthread until the embedded OS devices to "swap in"
* this thread.
*/
static void *nct_thread_starter(void *arg_el)
{
	struct threads_table_el *tt_el = (struct threads_table_el *)arg_el;
	struct te_status_t *this = tt_el->ts_status;
	int thread_idx = tt_el->thread_idx;

	NCT_DEBUG("Thread [%i] %i: %s: Starting\n",
		tt_el->thead_cnt,
		thread_idx,
		__func__);

	/*
	 * We block until all other running threads reach the while loop
	 * in nct_wait_until_allowed() and they release the mutex
	 */
	NSI_SAFE_CALL(pthread_mutex_lock(&this->mtx_threads));

	/*
	 * The program may have been finished before this thread ever got to run
	 */
	/* LCOV_EXCL_START */ /* See Note1 */
	if (!this->threads_table || this->terminate) {
		nct_cleanup_handler(arg_el);
		pthread_exit(NULL);
	}
	/* LCOV_EXCL_STOP */

	/* From here on, cancellation/exit of this pthread runs
	 * nct_cleanup_handler (paired with pthread_cleanup_pop below) */
	pthread_cleanup_push(nct_cleanup_handler, arg_el);

	NCT_DEBUG("Thread [%i] %i: %s: After start mutex (hav mut)\n",
		tt_el->thead_cnt,
		thread_idx,
		__func__);

	/*
	 * The thread would try to execute immediately, so we block it
	 * until allowed
	 */
	nct_wait_until_allowed(this, thread_idx);

	/* Hand over to the hosted OS thread entry (registered in nct_init) */
	this->fptr(tt_el->payload);

	/*
	 * We only reach this point if the thread actually returns which should
	 * not happen. But we handle it gracefully just in case
	 */
	/* LCOV_EXCL_START */
	nsi_print_trace(PREFIX"Thread [%i] %i [%lu] ended!?!\n",
			tt_el->thead_cnt,
			thread_idx,
			pthread_self());

	tt_el->running = false;
	tt_el->state = FAILED;

	pthread_cleanup_pop(1);

	return NULL;
	/* LCOV_EXCL_STOP */
}
/*
 * Return a pointer to the threads table entry <index>.
 *
 * The table is a linked list of NCT_ALLOC_CHUNK_SIZE-sized chunks
 * (see ttable_get_empty_slot): walk whole chunks first, then index
 * into the final one. Errors out on an out-of-range index.
 */
static struct threads_table_el *ttable_get_element(struct te_status_t *this, int index)
{
	struct threads_table_el *threads_table = this->threads_table;

	if (index >= this->threads_table_size) { /* LCOV_EXCL_BR_LINE */
		/* Bug fix: the format string has three conversions, but
		 * __func__ (for the leading %s) was not being passed,
		 * which is undefined behavior on this error path */
		nsi_print_error_and_exit("%s: Programming error, attempted out of bound access to "
					"thread table (%i>=%i)\n",
					__func__, index, this->threads_table_size); /* LCOV_EXCL_LINE */
	}

	while (index >= NCT_ALLOC_CHUNK_SIZE) {
		index -= NCT_ALLOC_CHUNK_SIZE;
		threads_table = threads_table[NCT_ALLOC_CHUNK_SIZE - 1].next;
	}

	return &threads_table[index];
}
/**
* Return the first free entry index in the threads table
*/
static int ttable_get_empty_slot(struct te_status_t *this)
{
	struct threads_table_el *tt_el = this->threads_table;

	/* Entries are chained via ->next across chunks, so a linear walk
	 * covers the whole table */
	for (int i = 0; i < this->threads_table_size; i++, tt_el = tt_el->next) {
		if ((tt_el->state == NOTUSED)
			|| (NCT_REUSE_ABORTED_ENTRIES
			&& (tt_el->state == ABORTED))) {
			return i;
		}
	}

	/*
	 * else, we run out of table without finding an index
	 * => we expand the table
	 */

	struct threads_table_el *new_chunk;

	new_chunk = calloc(NCT_ALLOC_CHUNK_SIZE, sizeof(struct threads_table_el));
	if (new_chunk == NULL) { /* LCOV_EXCL_BR_LINE */
		nsi_print_error_and_exit(NO_MEM_ERR); /* LCOV_EXCL_LINE */
	}

	/* Link new chunk to last element */
	tt_el = ttable_get_element(this, this->threads_table_size-1);
	tt_el->next = new_chunk;

	this->threads_table_size += NCT_ALLOC_CHUNK_SIZE;

	/* Link all new elements together */
	for (int i = 0 ; i < NCT_ALLOC_CHUNK_SIZE - 1; i++) {
		new_chunk[i].next = &new_chunk[i+1];
	}
	new_chunk[NCT_ALLOC_CHUNK_SIZE - 1].next = NULL;

	/* The first newly created entry is good, we return it */
	return this->threads_table_size - NCT_ALLOC_CHUNK_SIZE;
}
/**
* Create a new pthread for the new hosted OS thread.
*
* Returns a unique integer thread identifier/index, which should be used
* to refer to this thread for future calls to the thread emulator.
*
* It takes as parameter a pointer which will be passed to
* function registered in nct_init when the thread is swapped in.
*
* Note that the thread is created but not swapped in.
* The new thread execution will be held until nct_swap_threads()
* (or nct_first_thread_start()) is called with this newly created
* thread number.
*/
int nct_new_thread(void *this_arg, void *payload)
{
	struct te_status_t *this = (struct te_status_t *)this_arg;
	struct threads_table_el *tt_el;
	int t_slot;

	/* Reuse a free table slot if any, otherwise grow the table */
	t_slot = ttable_get_empty_slot(this);
	tt_el = ttable_get_element(this, t_slot);

	tt_el->state = USED;
	tt_el->running = false;
	tt_el->thead_cnt = this->thread_create_count++;
	tt_el->payload = payload;
	tt_el->ts_status = this;
	tt_el->thread_idx = t_slot;

	/* The new pthread blocks in nct_thread_starter until swapped in */
	NSI_SAFE_CALL(pthread_create(&tt_el->thread,
				  NULL,
				  nct_thread_starter,
				  (void *)tt_el));

	NCT_DEBUG("%s created thread [%i] %i [%lu]\n",
		__func__,
		tt_el->thead_cnt,
		t_slot,
		tt_el->thread);

	return t_slot;
}
/**
* Initialize an instance of the threading emulator.
*
* Returns a pointer to the initialize threading emulator instance.
* This pointer shall be passed to all subsequent calls of the
* threading emulator when interacting with this particular instance.
*
* The input fptr is a pointer to the hosted OS function
* to be called each time a thread which is created on its request
* with nct_new_thread() is swapped in (from that thread context)
*/
void *nct_init(void (*fptr)(void *))
{
	struct te_status_t *this;

	/*
	 * Note: This (and the calloc below) won't be free'd by this code
	 * but left for the OS to clear at process end.
	 * This is a conscious choice, see nct_clean_up() for more info.
	 * If you got here due to valgrind's leak report, please use the
	 * provided valgrind suppression file valgrind.supp
	 */
	this = calloc(1, sizeof(struct te_status_t));
	if (this == NULL) { /* LCOV_EXCL_BR_LINE */
		nsi_print_error_and_exit(NO_MEM_ERR); /* LCOV_EXCL_LINE */
	}

	this->fptr = fptr;
	this->thread_create_count = 0;
	/* -1 == no managed thread has been swapped in yet
	 * (checked in nct_swap_threads()) */
	this->currently_allowed_thread = -1;

	NSI_SAFE_CALL(pthread_cond_init(&this->cond_threads, NULL));
	NSI_SAFE_CALL(pthread_mutex_init(&this->mtx_threads, NULL));

	/* First chunk of the threads table, with its entries chained via ->next */
	this->threads_table = calloc(NCT_ALLOC_CHUNK_SIZE,
				sizeof(struct threads_table_el));
	if (this->threads_table == NULL) { /* LCOV_EXCL_BR_LINE */
		nsi_print_error_and_exit(NO_MEM_ERR); /* LCOV_EXCL_LINE */
	}

	this->threads_table_size = NCT_ALLOC_CHUNK_SIZE;

	for (int i = 0 ; i < NCT_ALLOC_CHUNK_SIZE - 1; i++) {
		this->threads_table[i].next = &this->threads_table[i+1];
	}
	this->threads_table[NCT_ALLOC_CHUNK_SIZE - 1].next = NULL;

	/* The mutex is taken here and handed over between managed threads
	 * from now on (released while they wait on cond_threads) */
	NSI_SAFE_CALL(pthread_mutex_lock(&this->mtx_threads));

	return (void *)this;
}
/**
* Free any allocated memory by the threading emulator and clean up.
* Note that this function cannot be called from a SW thread
* (the CPU is assumed halted. Otherwise we would cancel ourselves)
*
* Note: This function cannot guarantee the threads will be cancelled before the HW
* thread exists. The only way to do that, would be to wait for each of them in
* a join without detaching them, but that could lead to locks in some
* convoluted cases; as a call to this function can come due to a hosted OS
* assert or other error termination, we better do not assume things are working fine.
* => we prefer the supposed memory leak report from valgrind, and ensure we
* will not hang.
*/
void nct_clean_up(void *this_arg)
{
	struct te_status_t *this = (struct te_status_t *)this_arg;

	if (!this || !this->threads_table) { /* LCOV_EXCL_BR_LINE */
		return; /* LCOV_EXCL_LINE */
	}

	/* Flag all managed pthreads (see nct_cleanup_handler /
	 * nct_thread_starter) that the program is going down */
	this->terminate = true;

#if (NCT_ENABLE_CANCEL)
	struct threads_table_el *tt_el = this->threads_table;

	for (int i = 0; i < this->threads_table_size; i++, tt_el = tt_el->next) {
		if (tt_el->state != USED) {
			continue;
		}

		/* LCOV_EXCL_START */
		if (pthread_cancel(tt_el->thread)) {
			nsi_print_warning(
				PREFIX"cleanup: could not stop thread %i\n",
				i);
		}
		/* LCOV_EXCL_STOP */
	}
#endif
	/*
	 * This is the cleanup we do not do:
	 *
	 * free(this->threads_table);
	 *   Including all chunks
	 * this->threads_table = NULL;
	 *
	 * (void)pthread_cond_destroy(&this->cond_threads);
	 * (void)pthread_mutex_destroy(&this->mtx_threads);
	 *
	 * free(this);
	 */
}
/*
* Mark a thread as being aborted. This will result in the underlying pthread
* being terminated some time later:
* If the thread is marking itself as aborting, as soon as it is swapped out
* by the hosted (embedded) OS
* If it is marking another thread, at some non-specific time in the future
* (But note that no embedded part of the aborted thread will execute anymore)
*
* * thread_idx : The thread identifier as provided during creation (return from nct_new_thread())
*/
void nct_abort_thread(void *this_arg, int thread_idx)
{
	struct te_status_t *this = (struct te_status_t *)this_arg;
	struct threads_table_el *tt_el = ttable_get_element(this, thread_idx);

	if (thread_idx == this->currently_allowed_thread) {
		NCT_DEBUG("Thread [%i] %i: %s Marked myself "
			"as aborting\n",
			tt_el->thead_cnt,
			thread_idx,
			__func__);
	} else {
		if (tt_el->state != USED) { /* LCOV_EXCL_BR_LINE */
			/* The thread may have been already aborted before */
			return; /* LCOV_EXCL_LINE */
		}

		NCT_DEBUG("Aborting not scheduled thread [%i] %i\n",
			tt_el->thead_cnt,
			thread_idx);
	}
	/* The actual pthread exit happens later, in abort_tail(), when the
	 * aborted thread next observes this state */
	tt_el->state = ABORTING;
	/*
	 * Note: the native thread will linger in RAM until it catches the
	 * mutex or awakes on the condition.
	 * Note that even if we would pthread_cancel() the thread here, that
	 * would be the case, but with a pthread_cancel() the mutex state would
	 * be uncontrolled
	 */
}
/*
* Return a unique thread identifier for this thread for this
* run. This identifier is only meant for debug purposes
*
* thread_idx is the value returned by nct_new_thread()
*/
int nct_get_unique_thread_id(void *this_arg, int thread_idx)
{
struct te_status_t *this = (struct te_status_t *)this_arg;
struct threads_table_el *tt_el = ttable_get_element(this, thread_idx);
return tt_el->thead_cnt;
}
/*
* Notes about coverage:
*
* Note1:
*
* This condition will only be triggered in very unlikely cases
* (once every few full regression runs).
* It is therefore excluded from the coverage report to avoid confusing
* developers.
*
* Background: A pthread is created as soon as the hosted kernel creates
* a hosted thread. A pthread creation is an asynchronous process handled by the
* host kernel.
*
* This emulator normally keeps only 1 thread executing at a time.
* But part of the pre-initialization during creation of a new thread
* and some cleanup at the tail of the thread termination are executed
* in parallel to other threads.
* That is, the execution of those code paths is a bit indeterministic.
*
* Only when the hosted kernel attempts to swap to a new thread does this
* emulator need to wait until its pthread is ready and initialized
* (has reached nct_wait_until_allowed())
*
* In some cases (tests) hosted threads are created which are never actually needed
* (typically the idle thread). That means the test may finish before that
* thread's underlying pthread has reached nct_wait_until_allowed().
*
 * In these unlikely cases the initialization or cleanup of the thread follows
 * non-typical code paths.
 * These code paths are there to ensure things always work, no matter
* the load of the host. Without them, very rare & mysterious segfault crashes
* would occur.
* But as they are very atypical and only triggered with some host loads,
* they will be covered in the coverage reports only rarely.
*
* Note2:
*
* Some other code will never or only very rarely trigger and is therefore
* excluded with LCOV_EXCL_LINE
*
*
* Notes about (memory) cleanup:
*
* Note.c1:
*
* In some very rare cases in very loaded machines, a race in the glibc pthread_cancel()
* seems to be triggered.
* In this, the cancelled thread cleanup overtakes the pthread_cancel() code, and frees the
* pthread structure before pthread_cancel() has finished, resulting in a dereference into already
* free'd memory, and therefore a segfault.
* Calling pthread_cancel() during cleanup is not required beyond preventing a valgrind
* memory leak report (all threads will be canceled immediately on exit).
 * Therefore we do not do this, to avoid these very rare crashes.
*/
``` | /content/code_sandbox/scripts/native_simulator/common/src/nct.c | c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 5,506 |
```c
/*
*
*/
/**
* Overall HW models scheduler for the native simulator
*
* Models events are registered with NSI_HW_EVENT().
*/
#include <stdint.h>
#include <signal.h>
#include <stddef.h>
#include <inttypes.h>
#include "nsi_tracing.h"
#include "nsi_main.h"
#include "nsi_safe_call.h"
#include "nsi_hw_scheduler.h"
#include "nsi_hws_models_if.h"
uint64_t nsi_simu_time; /* The actual time as known by the HW models */
static uint64_t end_of_time = NSI_NEVER; /* When will this device stop */
extern struct nsi_hw_event_st __nsi_hw_events_start[];
extern struct nsi_hw_event_st __nsi_hw_events_end[];
static unsigned int number_of_events;
static unsigned int next_timer_index;
static uint64_t next_timer_time;
/* Have we received a SIGTERM or SIGINT */
static volatile sig_atomic_t signaled_end;
/**
 * SIGTERM/SIGINT handler: only records that a termination request
 * arrived; the HW main loop checks signaled_end on each iteration.
 */
static void nsi_hws_signal_end_handler(int sig)
{
	(void) sig; /* unused */
	signaled_end = 1;
}
/**
* Set the handler for SIGTERM and SIGINT which will cause the
* program to exit gracefully when they are received the 1st time
*
* Note that our handler only sets a variable indicating the signal was
* received, and in each iteration of the hw main loop this variable is
* evaluated.
* If for some reason (the program is stuck) we never evaluate it, the program
* would never exit.
* Therefore we set SA_RESETHAND: This way, the 2nd time the signal is received
* the default handler would be called to terminate the program no matter what.
*
* Note that SA_RESETHAND requires either _POSIX_C_SOURCE>=200809L or
* _XOPEN_SOURCE>=500
*/
static void nsi_hws_set_sig_handler(void)
{
struct sigaction act;
act.sa_handler = nsi_hws_signal_end_handler;
NSI_SAFE_CALL(sigemptyset(&act.sa_mask));
act.sa_flags = SA_RESETHAND;
NSI_SAFE_CALL(sigaction(SIGTERM, &act, NULL));
NSI_SAFE_CALL(sigaction(SIGINT, &act, NULL));
}
/**
 * Advance the simulated time up to the next scheduled event, and exit
 * the process if either the end of simulated time was reached or a
 * SIGTERM/SIGINT was received.
 */
static void nsi_hws_sleep_until_next_event(void)
{
	if (next_timer_time >= nsi_simu_time) { /* LCOV_EXCL_BR_LINE */
		nsi_simu_time = next_timer_time;
	} else {
		/* An event time in the past points to a corrupted/buggy HW
		 * model. Message fixed: the triggering condition is strictly
		 * next_timer_time < nsi_simu_time (was printed as "<="). */
		/* LCOV_EXCL_START */
		nsi_print_warning("next_timer_time corrupted (%"PRIu64" < %"
				  PRIu64", timer idx=%i)\n",
				  next_timer_time,
				  nsi_simu_time,
				  next_timer_index);
		/* LCOV_EXCL_STOP */
	}

	if (signaled_end || (nsi_simu_time > end_of_time)) {
		nsi_print_trace("\nStopped at %.3Lfs\n",
				((long double)nsi_simu_time)/1.0e6L);
		nsi_exit(0);
	}
}
/**
* Find in between all events timers which is the next one.
* (and update the internal next_timer_* accordingly)
*/
void nsi_hws_find_next_event(void)
{
next_timer_index = 0;
next_timer_time = *__nsi_hw_events_start[0].timer;
for (unsigned int i = 1; i < number_of_events ; i++) {
if (next_timer_time > *__nsi_hw_events_start[i].timer) {
next_timer_index = i;
next_timer_time = *__nsi_hw_events_start[i].timer;
}
}
}
/**
 * Return the (simulated) time at which the next scheduled HW event
 * will trigger.
 */
uint64_t nsi_hws_get_next_event_time(void)
{
	return next_timer_time;
}
/**
 * Run the next scheduled HW event, first advancing the simulated time
 * to the point at which that event triggers.
 */
void nsi_hws_one_event(void)
{
	nsi_hws_sleep_until_next_event();

	if (next_timer_index >= number_of_events) { /* LCOV_EXCL_BR_LINE */
		nsi_print_error_and_exit("next_timer_index corrupted\n"); /* LCOV_EXCL_LINE */
	} else {
		__nsi_hw_events_start[next_timer_index].callback();
	}

	/* Reschedule: the callback may have changed any event's timer */
	nsi_hws_find_next_event();
}
/**
 * Set the simulated time at which the process will stop.
 *
 * new_end_of_time: absolute simulated time (same unit as nsi_simu_time);
 * once nsi_simu_time exceeds it, the next event step exits the process.
 */
void nsi_hws_set_end_of_time(uint64_t new_end_of_time)
{
	end_of_time = new_end_of_time;
}
/**
 * Initialize the HW scheduler.
 *
 * Note that the HW models should register their initialization functions
 * as NSI_TASKS of HW_INIT level.
 */
void nsi_hws_init(void)
{
	nsi_hws_set_sig_handler();

	/* The events are collected in a linker section delimited by these
	 * two symbols; count them before picking the first event */
	number_of_events = __nsi_hw_events_end - __nsi_hw_events_start;

	nsi_hws_find_next_event();
}
/**
 * Function to free any resources allocated by the HW scheduler
 *
 * Note that the HW models should register their cleanup functions
 * as NSI_TASKS of ON_EXIT_PRE/POST levels.
 *
 * (The scheduler itself currently holds no dynamically allocated
 * resources, so this is an intentionally empty placeholder kept for
 * API symmetry with nsi_hws_init())
 */
void nsi_hws_cleanup(void)
{
}
``` | /content/code_sandbox/scripts/native_simulator/common/src/nsi_hw_scheduler.c | c | 2016-05-26T17:54:19 | 2024-08-16T18:09:06 | zephyr | zephyrproject-rtos/zephyr | 10,307 | 1,087 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.